ext | sha | content
---|---|---|
py | 1a44018ff89c87707cb68e9984ba5ce71aa891f7 | r = 'Y'
while r != 'N':
    n = int(input('Enter a number: '))
    r = str(input('Do you want to continue (Y/N)? ')).upper()
print('Done')
|
py | 1a4401c5c8fb571c260f97a79e73cbbdf6c37357 | from flask import render_template, Blueprint
from flask_login import login_required, current_user
import datetime
from project import app, db, localSystem
from project.models import *
home_blueprint = Blueprint(
'home', __name__,
template_folder = 'templates'
)
@home_blueprint.route('/')
def home():
localSystem = BoxOffice.query.first()
data = {}
data['news'] = db.session.query(Announcement).all()
data['changes'] = db.session.query(MovieChange).all()
data['dateChanges'] = db.session.query(DateChange).all()
data['boxOffice'] = Results.query.filter_by(date=(localSystem.currentDate - datetime.timedelta(days=1))).order_by(Results.movie_gross.desc()).all()
return render_template("index.html", user=current_user, system=localSystem, data=data)
|
py | 1a4402ae590e72824c8ab9e6a41dca1c8434a4bb | from table.CSVTable import CSVTable
from node_exec.base_nodes import defNode
CSV_IDENTIFIER = 'CSV'
@defNode(name='Open CSV Table', returnNames=['table'], identifier=CSV_IDENTIFIER)
def openCSVTable(path, separator=';', encodingOverride=None):
return CSVTable(path, separator, encodingOverride=encodingOverride) |
py | 1a4403ff4caf4c9d89ad9b0329f39d6fc9fa968e | #!/usr/bin/env vpython
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import subprocess
import sys
import tempfile
import time
import unittest
import mock
from parameterized import parameterized
import test_runner
class TestRunnerTest(unittest.TestCase):
def setUp(self):
logging.disable(logging.CRITICAL)
time.sleep = mock.Mock()
def tearDown(self):
logging.disable(logging.NOTSET)
@parameterized.expand([
'url_unittests',
'./url_unittests',
'out/release/url_unittests',
'./out/release/url_unittests',
])
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
@mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
# Tests that the test runner doesn't attempt to download ash-chrome if not
# required.
def test_do_not_require_ash_chrome(self, command, mock_popen, mock_download,
_):
args = ['script_name', 'test', command]
with mock.patch.object(sys, 'argv', args):
test_runner.Main()
self.assertEqual(1, mock_popen.call_count)
mock_popen.assert_called_with([command])
self.assertFalse(mock_download.called)
@parameterized.expand([
'browser_tests',
'components_browsertests',
'content_browsertests',
'lacros_chrome_browsertests',
])
@mock.patch.object(os,
'listdir',
return_value=['wayland-0', 'wayland-0.lock'])
@mock.patch.object(tempfile,
'mkdtemp',
side_effect=['/tmp/xdg', '/tmp/ash-data'])
@mock.patch.object(os.environ, 'copy', side_effect=[{}, {}])
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(test_runner,
'_GetLatestVersionOfAshChrome',
return_value='793554')
@mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
@mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
# Tests that the test runner downloads and spawns ash-chrome if ash-chrome is
# required.
def test_require_ash_chrome(self, command, mock_popen, mock_download, *_):
args = ['script_name', 'test', command]
with mock.patch.object(sys, 'argv', args):
test_runner.Main()
mock_download.assert_called_with('793554')
self.assertEqual(2, mock_popen.call_count)
ash_chrome_args = mock_popen.call_args_list[0][0][0]
self.assertTrue(ash_chrome_args[0].endswith(
'build/lacros/prebuilt_ash_chrome/793554/test_ash_chrome'))
expected_ash_chrome_args = [
'--user-data-dir=/tmp/ash-data',
'--enable-wayland-server',
'--no-startup-window',
]
if command == 'lacros_chrome_browsertests':
expected_ash_chrome_args.append(
'--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock')
self.assertListEqual(expected_ash_chrome_args, ash_chrome_args[1:])
ash_chrome_env = mock_popen.call_args_list[0][1].get('env', {})
self.assertDictEqual({'XDG_RUNTIME_DIR': '/tmp/xdg'}, ash_chrome_env)
test_args = mock_popen.call_args_list[1][0][0]
if command == 'lacros_chrome_browsertests':
self.assertListEqual([
command,
'--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock'
], test_args)
else:
self.assertListEqual([command], test_args)
test_env = mock_popen.call_args_list[1][1].get('env', {})
self.assertDictEqual(
{
'XDG_RUNTIME_DIR': '/tmp/xdg',
'EGL_PLATFORM': 'surfaceless'
}, test_env)
@mock.patch.object(os,
'listdir',
return_value=['wayland-0', 'wayland-0.lock'])
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(test_runner,
'_GetLatestVersionOfAshChrome',
return_value='793554')
@mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
@mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
  # Tests that when an ash-chrome version is specified, that version is used
# instead of the latest one.
def test_specify_ash_chrome_version(self, mock_popen, mock_download, *_):
args = [
'script_name', 'test', 'browser_tests', '--ash-chrome-version', '781122'
]
with mock.patch.object(sys, 'argv', args):
test_runner.Main()
mock_download.assert_called_with('781122')
@mock.patch.object(os,
'listdir',
return_value=['wayland-0', 'wayland-0.lock'])
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
@mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
  # Tests that if an ash-chrome version is specified, ash-chrome is used to run
  # tests anyway, even if |_TARGETS_REQUIRE_ASH_CHROME| indicates that
  # ash-chrome is not required.
def test_overrides_do_not_require_ash_chrome(self, mock_popen, mock_download,
*_):
args = [
'script_name', 'test', './url_unittests', '--ash-chrome-version',
'793554'
]
with mock.patch.object(sys, 'argv', args):
test_runner.Main()
mock_download.assert_called_with('793554')
self.assertEqual(2, mock_popen.call_count)
@mock.patch.object(os,
'listdir',
return_value=['wayland-0', 'wayland-0.lock'])
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(test_runner, '_GetLatestVersionOfAshChrome')
@mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
@mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
# Tests that when an ash-chrome path is specified, the test runner doesn't try
# to download prebuilt ash-chrome.
def test_specify_ash_chrome_path(self, mock_popen, mock_download,
mock_get_latest_version, *_):
args = [
'script_name',
'test',
'browser_tests',
'--ash-chrome-path',
'/ash/test_ash_chrome',
]
with mock.patch.object(sys, 'argv', args):
test_runner.Main()
self.assertFalse(mock_get_latest_version.called)
self.assertFalse(mock_download.called)
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary')
@mock.patch.object(subprocess, 'Popen', return_value=mock.Mock())
# Tests that arguments not known to the test runner are forwarded to the
# command that invokes tests.
def test_command_arguments(self, mock_popen, mock_download, _):
args = [
'script_name', 'test', './url_unittests', '--gtest_filter=Suite.Test'
]
with mock.patch.object(sys, 'argv', args):
test_runner.Main()
mock_popen.assert_called_with(
['./url_unittests', '--gtest_filter=Suite.Test'])
self.assertFalse(mock_download.called)
if __name__ == '__main__':
unittest.main()
|
py | 1a4406cdfb42445c51184e76b3c2c660bd372dbd | from direct.directnotify import DirectNotifyGlobal
import ShtikerPage
from direct.gui.DirectGui import *
from panda3d.core import *
from toontown.toonbase import TTLocalizer
from toontown.estate import FlowerBrowser
from toontown.estate import GardenGlobals
from toontown.estate import FlowerPicker
from toontown.estate import SpecialsPhoto
from toontown.toontowngui import TTDialog
GardenPage_Basket = 0
GardenPage_Collection = 1
GardenPage_Trophy = 2
GardenPage_Specials = 3
TROPHIES_PER_ROW = 5
class GardenPage(ShtikerPage.ShtikerPage):
notify = DirectNotifyGlobal.directNotify.newCategory('GardenPage')
def __init__(self):
self.notify.debug('__init__')
ShtikerPage.ShtikerPage.__init__(self)
self.mode = GardenPage_Basket
self.accept('use-special-response', self.useSpecialDone)
self.resultDialog = None
return
def enter(self):
self.notify.debug('enter')
if not hasattr(self, 'title'):
self.load()
self.setMode(self.mode, 1)
self.accept(localAvatar.uniqueName('flowerBasketChange'), self.updatePage)
ShtikerPage.ShtikerPage.enter(self)
def exit(self):
self.notify.debug('exit')
if hasattr(self, 'picker'):
self.picker.hide()
if hasattr(self, 'browser'):
self.browser.hide()
if hasattr(self, 'specialsFrame'):
self.specialsFrame.hide()
if hasattr(self, 'specialsPhoto'):
self.specialsPhoto.hide()
if hasattr(self, 'useSpecialButton'):
self.hide()
self.cleanupResultDialog()
ShtikerPage.ShtikerPage.exit(self)
def load(self):
self.notify.debug('load')
ShtikerPage.ShtikerPage.load(self)
gui = loader.loadModel('phase_3.5/models/gui/fishingBook')
trophyCase = gui.find('**/trophyCase1')
trophyCase.find('glass1').reparentTo(trophyCase, -1)
trophyCase.find('shelf').reparentTo(trophyCase, -1)
self.trophyCase = trophyCase
self.title = DirectLabel(parent=self, relief=None, text='', text_scale=0.1, pos=(0, 0, 0.65))
normalColor = (1, 1, 1, 1)
clickColor = (0.8, 0.8, 0, 1)
rolloverColor = (0.15, 0.82, 1.0, 1)
        disabledColor = (1.0, 0.98, 0.15, 1)
        self.basketTab = DirectButton(parent=self, relief=None, text=TTLocalizer.GardenPageBasketTab, text_scale=TTLocalizer.GPbasketTab, text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface1'), image_pos=(0.55, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=disabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[GardenPage_Basket], pos=(0.92, 0, 0.55))
        self.collectionTab = DirectButton(parent=self, relief=None, text=TTLocalizer.GardenPageCollectionTab, text_scale=TTLocalizer.GPcollectionTab, text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface2'), image_pos=(0.12, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=disabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[GardenPage_Collection], pos=(0.92, 0, 0.1))
        self.trophyTab = DirectButton(parent=self, relief=None, text=TTLocalizer.GardenPageTrophyTab, text_scale=TTLocalizer.GPtrophyTab, text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface3'), image_pos=(-0.28, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=disabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[GardenPage_Trophy], pos=(0.92, 0, -0.3))
        self.specialsTab = DirectButton(parent=self, relief=None, text=TTLocalizer.GardenPageSpecialsTab, text_scale=TTLocalizer.GPspecialsTab, text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface3'), image_pos=(-0.28, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=disabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[GardenPage_Specials], pos=(0.92, 0, -0.3))
self.basketTab.setPos(-0.75, 0, 0.775)
self.collectionTab.setPos(-0.33, 0, 0.775)
self.trophyTab.setPos(0.09, 0, 0.775)
self.specialsTab.setPos(0.51, 0, 0.775)
gui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
self.gardenSpecialsList = DirectScrolledList(parent=self, relief=None, incButton_image=(gui.find('**/FndsLst_ScrollUp'),
gui.find('**/FndsLst_ScrollDN'),
gui.find('**/FndsLst_ScrollUp_Rllvr'),
gui.find('**/FndsLst_ScrollUp')), incButton_relief=None, incButton_pos=(0.0, 0.0, -1.1), incButton_image1_color=Vec4(1.0, 0.9, 0.4, 1.0), incButton_image3_color=Vec4(1.0, 1.0, 0.6, 0.5), incButton_scale=(1.0, 1.0, -1.0), decButton_image=(gui.find('**/FndsLst_ScrollUp'),
gui.find('**/FndsLst_ScrollDN'),
gui.find('**/FndsLst_ScrollUp_Rllvr'),
gui.find('**/FndsLst_ScrollUp')), decButton_relief=None, decButton_pos=(0.0, 0.0, 0.117), decButton_image1_color=Vec4(1.0, 1.0, 0.6, 1.0), decButton_image3_color=Vec4(1.0, 1.0, 0.6, 0.6), itemFrame_pos=(-0.2, 0.0, 0.05), itemFrame_relief=None, numItemsVisible=18, items=[], pos=(-0.6, 0, 0.45))
self.gardenSpecialsList.hide()
self.specialsFrame = DirectFrame(parent=self, relief=None, pos=(0.45, 0.0, 0.25), text='', text_wordwrap=14.4, text_pos=(0, -0.46), text_scale=0.06)
self.specialsInfo = DirectLabel(parent=self.specialsFrame, relief=None, pos=(0.0, 0.0, -0.0), text=' ', text_wordwrap=12.4, text_pos=(0, -0.46), text_scale=0.06)
self.specialsPhoto = SpecialsPhoto.SpecialsPhoto(-1, parent=self.specialsFrame)
self.specialsPhoto.setBackBounds(-0.3, 0.3, -0.235, 0.25)
self.specialsPhoto.setBackColor(1.0, 1.0, 0.74901, 1.0)
buttons = loader.loadModel('phase_3/models/gui/dialog_box_buttons_gui')
okImageList = (buttons.find('**/ChtBx_OKBtn_UP'), buttons.find('**/ChtBx_OKBtn_DN'), buttons.find('**/ChtBx_OKBtn_Rllvr'))
self.useSpecialButton = DirectButton(parent=self, relief=None, image=okImageList, pos=(0.45, 0, -0.5), text=TTLocalizer.UseSpecial, text_scale=0.06, text_pos=(0, -0.1), command=self.__useSpecial)
buttons.removeNode()
return
def setMode(self, mode, updateAnyways = 0):
messenger.send('wakeup')
if not updateAnyways:
if self.mode == mode:
return
else:
self.mode = mode
self.gardenSpecialsList.hide()
self.specialsPhoto.hide()
self.specialsFrame.hide()
self.useSpecialButton.hide()
if mode == GardenPage_Basket:
self.title['text'] = TTLocalizer.GardenPageTitleBasket
if not hasattr(self, 'picker'):
self.createFlowerPicker()
self.picker.show()
if hasattr(self, 'browser'):
self.browser.hide()
if hasattr(self, 'trophyFrame'):
self.trophyFrame.hide()
self.basketTab['state'] = DGG.DISABLED
self.collectionTab['state'] = DGG.NORMAL
self.trophyTab['state'] = DGG.NORMAL
self.specialsTab['state'] = DGG.NORMAL
elif mode == GardenPage_Collection:
self.title['text'] = TTLocalizer.GardenPageTitleCollection
if hasattr(self, 'picker'):
self.picker.hide()
if not hasattr(self, 'browser'):
self.createAlbumBrowser()
self.browser.show()
if hasattr(self, 'trophyFrame'):
self.trophyFrame.hide()
self.basketTab['state'] = DGG.NORMAL
self.collectionTab['state'] = DGG.DISABLED
self.trophyTab['state'] = DGG.NORMAL
self.specialsTab['state'] = DGG.NORMAL
elif mode == GardenPage_Trophy:
self.title['text'] = TTLocalizer.GardenPageTitleTrophy
if hasattr(self, 'picker'):
self.picker.hide()
if hasattr(self, 'browser'):
self.browser.hide()
if not hasattr(self, 'trophyFrame'):
self.createGardenTrophyFrame()
self.trophyFrame.show()
self.basketTab['state'] = DGG.NORMAL
self.collectionTab['state'] = DGG.NORMAL
self.trophyTab['state'] = DGG.DISABLED
self.specialsTab['state'] = DGG.NORMAL
elif mode == GardenPage_Specials:
self.title['text'] = TTLocalizer.GardenPageTitleSpecials
if hasattr(self, 'picker'):
self.picker.hide()
if hasattr(self, 'browser'):
self.browser.hide()
if hasattr(self, 'trophyFrame'):
self.trophyFrame.hide()
self.basketTab['state'] = DGG.NORMAL
self.collectionTab['state'] = DGG.NORMAL
self.trophyTab['state'] = DGG.NORMAL
self.specialsTab['state'] = DGG.DISABLED
self.gardenSpecialsList.show()
specialsList = localAvatar.getGardenSpecials()
self.specialsPhoto.show()
self.specialsFrame.show()
self.createGardenSpecialsList()
self.updatePage()
def createGardenSpecialsList(self):
self.clearGS()
self.specialsInfo['text'] = ''
self.useSpecialButton.hide()
self.specialsPhoto.hide()
self.specialsPhoto.update(-1)
self.specialsPhoto.show()
specialsList = localAvatar.getGardenSpecials()
firstEntry = None
if len(specialsList) == 0:
self.gardenSpecialsList['incButton_image1_color'] = Vec4(1.0, 0.9, 0.4, 0.0)
self.gardenSpecialsList['incButton_image3_color'] = Vec4(1.0, 0.9, 0.4, 0.0)
self.gardenSpecialsList['decButton_image1_color'] = Vec4(1.0, 0.9, 0.4, 0.0)
self.gardenSpecialsList['decButton_image3_color'] = Vec4(1.0, 0.9, 0.4, 0.0)
else:
self.gardenSpecialsList['incButton_image1_color'] = Vec4(1.0, 0.9, 0.4, 1.0)
self.gardenSpecialsList['incButton_image3_color'] = Vec4(1.0, 0.9, 0.4, 1.0)
self.gardenSpecialsList['decButton_image1_color'] = Vec4(1.0, 0.9, 0.4, 1.0)
self.gardenSpecialsList['decButton_image3_color'] = Vec4(1.0, 0.9, 0.4, 1.0)
for entry in specialsList:
if not firstEntry:
firstEntry = entry
someItem = DirectScrolledListItem(parent=self.gardenSpecialsList, text='%s x %s' % (GardenGlobals.Specials[entry[0]]['photoName'], entry[1]), text_align=TextNode.ALeft, text_fg=(0.0, 0.0, 0.0, 1), text_bg=(1.0, 1.0, 1, 0), text_scale=0.06, relief=None, command=self.showSpecialsPanel, extraArgs=[entry])
self.gardenSpecialsList.addItem(someItem)
self.specialsPhoto.show()
if firstEntry:
self.showSpecialsPanel(firstEntry)
return
def showSpecialsPanel(self, entry):
type = entry[0]
number = entry[1]
self.specialsPhoto.hide()
self.specialsPhoto.update(type)
self.specialsPhoto.show()
self.specialsInfo['text'] = GardenGlobals.Specials[entry[0]]['description']
self.selectedSpecial = type
specialInfo = GardenGlobals.Specials[entry[0]]
if 'useFromShtiker' in specialInfo and specialInfo['useFromShtiker']:
self.useSpecialButton.show()
else:
self.useSpecialButton.hide()
def __useSpecial(self):
self.useSpecialButton['state'] = DGG.DISABLED
localAvatar.sendUpdate('reqUseSpecial', [self.selectedSpecial])
def clearGS(self):
while len(self.gardenSpecialsList['items']) > 0:
for item in self.gardenSpecialsList['items']:
self.gardenSpecialsList.removeItem(item, 1)
if hasattr(item, 'destroy'):
item.destroy()
if hasattr(item, 'delete'):
item.delete()
del item
def createAlbumBrowser(self):
if not hasattr(self, 'browser'):
self.browser = FlowerBrowser.FlowerBrowser(self)
self.browser.setScale(1.1)
self.collectedTotal = DirectLabel(parent=self.browser, relief=None, text='', text_scale=0.06, pos=(0, 0, -0.61))
return
def createGardenTrophyFrame(self):
if not hasattr(self, 'trophyFrame'):
self.trophyFrame = DirectFrame(parent=self, relief=None, image=self.trophyCase, image_pos=(0, 1, 0), image_scale=0.034)
self.trophyFrame.hide()
self.trophies = []
hOffset = -0.5
vOffset = 0.4
for level, trophyDesc in GardenGlobals.TrophyDict.items():
trophy = GardenTrophy(-1)
trophy.nameLabel['text'] = trophyDesc[0]
trophy.reparentTo(self.trophyFrame)
trophy.setScale(0.36)
if level % TROPHIES_PER_ROW == 0:
hOffset = -0.5
vOffset -= 0.4
trophy.setPos(hOffset, 0, vOffset)
hOffset += 0.25
self.trophies.append(trophy)
return
def createFlowerPicker(self):
if not hasattr(self, 'picker'):
self.picker = FlowerPicker.FlowerPicker(self)
self.picker.setPos(-0.555, 0, 0.1)
self.picker.setScale(0.95)
self.FUDGE_FACTOR = 0.01
self.barLength = 1.1
self.shovelBar = DirectWaitBar(parent=self.picker, pos=(0.95, 0, -0.55), relief=DGG.SUNKEN, frameSize=(-0.65,
1.05,
-0.1,
0.1), borderWidth=(0.025, 0.025), scale=0.45, frameColor=(0.8, 0.8, 0.7, 1), barColor=(0.6, 0.4, 0.2, 1), range=self.barLength + self.FUDGE_FACTOR, value=self.barLength * 0.5 + self.FUDGE_FACTOR, text=' ' + TTLocalizer.Laff, text_scale=0.11, text_fg=(0.05, 0.14, 0.2, 1), text_align=TextNode.ALeft, text_pos=(-0.57, -0.035))
self.wateringCanBar = DirectWaitBar(parent=self.picker, pos=(0.95, 0, -0.75), relief=DGG.SUNKEN, frameSize=(-0.65,
1.05,
-0.1,
0.1), borderWidth=(0.025, 0.025), scale=0.45, frameColor=(0.8, 0.8, 0.7, 1), barColor=(0.4, 0.6, 1.0, 1), range=self.barLength + self.FUDGE_FACTOR, value=self.barLength * 0.5 + self.FUDGE_FACTOR, text=' ' + TTLocalizer.Laff, text_scale=0.11, text_fg=(0.05, 0.14, 0.2, 1), text_align=TextNode.ALeft, text_pos=(-0.57, -0.035))
def unload(self):
print 'gardenPage Unloading'
if hasattr(self, 'specialsPhoto'):
del self.specialsPhoto
if hasattr(self, 'trophies'):
del self.trophies
if hasattr(self, 'trophyCase'):
del self.trophyCase
if hasattr(self, 'useSpecialButton'):
self.useSpecialButton.destroy()
del self.useSpecialButton
self.cleanupResultDialog()
self.gardenSpecialsList.destroy()
self.basketTab.destroy()
self.collectionTab.destroy()
self.trophyTab.destroy()
self.specialsTab.destroy()
ShtikerPage.ShtikerPage.unload(self)
def updatePage(self):
if hasattr(self, 'collectedTotal'):
self.collectedTotal['text'] = TTLocalizer.GardenPageCollectedTotal % (len(base.localAvatar.flowerCollection), GardenGlobals.getNumberOfFlowerVarieties())
if hasattr(self, 'shovelBar'):
shovel = base.localAvatar.shovel
shovelName = TTLocalizer.ShovelNameDict[shovel]
curShovelSkill = base.localAvatar.shovelSkill
maxShovelSkill = GardenGlobals.ShovelAttributes[shovel]['skillPts']
if shovel == GardenGlobals.MAX_SHOVELS - 1:
maxShovelSkill -= 1
wateringCan = base.localAvatar.wateringCan
wateringCanName = TTLocalizer.WateringCanNameDict[wateringCan]
curWateringCanSkill = base.localAvatar.wateringCanSkill
maxWateringCanSkill = GardenGlobals.WateringCanAttributes[wateringCan]['skillPts']
if wateringCan == GardenGlobals.MAX_WATERING_CANS - 1:
maxWateringCanSkill -= 1
textToUse = TTLocalizer.GardenPageShovelInfo % (shovelName, curShovelSkill, maxShovelSkill)
self.shovelBar['text'] = textToUse
self.shovelBar['value'] = float(curShovelSkill) / float(maxShovelSkill) * self.barLength + self.FUDGE_FACTOR
textToUse = TTLocalizer.GardenPageWateringCanInfo % (wateringCanName, curWateringCanSkill, maxWateringCanSkill)
self.wateringCanBar['text'] = textToUse
self.wateringCanBar['value'] = float(curWateringCanSkill) / float(maxWateringCanSkill) * self.barLength + self.FUDGE_FACTOR
else:
print 'no shovel bar'
if self.mode == GardenPage_Collection:
if hasattr(self, 'browser'):
self.browser.update()
elif self.mode == GardenPage_Basket:
if hasattr(self, 'picker'):
newBasketFlower = base.localAvatar.flowerBasket.getFlower()
self.picker.update(newBasketFlower)
elif self.mode == GardenPage_Trophy:
if hasattr(self, 'trophies'):
for trophy in self.trophies:
trophy.setLevel(-1)
for trophyId in base.localAvatar.getGardenTrophies():
self.trophies[trophyId].setLevel(trophyId)
elif self.mode == GardenPage_Specials:
self.createGardenSpecialsList()
if not base.cr.playGame.getPlace().getState() == 'stickerBook':
self.specialsPhoto.hide()
def destroy(self):
self.notify.debug('destroy')
self.useSpecialButton.destroy()
if hasattr(self, 'gardenSpecialsList'):
self.clearGS()
self.gardenSpecialsList.destroy()
self.ignoreAll()
self.cleanupResultDialog()
DirectFrame.destroy(self)
def useSpecialDone(self, response):
stringToShow = ''
if response == 'success':
stringToShow = TTLocalizer.UseSpecialSuccess
elif response == 'badlocation':
stringToShow = TTLocalizer.UseSpecialBadLocation
else:
stringToShow = 'Unknown response %s' % response
self.resultDialog = TTDialog.TTDialog(parent=aspect2dp, style=TTDialog.Acknowledge, text=stringToShow, command=self.cleanupResultDialog)
def cleanupResultDialog(self, value = None):
if self.resultDialog:
self.resultDialog.destroy()
self.resultDialog = None
self.useSpecialButton['state'] = DGG.NORMAL
return
class GardenTrophy(DirectFrame):
notify = DirectNotifyGlobal.directNotify.newCategory('GardenTrophy')
def __init__(self, level):
DirectFrame.__init__(self, relief=None)
self.initialiseoptions(GardenTrophy)
self.trophy = loader.loadModel('phase_3.5/models/gui/fishingTrophy')
self.trophy.reparentTo(self)
self.trophy.setPos(0, 1, 0)
self.trophy.setScale(0.1)
self.base = self.trophy.find('**/trophyBase')
self.column = self.trophy.find('**/trophyColumn')
self.top = self.trophy.find('**/trophyTop')
self.topBase = self.trophy.find('**/trophyTopBase')
self.statue = self.trophy.find('**/trophyStatue')
self.base.setColorScale(1, 1, 0.8, 1)
self.bowl = loader.loadModel('phase_3.5/models/gui/fishingTrophyBowl')
self.bowl.reparentTo(self)
self.bowl.setPos(0, 1, 0)
self.bowl.setScale(2.0)
self.bowlTop = self.bowl.find('**/fishingTrophyGreyBowl')
self.bowlBase = self.bowl.find('**/fishingTrophyBase')
self.bowlBase.setScale(1.25, 1, 1)
self.bowlBase.setColorScale(1, 1, 0.8, 1)
self.nameLabel = DirectLabel(parent=self, relief=None, pos=(0, 0, -0.15), text='Trophy Text', text_scale=0.125, text_fg=Vec4(0.9, 0.9, 0.4, 1))
self.shadow = loader.loadModel('phase_3/models/props/drop_shadow')
self.shadow.reparentTo(self)
self.shadow.setColor(1, 1, 1, 0.2)
self.shadow.setPosHprScale(0, 1, 0.35, 0, 90, 0, 0.1, 0.14, 0.1)
self.setLevel(level)
return
def setLevel(self, level):
self.level = level
order = ('C', 'D', 'B', 'A')
scales = (0.25, 0.25, 0.22, 0.25)
metalTrophy = ('wheelbarrel', 'shovels', 'flower', 'watering_can')
if self.level >= 0 and self.level < len(order):
modelStr = 'phase_5.5/models/estate/trophy'
modelStr += order[level]
self.gardenTrophy = loader.loadModel(modelStr)
self.gardenTrophy.setScale(scales[level])
self.gardenTrophy.reparentTo(self)
self.metalTrophy = self.gardenTrophy.find('**/%s' % metalTrophy[level])
if level == -1:
self.trophy.hide()
self.bowl.hide()
self.nameLabel.hide()
elif level == 0:
self.trophy.show()
self.trophy.hide()
self.bowl.hide()
self.nameLabel.show()
self.column.setScale(1.3229, 1.26468, 1.11878)
self.top.setPos(0, 0, -1)
self.__bronze()
elif level == 1:
self.trophy.show()
self.trophy.hide()
self.bowl.hide()
self.nameLabel.show()
self.column.setScale(1.3229, 1.26468, 1.61878)
self.top.setPos(0, 0, -0.5)
self.__bronze()
elif level == 2:
self.trophy.show()
self.trophy.hide()
self.bowl.hide()
self.nameLabel.show()
self.column.setScale(1.3229, 1.26468, 2.11878)
self.top.setPos(0, 0, 0)
self.__silver()
elif level == 3:
self.trophy.show()
self.trophy.hide()
self.bowl.hide()
self.nameLabel.show()
self.column.setScale(1.3229, 1.26468, 2.61878)
self.top.setPos(0, 0, 0.5)
self.__silver()
elif level == 4:
self.trophy.show()
self.bowl.hide()
self.nameLabel.show()
self.column.setScale(1.3229, 1.26468, 3.11878)
self.top.setPos(0, 0, 1)
self.__gold()
elif level == 5:
self.trophy.hide()
self.bowl.show()
self.bowlTop.setScale(1.75)
self.nameLabel.show()
self.__bronze()
elif level == 6:
self.trophy.hide()
self.bowl.show()
self.bowlTop.setScale(2.0)
self.nameLabel.show()
self.__silver()
elif level >= 7:
self.trophy.hide()
self.bowl.show()
self.bowlTop.setScale(2.25)
self.nameLabel.show()
self.__gold()
def __bronze(self):
self.top.setColorScale(0.9, 0.6, 0.33, 1)
self.bowlTop.setColorScale(0.9, 0.6, 0.33, 1)
self.metalTrophy.setColorScale(0.9, 0.6, 0.33, 1)
def __silver(self):
self.top.setColorScale(0.9, 0.9, 1, 1)
self.bowlTop.setColorScale(0.9, 0.9, 1, 1)
self.metalTrophy.setColorScale(0.9, 0.9, 1, 1)
def __gold(self):
self.top.setColorScale(1, 0.95, 0.1, 1)
self.bowlTop.setColorScale(1, 0.95, 0.1, 1)
self.metalTrophy.setColorScale(1, 0.95, 0.1, 1)
def destroy(self):
self.trophy.removeNode()
self.bowl.removeNode()
self.shadow.removeNode()
if hasattr(self, 'gardenTrophy'):
self.gardenTrophy.removeNode()
DirectFrame.destroy(self)
|
py | 1a440b7d83ada18dee8b54adb45566d6080a6b2d | __author__ = 'tpaulus'
import SendEmail
import nyt
import todoist
import wunderground
import properties
import parse
import html |
py | 1a440e2fbe51d8736c77dffd4dd2915c2a7700c8 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from django.http import JsonResponse
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators import csrf
# from django.contrib.auth.decorators import login_required
from tool.tools import createId
from reader.models import reader
from author.models import author
# connect to mysql and check
def loginReader(request):
lastUrl = ""
if "lastUrl" in request.POST:
lastUrl = request.POST['lastUrl']
context = {}
if "readerId" in request.session:
context['status'] = "success"
if lastUrl == "null":
# context['message'] = "/reader/readerIndex/"
return HttpResponseRedirect("/reader/index/")
elif lastUrl == "" or lastUrl is None:
context['status'] = "fail"
context['message'] = "錯誤的訪問"
return JsonResponse(context)
else:
# context['message'] = lastUrl
return HttpResponseRedirect(lastUrl)
# return JsonResponse(context)
    if 'userName' not in request.POST or 'passwd' not in request.POST:
context['status'] = "fail"
context['message'] = "請重載後輸入 Email 和密碼"
return JsonResponse(context)
# return render(request, 'reader/login.html')
userName = unicode(request.POST['userName'])
passwd = createId(96,request.POST['passwd'])
try:
readerObj = reader.objects.get(email=userName)
if passwd != readerObj.passwd:
context['status'] = "fail"
context['message'] = "密碼錯誤!請重新登錄!"
return JsonResponse(context)
# return render(request, 'reader/loginFail.html', {'message': u'密碼錯誤!請重新登錄!'})
if readerObj.status == "allowed":
request.session["readerId"] = readerObj.id
request.session["userName"] = readerObj.name
# check user is or not author and author's status
isAuthor = author.isExist(readerObj.id)
request.session["isAuthor"] = isAuthor
authorStatus = author.getStatus(readerObj.id)
if not isAuthor:
request.session["authorStatus"] = ""
context['status'] = "success"
if lastUrl == "null":
context['message'] = "/reader/index/"
else:
context['message'] = lastUrl
return JsonResponse(context)
authorId = author.getId(readerObj.id)
if authorId != "":
request.session["authorId"] = authorId
if authorStatus == "active":
request.session["authorStatus"] = "active"
else:
request.session["authorStatus"] = authorStatus
context['status'] = "success"
if lastUrl == "null":
context['message'] = "/reader/index/"
else:
context['message'] = lastUrl
return JsonResponse(context)
elif readerObj.status == "abuse":
context['status'] = "fail"
context['message'] = "您尚未驗證郵箱!請前往注冊郵箱驗證身份!"
return JsonResponse(context)
        else:
            context['status'] = "fail"
            context['message'] = 'Your account status is abnormal and you cannot log in. Current status: ' + str(readerObj.status) + '. Please contact an administrator or register again.'
            return JsonResponse(context)
except reader.DoesNotExist:
context['status'] = "fail"
        context['message'] = 'User does not exist! Please log in again!'
return JsonResponse(context)
def logout(request):
# delete session
if "readerId" in request.session:
del request.session["readerId"] # if not exists, report error
del request.session["userName"] # if not exists, report error
del request.session["isAuthor"] # if not exists, report error
if 'authorId' in request.session:
del request.session["authorId"] # if not exists, report error
del request.session["authorStatus"] # if not exists, report error
request.session.flush()
return HttpResponseRedirect('/reader/login/')
else:
return HttpResponseRedirect('/reader/login/')
|
py | 1a440f043efc742f711d22ca46bcfe784e3c3727 | from dataclasses import asdict
from functools import wraps
import json
from protobuf_to_dict import protobuf_to_dict
from dacite import from_dict
from schemes.graph import GraphNode, GraphRelation
from configs.config import logger
def raise_customized_error(capture, target):
def _raise_customized_error(func):
@wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except capture:
                raise target
        return wrapper
return _raise_customized_error
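# Illustrative use (hypothetical names): remap a low-level exception to a
# domain-specific one without touching call sites.
# @raise_customized_error(KeyError, LookupError('node not found'))
# def get_node(index, name):
#     return index[name]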
def raise_grpc_error(capture, grpc_status_code):
def _raise_grpc_error(func):
@wraps(func)
def wrapper(self, request, context):
try:
return func(self, request, context)
except capture as e:
context.set_code(grpc_status_code)
if hasattr(e, "desc"):
context.set_details(e.desc)
else:
context.set_details("Maybe RPC Error.")
return wrapper
return _raise_grpc_error
def deco_log_error(logger):
def _deco_log_error(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
if logger:
logger.exception(e)
raise e
# return {"errors": {"code": e.code, "desc": e.desc}}
return wrapper
return _deco_log_error
def convert_node_to_graphnode(node):
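    # Assumption: `node` is a py2neo-style Node whose str(node.labels) renders
    # as ':Label'; the slice below drops the leading colon.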
label = str(node.labels)[1:]
dct = dict(node)
name = dct.pop("name")
gn = GraphNode(label, name, dct)
return gn
def convert_relation_to_graph_relation(relation):
start = convert_node_to_graphnode(relation.start_node)
end = convert_node_to_graphnode(relation.end_node)
kind = list(relation.types())[0]
props = dict(relation)
gr = GraphRelation(start, end, kind, props)
return gr
def convert_query_to_scheme():
def _convert_query_to_scheme(func):
@wraps(func)
def wrapper(self, qin, **kwargs):
query = func(self, qin, **kwargs)
result = []
for gobj in query:
if gobj.relationships:
obj = convert_relation_to_graph_relation(gobj)
else:
obj = convert_node_to_graphnode(gobj)
result.append(obj)
return result
return wrapper
return _convert_query_to_scheme
def convert_request_to(target):
"""
convert different kinds of request to needed input.
there are 4 needed inputs:
- GraphNode
- GraphRelation
- RawString
- ExtractorInput
"""
def _convert_request_to(func):
@wraps(func)
def wrapper(self, request, context):
dctreq = protobuf_to_dict(request)
if "props" in dctreq:
req_props = dctreq["props"]
dctreq["props"] = json.loads(req_props)
if "start" in dctreq:
start_props = dctreq["start"]["props"]
dctreq["start"]["props"] = json.loads(start_props)
if "end" in dctreq:
end_props = dctreq["end"]["props"]
dctreq["end"]["props"] = json.loads(end_props)
request = from_dict(target, dctreq)
result = func(self, request, context)
return result
return wrapper
return _convert_request_to
def convert_graphobj_to_dict(graphobj):
"""
A graphobj is a GraphNode or GraphRelation
"""
dct = asdict(graphobj)
if "props" in dct:
dct["props"] = json.dumps(dct["props"])
if "start" in dct:
start_props = dct["start"]["props"]
dct["start"]["props"] = json.dumps(start_props)
if "end" in dct:
end_props = dct["end"]["props"]
dct["end"]["props"] = json.dumps(end_props)
return dct
|
py | 1a440fbb15055eca43810c9d00e9f572cb8acc56 | import multiprocessing as mp
import os
from glob import glob
from subprocess import run
import pandas as pd
def ensure_file(file):
"""Ensure a single file exists, returns the full path of the file if True or throws an Assertion error if not"""
# tilde expansion
file_path = os.path.normpath(os.path.expanduser(file))
assert os.path.isfile(file_path), "The file {} doesn't exist. Please create the file first".format(file)
return file_path
def ensure_dir(file_path, create_if_not=True):
"""The function ensures the dir exists, if it doesn't it creates it and returns the path"""
# tilde expansion
file_path = os.path.normpath(os.path.expanduser(file_path))
if os.path.isfile(file_path):
directory = os.path.dirname(file_path)
else:
directory = file_path
if not os.path.exists(directory):
if create_if_not:
try:
os.makedirs(directory)
except FileExistsError:
pass
else:
            raise FileNotFoundError(f"The directory {directory} doesn't exist; create it or pass create_if_not=True")
return directory
def empty_dir(dir_path, force=False):
if force:
run(f'rm -v {dir_path}/*', shell=True)
else:
files = os.listdir(dir_path)
if len(files) and not mp.current_process().daemon:
answer = input(
f'The directory {dir_path} contains {len(files)} files, do you want to remove them?\n [yes\\No] ')
if answer.lower() == 'yes':
run(f'rm -v {dir_path}/*', shell=True)
def convert_vid_to_qid(df: pd.DataFrame):
if df.index.name != 'qid' and df.index.name != 'topic':
if 'qid' in df.columns:
_df = df.set_index('qid')
elif 'topic' in df.columns:
_df = df.set_index('topic')
else:
            assert False, "The DF doesn't have qid or topic"
else:
_df = df
_df.rename(index=lambda x: f'{x.split("-")[0]}', inplace=True)
return _df
def add_topic_to_qdf(qdf: pd.DataFrame):
"""This functions will add a topic column to the queries DF"""
if 'topic' not in qdf.columns:
if 'qid' in qdf.columns:
qdf = qdf.assign(topic=lambda x: x.qid.apply(lambda y: y.split('-')[0]))
else:
qdf = qdf.reset_index().assign(topic=lambda x: x.qid.apply(lambda y: y.split('-')[0]))
return qdf
def read_rm_prob_files(data_dir, number_of_docs, clipping='*'):
"""The function creates a DF from files, the probabilities are p(w|RM1) for all query words
If a query term doesn't appear in the file, it's implies p(w|R)=0"""
data_files = glob(f'{data_dir}/probabilities-{number_of_docs}+{clipping}')
if len(data_files) < 1:
data_files = glob(f'{data_dir}/probabilities-{number_of_docs}')
_list = []
for _file in data_files:
_col = f'{_file.rsplit("/")[-1].rsplit("-")[-1]}'
_df = pd.read_csv(_file, names=['qid', 'term', _col], sep=' ')
_df = _df.astype({'qid': str}).set_index(['qid', 'term'])
_list.append(_df)
return pd.concat(_list, axis=1).fillna(0)
def set_environment_paths(base_path=None):
base_path = base_path if base_path else os.path.dirname(os.path.abspath(__file__))
results_dir = ensure_dir(f'{base_path}/QppUqvProj/Results')
data_dir = ensure_dir(f'{base_path}/QppUqvProj/data')
return results_dir, data_dir
def char_range(a, z):
"""Creates a generator that iterates the characters from `c1` to `c2`, inclusive."""
# ord returns the ASCII value, chr returns the char of ASCII value
for c in range(ord(a), ord(z) + 1):
yield chr(c)
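# Example: list(char_range('a', 'd')) -> ['a', 'b', 'c', 'd']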
|
py | 1a441013d4330e610510a1378245b5bbda7d4f40 | import asyncio
from aiogram import types, Dispatcher
from aiogram.dispatcher import DEFAULT_RATE_LIMIT
from aiogram.dispatcher.handler import CancelHandler, current_handler
from aiogram.dispatcher.middlewares import BaseMiddleware
from aiogram.utils.exceptions import Throttled
class ThrottlingMiddleware(BaseMiddleware):
"""
Simple middleware
"""
def __init__(self, limit=DEFAULT_RATE_LIMIT, key_prefix='antiflood_'):
self.rate_limit = limit
self.prefix = key_prefix
super(ThrottlingMiddleware, self).__init__()
async def on_process_message(self, message: types.Message, data: dict):
"""
This handler is called when dispatcher receives a message
:param message:
"""
# Get current handler
handler = current_handler.get()
# Get dispatcher from context
dispatcher = Dispatcher.get_current()
# If handler was configured, get rate limit and key from handler
if handler:
limit = getattr(handler, 'throttling_rate_limit', self.rate_limit)
key = getattr(handler, 'throttling_key', f"{self.prefix}_{handler.__name__}")
else:
limit = self.rate_limit
key = f"{self.prefix}_message"
# Use Dispatcher.throttle method.
try:
await dispatcher.throttle(key, rate=limit)
except Throttled as t:
# Execute action
await self.message_throttled(message, t)
# Cancel current handler
raise CancelHandler()
async def message_throttled(self, message: types.Message, throttled: Throttled):
"""
Notify user only on first exceed and notify about unlocking only on last exceed
:param message:
:param throttled:
"""
handler = current_handler.get()
dispatcher = Dispatcher.get_current()
if handler:
key = getattr(handler, 'throttling_key', f"{self.prefix}_{handler.__name__}")
else:
key = f"{self.prefix}_message"
# Calculate how many time is left till the block ends
delta = throttled.rate - throttled.delta
# Prevent flooding
if throttled.exceeded_count <= 2:
            await message.reply('Too many requests')
# Sleep
await asyncio.sleep(delta)
# Check lock status
thr = await dispatcher.check_key(key)
# If current message is not last with current key - do not send message
if thr.exceeded_count == throttled.exceeded_count:
            await message.reply('Unbanned')
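# Illustrative registration sketch (assumes the aiogram 2.x middleware API;
# the bot token is a placeholder):
# from aiogram import Bot
# bot = Bot(token='BOT_TOKEN')
# dp = Dispatcher(bot)
# dp.middleware.setup(ThrottlingMiddleware(limit=DEFAULT_RATE_LIMIT))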
|
py | 1a4410316c3b9c1a8bf05e9338005f9f7f3517d0 | #!/usr/bin/env python
from common.dbconnect import mongo_connect, find_session
from common.hashmethods import *
from common.entities import pcapFile
import logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import *
from canari.maltego.entities import EmailAddress
from canari.maltego.message import UIMessage
from canari.framework import configure
import re
from canari.config import config
__author__ = 'catalyst256'
__copyright__ = 'Copyright 2014, sniffmypacketsv2 Project'
__credits__ = []
__license__ = 'GPL'
__version__ = '0.1'
__maintainer__ = 'catalyst256'
__email__ = '[email protected]'
__status__ = 'Development'
__all__ = [
'dotransform'
]
@configure(
label='Extract Email Address(s)',
description='Extract email addresses from a pcap file',
uuids=['sniffMyPacketsv2.v2.pcap_2_emailaddr'],
inputs=[('[SmP] - Email', pcapFile)],
debug=True
)
def dotransform(request, response):
pcap = request.value
lookfor = ['MAIL FROM:', 'RCPT TO:']
pkts = rdpcap(pcap)
usedb = config['working/usedb']
# Check to see if we are using the database or not
if usedb > 0:
d = mongo_connect()
c = d['CREDS']
# Hash the pcap file
try:
md5pcap = md5_for_file(pcap)
except Exception as e:
return response + UIMessage(str(e))
x = find_session(md5pcap)
pcap_id = x[0]
else:
pass
addr = []
try:
for p in pkts:
for m in lookfor:
if p.haslayer(TCP) and p.haslayer(Raw):
raw = p[Raw].load
if m in raw:
for s in re.finditer('<([\S.-]+@[\S-]+)>', raw):
addr.append(s.group(1))
except Exception as e:
return response + UIMessage(str(e))
for x in addr:
if usedb > 0:
data = {'PCAP ID': pcap_id, 'Type': 'Email Address', 'Record': x}
t = d.CREDS.find({'Record': x}).count()
if t > 0:
pass
else:
c.insert(data)
else:
pass
e = EmailAddress(x)
response += e
return response
|
py | 1a44103af0ed63e3819cd153f37383b1feb89683 | import pandas as pd
import numpy as np
import itertools as it
import functools as ft
from numpy import zeros, arange
from collections import defaultdict
try:
from numba import jit, njit
except ImportError:
print('Install numba')
def multi_args(function, constants, variables, isProduct=False, maxLimit=None):
"""
Run a function on different parameters and
aggregate results
function
function to be parametrized
constants
arguments that would remain constant
        throughout all the scenarios
dictionary with key being argument name
and value being the argument value
variables
arguments that need to be varied
dictionary with key being argument name
and value being list of argument values
to substitute
isProduct
list of variables for which all combinations
are to be tried out.
maxLimit
Maximum number of simulations to be run
before terminating. Useful in case of long
running simulations.
default 1000
By default, this function zips through each of the
variables but if you need to have the Cartesian
product, specify those variables in isProduct.
returns a Series with different variables and
the results
"""
from functools import partial
import concurrent.futures
if maxLimit:
MAX_LIMIT = maxLimit
else:
MAX_LIMIT = 1000
func = partial(function, **constants)
arg_list = []
if isProduct:
args = it.product(*variables.values())
else:
args = zip(*variables.values())
keys = variables.keys()
with concurrent.futures.ProcessPoolExecutor() as executor:
tasks = []
for i, arg in enumerate(args):
kwds = {a: b for a, b in zip(keys, arg)}
tasks.append(executor.submit(func, **kwds))
arg_list.append(arg)
i += 1
if i >= MAX_LIMIT:
print('MAX LIMIT reached', MAX_LIMIT)
break
result = [task.result() for task in tasks]
s = pd.Series(result)
s.name = 'values'
s.index = pd.MultiIndex.from_tuples(arg_list, names=keys)
return s
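# Illustrative call (hypothetical `simulate` function): hold capital constant
# while trying every (stop_loss, leverage) combination.
# results = multi_args(simulate,
#                      constants={'capital': 100000},
#                      variables={'stop_loss': [1, 2, 3], 'leverage': [1, 2]},
#                      isProduct=True, maxLimit=50)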
def stop_loss(price, stop_loss, order='B', tick_size=0.05):
"""
Return the stop loss for the order
price
price from which stop loss is to be calculated
stop_loss
stop loss percentage from price
order
the original order type - B for Buy and S for Sell
If the original order is buy, then a sell stop
loss is generated and vice-versa
tick_size
tick_size to be rounded off
    >>> stop_loss(100, 3)
    97.0
    Notes
    ------
    * passing a negative value may give unexpected results
* raises ValueError if order is other than B or S
"""
if order == 'B':
return tick(price * (1 - stop_loss * 0.01), tick_size)
elif order == 'S':
return tick(price * (1 + stop_loss * 0.01), tick_size)
else:
raise ValueError('order should be either B or S')
def tick(price, tick_size=0.05):
"""
Rounds a given price to the requested tick
"""
return round(price / tick_size)*tick_size
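# Note: float arithmetic may leave tiny artifacts in the value returned by
# tick(); round the result (e.g. round(price, 2)) before display if needed.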
def create_orders(data, rename, **kwargs):
"""
create an orders dataframe from an existing dataframe
by renaming columns and providing additional columns
data
dataframe
rename
columns to be renamed as dictionary
kwargs
key value pairs with key being column names
and values being dataframe values
"""
data = data.rename(rename, axis='columns')
for k, v in kwargs.items():
data[k] = v
return data
def recursive_merge(dfs, on=None, how='inner', columns={}):
"""
Recursively merge all dataframes in the given list
Given a list of dataframes, merge them based on index or columns.
By default, dataframes are merged on index. Specify the **on**
argument to merge by columns. The "on" columns should be available
in all the dataframes
Parameters
-----------
dfs
list of dataframes
on
columns on which the dataframes are to be merged.
By default, merge is done on index
how
how to apply the merge
{'left', 'right', 'outer', 'inner'}, default 'inner'.
Same as pandas merge
columns
To return only specific columns from specific dataframes,
pass them as a dictionary with key being the index of the
dataframe in the list and value being the list of columns
to merge. **your keys should be string**
See examples for more details
>>> recursive_merge(dfs, columns = {'1': ['one', 'two']})
Fetch only the columns one and two from the second dataframe
"""
data = dfs[0]
for i, d in enumerate(dfs[1:], 1):
if columns.get(str(i)):
cols = list(columns.get(str(i)))
cols.extend(on)
else:
cols = d.columns
if on is None:
data = data.merge(d[cols], how=how, left_index=True, right_index=True)
else:
data = data.merge(d[cols], how=how, on=on)
return data
def get_nearest_option(spot, n=1, opt='C', step=100):
"""
Given a spot price, calculate the nearest options
spot
spot price of the instrument
n
number of nearest option prices
opt
call or put option. 'C' for call and 'P' for put
step
step size of the option price
returns a list of options
>>> get_nearest_option(23457, 2)
>>> [23400, 23500]
>>> get_nearest_option(23457, 2, 'P')
>>> [23400, 23300]
All calculations are based on in the money option. So,
get_nearest_option(24499) would return 24400
"""
in_money = int(spot/step) * step
option_prices = []
for i in range(n):
if opt == 'C':
strike = in_money + step*i
option_prices.append(strike)
elif opt == 'P':
strike = in_money - step*i
option_prices.append(strike)
else:
print('Option type not recognized; Check the opt argument')
return option_prices
def calendar(start, end, holidays=None, alldays=False,
start_time=None, end_time=None, freq='D', **kwargs):
"""
Generate a calendar removing the list of
given holidays.
Provide date arguments as strings in the
format **YYYY-MM-DD**
start
start date of the period
end
end date of the period
holidays
list of holidays as strings
alldays
True/False
True to generate dates for all days
including weekends. default: False
start_time
start time for each day as string
end_time
end time for each day as string
freq
frequency of the calendar
kwargs
kwargs to the pandas date range function
Note
-----
    1) This function is slow, especially when generating
    timestamps, so call it only once at the start
    of your program for better performance
    2) This function generates a calendar only for
    business days. To use all available days,
    set the alldays argument to True
"""
if alldays:
dfunc = ft.partial(pd.date_range, freq='D', **kwargs)
else:
dfunc = ft.partial(pd.bdate_range, freq='B', **kwargs)
dates = list(dfunc(start=start, end=end))
if (holidays):
holidays = [pd.to_datetime(dt) for dt in holidays]
for hol in holidays:
dates.remove(hol)
# Initialize times
if (start_time or end_time):
if not(start_time):
start_time = "00:00:00"
if not(end_time):
end_time = "23:59:59"
timestamps = []
fmt = "{:%Y%m%d} {}"
for d in dates:
start_ts = fmt.format(d, start_time)
end_ts = fmt.format(d, end_time)
ts = pd.date_range(start=start_ts, end=end_ts, freq=freq, **kwargs)
timestamps.extend(ts)
return timestamps
else:
return dates
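# Illustrative call: 10-minute timestamps for early January, skipping a holiday.
# ts = calendar('2019-01-01', '2019-01-04', holidays=['2019-01-02'],
#               start_time='09:15:00', end_time='15:30:00', freq='10min')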
def get_ohlc_intraday(data, start_time, end_time, date_col=None,
col_mappings=None, sort=False):
"""
Get ohlc for a specific period in a day for all days
for all the symbols.
data
dataframe with symbol, timestamp, date, open, high, low, close columns.
The timestamp and date columns are assumed to be of pandas datetime type.
Each row represents data for a single stock at a specified period of time
If you have different column names, use the col_mappings argument
to rename the columns
start_time
start time for each day
end_time
end time for each day
date_col
date column to aggregate; this is in addition to time column.
If no date column is specified, a date column is created.
col_mappings
column mappings as a dictionary
(Eg.) if the symbol column is named as assetName and timestamp
as ts, then pass rename={'assetName': 'symbol', 'ts': 'timestamp'}
sort
Whether the data is sorted by timestamp.
If True, data is not sorted else data is sorted
returns
a dataframe with symbol, date, open, high, low and close columns
Note
-----
To speed up computation
1) If the data is already sorted, pass sort=True
2) If date column is already available, then pass date_col=column_name
Timestamp and date are assumed to be pandas datetime
"""
if col_mappings:
data = data.rename(col_mappings, axis='columns')
if not(sort):
data = data.sort_values(by='timestamp')
if not(date_col):
data['date'] = data['timestamp'].dt.date
date_col = 'date'
data = data.set_index('timestamp')
def calculate_ohlc(df):
"""
Internal function to calculate OHLC
"""
date = df.iloc[0].at[date_col].strftime('%Y-%m-%d')
fmt = "{date} {time}" # date time format
s = fmt.format(date=date, time=start_time)
e = fmt.format(date=date, time=end_time)
temp = df.loc[s:e]
agg = {'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last'}
return temp.groupby('symbol').agg(agg)
return data.groupby([date_col]).apply(calculate_ohlc)
def get_expanding_ohlc(data, freq, col_mappings=None):
"""
Given a dataframe with OHLC, timestamp and symbol columns
return a OHLC dataframe with open price, expanding high,
expanding low and close prices
data
dataframe with OHLC, timestamp and symbol columns
freq
frequency by which the data is to be resampled.
A pandas frequency string
col_mappings
column mappings as a dictionary
(Eg.) if the symbol column is named as assetName and timestamp
as ts, then pass rename={'assetName': 'symbol', 'ts': 'timestamp'}
Note
-----
The returned dataframe has the same length and index of the
original dataframe. The resampling is done only to calculate the
expanding high, low prices
"""
if col_mappings:
data = data.rename(col_mappings, axis='columns')
def calculate_ohlc(df):
temp = pd.DataFrame({
'high': df['high'].expanding().max(),
'low': df['low'].expanding().min()
})
temp['close'] = df['close']
temp['open'] = df['open'].iloc[0]
return temp
cols = ['open', 'high', 'low', 'close'] # for sorting return value
return data.resample(freq).apply(calculate_ohlc)[cols]
def generate_index(index, changes, dates=None):
"""
index
list of symbols that make up the latest index
changes
changes to the index as a dataframe.
The dataframe should have the following three columns
in the following order
1. date - date of change
2. symbol - security involving the change
3. flag - True/False indicating inclusion/exclusion into the index
True indicates inclusion and False exclusion
dates
list of dates to generate index
returns a dataframe with symbols for each date
Note
-----
* The changes dataframe is expected in the exact order.
Any other columns are discarded
"""
collect = {}
idx = index[:]
changes = changes.sort_values(by='date', ascending=False)
dates = [x for x in reversed(dates)]
uniq_dates = [x for x in changes.date.unique()]
for d in dates:
if d in uniq_dates:
formula = f'date=="{d}"'
chx = changes.query(formula)
for i, row in chx.iterrows():
try:
if not(row['flag']):
idx.append(row['symbol'])
else:
idx.remove(row['symbol'])
except Exception as e:
print(e, d, row)
collect[d] = idx[:]
frame = pd.melt(pd.DataFrame.from_dict(collect))
frame.columns = ['date', 'symbol']
return frame.sort_values(by='date').reset_index(drop=True)
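# Illustrative sketch (hypothetical data): rebuild daily membership from the
# latest index plus a log of inclusion/exclusion changes.
# changes = pd.DataFrame({'date': ['2019-06-28'], 'symbol': ['XYZ'], 'flag': [True]})
# membership = generate_index(['ABC', 'XYZ'], changes,
#                             dates=['2019-06-27', '2019-06-28'])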
def custom_index(data, on, window=30, function='median', num=30, sort_mode=False):
"""
Generate a custom index
data
dataframe with symbol and timestamp columns
on
column on which the index is to be generated
window
look back window
function
function to be applied
    num
        number of stocks to pick each day
sort_mode
whether to pick top stocks or bottom stocks
"""
from fastbt.datasource import DataSource
ds = DataSource(data)
ds.add_rolling(on=on, window=window, function=function,
lag=1, col_name='custom_index')
grouped = ds.data.groupby('timestamp')
if sort_mode:
return grouped.apply(lambda x: x.sort_values(
by='custom_index').head(num)).reset_index(drop=True)
else:
return grouped.apply(lambda x: x.sort_values(
by='custom_index').tail(num)).reset_index(drop=True)
@jit
def streak(values):
"""
Calculates the continuous streak of a variable.
Given an array of discrete values, calculate the
continuous streak of each value.
values
numpy array of values
Note
-----
1) Pass numpy arrays for faster computation. In case of pandas series,
pass series.values
2) Calculates the streak based on number of consecutive
values that appear in the array
"""
l = len(values)
arr = zeros(l)
arr[0] = 1
cnt = 1
for i in arange(1, l):
if values[i] == values[i-1]:
cnt += 1
else:
cnt = 1
arr[i] = cnt
return arr
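# Example: streak(np.array([1, 1, 0, 0, 0, 1])) -> array([1., 2., 1., 2., 3., 1.])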
@njit
def trend(up, down, threshold=2/3):
"""
up
numpy array
up values as the difference between open and high
down
numpy array
down values as the difference between open and low
threshold
threshold considered as a valid trend
"""
total = up+down
up_vals = up/total
down_vals = down/total
length = len(total)
arr = np.zeros(length)
for i in np.arange(length):
if up_vals[i] > threshold:
arr[i] = 1
elif down_vals[i] > threshold:
arr[i] = -1
else:
arr[i] = 0
return arr
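# Example: trend(np.array([3., 1.]), np.array([1., 3.])) -> array([ 1., -1.])
# (first bar: 75% of the range is an up-move; second: 75% down-move)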
def generate_weights(n=2, size=1):
"""
Generate random weights that sum to one; uses the dirichlet
distribution to generate weights
"""
return np.random.dirichlet(np.ones(n), size)
|
py | 1a44110e959859ee7579f69798d18bfe635ef410 | # Copyright (c) 2014 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import tempfile
import luigi.scheduler
import pickle
import unittest
import time
luigi.notifications.DEBUG = True
class SchedulerTest(unittest.TestCase):
def test_load_old_state(self):
tasks = {}
active_workers = {'Worker1': 1e9, 'Worker2': time.time()}
with tempfile.NamedTemporaryFile(delete=True) as fn:
with open(fn.name, 'w') as fobj:
state = (tasks, active_workers)
pickle.dump(state, fobj)
scheduler = luigi.scheduler.CentralPlannerScheduler(
state_path=fn.name)
scheduler.load()
scheduler.prune()
self.assertEquals(list(scheduler._active_workers.keys()),
['Worker2'])
def test_load_broken_state(self):
with tempfile.NamedTemporaryFile(delete=True) as fn:
with open(fn.name, 'w') as fobj:
print >> fobj, "b0rk"
scheduler = luigi.scheduler.CentralPlannerScheduler(
state_path=fn.name)
scheduler.load() # bad if this crashes
self.assertEquals(list(scheduler._active_workers.keys()), [])
if __name__ == '__main__':
unittest.main()
|
py | 1a44113a3182d9b4312df68ca8a51c3803debddf | # Warning
import warnings
import sklearn.exceptions
warnings.filterwarnings('ignore', category=DeprecationWarning)
warnings.filterwarnings('ignore', category=FutureWarning)
warnings.filterwarnings("ignore", category=sklearn.exceptions.UndefinedMetricWarning)
# Python
import numpy as np
import pandas as pd
from tqdm import tqdm
# Pytorch for Deep Learning
import torch
from torch.utils.data import DataLoader
from config import Config
from data import SETIDataset
from model import SwinNet
from transforms import Transforms
from utils import prepare_data
from main import best_model_name, best_epoch
train_df = pd.read_csv('../input/seti-breakthrough-listen/train_labels.csv')
test_df = pd.read_csv('../input/seti-breakthrough-listen/sample_submission.csv')
prepare_data(train_df, test_df)
if __name__ == '__main__':
model = SwinNet()
model.load_state_dict(torch.load(best_model_name))
model = model.to(Config.device)
model.eval()
predicted_labels = None
for i in range(Config.num_tta):
test_dataset = SETIDataset(
images_filepaths = test_df['image_path'].values,
targets = test_df['target'].values,
transform = Transforms.test_transforms
)
test_loader = DataLoader(
test_dataset, batch_size=Config.batch_size,
shuffle=False, num_workers=Config.num_workers,
pin_memory=True
)
temp_preds = None
with torch.no_grad():
for (images, target) in tqdm(test_loader):
images = images.to(Config.device, non_blocking=True)
output = model(images)
predictions = torch.sigmoid(output).cpu().numpy()
if temp_preds is None:
temp_preds = predictions
else:
temp_preds = np.vstack((temp_preds, predictions))
if predicted_labels is None:
predicted_labels = temp_preds
else:
predicted_labels += temp_preds
predicted_labels /= Config.num_tta
torch.save(model.state_dict(), f"{Config.model}_{best_epoch}epochs_weights.pth")
sub_df = pd.DataFrame()
sub_df['id'] = test_df['id']
sub_df['target'] = predicted_labels
sub_df.to_csv('submission.csv', index=False) |
py | 1a4411ef23723dfd93cefa90c808a2fcc7f068eb | import sys
sys.path.append('../..')
import pysif
tg_solver = pysif.solver('taylor-green-512.ini')
tg_solver.initialization()
tg_solver.solve()
|
py | 1a441227e64e382ad4625868010e95b0aae1305d | from django.contrib import admin
from blog.models import Post, Comment
admin.site.register(Post)
admin.site.register(Comment)
|
py | 1a441360442d67851a6fd869c263c4b543247997 | """Core classes and exceptions for Simple-Salesforce"""
# has to be defined prior to login import
DEFAULT_API_VERSION = '29.0'
import logging
import warnings
import requests
import json
try:
from urlparse import urlparse, urljoin
except ImportError:
# Python 3+
from urllib.parse import urlparse, urljoin
from simple_salesforce.login import SalesforceLogin
from simple_salesforce.util import date_to_iso8601, SalesforceError
try:
from collections import OrderedDict
except ImportError:
# Python < 2.7
from ordereddict import OrderedDict
#pylint: disable=invalid-name
logger = logging.getLogger(__name__)
def _warn_request_deprecation():
"""Deprecation for (Salesforce/SFType).request attribute"""
warnings.warn(
'The request attribute has been deprecated and will be removed in a '
'future version. Please use Salesforce.session instead.',
DeprecationWarning
)
# pylint: disable=too-many-instance-attributes
class Salesforce(object):
"""Salesforce Instance
An instance of Salesforce is a handy way to wrap a Salesforce session
for easy use of the Salesforce REST API.
"""
# pylint: disable=too-many-arguments
def __init__(
self, username=None, password=None, security_token=None,
session_id=None, instance=None, instance_url=None,
organizationId=None, sandbox=False, version=DEFAULT_API_VERSION,
proxies=None, session=None, client_id=None):
"""Initialize the instance with the given parameters.
Available kwargs
Password Authentication:
* username -- the Salesforce username to use for authentication
* password -- the password for the username
* security_token -- the security token for the username
* sandbox -- True if you want to login to `test.salesforce.com`, False
if you want to login to `login.salesforce.com`.
Direct Session and Instance Access:
* session_id -- Access token for this session
Then either
* instance -- Domain of your Salesforce instance, i.e.
`na1.salesforce.com`
OR
* instance_url -- Full URL of your instance i.e.
          `https://na1.salesforce.com`
Universal Kwargs:
* version -- the version of the Salesforce API to use, for example
`29.0`
* proxies -- the optional map of scheme to proxy server
* session -- Custom requests session, created in calling code. This
enables the use of requests Session features not otherwise
exposed by simple_salesforce.
"""
# Determine if the user passed in the optional version and/or sandbox
# kwargs
self.sf_version = version
self.sandbox = sandbox
self.session = session or requests.Session()
self.proxies = self.session.proxies
# override custom session proxies dance
if proxies is not None:
if not session:
self.session.proxies = self.proxies = proxies
else:
logger.warning(
'Proxies must be defined on custom session object, '
'ignoring proxies: %s', proxies
)
# Determine if the user wants to use our username/password auth or pass
# in their own information
if all(arg is not None for arg in (
username, password, security_token)):
self.auth_type = "password"
# Pass along the username/password to our login helper
self.session_id, self.sf_instance = SalesforceLogin(
session=self.session,
username=username,
password=password,
security_token=security_token,
sandbox=self.sandbox,
sf_version=self.sf_version,
proxies=self.proxies,
client_id=client_id)
elif all(arg is not None for arg in (
session_id, instance or instance_url)):
self.auth_type = "direct"
self.session_id = session_id
# If the user provides the full url (as returned by the OAuth
# interface for example) extract the hostname (which we rely on)
if instance_url is not None:
self.sf_instance = urlparse(instance_url).hostname
else:
self.sf_instance = instance
elif all(arg is not None for arg in (
username, password, organizationId)):
self.auth_type = 'ipfilter'
# Pass along the username/password to our login helper
self.session_id, self.sf_instance = SalesforceLogin(
session=self.session,
username=username,
password=password,
organizationId=organizationId,
sandbox=self.sandbox,
sf_version=self.sf_version,
proxies=self.proxies,
client_id=client_id)
else:
raise TypeError(
'You must provide login information or an instance and token'
)
if self.sandbox:
self.auth_site = 'https://test.salesforce.com'
else:
self.auth_site = 'https://login.salesforce.com'
self.headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + self.session_id,
'X-PrettyPrint': '1'
}
self.base_url = ('https://{instance}/services/data/v{version}/'
.format(instance=self.sf_instance,
version=self.sf_version))
self.apex_url = ('https://{instance}/services/apexrest/'
.format(instance=self.sf_instance))
def describe(self):
"""Describes all available objects
"""
url = self.base_url + "sobjects"
result = self._call_salesforce('GET', url)
if result.status_code != 200:
raise SalesforceGeneralError(url,
'describe',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# SObject Handler
def __getattr__(self, name):
"""Returns an `SFType` instance for the given Salesforce object type
(given in `name`).
The magic part of the SalesforceAPI, this function translates
calls such as `salesforce_api_instance.Lead.metadata()` into fully
constituted `SFType` instances to make a nice Python API wrapper
for the REST API.
Arguments:
* name -- the name of a Salesforce object type, e.g. Lead or Contact
"""
# fix to enable serialization
# (https://github.com/heroku/simple-salesforce/issues/60)
if name.startswith('__'):
return super(Salesforce, self).__getattr__(name)
return SFType(
name, self.session_id, self.sf_instance, sf_version=self.sf_version,
proxies=self.proxies, session=self.session)
# User utlity methods
def set_password(self, user, password):
"""Sets the password of a user
salesforce dev documentation link:
https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm
Arguments:
* user: the userID of the user to set
* password: the new password
"""
url = self.base_url + 'sobjects/User/%s/password' % user
params = {'NewPassword': password}
result = self._call_salesforce('POST', url, data=json.dumps(params))
# salesforce return 204 No Content when the request is successful
if result.status_code != 200 and result.status_code != 204:
raise SalesforceGeneralError(url,
'User',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# pylint: disable=invalid-name
def setPassword(self, user, password):
# pylint: disable=line-too-long
"""Sets the password of a user
salesforce dev documentation link:
https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm
Arguments:
* user: the userID of the user to set
* password: the new password
"""
warnings.warn(
"This method has been deprecated."
"Please use set_password instread.",
DeprecationWarning)
return self.set_password(user, password)
# Generic Rest Function
def restful(self, path, params, method='GET'):
"""Allows you to make a direct REST call if you know the path
Arguments:
* path: The path of the request
Example: sobjects/User/ABC123/password'
* params: dict of parameters to pass to the path
* method: HTTP request method, default GET
"""
url = self.base_url + path
result = self._call_salesforce(method, url, params=params)
if result.status_code != 200:
raise SalesforceGeneralError(url,
path,
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# Search Functions
def search(self, search):
"""Returns the result of a Salesforce search as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* search -- the fully formatted SOSL search string, e.g.
`FIND {Waldo}`
"""
url = self.base_url + 'search/'
# `requests` will correctly encode the query string passed as `params`
params = {'q': search}
result = self._call_salesforce('GET', url, params=params)
if result.status_code != 200:
raise SalesforceGeneralError(url,
'search',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
def quick_search(self, search):
"""Returns the result of a Salesforce search as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* search -- the non-SOSL search string, e.g. `Waldo`. This search
string will be wrapped to read `FIND {Waldo}` before being
sent to Salesforce
"""
search_string = u'FIND {{{search_string}}}'.format(search_string=search)
return self.search(search_string)
# Query Handler
def query(self, query, **kwargs):
"""Return the result of a Salesforce SOQL query as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* query -- the SOQL query to send to Salesforce, e.g.
`SELECT Id FROM Lead WHERE Email = "[email protected]"`
"""
url = self.base_url + 'query/'
params = {'q': query}
# `requests` will correctly encode the query string passed as `params`
result = self._call_salesforce('GET', url, params=params, **kwargs)
if result.status_code != 200:
_exception_handler(result)
return result.json(object_pairs_hook=OrderedDict)
def query_more(
self, next_records_identifier, identifier_is_url=False, **kwargs):
"""Retrieves more results from a query that returned more results
than the batch maximum. Returns a dict decoded from the Salesforce
response JSON payload.
Arguments:
* next_records_identifier -- either the Id of the next Salesforce
object in the result, or a URL to the
next record in the result.
* identifier_is_url -- True if `next_records_identifier` should be
treated as a URL, False if
                               `next_records_identifier` should be treated as
an Id.
"""
if identifier_is_url:
# Don't use `self.base_url` here because the full URI is provided
url = (u'https://{instance}{next_record_url}'
.format(instance=self.sf_instance,
next_record_url=next_records_identifier))
else:
url = self.base_url + 'query/{next_record_id}'
url = url.format(next_record_id=next_records_identifier)
result = self._call_salesforce('GET', url, **kwargs)
if result.status_code != 200:
_exception_handler(result)
return result.json(object_pairs_hook=OrderedDict)
def query_all(self, query, **kwargs):
"""Returns the full set of results for the `query`. This is a
convenience
wrapper around `query(...)` and `query_more(...)`.
The returned dict is the decoded JSON payload from the final call to
Salesforce, but with the `totalSize` field representing the full
number of results retrieved and the `records` list representing the
full list of records retrieved.
Arguments
* query -- the SOQL query to send to Salesforce, e.g.
`SELECT Id FROM Lead WHERE Email = "[email protected]"`
"""
result = self.query(query, **kwargs)
all_records = []
while True:
all_records.extend(result['records'])
# fetch next batch if we're not done else break out of loop
if not result['done']:
result = self.query_more(result['nextRecordsUrl'],
True)
else:
break
result['records'] = all_records
return result
def apexecute(self, action, method='GET', data=None, **kwargs):
"""Makes an HTTP request to an APEX REST endpoint
Arguments:
* action -- The REST endpoint for the request.
* method -- HTTP method for the request (default GET)
* data -- A dict of parameters to send in a POST / PUT request
* kwargs -- Additional kwargs to pass to `requests.request`
"""
result = self._call_salesforce(method, self.apex_url + action,
data=json.dumps(data), **kwargs)
if result.status_code == 200:
try:
response_content = result.json()
# pylint: disable=broad-except
except Exception:
response_content = result.text
return response_content
def _call_salesforce(self, method, url, **kwargs):
"""Utility method for performing HTTP call to Salesforce.
Returns a `requests.result` object.
"""
result = self.session.request(
method, url, headers=self.headers, **kwargs)
if result.status_code >= 300:
_exception_handler(result)
return result
@property
def request(self):
"""Deprecated access to self.session for backwards compatibility"""
_warn_request_deprecation()
return self.session
@request.setter
def request(self, session):
"""Deprecated setter for self.session"""
_warn_request_deprecation()
self.session = session
class SFType(object):
"""An interface to a specific type of SObject"""
# pylint: disable=too-many-arguments
def __init__(
self, object_name, session_id, sf_instance, sf_version='27.0',
proxies=None, session=None):
"""Initialize the instance with the given parameters.
Arguments:
* object_name -- the name of the type of SObject this represents,
e.g. `Lead` or `Contact`
* session_id -- the session ID for authenticating to Salesforce
* sf_instance -- the domain of the instance of Salesforce to use
* sf_version -- the version of the Salesforce API to use
* proxies -- the optional map of scheme to proxy server
* session -- Custom requests session, created in calling code. This
enables the use of requests Session features not otherwise
exposed by simple_salesforce.
"""
self.session_id = session_id
self.name = object_name
self.session = session or requests.Session()
# don't wipe out original proxies with None
if not session and proxies is not None:
self.session.proxies = proxies
self.base_url = (
u'https://{instance}/services/data/v{sf_version}/sobjects'
'/{object_name}/'.format(instance=sf_instance,
object_name=object_name,
sf_version=sf_version))
def metadata(self, headers=None):
"""Returns the result of a GET to `.../{object_name}/` as a dict
decoded from the JSON payload returned by Salesforce.
Arguments:
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce('GET', self.base_url, headers=headers)
return result.json(object_pairs_hook=OrderedDict)
def describe(self, headers=None):
"""Returns the result of a GET to `.../{object_name}/describe` as a
dict decoded from the JSON payload returned by Salesforce.
Arguments:
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce(
method='GET', url=urljoin(self.base_url, 'describe'),
headers=headers
)
return result.json(object_pairs_hook=OrderedDict)
def describe_layout(self, record_id, headers=None):
"""Returns the layout of the object
Returns the result of a GET to
`.../{object_name}/describe/layouts/<recordid>` as a dict decoded from
the JSON payload returned by Salesforce.
Arguments:
* record_id -- the Id of the SObject to get
* headers -- a dict with additional request headers.
"""
custom_url_part = 'describe/layouts/{record_id}'.format(
record_id=record_id
)
result = self._call_salesforce(
method='GET',
url=urljoin(self.base_url, custom_url_part),
headers=headers
)
return result.json(object_pairs_hook=OrderedDict)
def get(self, record_id, headers=None):
"""Returns the result of a GET to `.../{object_name}/{record_id}` as a
dict decoded from the JSON payload returned by Salesforce.
Arguments:
* record_id -- the Id of the SObject to get
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce(
method='GET', url=urljoin(self.base_url, record_id),
headers=headers
)
return result.json(object_pairs_hook=OrderedDict)
def get_by_custom_id(self, custom_id_field, custom_id, headers=None):
"""Return an ``SFType`` by custom ID
Returns the result of a GET to
`.../{object_name}/{custom_id_field}/{custom_id}` as a dict decoded
from the JSON payload returned by Salesforce.
Arguments:
* custom_id_field -- the API name of a custom field that was defined
as an External ID
* custom_id - the External ID value of the SObject to get
* headers -- a dict with additional request headers.
"""
custom_url = urljoin(
self.base_url, '{custom_id_field}/{custom_id}'.format(
custom_id_field=custom_id_field, custom_id=custom_id
)
)
result = self._call_salesforce(
method='GET', url=custom_url, headers=headers
)
return result.json(object_pairs_hook=OrderedDict)
def create(self, data, headers=None):
"""Creates a new SObject using a POST to `.../{object_name}/`.
Returns a dict decoded from the JSON payload returned by Salesforce.
Arguments:
* data -- a dict of the data to create the SObject from. It will be
JSON-encoded before being transmitted.
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce(
method='POST', url=self.base_url,
data=json.dumps(data), headers=headers
)
return result.json(object_pairs_hook=OrderedDict)
def upsert(self, record_id, data, raw_response=False, headers=None):
"""Creates or updates an SObject using a PATCH to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- an identifier for the SObject as described in the
Salesforce documentation
* data -- a dict of the data to create or update the SObject from. It
will be JSON-encoded before being transmitted.
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce(
method='PATCH', url=urljoin(self.base_url, record_id),
data=json.dumps(data), headers=headers
)
return self._raw_response(result, raw_response)
def update(self, record_id, data, raw_response=False, headers=None):
"""Updates an SObject using a PATCH to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- the Id of the SObject to update
* data -- a dict of the data to update the SObject from. It will be
JSON-encoded before being transmitted.
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce(
method='PATCH', url=urljoin(self.base_url, record_id),
data=json.dumps(data), headers=headers
)
return self._raw_response(result, raw_response)
def delete(self, record_id, raw_response=False, headers=None):
"""Deletes an SObject using a DELETE to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- the Id of the SObject to delete
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
* headers -- a dict with additional request headers.
"""
result = self._call_salesforce(
method='DELETE', url=urljoin(self.base_url, record_id),
headers=headers
)
return self._raw_response(result, raw_response)
def deleted(self, start, end, headers=None):
# pylint: disable=line-too-long
"""Gets a list of deleted records
Use the SObject Get Deleted resource to get a list of deleted records
for the specified object.
.../deleted/?start=2013-05-05T00:00:00+00:00&end=2013-05-10T00:00:00+00:00
* start -- start datetime object
* end -- end datetime object
* headers -- a dict with additional request headers.
"""
url = urljoin(
self.base_url, 'deleted/?start={start}&end={end}'.format(
start=date_to_iso8601(start), end=date_to_iso8601(end)
)
)
result = self._call_salesforce(method='GET', url=url, headers=headers)
return result.json(object_pairs_hook=OrderedDict)
def updated(self, start, end, headers=None):
# pylint: disable=line-too-long
"""Gets a list of updated records
Use the SObject Get Updated resource to get a list of updated
(modified or added) records for the specified object.
.../updated/?start=2014-03-20T00:00:00+00:00&end=2014-03-22T00:00:00+00:00
* start -- start datetime object
* end -- end datetime object
* headers -- a dict with additional request headers.
"""
url = urljoin(
self.base_url, 'updated/?start={start}&end={end}'.format(
start=date_to_iso8601(start), end=date_to_iso8601(end)
)
)
result = self._call_salesforce(method='GET', url=url, headers=headers)
return result.json(object_pairs_hook=OrderedDict)
def _call_salesforce(self, method, url, **kwargs):
"""Utility method for performing HTTP call to Salesforce.
Returns a `requests.result` object.
"""
headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + self.session_id,
'X-PrettyPrint': '1'
}
additional_headers = kwargs.pop('headers', dict())
headers.update(additional_headers or dict())
result = self.session.request(method, url, headers=headers, **kwargs)
if result.status_code >= 300:
_exception_handler(result, self.name)
return result
# pylint: disable=no-self-use
def _raw_response(self, response, body_flag):
"""Utility method for processing the response and returning either the
status code or the response object.
Returns either an `int` or a `requests.Response` object.
"""
if not body_flag:
return response.status_code
else:
return response
@property
def request(self):
"""Deprecated access to self.session for backwards compatibility"""
_warn_request_deprecation()
return self.session
@request.setter
def request(self, session):
"""Deprecated setter for self.session"""
_warn_request_deprecation()
self.session = session
class SalesforceAPI(Salesforce):
"""Deprecated SalesforceAPI Instance
    This class implements the username/password authentication mechanism using
    positional arguments. It has since been superseded by the 'Salesforce'
    class, which relies on kwargs.
"""
# pylint: disable=too-many-arguments
def __init__(self, username, password, security_token, sandbox=False,
sf_version='27.0'):
"""Initialize the instance with the given parameters.
Arguments:
* username -- the Salesforce username to use for authentication
* password -- the password for the username
* security_token -- the security token for the username
* sandbox -- True if you want to login to `test.salesforce.com`, False
if you want to login to `login.salesforce.com`.
* sf_version -- the version of the Salesforce API to use, for example
"27.0"
"""
warnings.warn(
"Use of login arguments has been deprecated. Please use kwargs",
DeprecationWarning
)
super(SalesforceAPI, self).__init__(username=username,
password=password,
security_token=security_token,
sandbox=sandbox,
version=sf_version)
def _exception_handler(result, name=""):
"""Exception router. Determines which error to raise for bad results"""
try:
response_content = result.json()
# pylint: disable=broad-except
except Exception:
response_content = result.text
exc_map = {
300: SalesforceMoreThanOneRecord,
400: SalesforceMalformedRequest,
401: SalesforceExpiredSession,
403: SalesforceRefusedRequest,
404: SalesforceResourceNotFound,
}
exc_cls = exc_map.get(result.status_code, SalesforceGeneralError)
raise exc_cls(result.url, result.status_code, name, response_content)
class SalesforceMoreThanOneRecord(SalesforceError):
"""
Error Code: 300
The value returned when an external ID exists in more than one record. The
response body contains the list of matching records.
"""
message = u"More than one record for {url}. Response content: {content}"
class SalesforceMalformedRequest(SalesforceError):
"""
Error Code: 400
    The request couldn't be understood, usually because the JSON or XML body
contains an error.
"""
message = u"Malformed request {url}. Response content: {content}"
class SalesforceExpiredSession(SalesforceError):
"""
Error Code: 401
The session ID or OAuth token used has expired or is invalid. The response
body contains the message and errorCode.
"""
message = u"Expired session for {url}. Response content: {content}"
class SalesforceRefusedRequest(SalesforceError):
"""
Error Code: 403
The request has been refused. Verify that the logged-in user has
appropriate permissions.
"""
message = u"Request refused for {url}. Response content: {content}"
class SalesforceResourceNotFound(SalesforceError):
"""
Error Code: 404
The requested resource couldn't be found. Check the URI for errors, and
verify that there are no sharing issues.
"""
message = u'Resource {name} Not Found. Response content: {content}'
def __str__(self):
return self.message.format(name=self.resource_name,
content=self.content)
class SalesforceGeneralError(SalesforceError):
"""
A non-specific Salesforce error.
"""
message = u'Error Code {status}. Response content: {content}'
def __str__(self):
return self.message.format(status=self.status, content=self.content)
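# --- Illustrative usage sketch (not part of the library; credentials below
# are placeholders and must be replaced with real values) ---
#
#     sf = Salesforce(username='user@example.com', password='secret',
#                     security_token='token')
#     leads = sf.query_all("SELECT Id, Email FROM Lead")
#     for record in leads['records']:
#         print(record['Id'])
#
#     # SObject access is routed through __getattr__:
#     sf.Contact.create({'LastName': 'Smith', 'Email': 'smith@example.com'})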
|
py | 1a44137408af703395b8caca19c60ec3b0522247 | import statistics
import numpy as np
import logging
# ### self defined class
from carViewLibV2 import runWithFPS
class landMark():
def __init__(self, id):
self.markVaildCount = 4
self.markPosXList = []
self.markPosYList = []
self.frameTimeList = []
self.id = id
def addPos(self, pos, frameTime = 1.0/30.0):
self.markPosXList.append(pos['x'])
self.markPosYList.append(pos['y'])
self.frameTimeList.append(frameTime)
def getLastPos(self):
try:
rX, rY = self.markPosXList[-1],self.markPosYList[-1]
except:
rX, rY = None, None
return rX, rY
def isVaildMark(self):
if len(self.frameTimeList)>=self.markVaildCount:
return True
else:
return False
def getVelocity(self):
        ### call this function when the mark leaves the view
# DISTANCE_FACTOR = 80.0 ### carView04.mp4
# DISTANCE_FACTOR = 30.0 ### outside3.mp4
# DISTANCE_FACTOR = 60.0 ### testDistance3.mp4
# totalT = sum(self.frameTimeList)
# velcity = DISTANCE_FACTOR / totalT
### count last self.markVaildCount as velocity
DISTANCE_FACTOR = 1
distance = self.markPosYList[-1] - self.markPosYList[-self.markVaildCount]
totalT = sum(self.frameTimeList[-5:])
velcity = distance * DISTANCE_FACTOR / totalT
return velcity
def isInPosList(self, markPosYList, ft):
DISTANCE_MARK = 30
mx, my = self.getLastPos()
for i, posY in enumerate(markPosYList):
if my-2 <= posY and my+DISTANCE_MARK > posY:
pos = {"x": 0, "y": posY}
self.addPos(pos, frameTime = ft)
markPosYList.pop(i)
# print("markPosYList pop.")
return True
return False
class traceMark():
# DISTANCE_MARK = 15
def __init__(self):
self.count = 0
self.markList = []
self.markIdList = []
self.velocityList = []
self.previousVelocity = 0
def addMark(self, pos, ft):
mark = landMark(self.count)
mark.addPos(pos, frameTime=ft)
self.markList.append(mark)
self.markIdList.append(self.count)
self.count += 1
def getMedVelocity(self):
if len(self.velocityList)>5:
self.velocityList = self.velocityList[-5:]
mean = statistics.mean(self.velocityList)
# vStd = statistics.stdev(self.velocityList)
# try:
# self.velocityList = [v for v in self.velocityList if v > mean-(4*vStd) and v < mean+(4*vStd)]
# vel = statistics.median(self.velocityList)
# return vel
# except:
# return mean
            if self.previousVelocity == mean:  ### avoid repeating a stale velocity when no new mark was seen
return 0
else:
self.previousVelocity = mean
return mean
elif len(self.velocityList)>0:
mean = statistics.mean(self.velocityList)
            if self.previousVelocity == mean:  ### avoid repeating a stale velocity when no new mark was seen
return 0
else:
self.previousVelocity = mean
return mean
else:
return 0
def processMark(self, maxLocation, fps = 1.0/30.0):
# DISTANCE_MARK = 20
DISTANCE_MARK = 30
# array1D = maxLocation[int(len(maxLocation)/2):] ### take only bottom half
array1D = maxLocation[int(len(maxLocation)/2)-50:-50] ### take only bottom half
xArray = np.array(range(len(array1D)))
zeroIdx = [i for i in range(len(array1D)) if array1D[i] == 0]
yArrayTrim = [array1D[i] for i in range(len(array1D)) if i not in zeroIdx]
xArrayTrim = [xArray[i] for i in range(len(xArray)) if i not in zeroIdx]
markPosYList = []
tmpPosYList = []
currentIdx = -1
for i in range(len(xArrayTrim)):
currentY = xArrayTrim[i]
if currentIdx < 0:
markPosYList.append(currentY)
tmpPosYList.append(currentY)
currentIdx += 1
elif currentIdx >=0 and tmpPosYList[currentIdx] > currentY -2:
tmpPosYList[currentIdx] = currentY
elif currentIdx >=0 and markPosYList[currentIdx] < currentY -DISTANCE_MARK:
markPosYList.append(currentY)
tmpPosYList.append(currentY)
currentIdx += 1
# print("markPosYList:",markPosYList)
if len(markPosYList) > 0 and markPosYList[0] == 0:
markPosYList.pop(0) ### remove 0 from list
newList = []
ft = fps if type(fps)==type(0.1) else fps.getTime()
for mark in self.markList:
logging.debug((f"marklsit len: {len(self.markList)}, markpos: {mark.markPosYList}, {mark.frameTimeList}"))
if mark.isInPosList(markPosYList, ft) :
newList.append(mark)
# elif mark.isVaildMark():
if mark.isVaildMark():
vel = mark.getVelocity()
if vel <200:
self.velocityList.append(vel)
# vel = self.getMedVelocity()
logging.debug((f"velocity: {vel:.1f}, len: {len(self.velocityList)}"))
# logging.warning((f"velocity: {vel:.1f}, len: {len(self.velocityList)}"))
# print(f"velocity: {vel:.1f}")
else:
logging.debug("Invalid mark.")
self.markList = newList
for posY in markPosYList:
# print("Mark added")
pos = {"x": 0, "y": posY}
self.addMark(pos, ft)
# print("self.markList",len(self.markList))
|
py | 1a4413bf67209599b8ef06c1810a06378f123958 | #!/usr/bin/python
# Classification (U)
"""Program: rabbitmqadmin_list_vhost_topic_permissions.py
Description: Unit testing of RabbitMQAdmin.list_vhost_topic_permissions in
rabbitmq_class.py.
Usage:
test/unit/rabbitmq_class/rabbitmqadmin_list_vhost_topic_permissions.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import rabbitmq_class
import version
__version__ = version.__version__
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_basic
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.name = "UserName"
self.japd = "japd"
self.rmq = rabbitmq_class.RabbitMQAdmin(self.name, self.japd)
self.data = {"key": "value"}
self.vhost = "VhostName"
self.results = {"key": "value"}
@mock.patch("rabbitmq_class.RabbitMQBase.api_get")
def test_basic(self, mock_get):
"""Function: test_basic
Description: Test with basic set up.
Arguments:
"""
mock_get.return_value = self.data
self.assertEqual(
self.rmq.list_vhost_topic_permissions(self.vhost), self.results)
if __name__ == "__main__":
unittest.main()
|
py | 1a44141c061fe4cca1a2306a3cd943b2d554722d | import numpy
import scipy.linalg
import time
from pauxy.estimators.mixed import (
variational_energy, variational_energy_ortho_det, local_energy
)
from pauxy.estimators.greens_function import gab, gab_spin, gab_mod, gab_mod_ovlp
from pauxy.estimators.ci import get_hmatel, get_one_body_matel
from pauxy.utils.io import (
get_input_value,
write_qmcpack_wfn
)
from pauxy.utils.mpi import get_shared_array
class MultiSlater(object):
def __init__(self, system, wfn, nbasis=None, options={},
init=None, verbose=False, orbs=None):
self.verbose = verbose
if verbose:
print ("# Parsing MultiSlater trial wavefunction input options.")
init_time = time.time()
self.name = "MultiSlater"
self.type = "MultiSlater"
# TODO : Fix for MSD.
# This is for the overlap trial
if len(wfn) == 3:
# CI type expansion.
self.from_phmsd(system, wfn, orbs)
self.ortho_expansion = True
else:
self.psi = wfn[1]
self.coeffs = numpy.array(wfn[0], dtype=numpy.complex128)
self.ortho_expansion = False
self.split_trial_local_energy = options.get('split_trial_local_energy', False)
if verbose:
print("# split_trial_local_energy = {}".format(self.split_trial_local_energy))
if self.split_trial_local_energy:
if verbose:
print("# taking the determinant with the largest coefficient as the local energy trial")
imax = numpy.argmax(numpy.abs(self.coeffs))
self.le_coeffs = numpy.array([self.coeffs[imax]], dtype=numpy.complex128)
self.le_psi = numpy.array([self.psi[imax,:,:]], dtype=self.psi.dtype)
self.le_ortho_expansion = self.ortho_expansion
else:
self.le_psi = self.psi.copy()
self.le_coeffs = self.coeffs.copy()
self.le_ortho_expansion = self.ortho_expansion
if self.verbose:
if self.ortho_expansion:
print("# Assuming orthogonal trial wavefunction expansion.")
else:
print("# Assuming non-orthogonal trial wavefunction expansion.")
print("# Trial wavefunction shape: {}".format(self.psi.shape))
self.ndets = len(self.coeffs)
if self.ndets == 1:
# self.psi = self.psi[0]
self.G, self.GH = gab_spin(self.psi[0], self.psi[0],
system.nup, system.ndown)
else:
self.G = None
self.GH = None
if init is not None:
if verbose:
print("# Using initial wavefunction from file.")
self.init = init
else:
if verbose:
print("# Setting initial wavefunction as first determinant in"
" expansion.")
if len(self.psi.shape) == 3:
self.init = self.psi[0].copy()
else:
self.init = self.psi.copy()
self.error = False
self.initialisation_time = time.time() - init_time
self._nalpha = system.nup
self._nbeta = system.ndown
self._nelec = system.nelec
self._nbasis = system.nbasis
self._rchol = None
self._UVT = None
self._eri = None
self._mem_required = 0.0
self.ecoul0 = None
self.exxa0 = None
self.exxb0 = None
write_wfn = options.get('write_wavefunction', False)
output_file = options.get('output_file', 'wfn.h5')
if write_wfn:
self.write_wavefunction(filename=output_file)
if verbose:
print ("# Finished setting up trial wavefunction.")
def local_energy_2body(self, system):
"""Compute walkers two-body local energy
Parameters
----------
system : object
System object.
Returns
-------
(E, T, V) : tuple
Mixed estimates for walker's energy components.
"""
nalpha, nbeta = system.nup, system.ndown
nbasis = system.nbasis
naux = self._rchol.shape[1]
Ga, Gb = self.GH[0], self.GH[1]
Xa = self._rchol[:nalpha*nbasis].T.dot(Ga.ravel())
Xb = self._rchol[nalpha*nbasis:].T.dot(Gb.ravel())
ecoul = numpy.dot(Xa,Xa)
ecoul += numpy.dot(Xb,Xb)
ecoul += 2*numpy.dot(Xa,Xb)
rchol_a, rchol_b = self._rchol[:nalpha*nbasis], self._rchol[nalpha*nbasis:]
rchol_a = rchol_a.T
rchol_b = rchol_b.T
Ta = numpy.zeros((naux, nalpha, nalpha), dtype=rchol_a.dtype)
Tb = numpy.zeros((naux, nbeta, nbeta), dtype=rchol_b.dtype)
GaT = Ga.T
GbT = Gb.T
for x in range(naux):
rmi_a = rchol_a[x].reshape((nalpha,nbasis))
Ta[x] = rmi_a.dot(GaT)
rmi_b = rchol_b[x].reshape((nbeta,nbasis))
Tb[x] = rmi_b.dot(GbT)
exxa = numpy.tensordot(Ta, Ta, axes=((0,1,2),(0,2,1)))
exxb = numpy.tensordot(Tb, Tb, axes=((0,1,2),(0,2,1)))
exx = exxa + exxb
e2b = 0.5 * (ecoul - exx)
return ecoul, exxa, exxb
def calculate_energy(self, system):
if self.verbose:
print("# Computing trial wavefunction energy.")
start = time.time()
# Cannot use usual energy evaluation routines if trial is orthogonal.
if self.ortho_expansion:
self.energy, self.e1b, self.e2b = (
variational_energy_ortho_det(system,
self.spin_occs,
self.coeffs)
)
else:
(self.energy, self.e1b, self.e2b) = (
variational_energy(system, self.psi, self.coeffs,
G=self.G, GH=self.GH,
rchol=self._rchol, eri=self._eri,
C0 = self.psi,
ecoul0 = self.ecoul0,
exxa0 = self.exxa0,
exxb0 = self.exxb0,
UVT=self._UVT)
)
if self.verbose:
print("# (E, E1B, E2B): (%13.8e, %13.8e, %13.8e)"
%(self.energy.real, self.e1b.real, self.e2b.real))
print("# Time to evaluate local energy: %f s"%(time.time()-start))
def from_phmsd(self, system, wfn, orbs):
ndets = len(wfn[0])
self.psi = numpy.zeros((ndets,system.nbasis,system.ne),
dtype=numpy.complex128)
if self.verbose:
print("# Creating trial wavefunction from CI-like expansion.")
if orbs is None:
if self.verbose:
print("# Assuming RHF reference.")
I = numpy.eye(system.nbasis, dtype=numpy.complex128)
# Store alpha electrons first followed by beta electrons.
nb = system.nbasis
dets = [list(a) + [i+nb for i in c] for (a,c) in zip(wfn[1],wfn[2])]
self.spin_occs = [numpy.sort(d) for d in dets]
self.occa = wfn[1]
self.occb = wfn[2]
self.coeffs = numpy.array(wfn[0], dtype=numpy.complex128)
for idet, (occa, occb) in enumerate(zip(wfn[1], wfn[2])):
self.psi[idet,:,:system.nup] = I[:,occa]
self.psi[idet,:,system.nup:] = I[:,occb]
def recompute_ci_coeffs(self, system):
H = numpy.zeros((self.ndets, self.ndets), dtype=numpy.complex128)
S = numpy.zeros((self.ndets, self.ndets), dtype=numpy.complex128)
m = system.nbasis
na = system.nup
nb = system.ndown
if self.ortho_expansion:
for i in range(self.ndets):
for j in range(i,self.ndets):
di = self.spin_occs[i]
dj = self.spin_occs[j]
H[i,j] = get_hmatel(system,di,dj)[0]
e, ev = scipy.linalg.eigh(H, lower=False)
else:
na = system.nup
for i, di in enumerate(self.psi):
for j, dj in enumerate(self.psi):
if j >= i:
ga, gha, ioa = gab_mod_ovlp(di[:,:na], dj[:,:na])
gb, ghb, iob = gab_mod_ovlp(di[:,na:], dj[:,na:])
G = numpy.array([ga,gb])
Ghalf = numpy.array([gha,ghb])
ovlp = 1.0/(scipy.linalg.det(ioa)*scipy.linalg.det(iob))
if abs(ovlp) > 1e-12:
if self._rchol is not None:
rchol = self.rchol(i)
else:
rchol = None
H[i,j] = ovlp * local_energy(system, G,
Ghalf=Ghalf,
rchol=rchol)[0]
S[i,j] = ovlp
H[j,i] = numpy.conjugate(H[i,j])
S[j,i] = numpy.conjugate(S[i,j])
e, ev = scipy.linalg.eigh(H, S, lower=False)
# if self.verbose:
# print("Old and New CI coefficients: ")
# for co,cn in zip(self.coeffs,ev[:,0]):
# print("{} {}".format(co, cn))
return numpy.array(ev[:,0], dtype=numpy.complex128)
def contract_one_body(self, ints):
numer = 0.0
denom = 0.0
na = self._nalpha
for i in range(self.ndets):
for j in range(self.ndets):
cfac = self.coeffs[i].conj()*self.coeffs[j].conj()
if self.ortho_expansion:
di = self.spin_occs[i]
dj = self.spin_occs[j]
tij = get_one_body_matel(ints,di,dj)
numer += cfac * tij
if i == j:
denom += self.coeffs[i].conj()*self.coeffs[i].conj()
else:
di = self.psi[i]
dj = self.psi[j]
ga, gha, ioa = gab_mod_ovlp(di[:,:na], dj[:,:na])
gb, ghb, iob = gab_mod_ovlp(di[:,na:], dj[:,na:])
ovlp = 1.0/(scipy.linalg.det(ioa)*scipy.linalg.det(iob))
tij = numpy.dot(ints.ravel(), ga.ravel()+gb.ravel())
numer += cfac * ovlp * tij
denom += cfac * ovlp
return numer / denom
def write_wavefunction(self, filename='wfn.h5', init=None, occs=False):
if occs:
wfn = (self.coeffs, self.occa, self.occb)
else:
wfn = (self.coeffs, self.psi)
write_qmcpack_wfn(filename, wfn, 'uhf', self._nelec, self._nbasis,
init=init)
def half_rotate(self, system, comm=None):
# Half rotated cholesky vectors (by trial wavefunction).
M = system.nbasis
na = system.nup
nb = system.ndown
nchol = system.chol_vecs.shape[-1]
if self.verbose:
print("# Constructing half rotated Cholesky vectors.")
if isinstance(system.chol_vecs, numpy.ndarray):
chol = system.chol_vecs.reshape((M,M,nchol))
else:
chol = system.chol_vecs.toarray().reshape((M,M,nchol))
if (system.exact_eri):
shape = (self.ndets,(M**2*(na**2+nb**2) + M**2*(na*nb)))
self._eri = get_shared_array(comm, shape, numpy.complex128)
self._mem_required = self._eri.nbytes / (1024.0**3.0)
for i, psi in enumerate(self.psi):
vipjq_aa = numpy.einsum("mpX,rqX,mi,rj->ipjq", chol, chol, psi[:,:na].conj(), psi[:,:na].conj(), optimize=True)
vipjq_bb = numpy.einsum("mpX,rqX,mi,rj->ipjq", chol, chol, psi[:,na:].conj(), psi[:,na:].conj(), optimize=True)
vipjq_ab = numpy.einsum("mpX,rqX,mi,rj->ipjq", chol, chol, psi[:,:na].conj(), psi[:,na:].conj(), optimize=True)
self._eri[i,:M**2*na**2] = vipjq_aa.ravel()
self._eri[i,M**2*na**2:M**2*na**2+M**2*nb**2] = vipjq_bb.ravel()
self._eri[i,M**2*na**2+M**2*nb**2:] = vipjq_ab.ravel()
if (system.pno):
thresh_pno = system.thresh_pno
UVT_aa = []
UVT_bb = []
UVT_ab = []
nocca = system.nup
noccb = system.ndown
nvira = system.nbasis - system.nup
nvirb = system.nbasis - system.ndown
r_aa = []
for i in range(na):
for j in range(i, na):
Vab = vipjq_aa[i,:,j,:]
U, s, VT = numpy.linalg.svd(Vab)
idx = s > thresh_pno
U = U[:,idx]
s = s[idx]
r_aa += [s.shape[0] / float(system.nbasis)]
VT = VT[idx,:]
U = U.dot(numpy.diag(numpy.sqrt(s)))
VT = numpy.diag(numpy.sqrt(s)).dot(VT)
UVT_aa += [(U, VT)]
r_aa = numpy.array(r_aa)
r_aa = numpy.mean(r_aa)
r_bb = []
for i in range(nb):
for j in range(i, nb):
Vab = vipjq_bb[i,:,j,:]
U, s, VT = numpy.linalg.svd(Vab)
idx = s > thresh_pno
U = U[:,idx]
s = s[idx]
r_bb += [s.shape[0] / float(system.nbasis)]
VT = VT[idx,:]
U = U.dot(numpy.diag(numpy.sqrt(s)))
VT = numpy.diag(numpy.sqrt(s)).dot(VT)
UVT_bb += [(U, VT)]
r_bb = numpy.array(r_bb)
r_bb = numpy.mean(r_bb)
r_ab = []
for i in range(na):
for j in range(nb):
Vab = vipjq_ab[i,:,j,:]
U, s, VT = numpy.linalg.svd(Vab)
idx = s > thresh_pno
U = U[:,idx]
s = s[idx]
r_ab += [s.shape[0] / float(system.nbasis)]
VT = VT[idx,:]
U = U.dot(numpy.diag(numpy.sqrt(s)))
VT = numpy.diag(numpy.sqrt(s)).dot(VT)
UVT_ab += [(U, VT)]
r_ab = numpy.array(r_ab)
r_ab = numpy.mean(r_ab)
self._UVT = [UVT_aa, UVT_bb, UVT_ab]
self._eri = None
if self.verbose:
print("# Average number of orbitals (relative to total) for aa, bb, ab = {}, {}, {}".format(r_aa, r_bb, r_ab))
if self.verbose:
print("# Memory required by exact ERIs: "
" {:.4f} GB.".format(self._mem_required))
if comm is not None:
comm.barrier()
# else:
shape = (self.ndets*(M*(na+nb)), nchol)
self._rchol = get_shared_array(comm, shape, numpy.complex128)
for i, psi in enumerate(self.psi):
start_time = time.time()
if self.verbose:
print("# Rotating Cholesky for determinant {} of "
"{}.".format(i+1,self.ndets))
start = i*M*(na+nb)
compute = True
# Distribute amongst MPI tasks on this node.
if comm is not None:
nwork_per_thread = chol.shape[-1] // comm.size
if nwork_per_thread == 0:
start_n = 0
end_n = nchol
if comm.rank != 0:
# Just run on root processor if problem too small.
compute = False
else:
start_n = comm.rank * nwork_per_thread
end_n = (comm.rank+1) * nwork_per_thread
if comm.rank == comm.size - 1:
end_n = nchol
else:
start_n = 0
end_n = chol.shape[-1]
nchol_loc = end_n - start_n
# if comm.rank == 0:
# print(start_n, end_n, nchol_loc)
# print(numpy.may_share_memory(chol, chol[:,start_n:end_n]))
if compute:
rup = numpy.tensordot(psi[:,:na].conj(),
chol[:,:,start_n:end_n],
axes=((0),(0))).reshape((na*M,nchol_loc))
self._rchol[start:start+M*na,start_n:end_n] = rup[:]
rdn = numpy.tensordot(psi[:,na:].conj(),
chol[:,:,start_n:end_n],
axes=((0),(0))).reshape((nb*M,nchol_loc))
self._rchol[start+M*na:start+M*(na+nb),start_n:end_n] = rdn[:]
self._mem_required = self._rchol.nbytes / (1024.0**3.0)
if self.verbose:
print("# Memory required by half-rotated integrals: "
" {:.4f} GB.".format(self._mem_required))
print("# Time to half rotate {} seconds.".format(time.time()-start_time))
if comm is not None:
comm.barrier()
self._rot_hs_pot = self._rchol
if(system.control_variate):
self.ecoul0, self.exxa0, self.exxb0 = self.local_energy_2body(system)
def rot_chol(self, idet=0, spin=None):
"""Helper function"""
if spin is None:
stride = self._nbasis * (self._nalpha + self._nbeta)
return self._rchol[idet*stride:(idet+1)*stride]
else:
stride = self._nbasis * (self._nalpha + self._nbeta)
alpha = self._nbasis * self._nalpha
if spin == 0:
return self._rchol[idet*stride:idet*stride+alpha]
else:
beta = self._nbasis * self._nbeta
return self._rchol[idet*stride+alpha:idet*stride+alpha+beta]
def rot_hs_pot(self, idet=0, spin=None):
"""Helper function"""
if spin is None:
stride = self._nbasis * (self._nalpha + self._nbeta)
return self._rot_hs_pot[idet*stride:(idet+1)*stride]
else:
stride = self._nbasis * (self._nalpha + self._nbeta)
alpha = self._nbasis * self._nalpha
if spin == 0:
return self._rot_hs_pot[idet*stride:idet*stride+alpha]
else:
beta = self._nbasis * self._nbeta
return self._rot_hs_pot[idet*stride+alpha:idet*stride+alpha+beta]
# TODO: Implement
# def half_rotate_cplx(self, system, comm=None):
# # Half rotated cholesky vectors (by trial wavefunction).
# M = system.nbasis
# na = system.nup
# nb = system.ndown
# nchol = system.chol_vecs.shape[-1]
# if self.verbose:
# print("# Constructing half rotated Cholesky vectors.")
# if isinstance(system.chol_vecs, numpy.ndarray):
# chol = system.chol_vecs.reshape((M,M,-1))
# else:
# chol = system.chol_vecs.toarray().reshape((M,M,-1))
# if comm is None or comm.rank == 0:
# shape = (self.ndets*(M*(na+nb)), nchol)
# else:
# shape = None
# self.rchol = get_shared_array(comm, shape, numpy.complex128)
# if comm is None or comm.rank == 0:
# for i, psi in enumerate(self.psi):
# start_time = time.time()
# if self.verbose:
# print("# Rotating Cholesky for determinant {} of "
# "{}.".format(i+1,self.ndets))
# start = i*M*(na+nb)
# rup = numpy.tensordot(psi[:,:na].conj(),
# chol,
# axes=((0),(0)))
# self.rchol[start:start+M*na] = rup[:].reshape((-1,nchol))
# rdn = numpy.tensordot(psi[:,na:].conj(),
# chol,
# axes=((0),(0)))
# self.rchol[start+M*na:start+M*(na+nb)] = rdn[:].reshape((-1,nchol))
# if self.verbose:
# print("# Time to half rotate {} seconds.".format(time.time()-start_time))
# self.rot_hs_pot = self.rchol
|
py | 1a44146a4d170a28b1fabea45a3093f3d2196296 | # Standard libraries
import logging
import random
import re
import time
# third party libraries
import tweepy
class RetweetGiveaway:
def __init__(self, api, user):
"""
RetweetGiveaway class constructor, requires api object and user object
:param api tweepy.API: api object from tweepy library
:param user tweepy.API.me() : User object for current bot
"""
self.user = user
self.api = api
self.bot_action = []
def check_retweet(self, words_to_search, accounts_to_blacklist, hashtag_to_blacklist, giveaway_to_blacklist,
comment_with_hashtag, max_giveaway):
"""
        Check for giveaway tweets, filtering out blacklisted accounts,
        hashtags, and giveaway keywords.
:param words_to_search list: List of Keywords to Search tweet for
:param accounts_to_blacklist list: List of Blacklisted Accounts to Ignore
:param hashtag_to_blacklist list: List of Blacklisted Hashtags in tweets to ignore
:param giveaway_to_blacklist list: List of Blacklisted Giveaways to Ignore
        :param comment_with_hashtag boolean: Whether to include hashtags in the reply
        :param max_giveaway integer: Maximum number of giveaway tweets to retrieve per keyword
"""
action = []
regex_detect_tag = [r"\b(\w*INVIT(E|É)\w*)\b",
r"\b(\w*IDENTIFI(E|É)\w*)\b",
r"\b(\w*TAG\w*)\b",
r"\b(\w*MENTIONN(E|É)\w*)\b"]
regex_detect_tag = re.compile('|'.join(regex_detect_tag), re.IGNORECASE)
for word in words_to_search:
logging.info("Searching giveaway with the word : %s", word)
for tweet in tweepy.Cursor(self.api.search,
q=word, since=time.strftime('%Y-%m-%d', time.localtime()),
lang="fr", tweet_mode="extended").items(max_giveaway):
if tweet.retweet_count > 5:
is_in_blacklist = [ele for ele in giveaway_to_blacklist if (ele in tweet.full_text)]
if is_in_blacklist:
pass
else:
# Check if it's a retweet
if hasattr(tweet, 'retweeted_status'):
screen_name = tweet.retweeted_status.author.screen_name
entities = tweet.retweeted_status.entities
full_text = tweet.retweeted_status.full_text
extra = 0
else:
screen_name = tweet.user.screen_name
entities = tweet.entities
full_text = tweet.full_text
extra = 3
# Check if Tweet Author is blacklisted or not
if screen_name not in accounts_to_blacklist:
# Check for INVITE/TAG/MENTIONNE in retweet text
if re.search(regex_detect_tag, full_text):
# Check if tweet has Hashtags
if len(entities['hashtags']) > 0:
# if comment with hashtag is enabled
if comment_with_hashtag:
# Clean Hastags
h_list = self.manage_hashtag(entities['hashtags'],
hashtag_to_blacklist)
# If we find Hashtags -> Record the tweet
if h_list:
action.append(tweet)
action.append(1 + extra)
self.bot_action.append(action)
else:
action.append(tweet)
action.append(2 + extra)
self.bot_action.append(action)
else:
action.append(tweet)
action.append(2 + extra)
self.bot_action.append(action)
# Else Select Action 2
else:
action.append(tweet)
action.append(2 + extra)
self.bot_action.append(action)
# If regex-tags not found, record the tweet without action number
else:
action.append(tweet)
self.bot_action.append(action)
action = []
return self.bot_action
def manage_giveaway(self, list_giveaway, sentence_for_tag, list_name, hashtag_to_blacklist, managefollow,
like_giveaway, nb_account_to_tag):
"""
        Handle giveaway tweets by following/commenting/tagging depending on the giveaway level
:param list_giveaway list: List of Giveaways tweets and (optional) Giveaway levels
:param sentence_for_tag list: List of Random Sentences to use for commenting
:param list_name list: List of Names to Randomly Tag on giveaways
        :param hashtag_to_blacklist list: List of hashtags to blacklist
        :param managefollow managefollow: Database management object from ManageFollow
        :param like_giveaway boolean: Whether to like giveaway tweets
"""
for giveaway in list_giveaway:
tweet = giveaway[0]
try:
if hasattr(tweet, 'retweeted_status'):
retweeted = tweet.retweeted_status.retweeted
id_ = tweet.retweeted_status.id
author_id = tweet.retweeted_status.author.id
entities = tweet.retweeted_status.entities
screen_name = tweet.retweeted_status.user.screen_name
else:
retweeted = tweet.retweeted
id_ = tweet.id
author_id = tweet.user.id
entities = tweet.entities
screen_name = tweet.user.screen_name
if not retweeted:
self.api.retweet(id_)
if like_giveaway:
self.api.create_favorite(id_)
self.api.create_friendship(author_id)
if len(giveaway) == 2:
comment_level = giveaway[1]
self.comment(tweet, sentence_for_tag, comment_level, list_name, hashtag_to_blacklist,
nb_account_to_tag)
managefollow.update_table(author_id)
if len(entities['user_mentions']) > 0:
for mention in entities['user_mentions']:
self.api.create_friendship(mention['id'])
managefollow.update_table(mention['id'])
random_sleep_time = random.randrange(10, 20)
logging.info("You participated in the giveaway of : @%s. Sleeping for %ss...",
screen_name,
str(random_sleep_time))
time.sleep(random_sleep_time)
except tweepy.TweepError as e:
if e.api_code == 327:
pass
elif e.api_code == 161:
logging.warning("The account can no longer follow. We go to the next step.")
break
elif e.api_code == 136:
logging.info("You have been blocked by: %s", screen_name)
break
elif e.api_code == 326:
logging.warning("You have to do a captcha on the account: %s", self.user.screen_name)
break
else:
logging.error(e)
def comment(self, tweet, sentence_for_tag, hashtag, list_name, hashtag_to_blacklist, nb_account_to_tag):
"""
Add Comment to a given tweet using some rules.
:param tweet tweepy.tweet: Tweet object from tweepy library
:param sentence_for_tag list: List of random sentences
        :param hashtag integer: comment strategy level (1-6) selecting how the reply is built
        :param list_name list: List of user names
        :param hashtag_to_blacklist list: List of blacklisted hashtags to avoid
"""
random.shuffle(list_name)
nbrandom = random.randrange(0, len(sentence_for_tag))
randomsentence = sentence_for_tag[nbrandom]
# Random Sentence + Tag Comment + Hashtag Comment + Update Status
if hashtag == 1:
comment = "@" + tweet.retweeted_status.author.screen_name + " " + randomsentence + " "
comment = self.add_tag_comment(list_name, comment, nb_account_to_tag)
comment = self.add_hashtag_comment(comment, tweet.retweeted_status.entities['hashtags'],
hashtag_to_blacklist)
self.api.update_status(comment, tweet.retweeted_status.id)
# Random Sentence + Tag Comment + Update Status
elif hashtag == 2:
comment = "@" + tweet.retweeted_status.author.screen_name + " " + randomsentence + " "
comment = self.add_tag_comment(list_name, comment, nb_account_to_tag)
self.api.update_status(comment, tweet.retweeted_status.id)
# Hashtag Comment + Update Status
elif hashtag == 3:
comment = "@" + tweet.retweeted_status.author.screen_name + " "
comment = self.add_hashtag_comment(comment, tweet.retweeted_status.entities['hashtags'],
hashtag_to_blacklist)
self.api.update_status(comment, tweet.retweeted_status.id)
# User - Random Sentence + Tag Comment + Hashtag Comment + Update Status
elif hashtag == 4:
comment = "@" + tweet.user.screen_name + " " + randomsentence + " "
comment = self.add_tag_comment(list_name, comment, nb_account_to_tag)
comment = self.add_hashtag_comment(comment, tweet.entities['hashtags'],
hashtag_to_blacklist)
self.api.update_status(comment, tweet.id)
# User - Random Sentence + Tag Comment + Update Status
elif hashtag == 5:
comment = "@" + tweet.user.screen_name + " " + randomsentence + " "
comment = self.add_tag_comment(list_name, comment, nb_account_to_tag)
self.api.update_status(comment, tweet.id)
# User - Hashtag Comment + Update Status
elif hashtag == 6:
comment = "@" + tweet.user.screen_name + " "
comment = self.add_hashtag_comment(comment, tweet.entities['hashtags'],
hashtag_to_blacklist)
self.api.update_status(comment, tweet.id)
def manage_hashtag(self, hashtag_list, hashtag_to_blacklist):
"""
        Filter out blacklisted hashtags
:param hashtag_list list: List of Hashtags from Tweet
:param hashtag_to_blacklist list: List of BlackListed Hashtags
"""
h_list = []
for h in hashtag_list:
h_list.append(h['text'].upper())
return list(set(h_list) - set(hashtag_to_blacklist))
def add_tag_comment(self, list_name, comment, nb_account_to_tag):
"""
Tag other users in comment.
:param list_name list: List of user names to add to comment
:param comment string: Tweet/text/Comment
"""
nbusernotif = 0
for username in list_name:
if nbusernotif < nb_account_to_tag:
# We don't want to tag ourselves
if username == "@" + self.user.screen_name:
pass
else:
comment = comment + username + " "
nbusernotif += 1
return comment
def add_hashtag_comment(self, comment, hashtag_list, hashtag_to_blacklist):
"""
        Add hashtags to a comment
:param comment string: Comment to which to add hashtags
:param hashtag_list list: List of Hashtags
:param hashtag_to_blacklist list: List of Blacklisted Hashtags to avoid
"""
h_list = self.manage_hashtag(hashtag_list, hashtag_to_blacklist)
for hashtag in h_list:
comment = comment + "#" + hashtag + " "
return comment
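# Illustrative usage sketch (keys are placeholders; a real tweepy API object
# must be supplied by the caller):
#
#     auth = tweepy.OAuthHandler('consumer_key', 'consumer_secret')
#     auth.set_access_token('access_token', 'access_secret')
#     api = tweepy.API(auth, wait_on_rate_limit=True)
#     bot = RetweetGiveaway(api, api.me())
#     giveaways = bot.check_retweet(['concours'], [], [], [],
#                                   comment_with_hashtag=True, max_giveaway=50)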
|
py | 1a441478ebe6fc5269a8c0e9304838fba04daaf8 | import tensorflow as tf
a = tf.placeholder("float")
b = tf.placeholder("float")
y = tf.multiply(a,b)
sess = tf.Session()
print (sess.run(y, feed_dict={a:3,b:3}))
|
py | 1a44163f4948bbc0e984af0fb14513e8459453c0 | from juno.resources import handler_request
from juno.resources.routes import notification_routes
def find_all_event_types():
return handler_request.get(notification_routes.get_base_url_event_types())
def create_webhook(dictionary):
return handler_request.post(notification_routes.get_base_url_webhooks(), dictionary)
def find_all_webhooks():
return handler_request.get(notification_routes.get_base_url_webhooks())
def find_webhook_by_id(webhook_id):
return handler_request.get(
notification_routes.get_specific_webhook_by_id_url(webhook_id)
)
def update_webhook(webhook_id, dictionary):
return handler_request.patch(
notification_routes.get_update_webhook_url(webhook_id), dictionary
)
def delete_webhook(webhook_id):
return handler_request.delete(
notification_routes.get_delete_webhook_url(webhook_id)
)
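# Illustrative usage sketch (the payload keys below are assumptions about
# Juno's webhook API and may differ in practice):
#
#     find_all_event_types()
#     create_webhook({"url": "https://example.com/hook",
#                     "eventTypes": ["PAYMENT_NOTIFICATION"]})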
|
py | 1a4416e0e7ecc1187b539e29a5010b969f302441 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# PROJECT_NAME Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'openstackdocstheme',
'reno.sphinxext',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
author = 'OpenStack-Ansible Contributors'
category = 'Miscellaneous'
copyright = '2014-2016, OpenStack-Ansible Contributors'
description = 'OpenStack-Ansible deploys OpenStack environments using Ansible.'
project = 'OpenStack-Ansible'
role_name = 'plugins'
target_name = 'openstack-ansible-' + role_name
title = 'OpenStack-Ansible Documentation: ' + role_name + ' role'
# Release notes do not need a version number in the title, they
# cover multiple releases.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/' + target_name
openstackdocs_bug_project = project.lower()
openstackdocs_bug_tag = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'OpenStackAnsibleReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'OpenStackAnsibleReleaseNotes.tex', u'OpenStack-Ansible Release Notes Documentation',
u'OpenStack-Ansible Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'openstackansiblereleasenotes', u'OpenStack-Ansible Release Notes Documentation',
[u'OpenStack-Ansible Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'OpenStackAnsibleReleaseNotes', u'OpenStack-Ansible Release Notes Documentation',
u'OpenStack-Ansible Developers', 'OpenStackAnsibleReleaseNotes',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
|
py | 1a44172f392f2a4097b2ef6ba3e0f3908d3a2803 |
if Controller.isDebugVersion(): print("[highscore.py]")
from configparser import ConfigParser
import os
import getpass
# .................................................................................................................
# KIKI HIGHSCORE
# .................................................................................................................
class KikiHighscore (ConfigParser):
"""kiki highscore"""
# ................................................................ init
def __init__ (self):
"""initialization"""
ConfigParser.__init__ (self)
        with open(kikipy_path + "kiki.hsc") as default_file:
            self.read_file(default_file)
try:
if os.uname()[0] == "Darwin":
self.highscore_file_path = os.path.expanduser ("~/Library/Preferences/kiki.hsc")
else:
self.highscore_file_path = os.path.expanduser ("~/.kiki.hsc")
        except Exception:
self.highscore_file_path = os.path.expanduser ("~/.kiki.hsc")
try:
self.read (self.highscore_file_path)
        except Exception:
print ("creating " + self.highscore_file_path)
self.save ()
# ................................................................ minmoves for level
def levelUserMoves (self, level_name):
"""reads users number of moves for level level_name from highscore file"""
if not self.has_section (level_name):
self.add_section (level_name)
if self.has_option(level_name, getpass.getuser()):
min_moves = int (self.get (level_name, getpass.getuser()))
return min_moves
else:
return 0
# ................................................................ minmoves for level
def levelParMoves (self, level_name):
"""reads par number of moves for level level_name from highscore file"""
if not self.has_section (level_name):
self.add_section (level_name)
if self.has_option(level_name, "moves"):
par_moves = int (self.get (level_name, "moves"))
return par_moves
else:
self.set (level_name, "moves", "1000")
return 1000
# ................................................................ finish level
def levelFinished (self, level_name, moves):
"""writes data for finished level to highscore file"""
level_num = level_list.index(level_name)
if not self.has_section (level_name):
self.add_section (level_name)
if self.has_option(level_name, getpass.getuser()): # level already solved
old_moves = int (self.get (level_name, getpass.getuser()))
if moves < old_moves:
self.set (level_name, getpass.getuser(), str (int (moves)))
else: # first time solved
self.set (level_name, getpass.getuser(), str (int (moves)))
available_level_num = self.getLastAvailableLevel()
self.set ("main", "last_level", level_list[min(available_level_num+2, len(level_list)-1)])
self.set ("main", "current_level", level_list[min(level_num+1, len(level_list)-1)])
self.save ()
# ................................................................ get last level
def getLastAvailableLevel (self):
"""returns the index of last available level in level_list"""
last_level = str(self.get("main", "last_level"))
if last_level in level_list:
return level_list.index(str(last_level))
else:
return -1
# ................................................................ get last level
def getLastLevel (self):
"""returns the index of last played level in level_list"""
last_level = str(self.get("main", "current_level"))
if last_level in level_list:
return level_list.index(str(last_level))
else:
return -1
# ................................................................ save
def save (self):
"""save the highscore"""
try:
highscore_file = open(self.highscore_file_path, "w+")
self.write (highscore_file)
highscore_file.close()
        except Exception:
console.printError ("unable to write highscore to " + self.highscore_file_path)
# .................................................................................................................
highscore = KikiHighscore ()
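# Usage sketch (illustrative; level names depend on the game's level_list,
# which is provided by the surrounding kiki runtime):
#
#   par = highscore.levelParMoves("level_1")
#   if highscore.levelUserMoves("level_1") == 0:
#       print("level_1 not solved yet (par: %d moves)" % par)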
|
py | 1a4417588797e376add80b230f8a9072062ba39f | from app.controllers.addresses_controller import AddressController
from app.models.address_model import Address
from app.models.instance_model import Instance
from app.models.pub_key_model import PubKey
from tests.data.mocks import instance, cluster, pub_key, address
from unittest.mock import patch
import tests.data as data
# mock arguments are injected bottom-up, so the first parameter matches the
# innermost (lowest) decorator
@patch.object(Address, 'all', return_value=[address], autospec=True)
@patch.object(Address, 'find', return_value=address, autospec=True)
@patch.object(Address, 'refresh', autospec=True)
def test_index(mock_refresh, mock_find, mock_all, credentials):
controller = AddressController(credentials=credentials)
resp = controller.index()
assert resp['data']['addresses'][0].address == data.address
@patch.object(PubKey, 'create', return_value=pub_key, autospec=True)
@patch('app.controllers.addresses_controller.Instance', return_value=instance, autospec=True)
@patch('app.controllers.addresses_controller.Cluster', return_value=cluster, autospec=True)
@patch.object(Address, 'create', return_value=address, autospec=True)
def test_create(mock_Address, mock_Cluster, mock_Instance, mock_PubKey, credentials):
controller = AddressController(credentials=credentials)
resp = controller.create()
assert resp['data']['address'].address == data.address
@patch.object(Address, 'find', return_value=address, autospec=True)
def test_show(mock_Address, credentials):
controller = AddressController(credentials=credentials)
resp = controller.show(id=data.address_id)
assert resp['data']['address'].address == data.address
def test_ip_address(credentials):
with patch('app.controllers.addresses_controller.Instance', return_value=instance, autospec=True):
controller = AddressController(credentials=credentials)
resp = controller.ip_address
assert resp == data.public_ip_address
def test_ssh_key_file(credentials):
controller = AddressController(credentials=credentials)
resp = controller.ssh_key_file_path
assert resp == data.ssh_key_file_path
def test_eni_ip(credentials):
with patch('app.controllers.addresses_controller.Cluster', return_value=cluster, autospec=True):
controller = AddressController(credentials=credentials)
resp = controller.eni_ip
assert resp == data.eni_ip
def test_bucket_name(credentials):
controller = AddressController(credentials=credentials)
resp = controller.bucket_name
assert resp == 'cluster-lbtkdldygfh-bucket'
|
py | 1a4419292dd14f87f661a464abd4b95e0eba4676 | from kubernetes import client as k8s_client
from kubernetes.client import rest as k8s_rest
from kubernetes import config as k8s_config
import boto3
from botocore.client import Config
from botocore.exceptions import ClientError
import argparse
import os
import logging
import tarfile
class MinioUploader(object):
def __init__(self, minio_secret, minio_secret_key, region_name):
k8s_config.load_incluster_config()
self.api_client = k8s_client.CoreV1Api()
try:
            minio_service = self.api_client.read_namespaced_service(name='minio-service', namespace='kubeflow')
            self.minio_service_endpoint = minio_service.spec.cluster_ip
            self.minio_service_endpoint_port = minio_service.spec.ports[0].port
        except k8s_rest.ApiException as e:
if e.status == 403:
logging.warning(f"The service account doesn't have sufficient privileges "
f"to get the kubeflow minio-service. "
f"You will have to manually enter the minio cluster-ip. "
f"To make this function work ask someone with cluster "
f"priveleges to create an appropriate "
f"clusterrolebinding by running a command.\n"
f"kubectl create --namespace=kubeflow rolebinding "
"--clusterrole=kubeflow-view "
"--serviceaccount=${NAMESPACE}:default-editor "
"${NAMESPACE}-minio-view")
logging.error("API access denied with reason: {e.reason}")
        self.minio_endpoint = "http://" + self.minio_service_endpoint + ":%s" % self.minio_service_endpoint_port
        print("minio endpoint : ", self.minio_endpoint)
self.client = boto3.client('s3',
endpoint_url=self.minio_endpoint,
aws_access_key_id=minio_secret,
aws_secret_access_key=minio_secret_key,
config=Config(signature_version='s3v4'),
region_name=region_name,
use_ssl=False)
def create_bucket(self, bucket_name):
try:
self.client.head_bucket(Bucket=bucket_name)
except ClientError:
bucket = {'Bucket': bucket_name}
self.client.create_bucket(**bucket)
def upload_to_bucket(self, blob_name, bucket_name, file_to_upload):
self.create_bucket(bucket_name)
self.client.upload_file(file_to_upload, bucket_name, blob_name)
return "s3://{}/{}".format(bucket_name, blob_name)
def flatten(tarinfo):
tarinfo.name = os.path.basename(tarinfo.name)
return tarinfo
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--minio-bucket', type=str, default='rasa', help='minio bucket name')
parser.add_argument('--minio-username', type=str, default='minio', help='minio secret name')
parser.add_argument('--minio-key', type=str, default='minio123', help='minio secret key')
parser.add_argument('--minio-region', type=str, default='us-east-1', help='minio region')
parser.add_argument('--model-name', type=str, default='rasa_model', help='trained model name')
parser.add_argument('--model-path', type=str, default='/mnt/models', help='trained model path')
FLAGS, unparsed = parser.parse_known_args()
#model_name=FLAGS.model_name + '.tar.gz'
#file_to_upload=FLAGS.model_path + '/' + model_name
minio_uploader = MinioUploader(minio_secret=FLAGS.minio_username, minio_secret_key=FLAGS.minio_key, region_name=FLAGS.minio_region)
tar = tarfile.open("models.tar.gz", "w:gz")
tar.add(FLAGS.model_path, arcname=os.path.basename("model"))
tar.close()
minio_uploader.upload_to_bucket("models.tar.gz", FLAGS.minio_bucket, "models.tar.gz")
print("uploaded successfully")
|
py | 1a441a6f03c2a7c6c5dd3f36040cfaf06176713c | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
"""Patterns supported CUTLASS."""
from functools import partial
from tvm import relay
from tvm.ir.transform import Sequential, PassContext
from tvm.relay import transform
from tvm.relay.build_module import bind_params_by_name
from tvm.relay.op.contrib.register import register_pattern_table # type: ignore
from ...dataflow_pattern import wildcard, is_op, is_constant
def make_gelu_pattern(bias_out, out_dtype="float16"):
mul = is_op("multiply")(bias_out, is_constant() | wildcard())
if out_dtype == "float16":
erf = is_op("cast")(is_op("erf")(is_op("cast")(mul)))
else:
erf = is_op("erf")(mul)
mul_half = is_op("multiply")(erf, is_constant() | wildcard())
add = is_op("add")(mul_half, is_constant() | wildcard())
return is_op("multiply")(add, bias_out)
def make_gemm_pattern(with_bias=True, with_act=None, out_dtype="float16"):
"""Create a pattern for dense op followed by activations."""
data = wildcard()
weight = wildcard()
bias = wildcard()
gemm = is_op("nn.dense")(data, weight)
if with_bias:
add_or_bias_add = is_op("add") | is_op("nn.bias_add")
gemm_out = add_or_bias_add(gemm, bias)
else:
gemm_out = gemm
if with_act is None:
return gemm_out
if isinstance(with_act, str) and with_act == "relu":
return is_op("nn.relu")(gemm_out)
assert isinstance(with_act, str) and with_act == "gelu"
return make_gelu_pattern(gemm_out, out_dtype)
def make_batch_matmul_pattern():
return is_op("nn.batch_matmul")(wildcard(), wildcard())
def make_conv2d_pattern(with_bias=False, with_act=None):
"""Create a pattern for dense op followed by activations."""
data = wildcard()
weight = wildcard()
bias = wildcard()
conv2d = is_op("nn.conv2d")(data, weight)
if with_bias:
add_or_bias_add = is_op("add") | is_op("nn.bias_add")
conv2d_out = add_or_bias_add(conv2d, bias)
else:
conv2d_out = conv2d
if with_act is not None:
if with_act == "relu":
return is_op("nn.relu")(conv2d_out)
if with_act == "sigmoid":
return is_op("sigmoid")(conv2d_out)
if with_act == "silu":
return is_op("multiply")(conv2d_out, is_op("sigmoid")(conv2d_out))
if with_act == "hardswish":
rhs = is_op("divide")(
is_op("clip")(is_op("add")(conv2d_out, is_constant())), is_constant()
)
return is_op("multiply")(conv2d_out, rhs)
raise ValueError("Unknown activation %s." % with_act)
return conv2d_out
def make_conv2d_transpose_pattern():
return is_op("nn.conv2d_transpose")(wildcard(), wildcard())
def make_conv2d_backward_weight_pattern():
return is_op("nn.conv2d_backward_weight")(wildcard(), wildcard())
def make_residual_block_pattern(tensor_op_out, binary_op="add", with_act="relu"):
"""Add pattern for residual blocks."""
residual_input = wildcard()
binary_out = is_op(binary_op)(tensor_op_out, residual_input) | is_op(binary_op)(
residual_input, tensor_op_out
)
if with_act is not None and with_act == "relu":
return is_op("nn.relu")(binary_out)
return binary_out
def check_dtype(lhs, rhs):
"""Check if dtypes in the given workload are supported by CUTLASS."""
return (
(lhs.dtype == "float16" and rhs.dtype == "float16")
or (lhs.dtype == "float32" and rhs.dtype == "float32")
or (lhs.dtype in ["int8", "uint8"] and rhs.dtype in ["int8", "uint8"])
)
def get_root_call(call, root_op_name):
if not isinstance(call, relay.Call):
return None
if str(call.op) == root_op_name:
return call
return get_root_call(call.args[0], root_op_name)
def check_gemm(call):
"""Check if the given dense workload can be offloaded to CUTLASS."""
dense = get_root_call(call, "nn.dense")
lhs = dense.args[0].checked_type
rhs = dense.args[1].checked_type
return check_dtype(lhs, rhs)
def check_batch_matmul(call):
"""Check if the given batch_matmul workload can be offloaded to CUTLASS."""
batch_matmul = get_root_call(call, "nn.batch_matmul")
lhs = batch_matmul.args[0].checked_type
rhs = batch_matmul.args[1].checked_type
transpose_a = batch_matmul.attrs.transpose_a
transpose_b = batch_matmul.attrs.transpose_b
return check_dtype(lhs, rhs) and not transpose_a and transpose_b
def is_depthwise_conv2d(ic, oc, groups):
return ic == oc == groups
def check_conv2d_common(op_name, expected_kernel_layout, call):
"""Check if the given conv2d workload can be offloaded to CUTLASS."""
conv2d = get_root_call(call, op_name)
data_layout = conv2d.attrs.data_layout
kernel_layout = conv2d.attrs.kernel_layout
data = conv2d.args[0].checked_type
weight = conv2d.args[1].checked_type
if (
data_layout != "NHWC"
or kernel_layout != expected_kernel_layout
or not check_dtype(data, weight)
):
return False
IC = data.shape[3]
OC = weight.shape[0]
return not is_depthwise_conv2d(IC, OC, conv2d.attrs.groups)
def check_conv2d(call):
return check_conv2d_common("nn.conv2d", "OHWI", call)
def check_conv2d_transpose(call):
# conv2d_transpose is implemented as dgrad, needs to swap the roles of C and K
return check_conv2d_common("nn.conv2d_transpose", "IHWO", call)
def check_conv2d_backward_weight(call):
return check_conv2d_common("nn.conv2d_backward_weight", "NHWC", call)
def check_conv2d_residual(call, binary_op):
"""Check if the given conv2d workload can be offloaded to CUTLASS."""
conv2d = get_root_call(call, "nn.conv2d")
if not check_conv2d(call):
return False
residual_binop = get_root_call(call, binary_op)
lhs = residual_binop.args[0]
rhs = residual_binop.args[1]
# residual_input is pattern-matched as a wildcard. Make sure it does not sit between
# residual binary op and the root conv2d of this pattern.
# If the root conv2d is the parent of both lhs and rhs, we should reject this pattern.
if get_root_call(lhs, "nn.conv2d") == conv2d and get_root_call(rhs, "nn.conv2d") == conv2d:
return False
return all(x == y for (x, y) in zip(lhs.checked_type.shape, rhs.checked_type.shape))
@register_pattern_table("cutlass")
def pattern_table():
"""Returns list of triples describing the name, dataflow pattern and predicate for all
the CUTLASS-supported operators."""
dense_pat = ("cutlass.dense", make_gemm_pattern(False, None), check_gemm)
dense_bias_pat = ("cutlass.dense_bias", make_gemm_pattern(True, None), check_gemm)
dense_bias_relu_pat = ("cutlass.dense_bias_relu", make_gemm_pattern(True, "relu"), check_gemm)
dense_bias_gelu_fp16_pat = (
"cutlass.dense_bias_gelu_fp16",
make_gemm_pattern(True, "gelu"),
check_gemm,
)
dense_bias_gelu_fp32_pat = (
"cutlass.dense_bias_gelu_fp32",
make_gemm_pattern(True, "gelu", out_dtype="float32"),
check_gemm,
)
dense_patterns = [
dense_bias_gelu_fp16_pat,
dense_bias_gelu_fp32_pat,
dense_bias_relu_pat,
dense_bias_pat,
dense_pat,
("cutlass.batch_matmul", make_batch_matmul_pattern(), check_batch_matmul),
]
conv2d_patterns = [
(
"cutlass.conv2d_bias_hardswish",
make_conv2d_pattern(with_bias=True, with_act="hardswish"),
check_conv2d,
),
(
"cutlass.conv2d_bias_silu",
make_conv2d_pattern(with_bias=True, with_act="silu"),
check_conv2d,
),
(
"cutlass.conv2d_bias_relu",
make_conv2d_pattern(with_bias=True, with_act="relu"),
check_conv2d,
),
(
"cutlass.conv2d_bias_sigmoid",
make_conv2d_pattern(with_bias=True, with_act="sigmoid"),
check_conv2d,
),
("cutlass.conv2d_bias", make_conv2d_pattern(with_bias=True), check_conv2d),
("cutlass.conv2d", make_conv2d_pattern(), check_conv2d),
]
# For now, no fusion for grad kernels
conv2d_grad_patterns = [
("cutlass.conv2d_transpose", make_conv2d_transpose_pattern(), check_conv2d_transpose),
(
"cutlass.conv2d_backward_weight",
make_conv2d_backward_weight_pattern(),
check_conv2d_backward_weight,
),
]
residual_block_patterns = []
for with_act, postfix in [("relu", "_relu"), (None, "")]:
for name, pat, _ in conv2d_patterns[:-1]:
for bin_op in ["add", "multiply"]:
residual_block_patterns.append(
(
name + "_residual_" + bin_op + postfix,
make_residual_block_pattern(pat, bin_op, with_act=with_act),
partial(check_conv2d_residual, binary_op=bin_op),
)
)
return residual_block_patterns + dense_patterns + conv2d_patterns + conv2d_grad_patterns
def partition_for_cutlass(mod, params=None):
"""Partition the input module into CUTLASS-supported subgraphs."""
if params is not None:
mod["main"] = bind_params_by_name(mod["main"], params)
remove_bn_pass = Sequential(
[
transform.InferType(),
transform.SimplifyInference(),
transform.FoldConstant(),
transform.FoldScaleAxis(),
]
)
with PassContext(opt_level=3):
mod = remove_bn_pass(mod)
cutlass_patterns = relay.op.contrib.get_pattern_table("cutlass")
seq = Sequential(
[
transform.InferType(),
transform.MergeComposite(cutlass_patterns),
transform.AnnotateTarget(["cutlass"], include_non_call_ops=False),
transform.PartitionGraph(bind_constants=False),
]
)
return seq(mod)
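# Usage sketch (illustrative; assumes `mod` is a Relay IRModule with a "main"
# function and `params` its weight dict):
#
#   mod = partition_for_cutlass(mod, params)
#   # CUTLASS-supported subgraphs are now split into separate functions
#   # annotated with the "cutlass" target for the BYOC compilation flow.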
|
py | 1a441b11d5e4755524ce6f0079d8f03f4ef00b52 |
#Example case how to write a se file
#We write 3 cycles in a newly created file example.se.h5
import sewrite
#create the h5 output file
file_sewrite = sewrite.startfile('example.se.h5')
cycles=[1,2,3]
#Writing global parameters:
#Make sure that you write the right units to ensure that MPPNP computes with the right values.
hattr_name = [ "codev", "modname", "mini", "zini", "rotini", "overini", "age_unit",
"mass_unit", "radius_unit", "rho_unit", "temperature_unit",
"dcoeff_unit","firstcycle"]
hattr_data = [ 'codev1', 'modname', 1., 0.02, 0., 0., 1., 1.,
1., 1., 1., 1.,cycles[0]]
file_sewrite.write_hattr(hattr_name, hattr_data)
rhot, tempt, mass, dcoeff, radius, delta_mass = [[1,2,3],[1,2,3],[1,2,3],[1,2,3],[1,2,3],[1,2,3]]
mtot, shellnb, age, deltat = [[1,2,3],[1,2,3],[1,2,3],[1,2,3]]
#write h5 cycle data
for i in range(len(cycles)):
#write data columns
dcol_name = ["rho", "temperature", "mass", "dcoeff", "radius", "delta_mass"]
dcol_data = [rhot, tempt, mass, dcoeff, radius, delta_mass]
file_sewrite.write_dcol(cycles[i], dcol_name, dcol_data)
#write data attributes
cattr_name = ["total_mass", "shellnb", "age", "deltat"]
cattr_data = [0.5, 1000,1234, 200]
file_sewrite.write_cattr(cycles[i], cattr_name, cattr_data)
|
py | 1a441b2489d4148db27ae5671b753f60a6170d03 | from ungenetico import Gene, Mutation, MutationUniform, Crossover, CrossoverSimple
import random
from dataclasses import dataclass, field
from typing import List
@dataclass
class GeneBool(Gene):
"""
    Concrete Gene subclass that stores boolean genetic information
    as a fixed-length list of 0/1 integers
    Attributes
    ----------
    name: str
        Variable name associated with the gene
    value: List[int]
        Values of the gene, each clamped to [min_val, max_val]
    min_val: int
        Minimum value of each element (0)
    max_val: int
        Maximum value of each element (1)
    length: int
        Number of elements in the gene
"""
name: str
min_val: int = field(init=False, repr=False, default=0)
max_val: int = field(init=False, repr=False, default=1)
length: int
value: List[int]
mutation_operator: Mutation
crossover_operator: Crossover
_min: int = field(init=False, repr=False, default=0)
_max: int = field(init=False, repr=False, default=1)
_val: List[int] = field(init=False, repr=False)
_mutation_operator: Mutation = field(init=False, repr=False)
_crossover_operator: Crossover = field(init=False, repr=False)
@property
def value(self):
return self._val
@value.setter
def value(self, val):
        if isinstance(val, property):
            # no explicit value supplied: initialise with random 0/1 values
            self._val = [random.randint(self.min_val, self.max_val) for _ in range(self.length)]
        else:
            # clamp each element to [min_val, max_val]; build a fresh list so the
            # setter also works before _val has been initialised
            clamped = []
            for v in val:
                v = min(max(v, self.min_val), self.max_val)
                clamped.append(int(round(v)))
            self._val = clamped
@property
def mutation_operator(self):
return self._mutation_operator
@mutation_operator.setter
def mutation_operator(self, mo: Mutation):
if isinstance(mo, property):
self._mutation_operator = MutationUniform()
else:
self._mutation_operator = mo
@property
def crossover_operator(self):
return self._crossover_operator
@crossover_operator.setter
def crossover_operator(self, co: Crossover):
if isinstance(co, property):
self._crossover_operator = CrossoverSimple()
else:
self._crossover_operator = co
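# Usage sketch (illustrative; relies on the default mutation/crossover
# operators declared above):
#
#   gene = GeneBool(name="flags", length=8)   # starts with random 0/1 values
#   gene.value = [0, 1, 2, -1, 1, 0, 1, 1]    # elements are clamped to [0, 1]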
|
py | 1a441b8b9ce3d187d5e72d18660b8692e847f040 | # -*- coding: utf-8 -*-
from urllib.parse import quote, quote_plus, unquote, urlencode
from plexapi import X_PLEX_CONTAINER_SIZE, log, utils
from plexapi.base import PlexObject
from plexapi.exceptions import BadRequest, NotFound
from plexapi.media import MediaTag
from plexapi.settings import Setting
class Library(PlexObject):
""" Represents a PlexServer library. This contains all sections of media defined
in your Plex server including video, shows and audio.
Attributes:
key (str): '/library'
identifier (str): Unknown ('com.plexapp.plugins.library').
mediaTagVersion (str): Unknown (/system/bundle/media/flags/)
server (:class:`~plexapi.server.PlexServer`): PlexServer this client is connected to.
title1 (str): 'Plex Library' (not sure how useful this is).
title2 (str): Second title (this is blank on my setup).
"""
key = '/library'
def _loadData(self, data):
self._data = data
        self._sectionsByID = {}  # sections cached by their key (ID)
self.identifier = data.attrib.get('identifier')
self.mediaTagVersion = data.attrib.get('mediaTagVersion')
self.title1 = data.attrib.get('title1')
self.title2 = data.attrib.get('title2')
def sections(self):
""" Returns a list of all media sections in this library. Library sections may be any of
:class:`~plexapi.library.MovieSection`, :class:`~plexapi.library.ShowSection`,
:class:`~plexapi.library.MusicSection`, :class:`~plexapi.library.PhotoSection`.
"""
key = '/library/sections'
sections = []
for elem in self._server.query(key):
for cls in (MovieSection, ShowSection, MusicSection, PhotoSection):
if elem.attrib.get('type') == cls.TYPE:
section = cls(self._server, elem, key)
self._sectionsByID[section.key] = section
sections.append(section)
return sections
def section(self, title=None):
""" Returns the :class:`~plexapi.library.LibrarySection` that matches the specified title.
Parameters:
title (str): Title of the section to return.
"""
for section in self.sections():
if section.title.lower() == title.lower():
return section
raise NotFound('Invalid library section: %s' % title)
def sectionByID(self, sectionID):
""" Returns the :class:`~plexapi.library.LibrarySection` that matches the specified sectionID.
Parameters:
sectionID (str): ID of the section to return.
"""
if not self._sectionsByID or sectionID not in self._sectionsByID:
self.sections()
return self._sectionsByID[sectionID]
def all(self, **kwargs):
""" Returns a list of all media from all library sections.
This may be a very large dataset to retrieve.
"""
items = []
for section in self.sections():
for item in section.all(**kwargs):
items.append(item)
return items
def onDeck(self):
""" Returns a list of all media items on deck. """
return self.fetchItems('/library/onDeck')
def recentlyAdded(self):
""" Returns a list of all media items recently added. """
return self.fetchItems('/library/recentlyAdded')
def search(self, title=None, libtype=None, **kwargs):
""" Searching within a library section is much more powerful. It seems certain
attributes on the media objects can be targeted to filter this search down
            a bit, but I haven't found the documentation for it.
            Example: "studio=Comedy%20Central", "year=1999" and "title=Kung Fu" all work. Other items
such as actor=<id> seem to work, but require you already know the id of the actor.
TLDR: This is untested but seems to work. Use library section search when you can.
"""
args = {}
if title:
args['title'] = title
if libtype:
args['type'] = utils.searchType(libtype)
for attr, value in kwargs.items():
args[attr] = value
key = '/library/all%s' % utils.joinArgs(args)
return self.fetchItems(key)
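    # Usage sketch (illustrative; assumes a connected PlexServer instance):
    #
    #   from plexapi.server import PlexServer
    #   plex = PlexServer('http://localhost:32400', token='<token>')
    #   results = plex.library.search(title='Kung Fu', libtype='movie', year=1999)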
def cleanBundles(self):
""" Poster images and other metadata for items in your library are kept in "bundle"
packages. When you remove items from your library, these bundles aren't immediately
removed. Removing these old bundles can reduce the size of your install. By default, your
server will automatically clean up old bundles once a week as part of Scheduled Tasks.
"""
# TODO: Should this check the response for success or the correct mediaprefix?
self._server.query('/library/clean/bundles')
def emptyTrash(self):
""" If a library has items in the Library Trash, use this option to empty the Trash. """
for section in self.sections():
section.emptyTrash()
def optimize(self):
""" The Optimize option cleans up the server database from unused or fragmented data.
For example, if you have deleted or added an entire library or many items in a
library, you may like to optimize the database.
"""
self._server.query('/library/optimize')
def update(self):
""" Scan this library for new items."""
self._server.query('/library/sections/all/refresh')
def cancelUpdate(self):
""" Cancel a library update. """
key = '/library/sections/all/refresh'
self._server.query(key, method=self._server._session.delete)
def refresh(self):
""" Forces a download of fresh media information from the internet.
This can take a long time. Any locked fields are not modified.
"""
self._server.query('/library/sections/all/refresh?force=1')
def deleteMediaPreviews(self):
""" Delete the preview thumbnails for the all sections. This cannot be
undone. Recreating media preview files can take hours or even days.
"""
for section in self.sections():
section.deleteMediaPreviews()
def add(self, name='', type='', agent='', scanner='', location='', language='en', *args, **kwargs):
""" Simplified add for the most common options.
Parameters:
name (str): Name of the library
agent (str): Example com.plexapp.agents.imdb
type (str): movie, show, # check me
location (str): /path/to/files
                language (str): Two letter language code, e.g. en
kwargs (dict): Advanced options should be passed as a dict. where the id is the key.
**Photo Preferences**
* **agent** (str): com.plexapp.agents.none
* **enableAutoPhotoTags** (bool): Tag photos. Default value false.
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
* **includeInGlobal** (bool): Include in dashboard. Default value true.
* **scanner** (str): Plex Photo Scanner
**Movie Preferences**
* **agent** (str): com.plexapp.agents.none, com.plexapp.agents.imdb, com.plexapp.agents.themoviedb
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
* **enableCinemaTrailers** (bool): Enable Cinema Trailers. Default value true.
* **includeInGlobal** (bool): Include in dashboard. Default value true.
* **scanner** (str): Plex Movie Scanner, Plex Video Files Scanner
**IMDB Movie Options** (com.plexapp.agents.imdb)
* **title** (bool): Localized titles. Default value false.
* **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
* **only_trailers** (bool): Skip extras which aren't trailers. Default value false.
* **redband** (bool): Use red band (restricted audiences) trailers when available. Default value false.
* **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
* **cast_list** (int): Cast List Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
* **ratings** (int): Ratings Source, Default value 0 Possible options:
0:Rotten Tomatoes, 1:IMDb, 2:The Movie Database.
* **summary** (int): Plot Summary Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
* **country** (int): Default value 46 Possible options 0:Argentina, 1:Australia, 2:Austria,
3:Belgium, 4:Belize, 5:Bolivia, 6:Brazil, 7:Canada, 8:Chile, 9:Colombia, 10:Costa Rica,
11:Czech Republic, 12:Denmark, 13:Dominican Republic, 14:Ecuador, 15:El Salvador,
16:France, 17:Germany, 18:Guatemala, 19:Honduras, 20:Hong Kong SAR, 21:Ireland,
22:Italy, 23:Jamaica, 24:Korea, 25:Liechtenstein, 26:Luxembourg, 27:Mexico, 28:Netherlands,
29:New Zealand, 30:Nicaragua, 31:Panama, 32:Paraguay, 33:Peru, 34:Portugal,
35:Peoples Republic of China, 36:Puerto Rico, 37:Russia, 38:Singapore, 39:South Africa,
40:Spain, 41:Sweden, 42:Switzerland, 43:Taiwan, 44:Trinidad, 45:United Kingdom,
46:United States, 47:Uruguay, 48:Venezuela.
* **collections** (bool): Use collection info from The Movie Database. Default value false.
* **localart** (bool): Prefer artwork based on library language. Default value true.
* **adult** (bool): Include adult content. Default value false.
* **usage** (bool): Send anonymous usage data to Plex. Default value true.
**TheMovieDB Movie Options** (com.plexapp.agents.themoviedb)
* **collections** (bool): Use collection info from The Movie Database. Default value false.
* **localart** (bool): Prefer artwork based on library language. Default value true.
* **adult** (bool): Include adult content. Default value false.
* **country** (int): Country (used for release date and content rating). Default value 47 Possible
options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize, 6:Bolivia, 7:Brazil, 8:Canada,
9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic, 13:Denmark, 14:Dominican Republic, 15:Ecuador,
16:El Salvador, 17:France, 18:Germany, 19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland,
23:Italy, 24:Jamaica, 25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands,
30:New Zealand, 31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore, 40:South Africa, 41:Spain,
42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad, 46:United Kingdom, 47:United States, 48:Uruguay,
49:Venezuela.
**Show Preferences**
* **agent** (str): com.plexapp.agents.none, com.plexapp.agents.thetvdb, com.plexapp.agents.themoviedb
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
* **episodeSort** (int): Episode order. Default -1 Possible options: 0:Oldest first, 1:Newest first.
* **flattenSeasons** (int): Seasons. Default value 0 Possible options: 0:Show,1:Hide.
* **includeInGlobal** (bool): Include in dashboard. Default value true.
* **scanner** (str): Plex Series Scanner
**TheTVDB Show Options** (com.plexapp.agents.thetvdb)
* **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
* **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
**TheMovieDB Show Options** (com.plexapp.agents.themoviedb)
* **collections** (bool): Use collection info from The Movie Database. Default value false.
* **localart** (bool): Prefer artwork based on library language. Default value true.
* **adult** (bool): Include adult content. Default value false.
* **country** (int): Country (used for release date and content rating). Default value 47 options
0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize, 6:Bolivia, 7:Brazil, 8:Canada, 9:Chile,
10:Colombia, 11:Costa Rica, 12:Czech Republic, 13:Denmark, 14:Dominican Republic, 15:Ecuador,
16:El Salvador, 17:France, 18:Germany, 19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland,
23:Italy, 24:Jamaica, 25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands,
30:New Zealand, 31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore, 40:South Africa,
41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad, 46:United Kingdom, 47:United States,
48:Uruguay, 49:Venezuela.
**Other Video Preferences**
* **agent** (str): com.plexapp.agents.none, com.plexapp.agents.imdb, com.plexapp.agents.themoviedb
* **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
* **enableCinemaTrailers** (bool): Enable Cinema Trailers. Default value true.
* **includeInGlobal** (bool): Include in dashboard. Default value true.
* **scanner** (str): Plex Movie Scanner, Plex Video Files Scanner
**IMDB Other Video Options** (com.plexapp.agents.imdb)
* **title** (bool): Localized titles. Default value false.
* **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
* **only_trailers** (bool): Skip extras which aren't trailers. Default value false.
* **redband** (bool): Use red band (restricted audiences) trailers when available. Default value false.
* **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
* **cast_list** (int): Cast List Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
* **ratings** (int): Ratings Source Default value 0 Possible options:
0:Rotten Tomatoes,1:IMDb,2:The Movie Database.
* **summary** (int): Plot Summary Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
* **country** (int): Country: Default value 46 Possible options: 0:Argentina, 1:Australia, 2:Austria,
3:Belgium, 4:Belize, 5:Bolivia, 6:Brazil, 7:Canada, 8:Chile, 9:Colombia, 10:Costa Rica,
11:Czech Republic, 12:Denmark, 13:Dominican Republic, 14:Ecuador, 15:El Salvador, 16:France,
17:Germany, 18:Guatemala, 19:Honduras, 20:Hong Kong SAR, 21:Ireland, 22:Italy, 23:Jamaica,
24:Korea, 25:Liechtenstein, 26:Luxembourg, 27:Mexico, 28:Netherlands, 29:New Zealand, 30:Nicaragua,
31:Panama, 32:Paraguay, 33:Peru, 34:Portugal, 35:Peoples Republic of China, 36:Puerto Rico,
37:Russia, 38:Singapore, 39:South Africa, 40:Spain, 41:Sweden, 42:Switzerland, 43:Taiwan, 44:Trinidad,
45:United Kingdom, 46:United States, 47:Uruguay, 48:Venezuela.
* **collections** (bool): Use collection info from The Movie Database. Default value false.
* **localart** (bool): Prefer artwork based on library language. Default value true.
* **adult** (bool): Include adult content. Default value false.
* **usage** (bool): Send anonymous usage data to Plex. Default value true.
**TheMovieDB Other Video Options** (com.plexapp.agents.themoviedb)
* **collections** (bool): Use collection info from The Movie Database. Default value false.
* **localart** (bool): Prefer artwork based on library language. Default value true.
* **adult** (bool): Include adult content. Default value false.
* **country** (int): Country (used for release date and content rating). Default
value 47 Possible options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize,
6:Bolivia, 7:Brazil, 8:Canada, 9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic,
13:Denmark, 14:Dominican Republic, 15:Ecuador, 16:El Salvador, 17:France, 18:Germany,
19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland, 23:Italy, 24:Jamaica,
25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands, 30:New Zealand,
31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore,
40:South Africa, 41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad,
46:United Kingdom, 47:United States, 48:Uruguay, 49:Venezuela.
"""
part = '/library/sections?name=%s&type=%s&agent=%s&scanner=%s&language=%s&location=%s' % (
quote_plus(name), type, agent, quote_plus(scanner), language, quote_plus(location)) # noqa E126
if kwargs:
part += urlencode(kwargs)
return self._server.query(part, method=self._server._session.post)
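    # Usage sketch (illustrative; agent and scanner values are among those
    # listed in the docstring above):
    #
    #   plex.library.add(name='Movies', type='movie',
    #                    agent='com.plexapp.agents.imdb',
    #                    scanner='Plex Movie Scanner',
    #                    location='/data/movies')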
def history(self, maxresults=9999999, mindate=None):
""" Get Play History for all library Sections for the owner.
Parameters:
maxresults (int): Only return the specified number of results (optional).
mindate (datetime): Min datetime to return results from.
"""
hist = []
for section in self.sections():
hist.extend(section.history(maxresults=maxresults, mindate=mindate))
return hist
class LibrarySection(PlexObject):
""" Base class for a single library section.
Attributes:
ALLOWED_FILTERS (tuple): ()
ALLOWED_SORT (tuple): ()
BOOLEAN_FILTERS (tuple<str>): ('unwatched', 'duplicate')
server (:class:`~plexapi.server.PlexServer`): Server this client is connected to.
initpath (str): Path requested when building this object.
agent (str): Unknown (com.plexapp.agents.imdb, etc)
allowSync (bool): True if you allow syncing content from this section.
art (str): Wallpaper artwork used to respresent this section.
            composite (str): Composite image used to represent this section.
createdAt (datetime): Datetime this library section was created.
filters (str): Unknown
key (str): Key (or ID) of this library section.
language (str): Language represented in this section (en, xn, etc).
locations (str): Paths on disk where section content is stored.
            refreshing (bool): True if this section is currently being refreshed.
scanner (str): Internal scanner used to find media (Plex Movie Scanner, Plex Premium Music Scanner, etc.)
thumb (str): Thumbnail image used to represent this section.
title (str): Title of this section.
type (str): Type of content section represents (movie, artist, photo, show).
updatedAt (datetime): Datetime this library section was last updated.
uuid (str): Unique id for this section (32258d7c-3e6c-4ac5-98ad-bad7a3b78c63)
            totalSize (int): Total number of items in the library
"""
ALLOWED_FILTERS = ()
ALLOWED_SORT = ()
BOOLEAN_FILTERS = ('unwatched', 'duplicate')
def _loadData(self, data):
self._data = data
self.agent = data.attrib.get('agent')
self.allowSync = utils.cast(bool, data.attrib.get('allowSync'))
self.art = data.attrib.get('art')
self.composite = data.attrib.get('composite')
self.createdAt = utils.toDatetime(data.attrib.get('createdAt'))
self.filters = data.attrib.get('filters')
self.key = data.attrib.get('key') # invalid key from plex
self.language = data.attrib.get('language')
self.locations = self.listAttrs(data, 'path', etag='Location')
self.refreshing = utils.cast(bool, data.attrib.get('refreshing'))
self.scanner = data.attrib.get('scanner')
self.thumb = data.attrib.get('thumb')
self.title = data.attrib.get('title')
self.type = data.attrib.get('type')
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
self.uuid = data.attrib.get('uuid')
        # Private attrs as we don't want a reload.
self._total_size = None
def fetchItems(self, ekey, cls=None, container_start=None, container_size=None, **kwargs):
""" Load the specified key to find and build all items with the specified tag
and attrs. See :func:`~plexapi.base.PlexObject.fetchItem` for more details
on how this is used.
Parameters:
container_start (None, int): offset to get a subset of the data
container_size (None, int): How many items in data
"""
url_kw = {}
if container_start is not None:
url_kw["X-Plex-Container-Start"] = container_start
if container_size is not None:
url_kw["X-Plex-Container-Size"] = container_size
if ekey is None:
raise BadRequest('ekey was not provided')
data = self._server.query(ekey, params=url_kw)
if '/all' in ekey:
# totalSize is only included in the xml response
# if container size is used.
total_size = data.attrib.get("totalSize") or data.attrib.get("size")
self._total_size = utils.cast(int, total_size)
items = self.findItems(data, cls, ekey, **kwargs)
librarySectionID = data.attrib.get('librarySectionID')
if librarySectionID:
for item in items:
item.librarySectionID = librarySectionID
return items
@property
def totalSize(self):
if self._total_size is None:
part = '/library/sections/%s/all?X-Plex-Container-Start=0&X-Plex-Container-Size=1' % self.key
data = self._server.query(part)
self._total_size = int(data.attrib.get("totalSize"))
return self._total_size
def delete(self):
""" Delete a library section. """
try:
return self._server.query('/library/sections/%s' % self.key, method=self._server._session.delete)
except BadRequest: # pragma: no cover
msg = 'Failed to delete library %s' % self.key
msg += 'You may need to allow this permission in your Plex settings.'
log.error(msg)
raise
def reload(self, key=None):
return self._server.library.section(self.title)
def edit(self, agent=None, **kwargs):
""" Edit a library (Note: agent is required). See :class:`~plexapi.library.Library` for example usage.
Parameters:
kwargs (dict): Dict of settings to edit.
"""
if not agent:
agent = self.agent
part = '/library/sections/%s?agent=%s&%s' % (self.key, agent, urlencode(kwargs))
self._server.query(part, method=self._server._session.put)
        # Reload this way since self.key doesn't have a full path, but is simply an id.
for s in self._server.library.sections():
if s.key == self.key:
return s
def get(self, title):
""" Returns the media item with the specified title.
Parameters:
title (str): Title of the item to return.
"""
key = '/library/sections/%s/all?title=%s' % (self.key, quote(title, safe=''))
return self.fetchItem(key, title__iexact=title)
def all(self, sort=None, **kwargs):
""" Returns a list of media from this library section.
Parameters:
sort (string): The sort string
"""
sortStr = ''
if sort is not None:
sortStr = '?sort=' + sort
key = '/library/sections/%s/all%s' % (self.key, sortStr)
return self.fetchItems(key, **kwargs)
def agents(self):
""" Returns a list of available `:class:`~plexapi.media.Agent` for this library section.
"""
return self._server.agents(utils.searchType(self.type))
def settings(self):
""" Returns a list of all library settings. """
key = '/library/sections/%s/prefs' % self.key
data = self._server.query(key)
return self.findItems(data, cls=Setting)
def onDeck(self):
""" Returns a list of media items on deck from this library section. """
key = '/library/sections/%s/onDeck' % self.key
return self.fetchItems(key)
def recentlyAdded(self, maxresults=50):
""" Returns a list of media items recently added from this library section.
Parameters:
maxresults (int): Max number of items to return (default 50).
"""
return self.search(sort='addedAt:desc', maxresults=maxresults)
def analyze(self):
""" Run an analysis on all of the items in this library section. See
See :func:`~plexapi.base.PlexPartialObject.analyze` for more details.
"""
key = '/library/sections/%s/analyze' % self.key
self._server.query(key, method=self._server._session.put)
def emptyTrash(self):
""" If a section has items in the Trash, use this option to empty the Trash. """
key = '/library/sections/%s/emptyTrash' % self.key
self._server.query(key, method=self._server._session.put)
def update(self):
""" Scan this section for new media. """
key = '/library/sections/%s/refresh' % self.key
self._server.query(key)
def cancelUpdate(self):
""" Cancel update of this Library Section. """
key = '/library/sections/%s/refresh' % self.key
self._server.query(key, method=self._server._session.delete)
def refresh(self):
""" Forces a download of fresh media information from the internet.
This can take a long time. Any locked fields are not modified.
"""
key = '/library/sections/%s/refresh?force=1' % self.key
self._server.query(key)
def deleteMediaPreviews(self):
""" Delete the preview thumbnails for items in this library. This cannot
be undone. Recreating media preview files can take hours or even days.
"""
key = '/library/sections/%s/indexes' % self.key
self._server.query(key, method=self._server._session.delete)
def listChoices(self, category, libtype=None, **kwargs):
""" Returns a list of :class:`~plexapi.library.FilterChoice` objects for the
specified category and libtype. kwargs can be any of the same kwargs in
            :func:`plexapi.library.LibrarySection.search()` to help narrow down the choices
to only those that matter in your current context.
Parameters:
category (str): Category to list choices for (genre, contentRating, etc).
libtype (int): Library type of item filter.
**kwargs (dict): Additional kwargs to narrow down the choices.
Raises:
:class:`plexapi.exceptions.BadRequest`: Cannot include kwarg equal to specified category.
"""
# TODO: Should this be moved to base?
if category in kwargs:
raise BadRequest('Cannot include kwarg equal to specified category: %s' % category)
args = {}
for subcategory, value in kwargs.items():
args[category] = self._cleanSearchFilter(subcategory, value)
if libtype is not None:
args['type'] = utils.searchType(libtype)
key = '/library/sections/%s/%s%s' % (self.key, category, utils.joinArgs(args))
return self.fetchItems(key, cls=FilterChoice)
def search(self, title=None, sort=None, maxresults=None,
libtype=None, container_start=0, container_size=X_PLEX_CONTAINER_SIZE, **kwargs):
""" Search the library. The http requests will be batched in container_size. If you're only looking for the first <num>
            results, it would be wise to set the maxresults option to that amount so this function
doesn't iterate over all results on the server.
Parameters:
title (str): General string query to search for (optional).
sort (str): column:dir; column can be any of {addedAt, originallyAvailableAt, lastViewedAt,
titleSort, rating, mediaHeight, duration}. dir can be asc or desc (optional).
maxresults (int): Only return the specified number of results (optional).
            libtype (str): Filter results to a specific libtype (movie, show, episode, artist,
album, track; optional).
container_start (int): default 0
container_size (int): default X_PLEX_CONTAINER_SIZE in your config file.
**kwargs (dict): Any of the available filters for the current library section. Partial string
matches allowed. Multiple matches OR together. Negative filtering also possible, just add an
exclamation mark to the end of filter name, e.g. `resolution!=1x1`.
* unwatched: Display or hide unwatched content (True, False). [all]
* duplicate: Display or hide duplicate items (True, False). [movie]
* actor: List of actors to search ([actor_or_id, ...]). [movie]
* collection: List of collections to search within ([collection_or_id, ...]). [all]
* contentRating: List of content ratings to search within ([rating_or_key, ...]). [movie,tv]
* country: List of countries to search within ([country_or_key, ...]). [movie,music]
* decade: List of decades to search within ([yyy0, ...]). [movie]
* director: List of directors to search ([director_or_id, ...]). [movie]
                * genre: List of genres to search within ([genre_or_id, ...]). [all]
* network: List of TV networks to search within ([resolution_or_key, ...]). [tv]
* resolution: List of video resolutions to search within ([resolution_or_key, ...]). [movie]
* studio: List of studios to search within ([studio_or_key, ...]). [music]
* year: List of years to search within ([yyyy, ...]). [all]
Raises:
:class:`plexapi.exceptions.BadRequest`: when applying unknown filter
"""
# cleanup the core arguments
args = {}
for category, value in kwargs.items():
args[category] = self._cleanSearchFilter(category, value, libtype)
if title is not None:
args['title'] = title
if sort is not None:
args['sort'] = self._cleanSearchSort(sort)
if libtype is not None:
args['type'] = utils.searchType(libtype)
results = []
subresults = []
offset = container_start
if maxresults is not None:
container_size = min(container_size, maxresults)
while True:
key = '/library/sections/%s/all%s' % (self.key, utils.joinArgs(args))
subresults = self.fetchItems(key, container_start=container_start,
container_size=container_size)
if not len(subresults):
if offset > self.totalSize:
log.info("container_start is higher then the number of items in the library")
break
results.extend(subresults)
            # self.totalSize is not used as a condition in the while loop as
            # this would require an additional http request.
            # self.totalSize is updated from .fetchItems
wanted_number_of_items = self.totalSize - offset
if maxresults is not None:
wanted_number_of_items = min(maxresults, wanted_number_of_items)
container_size = min(container_size, maxresults - len(results))
if wanted_number_of_items <= len(results):
break
container_start += container_size
return results
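    # Usage sketch (illustrative; assumes `section` is a movie LibrarySection):
    #
    #   section.search(unwatched=True, genre='comedy',
    #                  sort='titleSort:asc', maxresults=10)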
def _cleanSearchFilter(self, category, value, libtype=None):
# check a few things before we begin
if category.endswith('!'):
if category[:-1] not in self.ALLOWED_FILTERS:
raise BadRequest('Unknown filter category: %s' % category[:-1])
elif category not in self.ALLOWED_FILTERS:
raise BadRequest('Unknown filter category: %s' % category)
if category in self.BOOLEAN_FILTERS:
return '1' if value else '0'
if not isinstance(value, (list, tuple)):
value = [value]
# convert list of values to list of keys or ids
result = set()
choices = self.listChoices(category, libtype)
lookup = {c.title.lower(): unquote(unquote(c.key)) for c in choices}
allowed = set(c.key for c in choices)
for item in value:
item = str((item.id or item.tag) if isinstance(item, MediaTag) else item).lower()
# find most logical choice(s) to use in url
if item in allowed: result.add(item); continue
if item in lookup: result.add(lookup[item]); continue
matches = [k for t, k in lookup.items() if item in t]
if matches: result.update(matches); continue  # map() is lazy in py3 and would add nothing
# nothing matched; use raw item value
log.debug('Filter value not listed, using raw item value: %s' % item)
result.add(item)
return ','.join(result)
def _cleanSearchSort(self, sort):
sort = '%s:asc' % sort if ':' not in sort else sort
scol, sdir = sort.lower().split(':')
lookup = {s.lower(): s for s in self.ALLOWED_SORT}
if scol not in lookup:
raise BadRequest('Unknown sort column: %s' % scol)
if sdir not in ('asc', 'desc'):
raise BadRequest('Unknown sort dir: %s' % sdir)
return '%s:%s' % (lookup[scol], sdir)
def sync(self, policy, mediaSettings, client=None, clientId=None, title=None, sort=None, libtype=None,
**kwargs):
""" Add current library section as sync item for specified device.
See description of :func:`~plexapi.library.LibrarySection.search()` for details about filtering / sorting
and :func:`plexapi.myplex.MyPlexAccount.sync()` for possible exceptions.
Parameters:
policy (:class:`plexapi.sync.Policy`): policy of syncing the media (how many items to sync and process
watched media or not), generated automatically when method
called on specific LibrarySection object.
mediaSettings (:class:`plexapi.sync.MediaSettings`): Transcoding settings used for the media, generated
automatically when method called on specific
LibrarySection object.
client (:class:`plexapi.myplex.MyPlexDevice`): sync destination, see
:func:`plexapi.myplex.MyPlexAccount.sync`.
clientId (str): sync destination, see :func:`plexapi.myplex.MyPlexAccount.sync`.
title (str): descriptive title for the new :class:`plexapi.sync.SyncItem`, if empty the value would be
generated from metadata of current media.
sort (str): formatted as `column:dir`; column can be any of {`addedAt`, `originallyAvailableAt`,
`lastViewedAt`, `titleSort`, `rating`, `mediaHeight`, `duration`}. dir can be `asc` or
`desc`.
libtype (str): Filter results to a specific libtype (`movie`, `show`, `episode`, `artist`, `album`,
`track`).
Returns:
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
Raises:
:class:`plexapi.exceptions.BadRequest`: when the library is not allowed to sync
Example:
.. code-block:: python
from plexapi import myplex
from plexapi.sync import Policy, MediaSettings, VIDEO_QUALITY_3_MBPS_720p
c = myplex.MyPlexAccount()
target = c.device('Plex Client')
sync_items_wd = c.syncItems(target.clientIdentifier)
srv = c.resource('Server Name').connect()
section = srv.library.section('Movies')
policy = Policy('count', unwatched=True, value=1)
media_settings = MediaSettings.create(VIDEO_QUALITY_3_MBPS_720p)
section.sync(target, policy, media_settings, title='Next best movie', sort='rating:desc')
"""
from plexapi.sync import SyncItem
if not self.allowSync:
raise BadRequest('The requested library is not allowed to sync')
args = {}
for category, value in kwargs.items():
args[category] = self._cleanSearchFilter(category, value, libtype)
if sort is not None:
args['sort'] = self._cleanSearchSort(sort)
if libtype is not None:
args['type'] = utils.searchType(libtype)
myplex = self._server.myPlexAccount()
sync_item = SyncItem(self._server, None)
sync_item.title = title if title else self.title
sync_item.rootTitle = self.title
sync_item.contentType = self.CONTENT_TYPE
sync_item.metadataType = self.METADATA_TYPE
sync_item.machineIdentifier = self._server.machineIdentifier
key = '/library/sections/%s/all' % self.key
sync_item.location = 'library://%s/directory/%s' % (self.uuid, quote_plus(key + utils.joinArgs(args)))
sync_item.policy = policy
sync_item.mediaSettings = mediaSettings
return myplex.sync(client=client, clientId=clientId, sync_item=sync_item)
def history(self, maxresults=9999999, mindate=None):
""" Get Play History for this library Section for the owner.
Parameters:
maxresults (int): Only return the specified number of results (optional).
mindate (datetime): Min datetime to return results from.
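Example:
.. code-block:: python
# a usage sketch; `section` is assumed to be a LibrarySection
history = section.history(maxresults=50)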
"""
return self._server.history(maxresults=maxresults, mindate=mindate, librarySectionID=self.key, accountID=1)
class MovieSection(LibrarySection):
""" Represents a :class:`~plexapi.library.LibrarySection` section containing movies.
Attributes:
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('unwatched',
'duplicate', 'year', 'decade', 'genre', 'contentRating', 'collection',
'director', 'actor', 'country', 'studio', 'resolution', 'guid', 'label', 'unmatched')
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt',
'originallyAvailableAt', 'lastViewedAt', 'titleSort', 'rating',
'mediaHeight', 'duration')
TAG (str): 'Directory'
TYPE (str): 'movie'
"""
ALLOWED_FILTERS = ('unwatched', 'duplicate', 'year', 'decade', 'genre', 'contentRating',
'collection', 'director', 'actor', 'country', 'studio', 'resolution',
'guid', 'label', 'writer', 'producer', 'subtitleLanguage', 'audioLanguage',
'lastViewedAt', 'viewCount', 'addedAt', 'unmatched')
ALLOWED_SORT = ('addedAt', 'originallyAvailableAt', 'lastViewedAt', 'titleSort', 'rating',
'mediaHeight', 'duration')
TAG = 'Directory'
TYPE = 'movie'
METADATA_TYPE = 'movie'
CONTENT_TYPE = 'video'
def collection(self, **kwargs):
""" Returns a list of collections from this library section. """
return self.search(libtype='collection', **kwargs)
def sync(self, videoQuality, limit=None, unwatched=False, **kwargs):
""" Add current Movie library section as sync item for specified device.
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
Parameters:
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in
:mod:`plexapi.sync` module.
limit (int): maximum count of movies to sync, unlimited if `None`.
unwatched (bool): if `True` watched videos wouldn't be synced.
Returns:
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
Example:
.. code-block:: python
from plexapi import myplex
from plexapi.sync import VIDEO_QUALITY_3_MBPS_720p
c = myplex.MyPlexAccount()
target = c.device('Plex Client')
sync_items_wd = c.syncItems(target.clientIdentifier)
srv = c.resource('Server Name').connect()
section = srv.library.section('Movies')
section.sync(VIDEO_QUALITY_3_MBPS_720p, client=target, limit=1, unwatched=True,
title='Next best movie', sort='rating:desc')
"""
from plexapi.sync import Policy, MediaSettings
kwargs['mediaSettings'] = MediaSettings.createVideo(videoQuality)
kwargs['policy'] = Policy.create(limit, unwatched)
return super(MovieSection, self).sync(**kwargs)
class ShowSection(LibrarySection):
""" Represents a :class:`~plexapi.library.LibrarySection` section containing tv shows.
Attributes:
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('unwatched',
'year', 'genre', 'contentRating', 'network', 'collection', 'guid', 'label')
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt', 'lastViewedAt',
'originallyAvailableAt', 'titleSort', 'rating', 'unwatched')
TAG (str): 'Directory'
TYPE (str): 'show'
"""
ALLOWED_FILTERS = ('unwatched', 'year', 'genre', 'contentRating', 'network', 'collection',
'guid', 'duplicate', 'label', 'show.title', 'show.year', 'show.userRating',
'show.viewCount', 'show.lastViewedAt', 'show.actor', 'show.addedAt', 'episode.title',
'episode.originallyAvailableAt', 'episode.resolution', 'episode.subtitleLanguage',
'episode.unwatched', 'episode.addedAt', 'episode.userRating', 'episode.viewCount',
'episode.lastViewedAt', 'unmatched')
ALLOWED_SORT = ('addedAt', 'lastViewedAt', 'originallyAvailableAt', 'titleSort',
'rating', 'unwatched')
TAG = 'Directory'
TYPE = 'show'
METADATA_TYPE = 'episode'
CONTENT_TYPE = 'video'
def searchShows(self, **kwargs):
""" Search for a show. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='show', **kwargs)
def searchEpisodes(self, **kwargs):
""" Search for an episode. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='episode', **kwargs)
def recentlyAdded(self, libtype='episode', maxresults=50):
""" Returns a list of recently added episodes from this library section.
Parameters:
maxresults (int): Max number of items to return (default 50).
"""
return self.search(sort='addedAt:desc', libtype=libtype, maxresults=maxresults)
def collection(self, **kwargs):
""" Returns a list of collections from this library section. """
return self.search(libtype='collection', **kwargs)
def sync(self, videoQuality, limit=None, unwatched=False, **kwargs):
""" Add current Show library section as sync item for specified device.
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
Parameters:
videoQuality (int): idx of quality of the video, one of VIDEO_QUALITY_* values defined in
:mod:`plexapi.sync` module.
limit (int): maximum count of episodes to sync, unlimited if `None`.
unwatched (bool): if `True` watched videos wouldn't be synced.
Returns:
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
Example:
.. code-block:: python
from plexapi import myplex
from plexapi.sync import VIDEO_QUALITY_3_MBPS_720p
c = myplex.MyPlexAccount()
target = c.device('Plex Client')
sync_items_wd = c.syncItems(target.clientIdentifier)
srv = c.resource('Server Name').connect()
section = srv.library.section('TV-Shows')
section.sync(VIDEO_QUALITY_3_MBPS_720p, client=target, limit=1, unwatched=True,
title='Next unwatched episode')
"""
from plexapi.sync import Policy, MediaSettings
kwargs['mediaSettings'] = MediaSettings.createVideo(videoQuality)
kwargs['policy'] = Policy.create(limit, unwatched)
return super(ShowSection, self).sync(**kwargs)
class MusicSection(LibrarySection):
""" Represents a :class:`~plexapi.library.LibrarySection` section containing music artists.
Attributes:
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('genre',
'country', 'collection')
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt',
'lastViewedAt', 'viewCount', 'titleSort', 'userRating')
TAG (str): 'Directory'
TYPE (str): 'artist'
"""
ALLOWED_FILTERS = ('genre', 'country', 'collection', 'mood', 'year', 'track.userRating', 'artist.title',
'artist.userRating', 'artist.genre', 'artist.country', 'artist.collection', 'artist.addedAt',
'album.title', 'album.userRating', 'album.genre', 'album.decade', 'album.collection',
'album.viewCount', 'album.lastViewedAt', 'album.studio', 'album.addedAt', 'track.title',
'track.userRating', 'track.viewCount', 'track.lastViewedAt', 'track.skipCount',
'track.lastSkippedAt')
ALLOWED_SORT = ('addedAt', 'lastViewedAt', 'viewCount', 'titleSort', 'userRating')
TAG = 'Directory'
TYPE = 'artist'
CONTENT_TYPE = 'audio'
METADATA_TYPE = 'track'
def albums(self):
""" Returns a list of :class:`~plexapi.audio.Album` objects in this section. """
key = '/library/sections/%s/albums' % self.key
return self.fetchItems(key)
def searchArtists(self, **kwargs):
""" Search for an artist. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='artist', **kwargs)
def searchAlbums(self, **kwargs):
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='album', **kwargs)
def searchTracks(self, **kwargs):
""" Search for a track. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='track', **kwargs)
def collection(self, **kwargs):
""" Returns a list of collections from this library section. """
return self.search(libtype='collection', **kwargs)
def sync(self, bitrate, limit=None, **kwargs):
""" Add current Music library section as sync item for specified device.
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
Parameters:
bitrate (int): maximum bitrate for synchronized music, better use one of MUSIC_BITRATE_* values from the
module :mod:`plexapi.sync`.
limit (int): maximum count of tracks to sync, unlimited if `None`.
Returns:
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
Example:
.. code-block:: python
from plexapi import myplex
from plexapi.sync import AUDIO_BITRATE_320_KBPS
c = myplex.MyPlexAccount()
target = c.device('Plex Client')
sync_items_wd = c.syncItems(target.clientIdentifier)
srv = c.resource('Server Name').connect()
section = srv.library.section('Music')
section.sync(AUDIO_BITRATE_320_KBPS, client=target, limit=100, sort='addedAt:desc',
title='New music')
"""
from plexapi.sync import Policy, MediaSettings
kwargs['mediaSettings'] = MediaSettings.createMusic(bitrate)
kwargs['policy'] = Policy.create(limit)
return super(MusicSection, self).sync(**kwargs)
class PhotoSection(LibrarySection):
""" Represents a :class:`~plexapi.library.LibrarySection` section containing photos.
Attributes:
ALLOWED_FILTERS (list<str>): List of allowed search filters. ('all', 'iso',
'make', 'lens', 'aperture', 'exposure', 'device', 'resolution')
ALLOWED_SORT (list<str>): List of allowed sorting keys. ('addedAt')
TAG (str): 'Directory'
TYPE (str): 'photo'
"""
ALLOWED_FILTERS = ('all', 'iso', 'make', 'lens', 'aperture', 'exposure', 'device', 'resolution', 'place',
'originallyAvailableAt', 'addedAt', 'title', 'userRating', 'tag', 'year')
ALLOWED_SORT = ('addedAt',)
TAG = 'Directory'
TYPE = 'photo'
CONTENT_TYPE = 'photo'
METADATA_TYPE = 'photo'
def searchAlbums(self, title, **kwargs):
""" Search for an album. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='photoalbum', title=title, **kwargs)
def searchPhotos(self, title, **kwargs):
""" Search for a photo. See :func:`~plexapi.library.LibrarySection.search()` for usage. """
return self.search(libtype='photo', title=title, **kwargs)
def sync(self, resolution, limit=None, **kwargs):
""" Add current Music library section as sync item for specified device.
See description of :func:`plexapi.library.LibrarySection.search()` for details about filtering / sorting and
:func:`plexapi.library.LibrarySection.sync()` for details on syncing libraries and possible exceptions.
Parameters:
resolution (str): maximum allowed resolution for synchronized photos, see PHOTO_QUALITY_* values in the
module :mod:`plexapi.sync`.
limit (int): maximum count of photos to sync, unlimited if `None`.
Returns:
:class:`plexapi.sync.SyncItem`: an instance of created syncItem.
Example:
.. code-block:: python
from plexapi import myplex
from plexapi.sync import PHOTO_QUALITY_HIGH
c = myplex.MyPlexAccount()
target = c.device('Plex Client')
sync_items_wd = c.syncItems(target.clientIdentifier)
srv = c.resource('Server Name').connect()
section = srv.library.section('Photos')
section.sync(PHOTO_QUALITY_HIGH, client=target, limit=100, sort='addedAt:desc',
title='Fresh photos')
"""
from plexapi.sync import Policy, MediaSettings
kwargs['mediaSettings'] = MediaSettings.createPhoto(resolution)
kwargs['policy'] = Policy.create(limit)
return super(PhotoSection, self).sync(**kwargs)
class FilterChoice(PlexObject):
""" Represents a single filter choice. These objects are gathered when using filters
while searching for library items and is the object returned in the result set of
:func:`~plexapi.library.LibrarySection.listChoices()`.
Attributes:
TAG (str): 'Directory'
server (:class:`~plexapi.server.PlexServer`): PlexServer this client is connected to.
initpath (str): Relative path requested when retrieving specified `data` (optional).
fastKey (str): API path to quickly list all items in this filter
(/library/sections/<section>/all?genre=<key>)
key (str): Short key (id) of this filter option (used as <key> in fastKey above).
thumb (str): Thumbnail used to represent this filter option.
title (str): Human readable name for this filter option.
type (str): Filter type (genre, contentRating, etc).
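Example:
A minimal sketch of listing filter choices (the `plex` server object
and the section name are assumptions):
.. code-block:: python
section = plex.library.section('Movies')
for choice in section.listChoices('genre'):
    print(choice.title, choice.key)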
"""
TAG = 'Directory'
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
self.fastKey = data.attrib.get('fastKey')
self.key = data.attrib.get('key')
self.thumb = data.attrib.get('thumb')
self.title = data.attrib.get('title')
self.type = data.attrib.get('type')
@utils.registerPlexObject
class Hub(PlexObject):
""" Represents a single Hub (or category) in the PlexServer search.
Attributes:
TAG (str): 'Hub'
hubIdentifier (str): Unknown.
size (int): Number of items found.
title (str): Title of this Hub.
type (str): Type of items in the Hub.
items (list): List of items in the Hub.
"""
TAG = 'Hub'
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
self.hubIdentifier = data.attrib.get('hubIdentifier')
self.size = utils.cast(int, data.attrib.get('size'))
self.title = data.attrib.get('title')
self.type = data.attrib.get('type')
self.key = data.attrib.get('key')
self.items = self.findItems(data)
def __len__(self):
return self.size
@utils.registerPlexObject
class Collections(PlexObject):
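""" Represents a single Collection of media items in a library section.
Attributes:
TAG (str): 'Directory'
TYPE (str): 'collection'
"""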
TAG = 'Directory'
TYPE = 'collection'
_include = "?includeExternalMedia=1&includePreferences=1"
def _loadData(self, data):
self.ratingKey = utils.cast(int, data.attrib.get('ratingKey'))
self._details_key = "/library/metadata/%s%s" % (self.ratingKey, self._include)
self.key = data.attrib.get('key')
self.type = data.attrib.get('type')
self.title = data.attrib.get('title')
self.subtype = data.attrib.get('subtype')
self.summary = data.attrib.get('summary')
self.index = utils.cast(int, data.attrib.get('index'))
self.thumb = data.attrib.get('thumb')
self.addedAt = utils.toDatetime(data.attrib.get('addedAt'))
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
self.childCount = utils.cast(int, data.attrib.get('childCount'))
self.minYear = utils.cast(int, data.attrib.get('minYear'))
self.maxYear = utils.cast(int, data.attrib.get('maxYear'))
self.collectionMode = data.attrib.get('collectionMode')
self.collectionSort = data.attrib.get('collectionSort')
@property
def children(self):
return self.fetchItems(self.key)
def __len__(self):
return self.childCount
def delete(self):
part = '/library/metadata/%s' % self.ratingKey
return self._server.query(part, method=self._server._session.delete)
def modeUpdate(self, mode=None):
""" Update Collection Mode
Parameters:
mode: default (Library default)
hide (Hide Collection)
hideItems (Hide Items in this Collection)
showItems (Show this Collection and its Items)
Example:
# e.g. the first collection in a 'Movies' section
collection = plex.library.section('Movies').collection()[0]
collection.modeUpdate(mode="hide")
"""
mode_dict = {'default': '-2',
'hide': '0',
'hideItems': '1',
'showItems': '2'}
key = mode_dict.get(mode)
if key is None:
raise BadRequest('Unknown collection mode: %s. Options: %s' % (mode, list(mode_dict)))
part = '/library/metadata/%s/prefs?collectionMode=%s' % (self.ratingKey, key)
return self._server.query(part, method=self._server._session.put)
def sortUpdate(self, sort=None):
""" Update Collection Sorting
Parameters:
sort: release (Order Collection by release dates)
alpha (Order Collection alphabetically)
Example:
# e.g. the first collection in a 'Movies' section
collection = plex.library.section('Movies').collection()[0]
collection.sortUpdate(sort="alpha")
"""
sort_dict = {'release': '0',
'alpha': '1'}
key = sort_dict.get(sort)
if key is None:
raise BadRequest('Unknown collection sort: %s. Options: %s' % (sort, list(sort_dict)))
part = '/library/metadata/%s/prefs?collectionSort=%s' % (self.ratingKey, key)
return self._server.query(part, method=self._server._session.put)
def posters(self):
""" Returns list of available poster objects. :class:`~plexapi.media.Poster`. """
return self.fetchItems('/library/metadata/%s/posters' % self.ratingKey)
def uploadPoster(self, url=None, filepath=None):
""" Upload poster from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
if url:
key = '/library/metadata/%s/posters?url=%s' % (self.ratingKey, quote_plus(url))
self._server.query(key, method=self._server._session.post)
elif filepath:
key = '/library/metadata/%s/posters?' % self.ratingKey
with open(filepath, 'rb') as f:  # close the file handle after reading
    data = f.read()
self._server.query(key, method=self._server._session.post, data=data)
def setPoster(self, poster):
""" Set . :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
poster.select()
def arts(self):
""" Returns list of available art objects. :class:`~plexapi.media.Poster`. """
return self.fetchItems('/library/metadata/%s/arts' % self.ratingKey)
def uploadArt(self, url=None, filepath=None):
""" Upload art from url or filepath. :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video`. """
if url:
key = '/library/metadata/%s/arts?url=%s' % (self.ratingKey, quote_plus(url))
self._server.query(key, method=self._server._session.post)
elif filepath:
key = '/library/metadata/%s/arts?' % self.ratingKey
with open(filepath, 'rb') as f:  # close the file handle after reading
    data = f.read()
self._server.query(key, method=self._server._session.post, data=data)
def setArt(self, art):
""" Set :class:`~plexapi.media.Poster` to :class:`~plexapi.video.Video` """
art.select()
# def edit(self, **kwargs):
# TODO
|
py | 1a441b93be1937ffc8210d5bd710a180568bca98 | import pytest
import sqlalchemy as sa
from sqlalchemy_utils import get_mapper
from sqlalchemy_utils.functions.orm import _get_query_compile_state
class TestGetMapper(object):
@pytest.fixture
def Building(self, Base):
class Building(Base):
__tablename__ = 'building'
id = sa.Column(sa.Integer, primary_key=True)
return Building
def test_table(self, Building):
assert get_mapper(Building.__table__) == sa.inspect(Building)
def test_declarative_class(self, Building):
assert (
get_mapper(Building) ==
sa.inspect(Building)
)
def test_declarative_object(self, Building):
assert (
get_mapper(Building()) ==
sa.inspect(Building)
)
def test_mapper(self, Building):
assert (
get_mapper(Building.__mapper__) ==
sa.inspect(Building)
)
def test_class_alias(self, Building):
assert (
get_mapper(sa.orm.aliased(Building)) ==
sa.inspect(Building)
)
def test_instrumented_attribute(self, Building):
assert (
get_mapper(Building.id) == sa.inspect(Building)
)
def test_table_alias(self, Building):
alias = sa.orm.aliased(Building.__table__)
assert (
get_mapper(alias) ==
sa.inspect(Building)
)
def test_column(self, Building):
assert (
get_mapper(Building.__table__.c.id) ==
sa.inspect(Building)
)
def test_column_of_an_alias(self, Building):
assert (
get_mapper(sa.orm.aliased(Building.__table__).c.id) ==
sa.inspect(Building)
)
class TestGetMapperWithQueryEntities(object):
@pytest.fixture
def Building(self, Base):
class Building(Base):
__tablename__ = 'building'
id = sa.Column(sa.Integer, primary_key=True)
return Building
@pytest.fixture
def init_models(self, Building):
pass
def test_mapper_entity_with_mapper(self, session, Building):
query = session.query(Building.__mapper__)
entity = _get_query_compile_state(query)._entities[0]
assert get_mapper(entity) == sa.inspect(Building)
def test_mapper_entity_with_class(self, session, Building):
query = session.query(Building)
entity = _get_query_compile_state(query)._entities[0]
assert get_mapper(entity) == sa.inspect(Building)
def test_column_entity(self, session, Building):
query = session.query(Building.id)
entity = _get_query_compile_state(query)._entities[0]
assert get_mapper(entity) == sa.inspect(Building)
class TestGetMapperWithMultipleMappersFound(object):
@pytest.fixture
def Building(self, Base):
class Building(Base):
__tablename__ = 'building'
id = sa.Column(sa.Integer, primary_key=True)
class BigBuilding(Building):
pass
return Building
def test_table(self, Building):
with pytest.raises(ValueError):
get_mapper(Building.__table__)
def test_table_alias(self, Building):
alias = sa.orm.aliased(Building.__table__)
with pytest.raises(ValueError):
get_mapper(alias)
class TestGetMapperForTableWithoutMapper(object):
@pytest.fixture
def building(self):
metadata = sa.MetaData()
return sa.Table('building', metadata)
def test_table(self, building):
with pytest.raises(ValueError):
get_mapper(building)
def test_table_alias(self, building):
alias = sa.orm.aliased(building)
with pytest.raises(ValueError):
get_mapper(alias)
|
py | 1a441c14fb7df5ce190e13ba2478c8d39b9dcca4 | import pytest
from Machine.Machine import *
@pytest.mark.parametrize("test_input,expected", [
('12',12),
('13',13),
('01',1),
])
def test_conversion(test_input, expected):
    assert int(test_input) == expected  # use '==', not 'is': identity only works via small-int caching
def test_Machine():
m1=Machine()
assert m1.checkState() is States.Free
m1.reserveMachine()
assert m1.checkState() is States.Busy
m1.turnOff()
assert m1.checkState() is States.Offline
@pytest.mark.xfail
def test_divide_by_zero():
assert 1 / 0 == 1
|
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXXX
XXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
|
py | 1a441d2ba67f2ea6efb2fe2b0cdac8210ca0158d | """
Day 2: 1202 Program Alarm
"""
from itertools import product
from utils import get_int_list
from intcode.cpu import IntcodeCpu
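# Project-local helpers (inferred from usage, not documented here): get_int_list
# loads the day's comma-separated puzzle input as a list of ints; IntcodeCpu is
# this repo's intcode interpreter with indexable memory and state snapshots.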


def puzzle1():
    prog = get_int_list('day2')
    # Restore the "1202 program alarm" state: noun = 12, verb = 2.
    prog[1] = 12
    prog[2] = 2
    cpu = IntcodeCpu(prog)
    cpu.run()
    print(cpu[0])


def puzzle2():
    prog = get_int_list('day2')
    cpu = IntcodeCpu(prog)
    # Brute-force search: try every noun/verb pair in 0..99 until the
    # program halts with the target value in address 0.
    for noun, verb in product(range(100), range(100)):
        cpu.push_state()
        cpu[1] = noun
        cpu[2] = verb
        try:
            cpu.run()
        except (IndexError, ValueError):
            # Restore the saved snapshot before trying the next pair; the
            # original fell through here, leaving push_state unbalanced.
            cpu.pop_state()
            continue
        if cpu[0] == 19690720:
            break
        cpu.pop_state()
    else:
        print("Not Found")
        return
    print(100 * noun + verb)


if __name__ == '__main__':
    puzzle1()
    puzzle2()
|
py | 1a441dadb3bbc5c49e085d18a9b5d89759401a33 | # This file is part of the MapProxy project.
# Copyright (C) 2011 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import os
import tempfile
from lxml import etree, html
from nose.tools import eq_
from mapproxy.featureinfo import (combined_inputs, XSLTransformer,
    XMLFeatureInfoDoc, HTMLFeatureInfoDoc)
from mapproxy.test.helper import strip_whitespace
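
# The tests below exercise MapProxy's feature-info handling: combining multiple
# GetFeatureInfo documents (XML and HTML) and transforming them with XSLT.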


def test_combined_inputs():
    foo = '<a><b>foo</b></a>'
    bar = '<a><b>bar</b></a>'

    result = combined_inputs([foo, bar])
    result = etree.tostring(result)
    eq_(result, b'<a><b>foo</b><b>bar</b></a>')


class TestXSLTransformer(object):
    def setup(self):
        fd_, self.xsl_script = tempfile.mkstemp('.xsl')
        xsl = b"""
        <xsl:stylesheet version="1.0"
         xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
         <xsl:template match="/">
             <root>
                 <xsl:apply-templates select='/a/b'/>
             </root>
         </xsl:template>
         <xsl:template match="/a/b">
             <foo><xsl:value-of select="text()" /></foo>
         </xsl:template>
        </xsl:stylesheet>""".strip()
        # Write through the descriptor returned by mkstemp and close it,
        # instead of opening a second, never-closed handle to the same file.
        with os.fdopen(fd_, 'wb') as f:
            f.write(xsl)

    def teardown(self):
        os.remove(self.xsl_script)

    def test_transformer(self):
        t = XSLTransformer(self.xsl_script)
        doc = t.transform(XMLFeatureInfoDoc('<a><b>Text</b></a>'))
        eq_(strip_whitespace(doc.as_string()), b'<root><foo>Text</foo></root>')

    def test_multiple(self):
        t = XSLTransformer(self.xsl_script)
        doc = t.transform(XMLFeatureInfoDoc.combine([
            XMLFeatureInfoDoc(x) for x in
            [b'<a><b>ab</b></a>',
             b'<a><b>ab1</b><b>ab2</b><b>ab3</b></a>',
             b'<a><b>ab1</b><c>ac</c><b>ab2</b></a>',
            ]]))

        eq_(strip_whitespace(doc.as_string()),
            strip_whitespace(b'''
            <root>
                <foo>ab</foo>
                <foo>ab1</foo><foo>ab2</foo><foo>ab3</foo>
                <foo>ab1</foo><foo>ab2</foo>
            </root>'''))
        eq_(doc.info_type, 'xml')


class TestXMLFeatureInfoDocs(object):
    def test_as_string(self):
        input_tree = etree.fromstring('<root></root>')
        doc = XMLFeatureInfoDoc(input_tree)
        eq_(strip_whitespace(doc.as_string()),
            b'<root/>')

    def test_as_etree(self):
        doc = XMLFeatureInfoDoc('<root>hello</root>')
        eq_(doc.as_etree().getroot().text, 'hello')

    def test_combine(self):
        docs = [
            XMLFeatureInfoDoc('<root><a>foo</a></root>'),
            XMLFeatureInfoDoc('<root><b>bar</b></root>'),
            XMLFeatureInfoDoc('<other_root><a>baz</a></other_root>'),
        ]
        result = XMLFeatureInfoDoc.combine(docs)

        eq_(strip_whitespace(result.as_string()),
            strip_whitespace(b'<root><a>foo</a><b>bar</b><a>baz</a></root>'))
        eq_(result.info_type, 'xml')
class TestXMLFeatureInfoDocsNoLXML(object):
def setup(self):
from mapproxy import featureinfo
self.old_etree = featureinfo.etree
featureinfo.etree = None
def teardown(self):
from mapproxy import featureinfo
featureinfo.etree = self.old_etree
def test_combine(self):
docs = [
XMLFeatureInfoDoc(b'<root><a>foo</a></root>'),
XMLFeatureInfoDoc(b'<root><b>bar</b></root>'),
XMLFeatureInfoDoc(b'<other_root><a>baz</a></other_root>'),
]
result = XMLFeatureInfoDoc.combine(docs)
eq_(b'<root><a>foo</a></root>\n<root><b>bar</b></root>\n<other_root><a>baz</a></other_root>',
result.as_string())
eq_(result.info_type, 'text')
class TestHTMLFeatureInfoDocs(object):
def test_as_string(self):
input_tree = html.fromstring('<p>Foo')
doc = HTMLFeatureInfoDoc(input_tree)
assert b'<body><p>Foo</p></body>' in strip_whitespace(doc.as_string())
def test_as_etree(self):
doc = HTMLFeatureInfoDoc('<p>hello</p>')
eq_(doc.as_etree().find('body/p').text, 'hello')
def test_combine(self):
docs = [
HTMLFeatureInfoDoc(b'<html><head><title>Hello<body><p>baz</p><p>baz2'),
HTMLFeatureInfoDoc(b'<p>foo</p>'),
HTMLFeatureInfoDoc(b'<body><p>bar</p></body>'),
]
result = HTMLFeatureInfoDoc.combine(docs)
assert b'<title>Hello</title>' in result.as_string()
assert (b'<body><p>baz</p><p>baz2</p><p>foo</p><p>bar</p></body>' in
result.as_string())
eq_(result.info_type, 'html')
def test_combine_parts(self):
docs = [
HTMLFeatureInfoDoc('<p>foo</p>'),
HTMLFeatureInfoDoc('<body><p>bar</p></body>'),
HTMLFeatureInfoDoc('<html><head><title>Hello<body><p>baz</p><p>baz2'),
]
result = HTMLFeatureInfoDoc.combine(docs)
assert (b'<body><p>foo</p><p>bar</p><p>baz</p><p>baz2</p></body>' in
result.as_string())
eq_(result.info_type, 'html')
class TestHTMLFeatureInfoDocsNoLXML(object):
def setup(self):
from mapproxy import featureinfo
self.old_etree = featureinfo.etree
featureinfo.etree = None
def teardown(self):
from mapproxy import featureinfo
featureinfo.etree = self.old_etree
def test_combine(self):
docs = [
HTMLFeatureInfoDoc(b'<html><head><title>Hello<body><p>baz</p><p>baz2'),
HTMLFeatureInfoDoc(b'<p>foo</p>'),
HTMLFeatureInfoDoc(b'<body><p>bar</p></body>'),
]
result = HTMLFeatureInfoDoc.combine(docs)
eq_(b"<html><head><title>Hello<body><p>baz</p>"
b"<p>baz2\n<p>foo</p>\n<body><p>bar</p></body>",
result.as_string())
eq_(result.info_type, 'text')
|
py | 1a441f259493dcdd1a0752af8aa998f1edb52dca | # -*- coding: utf-8 -*-
# @Time : 2020/9/26
# @Author : Benny Jane
# @Email : n/a
# @File : command.py
# @Project : Flask-Demo
import os
import logging
from logging.handlers import RotatingFileHandler
from flask import request
basedir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
project_name = os.path.split(os.path.dirname(__file__))[1]
def register_logging(app):
class RequestFormatter(logging.Formatter):
        # Override format() via inheritance so that log records include the
        # request URL and the remote address.
def format(self, record):
record.url = request.url
record.remote_addr = request.remote_addr
return super(RequestFormatter, self).format(record)
request_formatter = RequestFormatter(
'[%(asctime)s] %(remote_addr)s requested %(url)s\n'
'%(levelname)s in %(module)s: %(message)s'
)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log_path = os.path.join(basedir, f'logs/{project_name}')
    if not os.path.exists(log_path):
        os.makedirs(log_path)  # makedirs also creates the parent 'logs' directory
file_handler = RotatingFileHandler("{}/career_plan.log".format(log_path),
maxBytes=10 * 1024 * 1024, backupCount=10)
file_handler.setFormatter(formatter)
file_handler.setLevel(logging.INFO)
    # The app-wide log level must be set explicitly (it defaults to DEBUG only
    # in debug mode); without it, no log records are emitted.
app.logger.setLevel(logging.INFO)
if not app.debug:
        # In production, set an appropriate log level
# app.logger.setLevel(logging.ERROR)
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
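

# Usage sketch (hypothetical minimal app; register_logging only needs a Flask app):
#
#   from flask import Flask
#   app = Flask(__name__)
#   register_logging(app)
#   app.logger.info('logging initialised')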
|
py | 1a441fdd331755eee865484794aa75c2c9f48db3 | """
References:
[1] E. Branlard, M. Gaunaa - Cylindrical vortex wake model: skewed cylinder, application to yawed or tilted rotors - Wind Energy, 2015
[2] E. Branlard - Wind Turbine Aerodynamics and Vorticity Based Method, Springer, 2017
"""
#--- Legacy python 2.7
from __future__ import division
from __future__ import print_function
# --- General
import unittest
import numpy as np
import numpy.matlib
# --- Local
try:
from .elliptic import ellipticPiCarlson, ellipe, ellipk
from .VortexLine import vl_semiinf_u
except:
from elliptic import ellipticPiCarlson, ellipe, ellipk
from VortexLine import vl_semiinf_u
# --------------------------------------------------------------------------------}
# --- Helper function
# --------------------------------------------------------------------------------{
def skew_components(u_x,u_z,m):
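    # Project Cartesian (u_x, u_z) onto the wake axis (zeta) and its in-plane
    # normal (xi); m = tan(chi) is the tangent of the wake skew angle.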
coschi = 1/np.sqrt(1+m**2)
sinchi = m/np.sqrt(1+m**2)
u_zeta = u_z * coschi + u_x * sinchi
u_xi = - u_z * sinchi + u_x * coschi
return u_zeta,u_xi
def polar_components(u_x,u_y,vpsi):
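    # Rotate Cartesian (u_x, u_y) into polar components (u_r, u_psi) at azimuth vpsi.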
u_r = np.multiply(u_x,np.cos(vpsi)) + np.multiply(u_y,np.sin(vpsi))
u_psi= -np.multiply(u_x,np.sin(vpsi)) + np.multiply(u_y,np.cos(vpsi))
return u_r,u_psi
# --------------------------------------------------------------------------------}
# --- Core functions, polar coordinates inputs
# --------------------------------------------------------------------------------{
def svc_tang_u_polar(vr,vpsi,vz,gamma_t=-1,R=1,m=0,ntheta=180,polar_out=False):
""" Induced velocity from a skewed semi infinite cylinder of tangential vorticity.
Takes polar coordinates as inputs, returns velocity either in Cartesian (default) or polar.
The cylinder axis is defined by x=m.z, m=tan(chi). The rotor is in the plane z=0.
The algorithm loops over the control points and performs the integration over theta
INPUTS:
vr,vpsi,vz : flat list of control points in polar coordinates
       gamma_t    : tangential vorticity of the vortex sheet (circulation per unit of length oriented along psi); negative for a WT rotating positively along psi
R : radius of cylinder
m =tan(chi): tangent of wake skew angle
ntheta : number of points used for integration
Reference: [1,2]"""
# m = 0
EPSILON_AXIS=1e-7; # relative threshold for using axis formula
vtheta = np.pi/2 + np.linspace(0, 2*np.pi, ntheta)
# Flattening
shape_in=vr.shape
vr = np.asarray(vr).ravel()
vpsi = np.asarray(vpsi).ravel()
vz = np.asarray(vz).ravel()
# Constants of theta
c = 1 + m**2
bz = R * m * np.cos(vtheta)
u_z = np.zeros(vr.shape)
if polar_out:
u_r = np.zeros(vr.shape)
u_psi = np.zeros(vr.shape)
# ---- Loop on all control points to find velocity
for i,(r,psi,z) in enumerate(zip(vr,vpsi,vz)):
# Functions of theta in the integrand
a = R**2 + r** 2 + z**2 - 2*R*r*np.cos(vtheta - psi)
b = 2 * m * R * np.cos(vtheta) - 2 * m * r * np.cos(psi) - 2 * z
ap, bp = R * z * np.sin(vtheta - psi), -R * np.sin(vtheta - psi)
ar, br = R * z * np.cos(vtheta - psi), -R * np.cos(vtheta - psi)
az = R * (R - r * np.cos(vtheta - psi))
D = 2*gamma_t/(4*np.pi)/(np.multiply(np.sqrt(a),(2 * np.sqrt(a * c)+ b)))
# Integrations
u_r[i] = np.trapz((ar * np.sqrt(c)+ np.multiply(br,np.sqrt(a)))*D, vtheta)
u_psi[i] = np.trapz((ap * np.sqrt(c)+ np.multiply(bp,np.sqrt(a)))*D, vtheta)
u_z[i] = np.trapz((az * np.sqrt(c)+ np.multiply(bz,np.sqrt(a)))*D, vtheta)
# Reshaping to desired shape
u_r = u_r.reshape(shape_in)
u_psi = u_psi.reshape(shape_in)
u_z = u_z.reshape(shape_in)
return u_r,u_psi,u_z
else:
bx, by = -R * np.cos(vtheta), -R * np.sin(vtheta)
u_x = np.zeros(vr.shape)
u_y = np.zeros(vr.shape)
# ---- Loop on all control points to find velocity
for i,(r,psi,z) in enumerate(zip(vr,vpsi,vz)):
# Functions of theta in the integrand
a = R**2 + r** 2 + z**2 - 2*R*r*np.cos(vtheta - psi)
b = 2 * m * R * np.cos(vtheta) - 2 * m * r * np.cos(psi) - 2 * z
ax, ay = R * z * np.cos(vtheta), R * z * np.sin(vtheta)
az = R * (R - r * np.cos(vtheta - psi))
D = 2*gamma_t/(4*np.pi)/(np.multiply(np.sqrt(a),(2 * np.sqrt(a * c)+ b)))
# Integrations
u_x[i] = np.trapz((ax * np.sqrt(c)+ np.multiply(bx,np.sqrt(a)))*D, vtheta)
u_y[i] = np.trapz((ay * np.sqrt(c)+ np.multiply(by,np.sqrt(a)))*D, vtheta)
u_z[i] = np.trapz((az * np.sqrt(c)+ np.multiply(bz,np.sqrt(a)))*D, vtheta)
# Reshaping to desired shape
u_x = u_x.reshape(shape_in)
u_y = u_y.reshape(shape_in)
u_z = u_z.reshape(shape_in)
return u_x,u_y,u_z
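
# Minimal usage sketch for svc_tang_u_polar (illustrative values only):
#
#   vr   = np.array([0.5, 1.5])      # control-point radii
#   vpsi = np.zeros_like(vr)         # azimuth angles
#   vz   = np.zeros_like(vr)         # rotor plane, z = 0
#   ux, uy, uz = svc_tang_u_polar(vr, vpsi, vz, gamma_t=-1.0, R=1.0, m=np.tan(0.3))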
def svc_longi_u_polar(vr,vpsi,vz,gamma_l=-1,R=1,m=0,ntheta=180,polar_out=False):
""" Raw function, not intended to be exported.
Induced velocity from a skewed semi infinite cylinder of longitudinal vorticity.
Takes polar coordinates as inputs, returns velocity either in Cartesian (default) or polar.
The cylinder axis is defined by x=m.z, m=tan(chi). The rotor is in the plane z=0.
INPUTS:
vr,vpsi,vz : control points in polar coordinates, may be of any shape
       gamma_l    : longitudinal vorticity of the vortex sheet (circulation per unit of length oriented along zeta); negative for a WT rotating positively along psi
R : radius of cylinder
m =tan(chi): tangent of wake skew angle
ntheta : number of points used for integration
Reference: [1,2]"""
EPSILON_AXIS=1e-7; # relative threshold for using axis formula
vtheta = np.linspace(0,2 * np.pi,ntheta) + np.pi / ntheta
# Flattening, and dimensionless!
shape_in=vr.shape
vr = np.asarray(vr/R).ravel()
vpsi = np.asarray(vpsi).ravel()
vz = np.asarray(vz/R).ravel()
u_z = np.zeros(vr.shape)
if polar_out:
u_r = np.zeros(vr.shape)
u_psi = np.zeros(vr.shape)
for i,(r,psi,z) in enumerate(zip(vr,vpsi,vz)):
Den1 = np.sqrt(1 + r**2 + z**2 - 2*r* np.cos(vtheta - psi))
Den2 = - z + m * np.cos(vtheta) + np.sqrt(1 + m ** 2) * np.sqrt(1 + r ** 2 + z ** 2 - 2 * r * np.cos(vtheta - psi)) - m * r * np.cos(psi)
DenInv = gamma_l/(4*np.pi)/np.multiply(Den1,Den2)
u_r[i] = np.trapz(( - m*z*np.sin(psi) + np.sin(vtheta-psi))*DenInv,vtheta)
u_psi[i] = np.trapz((r - m*z*np.cos(psi) - np.cos(vtheta-psi))*DenInv,vtheta)
u_z[i] = np.trapz(m * (-np.sin(vtheta) + r*np.sin(psi)) *DenInv,vtheta)
# Reshaping to input shape
        u_r   = u_r.reshape(shape_in)
u_psi = u_psi.reshape(shape_in)
u_z = u_z.reshape(shape_in)
return (u_r,u_psi,u_z)
else:
u_x = np.zeros(vr.shape)
u_y = np.zeros(vr.shape)
for i,(r,psi,z) in enumerate(zip(vr,vpsi,vz)):
Den1 = np.sqrt(1 + r**2 + z**2 - 2*r* np.cos(vtheta - psi))
Den2 = - z + m * np.cos(vtheta) + np.sqrt(1 + m ** 2) * np.sqrt(1 + r ** 2 + z ** 2 - 2 * r * np.cos(vtheta - psi)) - m * r * np.cos(psi)
DenInv = gamma_l/(4*np.pi)/np.multiply(Den1,Den2)
u_x[i] = np.trapz( (np.sin(vtheta) - r*np.sin(psi)) *DenInv,vtheta)
u_y[i] = np.trapz((- m*z - np.cos(vtheta) + r*np.cos(psi)) *DenInv,vtheta)
u_z[i] = np.trapz(m * (-np.sin(vtheta) + r*np.sin(psi)) *DenInv,vtheta)
# Reshaping to input shape
u_x = u_x.reshape(shape_in)
u_y = u_y.reshape(shape_in)
u_z = u_z.reshape(shape_in)
return (u_x,u_y,u_z)
def svc_root_u_polar(vr,vpsi,vz,Gamma_r=-1,m=0,polar_out=False):
"""
Induced velocity from a skewed root vortex
Takes polar coordinates as inputs, returns velocity either in Cartesian (default) or polar.
The cylinder axis is defined by x=m.z, m=tan(chi). The rotor is in the plane z=0.
INPUTS:
vr,vpsi,vz : control points in polar coordinates, may be of any shape
Gamma_r : Root vortex circulation, negative for a wind turbine
m =tan(chi): tangent of wake skew angle
Reference: [1,2]"""
EPSILON_AXIS=1e-7; # relative threshold for using axis formula
chi = np.arctan(m)
if Gamma_r==0:
return vr*0,vr*0,vr*0
# Flattening
shape_in=vr.shape
vr = np.asarray(vr).ravel()
vpsi = np.asarray(vpsi).ravel()
vz = np.asarray(vz).ravel()
# --- Computes ux, uy, uz, upsi
u_z = np.zeros(vr.shape)
if (m == 0):
u_psi = np.multiply(Gamma_r/(4*np.pi*vr), (1+vz/np.sqrt(vr** 2 + vz**2)))
u_x = -np.sin(vpsi)*u_psi
u_y = np.cos(vpsi)*u_psi
else:
if (np.max(np.abs(vz)) > 0):
# need to use general formula
u_x = np.zeros(vr.shape)
u_y = np.zeros(vr.shape)
e = np.array([np.sin(chi),0,np.cos(chi)])
for i,(r,psi,z) in enumerate(zip(vr,vpsi,vz)):
u_x[i],u_y[i],u_z[i]= vl_semiinf_u(r*np.cos(psi),r*np.sin(psi),z,e[0],e[1],e[2],Gamma_r,visc_model=0,t=0)
u_psi = - u_x*np.sin(vpsi) + u_y* np.cos(vpsi)
else:
# rotor plane analytical (see Yaw article)
u_psi = np.zeros(vr.shape)
coschi = 1 / np.sqrt(1 + m ** 2)
sinchi = m / np.sqrt(1 + m ** 2)
Iz = vr > (EPSILON_AXIS)
bnIz = np.logical_not(Iz)
u_z [Iz] = np.multiply(Gamma_r/(4*np.pi*vr[Iz]), 1.0/(1-np.cos(vpsi[Iz])*sinchi)*sinchi*np.sin(vpsi[Iz]))
u_psi[Iz] = np.multiply(Gamma_r/(4*np.pi*vr[Iz]), 1.0/(1-np.cos(vpsi[Iz])*sinchi)*coschi)
u_z [bnIz] =0
u_psi[bnIz] =0
u_x = -np.sin(vpsi)*u_psi
u_y = np.cos(vpsi)*u_psi
# Reshaping to input shape
u_z = u_z.reshape(shape_in)
if polar_out:
u_r = u_x * np.cos(vpsi) + u_y * np.sin(vpsi)
u_psi = u_psi.reshape(shape_in)
return (u_r,u_psi,u_z)
else:
u_x = u_x.reshape(shape_in)
u_y = u_y.reshape(shape_in)
return (u_x,u_y,u_z)
def svc_u_polar(vr,vpsi,vz,gamma_t,gamma_l,Gamma_r,R=1,m=0,ntheta=180,polar_out=False):
""" Induced velocities from a skewed semi infinite cylinder with:
- tangential vorticity gamma_t
- longitudinal vorticity gamma_l
- a root vortex, Gamma_r
"""
u1 ,u2 ,u3 = svc_longi_u_polar(vr,vpsi,vz,gamma_l,R=R,m=m,ntheta=ntheta,polar_out=False)
u1t,u2t,u3t = svc_tang_u_polar (vr,vpsi,vz,gamma_t,R=R,m=m,ntheta=ntheta,polar_out=False)
u1 += u1t
u2 += u2t
u3 += u3t
u1t,u2t,u3t = svc_root_u_polar (vr,vpsi,vz,Gamma_r ,m=m, polar_out=False)
u1 += u1t
u2 += u2t
u3 += u3t
return u1,u2,u3
# --------------------------------------------------------------------------------}
# --- Main functions with Cartesian inputs
# --------------------------------------------------------------------------------{
def svc_longi_u(Xcp,Ycp,Zcp,gamma_l=-1,R=1,m=0,ntheta=180,polar_out=False):
""" Induced velocity from a skewed semi infinite cylinder of longitudinal vorticity.
The cylinder axis is defined by x=m.z, m=tan(chi). The rotor is in the plane z=0.
INPUTS:
Xcp,Ycp,Zcp: vector or matrix of control points Cartesian Coordinates
gamma_l : longitudinal vorticity of the vortex sheet (circulation per unit of length oriented along zeta), negative for a WT
R : radius of cylinder
m =tan(chi): tangent of wake skew angle
ntheta : number of points used for integration
Reference: [1,2]"""
vr, vpsi = np.sqrt(Xcp**2+Ycp**2), np.arctan2(Ycp,Xcp) # polar coords
u1,u2,u3=svc_longi_u_polar(vr,vpsi,Zcp,gamma_l,R,m,ntheta,polar_out=polar_out)
return u1,u2,u3 # ux,uy,uz OR ur,upsi,uz
def svc_tang_u(Xcp,Ycp,Zcp,gamma_t=-1,R=1,m=0,ntheta=180,polar_out=False):
""" Induced velocity from a skewed semi infinite cylinder of tangential vorticity.
The cylinder axis is defined by x=m.z, m=tan(chi). The rotor is in the plane z=0.
INPUTS:
Xcp,Ycp,Zcp: vector or matrix of control points Cartesian Coordinates
gamma_t : tangential vorticity of the vortex sheet (circulation per unit of length oriented along psi), negative for a WT
R : radius of cylinder
m =tan(chi): tangent of wake skew angle
ntheta : number of points used for integration
Reference: [1,2]"""
vr, vpsi = np.sqrt(Xcp**2+Ycp**2), np.arctan2(Ycp,Xcp) # polar coords
u1,u2,u3 = svc_tang_u_polar(vr,vpsi,Zcp,gamma_t,R,m,ntheta,polar_out=polar_out)
return u1,u2,u3 # ux,uy,uz OR ur,upsi,uz
def svc_root_u(Xcp,Ycp,Zcp,Gamma_r=-1,m=0,polar_out=False):
""" Induced velocity from a skewed root vortex.
The root vortex axis is defined by x=m.z, m=tan(chi). The rotor is in the plane z=0.
INPUTS:
Xcp,Ycp,Zcp: vector or matrix of control points Cartesian Coordinates
Gamma_r : Root vortex circulation, negative for a wind turbine
m =tan(chi): tangent of wake skew angle
ntheta : number of points used for integration
Reference: [1,2]"""
vr, vpsi = np.sqrt(Xcp**2+Ycp**2), np.arctan2(Ycp,Xcp) # polar coords
u1,u2,u3 = svc_root_u_polar(vr,vpsi,Zcp,Gamma_r,m,polar_out=polar_out)
return u1,u2,u3 # ux,uy,uz OR ur,upsi,uz
def svcs_tang_u(Xcp,Ycp,Zcp,gamma_t,R,m,Xcyl,Ycyl,Zcyl,ntheta=180, Ground=False):
"""
Computes the velocity field for nCyl*nr cylinders, extending along z:
nCyl: number of main cylinders
nr : number of concentric cylinders within a main cylinder
INPUTS:
       Xcp,Ycp,Zcp: cartesian coordinates of control points where the velocity field is to be computed
gamma_t: array of size (nCyl,nr), distribution of gamma for each cylinder as function of radius
R : array of size (nCyl,nr),
m : array of size (nCyl,nr),
Xcyl,Ycyl,Zcyl: array of size nCyl) giving the center of the rotor
Ground: boolean, True if ground effect is to be accounted for
All inputs (except Ground) should be numpy arrays
"""
Xcp=np.asarray(Xcp)
Ycp=np.asarray(Ycp)
Zcp=np.asarray(Zcp)
ux = np.zeros(Xcp.shape)
uy = np.zeros(Xcp.shape)
uz = np.zeros(Xcp.shape)
nCyl,nr = R.shape
print('Tang. (skewed) ',end='')
for i in np.arange(nCyl):
Xcp0,Ycp0,Zcp0=Xcp-Xcyl[i],Ycp-Ycyl[i],Zcp-Zcyl[i]
if Ground:
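            # Ground effect: add an image cylinder mirrored across the ground
            # plane so the wall-normal velocity cancels there.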
YcpMirror = Ycp0+2*Ycyl[i]
Ylist = [Ycp0,YcpMirror]
else:
Ylist = [Ycp0]
for iy,Y in enumerate(Ylist):
for j in np.arange(nr):
if iy==0:
print('.',end='')
else:
print('m',end='')
if np.abs(gamma_t[i,j]) > 0:
ux1,uy1,uz1 = svc_tang_u(Xcp0,Y,Zcp0,gamma_t[i,j],R[i,j],m[i,j],ntheta=ntheta,polar_out=False)
ux = ux + ux1
uy = uy + uy1
uz = uz + uz1
print('')
return ux,uy,uz
def svcs_longi_u(Xcp,Ycp,Zcp,gamma_l,R,m,Xcyl,Ycyl,Zcyl,ntheta=180,Ground=False):
""" See svcs_tang_u """
Xcp=np.asarray(Xcp)
Ycp=np.asarray(Ycp)
Zcp=np.asarray(Zcp)
ux = np.zeros(Xcp.shape)
uy = np.zeros(Xcp.shape)
uz = np.zeros(Xcp.shape)
nCyl,nr = R.shape
print('Longi. (skewed) ',end='')
for i in np.arange(nCyl):
Xcp0,Ycp0,Zcp0=Xcp-Xcyl[i],Ycp-Ycyl[i],Zcp-Zcyl[i]
if Ground:
YcpMirror = Ycp0+2*Ycyl[i]
Ylist = [Ycp0,YcpMirror]
else:
Ylist = [Ycp0]
for iy,Y in enumerate(Ylist):
for j in np.arange(nr):
if iy==0:
print('.',end='')
else:
print('m',end='')
if np.abs(gamma_l[i,j]) > 0:
ux1,uy1,uz1 = svc_longi_u(Xcp0,Ycp0,Zcp0,gamma_l[i,j],R[i,j],m[i,j],ntheta=ntheta,polar_out=False)
ux = ux + ux1
uy = uy + uy1
uz = uz + uz1
print('')
return ux,uy,uz
# --------------------------------------------------------------------------------}
# --- Rewrite of Matlab functions, legacy
# --------------------------------------------------------------------------------{
def fV_Trailed(vr,vpsi,vz,m,gamma_longi,ntheta,nout=7):
""" See Yaw article for notations and coordinate system
Return induced velocity by an infinite number of trailed vortices (semi-infinite lines whose starting points lay on the rotor circle)
"""
u_x,u_y,u_z = svc_longi_u_polar(vr,vpsi,vz,gamma_longi,R=1,m=m,ntheta=ntheta,polar_out=False)
if nout==1:
return u_z
u_zeta,u_xi = skew_components(u_x,u_z,m)
u_r,u_psi = polar_components(u_x,u_y,vpsi)
outputs=(u_z,u_psi,u_x,u_y,u_zeta,u_xi,u_r)
return outputs[:nout]
def fV_Tangential(vr,vpsi,vz,m,gamma_t,ntheta,nout=7):
""" This function is purely for backward compatibility with Matlab scripts"""
u_x,u_y,u_z = svc_tang_u_polar(vr,vpsi,vz,gamma_t,R=1,m=m,ntheta=ntheta,polar_out=False)
if nout==1:
return u_z
u_zeta,u_xi=skew_components(u_x,u_z,m)
u_r,u_psi =polar_components(u_x,u_y,vpsi)
outputs=(u_z,u_psi,u_x,u_y,u_zeta,u_xi,u_r)
return outputs[:nout]
def fV_Root(vr,vpsi,vz, m =0, Gamma_r=-1,nout=1):
""" Return induced velocity by the root vortex
Coordinate system is true polar coordinates, with convention of Yaw article
"""
u_x,u_y,u_z= svc_root_u_polar(vr,vpsi,vz,Gamma_r=Gamma_r,m=m,polar_out=False)
if nout==1:
return u_z
u_zeta,u_xi = skew_components(u_x,u_z,m)
u_r,u_psi = polar_components(u_x,u_y,vpsi)
outputs=(u_z,u_psi,u_x,u_y,u_zeta,u_xi,u_r)
return outputs[:nout]
# --------------------------------------------------------------------------------}
# --- Rotor plane flow expansions
# --------------------------------------------------------------------------------{
def fKxit(vr,m):
""" Returns Kxit according to yaw article . vr is in [0;1], m=tan(chi)"""
EPSILON_AXIS=1e-7; # relative threshold for using axis formula
fOye = 0.5 * (vr + 0.4 * vr ** 3 + 0.4 * vr ** 5)
k2 = ((1 - vr) ** 2) / ((1 + vr) ** 2)
m1 = (np.sqrt(1 + m ** 2) + np.sqrt(vr ** 2 + m ** 2)) / (1 + vr)
m2 = (np.sqrt(1 + m ** 2) - np.sqrt(vr ** 2 + m ** 2)) / (1 + vr)
b1 = m1 ** 2 - 1
b2 = 1 - m2 ** 2
j2 = 1 - k2
kr2 = ellipk(vr ** 2)
Pi1 = ellipticPiCarlson(- b1,j2)
Pi2 = ellipticPiCarlson(b2,j2)
Kxit=np.zeros(vr.shape)
if (m == 0):
k2 = 4 * vr / ((vr + 1) ** 2)
k = np.sqrt(k2)
K = ellipk(k2)
E = ellipe(k2)
b1 = (vr) > (EPSILON_AXIS)
b0 = np.logical_not(b1)
Kxit[b1] = np.multiply(1/(np.pi)*np.sqrt(1.0/vr[b1]),(np.multiply((2 - k2[b1]) / k[b1],K[b1]) - np.multiply(2.0/k[b1],E[b1])))
Kxit[b0] = 0
    else:
        # Use a separate name for the mask so it does not shadow the
        # coefficient b1 = m1**2 - 1 computed above.
        bI = (vr) > (EPSILON_AXIS)
        b0 = np.logical_not(bI)
        Kxit[bI] = np.multiply(2*(1+m**2)*vr[bI]/(m**2*np.pi),kr2[bI]) - np.multiply(np.multiply(vr[bI],(vr[bI] + 1))*np.sqrt(m ** 2 + 1)/(2*m**2*np.pi*np.sqrt(m**2+vr[bI]**2)),(np.multiply((b1[bI]+j2[bI]),Pi1[bI]) + np.multiply((b2[bI]-j2[bI]),Pi2[bI])))
        Kxit[b0] = 0
# See yaw article
chi = np.arctan(m)
vtheta = np.linspace(0,np.pi / 2,1000)
Kxit_num=np.zeros(vr.shape)
for ir in np.arange(len(vr)):
r = vr[ir]
Kxit_num[ir] = 2 * r / np.pi * np.trapz(np.sin(2 * vtheta) ** 2.0 / (np.multiply(np.sqrt((1 + r) ** 2 - 4 * r * np.cos(vtheta) ** 2),((r - np.cos(2 * vtheta)) ** 2 * np.cos(chi) ** 2 + np.sin(2 * vtheta) ** 2))), vtheta)
return Kxit,Kxit_num,fOye
def fKzt(r,m,nout=2):
""" Returns Kzt according to yaw article """
fOye = 0.5 * (r + 0.4 * r ** 3 + 0.4 * r ** 5)
vr = r
Kzt = np.zeros(vr.shape)
Kztnum = np.zeros(vr.shape)
if m == 0:
raise Exception('Not intended for m==0')
k2 = ((1 - r) ** 2) / ((1 + r) ** 2)
m1 = (np.sqrt(1 + m ** 2) + np.sqrt(r ** 2 + m ** 2)) / (1 + r)
m2 = (np.sqrt(1 + m ** 2) - np.sqrt(r ** 2 + m ** 2)) / (1 + r)
b1 = m1 ** 2 - 1
b2 = 1 - m2 ** 2
j2 = 1 - k2
kr2 = ellipk(r ** 2)
Pi1 = ellipticPiCarlson(- b1,j2)
Pi2 = ellipticPiCarlson(b2,j2)
Kzt = np.multiply(2 * np.sqrt(1 + m ** 2) * r / (m * np.pi),kr2) - np.multiply(np.multiply(r,(r + 1)) / (2 * m * np.pi * np.sqrt(m ** 2 + r ** 2)),(np.multiply((b1 + j2),Pi1) + np.multiply((b2 - j2),Pi2)))
# Coleman formula B.5 term 3 and 4 !!!!! Note the minus sign added
vtheta = np.linspace(0,np.pi,1000)
for ir,r in enumerate(vr):
Kztnum[ir] = - 1 / (np.pi) * r * np.sqrt(1 + m ** 2) / m * np.trapz(- 1.0 / (np.sqrt(1 + r ** 2 - 2 * r * np.cos(vtheta))) + np.sqrt(1 - 2 * r * np.cos(vtheta) + r ** 2) / (1 + r ** 2 - 2 * r * np.cos(vtheta) + m ** 2 * np.sin(vtheta) ** 2),vtheta)
if nout==1:
return Kzt
elif nout <=3:
outputs=(Kzt,Kztnum,fOye)
elif nout > 3:
Kztnum2 = np.zeros(vr.shape)
Kztnum3 = np.zeros(vr.shape)
Kztnum4 = np.zeros(vr.shape)
# My formula Alternative form1
vtheta = np.linspace(0,np.pi,1000)
for ir,r in enumerate(vr):
Kztnum2[ir] = r * m * np.sqrt(1 + m ** 2) / np.pi * np.trapz(np.sin(vtheta) ** 2.0 / (np.multiply(np.sqrt(1 + r ** 2 - 2 * r * np.cos(vtheta)),(1 + r ** 2 - 2 * r * np.cos(vtheta) + m ** 2 * np.sin(vtheta) ** 2))),vtheta)
# My formula Alternative form3 (WEIRD RESULTS !!!!!!!!!!!!)
vtheta = np.linspace(0,np.pi / 2,1000)
for ir,r in enumerate(vr):
Kztnum3[ir] = 2 * r * np.sqrt(1 + m ** 2) * m / np.pi * np.trapz(np.sin(2 * vtheta) ** 2.0 / (np.multiply(np.sqrt((1 + r) ** 2 - 4 * r * np.cos(vtheta) ** 2),((1 + r) ** 2 - 4 * r * np.cos(vtheta) ** 2 + m ** 2 * np.sin(2 * vtheta) ** 2))),vtheta)
# My formula Alternative form4
        chi = np.arctan(m)  # m = tan(chi); chi is needed in the integrand below
        vtheta = np.linspace(0,np.pi / 2,1000)
for ir,r in enumerate(vr):
Kztnum4[ir] = 2 * r / np.pi * 1 * np.sin(chi) * np.trapz(np.sin(2 * vtheta) ** 2.0 / (np.multiply(np.sqrt((1 + r) ** 2 - 4 * r * np.cos(vtheta) ** 2),((r - np.cos(2 * vtheta)) ** 2 * np.cos(chi) ** 2 + np.sin(2 * vtheta) ** 2))),vtheta)
outputs=(Kzt,Kztnum,fOye,Kztnum2,Kztnum3,Kztnum4)
return outputs[:nout]
# --------------------------------------------------------------------------------}
# --- TEST
# --------------------------------------------------------------------------------{
class TestSkewedCylinder(unittest.TestCase):
def test_SVC_rotor(self):
""" """
""" See paragraph "Properties on the rotor disk" of [1] """
# data
gamma_t,R,chi = -5, 10, 30*np.pi/180
m=np.tan(chi) # tan(chi)
eps=10**-1 *R
# --- At rotor center (see also next test, stronger)
u_x,u_y,u_z = svc_tang_u(0,0,0,gamma_t,R,m)
u_zeta,u_xi=skew_components(u_x,u_z,m)
uz0=gamma_t/2
np.testing.assert_almost_equal(u_x ,np.tan(chi/2)*uz0 ,decimal=7)
np.testing.assert_almost_equal(u_z ,uz0 ,decimal=7)
np.testing.assert_almost_equal(u_zeta ,uz0 ,decimal=7)
# --- At psi=pi/2 (i.e. x=0), z=0 (Eq 9 from [1]), ux,uz,uzeta,uxi constant!
y=np.linspace(0,R-eps,4)
x=y*0
z=y*0
u_x,u_y,u_z=svc_tang_u(x,y,z,gamma_t,R,m)
u_zeta,u_xi=skew_components(u_x,u_z,m)
uz0=np.asarray([gamma_t/2]*len(x))
np.testing.assert_almost_equal(u_zeta ,uz0 ,decimal=7)
np.testing.assert_almost_equal(u_z ,uz0 ,decimal=7)
np.testing.assert_almost_equal(u_xi/u_zeta,[-np.tan(chi/2)]*len(x),decimal=7)
np.testing.assert_almost_equal(u_x /u_z ,[ np.tan(chi/2)]*len(x),decimal=7)
np.testing.assert_almost_equal(u_x ,uz0*np.tan(chi/2) ,decimal=7)
# --- Component zeta over the entire plane is g_t/2 (Eq 10 from [1])
vR,vPsi = np.meshgrid(np.linspace(0,R-eps,5), np.linspace(0,2*np.pi,12))
x=vR*np.cos(vPsi)
y=vR*np.sin(vPsi)
z=x*0
u_x,u_y,u_z=svc_tang_u(x,y,z,gamma_t,R,m)
u_zeta,u_xi=skew_components(u_x,u_z,m)
uz0=gamma_t/2
np.testing.assert_almost_equal(u_zeta , uz0 ,decimal=5)
# --- Plane y=0 (anti-)symmetry - x,z,zeta,xi: symmetric - y: anti-symmetric
x,y = np.meshgrid(np.linspace(-R/3,R/3,3), [-R/2,R/2] )
z=x*0
u_x,u_y,u_z=svc_tang_u(x,y,z,gamma_t,R,m)
u_zeta,u_xi=skew_components(u_x,u_z,m)
np.testing.assert_almost_equal(u_x [0,:], u_x [1,:])
np.testing.assert_almost_equal(u_z [0,:], u_z [1,:])
np.testing.assert_almost_equal(u_zeta[0,:], u_zeta[1,:])
np.testing.assert_almost_equal(u_xi [0,:], u_xi [1,:])
np.testing.assert_almost_equal(u_y [0,:],-u_y [1,:]) # anti-symmetric
# --- Radial anti-symmetry of components x,y,z about their origin value
r0 = R/2 # cannot do negative r here since definition of r is positive
psi0 = np.linspace(0,np.pi,10)
vPsi = np.array([psi0 , psi0+np.pi] ).T
x=r0*np.cos(vPsi)
y=r0*np.sin(vPsi)
z=x*0
u_x,u_y,u_z =svc_tang_u(x,y,z,gamma_t,R,m)
u_zeta,u_xi=skew_components(u_x,u_z,m)
u_x0,u_y0,u_z0=svc_tang_u(0,0,0,gamma_t,R,m)
u_zeta0,u_xi0=skew_components(u_x0,u_z0,m)
np.testing.assert_almost_equal(u_x [:,0]+u_x [:,1], 2*u_x0 )
np.testing.assert_almost_equal(u_y [:,0]+u_y [:,1], 2*u_y0 )
np.testing.assert_almost_equal(u_z [:,0]+u_z [:,1], 2*u_z0 )
np.testing.assert_almost_equal(u_zeta[:,0]+u_zeta[:,1], 2*u_zeta0)
np.testing.assert_almost_equal(u_xi [:,0]+u_xi [:,1], 2*u_xi0 )
def test_SVC_farwake(self):
""" """
""" See paragraph "Properties on the rotor disk" of [1] """
# data
gamma_t,R,chi = -5, 10, 30*np.pi/180
m=np.tan(chi) # tan(chi)
eps=10**-1 *R
z0=1000*R # Far wake
# --- At rotor center (see also next test, stronger)
#u_x,u_y,u_z=svc_tang_u(0,0,0,gamma_t,R,m)
#uz0=gamma_t/2
#np.testing.assert_almost_equal(u_x ,np.tan(chi/2)*uz0 ,decimal=7)
#np.testing.assert_almost_equal(u_z ,uz0 ,decimal=7)
#np.testing.assert_almost_equal(u_zeta ,uz0 ,decimal=7)
# --- Component zeta over the entire plane is g_t/2 (Eq 10 from [1])
vR,vTheta = np.meshgrid(np.linspace(0,R-eps,5), np.linspace(0,2*np.pi,12))
x=vR*np.cos(vTheta)+z0*m
y=vR*np.sin(vTheta)
z=x*0 + z0
u_x,u_y,u_z=svc_tang_u(x,y,z,gamma_t,R,m)
u_zeta,u_xi=skew_components(u_x,u_z,m)
np.testing.assert_almost_equal(u_zeta , gamma_t , decimal=5)
np.testing.assert_almost_equal(u_xi , -gamma_t*np.tan(chi/2) , decimal=5)
np.testing.assert_almost_equal(u_z , gamma_t , decimal=5)
np.testing.assert_almost_equal(u_x , gamma_t*np.tan(chi/2) , decimal=5)
#print('ux',u_x)
#print('uy',u_y)
#print('uz',u_z)
#print('uzeta',u_zeta)
#print('uxi',u_xi)
# def test_singularities(self):
# # TODO!
#
# def test_regularization(self):
# #TODO
#
# def test_multirotor(self):
# #TODO
def test_SVC_rings(self):
# Test that induction is close to the one obtained from a series of rings
try:
from .VortexRing import rings_u
except:
try:
from wiz.VortexRing import rings_u
except:
from VortexRing import rings_u
# Parameters
chi = 30*np.pi/180
m=np.tan(chi)
gamma_t, R= -1, 10
eps=10**-6 *R
# Parameters for rings
nRings = 1000
z_max = 20*2*R
Zr = np.linspace(0,z_max,nRings)
dzeta = (Zr[1]-Zr[0])/np.cos(chi)
vGamma_r = Zr*0 + gamma_t*dzeta
vR_r = Zr*0 + R
Xr = m*Zr
Yr = 0*Zr
def compare(x,y,z,dec):
ux,uy,uz = svc_tang_u(x,y,z,gamma_t,R,m, polar_out=False)
ux_r,uy_r,uz_r = rings_u(x,y,z,vGamma_r,vR_r,Xr,Yr,Zr,polar_out = False)
np.testing.assert_almost_equal(ux,ux_r,decimal=dec)
np.testing.assert_almost_equal(uy,uy_r,decimal=dec)
np.testing.assert_almost_equal(uz,uz_r,decimal=dec)
return ux,uy,uz,ux_r,uy_r,uz_r
# --- test on rotor
x0=np.linspace(-2*R,2*R,40)
x,y,z=x0,x0*0,x0*0
b=np.abs(np.sqrt((x-z*m)**2)-R)>0.1*R
x,y,z=x[b],y[b],z[b]
ux,uy,uz,ux_r,uy_r,uz_r=compare(x,y,z,1)
# --- test at -R downstream
x,y,z=x0,x0*0,x0*0-R
b=np.abs(np.sqrt((x-z*m)**2)-R)>0.1*R
x,y,z=x[b],y[b],z[b]
ux,uy,uz,ux_r,uy_r,uz_r=compare(x,y,z,2)
# --- test at +R upstream
x,y,z=x0,x0*0,x0*0+R
b=np.abs(np.sqrt((x-z*m)**2)-R)>0.1*R
x,y,z=x[b],y[b],z[b]
ux,uy,uz,ux_r,uy_r,uz_r=compare(x,y,z,2)
#import matplotlib.pyplot as plt
#plt.figure()
#plt.plot(x,ux)
#plt.plot(x,ux_r)
#plt.figure()
#plt.plot(x,uy)
#plt.plot(x,uy_r)
#plt.figure()
#plt.plot(x,uz)
#plt.plot(x,uz_r)
#plt.show()
if __name__ == "__main__":
# TestCylinder().test_singularities()
unittest.main()
|
py | 1a44206e5e27954ec72c4a90b4f89be2b9bc6414 | arp_table = [('10.220.88.1', '0062.ec29.70fe'),
('10.220.88.20', 'c89c.1dea.0eb6'),
('10.220.88.21', '1c6a.7aaf.576c'),
('10.220.88.28', '5254.aba8.9aea'),
('10.220.88.29', '5254.abbe.5b7b'),
('10.220.88.30', '5254.ab71.e119'),
('10.220.88.32', '5254.abc7.26aa'),
('10.220.88.33', '5254.ab3a.8d26'),
('10.220.88.35', '5254.abfb.af12'),
('10.220.88.37', '0001.00ff.0001'),
('10.220.88.38', '0002.00ff.0001'),
('10.220.88.39', '6464.9be8.08c8'),
('10.220.88.40', '001c.c4bf.826a'),
('10.220.88.41', '001b.7873.5634')]
for ip_addr, mac_addr in arp_table:
mac_addr = mac_addr.upper()
mac_addr = mac_addr.split('.')
mac_addr = "".join(mac_addr)
    # Process two hex digits at a time
new_mac = []
while len(mac_addr) > 0:
two_digits = mac_addr[:2]
mac_addr = mac_addr[2:]
new_mac.append(two_digits)
    # Join the digit pairs back together with colons
new_mac = ":".join(new_mac)
print(new_mac)
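# Example: '0062.ec29.70fe' -> '00:62:EC:29:70:FE'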
|
py | 1a4420b9f8bc38c30271a7b81f14c615c051fb96 | # Except for the PyTorch parts, the content of this file is copied from https://github.com/abisee/pointer-generator/blob/master/
from __future__ import unicode_literals, print_function, division
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import os
import time
import argparse
from datetime import datetime
import torch
from torch.autograd import Variable
import pandas as pd
from tqdm import tqdm
from rouge import Rouge
from data_util.batcher import Batcher
from data_util.data import Vocab
from data_util import data, config
from model import Model
from data_util.utils import write_for_rouge
from train_util import get_input_from_batch
import warnings
warnings.filterwarnings("ignore", category=UserWarning)
use_cuda = config.use_gpu and torch.cuda.is_available()
class Beam(object):
def __init__(self, tokens, log_probs, state, context, coverage):
self.tokens = tokens
self.log_probs = log_probs
self.state = state
self.context = context
self.coverage = coverage
def extend(self, token, log_prob, state, context, coverage):
return Beam(tokens = self.tokens + [token],
log_probs = self.log_probs + [log_prob],
state = state,
context = context,
coverage = coverage)
@property
def latest_token(self):
return self.tokens[-1]
@property
def avg_log_prob(self):
return sum(self.log_probs) / len(self.tokens)
class BeamSearch(object):
def __init__(self, model_file_path, data_folder, log_file_id):
# model_name = os.path.basename(model_file_path)
self._decode_dir = os.path.join(config.log_root, 'decode_%s' % (log_file_id))
self._rouge_ref_dir = os.path.join(self._decode_dir, 'rouge_ref')
self._rouge_dec_dir = os.path.join(self._decode_dir, 'rouge_dec_dir')
for p in [self._decode_dir, self._rouge_ref_dir, self._rouge_dec_dir]:
if not os.path.exists(p):
os.mkdir(p)
dp = config.get_data_paths(data_folder)
self.vocab = Vocab(dp['vocab'], config.vocab_size)
self.batcher = Batcher(dp['decode'], self.vocab, mode='decode', batch_size=config.beam_size, single_pass=True)
time.sleep(15)
self.model = Model(model_file_path, is_eval=True)
def sort_beams(self, beams):
return sorted(beams, key=lambda h: h.avg_log_prob, reverse=True)
def decode(self, log_file_id):
start = time.time()
counter = 0
batch = self.batcher.next_batch()
while batch is not None:
# Run beam search to get best Hypothesis
best_summary = self.beam_search(batch)
# Extract the output ids from the hypothesis and convert back to words
output_ids = [int(t) for t in best_summary.tokens[1:]]
decoded_words = data.outputids2words(output_ids, self.vocab,
(batch.art_oovs[0] if config.pointer_gen else None))
# Remove the [STOP] token from decoded_words, if necessary
try:
fst_stop_idx = decoded_words.index(data.STOP_DECODING)
decoded_words = decoded_words[:fst_stop_idx]
except ValueError:
                pass  # no [STOP] token was produced
original_abstract_sents = batch.original_abstracts_sents[0]
write_for_rouge(original_abstract_sents, decoded_words, counter,
self._rouge_ref_dir, self._rouge_dec_dir)
counter += 1
if counter % config.print_interval == 0:
print('Examples %d-%d decoded in %d sec'%(counter-config.print_interval, counter, time.time() - start))
start = time.time()
batch = self.batcher.next_batch()
print("Decoder has finished reading dataset for single pass.")
print("Now starting ROUGE eval...")
rouge_1_df, rouge_2_df, rouge_l_df = self.rouge_eval(self._rouge_dec_dir, self._rouge_ref_dir)
self.rouge_save(log_file_id, rouge_1_df, rouge_2_df, rouge_l_df)
def beam_search(self, batch):
#batch should have only one example
enc_batch, enc_padding_mask, enc_lens, enc_batch_extend_vocab, extra_zeros, c_t_0, coverage_t_0 = \
get_input_from_batch(batch, use_cuda)
encoder_outputs, encoder_feature, encoder_hidden = self.model.encoder(enc_batch, enc_lens)
s_t_0 = self.model.reduce_state(encoder_hidden)
dec_h, dec_c = s_t_0 # 1 x 2*hidden_size
dec_h = dec_h.squeeze()
dec_c = dec_c.squeeze()
#decoder batch preparation, it has beam_size example initially everything is repeated
beams = [Beam(tokens=[self.vocab.word2id(data.START_DECODING)],
log_probs=[0.0],
state=(dec_h[0], dec_c[0]),
context = c_t_0[0],
coverage=(coverage_t_0[0] if config.is_coverage else None))
for _ in range(config.beam_size)]
results = []
steps = 0
while steps < config.max_dec_steps and len(results) < config.beam_size:
latest_tokens = [h.latest_token for h in beams]
latest_tokens = [t if t < self.vocab.size() else self.vocab.word2id(data.UNKNOWN_TOKEN) \
for t in latest_tokens]
y_t_1 = Variable(torch.LongTensor(latest_tokens))
if use_cuda:
y_t_1 = y_t_1.cuda()
all_state_h =[]
all_state_c = []
all_context = []
for h in beams:
state_h, state_c = h.state
all_state_h.append(state_h)
all_state_c.append(state_c)
all_context.append(h.context)
s_t_1 = (torch.stack(all_state_h, 0).unsqueeze(0), torch.stack(all_state_c, 0).unsqueeze(0))
c_t_1 = torch.stack(all_context, 0)
coverage_t_1 = None
if config.is_coverage:
all_coverage = []
for h in beams:
all_coverage.append(h.coverage)
coverage_t_1 = torch.stack(all_coverage, 0)
final_dist, s_t, c_t, attn_dist, p_gen, coverage_t = self.model.decoder(y_t_1, s_t_1,
encoder_outputs, encoder_feature, enc_padding_mask, c_t_1,
extra_zeros, enc_batch_extend_vocab, coverage_t_1, steps)
log_probs = torch.log(final_dist)
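            # Keep 2*beam_size candidates per hypothesis so enough beams
            # survive after those ending in [STOP] are moved to `results`.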
topk_log_probs, topk_ids = torch.topk(log_probs, config.beam_size * 2)
dec_h, dec_c = s_t
dec_h = dec_h.squeeze()
dec_c = dec_c.squeeze()
all_beams = []
num_orig_beams = 1 if steps == 0 else len(beams)
for i in range(num_orig_beams):
h = beams[i]
state_i = (dec_h[i], dec_c[i])
context_i = c_t[i]
coverage_i = (coverage_t[i] if config.is_coverage else None)
for j in range(config.beam_size * 2): # for each of the top 2*beam_size hyps:
new_beam = h.extend(token=topk_ids[i, j].item(),
log_prob=topk_log_probs[i, j].item(),
state=state_i,
context=context_i,
coverage=coverage_i)
all_beams.append(new_beam)
beams = []
for h in self.sort_beams(all_beams):
if h.latest_token == self.vocab.word2id(data.STOP_DECODING):
if steps >= config.min_dec_steps:
results.append(h)
else:
beams.append(h)
if len(beams) == config.beam_size or len(results) == config.beam_size:
break
steps += 1
if len(results) == 0:
results = beams
beams_sorted = self.sort_beams(results)
return beams_sorted[0]
def rouge_eval(self, decoded_dir, ref_dir):
rouge = Rouge()
columns=['F1','Recall','Precision']
rouge_l_df = pd.DataFrame(columns=columns)
rouge_1_df = pd.DataFrame(columns=columns)
rouge_2_df = pd.DataFrame(columns=columns)
not_found_list = []
file_count = len(os.listdir(ref_dir))
print('Rouge Evaluation started for {} files..'.format(file_count))
for i in tqdm (range(file_count), desc='Running'):
index = str(i).zfill(6)
dec_file = decoded_dir + "/" + index + '_decoded.txt'
ref_file = ref_dir + "/" + index + '_reference.txt'
if os.path.isfile(dec_file) and os.path.isfile(ref_file):
with open(dec_file, 'r') as file:
decoded = file.read().rstrip().decode("utf8")
with open(ref_file, 'r') as file:
reference = file.read().rstrip().decode("utf8")
# If somehow reference file is empty (a rare case bug, cause of which is undetected) put a placeholder.
if reference == '':
reference = '[Input can not be found]'
score = rouge.get_scores(decoded, reference)[0]
rouge_l_df.loc[i] = [score['rouge-l']['f'], score['rouge-l']['r'], score['rouge-l']['p']]
rouge_1_df.loc[i] = [score['rouge-1']['f'], score['rouge-1']['r'], score['rouge-1']['p']]
rouge_2_df.loc[i] = [score['rouge-2']['f'], score['rouge-2']['r'], score['rouge-2']['p']]
else:
not_found_list.append((dec_file, ref_file))
if len(not_found_list) != 0:
print('{} files could not be identified.'.format(len(not_found_list)))
#print(not_found_list)
print('Evaluation Finished..')
return [rouge_1_df, rouge_2_df, rouge_l_df]
def rouge_save(self, save_dir, rouge_1_df, rouge_2_df, rouge_l_df):
save_dir = "logs/decode_"+save_dir
if not os.path.exists(save_dir+'/rouge_scores/'):
os.makedirs(save_dir+'/rouge_scores/')
rouge_l_df.to_csv(save_dir+'/rouge_scores/rouge_l.csv')
rouge_1_df.to_csv(save_dir+'/rouge_scores/rouge_1.csv')
rouge_2_df.to_csv(save_dir+'/rouge_scores/rouge_2.csv')
print('Rouge scores saved..')
with open(save_dir+'/rouge_scores/summary.txt', 'w') as f:
for df, rouge in zip([rouge_1_df, rouge_2_df,rouge_l_df], ['ROUGE-1','ROUGE-2','ROUGE-L']):
print(rouge)
f.write(rouge+"\n")
for metric in rouge_l_df.columns:
line = "{} Mean {}".format(round(df[metric].mean(),4), metric)
print(line)
f.write(line+"\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Decode script")
parser.add_argument("-m",
dest="model_file_path",
required=False,
default=None,
help="Model file for retraining (default: None).")
parser.add_argument("-d",
dest="data_folder",
required=True,
default=None,
help="Dataset name 'data_T50', 'cnn' or 'movie_quotes' (default: None).")
parser.add_argument("-l",
dest="log_file_id",
required=False,
default=datetime.now().strftime("%Y%m%d_%H%M%S"),
help="Postfix for decode log file (default: date_time).")
args = parser.parse_args()
beam_Search_processor = BeamSearch(args.model_file_path, args.data_folder, args.log_file_id)
beam_Search_processor.decode(args.log_file_id)
# rouge_1_df, rouge_2_df, rouge_l_df = beam_Search_processor.rouge_eval(beam_Search_processor._rouge_dec_dir, beam_Search_processor._rouge_ref_dir)
# beam_Search_processor.rouge_save(args.log_file_id, rouge_1_df, rouge_2_df, rouge_l_df)
|
py | 1a44229435a1ea457646a8e31632c4925a097e78 | #!/usr/bin/env python
from setuptools import setup, find_packages
desc = ''
with open('README.rst') as f:
desc = f.read()
setup(
name='wheelify',
version='0.1.4',
description=('Simple manylinux wheel builder utility'),
long_description=desc,
url='https://github.com/jmvrbanac/wheelify',
author='John Vrbanac',
author_email='[email protected]',
license='Apache v2',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3.6',
],
keywords='manylinux wheel builder',
packages=find_packages(exclude=['contrib', 'docs', 'test*']),
install_requires=[],
package_data={},
data_files=[],
entry_points={
'console_scripts': [
'wheelify = wheelify.app:main'
],
},
)
|
py | 1a44238cd3a093439d5e8677a4698514c736789d | from setuptools import setup
from setuptools import find_packages
import os
this_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='megnet',
version='0.3.5',
description='MatErials Graph Networks for machine learning of molecules and crystals.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Chi Chen',
author_email='[email protected]',
download_url='https://github.com/materialsvirtuallab/megnet',
license='BSD',
install_requires=['keras', 'numpy', 'tensorflow', "scikit-learn",
'pymatgen', 'monty'],
extras_require={
'model_saving': ['h5py'],
'molecules': ['openbabel', 'rdkit']
},
packages=find_packages(),
package_data={
"megnet": ["*.json", "*.md"]
},
keywords=["materials", "science", "machine", "learning", "deep", "graph", "networks", "neural"],
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Software Development :: Libraries :: Python Modules"
],
)
|
py | 1a4423c69e6822aace3b64f48bbf95cafab36bae | """Message Flags class."""
import logging
import binascii
from insteonplm.constants import (MESSAGE_FLAG_EXTENDED_0X10,
MESSAGE_TYPE_ALL_LINK_BROADCAST,
MESSAGE_TYPE_ALL_LINK_CLEANUP,
MESSAGE_TYPE_ALL_LINK_CLEANUP_ACK,
MESSAGE_TYPE_ALL_LINK_CLEANUP_NAK,
MESSAGE_TYPE_BROADCAST_MESSAGE,
MESSAGE_TYPE_DIRECT_MESSAGE_ACK,
MESSAGE_TYPE_DIRECT_MESSAGE_NAK)
_LOGGER = logging.getLogger(__name__)
class MessageFlags():
"""Message Flags class use in Standard and Extended messages."""
def __init__(self, flags=0x00):
"""Init the MessageFlags class."""
self._messageType = None
self._extended = None
self._hopsLeft = None
self._hopsMax = None
if flags is not None:
self._set_properties(flags)
def __repr__(self):
"""Representation of the message flags."""
return self.hex
def __str__(self):
"""Return a string representation of message flags."""
return self.hex
def __eq__(self, other):
"""Test for equality."""
if hasattr(other, 'messageType'):
is_eq = self._messageType == other.messageType
is_eq = is_eq and self._extended == other.extended
return is_eq
return False
def __ne__(self, other):
"""Test for not equals."""
if hasattr(other, 'messageType'):
return not self.__eq__(other)
return True
def matches_pattern(self, other):
"""Test if current message match a patterns or template."""
if hasattr(other, 'messageType'):
messageTypeIsEqual = False
if self.messageType is None or other.messageType is None:
messageTypeIsEqual = True
else:
messageTypeIsEqual = (self.messageType == other.messageType)
extendedIsEqual = False
if self.extended is None or other.extended is None:
extendedIsEqual = True
else:
extendedIsEqual = (self.extended == other.extended)
return messageTypeIsEqual and extendedIsEqual
return False
@classmethod
def get_properties(cls):
"""Get all properties of the MessageFlags class."""
property_names = [p for p in dir(cls)
if isinstance(getattr(cls, p), property)]
return property_names
@property
def isBroadcast(self):
"""Test if the message is a broadcast message type."""
return (self._messageType & MESSAGE_TYPE_BROADCAST_MESSAGE ==
MESSAGE_TYPE_BROADCAST_MESSAGE)
@property
def isDirect(self):
"""Test if the message is a direct message type."""
direct = (self._messageType == 0x00)
if self.isDirectACK or self.isDirectNAK:
direct = True
return direct
@property
def isDirectACK(self):
"""Test if the message is a direct ACK message type."""
return self._messageType == MESSAGE_TYPE_DIRECT_MESSAGE_ACK
@property
def isDirectNAK(self):
"""Test if the message is a direct NAK message type."""
return self._messageType == MESSAGE_TYPE_DIRECT_MESSAGE_NAK
@property
def isAllLinkBroadcast(self):
"""Test if the message is an ALl-Link broadcast message type."""
return self._messageType == MESSAGE_TYPE_ALL_LINK_BROADCAST
@property
def isAllLinkCleanup(self):
"""Test if the message is a All-Link cleanup message type."""
return self._messageType == MESSAGE_TYPE_ALL_LINK_CLEANUP
@property
def isAllLinkCleanupACK(self):
"""Test if the message is a All-LInk cleanup ACK message type."""
return self._messageType == MESSAGE_TYPE_ALL_LINK_CLEANUP_ACK
@property
def isAllLinkCleanupNAK(self):
"""Test if the message is a All-Link cleanup NAK message type."""
return self._messageType == MESSAGE_TYPE_ALL_LINK_CLEANUP_NAK
@property
def isExtended(self):
"""Test if the message is an extended message type."""
return self._extended == 1
@property
def hopsLeft(self):
"""Return the number of hops left in message the trasport."""
return self._hopsLeft
@property
def hopsMax(self):
"""Return the maximum number of hops allowed for this message."""
return self._hopsMax
@hopsMax.setter
def hopsMax(self, val):
"""Set the maximum number of hops allowed for this message."""
self._hopsMax = val
@property
def messageType(self):
"""Return the message type."""
return self._messageType
@messageType.setter
def messageType(self, val):
"""Set the message type."""
if val in range(0, 8):
self._messageType = val
else:
raise ValueError
@property
def extended(self):
"""Return the extended flag."""
return self._extended
@extended.setter
def extended(self, val):
"""Set the extended flag."""
if val in [None, 0, 1]:
self._extended = val
else:
raise ValueError
# pylint: disable=protected-access
@classmethod
def create(cls, messageType, extended, hopsleft=3, hopsmax=3):
"""Create message flags.
messageType: integter 0 to 7:
MESSAGE_TYPE_DIRECT_MESSAGE = 0
MESSAGE_TYPE_DIRECT_MESSAGE_ACK = 1
MESSAGE_TYPE_ALL_LINK_CLEANUP = 2
MESSAGE_TYPE_ALL_LINK_CLEANUP_ACK = 3
MESSAGE_TYPE_BROADCAST_MESSAGE = 4
MESSAGE_TYPE_DIRECT_MESSAGE_NAK = 5
MESSAGE_TYPE_ALL_LINK_BROADCAST = 6
MESSAGE_TYPE_ALL_LINK_CLEANUP_NAK = 7
extended: 1 for extended, 0 for standard
hopsleft: int 0 - 3
hopsmax: int 0 - 3
"""
flags = MessageFlags(None)
if messageType < 8:
flags._messageType = messageType
else:
flags._messageType = messageType >> 5
if extended in [0, 1, True, False]:
if extended:
flags._extended = 1
else:
flags._extended = 0
else:
flags._extended = extended >> 4
flags._hopsLeft = hopsleft
flags._hopsMax = hopsmax
return flags
@classmethod
def template(cls, messageType=None, extended=None,
hopsleft=None, hopsmax=None):
"""Create message flags template.
messageType: integter 0 to 7 or None:
MESSAGE_TYPE_DIRECT_MESSAGE = 0
MESSAGE_TYPE_DIRECT_MESSAGE_ACK = 1
MESSAGE_TYPE_ALL_LINK_CLEANUP = 2
MESSAGE_TYPE_ALL_LINK_CLEANUP_ACK = 3
MESSAGE_TYPE_BROADCAST_MESSAGE = 4
MESSAGE_TYPE_DIRECT_MESSAGE_NAK = 5
MESSAGE_TYPE_ALL_LINK_BROADCAST = 6
MESSAGE_TYPE_ALL_LINK_CLEANUP_NAK = 7
extended: 1 for extended, 0 for standard or None
hopsleft: int 0 - 3
hopsmax: int 0 - 3
"""
flags = MessageFlags(None)
if messageType is None:
flags._messageType = None
elif messageType < 8:
flags._messageType = messageType
else:
flags._messageType = messageType >> 5
if extended is None:
flags._extended = None
elif extended in [0, 1, True, False]:
if extended:
flags._extended = 1
else:
flags._extended = 0
else:
flags._extended = extended >> 4
flags._hopsLeft = hopsleft
flags._hopsMax = hopsmax
return flags
@property
def bytes(self):
"""Return a byte representation of the message flags."""
flagByte = 0x00
messageType = 0
if self._messageType is not None:
messageType = self._messageType << 5
extendedBit = 0 if self._extended is None else self._extended << 4
hopsMax = 0 if self._hopsMax is None else self._hopsMax
hopsLeft = 0 if self._hopsLeft is None else (self._hopsLeft << 2)
flagByte = flagByte | messageType | extendedBit | hopsLeft | hopsMax
return bytes([flagByte])
@property
def hex(self):
"""Return a hexadecimal representation of the message flags."""
return binascii.hexlify(self.bytes).decode()
# pylint: disable=no-self-use
def _normalize(self, flags):
"""Take any format of flags and turn it into a hex string."""
norm = None
if isinstance(flags, MessageFlags):
norm = flags.bytes
elif isinstance(flags, bytearray):
norm = binascii.hexlify(flags)
elif isinstance(flags, int):
norm = bytes([flags])
elif isinstance(flags, bytes):
norm = binascii.hexlify(flags)
elif isinstance(flags, str):
flags = flags[0:2]
norm = binascii.hexlify(binascii.unhexlify(flags.lower()))
elif flags is None:
norm = None
else:
_LOGGER.warning('MessageFlags with unknown type %s: %r',
type(flags), flags)
return norm
def _set_properties(self, flags):
"""Set the properties of the message flags based on a byte input."""
flagByte = self._normalize(flags)
if flagByte is not None:
self._messageType = (flagByte[0] & 0xe0) >> 5
self._extended = (flagByte[0] & MESSAGE_FLAG_EXTENDED_0X10) >> 4
self._hopsLeft = (flagByte[0] & 0x0c) >> 2
self._hopsMax = flagByte[0] & 0x03
else:
self._messageType = None
self._extended = None
self._hopsLeft = None
self._hopsMax = None
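

# Usage sketch (illustrative): an extended direct-ACK message with the default
# hops (hopsleft=3, hopsmax=3) encodes to the flag byte 0x3f.
#
#   flags = MessageFlags.create(MESSAGE_TYPE_DIRECT_MESSAGE_ACK, extended=True)
#   assert flags.hex == '3f'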
|
py | 1a4424f0435f4415dd6862dfa0d185238ac13bb0 | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import itertools
import numpy as np
import pytest
import cirq
from cirq.protocols.act_on_protocol_test import DummyActOnArgs
from cirq.testing import (
EqualsTester,
assert_allclose_up_to_global_phase,
)
_bools = (False, True)
_paulis = (cirq.X, cirq.Y, cirq.Z)
def _assert_not_mirror(gate) -> None:
trans_x = gate.transform(cirq.X)
trans_y = gate.transform(cirq.Y)
trans_z = gate.transform(cirq.Z)
right_handed = (
trans_x.flip ^ trans_y.flip ^ trans_z.flip ^ (trans_x.to.relative_index(trans_y.to) != 1)
)
assert right_handed, 'Mirrors'
def _assert_no_collision(gate) -> None:
trans_x = gate.transform(cirq.X)
trans_y = gate.transform(cirq.Y)
trans_z = gate.transform(cirq.Z)
assert trans_x.to != trans_y.to, 'Collision'
assert trans_y.to != trans_z.to, 'Collision'
assert trans_z.to != trans_x.to, 'Collision'
def _all_rotations():
for (
pauli,
flip,
) in itertools.product(_paulis, _bools):
yield cirq.PauliTransform(pauli, flip)
def _all_rotation_pairs():
for px, flip_x, pz, flip_z in itertools.product(_paulis, _bools, _paulis, _bools):
if px == pz:
continue
yield cirq.PauliTransform(px, flip_x), cirq.PauliTransform(pz, flip_z)
def _all_clifford_gates():
for trans_x, trans_z in _all_rotation_pairs():
yield cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z)
@pytest.mark.parametrize('pauli,flip_x,flip_z', itertools.product(_paulis, _bools, _bools))
def test_init_value_error(pauli, flip_x, flip_z):
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_xz_map((pauli, flip_x), (pauli, flip_z))
@pytest.mark.parametrize('trans_x,trans_z', _all_rotation_pairs())
def test_init_from_xz(trans_x, trans_z):
gate = cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z)
assert gate.transform(cirq.X) == trans_x
assert gate.transform(cirq.Z) == trans_z
_assert_not_mirror(gate)
_assert_no_collision(gate)
@pytest.mark.parametrize(
'trans1,trans2,from1',
(
(trans1, trans2, from1)
for trans1, trans2, from1 in itertools.product(_all_rotations(), _all_rotations(), _paulis)
if trans1.to != trans2.to
),
)
def test_init_from_double_map_vs_kwargs(trans1, trans2, from1):
from2 = cirq.Pauli.by_relative_index(from1, 1)
from1_str, from2_str = (str(frm).lower() + '_to' for frm in (from1, from2))
gate_kw = cirq.SingleQubitCliffordGate.from_double_map(**{from1_str: trans1, from2_str: trans2})
gate_map = cirq.SingleQubitCliffordGate.from_double_map({from1: trans1, from2: trans2})
# Test initializes the same gate
assert gate_kw == gate_map
# Test initializes what was expected
assert gate_map.transform(from1) == trans1
assert gate_map.transform(from2) == trans2
_assert_not_mirror(gate_map)
_assert_no_collision(gate_map)
@pytest.mark.parametrize(
'trans1,from1',
((trans1, from1) for trans1, from1 in itertools.product(_all_rotations(), _paulis)),
)
def test_init_from_double_invalid(trans1, from1):
from2 = cirq.Pauli.by_relative_index(from1, 1)
# Test throws on invalid arguments
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_double_map({from1: trans1, from2: trans1})
@pytest.mark.parametrize('trans,frm', itertools.product(_all_rotations(), _paulis))
def test_init_from_single_map_vs_kwargs(trans, frm):
from_str = str(frm).lower() + '_to'
# pylint: disable=unexpected-keyword-arg
gate_kw = cirq.SingleQubitCliffordGate.from_single_map(**{from_str: trans})
gate_map = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
assert gate_kw == gate_map
@pytest.mark.parametrize(
'trans,frm',
(
(trans, frm)
for trans, frm in itertools.product(_all_rotations(), _paulis)
if trans.to != frm
),
)
def test_init_90rot_from_single(trans, frm):
gate = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
assert gate.transform(frm) == trans
_assert_not_mirror(gate)
_assert_no_collision(gate)
# Check that it decomposes to one gate
assert len(gate.decompose_rotation()) == 1
# Check that this is a 90 degree rotation gate
assert (
gate.merged_with(gate).merged_with(gate).merged_with(gate) == cirq.SingleQubitCliffordGate.I
)
# Check that flipping the transform produces the inverse rotation
trans_rev = cirq.PauliTransform(trans.to, not trans.flip)
gate_rev = cirq.SingleQubitCliffordGate.from_single_map({frm: trans_rev})
assert gate ** -1 == gate_rev
@pytest.mark.parametrize(
'trans,frm',
(
(trans, frm)
for trans, frm in itertools.product(_all_rotations(), _paulis)
if trans.to == frm and trans.flip
),
)
def test_init_180rot_from_single(trans, frm):
gate = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
assert gate.transform(frm) == trans
_assert_not_mirror(gate)
_assert_no_collision(gate)
# Check that it decomposes to one gate
assert len(gate.decompose_rotation()) == 1
# Check that this is a 180 degree rotation gate
assert gate.merged_with(gate) == cirq.SingleQubitCliffordGate.I
@pytest.mark.parametrize(
'trans,frm',
(
(trans, frm)
for trans, frm in itertools.product(_all_rotations(), _paulis)
if trans.to == frm and not trans.flip
),
)
def test_init_ident_from_single(trans, frm):
gate = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
assert gate.transform(frm) == trans
_assert_not_mirror(gate)
_assert_no_collision(gate)
# Check that it decomposes to zero gates
assert len(gate.decompose_rotation()) == 0
# Check that this is an identity gate
assert gate == cirq.SingleQubitCliffordGate.I
@pytest.mark.parametrize(
'pauli,sqrt,expected',
(
(cirq.X, False, cirq.SingleQubitCliffordGate.X),
(cirq.Y, False, cirq.SingleQubitCliffordGate.Y),
(cirq.Z, False, cirq.SingleQubitCliffordGate.Z),
(cirq.X, True, cirq.SingleQubitCliffordGate.X_sqrt),
(cirq.Y, True, cirq.SingleQubitCliffordGate.Y_sqrt),
(cirq.Z, True, cirq.SingleQubitCliffordGate.Z_sqrt),
),
)
def test_init_from_pauli(pauli, sqrt, expected):
gate = cirq.SingleQubitCliffordGate.from_pauli(pauli, sqrt=sqrt)
assert gate == expected
def test_pow():
assert cirq.SingleQubitCliffordGate.X ** -1 == cirq.SingleQubitCliffordGate.X
assert cirq.SingleQubitCliffordGate.H ** -1 == cirq.SingleQubitCliffordGate.H
assert cirq.SingleQubitCliffordGate.X_sqrt == cirq.SingleQubitCliffordGate.X ** 0.5
assert cirq.SingleQubitCliffordGate.Y_sqrt == cirq.SingleQubitCliffordGate.Y ** 0.5
assert cirq.SingleQubitCliffordGate.Z_sqrt == cirq.SingleQubitCliffordGate.Z ** 0.5
assert cirq.SingleQubitCliffordGate.X_nsqrt == cirq.SingleQubitCliffordGate.X ** -0.5
assert cirq.SingleQubitCliffordGate.Y_nsqrt == cirq.SingleQubitCliffordGate.Y ** -0.5
assert cirq.SingleQubitCliffordGate.Z_nsqrt == cirq.SingleQubitCliffordGate.Z ** -0.5
assert cirq.SingleQubitCliffordGate.X_sqrt ** -1 == cirq.SingleQubitCliffordGate.X_nsqrt
assert cirq.inverse(cirq.SingleQubitCliffordGate.X_nsqrt) == (
cirq.SingleQubitCliffordGate.X_sqrt
)
with pytest.raises(TypeError):
_ = cirq.SingleQubitCliffordGate.Z ** 0.25
def test_init_from_quarter_turns():
eq = cirq.testing.EqualsTester()
eq.add_equality_group(
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 0),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 0),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 0),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 4),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 4),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 4),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 8),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 8),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 8),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, -4),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, -4),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, -4),
)
eq.add_equality_group(
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 1),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 5),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 9),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, -3),
)
eq.add_equality_group(
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 1),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 5),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 9),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, -3),
)
eq.add_equality_group(
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 1),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 5),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 9),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, -3),
)
eq.add_equality_group(
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 2),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 6),
)
eq.add_equality_group(
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 3),
cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 7),
)
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_init_from_quarter_turns_reconstruct(gate):
new_gate = functools.reduce(
cirq.SingleQubitCliffordGate.merged_with,
(
cirq.SingleQubitCliffordGate.from_quarter_turns(pauli, qt)
for pauli, qt in gate.decompose_rotation()
),
cirq.SingleQubitCliffordGate.I,
)
assert gate == new_gate
def test_init_invalid():
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_single_map()
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_single_map({})
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_single_map(
{cirq.X: (cirq.X, False)}, y_to=(cirq.Y, False)
)
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_single_map(
{cirq.X: (cirq.X, False), cirq.Y: (cirq.Y, False)}
)
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_double_map()
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_double_map({})
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_double_map({cirq.X: (cirq.X, False)})
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_double_map(x_to=(cirq.X, False))
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_single_map(
{cirq.X: (cirq.Y, False), cirq.Y: (cirq.Z, False), cirq.Z: (cirq.X, False)}
)
with pytest.raises(ValueError):
cirq.SingleQubitCliffordGate.from_single_map(
{cirq.X: (cirq.X, False), cirq.Y: (cirq.X, False)}
)
def test_eq_ne_and_hash():
eq = EqualsTester()
for trans_x, trans_z in _all_rotation_pairs():
gate_gen = lambda: cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z)
eq.make_equality_group(gate_gen)
@pytest.mark.parametrize(
'gate,rep',
(
(cirq.SingleQubitCliffordGate.I, 'cirq.SingleQubitCliffordGate(X:+X, Y:+Y, Z:+Z)'),
(cirq.SingleQubitCliffordGate.H, 'cirq.SingleQubitCliffordGate(X:+Z, Y:-Y, Z:+X)'),
(cirq.SingleQubitCliffordGate.X, 'cirq.SingleQubitCliffordGate(X:+X, Y:-Y, Z:-Z)'),
(cirq.SingleQubitCliffordGate.X_sqrt, 'cirq.SingleQubitCliffordGate(X:+X, Y:+Z, Z:-Y)'),
),
)
def test_repr(gate, rep):
assert repr(gate) == rep
@pytest.mark.parametrize(
'gate,trans_y',
(
(cirq.SingleQubitCliffordGate.I, (cirq.Y, False)),
(cirq.SingleQubitCliffordGate.H, (cirq.Y, True)),
(cirq.SingleQubitCliffordGate.X, (cirq.Y, True)),
(cirq.SingleQubitCliffordGate.Y, (cirq.Y, False)),
(cirq.SingleQubitCliffordGate.Z, (cirq.Y, True)),
(cirq.SingleQubitCliffordGate.X_sqrt, (cirq.Z, False)),
(cirq.SingleQubitCliffordGate.X_nsqrt, (cirq.Z, True)),
(cirq.SingleQubitCliffordGate.Y_sqrt, (cirq.Y, False)),
(cirq.SingleQubitCliffordGate.Y_nsqrt, (cirq.Y, False)),
(cirq.SingleQubitCliffordGate.Z_sqrt, (cirq.X, True)),
(cirq.SingleQubitCliffordGate.Z_nsqrt, (cirq.X, False)),
),
)
def test_y_rotation(gate, trans_y):
assert gate.transform(cirq.Y) == trans_y
@pytest.mark.parametrize(
'gate,gate_equiv',
(
(cirq.SingleQubitCliffordGate.I, cirq.X ** 0),
(cirq.SingleQubitCliffordGate.H, cirq.H),
(cirq.SingleQubitCliffordGate.X, cirq.X),
(cirq.SingleQubitCliffordGate.Y, cirq.Y),
(cirq.SingleQubitCliffordGate.Z, cirq.Z),
(cirq.SingleQubitCliffordGate.X_sqrt, cirq.X ** 0.5),
(cirq.SingleQubitCliffordGate.X_nsqrt, cirq.X ** -0.5),
(cirq.SingleQubitCliffordGate.Y_sqrt, cirq.Y ** 0.5),
(cirq.SingleQubitCliffordGate.Y_nsqrt, cirq.Y ** -0.5),
(cirq.SingleQubitCliffordGate.Z_sqrt, cirq.Z ** 0.5),
(cirq.SingleQubitCliffordGate.Z_nsqrt, cirq.Z ** -0.5),
),
)
def test_decompose(gate, gate_equiv):
q0 = cirq.NamedQubit('q0')
mat = cirq.Circuit(gate(q0)).unitary()
mat_check = cirq.Circuit(
gate_equiv(q0),
).unitary()
assert_allclose_up_to_global_phase(mat, mat_check, rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize(
'gate,gate_equiv',
(
(cirq.SingleQubitCliffordGate.I, cirq.X ** 0),
(cirq.SingleQubitCliffordGate.H, cirq.H),
(cirq.SingleQubitCliffordGate.X, cirq.X),
(cirq.SingleQubitCliffordGate.Y, cirq.Y),
(cirq.SingleQubitCliffordGate.Z, cirq.Z),
(cirq.SingleQubitCliffordGate.X_sqrt, cirq.X ** 0.5),
(cirq.SingleQubitCliffordGate.X_nsqrt, cirq.X ** -0.5),
(cirq.SingleQubitCliffordGate.Y_sqrt, cirq.Y ** 0.5),
(cirq.SingleQubitCliffordGate.Y_nsqrt, cirq.Y ** -0.5),
(cirq.SingleQubitCliffordGate.Z_sqrt, cirq.Z ** 0.5),
(cirq.SingleQubitCliffordGate.Z_nsqrt, cirq.Z ** -0.5),
),
)
def test_known_matrix(gate, gate_equiv):
assert cirq.has_unitary(gate)
mat = cirq.unitary(gate)
mat_check = cirq.unitary(gate_equiv)
assert_allclose_up_to_global_phase(mat, mat_check, rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_inverse(gate):
assert gate == cirq.inverse(cirq.inverse(gate))
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_inverse_matrix(gate):
q0 = cirq.NamedQubit('q0')
mat = cirq.Circuit(gate(q0)).unitary()
mat_inv = cirq.Circuit(cirq.inverse(gate)(q0)).unitary()
assert_allclose_up_to_global_phase(mat, mat_inv.T.conj(), rtol=1e-7, atol=1e-7)
def test_commutes_notimplemented_type():
with pytest.raises(TypeError):
cirq.commutes(cirq.SingleQubitCliffordGate.X, 'X')
assert cirq.commutes(cirq.SingleQubitCliffordGate.X, 'X', default='default') == 'default'
with pytest.raises(TypeError):
cirq.commutes(cirq.CliffordGate.X, 'X')
assert cirq.commutes(cirq.CliffordGate.X, 'X', default='default') == 'default'
@pytest.mark.parametrize(
'gate,other', itertools.product(_all_clifford_gates(), _all_clifford_gates())
)
def test_commutes_single_qubit_gate(gate, other):
q0 = cirq.NamedQubit('q0')
gate_op = gate(q0)
other_op = other(q0)
mat = cirq.Circuit(
gate_op,
other_op,
).unitary()
mat_swap = cirq.Circuit(
other_op,
gate_op,
).unitary()
commutes = cirq.commutes(gate, other)
commutes_check = cirq.allclose_up_to_global_phase(mat, mat_swap)
assert commutes == commutes_check
# Test after switching order
mat_swap = cirq.Circuit(
gate.equivalent_gate_before(other)(q0),
gate_op,
).unitary()
assert_allclose_up_to_global_phase(mat, mat_swap, rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_parses_single_qubit_gate(gate):
assert gate == cirq.read_json(json_text=(cirq.to_json(gate)))
@pytest.mark.parametrize(
'gate,pauli,half_turns',
itertools.product(_all_clifford_gates(), _paulis, (1.0, 0.25, 0.5, -0.5)),
)
def test_commutes_pauli(gate, pauli, half_turns):
# TODO(#4328) cirq.X**1 should be _PauliX instead of XPowGate
pauli_gate = pauli if half_turns == 1 else pauli ** half_turns
q0 = cirq.NamedQubit('q0')
mat = cirq.Circuit(
gate(q0),
pauli_gate(q0),
).unitary()
mat_swap = cirq.Circuit(
pauli_gate(q0),
gate(q0),
).unitary()
commutes = cirq.commutes(gate, pauli_gate)
commutes_check = np.allclose(mat, mat_swap)
assert commutes == commutes_check, f"gate: {gate}, pauli {pauli}"
def test_to_clifford_tableau_util_function():
tableau = cirq.ops.clifford_gate._to_clifford_tableau(
x_to=cirq.PauliTransform(to=cirq.X, flip=False),
z_to=cirq.PauliTransform(to=cirq.Z, flip=False),
)
assert tableau == cirq.CliffordTableau(num_qubits=1, initial_state=0)
tableau = cirq.ops.clifford_gate._to_clifford_tableau(
x_to=cirq.PauliTransform(to=cirq.X, flip=False),
z_to=cirq.PauliTransform(to=cirq.Z, flip=True),
)
assert tableau == cirq.CliffordTableau(num_qubits=1, initial_state=1)
tableau = cirq.ops.clifford_gate._to_clifford_tableau(
rotation_map={
cirq.X: cirq.PauliTransform(to=cirq.X, flip=False),
cirq.Z: cirq.PauliTransform(to=cirq.Z, flip=False),
}
)
assert tableau == cirq.CliffordTableau(num_qubits=1, initial_state=0)
tableau = cirq.ops.clifford_gate._to_clifford_tableau(
rotation_map={
cirq.X: cirq.PauliTransform(to=cirq.X, flip=False),
cirq.Z: cirq.PauliTransform(to=cirq.Z, flip=True),
}
)
assert tableau == cirq.CliffordTableau(num_qubits=1, initial_state=1)
with pytest.raises(ValueError):
cirq.ops.clifford_gate._to_clifford_tableau()
@pytest.mark.parametrize(
'gate,sym,exp',
(
(cirq.SingleQubitCliffordGate.I, 'I', 1),
(cirq.SingleQubitCliffordGate.H, 'H', 1),
(cirq.SingleQubitCliffordGate.X, 'X', 1),
(cirq.SingleQubitCliffordGate.X_sqrt, 'X', 0.5),
(cirq.SingleQubitCliffordGate.X_nsqrt, 'X', -0.5),
(
cirq.SingleQubitCliffordGate.from_xz_map((cirq.Y, False), (cirq.X, True)),
'(X^-0.5-Z^0.5)',
1,
),
),
)
def test_text_diagram_info(gate, sym, exp):
assert cirq.circuit_diagram_info(gate) == cirq.CircuitDiagramInfo(
wire_symbols=(sym,), exponent=exp
)
@pytest.mark.parametrize(
"clifford_gate",
(
cirq.SingleQubitCliffordGate.I,
cirq.SingleQubitCliffordGate.H,
cirq.SingleQubitCliffordGate.X,
cirq.SingleQubitCliffordGate.Y,
cirq.SingleQubitCliffordGate.Z,
cirq.SingleQubitCliffordGate.X_sqrt,
cirq.SingleQubitCliffordGate.Y_sqrt,
cirq.SingleQubitCliffordGate.Z_sqrt,
cirq.SingleQubitCliffordGate.X_nsqrt,
cirq.SingleQubitCliffordGate.Y_nsqrt,
cirq.SingleQubitCliffordGate.Z_nsqrt,
),
)
def test_from_unitary(clifford_gate):
u = cirq.unitary(clifford_gate)
result_gate = cirq.SingleQubitCliffordGate.from_unitary(u)
assert result_gate == clifford_gate
def test_from_unitary_with_phase_shift():
u = np.exp(0.42j) * cirq.unitary(cirq.SingleQubitCliffordGate.Y_sqrt)
gate = cirq.SingleQubitCliffordGate.from_unitary(u)
assert gate == cirq.SingleQubitCliffordGate.Y_sqrt
def test_from_unitary_not_clifford():
# Not a single-qubit gate.
u = cirq.unitary(cirq.CNOT)
assert cirq.SingleQubitCliffordGate.from_unitary(u) is None
    # Not a unitary matrix.
u = 2 * cirq.unitary(cirq.X)
assert cirq.SingleQubitCliffordGate.from_unitary(u) is None
# Not a Clifford gate.
u = cirq.unitary(cirq.T)
assert cirq.SingleQubitCliffordGate.from_unitary(u) is None
@pytest.mark.parametrize('trans_x,trans_z', _all_rotation_pairs())
def test_to_phased_xz_gate(trans_x, trans_z):
gate = cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z)
actual_phased_xz_gate = gate.to_phased_xz_gate()._canonical()
expect_phased_xz_gates = cirq.PhasedXZGate.from_matrix(cirq.unitary(gate))
assert np.isclose(actual_phased_xz_gate.x_exponent, expect_phased_xz_gates.x_exponent)
assert np.isclose(actual_phased_xz_gate.z_exponent, expect_phased_xz_gates.z_exponent)
assert np.isclose(
actual_phased_xz_gate.axis_phase_exponent, expect_phased_xz_gates.axis_phase_exponent
)
def test_from_xz_to_clifford_tableau():
seen_tableau = []
for trans_x, trans_z in _all_rotation_pairs():
tableau = cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z).clifford_tableau
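        # Encode the 2x2 binary tableau matrix plus the two sign bits into a
        # single integer, so distinct tableaux map to distinct numbers.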
tableau_number = sum(2 ** i * t for i, t in enumerate(tableau.matrix().ravel()))
tableau_number = tableau_number * 4 + 2 * tableau.rs[0] + tableau.rs[1]
seen_tableau.append(tableau_number)
# Satisfy the symplectic property
assert sum(tableau.matrix()[0, :2] * tableau.matrix()[1, 1::-1]) % 2 == 1
# Should not have any duplication.
assert len(set(seen_tableau)) == 24
@pytest.mark.parametrize(
'clifford_gate,standard_gate',
[
(cirq.CliffordGate.I, cirq.I),
(cirq.CliffordGate.X, cirq.X),
(cirq.CliffordGate.Y, cirq.Y),
(cirq.CliffordGate.Z, cirq.Z),
(cirq.CliffordGate.H, cirq.H),
(cirq.CliffordGate.S, cirq.S),
(cirq.CliffordGate.CNOT, cirq.CNOT),
(cirq.CliffordGate.CZ, cirq.CZ),
(cirq.CliffordGate.SWAP, cirq.SWAP),
],
)
def test_common_clifford_gate(clifford_gate, standard_gate):
    # cirq.unitary relies on the _decompose_ methods.
u_c = cirq.unitary(clifford_gate)
u_s = cirq.unitary(standard_gate)
cirq.testing.assert_allclose_up_to_global_phase(u_c, u_s, atol=1e-8)
@pytest.mark.parametrize('clifford_gate_name', ("I", "X", "Y", "Z", "H", "S", "CNOT", "CZ", "SWAP"))
def test_common_clifford_gate_caching(clifford_gate_name):
cache_name = f"_{clifford_gate_name}"
delattr(cirq.CliffordGate, cache_name)
assert not hasattr(cirq.CliffordGate, cache_name)
_ = getattr(cirq.CliffordGate, clifford_gate_name)
assert hasattr(cirq.CliffordGate, cache_name)
def test_multi_qubit_clifford_pow():
assert cirq.CliffordGate.X ** -1 == cirq.CliffordGate.X
assert cirq.CliffordGate.H ** -1 == cirq.CliffordGate.H
assert cirq.CliffordGate.S ** 2 == cirq.CliffordGate.Z
assert cirq.CliffordGate.S ** -1 == cirq.CliffordGate.S ** 3
assert cirq.CliffordGate.S ** -3 == cirq.CliffordGate.S
assert cirq.CliffordGate.CNOT ** 3 == cirq.CliffordGate.CNOT
assert cirq.CliffordGate.CNOT ** -3 == cirq.CliffordGate.CNOT
with pytest.raises(TypeError):
_ = cirq.CliffordGate.Z ** 0.25
def test_stabilizer_effect():
assert cirq.has_stabilizer_effect(cirq.CliffordGate.X)
assert cirq.has_stabilizer_effect(cirq.CliffordGate.H)
assert cirq.has_stabilizer_effect(cirq.CliffordGate.S)
assert cirq.has_stabilizer_effect(cirq.CliffordGate.CNOT)
assert cirq.has_stabilizer_effect(cirq.CliffordGate.CZ)
qubits = cirq.LineQubit.range(2)
gate = cirq.CliffordGate.from_op_list(
[cirq.H(qubits[1]), cirq.CZ(*qubits), cirq.H(qubits[1])], qubits
)
assert cirq.has_stabilizer_effect(gate)
def test_clifford_gate_from_op_list():
    # Since from_op_list() ==> _act_on_() ==> tableau.then(), and then() is
    # already covered by lots of random-circuit cases, here we just test a few
    # well-known relationships.
qubit = cirq.NamedQubit('test')
gate = cirq.CliffordGate.from_op_list([cirq.X(qubit), cirq.Z(qubit)], [qubit])
assert gate == cirq.CliffordGate.Y # The tableau ignores the global phase
gate = cirq.CliffordGate.from_op_list([cirq.Z(qubit), cirq.X(qubit)], [qubit])
assert gate == cirq.CliffordGate.Y # The tableau ignores the global phase
gate = cirq.CliffordGate.from_op_list([cirq.X(qubit), cirq.Y(qubit)], [qubit])
assert gate == cirq.CliffordGate.Z # The tableau ignores the global phase
# Two qubits gates
qubits = cirq.LineQubit.range(2)
gate = cirq.CliffordGate.from_op_list(
[cirq.H(qubits[1]), cirq.CZ(*qubits), cirq.H(qubits[1])], qubits
)
assert gate == cirq.CliffordGate.CNOT
gate = cirq.CliffordGate.from_op_list(
[cirq.H(qubits[1]), cirq.CNOT(*qubits), cirq.H(qubits[1])], qubits
)
assert gate == cirq.CliffordGate.CZ
# Note the order of qubits matters
gate = cirq.CliffordGate.from_op_list(
[cirq.H(qubits[0]), cirq.CZ(qubits[1], qubits[0]), cirq.H(qubits[0])], qubits
)
assert gate != cirq.CliffordGate.CNOT
# But if we reverse the qubit_order, they will equal again.
gate = cirq.CliffordGate.from_op_list(
[cirq.H(qubits[0]), cirq.CZ(qubits[1], qubits[0]), cirq.H(qubits[0])], qubits[::-1]
)
assert gate == cirq.CliffordGate.CNOT
with pytest.raises(
ValueError, match="only be constructed from the operations that has stabilizer effect"
):
cirq.CliffordGate.from_op_list([cirq.T(qubit)], [qubit])
def test_clifford_gate_from_tableau():
t = cirq.CliffordGate.X.clifford_tableau
assert cirq.CliffordGate.from_clifford_tableau(t) == cirq.CliffordGate.X
t = cirq.CliffordGate.H.clifford_tableau
assert cirq.CliffordGate.from_clifford_tableau(t) == cirq.CliffordGate.H
t = cirq.CliffordGate.CNOT.clifford_tableau
assert cirq.CliffordGate.from_clifford_tableau(t) == cirq.CliffordGate.CNOT
with pytest.raises(ValueError):
t = cirq.CliffordTableau(num_qubits=1)
t.xs = np.array([1, 1]).reshape(2, 1)
        t.zs = np.array([1, 1]).reshape(2, 1)  # This violates the symplectic property.
cirq.CliffordGate.from_clifford_tableau(t)
with pytest.raises(ValueError, match="Input argument has to be a CliffordTableau instance."):
cirq.CliffordGate.from_clifford_tableau(1)
def test_multi_clifford_decompose_by_unitary():
# Construct a random clifford gate:
    n, num_ops = 5, 20  # kept small: the unitary check cannot scale to many qubits
gate_candidate = [cirq.X, cirq.Y, cirq.Z, cirq.H, cirq.S, cirq.CNOT, cirq.CZ]
for seed in range(100):
prng = np.random.RandomState(seed)
qubits = cirq.LineQubit.range(n)
ops = []
for _ in range(num_ops):
g = prng.randint(len(gate_candidate))
indices = (prng.randint(n),) if g < 5 else prng.choice(n, 2, replace=False)
ops.append(gate_candidate[g].on(*[qubits[i] for i in indices]))
gate = cirq.CliffordGate.from_op_list(ops, qubits)
decomposed_ops = cirq.decompose(gate.on(*qubits))
circ = cirq.Circuit(decomposed_ops)
        circ.append(cirq.I.on_each(qubits))  # make sure the dimensions align.
cirq.testing.assert_allclose_up_to_global_phase(
cirq.unitary(gate), cirq.unitary(circ), atol=1e-7
)
def test_pad_tableau_bad_input():
with pytest.raises(
ValueError, match="Input axes of padding should match with the number of qubits"
):
tableau = cirq.CliffordTableau(num_qubits=3)
cirq.ops.clifford_gate._pad_tableau(tableau, num_qubits_after_padding=4, axes=[1, 2])
with pytest.raises(
ValueError, match='The number of qubits in the input tableau should not be larger than'
):
tableau = cirq.CliffordTableau(num_qubits=3)
cirq.ops.clifford_gate._pad_tableau(tableau, num_qubits_after_padding=2, axes=[0, 1, 2])
def test_pad_tableau():
tableau = cirq.CliffordTableau(num_qubits=1)
padded_tableau = cirq.ops.clifford_gate._pad_tableau(
tableau, num_qubits_after_padding=2, axes=[0]
)
assert padded_tableau == cirq.CliffordTableau(num_qubits=2)
tableau = cirq.CliffordTableau(num_qubits=1, initial_state=1)
padded_tableau = cirq.ops.clifford_gate._pad_tableau(
tableau, num_qubits_after_padding=1, axes=[0]
)
assert padded_tableau == cirq.CliffordGate.X.clifford_tableau
# Tableau for H
# [0 1 0]
# [1 0 0]
tableau = cirq.CliffordGate.H.clifford_tableau
padded_tableau = cirq.ops.clifford_gate._pad_tableau(
tableau, num_qubits_after_padding=2, axes=[0]
)
np.testing.assert_equal(
padded_tableau.matrix().astype(np.int64),
np.array(
[
[0, 0, 1, 0],
[0, 1, 0, 0],
[1, 0, 0, 0],
[0, 0, 0, 1],
]
),
)
np.testing.assert_equal(padded_tableau.rs.astype(np.int64), np.zeros(4))
    # The tableau of H again, but padded on the other axis
tableau = cirq.CliffordGate.H.clifford_tableau
padded_tableau = cirq.ops.clifford_gate._pad_tableau(
tableau, num_qubits_after_padding=2, axes=[1]
)
np.testing.assert_equal(
padded_tableau.matrix().astype(np.int64),
np.array(
[
[1, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 0],
[0, 1, 0, 0],
]
),
)
np.testing.assert_equal(padded_tableau.rs.astype(np.int64), np.zeros(4))
def test_clifford_gate_act_on_small_case():
# Note this is also covered by the `from_op_list` one, etc.
qubits = cirq.LineQubit.range(5)
args = cirq.ActOnCliffordTableauArgs(
tableau=cirq.CliffordTableau(num_qubits=5),
qubits=qubits,
prng=np.random.RandomState(),
)
expected_args = cirq.ActOnCliffordTableauArgs(
tableau=cirq.CliffordTableau(num_qubits=5),
qubits=qubits,
prng=np.random.RandomState(),
)
cirq.act_on(cirq.H, expected_args, qubits=[qubits[0]], allow_decompose=False)
cirq.act_on(cirq.CliffordGate.H, args, qubits=[qubits[0]], allow_decompose=False)
assert args.tableau == expected_args.tableau
cirq.act_on(cirq.CNOT, expected_args, qubits=[qubits[0], qubits[1]], allow_decompose=False)
cirq.act_on(cirq.CliffordGate.CNOT, args, qubits=[qubits[0], qubits[1]], allow_decompose=False)
assert args.tableau == expected_args.tableau
cirq.act_on(cirq.H, expected_args, qubits=[qubits[0]], allow_decompose=False)
cirq.act_on(cirq.CliffordGate.H, args, qubits=[qubits[0]], allow_decompose=False)
assert args.tableau == expected_args.tableau
cirq.act_on(cirq.S, expected_args, qubits=[qubits[0]], allow_decompose=False)
cirq.act_on(cirq.CliffordGate.S, args, qubits=[qubits[0]], allow_decompose=False)
assert args.tableau == expected_args.tableau
cirq.act_on(cirq.X, expected_args, qubits=[qubits[2]], allow_decompose=False)
cirq.act_on(cirq.CliffordGate.X, args, qubits=[qubits[2]], allow_decompose=False)
assert args.tableau == expected_args.tableau
def test_clifford_gate_act_on_large_case():
    n, num_ops = 50, 1000  # fast, because no unitary needs to be computed.
gate_candidate = [cirq.X, cirq.Y, cirq.Z, cirq.H, cirq.S, cirq.CNOT, cirq.CZ]
for seed in range(10):
prng = np.random.RandomState(seed)
t1 = cirq.CliffordTableau(num_qubits=n)
t2 = cirq.CliffordTableau(num_qubits=n)
qubits = cirq.LineQubit.range(n)
args1 = cirq.ActOnCliffordTableauArgs(tableau=t1, qubits=qubits, prng=prng)
args2 = cirq.ActOnCliffordTableauArgs(tableau=t2, qubits=qubits, prng=prng)
ops = []
for _ in range(num_ops):
g = prng.randint(len(gate_candidate))
indices = (prng.randint(n),) if g < 5 else prng.choice(n, 2, replace=False)
cirq.act_on(
gate_candidate[g], args1, qubits=[qubits[i] for i in indices], allow_decompose=False
)
ops.append(gate_candidate[g].on(*[qubits[i] for i in indices]))
compiled_gate = cirq.CliffordGate.from_op_list(ops, qubits)
cirq.act_on(compiled_gate, args2, qubits)
assert args1.tableau == args2.tableau
def test_clifford_gate_act_on_ch_form():
    # Although CH form isn't supported directly via _act_on_, it falls back
    # to the decomposition method and is applied through the decomposed ops.
    # Here we run it for coverage only.
args = cirq.ActOnStabilizerCHFormArgs(
initial_state=cirq.StabilizerStateChForm(num_qubits=2, initial_state=1),
qubits=cirq.LineQubit.range(2),
prng=np.random.RandomState(),
)
cirq.act_on(cirq.CliffordGate.X, args, qubits=cirq.LineQubit.range(1))
np.testing.assert_allclose(args.state.state_vector(), np.array([0, 0, 0, 1]))
def test_clifford_gate_act_on_fail():
with pytest.raises(TypeError, match="Failed to act"):
cirq.act_on(cirq.CliffordGate.X, DummyActOnArgs(), qubits=())
|
py | 1a442553474fc98aadb142b9b3e2d9ef43fabb6f | """SUMMARY: Acoustic event detection with voice activity detection (VAD)
methods.
"""
import numpy as np
def activity_detection(x, thres, low_thres=None, n_smooth=1, n_salt=0):
"""Activity detection.
Args:
x: array
thres: float, threshold
      low_thres: float, second lower threshold
      n_smooth: integer, gaps of up to this many frames are smoothed over.
      n_salt: integer, events with fin - bgn equal to or shorter than this
        value will be removed. Set to 0 to disable salt-noise removal.
Return: list of [bgn, fin]
"""
locts = np.where(x>thres)[0]
# Find pairs of [bgn, fin]
bgn_fin_pairs = find_bgn_fin_pairs(locts)
# Second threshold
if low_thres is not None:
bgn_fin_pairs = activity_detection_with_second_thres(
x, bgn_fin_pairs, low_thres)
# Smooth
bgn_fin_pairs = smooth(bgn_fin_pairs, n_smooth)
# Remove salt noise
bgn_fin_pairs = remove_salt_noise(bgn_fin_pairs, n_salt)
return bgn_fin_pairs
def find_bgn_fin_pairs(locts):
"""Find pairs of [bgn, fin] from loctation array
"""
if len(locts)==0:
return []
else:
bgns = [locts[0]]
fins = []
for i1 in range(1,len(locts)):
if locts[i1]-locts[i1-1]>1:
fins.append(locts[i1-1])
bgns.append(locts[i1])
fins.append(locts[-1])
assert len(bgns)==len(fins)
lists = []
for i1 in range(len(bgns)):
lists.append([bgns[i1], fins[i1]])
return lists
def activity_detection_with_second_thres(x, bgn_fin_pairs, thres):
"""Double threshold method.
"""
new_bgn_fin_pairs = []
for [bgn, fin] in bgn_fin_pairs:
while(bgn != -1):
if x[bgn] < thres:
break
bgn -= 1
while(fin != len(x)):
if x[fin] < thres:
break
fin += 1
new_bgn_fin_pairs.append([bgn + 1, fin])
new_bgn_fin_pairs = smooth(new_bgn_fin_pairs, n_smooth=1)
return new_bgn_fin_pairs
def smooth(bgn_fin_pairs, n_smooth):
"""Smooth the [bgn, fin] pairs.
"""
new_bgn_fin_pairs = []
if len(bgn_fin_pairs) == 0:
return []
[mem_bgn, fin] = bgn_fin_pairs[0]
for n in range(1, len(bgn_fin_pairs)):
[pre_bgn, pre_fin] = bgn_fin_pairs[n - 1]
[bgn, fin] = bgn_fin_pairs[n]
if bgn - pre_fin <= n_smooth:
pass
else:
new_bgn_fin_pairs.append([mem_bgn, pre_fin])
mem_bgn = bgn
new_bgn_fin_pairs.append([mem_bgn, fin])
return new_bgn_fin_pairs
def remove_salt_noise(bgn_fin_pairs, n_salt):
"""Remove salt noise
"""
new_bgn_fin_pairs = []
for [bgn, fin] in bgn_fin_pairs:
if fin - bgn <= n_salt:
pass
else:
new_bgn_fin_pairs.append([bgn, fin])
return new_bgn_fin_pairs
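# A minimal usage sketch; the signal and thresholds below are made-up
# illustrative values, not part of any real detection pipeline.
if __name__ == '__main__':
    x = np.array([0.0, 0.4, 0.9, 0.95, 0.4, 0.0, 0.0, 0.8, 0.0, 0.0])
    events = activity_detection(x, thres=0.5, low_thres=0.3, n_smooth=1, n_salt=0)
    print(events)  # -> [[1, 5], [7, 8]] with these settings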
|
py | 1a44275025722fb768234b2c54388790e7cbf157 | # Write a program where 4 players roll a die and get
# random results.
# Store the results in a dictionary. At the end, sort the
# dictionary, knowing that the winner rolled the highest number on the die.
from random import randint
from time import sleep
from operator import itemgetter
jogadores = {'Player 1': randint(1, 6),
             'Player 2': randint(1, 6),
             'Player 3': randint(1, 6),
             'Player 4': randint(1, 6)}
for j, d in jogadores.items():
    print(f'{j} rolled {d} on the die')
    sleep(1)
print('-=' * 20)
print('-----RANKING-----')
sleep(1)
ranking = sorted(jogadores.items(), key=itemgetter(1), reverse=True)
for i, v in enumerate(ranking):
    print(f'Place {i+1}: {v[0]} with {v[1]}')
|
py | 1a44286cea52e0f1120c634f79edaaa50e8e62fe | # -*- coding: utf-8 -*-
import os
import click
from matplusc3d import combine_files
@click.command()
@click.argument('c3dfile', metavar='[filename.c3d]', required=False, type=click.Path())
@click.option('--overwrite', is_flag=True, help="Overwrite existing c3dfiles. "
"If not set, a file new file 'filename_updated.c3d' will be created")
def main(c3dfile, overwrite):
"""Command line tool for adding virtual makers to Coda Motion
C3D files from the information in exported Mat files.
    The tool assumes the c3d and mat files have the same filename
but different extensions. If called without arguments the tool
will find all matching c3d/mat files in the working directory.
"""
if not c3dfile:
click.confirm('Combine all C3D/Mat files in current dir?', abort=True)
filelist = [(f, os.path.splitext(f)[0]+'.mat')
for f in os.listdir('.') if f.upper().endswith('.C3D')]
elif os.path.isfile(c3dfile):
matfile = os.path.splitext(c3dfile)[0]+'.mat'
if not os.path.isfile(matfile):
raise click.UsageError('No mat file found matching {}'
''.format(c3dfile))
filelist = [(c3dfile, matfile)]
else:
raise click.UsageError('No such file {}'.format(c3dfile))
filelist = [(str(f), str(m)) for f, m in filelist
if os.path.exists(f) and os.path.exists(m)]
for c3dfile, matfile in filelist:
postfix = '' if overwrite else '_updated'
new_c3d = combine_files(c3dfile, matfile, postfix=postfix)
print('Updated: {}'.format(new_c3d))
if __name__ == "__main__":
main()
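# Hedged shell usage, assuming the module is exposed as a console script
# named `matplusc3d` (the entry-point name is an assumption):
#   matplusc3d trial01.c3d              # writes trial01_updated.c3d
#   matplusc3d trial01.c3d --overwrite  # updates trial01.c3d in place
#   matplusc3d                          # prompts, then combines all pairs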
|
py | 1a4428bd7ff2e4037e246020c635f517c275b9f1 | #
# MIT License
#
# Copyright (c) 2020 Pablo Rodriguez Nava, @pablintino
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import pytest
from app import db, create_app
from tests import setup_db, teardown_db, clean_db
class TestConfig:
SQLALCHEMY_DATABASE_URI = 'mssql+pyodbc://test:[email protected],4490/altium_db_test_ut?driver=ODBC+Driver+17+for+SQL+Server'
REDIS_URL = 'redis://'
SQLALCHEMY_TRACK_MODIFICATIONS = False
@pytest.fixture(scope="session")
def app():
yield create_app(config_class=TestConfig)
@pytest.fixture(scope="session")
def database(app):
assert app is not None
setup_db(app)
yield db
teardown_db()
@pytest.fixture(scope="function")
def db_session(database, app):
assert app is not None
with app.app_context():
clean_db()
yield database.session
database.session.rollback()
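# A hedged example of a test consuming these fixtures; the `Component` model
# and its fields are hypothetical placeholders:
#
#   def test_insert_component(db_session):
#       db_session.add(Component(name='R1'))
#       db_session.commit()
#       assert db_session.query(Component).count() == 1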
|
py | 1a4429799f6f6b6b131d41e09f952ab94aa49cb3 | from django.urls import path, include
from .viewsets.numbers import (
NumbersViewset,
DetailNumbersViewset,
StartNumber,
StopNumber,
StatusNumber,
LoginNumber,
)
from .viewsets.messages import (
WhatsappChatAllViewset,
WhatsappChatViewset,
WhatsappChatDetailViewset,
WhatsappMediaViewset,
WhastappMediaDetailViewset
)
from .viewsets.contacts import ContactsViewset, GroupContactsViewset
from .viewsets.media import UsersMediaViewset
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register('media', UsersMediaViewset, base_name='api_media')
router.register('group', GroupContactsViewset, base_name='api_group')
router.register('contacts', ContactsViewset, base_name='api_contacts')
urlpatterns = [
path('', include(router.urls)),
path('auth/', include('users_auth.urls')),
# path('group/<int:group_id>/contacts/<int:contact_id>/', GroupContactsViewset.as_view(
# {"get":"contact_detail", "put":"contact_detail", "delete":"contact_detail"})),
# path('contacts/', ContactsViewset.as_view(), name='api_contacts'),
path('numbers/', NumbersViewset.as_view(), name='api_numbers'),
path('numbers/<int:pk>/', DetailNumbersViewset.as_view(), name='api_number_detail'),
path('numbers/<int:pk>/start/', StartNumber.as_view(), name='api_number_start'),
path('numbers/<int:pk>/stop/', StopNumber.as_view(), name='api_number_stop'),
path('numbers/<int:pk>/status/', StatusNumber.as_view(), name='api_number_status'),
path('numbers/<int:pk>/login/', LoginNumber.as_view(), name='api_number_login'),
# path('numbers/<int:pk>/logout/'),
path('numbers/<int:pk>/chats/', WhatsappChatViewset.as_view(), name='api_messages_chat'),
path('numbers/<int:pk>/chats/<int:id>/', WhatsappChatDetailViewset.as_view(), name='api_messages_chat_detail'),
path('numbers/<int:pk>/media/', WhatsappMediaViewset.as_view(), name='api_messages_media'),
path('numbers/<int:pk>/media/<int:id>/', WhastappMediaDetailViewset.as_view(), name='api_messages_media_detail'),
path('messages/', WhatsappChatAllViewset.as_view(), name='api_messages_all')
] |
py | 1a442a5e9ef9778faa5dcd5134b345b82eb6269b | # Copyright (c) 2019 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
from kmip.core import enums
from kmip.demos import utils
from kmip.pie import client
# NOTE: This demo script shows how to delete the first Name attribute from
# the user-specified object. The object *must* have at least one Name
# attribute for attribute deletion to work. Otherwise, the client
# call to delete_attribute will fail.
if __name__ == '__main__':
logger = utils.build_console_logger(logging.INFO)
parser = utils.build_cli_parser(enums.Operation.DELETE_ATTRIBUTE)
opts, args = parser.parse_args(sys.argv[1:])
if opts.uuid is None:
logger.error("No UUID provided, existing early from demo.")
sys.exit()
with client.ProxyKmipClient(
config=opts.config,
config_file=opts.config_file
) as c:
try:
object_id, modified_attribute = c.delete_attribute(
unique_identifier=opts.uuid,
attribute_name="Name",
attribute_index=0
)
logger.info(
"Successfully deleted 'Name' attribute from object: {}".format(
object_id
)
)
logger.info("Deleted attribute: {}".format(modified_attribute))
except Exception as e:
logger.error(e)
|
py | 1a442a7ec5f6a7d9513767835d43db8935c71a7f | """
Project: SSITH CyberPhysical Demonstrator
Name: component.py
Author: Ethan Lew
Date: 02 October 2020
An object to establish communication and messaging between services.
Pub/sub approach -- establish service subscribers and publishers at
initialization; register topics and callbacks for service components.
"""
from .message import Message, Envelope, MessageLevel
import threading
import typing as typ
import zmq
import time
import enum
import functools
import collections.abc
import struct
class ComponentStatus(enum.Enum):
"""component states and commands"""
READY = enum.auto()
    ERROR = enum.auto()
    EXIT = enum.auto()  # emitted by the receiver loop on shutdown
class ComponentPollerStatus(enum.Enum):
"""component poller states"""
POLLER_START = enum.auto()
POLLER_END = enum.auto()
def coroutine(func):
"""decorator for coroutine creation/initialization"""
@functools.wraps(func)
def primer(*args, **kwargs):
gen = func(*args, **kwargs)
next(gen)
return gen
return primer
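# A minimal illustration of the decorator above (the `echo` coroutine is a
# hypothetical example): the generator is advanced to its first `yield`
# automatically, so callers can `send()` into it immediately.
#
#   @coroutine
#   def echo():
#       while True:
#           received = yield
#           print(received)
#
#   gen = echo()        # already primed; no explicit next(gen) required
#   gen.send("hello")   # prints "hello"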
class ComponentMetaDict(dict):
"""resolve overloaded attributes"""
def __setitem__(self, key, value):
if hasattr(value, 'recv_spec'):
if "_recv_methods" not in self:
self["_recv_methods"] = {}
self["_recv_methods"][getattr(value, 'recv_spec')] = value
elif hasattr(value, 'recv_can_spec'):
if "_recv_can_methods" not in self:
self["_recv_can_methods"] = {}
self["_recv_can_methods"][getattr(value, 'recv_can_spec')] = value
else:
super().__setitem__(key, value)
    def __getitem__(self, key):
        if key not in self and key.isupper():
return key.upper()
else:
return super().__getitem__(key)
class ComponentMeta(type):
"""recognize special decorators, and build out the implied attributes"""
@classmethod
def __prepare__(metacls, name, bases):
def _register_can(*args):
recv_spec = args
def decorate(func):
func.recv_can_spec = recv_spec
return func
return decorate
def _register_topic(*args):
recv_spec = args
def decorate(func):
func.recv_spec = recv_spec
return func
return decorate
d = ComponentMetaDict()
d["recv_topic"] = _register_topic
# TODO: add CAN registry in Component, which may not be the best place for it
d["recv_can"] = _register_can
return d
@classmethod
def _build(cls, attributes):
pass
def __new__(meta, clsname, bases, attributes):
del attributes["recv_topic"]
del attributes["recv_can"]
cls = super().__new__(meta, clsname, bases, attributes)
return cls
class ThreadExiting(threading.Thread):
"""ThreadExiting presents a thread associated with a stop event. Computations done by the
thread are decomposed into poller initialization, polling iteration, and deinitialization.
This permits the thread to stop gracefully, attending to resources used."""
def __init__(self, name, *args, sample_frequency=60, **kwargs):
super().__init__(name=name, *args, daemon=True, **kwargs)
self.stop_evt = threading.Event()
self.poller_start_time = None
self.polling_thread = None
self.sample_period = 1.0 / sample_frequency
def on_poll(self, t):
pass
def on_start(self):
pass
def run(self):
self.on_start()
while not self.stopped:
it_start = time.time()
if self.poller_start_time is None:
self.poller_start_time = time.time()
t = self.poller_start_time
else:
t = time.time() - self.poller_start_time
self.on_poll(t)
it_end = time.time()
it_diff = it_end - it_start
if (self.sample_period - it_diff) > 0:
time.sleep(self.sample_period - it_diff)
self.on_exit()
def on_exit(self):
pass
def stop(self):
self.stop_evt.set()
@property
def stopped(self):
return self.stop_evt.is_set()
def exit(self):
self.stop()
self.join()
class Component(ThreadExiting, metaclass=ComponentMeta):
def __init__(self, name: str,
in_descr: typ.List[typ.Tuple],
out_descr: typ.List[typ.Tuple],
*args,
ip_addr='127.0.0.1' ,
**kwargs):
"""
:param in_descr: port description tuple (elements are of the form (port number, topic name))
:param out_descr: port description tuple (elements are of the form (port number, topic name))
"""
super(Component, self).__init__(name, *args, **kwargs)
self.ip_addr = ip_addr
self._in_ports: typ.Set[typ.Tuple] = set(in_descr)
self._out_ports: typ.Set[typ.Tuple] = set(out_descr)
self._name = name
self._zmq_context: typ.Union[zmq.Context, None] = None
self._in_socks: typ.List[zmq.socket.Socket] = []
self._out_sock: typ.Union[zmq.socket.Socket, None] = None
# time since initialization
self._epoch = time.time()
self._unbound = False
self._ready = False
self._start_finished = False
# bind here so startup messages can be received
self.bind()
def send_message(self, message: Message, topic: str, level=MessageLevel.NORMAL) -> None:
"""send message over pub/sub network
"""
assert self._out_sock is not None, f"requested to send message with no outgoing socket (None)"
assert topic in self.out_topics, f"requested to send message with invalid topic {topic}"
self._out_sock.send_string(topic, zmq.SNDMORE)
self._out_sock.send_pyobj(Envelope.serialize(Envelope(self, message, level=level)))
@coroutine
def get_receiver(self):
"""create an iterable object that yields incoming messages"""
# prime the coroutine
yield None
# main loop -- iterate through input sockets and yield incoming messages
try:
while not self.stopped:
for sn in self._in_socks:
if self.stopped:
yield (f"{self.name}-command", Message(ComponentStatus.EXIT), None)
while isinstance(sn, zmq.Socket) and sn.poll(1) == zmq.POLLIN:
topic = sn.recv_string(zmq.DONTWAIT)
recv: bytes = sn.recv_pyobj(zmq.DONTWAIT)
env: Envelope = Envelope.deserialize(recv)
msg = env.message
t = env.send_time
yield (topic, msg, t)
except Exception as exc:
# TODO: log here
pass
def on_recv(self, topic, msg, t) -> None:
pass
def _process_recvs(self, topic: str, msg: Message, t):
"""find reception response in the receiver registry"""
rgy = self.recv_methods
if (topic,) in rgy:
rgy[(topic,)](self, msg, t)
        if isinstance(msg.message, collections.abc.Hashable) and (topic, msg.message) in rgy:
rgy[(topic, msg.message)](self, t)
self.on_recv(topic, msg, t)
def recv(self, id: int, data_len: int, data: bytes):
"""find reception response in the can receiver registry"""
rgy = self.recv_can_methods
if id in rgy:
fmt = rgy[id][0]
# use network order (!)
msg = struct.unpack(fmt, bytearray(data)[:data_len])
rgy[id][1](self, msg)
@property
def recv_methods(self):
"""get all registered receive components for self"""
return getattr(self, "_recv_methods", {})
@property
def recv_can_methods(self):
"""get all registered receive components for self"""
d = getattr(self, "_recv_can_methods", {})
return {k[0]: (k[1], v) for k,v in d.items()}
def run(self) -> None:
"""component mainloop"""
self._ready = True
recvr = self.get_receiver()
self.on_start()
self._start_finished = True
for topic, msg, t in recvr:
self._process_recvs(topic, msg, t)
if self.stopped:
break
self.unbind()
self.on_exit()
def bind(self) -> None:
"""setup sockets"""
if self._zmq_context is not None:
self.unbind()
self._zmq_context = zmq.Context()
if len(self.out_topics) > 0:
# assert that all out port nums are the same
ports = set([i for i, _ in self._out_ports])
assert len(
ports) == 1, f"outgoing port numbers must all be the same for {self.__class__.__name__} (got {ports})"
self._out_sock = self._zmq_context.socket(zmq.PUB)
self._out_sock.bind(f"tcp://*:{list(self._out_ports)[0][0]}")
for port, topic in self._in_ports:
in_sock = self._zmq_context.socket(zmq.SUB)
in_sock.connect(f"tcp://{self.ip_addr}:{port}")
in_sock.setsockopt_string(zmq.SUBSCRIBE, topic)
self._in_socks.append(in_sock)
# give zmq time
time.sleep(0.2)
def unbind(self) -> None:
"""unbind/disconnect sockets, terminate zmq session"""
self._unbound = True
for idx, s in enumerate(self._in_socks):
s.close()
self._in_socks[idx] = None
if self._out_sock is not None:
self._out_sock.close()
if self._zmq_context is not None:
self._zmq_context.term()
self._zmq_context = None
self._in_socks = []
self._out_sock = None
@property
def in_topics(self):
return [i for _, i in self._in_ports]
@property
def out_topics(self):
return [i for _, i in self._out_ports]
@property
def name(self):
return self._name
def wait_ready_command(self):
"""wait until the service has finished booting"""
import time
while not self._start_finished:
time.sleep(0.2)
return ComponentStatus.READY
class ComponentPoller(Component):
"""add a polling thread to respond at a given sampling frequency"""
def __init__(self, *args, sample_frequency=60, **kwargs):
super().__init__(*args, **kwargs)
# poller properties
self.polling_thread = ThreadExiting(name=f"{self.name}-poller", sample_frequency=sample_frequency, **kwargs)
self.polling_thread.on_poll = self.on_poll_poll
self.polling_thread.on_start = self.on_poll_start
self.polling_thread.on_exit = self.on_poll_exit
self._call_started = False
def on_poll_start(self):
pass
def on_poll_exit(self):
pass
def on_poll_poll(self, t):
pass
def start_poller(self):
"""start sensor polling thread"""
if not self.stopped:
self.polling_thread.start()
self._call_started = True
def stop_poller(self):
"""stop sensor polling thread"""
if self._call_started:
self.polling_thread.stop()
def exit_poller(self):
if self._call_started:
self.polling_thread.exit()
def stop(self):
self.stop_poller()
super().stop()
def exit(self):
self.exit_poller()
super().exit()
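# A hedged usage sketch; the names, ports, and topics below are hypothetical.
# ComponentMeta injects `recv_topic` into the class namespace, so subclasses
# register handlers declaratively: a one-argument spec receives (msg, t),
# while a (topic, payload) spec receives only t, matching _process_recvs.
#
#   class HeartbeatMonitor(Component):
#       @recv_topic("heartbeat")
#       def on_heartbeat(self, msg, t):
#           print(f"heartbeat at {t}: {msg.message}")
#
#       @recv_topic("monitor-command", ComponentStatus.READY)
#       def on_ready(self, t):
#           print("peer is ready")
#
#   monitor = HeartbeatMonitor("monitor",
#                              in_descr=[(5001, "heartbeat"),
#                                        (5001, "monitor-command")],
#                              out_descr=[(5002, "monitor-status")])
#   monitor.start()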
|
py | 1a442b53e749afa9c34cd3b2fa8d33f1e9cdb1dc | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and Contributors
# See license.txt
from frappe.core.doctype.user_permission.user_permission import add_user_permissions, remove_applicable
from frappe.permissions import has_user_permission
from frappe.core.doctype.doctype.test_doctype import new_doctype
import frappe
import unittest
class TestUserPermission(unittest.TestCase):
def setUp(self):
frappe.db.sql("""DELETE FROM `tabUser Permission`
WHERE `user` in (
'[email protected]',
'[email protected]',
'[email protected]')""")
frappe.delete_doc_if_exists("DocType", "Person")
frappe.db.sql_ddl("DROP TABLE IF EXISTS `tabPerson`")
frappe.delete_doc_if_exists("DocType", "Doc A")
frappe.db.sql_ddl("DROP TABLE IF EXISTS `tabDoc A`")
def test_default_user_permission_validation(self):
user = create_user('[email protected]')
param = get_params(user, 'User', user.name, is_default=1)
add_user_permissions(param)
#create a duplicate entry with default
perm_user = create_user('[email protected]')
param = get_params(user, 'User', perm_user.name, is_default=1)
self.assertRaises(frappe.ValidationError, add_user_permissions, param)
def test_default_user_permission(self):
frappe.set_user('Administrator')
user = create_user('[email protected]', 'Website Manager')
for category in ['general', 'public']:
if not frappe.db.exists('Blog Category', category):
frappe.get_doc({'doctype': 'Blog Category', 'title': category}).insert()
param = get_params(user, 'Blog Category', 'general', is_default=1)
add_user_permissions(param)
param = get_params(user, 'Blog Category', 'public')
add_user_permissions(param)
frappe.set_user('[email protected]')
doc = frappe.new_doc("Blog Post")
self.assertEqual(doc.blog_category, 'general')
frappe.set_user('Administrator')
def test_apply_to_all(self):
''' Create User permission for User having access to all applicable Doctypes'''
user = create_user('[email protected]')
param = get_params(user, 'User', user.name)
is_created = add_user_permissions(param)
self.assertEqual(is_created, 1)
def test_for_apply_to_all_on_update_from_apply_all(self):
user = create_user('[email protected]')
param = get_params(user, 'User', user.name)
# Initially create User Permission document with apply_to_all checked
is_created = add_user_permissions(param)
self.assertEqual(is_created, 1)
is_created = add_user_permissions(param)
# User Permission should not be changed
self.assertEqual(is_created, 0)
def test_for_applicable_on_update_from_apply_to_all(self):
''' Update User Permission from all to some applicable Doctypes'''
user = create_user('[email protected]')
param = get_params(user,'User', user.name, applicable = ["Chat Room", "Chat Message"])
# Initially create User Permission document with apply_to_all checked
is_created = add_user_permissions(get_params(user, 'User', user.name))
self.assertEqual(is_created, 1)
is_created = add_user_permissions(param)
frappe.db.commit()
removed_apply_to_all = frappe.db.exists("User Permission", get_exists_param(user))
is_created_applicable_first = frappe.db.exists("User Permission", get_exists_param(user, applicable = "Chat Room"))
is_created_applicable_second = frappe.db.exists("User Permission", get_exists_param(user, applicable = "Chat Message"))
# Check that apply_to_all is removed
self.assertIsNone(removed_apply_to_all)
# Check that User Permissions for applicable is created
self.assertIsNotNone(is_created_applicable_first)
self.assertIsNotNone(is_created_applicable_second)
self.assertEqual(is_created, 1)
def test_for_apply_to_all_on_update_from_applicable(self):
''' Update User Permission from some to all applicable Doctypes'''
user = create_user('[email protected]')
param = get_params(user, 'User', user.name)
# create User permissions that with applicable
is_created = add_user_permissions(get_params(user, 'User', user.name, applicable = ["Chat Room", "Chat Message"]))
self.assertEqual(is_created, 1)
is_created = add_user_permissions(param)
is_created_apply_to_all = frappe.db.exists("User Permission", get_exists_param(user))
removed_applicable_first = frappe.db.exists("User Permission", get_exists_param(user, applicable = "Chat Room"))
removed_applicable_second = frappe.db.exists("User Permission", get_exists_param(user, applicable = "Chat Message"))
# To check that a User permission with apply_to_all exists
self.assertIsNotNone(is_created_apply_to_all)
# Check that all User Permission with applicable is removed
self.assertIsNone(removed_applicable_first)
self.assertIsNone(removed_applicable_second)
self.assertEqual(is_created, 1)
def test_user_perm_for_nested_doctype(self):
"""Test if descendants' visibility is controlled for a nested DocType."""
from frappe.core.doctype.doctype.test_doctype import new_doctype
user = create_user("[email protected]", "Blogger")
if not frappe.db.exists("DocType", "Person"):
doc = new_doctype("Person",
fields=[
{
"label": "Person Name",
"fieldname": "person_name",
"fieldtype": "Data"
}
], unique=0)
doc.is_tree = 1
doc.insert()
parent_record = frappe.get_doc(
{"doctype": "Person", "person_name": "Parent", "is_group": 1}
).insert()
child_record = frappe.get_doc(
{"doctype": "Person", "person_name": "Child", "is_group": 0, "parent_person": parent_record.name}
).insert()
add_user_permissions(get_params(user, "Person", parent_record.name))
# check if adding perm on a group record, makes child record visible
self.assertTrue(has_user_permission(frappe.get_doc("Person", parent_record.name), user.name))
self.assertTrue(has_user_permission(frappe.get_doc("Person", child_record.name), user.name))
frappe.db.set_value("User Permission", {"allow": "Person", "for_value": parent_record.name}, "hide_descendants", 1)
frappe.cache().delete_value("user_permissions")
# check if adding perm on a group record with hide_descendants enabled,
# hides child records
self.assertTrue(has_user_permission(frappe.get_doc("Person", parent_record.name), user.name))
self.assertFalse(has_user_permission(frappe.get_doc("Person", child_record.name), user.name))
def test_user_perm_on_new_doc_with_field_default(self):
"""Test User Perm impact on frappe.new_doc. with *field* default value"""
frappe.set_user('Administrator')
user = create_user("[email protected]", "Blogger")
# make a doctype "Doc A" with 'doctype' link field and default value ToDo
if not frappe.db.exists("DocType", "Doc A"):
doc = new_doctype("Doc A",
fields=[
{
"label": "DocType",
"fieldname": "doc",
"fieldtype": "Link",
"options": "DocType",
"default": "ToDo"
}
], unique=0)
doc.insert()
# make User Perm on DocType 'ToDo' in Assignment Rule (unrelated doctype)
add_user_permissions(get_params(user, "DocType", "ToDo", applicable=["Assignment Rule"]))
frappe.set_user("[email protected]")
new_doc = frappe.new_doc("Doc A")
# User perm is created on ToDo but for doctype Assignment Rule only
# it should not have impact on Doc A
self.assertEqual(new_doc.doc, "ToDo")
frappe.set_user('Administrator')
remove_applicable(["Assignment Rule"], "[email protected]", "DocType", "ToDo")
def test_user_perm_on_new_doc_with_user_default(self):
"""Test User Perm impact on frappe.new_doc. with *user* default value"""
from frappe.core.doctype.session_default_settings.session_default_settings import (clear_session_defaults,
set_session_default_values)
frappe.set_user('Administrator')
user = create_user("[email protected]", "Blogger")
# make a doctype "Doc A" with 'doctype' link field
if not frappe.db.exists("DocType", "Doc A"):
doc = new_doctype("Doc A",
fields=[
{
"label": "DocType",
"fieldname": "doc",
"fieldtype": "Link",
"options": "DocType",
}
], unique=0)
doc.insert()
# create a 'DocType' session default field
if not frappe.db.exists("Session Default", {"ref_doctype": "DocType"}):
settings = frappe.get_single('Session Default Settings')
settings.append("session_defaults", {
"ref_doctype": "DocType"
})
settings.save()
# make User Perm on DocType 'ToDo' in Assignment Rule (unrelated doctype)
add_user_permissions(get_params(user, "DocType", "ToDo", applicable=["Assignment Rule"]))
# User default Doctype value is ToDo via Session Defaults
frappe.set_user("[email protected]")
set_session_default_values({"doc": "ToDo"})
new_doc = frappe.new_doc("Doc A")
# User perm is created on ToDo but for doctype Assignment Rule only
# it should not have impact on Doc A
self.assertEqual(new_doc.doc, "ToDo")
frappe.set_user('Administrator')
clear_session_defaults()
remove_applicable(["Assignment Rule"], "[email protected]", "DocType", "ToDo")
def create_user(email, *roles):
''' create user with role system manager '''
if frappe.db.exists('User', email):
return frappe.get_doc('User', email)
user = frappe.new_doc('User')
user.email = email
user.first_name = email.split("@")[0]
if not roles:
roles = ('System Manager',)
user.add_roles(*roles)
return user
def get_params(user, doctype, docname, is_default=0, hide_descendants=0, applicable=None):
''' Return param to insert '''
param = {
"user": user.name,
"doctype":doctype,
"docname":docname,
"is_default": is_default,
"apply_to_all_doctypes": 1,
"applicable_doctypes": [],
"hide_descendants": hide_descendants
}
if applicable:
param.update({"apply_to_all_doctypes": 0})
param.update({"applicable_doctypes": applicable})
return param
def get_exists_param(user, applicable = None):
''' param to check existing Document '''
param = {
"user": user.name,
"allow": "User",
"for_value": user.name,
}
if applicable:
param.update({"applicable_for": applicable})
else:
param.update({"apply_to_all_doctypes": 1})
return param
|
py | 1a442bb3934539a7da1ef26d068720f9319b5e43 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class AutoRestHeadTestServiceConfiguration(Configuration):
"""Configuration for AutoRestHeadTestService.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
"""
def __init__(
self,
credential: "AsyncTokenCredential",
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
super(AutoRestHeadTestServiceConfiguration, self).__init__(**kwargs)
self.credential = credential
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-sample/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
|
py | 1a442c2bf0ea805066d3934e2175dfcc24bb5639 | from math import floor
from tweepy import API, OAuthHandler
from data_analysis.database import session, Tweet
consumer_key = 'dGx62GNqi7Yaj1XIcZgOLNjDb'
consumer_secret = 'ZCE896So7Ba1u96ICwMhulO2QO3oeZ5BeVyfUw1YbIYELzVyJs'
access_token = '1121993185-hGOTr3J40FlKGwWkiNWdeNVrcD4bqqW38SPiM3s'
access_token_secret = 'BAo4d2J24xyXRKFrga6A9MwpTW6bMb5EztfvnL5qv2LvJ'
auth = OAuthHandler(consumer_key,
consumer_secret)
auth.set_access_token(access_token, access_token_secret)
def update_tweets(api, tweets):
"""
This is a method to update our tweets.
`api` is an instance of tweepy.API
`tweets` is a list of all tweets from our database
This method handles high level iteration logic. See `_update_sets` for the
more interesting, updating-of-values, logic
"""
# How many tweets do we have?
len_tweets = len(tweets)
# The Twitter REST API only takes 100 id's at a time. So we need to break
# these into sets of 100, and use the `math.floor` method to get an integer
iterations = floor(len_tweets/100)
# Iterate through the sets of 100s of tweets
for num in range(iterations):
# first number of the set of 100
first_tweet_index = num * 100
# last number of the set of 100
last_tweet_index = first_tweet_index + 99
# Grab the set using index slicing
tweet_set = tweets[first_tweet_index:last_tweet_index]
# Call an the inner method so we avoid code duplication
_update_sets(api, session, tweet_set, num)
# if we can divide perfectly by 100, we're done!
if iterations % 100 == 0:
return
# If we're here, our last set is slightly smaller than 100, so we're
# going to caculate the next number and then grab to the end of the list
last_set_num = iterations * 100
last_set = tweets[last_set_num:]
# pass the last set into our inner method that we used to avoid code
# duplication
_update_sets(api, session, last_set, iterations)
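# A minimal alternative sketch (not part of the original flow): range() with a
# step avoids the separate floor()/remainder bookkeeping used above, assuming
# the same `api` and `session` objects:
#
#   for num, start in enumerate(range(0, len(tweets), 100)):
#       _update_sets(api, session, tweets[start:start + 100], num)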
def _update_sets(api, session, tweet_set, start_num):
"""
Broke out a helper method so we didn't have to repeat the code for our
last set.
This helper method does the heavy lifting for us
"""
# Grab out just the tweet ids using a list comprehension
tweet_ids = [tweet.tid for tweet in tweet_set]
# Using the tweepy api, grab the updated tweets
# `trim_user` drops user data
updated_set = api.statuses_lookup(tweet_ids, trim_user=True)
# iterate through update set
for updated_tweet in updated_set:
# the values we want to update
fav_count = updated_tweet.favorite_count
retweet_count = updated_tweet.retweet_count
        # Get the old tweet using its twitter id (tid for short)
tid = updated_tweet.id
database_tweet = session.query(Tweet).filter_by(tid=tid).one()
# update the tweet information in our database
database_tweet.favorite_count = fav_count
database_tweet.retweet_count = retweet_count
# User feedback
print('index: {}'.format(database_tweet.id))
print('favs: {} \t retweets: {}'.format(fav_count, retweet_count))
# save our changes to the database
session.commit()
def main():
api = API(auth)
# Grab all the tweets
tweets = session.query(Tweet).all()
update_tweets(api, tweets)
if __name__ == '__main__':
main()
|
py | 1a442ee27c8a2a81e2fa3217d837b2ad7b3f609a | import clr
import System
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
def DesignOptionIsPrimary(item):
if hasattr(item, "IsPrimary"): return item.IsPrimary
else: return False
items = UnwrapElement(IN[0])
if isinstance(IN[0], list): OUT = [DesignOptionIsPrimary(x) for x in items]
else: OUT = DesignOptionIsPrimary(items) |
py | 1a442efdb8a5e3f688a4b2ac3c25b0bd39963462 | """
Django settings for Tutiz project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "SECRET_KEYSECRET_KEYSECRET_KEYSECRET_KEYSECRET_KEY"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config("DEBUG")
ALLOWED_HOSTS = config("ALLOWED_HOSTS").split(" ")
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'api',
'accounts',
'rest_framework',
'corsheaders',
]
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": ('rest_framework_simplejwt.authentication.JWTAuthentication',
)
}
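# A hedged sketch of wiring the JWT endpoints that the authentication class
# above expects (this belongs in the project's urls.py; the route paths are
# illustrative, the views are the standard rest_framework_simplejwt ones):
#
#   from rest_framework_simplejwt.views import TokenObtainPairView, TokenRefreshView
#   urlpatterns += [
#       path('api/token/', TokenObtainPairView.as_view()),
#       path('api/token/refresh/', TokenRefreshView.as_view()),
#   ]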
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'TutoringAPI.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'TutoringAPI.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': config('DB_NAME'),
'USER': config("DB_USER"),
'PASSWORD': config("DB_PASSWORD"),
'HOST': config("DB_HOST"),
'PORT': config("DB_PORT")
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
CORS_ALLOW_ALL_ORIGINS = True
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
APPEND_SLASH=False
|
py | 1a442f1d5d6db7d07f20927df203c8adbf66195f | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.exception import NotFound
from oslo_utils import timeutils
import six
class CooldownMixin(object):
"""Utility class to encapsulate Cooldown related logic.
This class is shared between AutoScalingGroup and ScalingPolicy.
This logic includes both cooldown timestamp comparing and scaling in
progress checking.
"""
def _is_scaling_allowed(self):
metadata = self.metadata_get()
if metadata.get('scaling_in_progress'):
return False
try:
# Negative values don't make sense, so they are clamped to zero
cooldown = max(0, self.properties[self.COOLDOWN])
except TypeError:
# If not specified, it will be None, same as cooldown == 0
cooldown = 0
if cooldown != 0:
try:
if 'cooldown' not in metadata:
# Note: this is for supporting old version cooldown logic
if metadata:
last_adjust = next(six.iterkeys(metadata))
if not timeutils.is_older_than(last_adjust, cooldown):
return False
else:
last_adjust = next(six.iterkeys(metadata['cooldown']))
if not timeutils.is_older_than(last_adjust, cooldown):
return False
except ValueError:
# occurs when metadata has only {scaling_in_progress: False}
pass
# Assumes _finished_scaling is called
# after the scaling operation completes
metadata['scaling_in_progress'] = True
self.metadata_set(metadata)
return True
def _finished_scaling(self, cooldown_reason, size_changed=True):
# If we wanted to implement the AutoScaling API like AWS does,
# we could maintain event history here, but since we only need
# the latest event for cooldown, just store that for now
metadata = self.metadata_get()
if size_changed:
now = timeutils.utcnow().isoformat()
metadata['cooldown'] = {now: cooldown_reason}
metadata['scaling_in_progress'] = False
try:
self.metadata_set(metadata)
except NotFound:
pass
|
py | 1a442f514b82ce4739ae0e64dac130f5206cece2 | import django_filters
from django.db.models import Count, Exists, OuterRef, Q
from ...account.models import Address, User
from ..core.filters import EnumFilter, MetadataFilterBase, ObjectTypeFilter
from ..core.types.common import DateRangeInput, IntRangeInput
from ..utils.filters import filter_range_field
from .enums import StaffMemberStatus
def filter_date_joined(qs, _, value):
return filter_range_field(qs, "date_joined__date", value)
def filter_number_of_orders(qs, _, value):
qs = qs.annotate(total_orders=Count("orders"))
return filter_range_field(qs, "total_orders", value)
def filter_placed_orders(qs, _, value):
return filter_range_field(qs, "orders__created__date", value)
def filter_staff_status(qs, _, value):
if value == StaffMemberStatus.ACTIVE:
return qs.filter(is_staff=True, is_active=True)
if value == StaffMemberStatus.DEACTIVATED:
return qs.filter(is_staff=True, is_active=False)
return qs
def filter_user_search(qs, _, value):
if value:
UserAddress = User.addresses.through
addresses = Address.objects.filter(
Q(first_name__trigram_similar=value)
| Q(last_name__trigram_similar=value)
| Q(city__trigram_similar=value)
| Q(country__trigram_similar=value)
| Q(phone=value)
).values("id")
user_addresses = UserAddress.objects.filter(
Exists(addresses.filter(pk=OuterRef("address_id")))
).values("user_id")
qs = qs.filter(
Q(email__trigram_similar=value)
| Q(first_name__trigram_similar=value)
| Q(last_name__trigram_similar=value)
| Q(Exists(user_addresses.filter(user_id=OuterRef("pk"))))
)
return qs
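# Note: the __trigram_similar lookups above require PostgreSQL's pg_trgm
# extension. A minimal migration sketch to enable it (app placement and
# dependencies are illustrative):
#
#   from django.contrib.postgres.operations import TrigramExtension
#
#   class Migration(migrations.Migration):
#       operations = [TrigramExtension()]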
def filter_search(qs, _, value):
if value:
qs = qs.filter(name__trigram_similar=value)
return qs
class CustomerFilter(MetadataFilterBase):
date_joined = ObjectTypeFilter(
input_class=DateRangeInput, method=filter_date_joined
)
number_of_orders = ObjectTypeFilter(
input_class=IntRangeInput, method=filter_number_of_orders
)
placed_orders = ObjectTypeFilter(
input_class=DateRangeInput, method=filter_placed_orders
)
search = django_filters.CharFilter(method=filter_user_search)
class Meta:
model = User
fields = [
"date_joined",
"number_of_orders",
"placed_orders",
"search",
]
class PermissionGroupFilter(django_filters.FilterSet):
search = django_filters.CharFilter(method=filter_search)
class StaffUserFilter(django_filters.FilterSet):
status = EnumFilter(input_class=StaffMemberStatus, method=filter_staff_status)
search = django_filters.CharFilter(method=filter_user_search)
    # TODO - Figure out after permission types
# department = ObjectTypeFilter
class Meta:
model = User
fields = ["status", "search"]
|
py | 1a442fcde102d424d75057d652738dbf787027b9 | import scrapy
from PrayerTimes.items import PrayertimesItem
class PrayerSpider(scrapy.Spider):
name = "PrayerTimes"
start_urls = [
'http://www.mcabayarea.org'
]
def parse(self, response):
prayerCSS='td.Prayer01::text'
prayerTimeCSS='td.Prayer02::text'
prayerTable=response.css('div.box-3.deepest.with-header')
item = PrayertimesItem()
#for idx, val in enumerate(prayerTable.css(prayerCSS).extract()):
item['Prayer'] = prayerTable.css(prayerCSS).extract()
item['IqamaTime'] = prayerTable.css(prayerTimeCSS).re(r'\d+:\d+')
return item
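# A hedged usage note: with this spider registered in the project, a standard
# scrapy invocation such as
#   scrapy crawl PrayerTimes -o times.json
# should emit the scraped prayer/iqama pairs (the -o flag is stock scrapy CLI).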
|
py | 1a44340d0036b725b79191818b5ee613efdea85e | class Research:
# dependency on a low-level module directly
# bad because strongly dependent on e.g. storage type
# def __init__(self, relationships):
# # high-level: find all of john's children
# relations = relationships.relations
# for r in relations:
# if r[0].name == 'John' and r[1] == Relationship.PARENT:
# print(f'John has a child called {r[2].name}.')
def __init__(self, browser):
for p in browser.find_all_children_of("John"):
print(f'John has a child called {p}')
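# A minimal sketch of the high-level abstraction the constructor above depends
# on (the RelationshipBrowser name is an assumption, not from the original);
# a concrete relationship store would implement find_all_children_of and keep
# its storage format private:
class RelationshipBrowser:
    def find_all_children_of(self, name):
        raise NotImplementedError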
|
py | 1a44341584ebd0f6cf02e95f204f22a1898705ca | import os
''' GENERATE SECRET KEY '''
if not os.getenv('SECRET_KEY'):
# Attempt to read the secret from the secret file
# This will fail if the secret has not been written
try:
with open('.ctfd_secret_key', 'rb') as secret:
key = secret.read()
except (OSError, IOError):
key = None
if not key:
key = os.urandom(64)
# Attempt to write the secret file
# This will fail if the filesystem is read-only
try:
with open('.ctfd_secret_key', 'wb') as secret:
secret.write(key)
secret.flush()
except (OSError, IOError):
pass
''' SERVER SETTINGS '''
class Config(object):
"""
CTFd Configuration Object
"""
'''
=== REQUIRED SETTINGS ===
SECRET_KEY:
        The secret value used to create sessions and sign strings. This should be set to a random string. In the
interest of ease, CTFd will automatically create a secret key file for you. If you wish to add this secret key
to your instance you should hard code this value to a random static value.
You can also remove .ctfd_secret_key from the .gitignore file and commit this file into whatever repository
you are using.
http://flask.pocoo.org/docs/latest/quickstart/#sessions
SQLALCHEMY_DATABASE_URI:
The URI that specifies the username, password, hostname, port, and database of the server
used to hold the CTFd database.
e.g. mysql+pymysql://root:<YOUR_PASSWORD_HERE>@localhost/ctfd
CACHE_TYPE:
Specifies how CTFd should cache configuration values. If CACHE_TYPE is set to 'redis', CTFd will make use
of the REDIS_URL specified in environment variables. You can also choose to hardcode the REDIS_URL here.
It is important that you specify some sort of cache as CTFd uses it to store values received from the database. If
no cache is specified, CTFd will default to a simple per-worker cache. The simple cache cannot be effectively used
with multiple workers.
REDIS_URL is the URL to connect to a Redis server.
e.g. redis://user:password@localhost:6379
http://pythonhosted.org/Flask-Caching/#configuring-flask-caching
'''
SECRET_KEY = os.getenv('SECRET_KEY') or key
DATABASE_URL = os.getenv('DATABASE_URL') or 'sqlite:///{}/ctfd.db'.format(os.path.dirname(os.path.abspath(__file__)))
REDIS_URL = os.getenv('REDIS_URL')
SQLALCHEMY_DATABASE_URI = DATABASE_URL
CACHE_REDIS_URL = REDIS_URL
if CACHE_REDIS_URL:
CACHE_TYPE = 'redis'
else:
CACHE_TYPE = 'filesystem'
CACHE_DIR = os.path.join(os.path.dirname(__file__), os.pardir, '.data', 'filesystem_cache')
CACHE_THRESHOLD = 0 # Override the threshold of cached values on the filesystem. The default is 500. Don't change unless you know what you're doing.
'''
=== SECURITY ===
SESSION_COOKIE_HTTPONLY:
Controls if cookies should be set with the HttpOnly flag.
PERMANENT_SESSION_LIFETIME:
The lifetime of a session. The default is 604800 seconds.
TRUSTED_PROXIES:
Defines a set of regular expressions used for finding a user's IP address if the CTFd instance
is behind a proxy. If you are running a CTF and users are on the same network as you, you may choose to remove
some proxies from the list.
CTFd only uses IP addresses for cursory tracking purposes. It is ill-advised to do anything complicated based
solely on IP addresses unless you know what you are doing.
'''
SESSION_COOKIE_HTTPONLY = (not os.getenv("SESSION_COOKIE_HTTPONLY")) # Defaults True
SESSION_COOKIE_SAMESITE = os.getenv("SESSION_COOKIE_SAMESITE") or 'Lax'
PERMANENT_SESSION_LIFETIME = int(os.getenv("PERMANENT_SESSION_LIFETIME") or 604800) # 7 days in seconds
TRUSTED_PROXIES = [
r'^127\.0\.0\.1$',
# Remove the following proxies if you do not trust the local network
# For example if you are running a CTF on your laptop and the teams are
# all on the same network
r'^::1$',
r'^fc00:',
r'^10\.',
r'^172\.(1[6-9]|2[0-9]|3[0-1])\.',
r'^192\.168\.'
]
'''
=== EMAIL ===
MAILFROM_ADDR:
The email address that emails are sent from if not overridden in the configuration panel.
MAIL_SERVER:
        The mail server that emails are sent from if not overridden in the configuration panel.
    MAIL_PORT:
        The mail port that emails are sent from if not overridden in the configuration panel.
MAIL_USEAUTH
Whether or not to use username and password to authenticate to the SMTP server
MAIL_USERNAME
The username used to authenticate to the SMTP server if MAIL_USEAUTH is defined
MAIL_PASSWORD
The password used to authenticate to the SMTP server if MAIL_USEAUTH is defined
MAIL_TLS
Whether to connect to the SMTP server over TLS
MAIL_SSL
Whether to connect to the SMTP server over SSL
MAILGUN_API_KEY
Mailgun API key to send email over Mailgun
MAILGUN_BASE_URL
Mailgun base url to send email over Mailgun
'''
MAILFROM_ADDR = os.getenv("MAILFROM_ADDR") or "[email protected]"
MAIL_SERVER = os.getenv("MAIL_SERVER") or None
MAIL_PORT = os.getenv("MAIL_PORT")
MAIL_USEAUTH = os.getenv("MAIL_USEAUTH")
MAIL_USERNAME = os.getenv("MAIL_USERNAME")
MAIL_PASSWORD = os.getenv("MAIL_PASSWORD")
MAIL_TLS = os.getenv("MAIL_TLS") or False
MAIL_SSL = os.getenv("MAIL_SSL") or False
MAILGUN_API_KEY = os.getenv("MAILGUN_API_KEY")
MAILGUN_BASE_URL = os.getenv("MAILGUN_BASE_URL")
'''
=== LOGS ===
LOG_FOLDER:
The location where logs are written. These are the logs for CTFd key submissions, registrations, and logins.
The default location is the CTFd/logs folder.
'''
LOG_FOLDER = os.getenv('LOG_FOLDER') or os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logs')
'''
=== UPLOADS ===
UPLOAD_PROVIDER:
Specifies the service that CTFd should use to store files.
UPLOAD_FOLDER:
The location where files are uploaded. The default destination is the CTFd/uploads folder.
AWS_ACCESS_KEY_ID:
AWS access token used to authenticate to the S3 bucket.
AWS_SECRET_ACCESS_KEY:
AWS secret token used to authenticate to the S3 bucket.
AWS_S3_BUCKET:
The unique identifier for your S3 bucket.
AWS_S3_ENDPOINT_URL:
A URL pointing to a custom S3 implementation.
'''
UPLOAD_PROVIDER = os.getenv('UPLOAD_PROVIDER') or 'filesystem'
UPLOAD_FOLDER = os.getenv('UPLOAD_FOLDER') or os.path.join(os.path.dirname(os.path.abspath(__file__)), 'uploads')
if UPLOAD_PROVIDER == 's3':
AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
AWS_S3_BUCKET = os.getenv('AWS_S3_BUCKET')
AWS_S3_ENDPOINT_URL = os.getenv('AWS_S3_ENDPOINT_URL')
'''
=== OPTIONAL ===
REVERSE_PROXY:
Specifies whether CTFd is behind a reverse proxy or not. Set to True if using a reverse proxy like nginx.
TEMPLATES_AUTO_RELOAD:
Specifies whether Flask should check for modifications to templates and reload them automatically.
SQLALCHEMY_TRACK_MODIFICATIONS:
Automatically disabled to suppress warnings and save memory. You should only enable this if you need it.
UPDATE_CHECK:
Specifies whether or not CTFd will check whether or not there is a new version of CTFd
APPLICATION_ROOT:
Specifies what path CTFd is mounted under. It can be used to run CTFd in a subdirectory.
Example: /ctfd
SOCKETIO_ASYNC_MODE:
Specifies what async mode SocketIO should use. The simplest but least performant option is 'threading'.
Switching to a different async mode is not recommended without the appropriate load balancing mechanisms
in place and proper understanding of how websockets are supported by Flask.
https://flask-socketio.readthedocs.io/en/latest/#deployment
'''
REVERSE_PROXY = os.getenv("REVERSE_PROXY") or False
TEMPLATES_AUTO_RELOAD = (not os.getenv("TEMPLATES_AUTO_RELOAD")) # Defaults True
SQLALCHEMY_TRACK_MODIFICATIONS = (not os.getenv("SQLALCHEMY_TRACK_MODIFICATIONS")) # Defaults True
UPDATE_CHECK = (not os.getenv("UPDATE_CHECK")) # Defaults True
APPLICATION_ROOT = os.getenv('APPLICATION_ROOT') or '/'
SOCKETIO_ASYNC_MODE = os.getenv('SOCKETIO_ASYNC_MODE') or 'threading'
'''
=== OAUTH ===
MajorLeagueCyber Integration
Register an event at https://majorleaguecyber.org/ and use the Client ID and Client Secret here
'''
OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
class TestingConfig(Config):
SECRET_KEY = 'AAAAAAAAAAAAAAAAAAAA'
PRESERVE_CONTEXT_ON_EXCEPTION = False
TESTING = True
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv('TESTING_DATABASE_URL') or 'sqlite://'
SERVER_NAME = 'localhost'
UPDATE_CHECK = False
REDIS_URL = None
CACHE_TYPE = 'simple'
CACHE_THRESHOLD = 500
SAFE_MODE = True
|
py | 1a44342d8cf476dd1c945319d7cad8de8324cf7d | """ conver arff to csv format"""
import csv
import pandas as pd
def function_arfftocsv(source: str, dest: str = 'processed.csv'):
"""this function deletes @ and empty lines so that produce a no-header csv"""
fp = open(source)
    rdr = csv.reader(filter(lambda row: len(row) > 1 and row[0] != '@', fp))
with open(dest,'w', newline = '') as csvfile:
filewriter = csv.writer(csvfile)
for row in rdr:
filewriter.writerow(row)
fp.close()
# this function adds the headers specified in labels argument
def function_labelize(dest: str, labels: list, source: str = 'processed.csv') -> pd.DataFrame:
"""This function takes a destination dir, a source dir, the labels to add
and returns a dataframe with all labels for each column"""
df = pd.read_csv(source, names=labels,index_col=False, na_values='?', sep=',')
df.to_csv(dest, header=True, index_label=False, index=False)
return df
def function_dataEncoding(df: pd.DataFrame, labels: list, to_replace: dict, values: dict,
path: str) -> pd.DataFrame:
"""this function encodes explicitly the nominal values of specified labels
and returns the dataframe with this columns"""
for label in labels:# for each label we want to convert
df[label] = df[label].replace(to_replace[label], values[label])#replace the given values
df[labels].to_csv(path, header= True, index_label= False, index= False)#save as csv
return df[labels] # return the dataFrame
def processing ( all_labels: list,labels: list, to_replace: dict, values: dict,
path: str = 'all.csv', source: str = 'diabetes_paper_fazakis.csv',
des: str ='Finaldata.csv')-> pd.DataFrame:
"""this function places the labels for each model and converts categorical to numerical data"""
function_arfftocsv(source) # transform arff to csv
df = function_labelize(des, all_labels) # add column labels
return function_dataEncoding(df, labels, to_replace, values, path)# encode categorical and return dFrame
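# A hedged usage sketch (the column names and encoding maps below are
# illustrative, not taken from the original dataset):
#
#   cols = ['preg', 'plas', 'pres', 'skin', 'insu', 'mass', 'pedi', 'age', 'class']
#   processing(all_labels=cols, labels=['class'],
#              to_replace={'class': ['tested_negative', 'tested_positive']},
#              values={'class': [0, 1]})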
|
py | 1a443447db82f34f83cf3c48f9982ac7f476b305 | from django.shortcuts import render, get_object_or_404, HttpResponseRedirect
from treasure_hunt.models import Level, UserProfile
from django.contrib.auth.decorators import login_required
import django
django.setup() #Hack to fix Models not ready error
def index(request):
return render(request, 'treasurehunt/treasurehunt_index.html')
@login_required
def display_level(request, level):
level_object = get_object_or_404(Level, level_number__exact=level)
current_user = request.user.profile
if request.method == 'GET':
if int(current_user.current_level) <= int(level):
return render(request, 'treasurehunt/treasurehunt_level.html', {'level_object': level_object})
else:
return HttpResponseRedirect('/treasurehunt/level/%d' % current_user.current_level)
else:
level_answer = str(level_object.answer)
user_answer = str(request.POST['answer'])
if level_answer == user_answer and int(current_user.current_level) == int(level):
#Make sure that user level is updated only once for every level
current_user.current_level += 1
current_user.save(update_fields=['current_level'])
return HttpResponseRedirect('/treasurehunt/level/%d' % (int(level) + 1))
return HttpResponseRedirect('/treasurehunt/level/%d' % int(level))
@login_required
def display_leaderboard(request):
users = UserProfile.objects.all().order_by('-current_level')
return render(request, 'treasurehunt/treasurehunt_leaderboard.html', {'users': users})
|
py | 1a44349c61538160bae4ee75ebbf71c6608091bb | """Test the API's checkout process over full digital orders."""
import graphene
import pytest
from ....account.models import Address
from ....checkout.error_codes import CheckoutErrorCode
from ....checkout.fetch import fetch_checkout_info, fetch_checkout_lines
from ....checkout.models import Checkout
from ....checkout.utils import add_variant_to_checkout
from ....plugins.manager import get_plugins_manager
from ...core.utils import to_global_id_or_none
from ...tests.utils import get_graphql_content
from ..mutations.utils import update_checkout_shipping_method_if_invalid
from .test_checkout import (
MUTATION_CHECKOUT_CREATE,
MUTATION_CHECKOUT_SHIPPING_ADDRESS_UPDATE,
MUTATION_UPDATE_SHIPPING_METHOD,
)
from .test_checkout_lines import (
MUTATION_CHECKOUT_LINE_DELETE,
MUTATION_CHECKOUT_LINES_UPDATE,
)
@pytest.mark.parametrize("with_shipping_address", (True, False))
def test_create_checkout(
api_client,
digital_content,
graphql_address_data,
with_shipping_address,
channel_USD,
):
"""Test creating a checkout with a shipping address gets the address ignored."""
address_count = Address.objects.count()
variant = digital_content.product_variant
variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
checkout_input = {
"channel": channel_USD.slug,
"lines": [{"quantity": 1, "variantId": variant_id}],
"email": "[email protected]",
}
if with_shipping_address:
checkout_input["shippingAddress"] = graphql_address_data
get_graphql_content(
api_client.post_graphql(
MUTATION_CHECKOUT_CREATE, {"checkoutInput": checkout_input}
)
)["data"]["checkoutCreate"]
# Retrieve the created checkout
checkout = Checkout.objects.get()
# Check that the shipping address was ignored, thus not created
assert (
checkout.shipping_address is None
), "The address shouldn't have been associated"
assert (
Address.objects.count() == address_count
), "No address should have been created"
def test_checkout_has_no_available_shipping_methods(
api_client, checkout_with_digital_item, address, shipping_zone
):
"""Test no shipping method are available on digital orders."""
query = """
query getCheckout($id: ID!) {
checkout(id: $id) {
availableShippingMethods {
name
price {
amount
}
}
}
}
"""
checkout = checkout_with_digital_item
# Put a shipping address, to ensure it is still handled properly
checkout.shipping_address = address
checkout.save(update_fields=["shipping_address"])
variables = {"id": to_global_id_or_none(checkout)}
response = api_client.post_graphql(query, variables)
content = get_graphql_content(response)
data = content["data"]["checkout"]
assert len(data["availableShippingMethods"]) == 0
def test_checkout_update_shipping_address(
api_client, checkout_with_digital_item, graphql_address_data
):
"""Test updating the shipping address of a digital order throws an error."""
checkout = checkout_with_digital_item
variables = {
"id": to_global_id_or_none(checkout),
"shippingAddress": graphql_address_data,
}
response = api_client.post_graphql(
MUTATION_CHECKOUT_SHIPPING_ADDRESS_UPDATE, variables
)
content = get_graphql_content(response)
data = content["data"]["checkoutShippingAddressUpdate"]
assert data["errors"] == [
{
"field": "shippingAddress",
"message": "This checkout doesn't need shipping",
"code": CheckoutErrorCode.SHIPPING_NOT_REQUIRED.name,
}
]
# Ensure the address was unchanged
checkout.refresh_from_db(fields=["shipping_address"])
assert checkout.shipping_address is None
def test_checkout_update_shipping_method(
api_client, checkout_with_digital_item, address, shipping_method
):
"""Test updating the shipping method of a digital order throws an error."""
checkout = checkout_with_digital_item
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.pk)
variables = {"id": to_global_id_or_none(checkout), "shippingMethodId": method_id}
# Put a shipping address, to ensure it is still handled properly
checkout.shipping_address = address
checkout.save(update_fields=["shipping_address"])
response = api_client.post_graphql(MUTATION_UPDATE_SHIPPING_METHOD, variables)
content = get_graphql_content(response)
data = content["data"]["checkoutShippingMethodUpdate"]
assert data["errors"] == [
{
"field": "shippingMethod",
"message": "This checkout doesn't need shipping",
"code": CheckoutErrorCode.SHIPPING_NOT_REQUIRED.name,
}
]
# Ensure the shipping method was unchanged
checkout.refresh_from_db(fields=["shipping_method"])
assert checkout.shipping_method is None
def test_remove_shipping_method_if_only_digital_in_checkout(
checkout_with_digital_item, address, shipping_method
):
checkout = checkout_with_digital_item
checkout.shipping_address = address
checkout.shipping_method = shipping_method
checkout.save()
assert checkout.shipping_method
manager = get_plugins_manager()
lines, _ = fetch_checkout_lines(checkout)
checkout_info = fetch_checkout_info(checkout, lines, [], manager)
update_checkout_shipping_method_if_invalid(checkout_info, lines)
checkout.refresh_from_db()
assert not checkout.shipping_method
def test_checkout_lines_update_remove_shipping_if_removed_product_with_shipping(
user_api_client, checkout_with_item, digital_content, address, shipping_method
):
checkout = checkout_with_item
digital_variant = digital_content.product_variant
checkout.shipping_address = address
checkout.shipping_method = shipping_method
checkout.save()
checkout_info = fetch_checkout_info(checkout, [], [], get_plugins_manager())
add_variant_to_checkout(checkout_info, digital_variant, 1)
line = checkout.lines.first()
variant = line.variant
variant_id = graphene.Node.to_global_id("ProductVariant", variant.pk)
variables = {
"id": to_global_id_or_none(checkout),
"lines": [{"variantId": variant_id, "quantity": 0}],
}
response = user_api_client.post_graphql(MUTATION_CHECKOUT_LINES_UPDATE, variables)
content = get_graphql_content(response)
data = content["data"]["checkoutLinesUpdate"]
assert not data["errors"]
checkout.refresh_from_db()
assert checkout.lines.count() == 1
assert not checkout.shipping_method
def test_checkout_line_delete_remove_shipping_if_removed_product_with_shipping(
user_api_client, checkout_with_item, digital_content, address, shipping_method
):
checkout = checkout_with_item
digital_variant = digital_content.product_variant
checkout.shipping_address = address
checkout.shipping_method = shipping_method
checkout.save()
checkout_info = fetch_checkout_info(checkout, [], [], get_plugins_manager())
add_variant_to_checkout(checkout_info, digital_variant, 1)
line = checkout.lines.first()
line_id = graphene.Node.to_global_id("CheckoutLine", line.pk)
variables = {"id": to_global_id_or_none(checkout), "lineId": line_id}
response = user_api_client.post_graphql(MUTATION_CHECKOUT_LINE_DELETE, variables)
content = get_graphql_content(response)
data = content["data"]["checkoutLineDelete"]
assert not data["errors"]
checkout.refresh_from_db()
assert checkout.lines.count() == 1
assert not checkout.shipping_method
|
py | 1a4434b9dfc856e843f34638ff5b49f92828a32f | from apodeixi.controllers.util.manifest_api import ManifestAPI
from apodeixi.util.a6i_error import ApodeixiError
from apodeixi.util.formatting_utils import StringUtils
from apodeixi.controllers.util.skeleton_controller import SkeletonController
from apodeixi.knowledge_base.filing_coordinates import InitiativesFilingCoordinates
from apodeixi.xli.interval import GreedyIntervalSpec, ClosedOpenIntervalSpec
from apodeixi.xli.posting_controller_utils import PostingConfig
from apodeixi.xli.update_policy import UpdatePolicy
class Workstream_Controller(SkeletonController):
'''
Class to process an Excel posting for initiative workstreams. It produces two YAML manifests:
* The workstream's milestones
* The workstream's metrics
@param store A KnowledgeBaseStore instance. Handles all I/O of postings and manifests for this controller.
@param a6i_config The ApodeixiConfig instance for the Python process in which we are running.
'''
def __init__(self, parent_trace, store, a6i_config):
super().__init__(parent_trace, store, a6i_config)
self.MANIFEST_API = ManifestAPI( parent_trace = parent_trace,
domain = 'initiatives',
subdomain = 'workstream',
api_publisher = 'a6i',
extension = 'io')
self.SUPPORTED_VERSIONS = ['v1a']
self.SUPPORTED_KINDS = ['workstream-milestone', 'workstream-metric']
def getManifestAPI(self):
return self.MANIFEST_API
def getSupportedVersions(self):
return self.SUPPORTED_VERSIONS
def getSupportedKinds(self):
return self.SUPPORTED_KINDS
def getPostingConfig(self, parent_trace, kind, manifest_nb):
'''
Return a PostingConfig, corresponding to the configuration that this concrete controller supports.
'''
ME = Workstream_Controller
if kind == 'workstream-milestone':
update_policy = UpdatePolicy(reuse_uids=True, merge=False)
xlr_config = ME._WorkstreamMilestoneConfig( update_policy = update_policy,
kind = kind,
manifest_nb = manifest_nb,
controller = self)
elif kind == 'workstream-metric':
update_policy = UpdatePolicy(reuse_uids=True, merge=False)
xlr_config = ME._WorkstreamMetricConfig( update_policy = update_policy,
kind = kind,
manifest_nb = manifest_nb,
controller = self)
else:
raise ApodeixiError(parent_trace, "Invalid domain object '" + kind + "' - should be one of "
+ ", ".join(self.SUPPORTED_KINDS),
origination = {'signaled_from': __file__})
return xlr_config
def getPostingLabel(self, parent_trace):
'''
Returns a PostingLabel, corresponding to the what is expected by this concrete controller class.
'''
ME = Workstream_Controller
return ME._MyPostingLabel(parent_trace, controller = self)
def _buildAllManifests(self, parent_trace, posting_label_handle):
all_manifests_dict, label = super()._buildAllManifests(parent_trace, posting_label_handle)
return all_manifests_dict, label
def subnamespaceFromLabel(self, parent_trace, label):
'''
        Helper method that returns the 'subnamespace' that forms a portion of a manifest's name.
It is inferred from a `label` that provides the posting details for a manifest that should be created.
Returns a string corresponding to the subnamespace, if one applies to this `kind` of manifest.
If no subnamespace applies, returns None.
'''
program = label.program (parent_trace)
FMT = StringUtils().format_as_yaml_fieldname # Abbreviation for readability
return FMT(program)
def manifestNameFromLabel(self, parent_trace, label, kind):
'''
Helper method that returns what the 'name' field should be in the manifest to be created with the given
label
@param kind The kind of manifest for which the name is sought. This parameter can be ignored for controller
classes that use the same name for all supported kinds; it is meant to support controllers that
process multiple manifest kinds and do not use the same name for all of them. For example, controllers
that point to reference data in a different domain/sub-domain.
'''
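        # Example (values illustrative): program="Modernization", scoring_cycle="Dec 2020",
        # initiative="Transformation", workstream_UID="W2", scenario="Default"
        # would yield a name like "modernization.dec-2020.transformation.w2.default".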
program = label.program (parent_trace)
workstream_UID = label.workstream_UID (parent_trace)
initiative = label.initiative (parent_trace)
scenario = label.scenario (parent_trace)
scoring_cycle = label.scoring_cycle (parent_trace)
FMT = StringUtils().format_as_yaml_fieldname # Abbreviation for readability
name = FMT(program + '.' + scoring_cycle + '.' + initiative + '.'
+ workstream_UID + '.' + scenario)
return name
def manifestNameFromCoords(self, parent_trace, subnamespace, coords, kind):
'''
Helper method that returns what the 'name' field should be in the manifest to be created with the given
filing coords, possibly complemented by the subnamespace.
Usually used in the context of generating forms.
Example: consider a manifest name like "modernization.dec-2020.fusionopus.default"
in namespace "my-corp.production".
To build such a name, this method must receive "modernization" as the subnamespace, and
filing coords from which to infer "dec-20220", "fusionopus", and "default".
        @param subnamespace A string, which is allowed to be None. If not null, this is a further partitioning of
the namespace into finer slices, and a manifest's name is supposed to identify the slice
in which the manifest resides.
@param coords A FilingCoords object corresponding to this controller. It is used, possibly along with the
`subnamespace` parameter, to build a manifest name.
@param kind The kind of manifest for which the name is sought. This parameter can be ignored for controller
classes that use the same name for all supported kinds; it is meant to support controllers that
process multiple manifest kinds and do not use the same name for all of them. For example, controllers
that point to reference data in a different domain/sub-domain.
'''
if not type(coords) == InitiativesFilingCoordinates:
raise ApodeixiError(parent_trace, "Can't build manifest name because received wrong type of filing coordinates",
data = {"Type of coords received": str(type(coords)),
"Expected type of coords": "InitiativesFilingCoordinates"})
workstream_UID = coords.workstream_UID
program = subnamespace
initiative = coords.initiative
scenario = coords.scenario
scoring_cycle = coords.scoring_cycle
FMT = StringUtils().format_as_yaml_fieldname # Abbreviation for readability
name = FMT(program + '.' + scoring_cycle + '.' + initiative + '.'
+ workstream_UID + '.' + scenario)
return name
def manifestLabelsFromCoords(self, parent_trace, subnamespace, coords):
'''
        Helper method that returns a dict whose keys are label field names that should be populated
        inside a manifest based on the parameters, and whose values are what the value should be for each label.
Usually used in the context of generating forms.
Example: consider a manifest name like "modernization.dec-2020.fusionopus.default"
in namespace "my-corp.production", that arose from a posting for product "Fusion Opus",
scoring cycle "Dec 2020" and scenario "Default".
        Then this method returns a dict mapping the label fields to "modernization", "Dec 2020", "Fusion Opus" and "Default".
        @param subnamespace A string, which is allowed to be None. If not null, this is a further partitioning of
the namespace into finer slices, and a manifest's name is supposed to identify the slice
in which the manifest resides.
@param coords A FilingCoords object corresponding to this controller. It is used, possibly along with the
`subnamespace` parameter, to build a manifest name.
'''
if not type(coords) == InitiativesFilingCoordinates:
raise ApodeixiError(parent_trace, "Can't build manifest name because received wrong type of filing coordinates",
data = {"Type of coords received": str(type(coords)),
"Expected type of coords": "InitiativesFilingCoordinates"})
workstream_UID = coords.workstream_UID
initiative = subnamespace
scenario = coords.scenario
scoring_cycle = coords.scoring_cycle
MY_PL = Workstream_Controller._MyPostingLabel # Abbreviation for readability
result_dict = {}
result_dict[MY_PL._WORKSTREAM_UID] = workstream_UID
result_dict[MY_PL._INITIATIVE] = initiative
result_dict[MY_PL._SCENARIO] = scenario
result_dict[MY_PL._SCORING_CYCLE] = scoring_cycle
return result_dict
def _buildOneManifest(self, parent_trace, posting_data_handle, label):
'''
        Helper function, amenable to unit testing, unlike the enveloping controller `apply` function that requires a knowledge
        base structure
'''
manifest_dict = super()._buildOneManifest(parent_trace, posting_data_handle, label)
my_trace = parent_trace.doing("Getting PostingLabel fields specific to Workstream_Controller")
workstream_UID = label.workstream_UID (my_trace)
workstream_title = label.workstream_title (my_trace)
program = label.program (my_trace)
initiative = label.initiative (my_trace)
scenario = label.scenario (my_trace)
scoring_cycle = label.scoring_cycle (my_trace)
scoring_maturity = label.scoring_maturity (my_trace)
my_trace = parent_trace.doing("Enriching generic manifest fields with additional fields "
+ "specific to Workstream_Controller")
if True:
metadata = manifest_dict['metadata']
MY_PL = Workstream_Controller._MyPostingLabel # Abbreviation for readability
labels = metadata['labels']
labels[MY_PL._WORKSTREAM_UID] = workstream_UID
labels[MY_PL._WORKSTREAM_TITLE] = workstream_title
labels[MY_PL._PROGRAM] = program
labels[MY_PL._INITIATIVE] = initiative
labels[MY_PL._SCENARIO] = scenario
labels[MY_PL._SCORING_CYCLE] = scoring_cycle
labels[MY_PL._SCORING_MATURITY] = scoring_maturity
assertion = manifest_dict['assertion']
assertion[MY_PL._WORKSTREAM_UID] = workstream_UID
assertion[MY_PL._WORKSTREAM_TITLE] = workstream_title
assertion[MY_PL._PROGRAM] = program
assertion[MY_PL._INITIATIVE] = initiative
assertion[MY_PL._SCENARIO] = scenario
assertion[MY_PL._SCORING_CYCLE] = scoring_cycle
assertion[MY_PL._SCORING_MATURITY] = scoring_maturity
return manifest_dict
class _WorkstreamMilestoneConfig(PostingConfig):
'''
Codifies the schema and integrity expectations for workstream's milestones
'''
_ENTITY_NAME = 'Theme'
_SPLITTING_COLUMNS = ['Milestone', 'Task', 'Dependency']
def __init__(self, update_policy, kind, manifest_nb, controller):
ME = Workstream_Controller._WorkstreamMilestoneConfig
super().__init__( kind = kind,
update_policy = update_policy,
manifest_nb = manifest_nb,
controller = controller)
interval_spec_milestones = ClosedOpenIntervalSpec( parent_trace = None,
splitting_columns = ME._SPLITTING_COLUMNS,
entity_name = ME._ENTITY_NAME
)
self.interval_spec = interval_spec_milestones
def preflightPostingValidation(self, parent_trace, posted_content_df):
'''
Method performs some initial validation of the `dataframe`, which is intended to be a DataFrame representation of the
data posted in Excel.
The intention for this preflight validation is to provide the user with more user-friendly error messages that
educate the user on what he/she should change in the posting for it to be valid. In the absence of this
preflight validation, the posting error from the user would eventually be caught deeper in the parsing logic,
by which time the error generated might not be too user friendly.
Thus this method is not so much to avoid corruption of the data, since downstream logic will prevent corruption
anyway. Rather, it is to provide usability by outputting high-level user-meaningful error messages.
'''
ME = Workstream_Controller._WorkstreamMilestoneConfig
posted_cols = list(posted_content_df.columns)
mandatory_cols = [ME._ENTITY_NAME]
mandatory_cols.extend(ME._SPLITTING_COLUMNS)
missing_cols = [col for col in mandatory_cols if not col in posted_cols]
if len(missing_cols) > 0:
raise ApodeixiError(parent_trace, "Posting lacks some mandatory columns. This often happens if "
+ "ranges are wrong in Posting Label.",
data = { 'Missing columns': missing_cols,
'Posted columns': posted_cols})
def entity_name(self):
ME = Workstream_Controller._WorkstreamMilestoneConfig
return ME._ENTITY_NAME
class _WorkstreamMetricConfig(PostingConfig):
'''
Codifies the schema and integrity expectations for workstream's metrics
'''
_ENTITY_NAME = 'Metric'
def __init__(self, update_policy, kind, manifest_nb, controller):
ME = Workstream_Controller._WorkstreamMetricConfig
super().__init__( kind = kind,
update_policy = update_policy,
manifest_nb = manifest_nb,
controller = controller)
interval_spec_metrics = GreedyIntervalSpec(parent_trace = None, entity_name = ME._ENTITY_NAME)
self.interval_spec = interval_spec_metrics
def preflightPostingValidation(self, parent_trace, posted_content_df):
'''
Method performs some initial validation of the `dataframe`, which is intended to be a DataFrame representation of the
data posted in Excel.
The intention for this preflight validation is to provide the user with more user-friendly error messages that
educate the user on what he/she should change in the posting for it to be valid. In the absence of this
preflight validation, the posting error from the user would eventually be caught deeper in the parsing logic,
by which time the error generated might not be too user friendly.
Thus this method is not so much to avoid corruption of the data, since downstream logic will prevent corruption
anyway. Rather, it is to provide usability by outputting high-level user-meaningful error messages.
'''
ME = Workstream_Controller._WorkstreamMetricConfig
posted_cols = list(posted_content_df.columns)
mandatory_cols = [ME._ENTITY_NAME]
missing_cols = [col for col in mandatory_cols if not col in posted_cols]
if len(missing_cols) > 0:
raise ApodeixiError(parent_trace, "Posting lacks some mandatory columns. This often happens if "
+ "ranges are wrong in Posting Label.",
data = { 'Missing columns': missing_cols,
'Posted columns': posted_cols})
def entity_name(self):
ME = Workstream_Controller._WorkstreamMetricConfig
return ME._ENTITY_NAME
class _MyPostingLabel(SkeletonController._MyPostingLabel):
'''
Codifies the schema expectations for the posting label when posting a workstream.
'''
_WORKSTREAM_UID = "workstreamUID"
_WORKSTREAM_TITLE = "workstreamTitle"
_PROGRAM = "program"
_INITIATIVE = "initiative"
_SCENARIO = "scenario"
_SCORING_CYCLE = "scoringCycle"
_SCORING_MATURITY = "scoringMaturity"
def __init__(self, parent_trace, controller):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
super().__init__( parent_trace = parent_trace,
controller = controller,
mandatory_fields = [ ME._PROGRAM, ME._WORKSTREAM_UID, ME._INITIATIVE, ME._SCENARIO, # Determine name
ME._WORKSTREAM_TITLE,
ME._SCORING_CYCLE, ME._SCORING_MATURITY],
date_fields = [])
def program(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._PROGRAM)
def workstream_UID(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._WORKSTREAM_UID)
def workstream_title(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._WORKSTREAM_TITLE)
def initiative(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._INITIATIVE)
def scenario(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._SCENARIO)
def scoring_cycle(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._SCORING_CYCLE)
def scoring_maturity(self, parent_trace):
# Shortcut to reference class static variables
ME = Workstream_Controller._MyPostingLabel
return self._getField(parent_trace, ME._SCORING_MATURITY)
|
py | 1a44359f99548bac7ee372df184b6ac6f992ab1c | #
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
# This file is based on an earlier version of pretix which was released under the Apache License 2.0. The full text of
# the Apache License 2.0 can be obtained at <http://www.apache.org/licenses/LICENSE-2.0>.
#
# This file may have since been changed and any changes are released under the terms of AGPLv3 as described above. A
# full history of changes and contributors is available at <https://github.com/pretix/pretix>.
#
# This file contains Apache-licensed contributions copyrighted by: Bolutife Lawrence, Maico Timmerman
#
# Unless required by applicable law or agreed to in writing, software distributed under the Apache License 2.0 is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under the License.
from decimal import Decimal
from urllib.parse import urlparse
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.utils.crypto import get_random_string
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_scopes.forms import SafeModelMultipleChoiceField
from i18nfield.forms import I18nFormField, I18nTextarea
from pytz import common_timezones
from pretix.api.models import WebHook
from pretix.api.webhooks import get_all_webhook_events
from pretix.base.forms import I18nModelForm, PlaceholderValidator, SettingsForm
from pretix.base.forms.questions import NamePartsFormField
from pretix.base.forms.widgets import SplitDateTimePickerWidget
from pretix.base.models import (
Customer, Device, EventMetaProperty, Gate, GiftCard, Membership,
MembershipType, Organizer, Team,
)
from pretix.base.settings import PERSON_NAME_SCHEMES, PERSON_NAME_TITLE_GROUPS
from pretix.control.forms import (
ExtFileField, SMTPSettingsMixin, SplitDateTimeField,
)
from pretix.control.forms.event import (
SafeEventMultipleChoiceField, multimail_validate,
)
from pretix.multidomain.models import KnownDomain
from pretix.multidomain.urlreverse import build_absolute_uri
class OrganizerForm(I18nModelForm):
error_messages = {
'duplicate_slug': _("This slug is already in use. Please choose a different one."),
}
class Meta:
model = Organizer
fields = ['name', 'slug']
def clean_slug(self):
slug = self.cleaned_data['slug']
if Organizer.objects.filter(slug__iexact=slug).exists():
raise forms.ValidationError(
self.error_messages['duplicate_slug'],
code='duplicate_slug',
)
return slug
class OrganizerDeleteForm(forms.Form):
error_messages = {
'slug_wrong': _("The slug you entered was not correct."),
}
slug = forms.CharField(
max_length=255,
label=_("Event slug"),
)
def __init__(self, *args, **kwargs):
self.organizer = kwargs.pop('organizer')
super().__init__(*args, **kwargs)
def clean_slug(self):
slug = self.cleaned_data.get('slug')
if slug != self.organizer.slug:
raise forms.ValidationError(
self.error_messages['slug_wrong'],
code='slug_wrong',
)
return slug
class OrganizerUpdateForm(OrganizerForm):
def __init__(self, *args, **kwargs):
self.domain = kwargs.pop('domain', False)
self.change_slug = kwargs.pop('change_slug', False)
kwargs.setdefault('initial', {})
self.instance = kwargs['instance']
if self.domain and self.instance:
initial_domain = self.instance.domains.filter(event__isnull=True).first()
if initial_domain:
kwargs['initial'].setdefault('domain', initial_domain.domainname)
super().__init__(*args, **kwargs)
if not self.change_slug:
self.fields['slug'].widget.attrs['readonly'] = 'readonly'
if self.domain:
self.fields['domain'] = forms.CharField(
max_length=255,
label=_('Custom domain'),
required=False,
help_text=_('You need to configure the custom domain in the webserver beforehand.')
)
def clean_domain(self):
d = self.cleaned_data['domain']
if d:
if d == urlparse(settings.SITE_URL).hostname:
raise ValidationError(
_('You cannot choose the base domain of this installation.')
)
if KnownDomain.objects.filter(domainname=d).exclude(organizer=self.instance.pk,
event__isnull=True).exists():
raise ValidationError(
_('This domain is already in use for a different event or organizer.')
)
return d
def clean_slug(self):
if self.change_slug:
return self.cleaned_data['slug']
return self.instance.slug
def save(self, commit=True):
instance = super().save(commit)
if self.domain:
current_domain = instance.domains.first()
if self.cleaned_data['domain']:
if current_domain and current_domain.domainname != self.cleaned_data['domain']:
current_domain.delete()
KnownDomain.objects.create(organizer=instance, domainname=self.cleaned_data['domain'])
elif not current_domain:
KnownDomain.objects.create(organizer=instance, domainname=self.cleaned_data['domain'])
elif current_domain:
current_domain.delete()
instance.cache.clear()
for ev in instance.events.all():
ev.cache.clear()
return instance
class EventMetaPropertyForm(forms.ModelForm):
class Meta:
model = EventMetaProperty
fields = ['name', 'default', 'required', 'protected', 'allowed_values']
widgets = {
'default': forms.TextInput()
}
class MembershipTypeForm(I18nModelForm):
class Meta:
model = MembershipType
fields = ['name', 'transferable', 'allow_parallel_usage', 'max_usages']
class TeamForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
organizer = kwargs.pop('organizer')
super().__init__(*args, **kwargs)
self.fields['limit_events'].queryset = organizer.events.all().order_by(
'-has_subevents', '-date_from'
)
class Meta:
model = Team
fields = ['name', 'all_events', 'limit_events', 'can_create_events',
'can_change_teams', 'can_change_organizer_settings',
'can_manage_gift_cards', 'can_manage_customers',
'can_change_event_settings', 'can_change_items',
'can_view_orders', 'can_change_orders', 'can_checkin_orders',
'can_view_vouchers', 'can_change_vouchers']
widgets = {
'limit_events': forms.CheckboxSelectMultiple(attrs={
'data-inverse-dependency': '#id_all_events',
'class': 'scrolling-multiple-choice scrolling-multiple-choice-large',
}),
}
field_classes = {
'limit_events': SafeEventMultipleChoiceField
}
def clean(self):
data = super().clean()
if self.instance.pk and not data['can_change_teams']:
if not self.instance.organizer.teams.exclude(pk=self.instance.pk).filter(
can_change_teams=True, members__isnull=False
).exists():
raise ValidationError(_('The changes could not be saved because there would be no remaining team with '
'the permission to change teams and permissions.'))
return data
class GateForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
kwargs.pop('organizer')
super().__init__(*args, **kwargs)
class Meta:
model = Gate
fields = ['name', 'identifier']
class DeviceForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
organizer = kwargs.pop('organizer')
super().__init__(*args, **kwargs)
self.fields['limit_events'].queryset = organizer.events.all().order_by(
'-has_subevents', '-date_from'
)
self.fields['gate'].queryset = organizer.gates.all()
def clean(self):
d = super().clean()
if not d['all_events'] and not d['limit_events']:
raise ValidationError(_('Your device will not have access to anything, please select some events.'))
return d
class Meta:
model = Device
fields = ['name', 'all_events', 'limit_events', 'security_profile', 'gate']
widgets = {
'limit_events': forms.CheckboxSelectMultiple(attrs={
'data-inverse-dependency': '#id_all_events',
'class': 'scrolling-multiple-choice scrolling-multiple-choice-large',
}),
}
field_classes = {
'limit_events': SafeEventMultipleChoiceField
}
class OrganizerSettingsForm(SettingsForm):
timezone = forms.ChoiceField(
choices=((a, a) for a in common_timezones),
label=_("Default timezone"),
)
name_scheme = forms.ChoiceField(
label=_("Name format"),
help_text=_("This defines how pretix will ask for human names. Changing this after you already received "
"orders might lead to unexpected behavior when sorting or changing names."),
required=True,
)
name_scheme_titles = forms.ChoiceField(
label=_("Allowed titles"),
help_text=_("If the naming scheme you defined above allows users to input a title, you can use this to "
"restrict the set of selectable titles."),
required=False,
)
auto_fields = [
'customer_accounts',
'customer_accounts_link_by_email',
'invoice_regenerate_allowed',
'contact_mail',
'imprint_url',
'organizer_info_text',
'event_list_type',
'event_list_availability',
'organizer_homepage_text',
'organizer_link_back',
'organizer_logo_image_large',
'organizer_logo_image_inherit',
'giftcard_length',
'giftcard_expiry_years',
'locales',
'region',
'meta_noindex',
'event_team_provisioning',
'primary_color',
'theme_color_success',
'theme_color_danger',
'theme_color_background',
'theme_round_borders',
'primary_font',
'privacy_url',
'cookie_consent',
'cookie_consent_dialog_title',
'cookie_consent_dialog_text',
'cookie_consent_dialog_text_secondary',
'cookie_consent_dialog_button_yes',
'cookie_consent_dialog_button_no',
]
organizer_logo_image = ExtFileField(
label=_('Header image'),
ext_whitelist=(".png", ".jpg", ".gif", ".jpeg"),
max_size=settings.FILE_UPLOAD_MAX_SIZE_IMAGE,
required=False,
help_text=_('If you provide a logo image, we will by default not show your organization name '
'in the page header. By default, we show your logo with a size of up to 1140x120 pixels. You '
'can increase the size with the setting below. We recommend not using small details on the picture '
'as it will be resized on smaller screens.')
)
favicon = ExtFileField(
label=_('Favicon'),
ext_whitelist=(".ico", ".png", ".jpg", ".gif", ".jpeg"),
required=False,
max_size=settings.FILE_UPLOAD_MAX_SIZE_FAVICON,
help_text=_('If you provide a favicon, we will show it instead of the default pretix icon. '
'We recommend a size of at least 200x200px to accommodate most devices.')
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['name_scheme'].choices = (
(k, _('Ask for {fields}, display like {example}').format(
fields=' + '.join(str(vv[1]) for vv in v['fields']),
example=v['concatenation'](v['sample'])
))
for k, v in PERSON_NAME_SCHEMES.items()
)
self.fields['name_scheme_titles'].choices = [('', _('Free text input'))] + [
(k, '{scheme}: {samples}'.format(
scheme=v[0],
samples=', '.join(v[1])
))
for k, v in PERSON_NAME_TITLE_GROUPS.items()
]
class MailSettingsForm(SMTPSettingsMixin, SettingsForm):
auto_fields = [
'mail_from',
'mail_from_name',
]
mail_bcc = forms.CharField(
label=_("Bcc address"),
help_text=_("All emails will be sent to this address as a Bcc copy"),
validators=[multimail_validate],
required=False,
max_length=255
)
mail_text_signature = I18nFormField(
label=_("Signature"),
required=False,
widget=I18nTextarea,
help_text=_("This will be attached to every email."),
validators=[PlaceholderValidator([])],
widget_kwargs={'attrs': {
'rows': '4',
'placeholder': _(
'e.g. your contact details'
)
}}
)
mail_text_customer_registration = I18nFormField(
label=_("Text"),
required=False,
widget=I18nTextarea,
)
mail_text_customer_email_change = I18nFormField(
label=_("Text"),
required=False,
widget=I18nTextarea,
)
mail_text_customer_reset = I18nFormField(
label=_("Text"),
required=False,
widget=I18nTextarea,
)
base_context = {
'mail_text_customer_registration': ['customer', 'url'],
'mail_text_customer_email_change': ['customer', 'url'],
'mail_text_customer_reset': ['customer', 'url'],
}
def _get_sample_context(self, base_parameters):
placeholders = {
'organizer': self.organizer.name
}
if 'url' in base_parameters:
placeholders['url'] = build_absolute_uri(
self.organizer,
'presale:organizer.customer.activate'
) + '?token=' + get_random_string(30)
if 'customer' in base_parameters:
placeholders['name'] = pgettext_lazy('person_name_sample', 'John Doe')
name_scheme = PERSON_NAME_SCHEMES[self.organizer.settings.name_scheme]
for f, l, w in name_scheme['fields']:
if f == 'full_name':
continue
placeholders['name_%s' % f] = name_scheme['sample'][f]
return placeholders
def _set_field_placeholders(self, fn, base_parameters):
phs = [
'{%s}' % p
for p in sorted(self._get_sample_context(base_parameters).keys())
]
ht = _('Available placeholders: {list}').format(
list=', '.join(phs)
)
if self.fields[fn].help_text:
self.fields[fn].help_text += ' ' + str(ht)
else:
self.fields[fn].help_text = ht
self.fields[fn].validators.append(
PlaceholderValidator(phs)
)
def __init__(self, *args, **kwargs):
self.organizer = kwargs.get('obj')
super().__init__(*args, **kwargs)
for k, v in self.base_context.items():
self._set_field_placeholders(k, v)
class WebHookForm(forms.ModelForm):
events = forms.MultipleChoiceField(
widget=forms.CheckboxSelectMultiple,
label=pgettext_lazy('webhooks', 'Event types')
)
def __init__(self, *args, **kwargs):
organizer = kwargs.pop('organizer')
super().__init__(*args, **kwargs)
self.fields['limit_events'].queryset = organizer.events.all()
self.fields['events'].choices = [
(
a.action_type,
mark_safe('{} – <code>{}</code>'.format(a.verbose_name, a.action_type))
) for a in get_all_webhook_events().values()
]
if self.instance:
self.fields['events'].initial = list(self.instance.listeners.values_list('action_type', flat=True))
class Meta:
model = WebHook
fields = ['target_url', 'enabled', 'all_events', 'limit_events']
widgets = {
'limit_events': forms.CheckboxSelectMultiple(attrs={
'data-inverse-dependency': '#id_all_events'
}),
}
field_classes = {
'limit_events': SafeModelMultipleChoiceField
}
class GiftCardCreateForm(forms.ModelForm):
value = forms.DecimalField(
label=_('Gift card value'),
min_value=Decimal('0.00')
)
def __init__(self, *args, **kwargs):
self.organizer = kwargs.pop('organizer')
initial = kwargs.pop('initial', {})
initial['expires'] = self.organizer.default_gift_card_expiry
kwargs['initial'] = initial
super().__init__(*args, **kwargs)
def clean_secret(self):
s = self.cleaned_data['secret']
if GiftCard.objects.filter(
secret__iexact=s
).filter(
Q(issuer=self.organizer) | Q(issuer__gift_card_collector_acceptance__collector=self.organizer)
).exists():
raise ValidationError(
_('A gift card with the same secret already exists in your or an affiliated organizer account.')
)
return s
class Meta:
model = GiftCard
fields = ['secret', 'currency', 'testmode', 'expires', 'conditions']
field_classes = {
'expires': SplitDateTimeField
}
widgets = {
'expires': SplitDateTimePickerWidget,
'conditions': forms.Textarea(attrs={"rows": 2})
}
class GiftCardUpdateForm(forms.ModelForm):
class Meta:
model = GiftCard
fields = ['expires', 'conditions']
field_classes = {
'expires': SplitDateTimeField
}
widgets = {
'expires': SplitDateTimePickerWidget,
'conditions': forms.Textarea(attrs={"rows": 2})
}
class CustomerUpdateForm(forms.ModelForm):
error_messages = {
'duplicate': _("An account with this email address is already registered."),
}
class Meta:
model = Customer
fields = ['is_active', 'name_parts', 'email', 'is_verified', 'locale']
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['name_parts'] = NamePartsFormField(
max_length=255,
required=False,
scheme=self.instance.organizer.settings.name_scheme,
titles=self.instance.organizer.settings.name_scheme_titles,
label=_('Name'),
)
def clean(self):
email = self.cleaned_data.get('email')
if email is not None:
try:
self.instance.organizer.customers.exclude(pk=self.instance.pk).get(email=email)
except Customer.DoesNotExist:
pass
else:
raise forms.ValidationError(
self.error_messages['duplicate'],
code='duplicate',
)
return self.cleaned_data
class CustomerCreateForm(CustomerUpdateForm):
class Meta:
model = Customer
fields = ['identifier', 'is_active', 'name_parts', 'email', 'is_verified', 'locale']
class MembershipUpdateForm(forms.ModelForm):
class Meta:
model = Membership
fields = ['testmode', 'membership_type', 'date_start', 'date_end', 'attendee_name_parts', 'canceled']
field_classes = {
'date_start': SplitDateTimeField,
'date_end': SplitDateTimeField,
}
widgets = {
'date_start': SplitDateTimePickerWidget(),
            'date_end': SplitDateTimePickerWidget(attrs={'data-date-after': '#id_date_start'}),
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.instance and self.instance.pk:
del self.fields['testmode']
self.fields['membership_type'].queryset = self.instance.customer.organizer.membership_types.all()
self.fields['attendee_name_parts'] = NamePartsFormField(
max_length=255,
required=False,
scheme=self.instance.customer.organizer.settings.name_scheme,
titles=self.instance.customer.organizer.settings.name_scheme_titles,
label=_('Attendee name'),
)
|
py | 1a44378750c130fddb9d42066189c129090ece2a | import tensorflow as tf
from malaya.text.function import entities_textcleaning, tag_chunk
from malaya.text.bpe import (
xlnet_tokenization_siamese,
xlnet_tokenization,
padding_sequence,
merge_sentencepiece_tokens,
merge_sentencepiece_tokens_tagging,
parse_bert_tagging,
)
from malaya.function import add_neutral as neutral
from malaya.function.parse_dependency import DependencyGraph
from malaya.function.html import (
_render_binary,
_render_toxic,
_render_emotion,
_render_relevancy,
)
import numpy as np
from collections import defaultdict
from herpetologist import check_type
from typing import List, Tuple
render_dict = {
'sentiment': _render_binary,
'relevancy': _render_relevancy,
'emotion': _render_emotion,
'toxic': _render_toxic,
'subjective': _render_binary,
}
class BASE:
def __init__(
self,
X,
logits,
segment_ids,
input_masks,
vectorizer,
sess,
tokenizer,
label = ['negative', 'positive'],
):
self._X = X
self._logits = logits
self._segment_ids = segment_ids
self._input_masks = input_masks
self._vectorizer = vectorizer
self._sess = sess
self._tokenizer = tokenizer
self._label = label
class XLNET(BASE):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
logits_seq,
vectorizer,
sess,
tokenizer,
attns,
class_name,
label = ['negative', 'positive'],
):
BASE.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
vectorizer = vectorizer,
logits = logits,
sess = sess,
tokenizer = tokenizer,
label = label,
)
self._attns = attns
self._logits_seq = logits_seq
self._class_name = class_name
self._softmax = tf.nn.softmax(self._logits)
self._softmax_seq = tf.nn.softmax(self._logits_seq)
def _classify(self, strings):
input_ids, input_masks, segment_ids, _ = xlnet_tokenization(
self._tokenizer, strings
)
return self._sess.run(
self._softmax,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
def _vectorize(self, strings, method = 'first'):
method = method.lower()
if method not in ['first', 'last', 'mean', 'word']:
raise ValueError(
"method not supported, only support 'first', 'last', 'mean' and 'word'"
)
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, strings
)
v = self._sess.run(
self._vectorizer,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
if method == 'first':
v = v[:, 0]
elif method == 'last':
v = v[:, -1]
elif method == 'mean':
v = np.mean(v, axis = 1)
else:
v = [
merge_sentencepiece_tokens(
list(zip(s_tokens[i], v[i][: len(s_tokens[i])])),
weighted = False,
vectorize = True,
model = 'xlnet',
)
for i in range(len(v))
]
return v
def _predict(self, strings, add_neutral = False):
results = self._classify(strings)
if add_neutral:
result = neutral(results)
label = self._label + ['neutral']
else:
label = self._label
return [label[result] for result in np.argmax(results, axis = 1)]
def _predict_proba(self, strings, add_neutral = False):
results = self._classify(strings)
if add_neutral:
results = neutral(results)
label = self._label + ['neutral']
else:
label = self._label
outputs = []
for result in results:
outputs.append({label[i]: result[i] for i in range(len(result))})
return outputs
def _predict_words(
self, string, method, visualization, add_neutral = False
):
method = method.lower()
if method not in ['last', 'first', 'mean']:
raise ValueError(
"method not supported, only support 'last', 'first' and 'mean'"
)
if add_neutral:
label = self._label + ['neutral']
else:
label = self._label
batch_x, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, [string]
)
result, attentions, words = self._sess.run(
[self._softmax, self._attns, self._softmax_seq],
feed_dict = {
self._X: batch_x,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
if method == 'first':
cls_attn = attentions[0][:, :, 0, :]
if method == 'last':
cls_attn = attentions[-1][:, :, 0, :]
if method == 'mean':
cls_attn = np.mean(attentions, axis = 0).mean(axis = 2)
cls_attn = np.mean(cls_attn, axis = 1)
total_weights = np.sum(cls_attn, axis = -1, keepdims = True)
attn = cls_attn / total_weights
words = words[0]
if add_neutral:
result = neutral(result)
words = neutral(words)
result = result[0]
weights = []
merged = merge_sentencepiece_tokens(
list(zip(s_tokens[0], attn[0])), model = 'xlnet'
)
for i in range(words.shape[1]):
m = merge_sentencepiece_tokens(
list(zip(s_tokens[0], words[:, i])),
weighted = False,
model = 'xlnet',
)
_, weight = zip(*m)
weights.append(weight)
w, a = zip(*merged)
words = np.array(weights).T
distribution_words = words[:, np.argmax(words.sum(axis = 0))]
y_histogram, x_histogram = np.histogram(
distribution_words, bins = np.arange(0, 1, 0.05)
)
y_histogram = y_histogram / y_histogram.sum()
x_attention = np.arange(len(w))
left, right = np.unique(
np.argmax(words, axis = 1), return_counts = True
)
left = left.tolist()
y_barplot = []
for i in range(len(label)):
            if i not in left:
                y_barplot.append(0)  # this label was never the argmax for any token, so its count is zero
else:
y_barplot.append(right[left.index(i)])
dict_result = {label[i]: result[i] for i in range(len(result))}
dict_result['alphas'] = {w: a[no] for no, w in enumerate(w)}
dict_result['word'] = {w: words[no] for no, w in enumerate(w)}
dict_result['histogram'] = {'x': x_histogram, 'y': y_histogram}
dict_result['attention'] = {'x': x_attention, 'y': np.array(a)}
dict_result['barplot'] = {'x': label, 'y': y_barplot}
dict_result['class_name'] = self._class_name
if visualization:
render_dict[self._class_name](dict_result)
else:
return dict_result
class BINARY_XLNET(XLNET):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
logits_seq,
vectorizer,
sess,
tokenizer,
attns,
class_name,
label = ['negative', 'positive'],
):
XLNET.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
logits = logits,
logits_seq = logits_seq,
vectorizer = vectorizer,
sess = sess,
tokenizer = tokenizer,
attns = attns,
class_name = class_name,
label = label,
)
@check_type
def vectorize(self, strings: List[str], method: str = 'first'):
"""
vectorize list of strings.
Parameters
----------
strings: List[str]
method : str, optional (default='first')
Vectorization layer supported. Allowed values:
* ``'last'`` - vector from last sequence.
* ``'first'`` - vector from first sequence.
* ``'mean'`` - average vectors from all sequences.
* ``'word'`` - average vectors based on tokens.
Returns
-------
result: np.array
"""
return self._vectorize(strings = strings, method = method)
@check_type
def predict(self, strings: List[str], add_neutral: bool = True):
"""
classify list of strings.
Parameters
----------
strings: List[str]
add_neutral: bool, optional (default=True)
if True, it will add neutral probability.
Returns
-------
result: List[str]
"""
return self._predict(strings = strings, add_neutral = add_neutral)
@check_type
def predict_proba(self, strings: List[str], add_neutral: bool = True):
"""
classify list of strings and return probability.
Parameters
----------
strings : List[str]
add_neutral: bool, optional (default=True)
if True, it will add neutral probability.
Returns
-------
result: List[dict[str, float]]
"""
return self._predict_proba(strings = strings, add_neutral = add_neutral)
@check_type
def predict_words(
self, string: str, method: str = 'last', visualization: bool = True
):
"""
classify words.
Parameters
----------
string : str
method : str, optional (default='last')
Attention layer supported. Allowed values:
* ``'last'`` - attention from last layer.
* ``'first'`` - attention from first layer.
* ``'mean'`` - average attentions from all layers.
visualization: bool, optional (default=True)
If True, it will open the visualization dashboard.
Returns
-------
result: dict
"""
return self._predict_words(
string = string,
method = method,
add_neutral = True,
visualization = visualization,
)
class MULTICLASS_XLNET(XLNET):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
logits_seq,
vectorizer,
sess,
tokenizer,
attns,
class_name,
label = ['negative', 'positive'],
):
XLNET.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
logits = logits,
logits_seq = logits_seq,
vectorizer = vectorizer,
sess = sess,
tokenizer = tokenizer,
attns = attns,
class_name = class_name,
label = label,
)
@check_type
def vectorize(self, strings: List[str], method: str = 'first'):
"""
vectorize list of strings.
Parameters
----------
strings: List[str]
method : str, optional (default='first')
Vectorization layer supported. Allowed values:
* ``'last'`` - vector from last sequence.
* ``'first'`` - vector from first sequence.
* ``'mean'`` - average vectors from all sequences.
* ``'word'`` - average vectors based on tokens.
Returns
-------
result: np.array
"""
return self._vectorize(strings = strings, method = method)
@check_type
def predict(self, strings: List[str]):
"""
classify list of strings.
Parameters
----------
strings: List[str]
Returns
-------
result: List[str]
"""
return self._predict(strings = strings)
@check_type
def predict_proba(self, strings: List[str]):
"""
classify list of strings and return probability.
Parameters
----------
strings : List[str]
Returns
-------
result: List[dict[str, float]]
"""
return self._predict_proba(strings = strings)
@check_type
def predict_words(
self, string: str, method: str = 'last', visualization: bool = True
):
"""
classify words.
Parameters
----------
string : str
method : str, optional (default='last')
Attention layer supported. Allowed values:
* ``'last'`` - attention from last layer.
* ``'first'`` - attention from first layer.
* ``'mean'`` - average attentions from all layers.
visualization: bool, optional (default=True)
If True, it will open the visualization dashboard.
Returns
-------
result: dict
"""
return self._predict_words(
string = string, method = method, visualization = visualization
)
class SIGMOID_XLNET(BASE):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
logits_seq,
vectorizer,
sess,
tokenizer,
attns,
class_name,
label = ['negative', 'positive'],
):
BASE.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
logits = logits,
vectorizer = vectorizer,
sess = sess,
tokenizer = tokenizer,
label = label,
)
self._attns = attns
self._logits_seq = logits_seq
self._class_name = class_name
self._sigmoid = tf.nn.sigmoid(self._logits)
self._sigmoid_seq = tf.nn.sigmoid(self._logits_seq)
def _classify(self, strings):
input_ids, input_masks, segment_ids, _ = xlnet_tokenization(
self._tokenizer, strings
)
result = self._sess.run(
self._sigmoid,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
return result
@check_type
def vectorize(self, strings: List[str], method: str = 'first'):
"""
vectorize list of strings.
Parameters
----------
strings: List[str]
method : str, optional (default='first')
Vectorization layer supported. Allowed values:
* ``'last'`` - vector from last sequence.
* ``'first'`` - vector from first sequence.
* ``'mean'`` - average vectors from all sequences.
* ``'word'`` - average vectors based on tokens.
Returns
-------
result: np.array
"""
method = method.lower()
if method not in ['first', 'last', 'mean', 'word']:
raise ValueError(
"method not supported, only support 'first', 'last', 'mean' and 'word'"
)
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, strings
)
v = self._sess.run(
self._vectorizer,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
if method == 'first':
v = v[:, 0]
elif method == 'last':
v = v[:, -1]
elif method == 'mean':
v = np.mean(v, axis = 1)
else:
v = [
merge_sentencepiece_tokens(
list(zip(s_tokens[i], v[i][: len(s_tokens[i])])),
weighted = False,
vectorize = True,
model = 'xlnet',
)
for i in range(len(v))
]
return v
@check_type
def predict(self, strings: List[str]):
"""
classify list of strings.
Parameters
----------
strings: List[str]
Returns
-------
result: List[List[str]]
"""
probs = self._classify(strings)
results = []
probs = np.around(probs)
for prob in probs:
list_result = []
for no, label in enumerate(self._label):
if prob[no]:
list_result.append(label)
results.append(list_result)
return results
@check_type
def predict_proba(self, strings: List[str]):
"""
classify list of strings and return probability.
Parameters
----------
strings : List[str]
Returns
-------
result: List[dict[str, float]]
"""
probs = self._classify(strings)
results = []
for prob in probs:
dict_result = {}
for no, label in enumerate(self._label):
dict_result[label] = prob[no]
results.append(dict_result)
return results
@check_type
def predict_words(
self, string: str, method: str = 'last', visualization: bool = True
):
"""
classify words.
Parameters
----------
string : str
method : str, optional (default='last')
Attention layer supported. Allowed values:
* ``'last'`` - attention from last layer.
* ``'first'`` - attention from first layer.
* ``'mean'`` - average attentions from all layers.
visualization: bool, optional (default=True)
If True, it will open the visualization dashboard.
Returns
-------
        result: dict
"""
method = method.lower()
if method not in ['last', 'first', 'mean']:
raise ValueError(
"method not supported, only support 'last', 'first' and 'mean'"
)
batch_x, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, [string]
)
result, attentions, words = self._sess.run(
[self._sigmoid, self._attns, self._sigmoid_seq],
feed_dict = {
self._X: batch_x,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
if method == 'first':
cls_attn = attentions[0][:, :, 0, :]
if method == 'last':
cls_attn = attentions[-1][:, :, 0, :]
if method == 'mean':
cls_attn = np.mean(attentions, axis = 0).mean(axis = 2)
cls_attn = np.mean(cls_attn, axis = 1)
total_weights = np.sum(cls_attn, axis = -1, keepdims = True)
attn = cls_attn / total_weights
result = result[0]
words = words[0]
weights = []
merged = merge_sentencepiece_tokens(
list(zip(s_tokens[0], attn[0])), model = 'xlnet'
)
for i in range(words.shape[1]):
m = merge_sentencepiece_tokens(
list(zip(s_tokens[0], words[:, i])),
weighted = False,
model = 'xlnet',
)
_, weight = zip(*m)
weights.append(weight)
w, a = zip(*merged)
words = np.array(weights).T
distribution_words = words[:, np.argmax(words.sum(axis = 0))]
y_histogram, x_histogram = np.histogram(
distribution_words, bins = np.arange(0, 1, 0.05)
)
y_histogram = y_histogram / y_histogram.sum()
x_attention = np.arange(len(w))
left, right = np.unique(
np.argmax(words, axis = 1), return_counts = True
)
left = left.tolist()
y_barplot = []
for i in range(len(self._label)):
            if i not in left:
                y_barplot.append(0)  # this label was never the argmax for any token, so its count is zero
else:
y_barplot.append(right[left.index(i)])
dict_result = {self._label[i]: result[i] for i in range(len(result))}
dict_result['alphas'] = {w: a[no] for no, w in enumerate(w)}
dict_result['word'] = {w: words[no] for no, w in enumerate(w)}
dict_result['histogram'] = {'x': x_histogram, 'y': y_histogram}
dict_result['attention'] = {'x': x_attention, 'y': np.array(a)}
dict_result['barplot'] = {'x': self._label, 'y': y_barplot}
dict_result['class_name'] = self._class_name
if visualization:
_render_toxic(dict_result)
else:
return dict_result
class SIAMESE_XLNET(BASE):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
vectorizer,
sess,
tokenizer,
label = ['not similar', 'similar'],
):
BASE.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
vectorizer = vectorizer,
logits = logits,
sess = sess,
tokenizer = tokenizer,
label = label,
)
self._softmax = tf.nn.softmax(self._logits)
self._batch_size = 20
def _base(self, strings_left, strings_right):
input_ids, input_masks, segment_ids, _ = xlnet_tokenization_siamese(
self._tokenizer, strings_left, strings_right
)
return self._sess.run(
self._softmax,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
@check_type
def vectorize(self, strings: List[str]):
"""
Vectorize list of strings.
Parameters
----------
strings : List[str]
Returns
-------
result: np.array
"""
input_ids, input_masks, segment_ids, _ = xlnet_tokenization(
self._tokenizer, strings
)
segment_ids = np.array(segment_ids)
segment_ids[segment_ids == 0] = 1
return self._sess.run(
self._vectorizer,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
@check_type
def predict_proba(self, strings_left: List[str], strings_right: List[str]):
"""
        calculate similarity for two different batches of texts.
        Parameters
        ----------
        strings_left : List[str]
        strings_right : List[str]
Returns
-------
result : List[float]
"""
if len(strings_left) != len(strings_right):
raise ValueError(
'length `strings_left` must be same as length `strings_right`'
)
return self._base(strings_left, strings_right)[:, 1]
def _tree_plot(self, strings):
l, r = [], []
for s in strings:
for s_ in strings:
l.append(s)
r.append(s_)
results = []
for i in range(0, len(l), self._batch_size):
index = min(i + self._batch_size, len(l))
x = l[i:index]
y = r[i:index]
results.append(self._base(x, y)[:, 1])
results = np.concatenate(results, axis = 0)
results = np.reshape(results, (len(strings), len(strings)))
return results
@check_type
def heatmap(
self,
strings: List[str],
visualize: bool = True,
annotate: bool = True,
figsize: Tuple[int, int] = (7, 7),
):
results = self._tree_plot(strings)
if not visualize:
return results
try:
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
        except ImportError:
raise ModuleNotFoundError(
'matplotlib and seaborn not installed. Please install it and try again.'
)
plt.figure(figsize = figsize)
g = sns.heatmap(
results,
cmap = 'Blues',
xticklabels = strings,
yticklabels = strings,
annot = annotate,
)
plt.show()
class TAGGING_XLNET(BASE):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
vectorizer,
sess,
tokenizer,
settings,
):
BASE.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
logits = logits,
vectorizer = vectorizer,
sess = sess,
tokenizer = tokenizer,
label = None,
)
self._settings = settings
self._settings['idx2tag'] = {
int(k): v for k, v in self._settings['idx2tag'].items()
}
self._pos = 'organization' not in self._settings['tag2idx']
@check_type
def vectorize(self, string: str):
"""
vectorize a string.
Parameters
----------
        string: str
Returns
-------
result: np.array
"""
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, [string]
)
s_tokens = s_tokens[0]
v = self._sess.run(
self._vectorizer,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
v = v[0]
return merge_sentencepiece_tokens(
list(zip(s_tokens, v[: len(s_tokens)])),
weighted = False,
vectorize = True,
model = 'xlnet',
)
@check_type
def analyze(self, string: str):
"""
Analyze a string.
Parameters
----------
string : str
Returns
-------
result : {'words': List[str], 'tags': [{'text': 'text', 'type': 'location', 'score': 1.0, 'beginOffset': 0, 'endOffset': 1}]}
"""
predicted = self.predict(string)
return tag_chunk(predicted)
@check_type
def predict(self, string: str):
"""
Tag a string.
Parameters
----------
string : str
Returns
-------
        result : List[Tuple[str, str]]
"""
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, [string]
)
s_tokens = s_tokens[0]
predicted = self._sess.run(
self._logits,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)[0]
t = [self._settings['idx2tag'][d] for d in predicted]
merged = merge_sentencepiece_tokens_tagging(
s_tokens, t, model = 'xlnet'
)
return list(zip(*merged))
class DEPENDENCY_XLNET(BASE):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
vectorizer,
sess,
tokenizer,
settings,
heads_seq,
):
BASE.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
logits = logits,
vectorizer = vectorizer,
sess = sess,
tokenizer = tokenizer,
label = None,
)
self._tag2idx = settings
self._idx2tag = {int(v): k for k, v in self._tag2idx.items()}
self._heads_seq = heads_seq
@check_type
def vectorize(self, string: str):
"""
vectorize a string.
Parameters
----------
        string: str
Returns
-------
result: np.array
"""
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, [string]
)
s_tokens = s_tokens[0]
v = self._sess.run(
self._vectorizer,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
v = v[0]
return merge_sentencepiece_tokens(
list(zip(s_tokens, v[: len(s_tokens)])),
weighted = False,
vectorize = True,
model = 'xlnet',
)
@check_type
def predict(self, string: str):
"""
Tag a string.
Parameters
----------
string : str
Returns
-------
        result : Tuple[DependencyGraph, List[Tuple[str, str]], List[Tuple[str, int]]]
"""
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization(
self._tokenizer, [string]
)
s_tokens = s_tokens[0]
tagging, depend = self._sess.run(
[self._logits, self._heads_seq],
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
tagging = [self._idx2tag[i] for i in tagging[0]]
depend = depend[0] - 1
for i in range(len(depend)):
if depend[i] == 0 and tagging[i] != 'root':
tagging[i] = 'root'
elif depend[i] != 0 and tagging[i] == 'root':
depend[i] = 0
tagging = merge_sentencepiece_tokens_tagging(
s_tokens, tagging, model = 'xlnet'
)
tagging = list(zip(*tagging))
indexing = merge_sentencepiece_tokens_tagging(
s_tokens, depend, model = 'xlnet'
)
indexing = list(zip(*indexing))
result, indexing_ = [], []
for i in range(len(tagging)):
index = int(indexing[i][1])
if index > len(tagging):
index = len(tagging)
indexing_.append((indexing[i][0], index))
result.append(
'%d\t%s\t_\t_\t_\t_\t%d\t%s\t_\t_'
% (i + 1, tagging[i][0], index, tagging[i][1])
)
d = DependencyGraph('\n'.join(result), top_relation_label = 'root')
return d, tagging, indexing_
class ZEROSHOT_XLNET(BASE):
def __init__(
self,
X,
segment_ids,
input_masks,
logits,
vectorizer,
sess,
tokenizer,
label = ['not similar', 'similar'],
):
BASE.__init__(
self,
X = X,
segment_ids = segment_ids,
input_masks = input_masks,
logits = logits,
vectorizer = vectorizer,
sess = sess,
tokenizer = tokenizer,
label = label,
)
self._softmax = tf.nn.softmax(self._logits)
def _base(self, strings, labels):
strings_left, strings_right, mapping = [], [], defaultdict(list)
index = 0
for no, string in enumerate(strings):
for label in labels:
strings_left.append(string)
strings_right.append(f'teks ini adalah mengenai {label}')
mapping[no].append(index)
index += 1
input_ids, input_masks, segment_ids, _ = xlnet_tokenization_siamese(
self._tokenizer, strings_left, strings_right
)
output = self._sess.run(
self._softmax,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
results = []
for k, v in mapping.items():
result = {}
for no, index in enumerate(v):
result[labels[no]] = output[index, 1]
results.append(result)
return results
@check_type
def vectorize(
self, strings: List[str], labels: List[str], method: str = 'first'
):
"""
        vectorize list of strings against the given labels.
Parameters
----------
strings: List[str]
labels : List[str]
method : str, optional (default='first')
Vectorization layer supported. Allowed values:
* ``'last'`` - vector from last sequence.
* ``'first'`` - vector from first sequence.
* ``'mean'`` - average vectors from all sequences.
* ``'word'`` - average vectors based on tokens.
Returns
-------
result: np.array
"""
strings_left, strings_right, combined = [], [], []
for no, string in enumerate(strings):
for label in labels:
strings_left.append(string)
strings_right.append(f'teks ini adalah mengenai {label}')
combined.append((string, label))
input_ids, input_masks, segment_ids, s_tokens = xlnet_tokenization_siamese(
self._tokenizer, strings_left, strings_right
)
v = self._sess.run(
self._vectorizer,
feed_dict = {
self._X: input_ids,
self._segment_ids: segment_ids,
self._input_masks: input_masks,
},
)
v = np.transpose(v, [1, 0, 2])
if method == 'first':
v = v[:, 0]
elif method == 'last':
v = v[:, -1]
elif method == 'mean':
v = np.mean(v, axis = 1)
else:
v = [
merge_sentencepiece_tokens(
list(zip(s_tokens[i], v[i][: len(s_tokens[i])])),
weighted = False,
vectorize = True,
model = 'xlnet',
)
for i in range(len(v))
]
return combined, v
@check_type
def predict_proba(self, strings: List[str], labels: List[str]):
"""
classify list of strings and return probability.
Parameters
----------
strings : List[str]
labels : List[str]
Returns
-------
        result: List[dict[str, float]]
"""
if len(set(labels)) != len(labels):
raise ValueError('labels must be unique.')
return self._base(strings, labels)
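

# Illustrative usage sketch (added commentary, not part of the original module).
# These wrapper classes are normally constructed by malaya's model-loading
# machinery, which builds the TF graph, session and tokenizer, so the snippet
# below only shows the call pattern of the methods defined above. Assuming
# `model` is an already-loaded ZEROSHOT_XLNET instance:
#
#   probs = model.predict_proba(
#       strings = ['kerajaan sangat prihatin terhadap rakyat'],
#       labels = ['kerajaan', 'sukan', 'ekonomi'],
#   )
#   # -> one dict per input string mapping each label to a similarity score,
#   #    e.g. [{'kerajaan': 0.97, 'sukan': 0.01, 'ekonomi': 0.02}] (values illustrative)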
|
py | 1a443903fabccd96b72d9d74c8dfa7401214f8c0 | #!/usr/bin/env python
#
# File: llnms-ssh-remote-connector.py
# Author: Marvin Smith
# Date: 6/15/2015
#
# Purpose: Provide remote connection capabilities via SSH.
#
__author__ = 'Marvin Smith'
# Python Libraries
import argparse, os, sys, subprocess
# LLNMS Libraries
llnms_home = os.environ.get('LLNMS_HOME')
if llnms_home is not None:
    sys.path.append(llnms_home + '/lib')
import llnms
# -------------------------------------------- #
# - Parse Command-Line Arguments - #
# -------------------------------------------- #
def Parse_Command_Line():
# Create argument parser
parser = argparse.ArgumentParser( description="Connect to remote system via ssh." )
# Version Info
parser.add_argument('-v', '--version',
action='version',
version='%(prog)s ' + llnms.info.Get_Version_String(),
help='Print the version information.')
# Verbose Mode
parser.add_argument('--verbose',
dest='verbose_flag',
required=False,
default=False,
action='store_true',
help='Print with verbose output.')
# Quiet Mode
parser.add_argument('--quiet',
dest='quiet_flag',
required=False,
default=False,
action='store_true',
help='Do not print stdout results.')
# Select Asset
parser.add_argument('-a','--asset-hostname',
required=True,
dest='asset_hostname',
help='Asset name to connect to. Must be registered in LLNMS.')
# Operation Mode
parser.add_argument('-c','--command',
required=True,
dest='command_input',
help='Command to run on remote system.')
# return the parser
return parser.parse_args()
# ------------------------------ #
# - Validate Input - #
# ------------------------------ #
def Validate_Input( options, asset_list ):
# Figure out the print mode
print_level=1
if options.quiet_flag is True and options.verbose_flag is True:
raise Exception("Conflict between quiet and verbose mode flags.")
if options.quiet_flag is True:
print_level=0
elif options.verbose_flag is True:
print_level=2
# Get the command input
output_cmd = options.command_input
# Get the asset name
asset_hostname = options.asset_hostname
# Make sure the asset is inside the asset list
asset = None
output_address = None
for asset_candidate in asset_list:
# Compare the asset names
if asset_candidate.hostname == asset_hostname:
# Make sure the asset has remote connections enabled
for address in asset_candidate.address_list:
if address.remote_access[0] is True:
asset = asset_candidate
output_address = address
break
if asset is None:
raise Exception('Unable to find a matching asset.')
# Return the matching asset
return [asset, output_address, output_cmd, print_level]
# ------------------------------- #
# - Asset Command - #
# ------------------------------- #
def Connect_System( asset, address, cmd, print_level ):
# Command
command = 'ssh '
# Check if the asset contains a user entry
username = address.remote_access[1]['login-username']
if username is not None:
command += username + "@"
# Add the hostname and cmd
command += asset.hostname + ' \'' + cmd + '\''
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
    if print_level == 1:
        print(out)
    elif print_level == 2:
        print(command)
        print(out)
        print(err)
# --------------------------- #
# - Main Driver - #
# --------------------------- #
def Main():
# Get LLNMS Home
llnms_home=os.environ['LLNMS_HOME']
# Parse Command-Line Arguments
options = Parse_Command_Line()
# Fetch the asset list
llnms_assets = llnms.Asset.llnms_load_assets(llnms_home=llnms_home)
# Validate Input
[asset, address, cmd, print_level] = Validate_Input(options, llnms_assets)
# Run SSH against the matching asset
Connect_System( asset, address, cmd, print_level )
# exit
return
# ---------------------------- #
# - Main Entry - #
# ---------------------------- #
if __name__ == '__main__':
Main()
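
# Example invocation (hypothetical asset/host names, shown as an assumption):
#   export LLNMS_HOME=/opt/llnms
#   ./llnms-ssh-remote-connector.py -a webserver01 -c 'uptime' --verbose
# The asset must exist in the LLNMS asset list and have at least one address
# with remote access enabled, per Validate_Input() above.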
|
py | 1a443909894019656c6d9b190d2b6671102e07ca | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2019-10-19 08:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mysite', '0005_auto_20191019_1612'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='course',
            field=models.CharField(choices=[('LinuxL', 'Linux intermediate/advanced'), ('PythonFullStack', 'Python advanced full-stack development')], max_length=128, verbose_name='course of interest'),
),
]
|
py | 1a443936dd9eb4416f5daf081aedd311a2d5fb17 | # -*- coding: utf-8 -*-
from otsTurtleClasses import Oturtle
import helper as h
t = Oturtle("Turtle", "turtle", True, ("blue","red"))
t.setup(50,None, head = 180, speed=5, size = 2)
root = t.win
fwd = 1
def task():
global fwd
t.forward(fwd)
t.right(33)
fwd += 1
if fwd < 100:
root.after(1, task) # reschedule event in 1 milli second
t.ground.onclick(t.goto)
t.ground.onkey(t.ground.bye, "x")
t.win.bind("g", t.gruen)
t.ground.listen()
print t._Oturtle__oldColor # this still works (accessing the name-mangled attribute)
task()
t.win.mainloop()
|
py | 1a443ad1a4c39e229db36883609dd01b0a46efc2 | from django.db import migrations
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "to-the-point-29960.botics.co"
site_params = {
"name": "To the point",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_site),
]
|
py | 1a443b0f9c4c2f80e37619403e127ccc9d51c742 | """
Django settings for learning_log project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'c&=x%v4k!%$bn&9!)*b5%-0ao)fsrl6@mbpd8t8zt*b*5#*4h0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
# My apps
'learning_logs',
'users',
# Third party apps.
'bootstrap4',
# Default django apps.
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'learning_log.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'learning_log.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# My settings
LOGIN_URL = 'users:login'
# Heroku settings.
import django_heroku
django_heroku.settings(locals())
if os.environ.get('DEBUG') == 'TRUE':
DEBUG = True
elif os.environ.get('DEBUG') == 'FALSE':
DEBUG = False
|
py | 1a443b81666bfe8f940f92a34da6f86da6a9e882 | from kivy.uix.scrollview import ScrollView
from flat_kivy.uix.flatpopup import FlatPopup
class ScrolledPopupContent(ScrollView):
pass
class EnchantPopupLayout(ScrollView):
pass
class CustomPopup(FlatPopup):
pass
|
py | 1a443bf1a17525c4a841d1a477d78f651a5de7cb | def get_format(formats, default=None, **kwargs):
of = default if default is not None else formats[0]
if 'http_request' in kwargs:
http_request = kwargs['http_request']
best = http_request.accept_mimetypes.best
for f in formats:
if f in best:
of = f
return of
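

# A minimal sketch exercising get_format() with a stubbed request object.
# The stubs below are hypothetical; real callers would pass a Werkzeug/Flask
# request whose accept_mimetypes.best reflects the client's Accept header.
class _StubAccept:
    def __init__(self, best):
        self.best = best  # e.g. 'application/json'


class _StubRequest:
    def __init__(self, best):
        self.accept_mimetypes = _StubAccept(best)


if __name__ == '__main__':
    # A client accepting JSON gets 'json'; with no request we fall back to the default.
    assert get_format(['html', 'json'], http_request=_StubRequest('application/json')) == 'json'
    assert get_format(['html', 'json'], default='html') == 'html'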
|
py | 1a443cf9a32f629822d7443f5006659dce5b897d | """
Django settings for membermatters project.
Generated by "django-admin startproject" using Django 2.0.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import json
from collections import OrderedDict
from datetime import timedelta
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = os.environ.get(
"PORTAL_SECRET_KEY", "l)#t68rzepzp)0l#x=9mntciapun$whl+$j&=_@nl^zl1xm3j*"
)
# Default config is for dev environments and is overwritten in prod
DEBUG = True
ALLOWED_HOSTS = ["*"]
SESSION_COOKIE_HTTPONLY = False
SESSION_COOKIE_SAMESITE = None
CSRF_COOKIE_SAMESITE = None
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
# this allows the frontend dev server to talk to the dev server
CORS_ALLOW_ALL_ORIGINS = True
if os.environ.get("PORTAL_ENV") == "Production":
ENVIRONMENT = "Production"
CORS_ALLOW_ALL_ORIGINS = False
DEBUG = False
# Application definition
INSTALLED_APPS = [
"constance",
"constance.backends.database",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.humanize",
"profile",
"access",
"group",
"memberbucks",
"api_spacedirectory",
"api_general",
"api_access",
"api_meeting",
"api_admin_tools",
"api_billing",
"corsheaders",
"rest_framework",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"corsheaders.middleware.CorsMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"membermatters.middleware.Sentry",
"membermatters.middleware.ForceCsrfCookieMiddleware",
]
ROOT_URLCONF = "membermatters.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"constance.context_processors.config",
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "membermatters.wsgi.application"
if "MMDB_SECRET" in os.environ:
# This is a JSON blob containing the database connection details, generated by "copilot" in an AWS deployment
# Fields in this JSON blob are: {username, host, dbname, password, port}
database_config = json.loads(os.environ["MMDB_SECRET"])
DATABASES = {
"default": {
"ENGINE": "django.db.backends.mysql",
"NAME": database_config.get("dbname"),
"USER": database_config.get("username"),
"PASSWORD": database_config.get("password"),
"HOST": database_config.get("host"),
"PORT": database_config.get("port"),
}
}
else:
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.environ.get("PORTAL_DB_LOCATION", "/usr/src/data/db.sqlite3"),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"handlers": {
"file": {
"level": "WARNING",
"class": "logging.FileHandler",
"filename": os.environ.get(
"PORTAL_LOG_LOCATION", "/usr/src/logs/django.log"
),
},
},
"loggers": {
"django": {
"handlers": ["file"],
"level": "INFO",
"propagate": True,
},
},
}
REST_FRAMEWORK = {
"EXCEPTION_HANDLER": "membermatters.custom_exception_handlers.fix_401",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework.authentication.SessionAuthentication",
"rest_framework_simplejwt.authentication.JWTAuthentication",
),
}
SIMPLE_JWT = {
"ACCESS_TOKEN_LIFETIME": timedelta(minutes=5),
"REFRESH_TOKEN_LIFETIME": timedelta(days=365),
"ROTATE_REFRESH_TOKENS": True,
"UPDATE_LAST_LOGIN": True,
"BLACKLIST_AFTER_ROTATION": True,
"ALGORITHM": "HS256",
"SIGNING_KEY": SECRET_KEY,
"VERIFYING_KEY": None,
"AUTH_HEADER_TYPES": "Bearer",
"USER_ID_FIELD": "id",
"USER_ID_CLAIM": "user_id",
"AUTH_TOKEN_CLASSES": ("rest_framework_simplejwt.tokens.AccessToken",),
"TOKEN_TYPE_CLAIM": "token_type",
}
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = "en-au"
TIME_ZONE = "Australia/Brisbane"
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = os.environ.get(
"PORTAL_STATIC_LOCATION", "/usr/src/app/memberportal/membermatters/static"
)
LOGIN_REDIRECT_URL = "/"
LOGIN_URL = "/signin"
MEDIA_URL = "/media/"
MEDIA_ROOT = os.environ.get("PORTAL_MEDIA_LOCATION", "/usr/src/data/media/")
AUTH_USER_MODEL = "profile.User"
REQUEST_TIMEOUT = 0.05
# Django constance configuration
CONSTANCE_BACKEND = "membermatters.constance_backend.DatabaseBackend"
CONSTANCE_CONFIG = {
# General site info
"SITE_NAME": (
"MemberMatters Portal",
"The title shown at the top of the page and as the tab title.",
),
"SITE_OWNER": (
"MemberMatters",
"The name of the legal entity/association/club that is running this site.",
),
"ENTITY_TYPE": (
"Association",
"This is the type of group you are such as an association, club, etc.",
),
"SITE_BANNER": (
"",
"A site wide banner that can display useful information. Leave empty to turn off.",
),
# Email config
"EMAIL_SYSADMIN": (
"[email protected]",
"The default sysadmin email that should receive technical errors etc.",
),
"EMAIL_ADMIN": (
"[email protected]",
"The default admin email that should receive administrative notifications.",
),
"EMAIL_DEFAULT_FROM": (
'"MemberMatters Portal" <[email protected]>',
"The default email that outbound messages are sent from.",
),
"SITE_MAIL_ADDRESS": (
"123 Example St, Nowhere",
"This address is used in the footer of all emails for anti spam.",
),
# URLs
"SITE_URL": (
"https://membermatters.org",
"The publicly accessible URL of your MemberMatters instance.",
),
"MAIN_SITE_URL": ("https://membermatters.org", "The URL of your main website."),
"CONTACT_PAGE_URL": (
"https://membermatters.org",
"The URL of your contact page (displayed during signup if "
"requireAccessCard == False).",
),
"INDUCTION_URL": (
"https://eventbrite.com.au",
"The URL members should visit to book in for a site induction.",
),
# Logo and favicon
"SITE_LOGO": (
"https://hsbne-public-assets.s3-ap-southeast-2.amazonaws.com/main-logo.png",
"Site logo (rectangular)",
),
"SITE_FAVICON": (
"https://hsbne-public-assets.s3-ap-southeast-2.amazonaws.com/logo-favicon.png",
"Site favicon (square)",
),
"STATS_CARD_IMAGE": (
"https://hsbne.org/assets/img/carousel/00.jpg",
"Image to use for the site statistics card.",
),
"MENU_BACKGROUND": (
"",
"[Optional] Image to use as the background in the menu. Leave blank for the default background image.",
),
# Custom theme colors
"THEME_PRIMARY": ("#278ab0", "Custom primary theme colour"),
"THEME_TOOLBAR": ("#0461b1", "Custom toolbar theme colour"),
"THEME_ACCENT": ("#189ab4", "Custom accent theme colour"),
# Localisation of terminology
"MEMBERBUCKS_NAME": (
"Memberbucks",
"You can customise the name of the built in currency.",
),
"GROUP_NAME": ("Group", "You can customise what we call a group."),
"ADMIN_NAME": (
"Administrators",
"You can specify a different name for your admin group like executive or management committee.",
),
"WEBCAM_PAGE_URLS": (
"[]",
"A JSON serialised array of URLs to pull webcam images from.",
),
"HOME_PAGE_CARDS": (
"""[
{
"title": "Example",
"description": "This is an example card with a narwhal icon!",
"icon": "fad fa-narwhal",
"url": "https://membermatters.org/",
"btn_text": "Click Here"
},
{
"title": "Example 2",
"description": "This is an example card with a unicorn icon! And it links to another page using a Vue route!",
"icon": "fad fa-unicorn",
"routerLink": {
"name": "reportIssue"
},
"btn_text": "Go to route"
}
]
""",
"You can specify cards that go on the home page with JSON. See https://github.com/MemberMatters/MemberMatters/blob/master/GETTING_STARTED.md.",
),
"WELCOME_EMAIL_CARDS": (
"[]",
"Same syntax as HOME_PAGE_CARDS but icons are not used. If nothing is specified we will use HOME_PAGE_CARDS.",
),
# Stripe config
"STRIPE_PUBLISHABLE_KEY": ("", "Set this to your Stripe PUBLIC API key."),
"STRIPE_SECRET_KEY": ("", "Set this to your Stripe PRIVATE API key."),
"STRIPE_WEBHOOK_SECRET": (
"",
"Set this to a secret value to verify that a webhook came from Stripe.",
),
"STRIPE_MEMBERBUCKS_TOPUP_OPTIONS": (
"[1000, 2000, 3000]",
"This is a JSON array of top-up amounts in cents.",
),
"MAKEMEMBER_CREATE_XERO_INVOICES": (
False,
"Creates a Xero invoice when 'Make Member' is clicked in the admin tools area.",
),
"STRIPE_CREATE_XERO_INVOICES": (
False,
"Creates an invoice in Xero for every successful Stripe membership payment.",
),
"XERO_TAX_TYPE": ("EXEMPTOUTPUT", "Tax type to use on Xero invoices."),
"XERO_MEMBERSHIP_ACCOUNT_CODE": (
"100",
"Account code to use on Xero invoices for membership.",
),
"XERO_MEMBERSHIP_ITEM_CODE": (
"membership",
"Item code to use on Xero invoices for membership.",
),
"XERO_STRIPE_FEE_ACCOUNT_CODE": (
"100",
"Account code to use on Xero invoices for membership.",
),
"XERO_STRIPE_FEE_ITEM_CODE": (
"stripe",
"Item code to use on Xero invoices for membership.",
),
"XERO_MEMBERBUCKS_ACCOUNT_CODE": (
"100",
"Account code to use on Xero invoices for memberbucks.",
),
"ENABLE_STRIPE_MEMBERSHIP_PAYMENTS": (
False,
"Enable integration with stripe for membership payments.",
),
# Trello config
"ENABLE_TRELLO_INTEGRATION": (
False,
"Enable the submit issue to trello integration. If disabled we'll send an email to EMAIL_ADMIN instead.",
),
"TRELLO_API_KEY": ("", "Set this to your Trello API key."),
"TRELLO_API_TOKEN": ("", "Set this to your Trello API token."),
"TRELLO_ID_LIST": (
"",
"Set this to the ID of your card list you want issue " "to go to.",
),
# Space API config
"ENABLE_SPACE_DIRECTORY": (
False,
"Turn on the space directory API available at /api/spacedirectory.",
),
"SPACE_DIRECTORY_OPEN": (False, "Sets the open state."),
"SPACE_DIRECTORY_MESSAGE": (
"This is the default MemberMatters (membermatters.org) space directory message.",
"Sets the message.",
),
"SPACE_DIRECTORY_ICON_OPEN": ("", "Sets the icon shown while in the open state."),
"SPACE_DIRECTORY_ICON_CLOSED": (
"",
"Sets the icon shown while in the closed state.",
),
"SPACE_DIRECTORY_LOCATION_ADDRESS": (
"123 Setme St",
"Sets the snail mail address.",
),
"SPACE_DIRECTORY_LOCATION_LAT": (0, "Sets the latitude."),
"SPACE_DIRECTORY_LOCATION_LON": (0, "Sets the longitude."),
"SPACE_DIRECTORY_FED_SPACENET": (False, "Sets support for spacenet."),
"SPACE_DIRECTORY_FED_SPACESAML": (False, "Sets support for spacesaml."),
"SPACE_DIRECTORY_FED_SPACEPHONE": (False, "Sets support for spacephone."),
"SPACE_DIRECTORY_CAMS": (
"[]",
"A JSON list of strings (URLs) that webcam snapshots of the space can be found.",
),
"SPACE_DIRECTORY_CONTACT_EMAIL": (
"[email protected]",
"Sets the general contact email.",
),
"SPACE_DIRECTORY_CONTACT_TWITTER": ("", "Sets the twitter handle."),
"SPACE_DIRECTORY_CONTACT_FACEBOOK": ("", "Sets the Facebook page URL."),
"SPACE_DIRECTORY_CONTACT_PHONE": (
"",
"Sets the general contact phone number, include country code with a leading +.",
),
"SPACE_DIRECTORY_PROJECTS": (
"[]",
"A JSON list of strings (URLs) to project sites like wikis, GitHub, etc.",
),
"ENABLE_MEMBERBUCKS": (False, "Enable the memberbucks functionality."),
"MEMBERBUCKS_MAX_TOPUP": ("50", "The maximum topup allowed in dollars."),
"MEMBERBUCKS_CURRENCY": (
"aud",
"The currency to charge cards in - see Stripe documentation.",
),
"ENABLE_THEME_SWIPE": (
False,
"Enable playing a member's theme song on a swipe.",
),
"THEME_SWIPE_URL": (
"http://10.0.1.50/playmp3.php?nickname={}",
"The URL to send a GET request to on a swipe if enabled.",
),
"ENABLE_DISCORD_INTEGRATION": (
False,
"Enable playing a member's theme song on a swipe.",
),
"DISCORD_DOOR_WEBHOOK": (
"https://discordapp.com/api/webhooks/<token>",
"Discord URL to send webhook notifications to.",
),
"DISCORD_INTERLOCK_WEBHOOK": (
"https://discordapp.com/api/webhooks/<token>",
"Discord URL to send webhook notifications to.",
),
"ENABLE_DISCOURSE_SSO_PROTOCOL": (
False,
"Enable support for the discourse SSO protocol.",
),
"DISCOURSE_SSO_PROTOCOL_SECRET_KEY": (
"",
"Secret key for the discourse SSO protocol (if enabled).",
),
"GOOGLE_ANALYTICS_PROPERTY_ID": (
"",
"Place you google analytics property ID here to enable Google analytics integration.",
),
"API_SECRET_KEY": (
"PLEASE_CHANGE_ME",
"The API key used by the internal access system for device authentication.",
),
"SENTRY_DSN_FRONTEND": (
"https://577dc95136cd402bb273d00f46c2a017@sentry.serv02.binarydigital.com.au/5/",
"Enter a Sentry DSN to enable sentry logging of frontend errors.",
),
"SENTRY_DSN_BACKEND": (
"https://8ba460796a9a40d4ac2584e0e8dca59a@sentry.serv02.binarydigital.com.au/4",
"Enter a Sentry DSN to enable sentry logging of backend errors.",
),
"SENDGRID_API_KEY": (
"PLEASE_CHANGE_ME",
"The API key used to send email with Sendgrid.",
),
"INDUCTION_ENROL_LINK": (
"",
"The link that a member can use to enrol into an induction.",
),
"INDUCTION_COURSE_ID": (
"",
"Canvas course id for the induction.",
),
"MAX_INDUCTION_DAYS": (
180,
"The maximum amount of days since a member was last inducted before they have to complete another induction (0 to disable).",
),
"MIN_INDUCTION_SCORE": (
99,
"The minimum score to consider an induction as passed (0-100).",
),
"REQUIRE_ACCESS_CARD": (
True,
"If an access card is required to be added to a members profile before signup.",
),
"DEFAULT_MEMBER_TYPE": (
1,
"The ID of the member type to assign new members to by default.",
),
"CANVAS_API_TOKEN": (
"PLEASE_CHANGE_ME",
"Canvas API token.",
),
"ENABLE_PROXY_VOTING": (False, "Enables the proxy voting management feature."),
"ENABLE_WEBCAMS": (
False,
"Enables a webcams page in the portal. Configure with the WEBCAM_PAGE_URLS setting.",
),
}
CONSTANCE_CONFIG_FIELDSETS = OrderedDict(
[
(
"General",
(
"SITE_NAME",
"SITE_OWNER",
"ENTITY_TYPE",
"GOOGLE_ANALYTICS_PROPERTY_ID",
"API_SECRET_KEY",
"DEFAULT_MEMBER_TYPE",
"SITE_BANNER",
),
),
(
"Features",
(
"ENABLE_WEBCAMS",
"ENABLE_PROXY_VOTING",
"ENABLE_STRIPE_MEMBERSHIP_PAYMENTS",
"ENABLE_MEMBERBUCKS",
"ENABLE_DISCOURSE_SSO_PROTOCOL",
"ENABLE_DISCORD_INTEGRATION",
"ENABLE_SPACE_DIRECTORY",
"ENABLE_THEME_SWIPE",
),
),
(
"Sentry Error Reporting",
(
"SENTRY_DSN_FRONTEND",
"SENTRY_DSN_BACKEND",
),
),
(
"Signup",
(
"INDUCTION_ENROL_LINK",
"INDUCTION_COURSE_ID",
"MAX_INDUCTION_DAYS",
"MIN_INDUCTION_SCORE",
"REQUIRE_ACCESS_CARD",
),
),
(
"Canvas Integration",
("CANVAS_API_TOKEN",),
),
(
"Sendgrid Integration",
("SENDGRID_API_KEY",),
),
(
"Contact Information",
(
"EMAIL_SYSADMIN",
"EMAIL_ADMIN",
"EMAIL_DEFAULT_FROM",
"SITE_MAIL_ADDRESS",
),
),
(
"Discourse SSO Protocol",
("DISCOURSE_SSO_PROTOCOL_SECRET_KEY",),
),
("URLs", ("SITE_URL", "MAIN_SITE_URL", "CONTACT_PAGE_URL", "INDUCTION_URL")),
("Memberbucks", ("MEMBERBUCKS_MAX_TOPUP", "MEMBERBUCKS_CURRENCY")),
(
"Images",
("SITE_LOGO", "SITE_FAVICON", "STATS_CARD_IMAGE", "MENU_BACKGROUND"),
),
("Theme", ("THEME_PRIMARY", "THEME_TOOLBAR", "THEME_ACCENT")),
(
"Group Localisation",
(
"MEMBERBUCKS_NAME",
"GROUP_NAME",
"ADMIN_NAME",
"WEBCAM_PAGE_URLS",
"HOME_PAGE_CARDS",
"WELCOME_EMAIL_CARDS",
),
),
(
"Stripe Integration",
(
"STRIPE_PUBLISHABLE_KEY",
"STRIPE_SECRET_KEY",
"STRIPE_WEBHOOK_SECRET",
"STRIPE_MEMBERBUCKS_TOPUP_OPTIONS",
),
),
(
"Xero Integration",
(
"MAKEMEMBER_CREATE_XERO_INVOICES",
"STRIPE_CREATE_XERO_INVOICES",
"XERO_MEMBERBUCKS_ACCOUNT_CODE",
"XERO_MEMBERSHIP_ACCOUNT_CODE",
"XERO_MEMBERSHIP_ITEM_CODE",
"XERO_STRIPE_FEE_ACCOUNT_CODE",
"XERO_STRIPE_FEE_ITEM_CODE",
"XERO_TAX_TYPE",
),
),
(
"Trello Integration",
(
"ENABLE_TRELLO_INTEGRATION",
"TRELLO_API_KEY",
"TRELLO_API_TOKEN",
"TRELLO_ID_LIST",
),
),
(
"Space Directory",
(
"SPACE_DIRECTORY_OPEN",
"SPACE_DIRECTORY_MESSAGE",
"SPACE_DIRECTORY_ICON_OPEN",
"SPACE_DIRECTORY_ICON_CLOSED",
"SPACE_DIRECTORY_LOCATION_ADDRESS",
"SPACE_DIRECTORY_LOCATION_LAT",
"SPACE_DIRECTORY_LOCATION_LON",
"SPACE_DIRECTORY_FED_SPACENET",
"SPACE_DIRECTORY_FED_SPACESAML",
"SPACE_DIRECTORY_CAMS",
"SPACE_DIRECTORY_CONTACT_EMAIL",
"SPACE_DIRECTORY_FED_SPACEPHONE",
"SPACE_DIRECTORY_CONTACT_TWITTER",
"SPACE_DIRECTORY_CONTACT_FACEBOOK",
"SPACE_DIRECTORY_CONTACT_PHONE",
"SPACE_DIRECTORY_PROJECTS",
),
),
("Theme Swipe Integration", ("THEME_SWIPE_URL",)),
(
"Discord Integration",
(
"DISCORD_DOOR_WEBHOOK",
"DISCORD_INTERLOCK_WEBHOOK",
),
),
]
)
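
# Usage sketch (added commentary, not part of the original settings): with the
# database backend configured above, application code reads these values
# lazily through django-constance's `config` proxy, e.g.:
#
#   from constance import config
#   site_name = config.SITE_NAME  # DB override if set, else the default above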
|
py | 1a443f164840bacf1544f68f9ee4abc788b146a3 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VaultPatchProperties(Model):
"""Properties of the vault.
:param tenant_id: The Azure Active Directory tenant ID that should be used
for authenticating requests to the key vault.
:type tenant_id: str
:param sku: SKU details
:type sku: ~azure.mgmt.keyvault.v2016_10_01.models.Sku
:param access_policies: An array of 0 to 16 identities that have access to
the key vault. All identities in the array must use the same tenant ID as
the key vault's tenant ID.
:type access_policies:
list[~azure.mgmt.keyvault.v2016_10_01.models.AccessPolicyEntry]
:param enabled_for_deployment: Property to specify whether Azure Virtual
Machines are permitted to retrieve certificates stored as secrets from the
key vault.
:type enabled_for_deployment: bool
:param enabled_for_disk_encryption: Property to specify whether Azure Disk
Encryption is permitted to retrieve secrets from the vault and unwrap
keys.
:type enabled_for_disk_encryption: bool
:param enabled_for_template_deployment: Property to specify whether Azure
Resource Manager is permitted to retrieve secrets from the key vault.
:type enabled_for_template_deployment: bool
:param enable_soft_delete: Property specifying whether recoverable
deletion ('soft' delete) is enabled for this key vault. The property may
not be set to false.
:type enable_soft_delete: bool
:param create_mode: The vault's create mode to indicate whether the vault
need to be recovered or not. Possible values include: 'recover', 'default'
:type create_mode: str or
~azure.mgmt.keyvault.v2016_10_01.models.CreateMode
:param enable_purge_protection: Property specifying whether protection
against purge is enabled for this vault; it is only effective if soft
delete is also enabled. Once activated, the property may no longer be
reset to false.
:type enable_purge_protection: bool
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'access_policies': {'key': 'accessPolicies', 'type': '[AccessPolicyEntry]'},
'enabled_for_deployment': {'key': 'enabledForDeployment', 'type': 'bool'},
'enabled_for_disk_encryption': {'key': 'enabledForDiskEncryption', 'type': 'bool'},
'enabled_for_template_deployment': {'key': 'enabledForTemplateDeployment', 'type': 'bool'},
'enable_soft_delete': {'key': 'enableSoftDelete', 'type': 'bool'},
'create_mode': {'key': 'createMode', 'type': 'CreateMode'},
'enable_purge_protection': {'key': 'enablePurgeProtection', 'type': 'bool'},
}
def __init__(self, **kwargs):
super(VaultPatchProperties, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.sku = kwargs.get('sku', None)
self.access_policies = kwargs.get('access_policies', None)
self.enabled_for_deployment = kwargs.get('enabled_for_deployment', None)
self.enabled_for_disk_encryption = kwargs.get('enabled_for_disk_encryption', None)
self.enabled_for_template_deployment = kwargs.get('enabled_for_template_deployment', None)
self.enable_soft_delete = kwargs.get('enable_soft_delete', None)
self.create_mode = kwargs.get('create_mode', None)
self.enable_purge_protection = kwargs.get('enable_purge_protection', None)
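# Illustrative sketch (assumed usage, not from the original file): since
# __init__ reads plain keyword arguments, a patch payload can be built like
# this before msrest serializes it via _attribute_map; the tenant ID below is
# a placeholder.
#
#     patch = VaultPatchProperties(
#         tenant_id='00000000-0000-0000-0000-000000000000',
#         enabled_for_deployment=True,
#         enable_soft_delete=True,
#     )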
|
py | 1a44400ae479eea5d01ddd76139ad5b0a71b1027 | from django.test import TestCase
from django.core.exceptions import ValidationError
from oscar.core.compat import get_user_model
from oscar.apps.catalogue.reviews import models
from oscar.test.factories import create_product
from oscar.test.factories import UserFactory
User = get_user_model()
class TestAnAnonymousReview(TestCase):
def setUp(self):
self.product = create_product()
self.data = {
'product': self.product,
'title': 'This product is lovely',
'body': 'I really like this cheese',
'score': 0,
'name': 'JR Hartley',
'email': '[email protected]'
}
def review(self, **kwargs):
if kwargs:
data = self.data.copy()
data.update(kwargs)
else:
data = self.data
return models.ProductReview(**data)
def test_can_be_created(self):
review = self.review()
review.full_clean()
def test_requires_a_title(self):
review = self.review(title="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_a_body(self):
review = self.review(body="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_a_name(self):
review = self.review(name="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_an_email_address(self):
review = self.review(email="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_non_whitespace_title(self):
review = self.review(title=" ")
self.assertRaises(ValidationError, review.full_clean)
def test_starts_with_no_votes(self):
review = self.review()
review.save()
self.assertFalse(review.has_votes)
self.assertEqual(0, review.num_up_votes)
self.assertEqual(0, review.num_down_votes)
def test_has_reviewer_name_property(self):
review = self.review(name="Dave")
self.assertEqual("Dave", review.reviewer_name)
def test_review_moderate_setting_false(self):
with self.settings(OSCAR_MODERATE_REVIEWS=False):
review = self.review()
self.assertEqual(1, review.status)
def test_review_moderate_setting_true(self):
with self.settings(OSCAR_MODERATE_REVIEWS=True):
review = self.review()
self.assertEqual(0, review.status)
class TestAUserReview(TestCase):
def setUp(self):
self.product = create_product()
self.user = UserFactory(first_name="Tom", last_name="Thumb")
self.data = {
'product': self.product,
'title': 'This product is lovely',
'body': 'I really like this cheese',
'score': 0,
'user': self.user
}
def review(self, **kwargs):
if kwargs:
data = self.data.copy()
data.update(kwargs)
else:
data = self.data
return models.ProductReview(**data)
def test_can_be_created(self):
review = self.review()
review.full_clean()
def test_requires_a_title(self):
review = self.review(title="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_a_body(self):
review = self.review(body="")
self.assertRaises(ValidationError, review.full_clean)
def test_has_reviewer_name_property(self):
review = self.review()
self.assertEqual("Tom Thumb", review.reviewer_name)
def test_num_approved_reviews(self):
review = self.review()
review.save()
self.assertEqual(self.product.num_approved_reviews, 1)
self.assertEqual(self.product.reviews.approved().first(), review)
def test_review_moderate_setting_false(self):
with self.settings(OSCAR_MODERATE_REVIEWS=False):
review = self.review()
self.assertEqual(1, review.status)
def test_review_moderate_setting_true(self):
with self.settings(OSCAR_MODERATE_REVIEWS=True):
review = self.review()
self.assertEqual(0, review.status)
class TestVotingOnAReview(TestCase):
def setUp(self):
self.product = create_product()
self.user = UserFactory()
self.voter = UserFactory()
self.review = self.product.reviews.create(
title='This is nice',
score=3,
body="This is the body",
user=self.user)
def test_updates_totals_for_upvote(self):
self.review.vote_up(self.voter)
self.assertTrue(self.review.has_votes)
self.assertEqual(1, self.review.total_votes)
self.assertEqual(1, self.review.delta_votes)
def test_updates_totals_for_downvote(self):
self.review.vote_down(self.voter)
self.assertTrue(self.review.has_votes)
self.assertEqual(1, self.review.total_votes)
self.assertEqual(-1, self.review.delta_votes)
def test_is_permitted_for_normal_user(self):
is_allowed, reason = self.review.can_user_vote(self.voter)
self.assertTrue(is_allowed, reason)
def test_is_not_permitted_for_reviewer(self):
is_allowed, reason = self.review.can_user_vote(self.user)
self.assertFalse(is_allowed, reason)
def test_is_not_permitted_for_previous_voter(self):
self.review.vote_up(self.voter)
is_allowed, reason = self.review.can_user_vote(self.voter)
self.assertFalse(is_allowed, reason)
|
py | 1a4445e5aaf8b3ec29d4c2eda5e6bc7331d3698d | """
Get Line Intersection
Gets the intersection of 2 lines
TESTED REVIT API: 2017
Author: Gui Talarico | github.com/gtalarico
This file is shared on www.revitapidocs.com
For more information visit http://github.com/gtalarico/revitapidocs
License: http://github.com/gtalarico/revitapidocs/blob/master/LICENSE.md
"""
import clr
from Autodesk.Revit.DB import Line, XYZ
from Autodesk.Revit.DB import SetComparisonResult, IntersectionResultArray
def get_intersection(line1, line2):
results = clr.Reference[IntersectionResultArray]()
# See ironpython.net/documentation/dotnet for clr.Reference
result = line1.Intersect(line2, results)
# http://www.revitapidocs.com/2018/51961478-fb36-e00b-2d1b-7db27b0a09e6.htm
    if result != SetComparisonResult.Overlap:
        print('No Intersection')
        return None
    intersection = results.Item[0]
    return intersection.XYZPoint
line1 = Line.CreateBound(XYZ(0,0,0), XYZ(10,0,0))
line2 = Line.CreateBound(XYZ(5,-5,0), XYZ(5,5,0))
point = get_intersection(line1, line2)
print(point)
# <Autodesk.Revit.DB.XYZ object at 0x00000000000001BA [(5.000000000, 0.000000000, 0.000000000)]>
"""
From this discussion:
https://forum.dynamobim.com/t/translating-to-python/13481
C# Equivalent
private XYZ GetIntersection(
Line line1,
Line line2 )
{
IntersectionResultArray results;
SetComparisonResult result
= line1.Intersect( line2, out results );
if( result != SetComparisonResult.Overlap )
throw new InvalidOperationException(
"Input lines did not intersect." );
if( results == null || results.Size != 1 )
throw new InvalidOperationException(
"Could not extract line intersection point." );
IntersectionResult iResult
= results.get_Item( 0 );
return iResult.XYZPoint;
}
"""
|
py | 1a4446fc08930b31f4404db7874284c3d0a70abf | from datetime import datetime
from datetime import timedelta
from datetime import date
import sys
def countLearnCircle(date, itemName):
days = [1,3,7,15,31,63,127,255,511,1023]
result = []
for item in days:
result.append((date + timedelta(days=item)).strftime('%Y-%m-%d'))
return result
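# Note (interpretation, not from the original script): the offsets
# 1, 3, 7, 15, ... are 2**n - 1 days, so each review gap roughly doubles,
# i.e. a simple spaced-repetition schedule for the item being learned.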
if __name__ == '__main__':
separated = "~~"
input = sys.argv[1].split(',')
itemName = sys.argv[2]
date = datetime(int(input[0]), int(input[1]), int(input[2]))
    result = countLearnCircle(date, itemName)
for item in result:
print "%s%s%s"%(item, separated, itemName) |
py | 1a44471a8f8698838da79e54b860f6c91f31c190 | #####DONORSCHOOSE FUNCTIONS
import datetime
from datetime import timedelta, date #for time duration calculations
from dateutil.parser import parse #for fuzzy finding year
import pandas as pd #for the pd.Timestamp type checks used below
def elapseddays(posted, completed):
formatuse = '%Y-%m-%d %H:%M:%S' # The format: see down this page:https://docs.python.org/3/library/datetime.html
otherformat = '%Y-%m-%d'
try:
elapsed_days=completed-posted
except:
try:
elapsed_days = datetime.datetime.strptime(completed,formatuse)-datetime.datetime.strptime(posted,formatuse)
except:
try:
elapsed_days = datetime.datetime.strptime(completed,otherformat)-datetime.datetime.strptime(posted,otherformat)
except:
elapsed_days = 'error'
return(elapsed_days)
def elapsedseconds(posted, completed):
formatuse = '%Y-%m-%d %H:%M:%S' # The format: see down this page:https://docs.python.org/3/library/datetime.html
otherformat = '%Y-%m-%d'
    # parse the end timestamp unless it is already a datetime/Timestamp
    if isinstance(completed, datetime.datetime) or isinstance(completed, pd.Timestamp):
        clock = completed
    else:
        try:
            clock = datetime.datetime.strptime(completed, formatuse)
        except ValueError:
            clock = datetime.datetime.strptime(completed, otherformat)
    # parse the start timestamp unless it is already a datetime/Timestamp
    if isinstance(posted, datetime.datetime) or isinstance(posted, pd.Timestamp):
        startclock = posted
    else:
        try:
            startclock = datetime.datetime.strptime(posted, formatuse)
        except ValueError:
            startclock = datetime.datetime.strptime(posted, otherformat)
    elapsed = (clock - startclock).total_seconds()
return(elapsed)
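# Quick sanity check (illustrative, not in the original module): one full day
# between two date strings should come back as 86400.0 seconds.
#
#     assert elapsedseconds('2020-01-01', '2020-01-02') == 86400.0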
intervals = (
('weeks', 604800), # 60 * 60 * 24 * 7
('days', 86400), # 60 * 60 * 24
('hours', 3600), # 60 * 60
('minutes', 60),
('seconds', 1),
)
def display_time(seconds, granularity=2):
result = []
for name, count in intervals:
value = seconds // count
if value:
seconds -= value * count
if value == 1:
name = name.rstrip('s')
result.append("{} {}".format(value, name))
return ', '.join(result[:granularity])
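# Worked example (illustrative): 90061 seconds = 1 day + 1 hour + 1 minute +
# 1 second, so with the default granularity of 2 only the two largest units
# are shown.
#
#     display_time(90061)     # -> '1 day, 1 hour'
#     display_time(90061, 4)  # -> '1 day, 1 hour, 1 minute, 1 second'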
# Function convert seconds into day.decimal
def ConvertSectoDay(n):
day = n // (24 * 3600)
#print(day) #keep day
n = n % (24 * 3600)
daydec=(n/86400) # add this to day
addem=day+daydec
#https://stackoverflow.com/a/48812729/1602288
holder='{:g}'.format(float('{:.{p}g}'.format(addem, p=5)))
return(float(holder))
def projectover(posted, completed,expiration):
formatuse = '%Y-%m-%d %H:%M:%S' # The format: see down this page:https://docs.python.org/3/library/datetime.html
otherformat = '%Y-%m-%d'
#failed projects were never completed, so in those cases, use the expiration date
# if variable is None:
if completed is None:
try:
clock = datetime.datetime.strptime(expiration,formatuse)
except:
try:
clock = datetime.datetime.strptime(expiration,otherformat)
except:
clock = datetime.datetime.strptime('1900-01-01',otherformat)
else:
try:
clock = datetime.datetime.strptime(completed,formatuse)
except:
try:
clock = datetime.datetime.strptime(completed,otherformat)
except:
clock = datetime.datetime.strptime('1900-01-01',otherformat)
return(clock)
def makedate(posted):
formatuse = '%Y-%m-%d %H:%M:%S' # The format: see down this page:https://docs.python.org/3/library/datetime.html
otherformat = '%Y-%m-%d'
try:
clock = datetime.datetime.strptime(posted,formatuse)
except:
try:
clock = datetime.datetime.strptime(posted,otherformat)
except:
clock = datetime.datetime.strptime('1900-01-01',otherformat)
return(clock)
def Convert_to_clock_x(m):
m=int(m)
if m == 1:
a = 1
if m == 2:
a = 2
if m == 3:
a = 3
if m == 4:
a = 2
if m == 5:
a = 1
if m == 6:
a = 0
if m == 7:
a = -1
if m == 8:
a = -2
if m == 9:
a = -3
if m == 10:
a = -2
if m == 11:
a = -1
if m == 12:
a = 0
return(a)
def Convert_to_clock_y(m):
m=int(m)
if m == 1:
a = 2
if m == 2:
a = 1
if m == 3:
a = 0
if m == 4:
a = -1
if m == 5:
a = -2
if m == 6:
a = -3
if m == 7:
a = -2
if m == 8:
a = -1
if m == 9:
a = 0
if m == 10:
a = 1
if m == 11:
a = 2
if m == 12:
a = 3
return(a)
import matplotlib.pyplot as plt
import seaborn as sns
# function for producing nice, smoothed line plots of a continuous variable (var_dist), split by a categorical variable
def comp_dist(df_to_use, cat_to_subset, var_dist, figw,figh,linew):
plt.figure(figsize=(figw,figh))
sns.set_context( rc={"lines.linewidth": linew})
for grp in sorted(df_to_use[cat_to_subset].unique()):
grp_df = df_to_use.loc[df_to_use[cat_to_subset] == grp]
sns.distplot(grp_df[var_dist], hist=False, label=grp)
plt.xlim(0, 90)
plt.show()
import math
def getxy(day):
x = math.sin((180 - day * 0.9849521203830369)/180 * 3.141)
y = math.cos((180 - day * 0.9849521203830369)/180 * 3.141)
return x, y
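# Note (interpretation, not from the original source): 0.9849521203830369 is
# approximately 360/365.5, so getxy() maps a day-of-year onto the unit circle
# with the year wrapping once around; e.g. getxy(0) evaluates sin/cos at
# about pi radians (the code uses 3.141), giving roughly (0.0006, -1.0).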
|
py | 1a44474396ee86e148196a43414df5c81b998b06 | from toontown.toonbase import ToontownGlobals
ALLOW_TEMP_MINIGAMES = simbase.config.GetBool('allow-temp-minigames', False)
TEMP_MG_ID_COUNTER = ToontownGlobals.TravelGameId - 1
TempMgCtors = {}
def _printMessage(message):
print '\n\n!!!', message, '\n\n'
def _registerTempMinigame(name, Class, id, minPlayers=1, maxPlayers=4):
if not ALLOW_TEMP_MINIGAMES:
_printMessage('registerTempMinigame WARNING: allow-temp-minigames config is set to false, but we are trying to register temp minigame ' + name)
import traceback
traceback.print_stack()
return
ToontownGlobals.MinigameIDs += (id,)
ToontownGlobals.MinigameNames[name] = id
TempMgCtors[id] = Class
for i in xrange(minPlayers, maxPlayers):
ToontownGlobals.MinigamePlayerMatrix[i] += (id,)
_printMessage('registerTempMinigame: ' + name)
if ALLOW_TEMP_MINIGAMES:
pass |
py | 1a444744d875ae2d360ec84926dd5d820a000ee5 | from checkov.common.bridgecrew.integration_features.features import * # noqa
|
py | 1a44480ebe2afaed890ef1df03af1c99cebd11fc | from typing import TextIO
def write_indent(outfile: TextIO, count: int=1):
"""Writes a tab character to the given output file
Args:
outfile: The output file to be written
count: The number of tab characters to be written
"""
for i in range(0, count):
outfile.write("\t")
def write_newline(outfile: TextIO, count:int=1):
"""Writes a newline character to the given output file
Args:
outfile: The output file to be written
count: The number of newline characters to be written
"""
for i in range(0, count):
outfile.write("\n")
|
py | 1a44485236405a38ca59d221e538320945236455 | CONSTRUCTOR = 'constructor'
INTERFACE = 'interface'
ENUM = 'enum'
FUNCTION = 'function'
PROPERTY = 'property'
def _CommentHasFlag(comment, flag_name):
assert flag_name.startswith('@'), 'flag name should start with @'
for flag in comment.flags:
if flag.name == flag_name:
return True
return False
def DetermineSymbolType(symbol):
comment = symbol.comment
assert comment, 'Expected to have comment'
if _CommentHasFlag(comment, '@constructor'):
return CONSTRUCTOR
if _CommentHasFlag(comment, '@interface'):
return INTERFACE
if _CommentHasFlag(comment, '@enum'):
return ENUM
if _CommentHasFlag(comment, '@param') or _CommentHasFlag(comment, '@return'):
return FUNCTION
# TODO(nnaze): Handle functions with no @param or @return.
return PROPERTY
|
py | 1a444878b40ca42ad0f33e27fed51c7b4d73e837 | from flask import Blueprint
auth = Blueprint('auth',__name__)
from . import views,forms
from . import db |
py | 1a4449ecf625c16fc9adb92e0d09e28b496f6155 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2016 Timothy Dozat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import re
import argparse
import numpy as np
from collections import defaultdict
#***************************************************************
class DepTree:
""" """
#=============================================================
def __init__(self, buff):
""" """
self._head2deps = defaultdict(list)
self._dep2head = dict()
self._str = []
for line in buff:
dep_idx = int(line[0])
head_idx = int(line[6])
self.head2deps[head_idx].append(dep_idx)
self.dep2head[dep_idx] = head_idx
self._str.append(line[1])
return
#=============================================================
def count_nonprojective(self):
""" """
nonproj = []
for dep in self:
head = self.dep2head[dep]
span_min = min(dep, head)
span_max = max(dep, head)
for mid_dep in range(span_min+1, span_max):
mid_head = self.dep2head[mid_dep]
if mid_head < span_min or mid_head > span_max:
crossing = True
break
else:
crossing = False
nonproj.append(int(crossing))
return nonproj
#=============================================================
@property
def head2deps(self):
return self._head2deps
@property
def dep2head(self):
return self._dep2head
#=============================================================
def __iter__(self):
return (dep for dep in self.dep2head)
def __len__(self):
return len(self.dep2head)
def __str__(self):
return ' '.join(self._str)+'\n'
#***************************************************************
if __name__ == '__main__':
""" """
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='+')
args = parser.parse_args()
for filename in args.files:
lang = re.search('([-\w]*)-ud', filename).group(1)
nonproj = []
with open(filename) as f:
buff = []
for line in f:
line = line.strip()
if line:
if not re.match('#|[0-9]+[-.][0-9]+', line):
buff.append(line.split('\t'))
else:
tree = DepTree(buff)
nonproj.extend(tree.count_nonprojective())
buff = []
        print(lang, np.mean(nonproj) * 100, file=sys.stderr)
|
py | 1a444a2e45278573fa4dd8273948220dcb494006 | from simple_rest_client.api import API
from simple_rest_client.resource import Resource
class FileUploadResource(Resource):
actions = {"create": {"method": "POST", "url": "post.php?dir=example"}}
# http://blog.henrycipolla.com/2011/12/testing-multipartform-data-uploads-with-post-test-server/
files = {"file": open("github.py", "rb")}
post_test_server_api = API(api_root_url="http://posttestserver.com/", timeout=10)
post_test_server_api.add_resource(resource_name="file_upload", resource_class=FileUploadResource)
print(
"post_test_server_api.file_upload.create={!r}".format(
post_test_server_api.file_upload.create(files=files).body
)
)
|
py | 1a444d7fc1a01f1c7098c66922546a5b32bd7e8f | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import hydra
@hydra.main(config_path="config.yaml")
def my_app(_):
pass
if __name__ == "__main__":
my_app()
|
py | 1a444dbb1ca8f355e94e031c9507b75901f5ed64 | """Container for creating and displaying diffs."""
import copy
import difflib
import json
import pygments
from pygments import formatters, lexers
DIFF_LEXER = lexers.get_lexer_by_name('diff')
DIFF_FORMATTER = formatters.get_formatter_by_name('terminal16m')
class DiffText:
"""Generic text diffs."""
def __init__(self, content):
self.original_content = content
self.preview = None
self.before = None
self.after = None
self._diff_lines = None
def __enter__(self):
self.preview = copy.deepcopy(self.original_content)
self.before = self.copy()
return self.preview
def __exit__(self, exc_type, exc_value, traceback):
self.after = self.copy()
def copy(self):
"""Duplicate string for modification."""
return str(self.preview)
@property
def diff(self):
"""Generate diff."""
_diff = difflib.unified_diff(
self.before.split('\n'),
self.after.split('\n'),
fromfile='before changes',
tofile='after changes',
lineterm='',
)
self._diff_lines = list(_diff)
return self._diff_lines
@property
def highlighted(self):
"""Return syntax highlighted diff."""
diff = '\n'.join(self.diff)
highlighted_diff = pygments.highlight(diff, DIFF_LEXER, DIFF_FORMATTER)
highlighted_diff = highlighted_diff.rstrip('\n')
return highlighted_diff
class DiffJson(DiffText):
"""JSON diff."""
def copy(self):
"""Convert contents into static JSON string."""
return json.dumps(self.preview, indent=2)
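# Illustrative usage sketch (assumed, not from the original module): DiffText
# subclasses are context managers; mutate the yielded preview inside the
# block, then read the rendered diff afterwards.
if __name__ == '__main__':
    differ = DiffJson({'name': 'old'})
    with differ as preview:
        preview['name'] = 'new'
    print(differ.highlighted)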
|
py | 1a444dc1ca084ebdfa353d9e647729f3d981f29b | # -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# RCACondition
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019, The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import datetime
# Third-party modules
import six
from bson import ObjectId
@six.python_2_unicode_compatible
class RCACondition(object):
def __init__(self, alarm_class, condition):
self.name = "%s::%s" % (alarm_class.name, condition.name)
self.window = condition.window
self.root = condition.root
self.same_object = False
# Build condition expression
self.condition = compile(condition.condition, "<string>", "eval")
# Build match condition expression
x = [
"'alarm_class': ObjectId('%s')" % self.root.id,
"'timestamp__gte': alarm.timestamp - datetime.timedelta(seconds=%d)" % self.window,
"'timestamp__lte': alarm.timestamp + datetime.timedelta(seconds=%d)" % self.window,
]
if self.root.id == alarm_class.id:
x += ["'id__ne': alarm.id"]
for k, v in six.iteritems(condition.match_condition):
if k == "managed_object" and v == "alarm.managed_object.id":
self.same_object = True
x += ["'%s': %s" % (k, v)]
self.match_condition = compile("{%s}" % ", ".join(x), "<string>", "eval")
# Build reverse match condition expression
x = [
"'alarm_class': ObjectId('%s')" % alarm_class.id,
"'root__exists': False",
"'timestamp__gte': alarm.timestamp - datetime.timedelta(seconds=%d)" % self.window,
"'timestamp__lte': alarm.timestamp + datetime.timedelta(seconds=%d)" % self.window,
]
if self.root.id == alarm_class.id:
x += ["'id__ne': alarm.id"]
if self.same_object:
x += ["'managed_object': alarm.managed_object"]
self.reverse_match_condition = compile("{%s}" % ", ".join(x), "<string>", "eval")
def __str__(self):
return self.name
def get_context(self, alarm):
return {"alarm": alarm, "datetime": datetime, "ObjectId": ObjectId}
def check_condition(self, alarm):
return eval(self.condition, {}, self.get_context(alarm))
def get_match_condition(self, alarm, **kwargs):
r = eval(self.match_condition, {}, self.get_context(alarm))
if kwargs:
r.update(kwargs)
return r
def get_reverse_match_condition(self, alarm):
return eval(self.reverse_match_condition, {}, self.get_context(alarm))
|
py | 1a444ecc6f379e1db3b7c19539c1074cc37b2c48 | from .ensembles import RandomForestRegressor
from .ensembles import RandomForestClassifier
|
py | 1a444f26c0b9b7ab982a9d712db584a450ad3f1c | import rudra.utils.helper as helper
import requests
import pytest
def test_get_github_repo_info():
gh_repo1 = 'https://github.com/fabric8-analytics/f8a-hpf-insights'
gh_repo2 = 'https://github.com/fabric8-analytics/f8a-hpf-insights.git'
gh_repo3 = 'git+https://github.com/fabric8-analytics/f8a-hpf-insights'
gh_repo4 = 'fabric8-analytics/f8a-hpf-insights'
user, repo = helper.get_github_repo_info(gh_repo1)
assert user == 'fabric8-analytics' and repo == 'f8a-hpf-insights'
user, repo = helper.get_github_repo_info(gh_repo2)
assert user == 'fabric8-analytics' and repo == 'f8a-hpf-insights'
user, repo = helper.get_github_repo_info(gh_repo3)
assert user == 'fabric8-analytics' and repo == 'f8a-hpf-insights'
user, repo = helper.get_github_repo_info(gh_repo4)
assert user == 'fabric8-analytics' and repo == 'f8a-hpf-insights'
def test_get_training_file_url():
user = 'fabric8-analytics'
repo = 'f8a-hpf-insights'
file_url = helper.get_training_file_url(user, repo)
resp = requests.get(file_url)
assert resp.status_code == 200
file_url = helper.get_training_file_url(user, repo, branch='training-code')
resp = requests.get(file_url)
assert resp.status_code == 200
file_url = helper.get_training_file_url(
user, repo, training_file_path='src/flask_endpoint.py')
resp = requests.get(file_url)
assert resp.status_code == 200
def test_load_hyper_params():
# mock command line args
helper.argv = ['helper.py', '{"a": 111, "b": "some text"}']
hyper_params = helper.load_hyper_params()
assert hyper_params.get('a') == 111
assert hyper_params.get('b') == "some text"
def test_cache_dict_with_zero_max_size():
cache_dict = helper.CacheDict(0)
with pytest.raises(KeyError):
cache_dict['key1'] = 'value1'
assert len(cache_dict) == 0
def test_cache_dict_with_one_max_size():
cache_dict = helper.CacheDict(1)
cache_dict['key1'] = 'value1'
cache_dict['key2'] = 'value2'
assert len(cache_dict) == 1
assert 'key2' in cache_dict
assert 'key1' not in cache_dict
def test_cache_dict():
# default max_len = 1024
cache_dict = helper.CacheDict()
for i in range(2000):
cache_dict[i] = i * i
assert len(cache_dict) == cache_dict.max_len
assert cache_dict[i] == i * i
del cache_dict[i]
assert len(cache_dict) == cache_dict.max_len - 1
assert cache_dict[cache_dict.max_len - 2] == pow(cache_dict.max_len - 2, 2)
assert len(list(cache_dict)) == cache_dict.max_len - 1
assert str(cache_dict.max_len - 2) in str(cache_dict)
|
py | 1a444f2d9605692befbbae4eab5c6024d4007fa6 | #
def SortCharacters(s):
order = [0] * len(s)
count = {'$': 0, "A": 0, 'C': 0, 'G': 0, 'T': 0}
for char in s:
count[char] += 1
symb = ['$', 'A', 'C', 'G', 'T']
for i in range(1, 5):
count[symb[i]] += count[symb[i-1]]
for j in range(len(s) - 1, -1, -1):
c = s[j]
count[c] -= 1
order[count[c]] = j
return order
def ComputeCharClasses(s, order):
class_chars = [0] * len(s)
for i in range(1, len(s)):
if s[order[i]] == s[order[i-1]]:
class_chars[order[i]] = class_chars[order[i-1]]
else:
class_chars[order[i]] = class_chars[order[i-1]] + 1
return class_chars
def SortDoubled(s, L, old_order, old_class):
count = [0] * len(s)
new_order = [0] * len(s)
for i in range(len(s)):
count[old_class[i]] += 1
for i in range(1, len(s)):
count[i] += count[i-1]
for j in range(len(s) - 1, -1, -1):
start = (old_order[j] - L + len(s)) % len(s)
cl = old_class[start]
count[cl] -= 1
new_order[count[cl]] = start
return new_order
def UpdateClasses(new_order, old_class, L):
n = len(new_order)
new_class = [0] * n
for i in range(1, n):
cur = new_order[i]
mid = (cur + L) % n
prev = new_order[i-1]
mid_prev = (prev + L) % n
if old_class[cur] == old_class[prev] and old_class[mid] == old_class[mid_prev]:
new_class[cur] = new_class[prev]
else:
new_class[cur] = new_class[prev] + 1
return new_class
def BuildSuffixArray(S):
order = SortCharacters(S)
class_ = ComputeCharClasses(S, order)
L = 1
while L < len(S):
order = SortDoubled(S, L, order, class_)
class_ = UpdateClasses(order, class_, L)
L = 2 * L
return order
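# Worked example (illustrative): for S = 'ACA$' the sorted suffixes are
# '$' (index 3), 'A$' (2), 'ACA$' (0) and 'CA$' (1), so
# BuildSuffixArray('ACA$') returns [3, 2, 0, 1].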
if __name__ == '__main__':
text = input()
suffix_array = BuildSuffixArray(text)
for elem in suffix_array:
print(elem, end=' ') |
py | 1a444f86a10d3b388d6f0033f74b7f00f66aaa41 | """
ASGI config for documentStore project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'documentStore.settings')
application = get_asgi_application()
|
py | 1a44511cfeb99e350f8c3394fa51c5cfbf0f3b6c | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for tensorflow.python.training.saver.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import saver
class SaverLargePartitionedVariableTest(test.TestCase):
# Need to do this in a separate test because of the amount of memory needed
# to run this test.
def testLargePartitionedVariables(self):
save_path = os.path.join(self.get_temp_dir(), "large_variable")
var_name = "my_var"
# Saving large partition variable.
with session.Session("", graph=ops.Graph()) as sess:
with ops.device("/cpu:0"):
# Create a partitioned variable which is larger than int32 size but
# split into smaller sized variables.
init = lambda shape, dtype, partition_info: constant_op.constant(
True, dtype, shape)
partitioned_var = partitioned_variables.create_partitioned_variables(
[1 << 31], [4], init, dtype=dtypes.bool, name=var_name)
variables.global_variables_initializer().run()
save = saver.Saver(partitioned_var)
val = save.save(sess, save_path)
self.assertEqual(save_path, val)
if __name__ == "__main__":
test.main()
|
py | 1a44511f212929f6df19cc882797fb6ad1a5610a | # visualization functions
import matplotlib as mpl
import matplotlib.pyplot as plt
def plot_arrays(sample, output):
"""
Create a figure with two plots: the original sample, and a corresponding
prediction.
"""
assert len(sample.shape) == 2 and len(output.shape) == 2
cmap = mpl.colors.ListedColormap(['purple', 'white', 'black', 'orange'])
bounds = [-2.5, -.5, .5, 1.5, 2.5]
norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
# calculate the difference. Since the output may be longer, align the
# difference to beginning of the arrays
diff = sample - output[:sample.shape[0], :sample.shape[1]]
diff *= -2.
# find the areas where the prediction doesn't match the sample
is_diff = diff != 0.
# change those locations in output so they plot to the correct color
output[:sample.shape[0],:sample.shape[1]][is_diff] = diff[is_diff]
# plot images using the appropriate color map
fig = plt.figure(1)
plt.subplot(121)
plt.imshow(sample, cmap=cmap, norm=norm)
plt.subplot(122)
img2 = plt.imshow(output, cmap=cmap, norm=norm)
bar = plt.colorbar(img2, cmap=cmap, norm=norm, boundaries=bounds, ticks=[-2, 0, 1, 2])
bar.ax.set_yticklabels(["False 0", "True 0", "True 1", "False 1"])
plt.show()
|
py | 1a44516e19d75dd1707a9ffb5b41f42be4ef6ab3 | from django.apps import AppConfig
class GraphqlAppConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'graphql_app'
|
py | 1a44519aa6783552f36686cf5e6554124301aa4d | from django.conf import settings
from rest_framework.routers import DefaultRouter, SimpleRouter
from app.users.api.views import UserViewSet
if settings.DEBUG:
router = DefaultRouter()
else:
router = SimpleRouter()
router.register("users", UserViewSet)
app_name = "api"
urlpatterns = router.urls
|
py | 1a445340fe661000257a58d26dd8704e974012e9 | #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import argparse
from datetime import datetime
import tensorflow as tf
import metadata
import input
import model
# ******************************************************************************
# YOU MAY MODIFY THIS FUNCTION TO ADD/REMOVE PARAMS OR CHANGE THE DEFAULT VALUES
# ******************************************************************************
def initialise_hyper_params(args_parser):
"""
Define the arguments with the default values,
parses the arguments passed to the task,
and set the HYPER_PARAMS global variable
Args:
args_parser
"""
# data files arguments
args_parser.add_argument(
'--train-files',
help='GCS or local paths to training data',
nargs='+',
required=True
)
args_parser.add_argument(
'--eval-files',
help='GCS or local paths to evaluation data',
nargs='+',
required=True
)
args_parser.add_argument(
'--feature-stats-file',
help='GCS or local paths to feature statistics json file',
nargs='+',
default=None
)
# Experiment arguments - training
args_parser.add_argument(
'--train-steps',
help="""
Steps to run the training job for. If --num-epochs and --train-size are not specified,
        this must be set; otherwise the training job will run indefinitely.
        If --num-epochs and --train-size are specified, then --train-steps will be:
(train-size/train-batch-size) * num-epochs\
""",
default=1000,
type=int
)
args_parser.add_argument(
'--train-batch-size',
help='Batch size for each training step',
type=int,
default=200
)
args_parser.add_argument(
'--train-size',
help='Size of training set (instance count)',
type=int,
default=None
)
args_parser.add_argument(
'--num-epochs',
help="""\
Maximum number of training data epochs on which to train.
If both --train-size and --num-epochs are specified,
--train-steps will be: (train-size/train-batch-size) * num-epochs.\
""",
default=10,
type=int,
)
# Experiment arguments - evaluation
args_parser.add_argument(
'--eval-every-secs',
help='How long to wait before running the next evaluation',
default=120,
type=int
)
args_parser.add_argument(
'--eval-steps',
help="""\
        Number of steps to run evaluation for at each checkpoint.
Set to None to evaluate on the whole evaluation data
""",
default=None,
type=int
)
args_parser.add_argument(
'--eval-batch-size',
help='Batch size for evaluation steps',
type=int,
default=200
)
# features processing arguments
args_parser.add_argument(
'--num-buckets',
help='Number of buckets into which to discretize numeric columns',
default=10,
type=int
)
args_parser.add_argument(
'--embedding-size',
help='Number of embedding dimensions for categorical columns. value of 0 means no embedding',
default=4,
type=int
)
# Estimator arguments
args_parser.add_argument(
'--learning-rate',
help="Learning rate value for the optimizers",
default=0.1,
type=float
)
args_parser.add_argument(
'--hidden-units',
help="""\
Hidden layer sizes to use for DNN feature columns, provided in comma-separated layers.
If --scale-factor > 0, then only the size of the first layer will be used to compute
the sizes of subsequent layers \
""",
default='64,32,16,8'
)
args_parser.add_argument(
'--layer-sizes-scale-factor',
help="""\
Determine how the size of the layers in the DNN decays.
If value = 0 then the provided --hidden-units will be taken as is\
""",
default=0.7,
type=float
)
args_parser.add_argument(
'--num-layers',
help='Number of layers in the DNN. If --scale-factor > 0, then this parameter is ignored',
default=3,
type=int
)
args_parser.add_argument(
'--dropout-prob',
help="The probability we will drop out a given coordinate",
default=None
)
args_parser.add_argument(
'--encode-one-hot',
help="""\
If set to True, the categorical columns will be encoded as One-Hot indicators in the deep part of the DNN model.
Otherwise, the categorical columns will only be used in the wide part of the DNN model
""",
action='store_true',
default=True,
)
args_parser.add_argument(
'--as-wide-columns',
help="""\
If set to True, the categorical columns will be used in the wide part of the DNN model
""",
action='store_true',
default=True,
)
# Saved model arguments
args_parser.add_argument(
'--job-dir',
help='GCS location to write checkpoints and export models',
required=True
)
args_parser.add_argument(
'--reuse-job-dir',
action='store_true',
default=False,
help="""\
Flag to decide if the model checkpoint should
be re-used from the job-dir. If False then the
job-dir will be deleted"""
)
args_parser.add_argument(
'--export-format',
help='The input format of the exported SavedModel binary',
choices=['JSON', 'CSV', 'EXAMPLE'],
default='JSON'
)
# Argument to turn on all logging
args_parser.add_argument(
'--verbosity',
choices=[
'DEBUG',
'ERROR',
'FATAL',
'INFO',
'WARN'
],
default='INFO',
)
return args_parser.parse_args()
# ******************************************************************************
# YOU NEED NOT TO CHANGE THE FUNCTION TO RUN THE EXPERIMENT
# ******************************************************************************
def run_experiment(run_config):
"""Train, evaluate, and export the model using tf.estimator.train_and_evaluate API"""
train_input_fn = input.generate_input_fn(
file_names_pattern=HYPER_PARAMS.train_files,
mode=tf.estimator.ModeKeys.TRAIN,
num_epochs=HYPER_PARAMS.num_epochs,
batch_size=HYPER_PARAMS.train_batch_size
)
eval_input_fn = input.generate_input_fn(
file_names_pattern=HYPER_PARAMS.eval_files,
mode=tf.estimator.ModeKeys.EVAL,
batch_size=HYPER_PARAMS.eval_batch_size
)
exporter = tf.estimator.FinalExporter(
'estimator',
input.SERVING_FUNCTIONS[HYPER_PARAMS.export_format],
as_text=False # change to true if you want to export the model as readable text
)
# compute the number of training steps based on num_epoch, train_size, and train_batch_size
if HYPER_PARAMS.train_size is not None and HYPER_PARAMS.num_epochs is not None:
train_steps = (HYPER_PARAMS.train_size / HYPER_PARAMS.train_batch_size) * \
HYPER_PARAMS.num_epochs
else:
train_steps = HYPER_PARAMS.train_steps
train_spec = tf.estimator.TrainSpec(
train_input_fn,
max_steps=int(train_steps)
)
eval_spec = tf.estimator.EvalSpec(
eval_input_fn,
steps=HYPER_PARAMS.eval_steps,
exporters=[exporter],
name='estimator-eval',
throttle_secs=HYPER_PARAMS.eval_every_secs,
)
print("* experiment configurations")
print("===========================")
print("Train size: {}".format(HYPER_PARAMS.train_size))
print("Epoch count: {}".format(HYPER_PARAMS.num_epochs))
print("Train batch size: {}".format(HYPER_PARAMS.train_batch_size))
print("Training steps: {} ({})".format(int(train_steps),
"supplied" if HYPER_PARAMS.train_size is None else "computed"))
print("Evaluate every: {} seconds".format(HYPER_PARAMS.eval_every_secs))
print("===========================")
if metadata.TASK_TYPE == "classification":
estimator = model.create_classifier(
config=run_config
)
elif metadata.TASK_TYPE == "regression":
estimator = model.create_regressor(
config=run_config
)
else:
estimator = model.create_estimator(
config=run_config
)
# train and evaluate
tf.estimator.train_and_evaluate(
estimator,
train_spec,
eval_spec
)
# ******************************************************************************
# THIS IS ENTRY POINT FOR THE TRAINER TASK
# ******************************************************************************
def main():
print('')
print('Hyper-parameters:')
print(HYPER_PARAMS)
print('')
# Set python level verbosity
tf.logging.set_verbosity(HYPER_PARAMS.verbosity)
# Set C++ Graph Execution level verbosity
os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(tf.logging.__dict__[HYPER_PARAMS.verbosity] / 10)
# Directory to store output model and checkpoints
model_dir = HYPER_PARAMS.job_dir
# If job_dir_reuse is False then remove the job_dir if it exists
print("Resume training:", HYPER_PARAMS.reuse_job_dir)
if not HYPER_PARAMS.reuse_job_dir:
if tf.gfile.Exists(HYPER_PARAMS.job_dir):
tf.gfile.DeleteRecursively(HYPER_PARAMS.job_dir)
print("Deleted job_dir {} to avoid re-use".format(HYPER_PARAMS.job_dir))
else:
print("No job_dir available to delete")
else:
print("Reusing job_dir {} if it exists".format(HYPER_PARAMS.job_dir))
run_config = tf.estimator.RunConfig(
tf_random_seed=19830610,
log_step_count_steps=1000,
save_checkpoints_secs=HYPER_PARAMS.eval_every_secs, # change frequency of saving checkpoints
keep_checkpoint_max=3,
model_dir=model_dir
)
run_config = run_config.replace(model_dir=model_dir)
print("Model Directory:", run_config.model_dir)
# Run the train and evaluate experiment
time_start = datetime.utcnow()
print("")
print("Experiment started at {}".format(time_start.strftime("%H:%M:%S")))
print(".......................................")
run_experiment(run_config)
time_end = datetime.utcnow()
print(".......................................")
print("Experiment finished at {}".format(time_end.strftime("%H:%M:%S")))
print("")
time_elapsed = time_end - time_start
print("Experiment elapsed time: {} seconds".format(time_elapsed.total_seconds()))
print("")
args_parser = argparse.ArgumentParser()
HYPER_PARAMS = initialise_hyper_params(args_parser)
if __name__ == '__main__':
main()
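# Example invocation (illustrative; the bucket paths are placeholders):
#
#     python task.py \
#         --train-files gs://my-bucket/data/train-*.csv \
#         --eval-files gs://my-bucket/data/eval-*.csv \
#         --job-dir gs://my-bucket/models/run1 \
#         --train-size 100000 --num-epochs 10 --train-batch-size 200
#
# With both --train-size and --num-epochs set, the number of training steps
# is computed as (100000 / 200) * 10 = 5000 rather than taken from
# --train-steps.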
|
py | 1a44537c4e6154f1fcea0aa2343c2e780696fc3f | from .client import Client # noqa
from .params import DataShape, DataType, ListSymbolsFormat, Params # noqa
from .jsonrpc_client import JsonRpcClient, MsgpackRpcClient # noqa
from .grpc_client import GRPCClient # noqa
# alias
Param = Params # noqa
from .stream import StreamConn # noqa
__version__ = '0.18'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.