ext (9 classes) | sha (40 chars) | content (3 – 1.04M chars) |
---|---|---|
py | 7dfe5d26fa942b39134eead9072785ae9c18dc5a | import os
import shutil
import torch
from collections import OrderedDict
import glob
class Saver(object):
def __init__(self, args):
self.args = args
self.directory = os.path.join('run', args.dataset, args.checkname)
self.runs = sorted(glob.glob(os.path.join(self.directory, 'experiment_*')))
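        # next experiment id = last existing run id + 1, or 0 when no runs exist yet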
run_id = int(self.runs[-1].split('_')[-1]) + 1 if self.runs else 0
self.experiment_dir = os.path.join(self.directory, 'experiment_{}'.format(str(run_id)))
if not os.path.exists(self.experiment_dir):
os.makedirs(self.experiment_dir)
def save_checkpoint(self, state, is_best, filename='checkpoint.pth.tar'):
"""Saves checkpoint to disk"""
filename = os.path.join(self.experiment_dir, filename)
torch.save(state, filename)
if is_best:
best_pred = state['best_pred']
with open(os.path.join(self.experiment_dir, 'best_pred.txt'), 'w') as f:
f.write(str(best_pred))
if self.runs:
previous_miou = [0.0]
for run in self.runs:
run_id = run.split('_')[-1]
path = os.path.join(self.directory, 'experiment_{}'.format(str(run_id)), 'best_pred.txt')
if os.path.exists(path):
with open(path, 'r') as f:
miou = float(f.readline())
previous_miou.append(miou)
else:
continue
max_miou = max(previous_miou)
                # only promote to model_best when it beats every previous run
                if best_pred > max_miou:
                    shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar'))
            else:
                shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar'))
def save_experiment_config(self):
logfile = os.path.join(self.experiment_dir, 'parameters.txt')
log_file = open(logfile, 'w')
p = OrderedDict()
        p['dataset'] = self.args.dataset
p['backbone'] = self.args.backbone
p['out_stride'] = self.args.out_stride
p['lr'] = self.args.lr
p['lr_scheduler'] = self.args.lr_scheduler
p['loss_type'] = self.args.loss_type
p['epoch'] = self.args.epochs
p['base_size'] = self.args.base_size
p['crop_size'] = self.args.crop_size
for key, val in p.items():
log_file.write(key + ':' + str(val) + '\n')
log_file.close() |
py | 7dfe5d449c4186c8d6a0f12711b522aeac518f46 | import json
import requests
from config import *
def get_country_top_players(country):
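    # map the country's name to its top-50 player tags; the '#' in each tag is
    # replaced with '%' for use in request URLs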
response = requests.get(API_URL + '/locations/' + str(country['id']) + '/rankings/players?limit=50',
headers=HEADERS).json()
return {
country['name']: [player['tag'].replace('#', '%') for player in response['items']]
}
def get_global_top_players_api():
try:
with open('countries.json', 'r') as countries_data:
countries = json.load(countries_data)
with open('top_players.json', 'w') as top_players_file:
json.dump([get_country_top_players(country) for country in countries], top_players_file)
return 'Success'
except Exception as e:
print(e)
return 'Failure'
def get_global_top_players():
try:
        with open('top_players.json', 'r') as top_players_data:
return json.load(top_players_data)
except Exception as e:
print(e)
return None
if __name__ == '__main__':
print(get_global_top_players())
|
py | 7dfe5d7b49f44603f61b395ba604808ec61d3074 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self, split=False):
self.nodes = start_nodes(4, self.options.tmpdir, [['-usehd=0'], ['-usehd=0'], ['-usehd=0'], ['-usehd=0']])
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
        print("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(2000)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 15)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 22 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 26 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 26, self.nodes[1].getnewaddress() : 25 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 50:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
        # test a fundrawtransaction which will not get a change output      #
#####################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 50:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(50) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
if aUtx['amount'] == 50:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 60 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
if aUtx['amount'] == 50:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 60, self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 10}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():11,self.nodes[1].getnewaddress():12,self.nodes[1].getnewaddress():1,self.nodes[1].getnewaddress():13,self.nodes[1].getnewaddress():2,self.nodes[1].getnewaddress():3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
        #compare fee of a 4of5 multisig p2sh transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 12 CSS to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 12)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():11}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('11.0000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
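        # encryptwallet shuts that node down on its own, so drop it before stopping the rest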
stop_nodes(self.nodes)
wait_bitcoinds()
self.nodes = start_nodes(4, self.options.tmpdir, [['-usehd=0'], ['-usehd=0'], ['-usehd=0'], ['-usehd=0']])
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 12)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():11}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
        # make sure funds are received at node0 (11 sent + 500 block reward)
assert_equal(oldBalance+Decimal('511.0000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('500.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
if __name__ == '__main__':
RawTransactionsTest().main()
|
py | 7dfe5d84c6633ee51581dccc81e66699e21467b1 | import hue_requests, temperature
KITCHEN = 'kitchen'
OUTDOOR = 'street'
HALL = 'hall'
MAIN_ROOM = 'main room'
spanish_name = {
KITCHEN: 'interior',
OUTDOOR: 'exterior',
HALL: 'hall',
MAIN_ROOM: 'habitación'
}
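# Hue sensor IDs for each room, as configured on the bridge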
sensor_id = {
KITCHEN: '14',
OUTDOOR: '5',
HALL: '45',
MAIN_ROOM: '52'
}
def get_all_info():
temperature.get_all_info()
if __name__ == '__main__':
hue_requests.set_endpoint()
get_all_info()
|
py | 7dfe5e0619e56ad67502cb4f2ea65720ad5ab40a | # -*- coding: utf-8 -*-
bl_info= {
"name": "mmd_tools",
"author": "sugiany",
"version": (0, 6, 0),
"blender": (2, 70, 0),
"location": "View3D > Tool Shelf > MMD Tools Panel",
"description": "Utility tools for MMD model editing. (powroupi's forked version)",
"warning": "",
"wiki_url": "https://github.com/powroupi/blender_mmd_tools/wiki",
"tracker_url": "https://github.com/powroupi/blender_mmd_tools/issues",
"category": "Object"}
if "bpy" in locals():
if bpy.app.version < (2, 71, 0):
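        # older Blender bundles a Python without importlib.reload; use imp as a stand-in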
import imp as importlib
else:
import importlib
importlib.reload(properties)
importlib.reload(operators)
importlib.reload(panels)
else:
import bpy
import logging
from bpy.types import AddonPreferences
from bpy.props import StringProperty
from bpy.app.handlers import persistent
from . import properties
from . import operators
from . import panels
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
class MMDToolsAddonPreferences(AddonPreferences):
# this must match the addon name, use '__package__'
# when defining this in a submodule of a python package.
bl_idname = __name__
shared_toon_folder = StringProperty(
name="Shared Toon Texture Folder",
description=('Directory path to toon textures. This is normally the ' +
                     '"Data" directory within your MikuMikuDance directory'),
subtype='DIR_PATH',
)
base_texture_folder = StringProperty(
name='Base Texture Folder',
description='Path for textures shared between models',
subtype='DIR_PATH',
)
dictionary_folder = StringProperty(
name='Dictionary Folder',
description='Path for searching csv dictionaries',
subtype='DIR_PATH',
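        # default to this add-on's own directory (path minus the trailing "__init__.py", 11 chars)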
default=__file__[:-11],
)
def draw(self, context):
layout = self.layout
layout.prop(self, "shared_toon_folder")
layout.prop(self, "base_texture_folder")
layout.prop(self, "dictionary_folder")
def menu_func_import(self, context):
self.layout.operator(operators.fileio.ImportPmx.bl_idname, text="MikuMikuDance Model (.pmd, .pmx)")
self.layout.operator(operators.fileio.ImportVmd.bl_idname, text="MikuMikuDance Motion (.vmd)")
self.layout.operator(operators.fileio.ImportVpd.bl_idname, text="Vocaloid Pose Data (.vpd)")
def menu_func_export(self, context):
self.layout.operator(operators.fileio.ExportPmx.bl_idname, text="MikuMikuDance Model (.pmx)")
self.layout.operator(operators.fileio.ExportVmd.bl_idname, text="MikuMikuDance Motion (.vmd)")
self.layout.operator(operators.fileio.ExportVpd.bl_idname, text="Vocaloid Pose Data (.vpd)")
def menu_func_armature(self, context):
self.layout.operator(operators.model.CreateMMDModelRoot.bl_idname, text='Create MMD Model')
@persistent
def load_handler(dummy):
from mmd_tools_local.core.sdef import FnSDEF
FnSDEF.clear_cache()
FnSDEF.register_driver_function()
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
bpy.types.INFO_MT_armature_add.append(menu_func_armature)
properties.register()
bpy.app.handlers.load_post.append(load_handler)
def unregister():
bpy.app.handlers.load_post.remove(load_handler)
properties.unregister()
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)
bpy.types.INFO_MT_armature_add.remove(menu_func_armature)
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()
|
py | 7dfe5e4f1b544f1a04a60ef811763cd830927ebf | import numpy as np
class _pa16j():
"""Pose alternated with 16 joints (like Penn Action with three more
joints on the spine.
"""
num_joints = 16
joint_names = ['pelvis', 'thorax', 'neck', 'head',
'r_shoul', 'l_shoul', 'r_elb', 'l_elb', 'r_wrist', 'l_wrist',
        'r_hip', 'l_hip', 'r_knee', 'l_knee', 'r_ankle', 'l_ankle']
"""Horizontal flip mapping"""
map_hflip = [0, 1, 2, 3, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14]
"""Projections from other layouts to the PA16J standard"""
map_from_mpii = [6, 7, 8, 9, 12, 13, 11, 14, 10, 15, 2, 3, 1, 4, 0, 5]
map_from_ntu = [0, 20, 2, 3, 4, 8, 5, 9, 6, 10, 12, 16, 13, 17, 14, 18]
"""Projections of PA16J to other formats"""
map_to_pa13j = [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
map_to_jhmdb = [2, 1, 3, 4, 5, 10, 11, 6, 7, 12, 13, 8, 9, 14, 15]
map_to_mpii = [14, 12, 10, 11, 13, 15, 0, 1, 2, 3, 8, 6, 4, 5, 7, 9]
map_to_lsp = [14, 12, 10, 11, 13, 15, 8, 6, 4, 5, 7, 9, 2, 3]
"""Color map"""
color = ['g', 'r', 'b', 'y', 'm']
cmap = [0, 0, 0, 0, 1, 2, 1, 2, 1, 2, 3, 4, 3, 4, 3, 4]
links = [[0, 1], [1, 2], [2, 3], [4, 6], [6, 8], [5, 7], [7, 9],
[10, 12], [12, 14], [11, 13], [13, 15]]
class _pa17j():
"""Pose alternated with 17 joints (like _pa16j, with the middle spine).
"""
num_joints = 17
"""Horizontal flip mapping"""
map_hflip = [0, 1, 2, 3, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14, 16]
"""Projections from other layouts to the PA17J standard"""
map_from_h36m = \
[0, 12, 13, 15, 25, 17, 26, 18, 27, 19, 1, 6, 2, 7, 3, 8, 11]
map_from_ntu = _pa16j.map_from_ntu + [1]
map_from_mpii3dhp = \
[4, 5, 6, 7, 14, 9, 15, 10, 16, 11, 23, 18, 24, 19, 25, 20, 3]
map_from_mpii3dhp_te = \
[14, 1, 16, 0, 2, 5, 3, 6, 4, 7, 8, 11, 9, 12, 10, 13, 15]
"""Projections of PA17J to other formats"""
map_to_pa13j = _pa16j.map_to_pa13j
map_to_mpii = [14, 12, 10, 11, 13, 15, 0, 1, 2, 3, 8, 6, 4, 5, 7, 9]
map_to_pa16j = list(range(16))
"""Color map"""
color = ['g', 'r', 'b', 'y', 'm']
cmap = [0, 0, 0, 0, 1, 2, 1, 2, 1, 2, 3, 4, 3, 4, 3, 4, 0]
links = [[0, 16], [16, 1], [1, 2], [2, 3], [4, 6], [6, 8], [5, 7], [7, 9],
[10, 12], [12, 14], [11, 13], [13, 15]]
class _pa20j():
"""Pose alternated with 20 joints. Similar to _pa16j, but with one more
joint for hands and feet.
"""
num_joints = 20
"""Horizontal flip mapping"""
map_hflip = [0, 1, 2, 3, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14, 17, 16,
19, 18]
"""Projections from other layouts to the PA20J standard"""
map_from_h36m = [0, 12, 13, 15, 25, 17, 26, 18, 27, 19, 30, 22, 1, 6, 2,
7, 3, 8, 4, 9]
map_from_ntu = [0, 20, 2, 3, 4, 8, 5, 9, 6, 10, 7, 11, 12, 16, 13, 17, 14,
18, 15, 19]
"""Projections of PA20J to other formats"""
map_to_mpii = [16, 14, 12, 13, 15, 17, 0, 1, 2, 3, 8, 6, 4, 5, 7, 9]
map_to_pa13j = [3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17]
map_to_pa16j = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17]
"""Color map"""
color = ['g', 'r', 'b', 'y', 'm']
cmap = [0, 0, 0, 0, 1, 2, 1, 2, 1, 2, 1, 2, 3, 4, 3, 4, 3, 4, 3, 4]
links = [[0, 1], [1, 2], [2, 3], [4, 6], [6, 8], [8, 10], [5, 7], [7, 9],
[9, 11], [12, 14], [14, 16], [16, 18], [13, 15], [15, 17], [17, 19]]
class _pa21j():
"""Pose alternated with 21 joints. Similar to _pa20j, but with one more
joint referent to the 16th joint from _pa17j, for compatibility with H36M.
"""
num_joints = 21
"""Horizontal flip mapping"""
map_hflip = _pa20j.map_hflip + [20]
"""Projections from other layouts to the PA21J standard"""
map_from_h36m = _pa20j.map_from_h36m + [11]
    map_from_ntu = _pa20j.map_from_ntu + [1]
    """Projections of PA21J to other formats"""
map_to_mpii = _pa20j.map_to_mpii
map_to_pa13j = _pa20j.map_to_pa13j
map_to_pa16j = _pa20j.map_to_pa16j
map_to_pa17j = _pa20j.map_to_pa16j + [20]
"""Color map"""
color = ['g', 'r', 'b', 'y', 'm']
cmap = [0, 0, 0, 0, 1, 2, 1, 2, 1, 2, 1, 2, 3, 4, 3, 4, 3, 4, 3, 4, 0]
links = [[0, 20], [20, 1], [1, 2], [2, 3], [4, 6], [6, 8], [8, 10], [5, 7], [7, 9],
[9, 11], [12, 14], [14, 16], [16, 18], [13, 15], [15, 17], [17, 19]]
class coco17j():
"""Original layout for the MS COCO dataset."""
num_joints = 17
dim = 2
"""Horizontal flip mapping"""
map_hflip = [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
"""Color map"""
color = ['g', 'r', 'b', 'y', 'm', 'w']
cmap = [0, 0, 0, 5, 5, 0, 0, 2, 1, 2, 1, 0, 0, 4, 3, 4, 3]
links = [[13, 15], [13, 11], [14, 16], [14, 12], [11, 12], [5, 11], [6,
12], [5, 6], [7, 5], [8, 6], [7, 9], [8, 10], [1, 2], [0, 1], [0, 2],
[3, 1], [4, 2], [3, 5], [4, 6]]
class pa16j2d(_pa16j):
dim = 2
class pa16j3d(_pa16j):
dim = 3
class pa17j2d(_pa17j):
dim = 2
class pa17j3d(_pa17j):
dim = 3
class pa20j3d(_pa20j):
dim = 3
class pa21j3d(_pa21j):
dim = 3
class ntu25j3d():
num_joints = 25
dim = 3
def _func_and(x):
if x.all():
return 1
return 0
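# A joint counts as visible when every coordinate lies inside the unit box,
# shrunk by `margin` on each side (coordinates assumed normalized to [0, 1]).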
def get_visible_joints(x, margin=0.0):
visible = np.apply_along_axis(_func_and, axis=1, arr=(x > margin))
visible *= np.apply_along_axis(_func_and, axis=1, arr=(x < 1 - margin))
return visible
def get_valid_joints(x):
return np.apply_along_axis(_func_and, axis=1, arr=(x > -1e6))
def convert_pa17j3d_to_pa16j(p, dim=3):
assert p.shape == (pa17j3d.num_joints, pa17j3d.dim)
return p[pa17j3d.map_to_pa16j,0:dim].copy()
def convert_sequence_pa17j3d_to_pa16j(seqp, dim=3):
assert seqp.shape[1:] == (pa17j3d.num_joints, pa17j3d.dim)
x = np.zeros((len(seqp), _pa16j.num_joints, dim))
for i in range(len(seqp)):
x[i,:] = convert_pa17j3d_to_pa16j(seqp[i], dim=dim)
return x
def write_poselist(filename, poses):
""" Write a pose list to a text file.
In the text file, every row corresponds to one pose and the columns are:
{x1, y1, x2, y2, ...}
Inputs: 'filename'
'poses' [nb_samples, nb_joints, 2]
"""
nb_samples, nb_joints, dim = poses.shape
x = poses.copy()
x = np.reshape(x, (nb_samples, nb_joints * dim))
np.savetxt(filename, x, fmt='%.6f', delimiter=',')
def assign_knn_confidence(c, num_iter=2):
assert c.ndim == 2 and c.shape[1] == 1, \
'Invalid confidence shape {}'.format(c.shape)
def _search_knn(refp):
cs = c[list(refp), 0]
if np.isnan(cs).all():
return np.nan
if np.nanmean(cs) < 0.5:
return 0.1
return 0.9
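    # iteratively fill NaN confidences using neighbor lists from dsl80j3d,
    # which must be provided elsewhere in the package (it is not defined in this module)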
for _ in range(num_iter):
for i in range(len(c)):
if np.isnan(c[i, 0]):
c[i, 0] = _search_knn(dsl80j3d.neighbors[i])
|
py | 7dfe5e692538aaea6ed27c7761eb6f0e01861428 | from rest_framework import serializers
from .models import Contract
class ContractSerializer(serializers.ModelSerializer):
class Meta:
model = Contract
fields = ['id', 'name', 'address'] |
py | 7dfe5f9a923a7c844cea78dfef953f9dbed35da1 | """
General audio classification scenario
"""
import logging
from tqdm import tqdm
from armory.utils.config_loading import (
load_dataset,
load_model,
load_attack,
load_adversarial_dataset,
load_defense_wrapper,
load_defense_internal,
)
from armory.utils import metrics
from armory.scenarios.base import Scenario
logger = logging.getLogger(__name__)
class AudioClassificationTask(Scenario):
def _evaluate(self, config: dict) -> dict:
"""
Evaluate the config and return a results dict
"""
model_config = config["model"]
classifier, preprocessing_fn = load_model(model_config)
defense_config = config.get("defense") or {}
defense_type = defense_config.get("type")
if defense_type in ["Preprocessor", "Postprocessor"]:
logger.info(f"Applying internal {defense_type} defense to classifier")
classifier = load_defense_internal(config["defense"], classifier)
if model_config["fit"]:
classifier.set_learning_phase(True)
logger.info(
f"Fitting model {model_config['module']}.{model_config['name']}..."
)
fit_kwargs = model_config["fit_kwargs"]
logger.info(f"Loading train dataset {config['dataset']['name']}...")
train_data = load_dataset(
config["dataset"],
epochs=fit_kwargs["nb_epochs"],
split_type="train",
preprocessing_fn=preprocessing_fn,
)
if defense_type == "Trainer":
logger.info(f"Training with {defense_type} defense...")
defense = load_defense_wrapper(config["defense"], classifier)
defense.fit_generator(train_data, **fit_kwargs)
else:
logger.info("Fitting classifier on clean train dataset...")
classifier.fit_generator(train_data, **fit_kwargs)
if defense_type == "Transform":
# NOTE: Transform currently not supported
logger.info(f"Transforming classifier with {defense_type} defense...")
defense = load_defense_wrapper(config["defense"], classifier)
classifier = defense()
classifier.set_learning_phase(False)
# Evaluate the ART classifier on benign test examples
logger.info(f"Loading test dataset {config['dataset']['name']}...")
test_data = load_dataset(
config["dataset"],
epochs=1,
split_type="test",
preprocessing_fn=preprocessing_fn,
)
logger.info("Running inference on benign examples...")
metrics_logger = metrics.MetricsLogger.from_config(config["metric"])
for x, y in tqdm(test_data, desc="Benign"):
y_pred = classifier.predict(x)
metrics_logger.update_task(y, y_pred)
metrics_logger.log_task()
# Evaluate the ART classifier on adversarial test examples
logger.info("Generating or loading / testing adversarial examples...")
attack_config = config["attack"]
attack_type = attack_config.get("type")
if attack_type == "preloaded":
test_data = load_adversarial_dataset(
attack_config,
epochs=1,
split_type="adversarial",
preprocessing_fn=preprocessing_fn,
)
else:
attack = load_attack(attack_config, classifier)
test_data = load_dataset(
config["dataset"],
epochs=1,
split_type="test",
preprocessing_fn=preprocessing_fn,
)
for x, y in tqdm(test_data, desc="Attack"):
if attack_type == "preloaded":
x, x_adv = x
elif attack_config.get("use_label"):
x_adv = attack.generate(x=x, y=y)
else:
x_adv = attack.generate(x=x)
y_pred_adv = classifier.predict(x_adv)
metrics_logger.update_task(y, y_pred_adv, adversarial=True)
metrics_logger.update_perturbation(x, x_adv)
metrics_logger.log_task(adversarial=True)
return metrics_logger.results()
|
py | 7dfe5fedd67e8aa9f9d0671a24e71c8edb228a6a | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
exec(open('twittcher/version.py').read()) # loads __version__
setup(name='twittcher',
version=__version__,
author='Zulko',
description=("Watch tweets on Twitter's user pages or search pages."),
long_description=open('README.rst').read(),
license='see LICENSE.txt',
keywords="Twitter tweet search bot",
install_requires=['beautifulsoup'],
packages= find_packages(exclude='docs'))
|
py | 7dfe60d081f7c27d3988a68df9a0c3a185255d8d | import numpy as np
import matplotlib.pyplot as plt
# import necessary packages
from src.data_manager.concrete.RealDataLoader import RealDataLoader
from src.data_filter.concrete.LowPassFilter import LowPassFilter
# load the data
loader = RealDataLoader('training_data/data_v1')
unfiltered_data = loader.load_complete_data()
# filter the data
x = LowPassFilter()
filtered_data = x.filter(unfiltered_data)
# select one trajectory
ind = 5
# create the corresponding plots
fig, axes = plt.subplots(3)
plabels = ["x", "y", "z"]
# iterate over the rows
for k in range(3):
row = axes[k]
row.set_ylabel(plabels[k])
row.set_xlabel("t")
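    # unfiltered trace is trimmed (60 samples at the head, 120 at the tail); filtered trace is plotted in full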
row.plot(unfiltered_data[ind][60:-120, k])
row.plot(filtered_data[ind][:, k])
plt.show() |
py | 7dfe62e755bc7de8b48976545a5fc1acebb7a2a2 | from django.db import models
from django.core.urlresolvers import reverse
class List(models.Model):
def get_absolute_url(self):
return reverse('view_list', args=[self.id])
class Item(models.Model):
text = models.TextField(default='')
list = models.ForeignKey('List', default=None)
def __str__(self):
return self.text
class Meta:
unique_together = ('list', 'text')
|
py | 7dfe634eef11f6d43a5ca97630d0f3e5d4f4da1c | c = 1
while c != 10:
    print(c)
    c += 1
print('Acabou')  # Portuguese: "Finished" |
py | 7dfe63deb2c67ba18434d69f9b00b3694e8f3091 | """
Generic classes used for creating an asymmetric encryption scheme.
"""
import inspect
from abc import ABC, abstractmethod
from typing import Any, Generic, Optional, Tuple, Type, TypeVar, cast
from .encryption_scheme import CT, CV, KM, PT, RP, EncryptionScheme
class PublicKey:
"""
Public Key of an AsymmetricEncryptionScheme.
This should be subclassed for every AsymmetricEncryptionScheme.
"""
class SecretKey:
"""
Secret Key of an AsymmetricEncryptionScheme.
This should be subclassed for every AsymmetricEncryptionScheme.
"""
PK = TypeVar("PK", bound=PublicKey)
SK = TypeVar("SK", bound=SecretKey)
AE = TypeVar(
"AE", bound="AsymmetricEncryptionScheme[Any, Any, Any, Any, Any, Any, Any]"
)
class AsymmetricEncryptionScheme(
Generic[KM, PT, RP, CV, CT, PK, SK], EncryptionScheme[KM, PT, RP, CV, CT], ABC
):
"""
Abstract base class for an AsymmetricEncryptionScheme. Subclass of EncryptionScheme.
"""
@classmethod
def from_security_parameter(cls: Type[AE], *args: Any, **kwargs: Any) -> AE:
"""
Generate a new AsymmetricEncryptionScheme from a security parameter. Note that regular
arguments will be passed to the generate_key_material method, so all parameter that are
required for the constructor should be passed as keyword arguments.
:param args: Security parameter(s) for key generation.
:param kwargs: Security parameter(s) and optional extra arguments for the EncryptionScheme
constructor.
:raises ValueError: If a keyword argument is not valid for key generation or the constructor
:return: A new EncryptionScheme.
"""
gen_names = inspect.getfullargspec(cls.generate_key_material)[0]
init_names = [
name for name in inspect.getfullargspec(cls.__init__)[0] if name != "self"
]
gen_kwargs = {}
init_kwargs = {}
for kwarg, val in kwargs.items():
if kwarg in gen_names:
# arguments used for generating key material
gen_kwargs[kwarg] = val
elif kwarg in init_names:
# arguments used in the __init__ method
init_kwargs[kwarg] = val
else:
                raise ValueError(
                    f"The keyword arguments should either be used for key generation, "
                    f"or passed to the constructor, but parameter with name {kwarg} "
                    f"is not present in either."
                )
public_key, secret_key = cast(
Tuple[PK, SK], cls.generate_key_material(*args, **gen_kwargs)
)
return cls(public_key, secret_key, **init_kwargs) # type: ignore[call-arg]
@classmethod
def from_public_key(cls: Type[AE], public_key: PK, **kwargs: Any) -> AE:
"""
Generate a new AsymmetricEncryptionScheme from a public key (e.g. when received from another
party) and possibly additional parameters.
        :param public_key: The PublicKey of this scheme instantiation.
:param kwargs: Optional extra arguments for the EncryptionScheme constructor.
:return: A new EncryptionScheme.
"""
return cls(public_key=public_key, secret_key=None, **kwargs) # type: ignore[call-arg]
def __init__(
self,
public_key: PK,
secret_key: Optional[SK],
):
"""
Construct an AsymmetricEncryptionScheme with the given keypair and optional keyword
arguments. All keyword arguments are combined with the public key to create an ID, so all
the __init__ of a custom subclass of AsymmetricEncryptionScheme should pass all their
parameter values as keyword arguments to this __init__ for the ID generation to work
properly. If this does not happen, then schemes might be considered equal when they are
totally different.
:param public_key: Asymmetric PublicKey.
:param secret_key: Asymmetric SecretKey, might be None when the SecretKey is unknown.
"""
self.__pk = public_key
self.__sk = secret_key
EncryptionScheme.__init__(self)
@classmethod
@abstractmethod
def generate_key_material(cls, *args: Any, **kwargs: Any) -> KM:
"""
Method to generate key material (PublicKey and SecretKey) for this scheme.
:param args: Required arguments to generate said key material.
:param kwargs: Required arguments to generate said key material.
:return: Tuple containing first the PublicKey of this scheme and then the SecretKey.
"""
@property
def public_key(self) -> PK:
"""
PublicKey of this instantiation of the scheme.
:return: PublicKey of this instantiation.
"""
return self.__pk
@property
def secret_key(self) -> Optional[SK]:
"""
SecretKey of this instantiation of the scheme.
:return: SecretKey of this instantiation, or None when it is unknown.
"""
return self.__sk
|
py | 7dfe64b84aaf6b8728f9931a42f4683d7b630307 | import serial
import random
import time
import sys
import iothub_client
from iothub_client import IoTHubClient, IoTHubClientError, IoTHubTransportProvider, IoTHubClientResult
from iothub_client import IoTHubMessage, IoTHubMessageDispositionResult, IoTHubError, DeviceMethodReturnValue
from iothub_client import IoTHubClientRetryPolicy, GetRetryPolicyReturnValue
from iothub_client_args import get_iothub_opt, OptionError
from datetime import datetime
# LED display rule. Normal Off.
DISPLAY_RULE_NORMALLY_OFF = 0
# LED display rule. Normal On.
DISPLAY_RULE_NORMALLY_ON = 1
# messageTimeout - the maximum time in milliseconds until a message times out.
# The timeout period starts at IoTHubClient.send_event_async.
# By default, messages do not expire.
MESSAGE_TIMEOUT = 10000
# HTTP transport options referenced in iothub_client_init (values taken from the
# standard Azure IoT Hub Python sample; adjust if your setup differs)
TIMEOUT = 241000
MINIMUM_POLLING_TIME = 9
RECEIVE_CONTEXT = 0
AVG_WIND_SPEED = 10.0
MIN_TEMPERATURE = 20.0
MIN_HUMIDITY = 60.0
MESSAGE_COUNT = 0
RECEIVED_COUNT = 0
CONNECTION_STATUS_CONTEXT = 0
TWIN_CONTEXT = 0
SEND_REPORTED_STATE_CONTEXT = 0
METHOD_CONTEXT = 0
# global counters
RECEIVE_CALLBACKS = 0
SEND_CALLBACKS = 0
BLOB_CALLBACKS = 0
CONNECTION_STATUS_CALLBACKS = 0
TWIN_CALLBACKS = 0
SEND_REPORTED_STATE_CALLBACKS = 0
METHOD_CALLBACKS = 0
# chose HTTP, AMQP, AMQP_WS or MQTT as transport protocol
PROTOCOL = IoTHubTransportProvider.AMQP
# String containing Hostname, Device Id & Device Key in the format:
# "HostName=<host_name>;DeviceId=<device_id>;SharedAccessKey=<device_key>"
CONNECTION_STRING = "[Device Connection String]"
TEXT_MESSAGE_TO_IOTHUB = ""
def set_certificates(client):
from iothub_client_cert import CERTIFICATES
try:
client.set_option("TrustedCerts", CERTIFICATES)
print ( "set_option TrustedCerts successful" )
except IoTHubClientError as iothub_client_error:
print ( "set_option TrustedCerts failed (%s)" % iothub_client_error )
def receive_message_callback(message, counter):
global RECEIVE_CALLBACKS
message_buffer = message.get_bytearray()
size = len(message_buffer)
print ( "Received Message [%d]:" % counter )
print ( " Data: <<<%s>>> & Size=%d" % (message_buffer[:size].decode('utf-8'), size) )
map_properties = message.properties()
key_value_pair = map_properties.get_internals()
print ( " Properties: %s" % key_value_pair )
counter += 1
RECEIVE_CALLBACKS += 1
print ( " Total calls received: %d" % RECEIVE_CALLBACKS )
return IoTHubMessageDispositionResult.ACCEPTED
def send_confirmation_callback(message, result, user_context):
global SEND_CALLBACKS
print ( "Confirmation[%d] received for message with result = %s" % (user_context, result) )
map_properties = message.properties()
print ( " message_id: %s" % message.message_id )
print ( " correlation_id: %s" % message.correlation_id )
key_value_pair = map_properties.get_internals()
print ( " Properties: %s" % key_value_pair )
SEND_CALLBACKS += 1
print ( " Total calls confirmed: %d" % SEND_CALLBACKS )
def connection_status_callback(result, reason, user_context):
global CONNECTION_STATUS_CALLBACKS
print ( "Connection status changed[%d] with:" % (user_context) )
print ( " reason: %d" % reason )
print ( " result: %s" % result )
CONNECTION_STATUS_CALLBACKS += 1
print ( " Total calls confirmed: %d" % CONNECTION_STATUS_CALLBACKS )
def device_twin_callback(update_state, payload, user_context):
global TWIN_CALLBACKS
print ( "")
print ( "Twin callback called with:")
print ( "updateStatus: %s" % update_state )
print ( "context: %s" % user_context )
print ( "payload: %s" % payload )
TWIN_CALLBACKS += 1
print ( "Total calls confirmed: %d\n" % TWIN_CALLBACKS )
def send_reported_state_callback(status_code, user_context):
global SEND_REPORTED_STATE_CALLBACKS
print ( "Confirmation[%d] for reported state received with:" % (user_context) )
print ( " status_code: %d" % status_code )
SEND_REPORTED_STATE_CALLBACKS += 1
print ( " Total calls confirmed: %d" % SEND_REPORTED_STATE_CALLBACKS )
def device_method_callback(method_name, payload, user_context):
global METHOD_CALLBACKS
print ( "\nMethod callback called with:\nmethodName = %s\npayload = %s\ncontext = %s" % (method_name, payload, user_context) )
METHOD_CALLBACKS += 1
print ( "Total calls confirmed: %d\n" % METHOD_CALLBACKS )
device_method_return_value = DeviceMethodReturnValue()
device_method_return_value.response = "{ \"Response\": \"This is the response from the device\" }"
device_method_return_value.status = 200
return device_method_return_value
def iothub_client_init():
# prepare iothub client
client = IoTHubClient(CONNECTION_STRING, PROTOCOL)
if client.protocol == IoTHubTransportProvider.HTTP:
client.set_option("timeout", TIMEOUT)
client.set_option("MinimumPollingTime", MINIMUM_POLLING_TIME)
# set the time until a message times out
client.set_option("messageTimeout", MESSAGE_TIMEOUT)
# some embedded platforms need certificate information
set_certificates(client)
# to enable MQTT logging set to 1
if client.protocol == IoTHubTransportProvider.MQTT:
client.set_option("logtrace", 0)
client.set_message_callback(
receive_message_callback, RECEIVE_CONTEXT)
if client.protocol == IoTHubTransportProvider.MQTT or client.protocol == IoTHubTransportProvider.MQTT_WS:
client.set_device_twin_callback(
device_twin_callback, TWIN_CONTEXT)
client.set_device_method_callback(
device_method_callback, METHOD_CONTEXT)
if client.protocol == IoTHubTransportProvider.AMQP or client.protocol == IoTHubTransportProvider.AMQP_WS:
client.set_connection_status_callback(
connection_status_callback, CONNECTION_STATUS_CONTEXT)
retryPolicy = IoTHubClientRetryPolicy.RETRY_INTERVAL
retryInterval = 100
client.set_retry_policy(retryPolicy, retryInterval)
print ( "SetRetryPolicy to: retryPolicy = %d" % retryPolicy)
print ( "SetRetryPolicy to: retryTimeoutLimitInSeconds = %d" % retryInterval)
retryPolicyReturn = client.get_retry_policy()
print ( "GetRetryPolicy returned: retryPolicy = %d" % retryPolicyReturn.retryPolicy)
print ( "GetRetryPolicy returned: retryTimeoutLimitInSeconds = %d" % retryPolicyReturn.retryTimeoutLimitInSeconds)
return client
def calc_crc(buf, length):
"""
    CRC-16 (Modbus flavour: initial value 0xFFFF, reflected polynomial 0xA001) over the
    first 'length' bytes of 'buf'. The checksum is returned as two bytes, low byte first.
"""
crc = 0xFFFF
for i in range(length):
crc = crc ^ buf[i]
for i in range(8):
carrayFlag = crc & 1
crc = crc >> 1
if (carrayFlag == 1):
crc = crc ^ 0xA001
crcH = crc >> 8
crcL = crc & 0x00FF
return (bytearray([crcL, crcH]))
def print_latest_data(data):
"""
print measured latest value.
"""
time_measured = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
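    # fields below are little-endian in the frame: join the high byte (hex) with
    # zero-padded low bytes, then apply the datasheet scaling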
    temperature = str(int(hex(data[9]) + format(data[8], '02x'), 16) / 100)
    relative_humidity = str(int(hex(data[11]) + format(data[10], '02x'), 16) / 100)
    ambient_light = str(int(hex(data[13]) + format(data[12], '02x'), 16))
    barometric_pressure = str(
        int(hex(data[17]) + format(data[16], '02x') + format(data[15], '02x') + format(data[14], '02x'), 16) / 1000)
    sound_noise = str(int(hex(data[19]) + format(data[18], '02x'), 16) / 100)
    eTVOC = str(int(hex(data[21]) + format(data[20], '02x'), 16))
    eCO2 = str(int(hex(data[23]) + format(data[22], '02x'), 16))
    discomfort_index = str(int(hex(data[25]) + format(data[24], '02x'), 16) / 100)
    heat_stroke = str(int(hex(data[27]) + format(data[26], '02x'), 16) / 100)
    vibration_information = str(int(hex(data[28]), 16))
    si_value = str(int(hex(data[30]) + format(data[29], '02x'), 16) / 10)
    pga = str(int(hex(data[32]) + format(data[31], '02x'), 16) / 10)
    seismic_intensity = str(int(hex(data[34]) + format(data[33], '02x'), 16) / 1000)
    temperature_flag = str(int(hex(data[36]) + format(data[35], '02x'), 16))
    relative_humidity_flag = str(int(hex(data[38]) + format(data[37], '02x'), 16))
    ambient_light_flag = str(int(hex(data[40]) + format(data[39], '02x'), 16))
    barometric_pressure_flag = str(int(hex(data[42]) + format(data[41], '02x'), 16))
    sound_noise_flag = str(int(hex(data[44]) + format(data[43], '02x'), 16))
    etvoc_flag = str(int(hex(data[46]) + format(data[45], '02x'), 16))
    eco2_flag = str(int(hex(data[48]) + format(data[47], '02x'), 16))
    discomfort_index_flag = str(int(hex(data[50]) + format(data[49], '02x'), 16))
    heat_stroke_flag = str(int(hex(data[52]) + format(data[51], '02x'), 16))
    si_value_flag = str(int(hex(data[53]), 16))
    pga_flag = str(int(hex(data[54]), 16))
    seismic_intensity_flag = str(int(hex(data[55]), 16))
print("")
print("Time measured:" + time_measured)
print("Temperature:" + temperature)
print("Relative humidity:" + relative_humidity)
print("Ambient light:" + ambient_light)
print("Barometric pressure:" + barometric_pressure)
print("Sound noise:" + sound_noise)
print("eTVOC:" + eTVOC)
print("eCO2:" + eCO2)
print("Discomfort index:" + discomfort_index)
print("Heat stroke:" + heat_stroke)
print("Vibration information:" + vibration_information)
print("SI value:" + si_value)
print("PGA:" + pga)
print("Seismic intensity:" + seismic_intensity)
print("Temperature flag:" + temperature_flag)
print("Relative humidity flag:" + relative_humidity_flag)
print("Ambient light flag:" + ambient_light_flag)
print("Barometric pressure flag:" + barometric_pressure_flag)
print("Sound noise flag:" + sound_noise_flag)
print("eTVOC flag:" + etvoc_flag)
print("eCO2 flag:" + eco2_flag)
print("Discomfort index flag:" + discomfort_index_flag)
print("Heat stroke flag:" + heat_stroke_flag)
print("SI value flag:" + si_value_flag)
print("PGA flag:" + pga_flag)
print("Seismic intensity flag:" + seismic_intensity_flag)
TEXT_MESSAGE_TO_IOTHUB = MSG_TXT % (time_measured, barometric_pressure, temperature, relative_humidity)
print("Send to be ...:" + TEXT_MESSAGE_TO_IOTHUB)
return TEXT_MESSAGE_TO_IOTHUB
def now_utc_str():
"""
Get now utc.
"""
return datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
MSG_TXT = "{\"measured_time\": \"%s\", \"Barometric\": %s,\"temperature\": %s,\"humidity\": %s}"
if __name__ == '__main__':
# Serial.
ser = serial.Serial("/dev/ttyUSB0", 115200, serial.EIGHTBITS, serial.PARITY_NONE)
try:
# IoT Hub Connection
client = iothub_client_init()
reported_state = "{\"newState\":\"standBy\"}"
client.send_reported_state(reported_state, len(reported_state), send_reported_state_callback, SEND_REPORTED_STATE_CONTEXT)
# LED On. Color of Green.
command = bytearray([0x52, 0x42, 0x0a, 0x00, 0x02, 0x11, 0x51, DISPLAY_RULE_NORMALLY_ON, 0x00, 0, 255, 0])
command = command + calc_crc(command, len(command))
ser.write(command)
time.sleep(0.1)
ret = ser.read(ser.inWaiting())
while ser.isOpen():
# Get Latest data Long.
command = bytearray([0x52, 0x42, 0x05, 0x00, 0x01, 0x21, 0x50])
command = command + calc_crc(command, len(command))
tmp = ser.write(command)
time.sleep(0.1)
data = ser.read(ser.inWaiting())
message = print_latest_data(data)
iot_message = IoTHubMessage(message)
print("Create IoT Hub Message with : "+message)
client.send_event_async(iot_message, send_confirmation_callback, MESSAGE_COUNT)
MESSAGE_COUNT = MESSAGE_COUNT + 1
time.sleep(1)
except KeyboardInterrupt:
# LED Off.
command = bytearray([0x52, 0x42, 0x0a, 0x00, 0x02, 0x11, 0x51, DISPLAY_RULE_NORMALLY_OFF, 0x00, 0, 0, 0])
command = command + calc_crc(command, len(command))
ser.write(command)
time.sleep(1)
# script finish.
        sys.exit()
|
py | 7dfe66070baf31f8dadc336e6a5f4d05ecfa287f | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='pysip',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.1.0',
description='A thin wrapper around requests to interact with the Simple Intel Platform (SIP).',
long_description=long_description,
long_description_content_type='text/markdown',
# The project's main homepage.
url='https://github.com/IntegralDefense/pysip',
# Author details
author='Matthew Wilson',
author_email='[email protected]',
# Choose your license
license='Apache 2.0',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache Software License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='pysip',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=['pysip'],
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['requests'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
#'dev': ['check-manifest'],
#'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
#'sample': ['package_data.dat'],
},
#scripts=['bin/urlfinder'],
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
#entry_points={
# 'console_scripts': [
# 'sample=sample:main',
# ],
#},
)
|
py | 7dfe66596267664463f7920529acccece8dc4c17 | # -*- coding: utf8 -*-
import requests, time, re, sys, json, random
# ---- Settings start ----
# Username (format: 13800138000)
# CoolPush skey, ServerChan sckey and WeCom (企业微信) push settings; fill in only one and leave the others empty
skey = sys.argv[3]
# ServerChan push
sckey = sys.argv[4]
# WeCom push
# Whether to enable WeCom push: "false" disables, "true" enables (default off). When enabled, fill in the settings below and leave the two keys above empty
position = sys.argv[5]
base_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?'
req_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token='
corpid = sys.argv[6]  # Corp ID: log in to WeCom and look under My Company -> Company Info
corpsecret = sys.argv[7]  # Secret of your self-built app (each app has its own)
agentid = sys.argv[8]  # AgentId of your app: an integer constant, no quotes
touser = sys.argv[9]  # Members who receive the message; ID list separated by '|', up to 1000. Special case: "@all" sends to every member of the app
toparty = sys.argv[10]  # Departments that receive the message; ID list separated by '|', up to 100. Ignored when touser is "@all"
totag = sys.argv[11]  # Tags that receive the message; ID list separated by '|', up to 100. Ignored when touser is "@all"
# Fixed step count, handy for testing the push setup: enter a value to submit exactly that number every run; leave empty (default) for a random count.
# If a push arrives after changing this, the push settings are fine; if not, check whether set_push below disabled that time slot.
# Remember to clear it after testing, or the same fixed count keeps being submitted
step1 = ""
# Reduce the step count according to local weather (default off)
open_get_weather = sys.argv[12]
# Region for the weather lookup (required when the switch above is on), e.g. area = "宁波"
area = sys.argv[13]
# The settings below can usually stay at their defaults
# Coefficient K: ratio applied to the step count for a given weather type; e.g. when the configured area reports cloudy weather,
# the randomized count is multiplied by 0.9 before submission. The keys must stay in Chinese because they are matched against the
# weather API's responses (cloudy, overcast, light/moderate/heavy rain, rainstorm, heavy/extreme rainstorm)
K_dict = {"多云": 0.9, "阴": 0.8, "小雨": 0.7, "中雨": 0.5, "大雨": 0.4, "暴雨": 0.3, "大暴雨": 0.2, "特大暴雨": 0.2}
# Hours (24h clock) at which the script runs; avoid 0, 1 and 2 to prevent logic errors. The cloud-function trigger must fire at the
# same hours as time_list, e.g. the default cron "30 0 8,10,13,15,17,19,21 * * * *" matches the default list below.
# To run fewer than 7 times a day, edit only the first few entries and drop the unused hours from the trigger
time_list = [8, 10, 13, 15, 17, 19, 21]
# Push switch per run, one entry per time_list slot: True pushes that run's result, False stays silent
set_push = [True, True, True, True, True, True, True]
# Minimum steps per slot (to run fewer than 7 times a day, set the unused trailing slots to 0, or better, remove those hours from the trigger)
min_dict = {time_list[0]: 6000, time_list[1]: 10000, time_list[2]: 20000, time_list[3]: 30000, time_list[4]: 40000, time_list[5]: 50000, time_list[6]: 60000}
# Maximum steps per slot: each run draws a value between the slot's min and max (and multiplies it by K when weather reduction is on)
max_dict = {time_list[0]: 9999, time_list[1]: 19999, time_list[2]: 29999, time_list[3]: 39999, time_list[4]: 49999, time_list[5]: 59999, time_list[6]: 69999}
# ---- Settings end ----
now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
headers = {'User-Agent': 'MiFit/5.3.0 (iPhone; iOS 14.7.1; Scale/3.00)'}
# Fetch the weather for the configured area and set the step-reduction factor K
def getWeather():
    if area == "NO":
        return
    global K, type
    url = 'http://wthrcdn.etouch.cn/weather_mini?city=' + area
    hea = {'User-Agent': 'Mozilla/5.0'}
    r = requests.get(url=url, headers=hea)
    if r.status_code == 200:
        res = json.loads(r.text)
        forecast_type = res['data']['forecast'][0]['type']
        # K_dict preserves insertion order, so broader patterns ("暴雨") are tested before more specific ones ("大暴雨")
        for weather, factor in K_dict.items():
            if weather in forecast_type:
                K = factor
                break
        type = forecast_type
    else:
        print("Failed to fetch the weather")
# Fetch Beijing time to pick this run's step range, then start the main routine
def getBeijinTime():
    global K, type
    K = 1.0
    type = ""
    hea = {'User-Agent': 'Mozilla/5.0'}
    url = r'http://time1909.beijing-time.org/time.asp'
    if open_get_weather == "true":
        getWeather()
    r = requests.get(url=url, headers=hea)
    if r.status_code != 200:
        print("Failed to fetch Beijing time")
        return
    result = r.text
    #print(result)
    a = False
    min_1 = 0
    max_1 = 0
    # Match the current hour against the configured run times
    for i, hour in enumerate(time_list):
        if "nhrs=" + str(hour) in result:
            a = set_push[i]
            min_1 = min_dict[hour]
            max_1 = max_dict[hour]
            break
    min_1 = int(K * min_1)
    max_1 = int(K * max_1)
    if step1 != "":
        # A fixed step count is configured: always push and bypass the random range
        a = True
        min_1 = 1
        max_1 = 1
    if min_1 != 0 and max_1 != 0:
        user_mi = sys.argv[1]
        # Login password(s), '#'-separated to match the account list
        passwd_mi = sys.argv[2]
        user_list = user_mi.split('#')
        passwd_list = passwd_mi.split('#')
        if len(user_list) == len(passwd_list):
            for user_mi, passwd_mi in zip(user_list, passwd_list):
                main(user_mi, passwd_mi, min_1, max_1, a)
    else:
        print("This is not a configured run time, or the step count for this slot is 0; skipping this submission")
        return
# Extract the login code from the redirect URL
def get_code(location):
code_pattern = re.compile("(?<=access=).*?(?=&)")
code = code_pattern.findall(location)[0]
return code
# Log in and obtain login_token and userid
def login(user, password):
url1 = "https://api-user.huami.com/registrations/+86" + user + "/tokens"
headers = {
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
"User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.2"
}
data1 = {
"client_id": "HuaMi",
"password": f"{password}",
"redirect_uri": "https://s3-us-west-2.amazonaws.com/hm-registration/successsignin.html",
"token": "access"
}
r1 = requests.post(url1, data=data1, headers=headers, allow_redirects=False)
location = r1.headers["Location"]
try:
code = get_code(location)
except:
return 0, 0
# print("access_code获取成功!")
# print(code)
url2 = "https://account.huami.com/v2/client/login"
data2 = {
"app_name": "com.xiaomi.hm.health",
"app_version": "4.6.0",
"code": f"{code}",
"country_code": "CN",
"device_id": "2C8B4939-0CCD-4E94-8CBA-CB8EA6E613A1",
"device_model": "phone",
"grant_type": "access_token",
"third_name": "huami_phone",
}
r2 = requests.post(url2, data=data2, headers=headers).json()
login_token = r2["token_info"]["login_token"]
# print("login_token获取成功!")
# print(login_token)
userid = r2["token_info"]["user_id"]
# print("userid获取成功!")
# print(userid)
return login_token, userid
# Main routine: submit the step count for one account
def main(_user,_passwd,min_1, max_1, a):
user = str(_user)
password = str(_passwd)
step = str(step1)
if user == '' or password == '':
print("用户名或密码填写有误!")
return
if step == '':
print("已设置为随机步数(" + str(min_1) + "~" + str(max_1) + ")")
step = str(random.randint(min_1, max_1))
else:
step = str(step)
login_token = 0
login_token, userid = login(user, password)
if login_token == 0:
print("登陆失败!")
return "login fail!"
t = get_time()
app_token = get_app_token(login_token)
today = time.strftime("%F")
data_json = '%5B%7B%22data_hr%22%3A%22%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F9L%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FVv%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F0v%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F9e%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F0n%5C%2Fa%5C%2F%5C%2F%5C%2FS%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F0b%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F1FK%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FR%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F9PTFFpaf9L%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FR%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F0j%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F9K%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FOv%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2Fzf%5C%2F%5C%2F%5C%2F86%5C%2Fzr%5C%2FOv88%5C%2Fzf%5C%2FPf%5C%2F%5C%2F%5C%2F0v%5C%2FS%5C%2F8%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FSf%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2Fz3%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F0r%5C%2FOv%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FS%5C%2F9L%5C%2Fzb%5C%2FSf9K%5C%2F0v%5C%2FRf9H%5C%2Fzj%5C%2FSf9K%5C%2F0%5C%2F%5C%2FN%5C%2F%5C%2F%5C%2F%5C%2F0D%5C%2FSf83%5C%2Fzr%5C%2FPf9M%5C%2F0v%5C%2FOv9e%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FS%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2Fzv%5C%2F%5C%2Fz7%5C%2FO%5C%2F83%5C%2Fzv%5C%2FN%5C%2F83%5C%2Fzr%5C%2FN%5C%2F86%5C%2Fz%5C%2F%5C%2FNv83%5C%2Fzn%5C%2FXv84%5C%2Fzr%5C%2FPP84%5C%2Fzj%5C%2FN%5C%2F9e%5C%2Fzr%5C%2FN%5C%2F89%5C%2F03%5C%2FP%5C%2F89%5C%2Fz3%5C%2FQ%5C%2F9N%5C%2F0v%5C%2FTv9C%5C%2F0H%5C%2FOf9D%5C%2Fzz%5C%2FOf88%5C%2Fz%5C%2F%5C%2FPP9A%5C%2Fzr%5C%2FN%5C%2F86%5C%2Fzz%5C%2FNv87%5C%2F0D%5C%2FOv84%5C%2F0v%5C%2FO%5C%2F84%5C%2Fzf%5C%2FMP83%5C%2FzH%5C%2FNv83%5C%2Fzf%5C%2FN%5C%2F84%5C%2Fzf%5C%2FOf82%5C%2Fzf%5C%2FOP83%5C%2Fzb%5C%2FMv81%5C%2FzX%5C%2FR%5C%2F9L%5C%2F0v%5C%2FO%5C%2F9I%5C%2F0T%5C%2FS%5C%2F9A%5C%2Fzn%5C%2FPf89%5C%2Fzn%5C%2FNf9K%5C%2F07%5C%2FN%5C%2F83%5C%2Fzn%5C%2FNv83%5C%2Fzv%5C%2FO%5C%2F9A%5C%2F0H%5C%2FOf8%5C%2F%5C%2Fzj%5C%2FPP83%5C%2Fzj%5C%2FS%5C%2F87%5C%2Fzj%5C%2FNv84%5C%2Fzf%5C%2FOf83%5C%2Fzf%5C%2FOf83%5C%2Fzb%5C%2FNv9L%5C%2Fzj%5C%2FNv82%5C%2Fzb%5C%2FN%5C%2F85%5C%2Fzf%5C%2FN%5C%2F9J%5C%2Fzf%5C%2FNv83%5C%2Fzj%5C%2FNv84%5C%2F0r%5C%2FSv83%5C%2Fzf%5C%2FMP%5C%2F%5C%2F%5C%2Fzb%5C%2FMv82%5C%2Fzb%5C%2FOf85%5C%2Fz7%5C%2FNv8%5C%2F%5C%2F0r%5C%2FS%5C%2F85%5C%2F0H%5C%2FQP9B%5C%2F0D%5C%2FNf89%5C%2Fzj%5C%2FOv83%5C%2Fzv%5C%2FNv8%5C%2F%5C%2F0f%5C%2FSv9O%5C%2F0ZeXv%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F1X%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F9B%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2FTP%5C%2F%5C%2F%5C%2F1b%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F0%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F9N%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2F%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%
5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%5C%2Fv7%2B%22%2C%22date%22%3A%222021-08-07%22%2C%22data%22%3A%5B%7B%22start%22%3A0%2C%22stop%22%3A1439%2C%22value%22%3A%22UA8AUBQAUAwAUBoAUAEAYCcAUBkAUB4AUBgAUCAAUAEAUBkAUAwAYAsAYB8AYB0AYBgAYCoAYBgAYB4AUCcAUBsAUB8AUBwAUBIAYBkAYB8AUBoAUBMAUCEAUCIAYBYAUBwAUCAAUBgAUCAAUBcAYBsAYCUAATIPYD0KECQAYDMAYB0AYAsAYCAAYDwAYCIAYB0AYBcAYCQAYB0AYBAAYCMAYAoAYCIAYCEAYCYAYBsAYBUAYAYAYCIAYCMAUB0AUCAAUBYAUCoAUBEAUC8AUB0AUBYAUDMAUDoAUBkAUC0AUBQAUBwAUA0AUBsAUAoAUCEAUBYAUAwAUB4AUAwAUCcAUCYAUCwKYDUAAUUlEC8IYEMAYEgAYDoAYBAAUAMAUBkAWgAAWgAAWgAAWgAAWgAAUAgAWgAAUBAAUAQAUA4AUA8AUAkAUAIAUAYAUAcAUAIAWgAAUAQAUAkAUAEAUBkAUCUAWgAAUAYAUBEAWgAAUBYAWgAAUAYAWgAAWgAAWgAAWgAAUBcAUAcAWgAAUBUAUAoAUAIAWgAAUAQAUAYAUCgAWgAAUAgAWgAAWgAAUAwAWwAAXCMAUBQAWwAAUAIAWgAAWgAAWgAAWgAAWgAAWgAAWgAAWgAAWREAWQIAUAMAWSEAUDoAUDIAUB8AUCEAUC4AXB4AUA4AWgAAUBIAUA8AUBAAUCUAUCIAUAMAUAEAUAsAUAMAUCwAUBYAWgAAWgAAWgAAWgAAWgAAWgAAUAYAWgAAWgAAWgAAUAYAWwAAWgAAUAYAXAQAUAMAUBsAUBcAUCAAWwAAWgAAWgAAWgAAWgAAUBgAUB4AWgAAUAcAUAwAWQIAWQkAUAEAUAIAWgAAUAoAWgAAUAYAUB0AWgAAWgAAUAkAWgAAWSwAUBIAWgAAUC4AWSYAWgAAUAYAUAoAUAkAUAIAUAcAWgAAUAEAUBEAUBgAUBcAWR
YAUA0AWSgAUB4AUDQAUBoAXA4AUA8AUBwAUA8AUA4AUA4AWgAAUAIAUCMAWgAAUCwAUBgAUAYAUAAAUAAAUAAAUAAAUAAAUAAAUAAAUAAAUAAAWwAAUAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAeSEAeQ8AcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcBcAcAAAcAAAcCYOcBUAUAAAUAAAUAAAUAAAUAUAUAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcCgAeQAAcAAAcAAAcAAAcAAAcAAAcAYAcAAAcBgAeQAAcAAAcAAAegAAegAAcAAAcAcAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcCkAeQAAcAcAcAAAcAAAcAwAcAAAcAAAcAIAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcCIAeQAAcAAAcAAAcAAAcAAAcAAAeRwAeQAAWgAAUAAAUAAAUAAAUAAAUAAAcAAAcAAAcBoAeScAeQAAegAAcBkAeQAAUAAAUAAAUAAAUAAAUAAAUAAAcAAAcAAAcAAAcAAAcAAAcAAAegAAegAAcAAAcAAAcBgAeQAAcAAAcAAAcAAAcAAAcAAAcAkAegAAegAAcAcAcAAAcAcAcAAAcAAAcAAAcAAAcA8AeQAAcAAAcAAAeRQAcAwAUAAAUAAAUAAAUAAAUAAAUAAAcAAAcBEAcA0AcAAAWQsAUAAAUAAAUAAAUAAAUAAAcAAAcAoAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAYAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcBYAegAAcAAAcAAAegAAcAcAcAAAcAAAcAAAcAAAcAAAeRkAegAAegAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAEAcAAAcAAAcAAAcAUAcAQAcAAAcBIAeQAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcBsAcAAAcAAAcBcAeQAAUAAAUAAAUAAAUAAAUAAAUBQAcBYAUAAAUAAAUAoAWRYAWTQAWQAAUAAAUAAAUAAAcAAAcAAAcAAAcAAAcAAAcAMAcAAAcAQAcAAAcAAAcAAAcDMAeSIAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcAAAcBQAeQwAcAAAcAAAcAAAcAMAcAAAeSoAcA8AcDMAcAYAeQoAcAwAcFQAcEMAeVIAaTYAbBcNYAsAYBIAYAIAYAIAYBUAYCwAYBMAYDYAYCkAYDcAUCoAUCcAUAUAUBAAWgAAYBoAYBcAYCgAUAMAUAYAUBYAUA4AUBgAUAgAUAgAUAsAUAsAUA4AUAMAUAYAUAQAUBIAASsSUDAAUDAAUBAAYAYAUBAAUAUAUCAAUBoAUCAAUBAAUAoAYAIAUAQAUAgAUCcAUAsAUCIAUCUAUAoAUA4AUB8AUBkAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAf
gAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAAfgAA%22%2C%22tz%22%3A32%2C%22did%22%3A%22DA932FFFFE8816E7%22%2C%22src%22%3A24%7D%5D%2C%22summary%22%3A%22%7B%5C%22v%5C%22%3A6%2C%5C%22slp%5C%22%3A%7B%5C%22st%5C%22%3A1628296479%2C%5C%22ed%5C%22%3A1628296479%2C%5C%22dp%5C%22%3A0%2C%5C%22lt%5C%22%3A0%2C%5C%22wk%5C%22%3A0%2C%5C%22usrSt%5C%22%3A-1440%2C%5C%22usrEd%5C%22%3A-1440%2C%5C%22wc%5C%22%3A0%2C%5C%22is%5C%22%3A0%2C%5C%22lb%5C%22%3A0%2C%5C%22to%5C%22%3A0%2C%5C%22dt%5C%22%3A0%2C%5C%22rhr%5C%22%3A0%2C%5C%22ss%5C%22%3A0%7D%2C%5C%22stp%5C%22%3A%7B%5C%22ttl%5C%22%3A18272%2C%5C%22dis%5C%22%3A10627%2C%5C%22cal%5C%22%3A510%2C%5C%22wk%5C%22%3A41%2C%5C%22rn%5C%22%3A50%2C%5C%22runDist%5C%22%3A7654%2C%5C%22runCal%5C%22%3A397%2C%5C%22stage%5C%22%3A%5B%7B%5C%22start%5C%22%3A327%2C%5C%22stop%5C%22%3A341%2C%5C%22mode%5C%22%3A1%2C%5C%22dis%5C%22%3A481%2C%5C%22cal%5C%22%3A13%2C%5C%22step%5C%22%3A680%7D%2C%7B%5C%22start%5C%22%3A342%2C%5C%22stop%5C%22%3A367%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A2295%2C%5C%22cal%5C%22%3A95%2C%5C%22step%5C%22%3A2874%7D%2C%7B%5C%22start%5C%22%3A368%2C%5C%22stop%5C%22%3A377%2C%5C%22mode%5C%22%3A4%2C%5C%22dis%5C%22%3A1592%2C%5C%22cal%5C%22%3A88%2C%5C%22step%5C%22%3A1664%7D%2C%7B%5C%22start%5C%22%3A378%2C%5C%22stop%5C%22%3A386%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A1072%2C%5C%22cal%5C%22%3A51%2C%5C%22step%5C%22%3A1245%7D%2C%7B%5C%22start%5C%22%3A387%2C%5C%22stop%5C%22%3A393%2C%5C%22mode%5C%22%3A4%2C%5C%22dis%5C%22%3A1036%2C%5C%22cal%5C%22%3A57%2C%5C%22step%5C%22%3A1124%7D%2C%7B%5C%22start%5C%22%3A394%2C%5C%22stop%5C%22%3A398%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A488%2C%5C%22cal%5C%22%3A19%2C%5C%22step%5C%22%3A607%7D%2C%7B%5C%22start%5C%22%3A399%2C%5C%22stop%5C%22%3A414%2C%5C%22mode%5C%22%3A4%2C%5C%22dis%5C%22%3A2220%2C%5C%22cal%5C%22%3A120%2C%5C%22step%5C%22%3A2371%7D%2C%7B%5C%22start%5C%22%3A415%2C%5C%22stop%5C%22%3A427%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A1268%2C%5C%22cal%5C%22%3A59%2C%5C%22step%5C%22%3A1489%7D%2C%7B%5C%22start%5C%22%3A428%2C%5C%22stop%5C%22%3A433%2C%5C%22mode%5C%22%3A1%2C%5C%22dis%5C%22%3A152%2C%5C%22cal%5C%22%3A4%2C%5C%22step%5C%22%3A238%7D%2C%7B%5C%22start%5C%22%3A434%2C%5C%22stop%5C%22%3A444%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A2295%2C%5C%22cal%5C%22%3A95%2C%5C%22step%5C%22%3A2874%7D%2C%7B%5C%22start%5C%22%3A445%2C%5C%22stop%5C%22%3A455%2C%5C%22mode%5C%22%3A4%2C%5
C%22dis%5C%22%3A1592%2C%5C%22cal%5C%22%3A88%2C%5C%22step%5C%22%3A1664%7D%2C%7B%5C%22start%5C%22%3A456%2C%5C%22stop%5C%22%3A466%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A1072%2C%5C%22cal%5C%22%3A51%2C%5C%22step%5C%22%3A1245%7D%2C%7B%5C%22start%5C%22%3A467%2C%5C%22stop%5C%22%3A477%2C%5C%22mode%5C%22%3A4%2C%5C%22dis%5C%22%3A1036%2C%5C%22cal%5C%22%3A57%2C%5C%22step%5C%22%3A1124%7D%2C%7B%5C%22start%5C%22%3A478%2C%5C%22stop%5C%22%3A488%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A488%2C%5C%22cal%5C%22%3A19%2C%5C%22step%5C%22%3A607%7D%2C%7B%5C%22start%5C%22%3A489%2C%5C%22stop%5C%22%3A499%2C%5C%22mode%5C%22%3A4%2C%5C%22dis%5C%22%3A2220%2C%5C%22cal%5C%22%3A120%2C%5C%22step%5C%22%3A2371%7D%2C%7B%5C%22start%5C%22%3A500%2C%5C%22stop%5C%22%3A511%2C%5C%22mode%5C%22%3A3%2C%5C%22dis%5C%22%3A1268%2C%5C%22cal%5C%22%3A59%2C%5C%22step%5C%22%3A1489%7D%2C%7B%5C%22start%5C%22%3A512%2C%5C%22stop%5C%22%3A522%2C%5C%22mode%5C%22%3A1%2C%5C%22dis%5C%22%3A152%2C%5C%22cal%5C%22%3A4%2C%5C%22step%5C%22%3A238%7D%5D%7D%2C%5C%22goal%5C%22%3A8000%2C%5C%22tz%5C%22%3A%5C%2228800%5C%22%7D%22%2C%22source%22%3A24%2C%22type%22%3A0%7D%5D'
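    # data_json above is a captured payload template; patch today's date and the desired step total into it below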
finddate = re.compile(r".*?date%22%3A%22(.*?)%22%2C%22data.*?")
findstep = re.compile(r".*?ttl%5C%22%3A(.*?)%2C%5C%22dis.*?")
data_json = re.sub(finddate.findall(data_json)[0], today, str(data_json))
data_json = re.sub(findstep.findall(data_json)[0], step, str(data_json))
url = f'https://api-mifit-cn.huami.com/v1/data/band_data.json?&t={t}'
head = {
"apptoken": app_token,
"Content-Type": "application/x-www-form-urlencoded"
}
data = f'userid={userid}&last_sync_data_time=1597306380&device_type=0&last_deviceid=DA932FFFFE8816E7&data_json={data_json}'
response = requests.post(url, data=data, headers=head).json()
# print(response)
_add = ""
if K != 1.0:
_add = type + ",已设置降低步数,系数为" + str(K) + "。\n"
result = f"[{now}]\n账号:{user}\n由于天气{_add} 修改步数({step})\n" + response['message']
print(result)
if a:
        push('[Mi Fit step count update]', result)
push_wx(result)
run(result)
else:
print("此次修改结果不推送")
return result
# Fetch a server timestamp
def get_time():
url = 'http://api.m.taobao.com/rest/api3.do?api=mtop.common.getTimestamp'
response = requests.get(url, headers=headers).json()
t = response['data']['t']
return t
# Exchange the login_token for an app_token
def get_app_token(login_token):
url = f"https://account-cn.huami.com/v1/client/app_tokens?app_name=com.xiaomi.hm.health&dn=api-user.huami.com%2Capi-mifit.huami.com%2Capp-analytics.huami.com&login_token={login_token}"
response = requests.get(url, headers=headers).json()
app_token = response['token_info']['app_token']
# print("app_token获取成功!")
# print(app_token)
return app_token
# Send a notification via CoolPush
def push(title, content):
if skey == "NO":
return
else:
url = "https://push.xuthus.cc/send/" + skey
data = title + "\n" + content
        # Send the request
res = requests.post(url=url, data=data.encode('utf-8')).text
        # Print the send result
print(res)
# Push via ServerChan
def push_wx(desp=""):
if sckey == 'NO':
return
else:
server_url = f"https://sc.ftqq.com/{sckey}.send"
params = {
"text": '【小米运动步数修改】',
"desp": desp
}
response = requests.get(server_url, params=params).text
print(response)
# WeCom (企业微信) push
def get_access_token():
urls = base_url + 'corpid=' + corpid + '&corpsecret=' + corpsecret
resp = requests.get(urls).json()
access_token = resp['access_token']
return access_token
def run(msg):
    # Push via WeCom only when it is enabled through the position flag
    if position != "true":
        return
    data = {
        "touser": touser,
        "toparty": toparty,
        "totag": totag,
        "msgtype": "text",
        "agentid": agentid,
        "text": {
            "content": "[Mi Fit step count update]\n" + msg
        },
        "safe": 0,
        "enable_id_trans": 0,
        "enable_duplicate_check": 0,
        "duplicate_check_interval": 1800
    }
    data = json.dumps(data)
    req_urls = req_url + get_access_token()
    resp = requests.post(url=req_urls, data=data).text
    print(resp)
    return resp
def main_handler(event, context):
getBeijinTime()
if __name__ == "__main__":
getBeijinTime()
|
py | 7dfe66c608fa259e60beffe4e30bea3e929395f6 | __version__ = '0.7.5'
|
py | 7dfe6a1f8d975adb3afa97490ebc3cf819520d1c | """This module contains the general information for SwPortDiscoverFsm ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class SwPortDiscoverFsmConsts:
COMPLETION_TIME_ = ""
CURRENT_FSM_DEPLOY = "Deploy"
CURRENT_FSM_NOP = "nop"
FSM_STATUS_FAIL = "fail"
FSM_STATUS_IN_PROGRESS = "inProgress"
FSM_STATUS_NOP = "nop"
FSM_STATUS_PENDING = "pending"
FSM_STATUS_SKIP = "skip"
FSM_STATUS_SUCCESS = "success"
FSM_STATUS_THROTTLED = "throttled"
RMT_ERR_CODE_ERR_2FA_AUTH_RETRY = "ERR-2fa-auth-retry"
RMT_ERR_CODE_ERR_ACTIVATE_FAILED = "ERR-ACTIVATE-failed"
RMT_ERR_CODE_ERR_ACTIVATE_IN_PROGRESS = "ERR-ACTIVATE-in-progress"
RMT_ERR_CODE_ERR_ACTIVATE_RETRY = "ERR-ACTIVATE-retry"
RMT_ERR_CODE_ERR_BIOS_TOKENS_OLD_BIOS = "ERR-BIOS-TOKENS-OLD-BIOS"
RMT_ERR_CODE_ERR_BIOS_TOKENS_OLD_CIMC = "ERR-BIOS-TOKENS-OLD-CIMC"
RMT_ERR_CODE_ERR_BIOS_NETWORK_BOOT_ORDER_NOT_FOUND = "ERR-BIOS-network-boot-order-not-found"
RMT_ERR_CODE_ERR_BOARDCTRLUPDATE_IGNORE = "ERR-BOARDCTRLUPDATE-ignore"
RMT_ERR_CODE_ERR_DIAG_CANCELLED = "ERR-DIAG-cancelled"
RMT_ERR_CODE_ERR_DIAG_FSM_RESTARTED = "ERR-DIAG-fsm-restarted"
RMT_ERR_CODE_ERR_DIAG_TEST_FAILED = "ERR-DIAG-test-failed"
RMT_ERR_CODE_ERR_DNLD_AUTHENTICATION_FAILURE = "ERR-DNLD-authentication-failure"
RMT_ERR_CODE_ERR_DNLD_HOSTKEY_MISMATCH = "ERR-DNLD-hostkey-mismatch"
RMT_ERR_CODE_ERR_DNLD_INVALID_IMAGE = "ERR-DNLD-invalid-image"
RMT_ERR_CODE_ERR_DNLD_NO_FILE = "ERR-DNLD-no-file"
RMT_ERR_CODE_ERR_DNLD_NO_SPACE = "ERR-DNLD-no-space"
RMT_ERR_CODE_ERR_DNLD_USB_UNMOUNTED = "ERR-DNLD-usb-unmounted"
RMT_ERR_CODE_ERR_DNS_DELETE_ERROR = "ERR-DNS-delete-error"
RMT_ERR_CODE_ERR_DNS_GET_ERROR = "ERR-DNS-get-error"
RMT_ERR_CODE_ERR_DNS_SET_ERROR = "ERR-DNS-set-error"
RMT_ERR_CODE_ERR_DIAGNOSTICS_IN_PROGRESS = "ERR-Diagnostics-in-progress"
RMT_ERR_CODE_ERR_DIAGNOSTICS_MEMTEST_IN_PROGRESS = "ERR-Diagnostics-memtest-in-progress"
RMT_ERR_CODE_ERR_DIAGNOSTICS_NETWORK_IN_PROGRESS = "ERR-Diagnostics-network-in-progress"
RMT_ERR_CODE_ERR_FILTER_ILLEGAL_FORMAT = "ERR-FILTER-illegal-format"
RMT_ERR_CODE_ERR_FSM_NO_SUCH_STATE = "ERR-FSM-no-such-state"
RMT_ERR_CODE_ERR_HOST_FRU_IDENTITY_MISMATCH = "ERR-HOST-fru-identity-mismatch"
RMT_ERR_CODE_ERR_HTTP_SET_ERROR = "ERR-HTTP-set-error"
RMT_ERR_CODE_ERR_HTTPS_SET_ERROR = "ERR-HTTPS-set-error"
RMT_ERR_CODE_ERR_IBMC_ANALYZE_RESULTS = "ERR-IBMC-analyze-results"
RMT_ERR_CODE_ERR_IBMC_CONNECT_ERROR = "ERR-IBMC-connect-error"
RMT_ERR_CODE_ERR_IBMC_CONNECTOR_INFO_RETRIEVAL_ERROR = "ERR-IBMC-connector-info-retrieval-error"
RMT_ERR_CODE_ERR_IBMC_FRU_RETRIEVAL_ERROR = "ERR-IBMC-fru-retrieval-error"
RMT_ERR_CODE_ERR_IBMC_INVALID_END_POINT_CONFIG = "ERR-IBMC-invalid-end-point-config"
RMT_ERR_CODE_ERR_IBMC_RESULTS_NOT_READY = "ERR-IBMC-results-not-ready"
RMT_ERR_CODE_ERR_MAX_SUBSCRIPTIONS_ALLOWED_ERROR = "ERR-MAX-subscriptions-allowed-error"
RMT_ERR_CODE_ERR_MO_CONFIG_CHILD_OBJECT_CANT_BE_CONFIGURED = "ERR-MO-CONFIG-child-object-cant-be-configured"
RMT_ERR_CODE_ERR_MO_META_NO_SUCH_OBJECT_CLASS = "ERR-MO-META-no-such-object-class"
RMT_ERR_CODE_ERR_MO_PROPERTY_NO_SUCH_PROPERTY = "ERR-MO-PROPERTY-no-such-property"
RMT_ERR_CODE_ERR_MO_PROPERTY_VALUE_OUT_OF_RANGE = "ERR-MO-PROPERTY-value-out-of-range"
RMT_ERR_CODE_ERR_MO_ACCESS_DENIED = "ERR-MO-access-denied"
RMT_ERR_CODE_ERR_MO_DELETION_RULE_VIOLATION = "ERR-MO-deletion-rule-violation"
RMT_ERR_CODE_ERR_MO_DUPLICATE_OBJECT = "ERR-MO-duplicate-object"
RMT_ERR_CODE_ERR_MO_ILLEGAL_CONTAINMENT = "ERR-MO-illegal-containment"
RMT_ERR_CODE_ERR_MO_ILLEGAL_CREATION = "ERR-MO-illegal-creation"
RMT_ERR_CODE_ERR_MO_ILLEGAL_ITERATOR_STATE = "ERR-MO-illegal-iterator-state"
RMT_ERR_CODE_ERR_MO_ILLEGAL_OBJECT_LIFECYCLE_TRANSITION = "ERR-MO-illegal-object-lifecycle-transition"
RMT_ERR_CODE_ERR_MO_NAMING_RULE_VIOLATION = "ERR-MO-naming-rule-violation"
RMT_ERR_CODE_ERR_MO_OBJECT_NOT_FOUND = "ERR-MO-object-not-found"
RMT_ERR_CODE_ERR_MO_RESOURCE_ALLOCATION = "ERR-MO-resource-allocation"
RMT_ERR_CODE_ERR_NTP_DELETE_ERROR = "ERR-NTP-delete-error"
RMT_ERR_CODE_ERR_NTP_GET_ERROR = "ERR-NTP-get-error"
RMT_ERR_CODE_ERR_NTP_SET_ERROR = "ERR-NTP-set-error"
RMT_ERR_CODE_ERR_POWER_CAP_UNSUPPORTED = "ERR-POWER-CAP-UNSUPPORTED"
RMT_ERR_CODE_ERR_POWER_PROFILE_IN_PROGRESS = "ERR-POWER-PROFILE-IN-PROGRESS"
RMT_ERR_CODE_ERR_SERVER_MIS_CONNECT = "ERR-SERVER-mis-connect"
RMT_ERR_CODE_ERR_SWITCH_INVALID_IF_CONFIG = "ERR-SWITCH-invalid-if-config"
RMT_ERR_CODE_ERR_TOKEN_REQUEST_DENIED = "ERR-TOKEN-request-denied"
RMT_ERR_CODE_ERR_UNABLE_TO_FETCH_BIOS_SETTINGS = "ERR-UNABLE-TO-FETCH-BIOS-SETTINGS"
RMT_ERR_CODE_ERR_UPDATE_FAILED = "ERR-UPDATE-failed"
RMT_ERR_CODE_ERR_UPDATE_IN_PROGRESS = "ERR-UPDATE-in-progress"
RMT_ERR_CODE_ERR_UPDATE_RETRY = "ERR-UPDATE-retry"
RMT_ERR_CODE_ERR_AAA_CONFIG_MODIFY_ERROR = "ERR-aaa-config-modify-error"
RMT_ERR_CODE_ERR_ACCT_REALM_SET_ERROR = "ERR-acct-realm-set-error"
RMT_ERR_CODE_ERR_ADMIN_PASSWD_SET = "ERR-admin-passwd-set"
RMT_ERR_CODE_ERR_AUTH_ISSUE = "ERR-auth-issue"
RMT_ERR_CODE_ERR_AUTH_REALM_GET_ERROR = "ERR-auth-realm-get-error"
RMT_ERR_CODE_ERR_AUTH_REALM_SET_ERROR = "ERR-auth-realm-set-error"
RMT_ERR_CODE_ERR_AUTHENTICATION = "ERR-authentication"
RMT_ERR_CODE_ERR_AUTHORIZATION_REQUIRED = "ERR-authorization-required"
RMT_ERR_CODE_ERR_CLI_SESSION_LIMIT_REACHED = "ERR-cli-session-limit-reached"
RMT_ERR_CODE_ERR_CREATE_KEYRING = "ERR-create-keyring"
RMT_ERR_CODE_ERR_CREATE_LOCALE = "ERR-create-locale"
RMT_ERR_CODE_ERR_CREATE_ROLE = "ERR-create-role"
RMT_ERR_CODE_ERR_CREATE_TP = "ERR-create-tp"
RMT_ERR_CODE_ERR_CREATE_USER = "ERR-create-user"
RMT_ERR_CODE_ERR_DELETE_LOCALE = "ERR-delete-locale"
RMT_ERR_CODE_ERR_DELETE_ROLE = "ERR-delete-role"
RMT_ERR_CODE_ERR_DELETE_SESSION = "ERR-delete-session"
RMT_ERR_CODE_ERR_DELETE_USER = "ERR-delete-user"
RMT_ERR_CODE_ERR_DOWNGRADE_FAIL = "ERR-downgrade-fail"
RMT_ERR_CODE_ERR_EFI_DIAGNOSTICS_IN_PROGRESS = "ERR-efi-Diagnostics--in-progress"
RMT_ERR_CODE_ERR_ENABLE_MGMT_CONN = "ERR-enable-mgmt-conn"
RMT_ERR_CODE_ERR_EP_SET_ERROR = "ERR-ep-set-error"
RMT_ERR_CODE_ERR_GET_MAX_HTTP_USER_SESSIONS = "ERR-get-max-http-user-sessions"
RMT_ERR_CODE_ERR_HTTP_INITIALIZING = "ERR-http-initializing"
RMT_ERR_CODE_ERR_INSUFFICIENTLY_EQUIPPED = "ERR-insufficiently-equipped"
RMT_ERR_CODE_ERR_INTERNAL_ERROR = "ERR-internal-error"
RMT_ERR_CODE_ERR_LDAP_DELETE_ERROR = "ERR-ldap-delete-error"
RMT_ERR_CODE_ERR_LDAP_GET_ERROR = "ERR-ldap-get-error"
RMT_ERR_CODE_ERR_LDAP_GROUP_MODIFY_ERROR = "ERR-ldap-group-modify-error"
RMT_ERR_CODE_ERR_LDAP_GROUP_SET_ERROR = "ERR-ldap-group-set-error"
RMT_ERR_CODE_ERR_LDAP_SET_ERROR = "ERR-ldap-set-error"
RMT_ERR_CODE_ERR_LOCALE_SET_ERROR = "ERR-locale-set-error"
RMT_ERR_CODE_ERR_MAX_USERID_SESSIONS_REACHED = "ERR-max-userid-sessions-reached"
RMT_ERR_CODE_ERR_MISSING_METHOD = "ERR-missing-method"
RMT_ERR_CODE_ERR_MODIFY_LOCALE = "ERR-modify-locale"
RMT_ERR_CODE_ERR_MODIFY_ROLE = "ERR-modify-role"
RMT_ERR_CODE_ERR_MODIFY_USER = "ERR-modify-user"
RMT_ERR_CODE_ERR_MODIFY_USER_LOCALE = "ERR-modify-user-locale"
RMT_ERR_CODE_ERR_MODIFY_USER_ROLE = "ERR-modify-user-role"
RMT_ERR_CODE_ERR_PROVIDER_GROUP_MODIFY_ERROR = "ERR-provider-group-modify-error"
RMT_ERR_CODE_ERR_PROVIDER_GROUP_SET_ERROR = "ERR-provider-group-set-error"
RMT_ERR_CODE_ERR_RADIUS_GET_ERROR = "ERR-radius-get-error"
RMT_ERR_CODE_ERR_RADIUS_GLOBAL_SET_ERROR = "ERR-radius-global-set-error"
RMT_ERR_CODE_ERR_RADIUS_GROUP_SET_ERROR = "ERR-radius-group-set-error"
RMT_ERR_CODE_ERR_RADIUS_SET_ERROR = "ERR-radius-set-error"
RMT_ERR_CODE_ERR_REQUEST_TIMEOUT = "ERR-request-timeout"
RMT_ERR_CODE_ERR_RESET_ADAPTER = "ERR-reset-adapter"
RMT_ERR_CODE_ERR_ROLE_SET_ERROR = "ERR-role-set-error"
RMT_ERR_CODE_ERR_SECONDARY_NODE = "ERR-secondary-node"
RMT_ERR_CODE_ERR_SERVICE_NOT_READY = "ERR-service-not-ready"
RMT_ERR_CODE_ERR_SESSION_CACHE_FULL = "ERR-session-cache-full"
RMT_ERR_CODE_ERR_SESSION_NOT_FOUND = "ERR-session-not-found"
RMT_ERR_CODE_ERR_SET_KEY_CERT = "ERR-set-key-cert"
RMT_ERR_CODE_ERR_SET_LOGIN_PROFILE = "ERR-set-login-profile"
RMT_ERR_CODE_ERR_SET_MIN_PASSPHRASE_LENGTH = "ERR-set-min-passphrase-length"
RMT_ERR_CODE_ERR_SET_NETWORK = "ERR-set-network"
RMT_ERR_CODE_ERR_SET_PASSWORD_STRENGTH_CHECK = "ERR-set-password-strength-check"
RMT_ERR_CODE_ERR_SET_PORT_CHANNEL = "ERR-set-port-channel"
RMT_ERR_CODE_ERR_STORE_PRE_LOGIN_BANNER_MSG = "ERR-store-pre-login-banner-msg"
RMT_ERR_CODE_ERR_TACACS_ENABLE_ERROR = "ERR-tacacs-enable-error"
RMT_ERR_CODE_ERR_TACACS_GLOBAL_SET_ERROR = "ERR-tacacs-global-set-error"
RMT_ERR_CODE_ERR_TACACS_GROUP_SET_ERROR = "ERR-tacacs-group-set-error"
RMT_ERR_CODE_ERR_TACACS_PLUS_GET_ERROR = "ERR-tacacs-plus-get-error"
RMT_ERR_CODE_ERR_TACACS_SET_ERROR = "ERR-tacacs-set-error"
RMT_ERR_CODE_ERR_TEST_ERROR_1 = "ERR-test-error-1"
RMT_ERR_CODE_ERR_TEST_ERROR_2 = "ERR-test-error-2"
RMT_ERR_CODE_ERR_TIMEZONE_SET_ERROR = "ERR-timezone-set-error"
RMT_ERR_CODE_ERR_USER_ACCOUNT_EXPIRED = "ERR-user-account-expired"
RMT_ERR_CODE_ERR_USER_PASSWD_EXPIRED = "ERR-user-passwd-expired"
RMT_ERR_CODE_ERR_USER_SET_ERROR = "ERR-user-set-error"
RMT_ERR_CODE_ERR_XML_PARSE_ERROR = "ERR-xml-parse-error"
RMT_ERR_CODE_NONE = "none"
class SwPortDiscoverFsm(ManagedObject):
"""This is SwPortDiscoverFsm class."""
consts = SwPortDiscoverFsmConsts()
naming_props = set([])
mo_meta = MoMeta("SwPortDiscoverFsm", "swPortDiscoverFsm", "fsm", VersionMeta.Version321d, "OutputOnly", 0xf, [], [""], ['swPortDiscover'], ['swPortDiscoverFsmStage'], [None])
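    # Managed-object metadata: MO name, XML noun, rn ("fsm"), first supported version, access, parent and child classes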
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version321d, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"completion_time": MoPropertyMeta("completion_time", "completionTime", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [""], []),
"current_fsm": MoPropertyMeta("current_fsm", "currentFsm", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, None, ["Deploy", "nop"], []),
"descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []),
"fsm_status": MoPropertyMeta("fsm_status", "fsmStatus", "string", VersionMeta.Version321d, MoPropertyMeta.INTERNAL, None, None, None, None, ["fail", "inProgress", "nop", "pending", "skip", "success", "throttled"], []),
"instance_id": MoPropertyMeta("instance_id", "instanceId", "uint", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"progress": MoPropertyMeta("progress", "progress", "byte", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, None, [], ["0-100"]),
"rmt_err_code": MoPropertyMeta("rmt_err_code", "rmtErrCode", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, None, ["ERR-2fa-auth-retry", "ERR-ACTIVATE-failed", "ERR-ACTIVATE-in-progress", "ERR-ACTIVATE-retry", "ERR-BIOS-TOKENS-OLD-BIOS", "ERR-BIOS-TOKENS-OLD-CIMC", "ERR-BIOS-network-boot-order-not-found", "ERR-BOARDCTRLUPDATE-ignore", "ERR-DIAG-cancelled", "ERR-DIAG-fsm-restarted", "ERR-DIAG-test-failed", "ERR-DNLD-authentication-failure", "ERR-DNLD-hostkey-mismatch", "ERR-DNLD-invalid-image", "ERR-DNLD-no-file", "ERR-DNLD-no-space", "ERR-DNLD-usb-unmounted", "ERR-DNS-delete-error", "ERR-DNS-get-error", "ERR-DNS-set-error", "ERR-Diagnostics-in-progress", "ERR-Diagnostics-memtest-in-progress", "ERR-Diagnostics-network-in-progress", "ERR-FILTER-illegal-format", "ERR-FSM-no-such-state", "ERR-HOST-fru-identity-mismatch", "ERR-HTTP-set-error", "ERR-HTTPS-set-error", "ERR-IBMC-analyze-results", "ERR-IBMC-connect-error", "ERR-IBMC-connector-info-retrieval-error", "ERR-IBMC-fru-retrieval-error", "ERR-IBMC-invalid-end-point-config", "ERR-IBMC-results-not-ready", "ERR-MAX-subscriptions-allowed-error", "ERR-MO-CONFIG-child-object-cant-be-configured", "ERR-MO-META-no-such-object-class", "ERR-MO-PROPERTY-no-such-property", "ERR-MO-PROPERTY-value-out-of-range", "ERR-MO-access-denied", "ERR-MO-deletion-rule-violation", "ERR-MO-duplicate-object", "ERR-MO-illegal-containment", "ERR-MO-illegal-creation", "ERR-MO-illegal-iterator-state", "ERR-MO-illegal-object-lifecycle-transition", "ERR-MO-naming-rule-violation", "ERR-MO-object-not-found", "ERR-MO-resource-allocation", "ERR-NTP-delete-error", "ERR-NTP-get-error", "ERR-NTP-set-error", "ERR-POWER-CAP-UNSUPPORTED", "ERR-POWER-PROFILE-IN-PROGRESS", "ERR-SERVER-mis-connect", "ERR-SWITCH-invalid-if-config", "ERR-TOKEN-request-denied", "ERR-UNABLE-TO-FETCH-BIOS-SETTINGS", "ERR-UPDATE-failed", "ERR-UPDATE-in-progress", "ERR-UPDATE-retry", "ERR-aaa-config-modify-error", "ERR-acct-realm-set-error", "ERR-admin-passwd-set", "ERR-auth-issue", "ERR-auth-realm-get-error", "ERR-auth-realm-set-error", "ERR-authentication", "ERR-authorization-required", "ERR-cli-session-limit-reached", "ERR-create-keyring", "ERR-create-locale", "ERR-create-role", "ERR-create-tp", "ERR-create-user", "ERR-delete-locale", "ERR-delete-role", "ERR-delete-session", "ERR-delete-user", "ERR-downgrade-fail", "ERR-efi-Diagnostics--in-progress", "ERR-enable-mgmt-conn", "ERR-ep-set-error", "ERR-get-max-http-user-sessions", "ERR-http-initializing", "ERR-insufficiently-equipped", "ERR-internal-error", "ERR-ldap-delete-error", "ERR-ldap-get-error", "ERR-ldap-group-modify-error", "ERR-ldap-group-set-error", "ERR-ldap-set-error", "ERR-locale-set-error", "ERR-max-userid-sessions-reached", "ERR-missing-method", "ERR-modify-locale", "ERR-modify-role", "ERR-modify-user", "ERR-modify-user-locale", "ERR-modify-user-role", "ERR-provider-group-modify-error", "ERR-provider-group-set-error", "ERR-radius-get-error", "ERR-radius-global-set-error", "ERR-radius-group-set-error", "ERR-radius-set-error", "ERR-request-timeout", "ERR-reset-adapter", "ERR-role-set-error", "ERR-secondary-node", "ERR-service-not-ready", "ERR-session-cache-full", "ERR-session-not-found", "ERR-set-key-cert", "ERR-set-login-profile", "ERR-set-min-passphrase-length", "ERR-set-network", "ERR-set-password-strength-check", "ERR-set-port-channel", "ERR-store-pre-login-banner-msg", "ERR-tacacs-enable-error", "ERR-tacacs-global-set-error", "ERR-tacacs-group-set-error", "ERR-tacacs-plus-get-error", "ERR-tacacs-set-error", 
"ERR-test-error-1", "ERR-test-error-2", "ERR-timezone-set-error", "ERR-user-account-expired", "ERR-user-passwd-expired", "ERR-user-set-error", "ERR-xml-parse-error", "none"], ["0-4294967295"]),
"rmt_err_descr": MoPropertyMeta("rmt_err_descr", "rmtErrDescr", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"rmt_rslt": MoPropertyMeta("rmt_rslt", "rmtRslt", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|resource-unavailable|service-unavailable|intermittent-error|sw-defect|service-not-implemented-ignore|extend-timeout|capability-not-implemented-failure|illegal-fru|end-point-unavailable|failure|resource-capacity-exceeded|service-protocol-error|fw-defect|service-not-implemented-fail|task-reset|unidentified-fail|capability-not-supported|end-point-failed|fru-state-indeterminate|resource-dependency|fru-identity-indeterminate|internal-error|hw-defect|service-not-supported|fru-not-supported|end-point-protocol-error|capability-unavailable|fru-not-ready|capability-not-implemented-ignore|fru-info-malformed|timeout),){0,32}(defaultValue|not-applicable|resource-unavailable|service-unavailable|intermittent-error|sw-defect|service-not-implemented-ignore|extend-timeout|capability-not-implemented-failure|illegal-fru|end-point-unavailable|failure|resource-capacity-exceeded|service-protocol-error|fw-defect|service-not-implemented-fail|task-reset|unidentified-fail|capability-not-supported|end-point-failed|fru-state-indeterminate|resource-dependency|fru-identity-indeterminate|internal-error|hw-defect|service-not-supported|fru-not-supported|end-point-protocol-error|capability-unavailable|fru-not-ready|capability-not-implemented-ignore|fru-info-malformed|timeout){0,1}""", [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version321d, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"childAction": "child_action",
"completionTime": "completion_time",
"currentFsm": "current_fsm",
"descr": "descr",
"dn": "dn",
"fsmStatus": "fsm_status",
"instanceId": "instance_id",
"progress": "progress",
"rmtErrCode": "rmt_err_code",
"rmtErrDescr": "rmt_err_descr",
"rmtRslt": "rmt_rslt",
"rn": "rn",
"sacl": "sacl",
"status": "status",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.completion_time = None
self.current_fsm = None
self.descr = None
self.fsm_status = None
self.instance_id = None
self.progress = None
self.rmt_err_code = None
self.rmt_err_descr = None
self.rmt_rslt = None
self.sacl = None
self.status = None
ManagedObject.__init__(self, "SwPortDiscoverFsm", parent_mo_or_dn, **kwargs)
|
py | 7dfe6bf9ca8841147f1924f5e183927eed68eb9d | #!/usr/bin/env python
''' Script for correcting broken mapsplice BAMs using pysam
Adam Ewing ([email protected])
'''
import sys
import pysam
import os
import subprocess
from re import sub
def namesort_bam(bamfile):
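    ''' name-sort a BAM with samtools and return the sorted file name '''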
sortbase = bamfile + ".namesort"
sortfn = sortbase + ".bam"
sortargs = ['samtools','sort','-n','-@','8','-m','2G',bamfile,sortbase]
print "sorting, cmd: " + " ".join(sortargs)
subprocess.call(sortargs)
return sortfn
def possort_bam(bamfile):
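    ''' coordinate-sort a BAM with samtools, replacing the original file '''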
sortbase = bamfile + ".sort"
sortfn = sortbase + ".bam"
sortargs = ['samtools','sort','-@','8','-m','2G',bamfile,sortbase]
print "sorting, cmd: " + " ".join(sortargs)
subprocess.call(sortargs)
os.rename(sortfn,bamfile)
def validate(reads):
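    ''' return True only when the reads form a consistent, correctly flagged mate pair (or both mates are unmapped) '''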
if len(reads) > 2:
return False
if len(reads) == 2:
if reads[0].is_unmapped and reads[1].is_unmapped:
return True
has_read1 = has_read2 = False
for read in reads:
if read.is_read1 and read.is_read2:
return False
if not read.mate_is_unmapped and read.mpos < 0:
return False
if read.is_read1:
has_read1 = True
if read.is_read2:
has_read2 = True
if not (has_read1 and has_read2):
return False
# check isize
if abs(reads[0].isize) != abs(reads[1].isize):
return False
# check paired flag
if not (reads[0].is_paired and reads[1].is_paired):
return False
# mate strand agreement
if reads[0].is_reverse != reads[1].mate_is_reverse:
return False
if reads[1].is_reverse != reads[0].mate_is_reverse:
return False
# mate position and refid agreement
if reads[0].tid != reads[1].rnext or reads[1].tid != reads[0].rnext:
return False
if reads[0].pos != reads[1].mpos or reads[1].pos != reads[0].mpos:
return False
return True
def fixmates(reads):
''' if there are more than 2 reads in a pair:
1. if one is marked non-primary, remove it
        2. if (1) results in two 'read1' reads, mark one as read2
3. TODO: try to base (2) on FR read mapping scheme '''
if len(reads) > 2:
newreads = []
for read in reads:
if not read.is_secondary:
newreads.append(read)
if len(newreads) == 2:
if validate(newreads): # if rejecting a non-primary alignment fixes it
return newreads
else: # unsalvagable at present, don't output
sys.stderr.write("rejecting (three primary alignments for pair):" + newreads[0].qname + "\n")
return []
reads = newreads
if len(reads) == 2:
# fix mate strand agreement, position, refid
reads[1].mate_is_reverse = reads[0].is_reverse
reads[0].mate_is_reverse = reads[1].is_reverse
reads[0].rnext = reads[1].tid
reads[1].rnext = reads[0].tid
reads[0].mpos = reads[1].pos
reads[1].mpos = reads[0].pos
if validate(reads):
return reads
# fix unpaired by flag
if not reads[0].is_paired:
reads[0].is_paired=True
if not reads[1].is_paired:
reads[1].is_paired=True
if validate(reads):
return reads
# fix one-end anchored
if (not reads[0].is_unmapped) and reads[1].is_unmapped:
reads[0].mate_is_unmapped = True
if validate(reads):
return reads
elif (not reads[1].is_unmapped) and reads[0].is_unmapped:
reads[1].mate_is_unmapped = True
if validate(reads):
return reads
# fix insert size, set to smallest of the pair (mapsplice sets huge isizes sometimes)
if abs(reads[0].isize) != abs(reads[1].isize):
if (not reads[0].is_unmapped) and (not reads[1].is_unmapped) and (reads[0].tid == reads[1].tid):
reads[0].isize = reads[0].pos - reads[1].pos
reads[1].isize = reads[1].pos - reads[0].pos
else:
reads[0].isize = reads[1].isize = 0
# fix if mates don't have position/tid set
if (not reads[0].mate_is_unmapped and reads[0].mpos < 0) or (not reads[1].mate_is_unmapped and reads[1].mpos < 0):
reads[0].mpos = reads[1].pos
reads[0].rnext = reads[1].tid
reads[1].mpos = reads[0].pos
reads[1].rnext = reads[0].tid
if validate(reads):
return reads
# try to fix both-ended reads (where a read is marked both end1 and end2)
newreads = []
for read in reads:
if read.is_read1 and read.is_read2:
# try to infer correct order from query name
if read.qname.endswith('/1'):
read.is_read2 = False
elif read.qname.endswith('/2'):
read.is_read1 = False
newreads.append(read)
if validate(newreads): # if fixing both-ended reads is enough
return newreads
else:
reads = newreads
# fix situations where there is no read1 or no read2 in a pair
has_read1 = has_read2 = False
for read in reads:
if read.is_read1:
has_read1 = True
if read.is_read2:
has_read2 = True
# try to assign based on qname
newreads = []
if not (has_read1 and has_read2):
for read in reads:
if read.qname.endswith('/1'):
read.is_read1 = True
read.is_read2 = False
elif read.qname.endswith('/2'):
read.is_read2 = True
read.is_read1 = False
newreads.append(read)
reads = newreads
if validate(reads):
return reads
else:
# arbitrary assignment as last ditch option
reads[0].is_read1 = True
reads[0].is_read2 = False
reads[1].is_read1 = False
reads[1].is_read2 = True
if validate(reads):
return reads
sys.stderr.write("rejecting (could not correct):" + reads[0].qname + "\n")
return []
def writereads(reads, outbam):
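    ''' strip /1 and /2 suffixes from read names and write the reads to outbam '''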
for read in reads:
read.qname = sub('/[12]', '', read.qname)
outbam.write(read)
if len(sys.argv) == 2:
assert sys.argv[1].endswith('bam')
inbamfile = namesort_bam(sys.argv[1])
outbamfile = sub('bam$','fix.bam',sys.argv[1])
inbam = pysam.Samfile(inbamfile, 'rb')
outbam = pysam.Samfile(outbamfile, 'wb', template=inbam)
reads = []
passed = fixed = 0
lastname = None
for read in inbam.fetch(until_eof=True):
basename = sub('/[12]', '', read.qname)
if basename == lastname:
reads.append(read)
else:
if validate(reads):
passed += 1
writereads(reads, outbam)
else:
reads = fixmates(reads)
writereads(reads, outbam)
fixed += 1
reads = []
lastname = basename
reads.append(read)
# handle last reads
if validate(reads):
passed += 1
writereads(reads, outbam)
else:
reads = fixmates(reads)
writereads(reads, outbam)
fixed += 1
outbam.close()
os.remove(inbamfile)
print 'groups passed:',passed,'fixed:',fixed,'... sorting'
possort_bam(outbamfile)
else:
print "corrects mapsplice .bam files.\n"
print "usage:", sys.argv[0], "<BAM generated by mapsplice>"
|
py | 7dfe6c64f1c8043bc4f036db0c909a5800981571 | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Primer3(MakefilePackage):
"""Primer3 is a widely used program for designing PCR primers
(PCR = "Polymerase Chain Reaction"). PCR is an essential and
ubiquitous tool in genetics and molecular biology. Primer3
can also design hybridization probes and sequencing primers."""
homepage = "https://primer3.org/"
url = "https://github.com/primer3-org/primer3/archive/v2.3.7.tar.gz"
version('2.5.0', sha256='7581e2fa3228ef0ee1ffa427b2aa0a18fc635d561208327471daf59d1b804da0')
version('2.3.7', sha256='f7ac3e64dc89b7c80882bf0f52c2c0a58572f5fdafd178680d4a7ae91b6c465b')
build_directory = 'src'
# Prior to May 15, 2018, the code contained invalid pointer/int
# comparisons, leading to compilers that default to strict mode
# failing to compile thal.c.
# This prevents building 2.3.7 w/ [email protected]. Details here:
# https://github.com/primer3-org/primer3/issues/2
# https://github.com/primer3-org/primer3/issues/3
def patch(self):
if self.spec.version == Version('2.3.7'):
filter_file(r'^(CC_OPTS.*)', r'\1 -fpermissive',
join_path('src', 'Makefile'))
def install(self, spec, prefix):
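        # Copy the built binaries from the source tree into the installation prefix by hand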
with working_dir(self.build_directory):
mkdirp(prefix.bin)
for binary in ('primer3_core', 'ntdpal', 'oligotm',
'long_seq_tm_test'):
install(binary, prefix.bin)
|
py | 7dfe6c7a1157660a52eea25bac2c2ce5e2b58492 | # ===============================================================================
# Copyright 2016 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= standard library imports ========================
import os
import logging
from math import isnan
import yaml
from numpy import array, where, zeros
from osgeo import gdal
# ============= local library imports ==========================
class InvalidDataSourceException(BaseException):
def __init__(self, path):
self._path = path
def __str__(self):
return 'InvalidDataSource. Could not locate "{}"'.format(self._path)
def write_tiff(path, params, data, driver=None):
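    """
    Write a 2-D array to a GeoTIFF, taking geometry and projection from params
    (as produced by tif_params).
    :param path: output file path
    :param params: dict of raster metadata
    :param data: numpy.ndarray to write
    :param driver: optional gdal driver (defaults to GTiff)
    """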
if driver is None:
driver = gdal.GetDriverByName('GTiff')
args = params['cols'], params['rows'], params['bands'], params['datatype']
geo_transform = params['geo_transform']
projection = params['projection']
band = params['band']
out = driver.Create(path, *args)
out.SetGeoTransform(geo_transform)
    out.SetProjection(projection)
outband = out.GetRasterBand(band)
outband.WriteArray(data, 0, 0)
def tif_path(root, name):
"""
:param root:
:param name:
:return:
"""
if not name.endswith('.tif'):
name = '{}.tif'.format(name)
path = os.path.join(root, name)
return path
def tif_params(root, name, band=1):
"""
:param root:
:param name:
:param band:
:return:
"""
path = tif_path(root, name)
obj = gdal.Open(path)
band = obj.GetRasterBand(band)
d = {'cols': obj.RasterXSize, 'rows': obj.RasterYSize,
'bands': obj.RasterCount,
'band': band,
'projection': obj.GetProjection(),
'geo_transform': obj.GetGeoTransform(),
'datatype': band.DataType}
# probably not necessary
del obj
return d
def tif_to_array(root, name, band=1):
"""
Helper function for getting an array from a tiff
:param root: directory
:type root: str
:param name: name of file
:type name: str
:param band: band
:type band: int
:return: numpy.ndarray
"""
if not name.endswith('.tif'):
name = '{}.tif'.format(name)
path = os.path.join(root, name)
if not os.path.isfile(path):
logging.critical('Could not locate {}'.format(path))
raise InvalidDataSourceException(path)
rband = gdal.Open(path).GetRasterBand(band)
return array(rband.ReadAsArray())
def clean(d):
"""
Replace NaN with 0
:param d: input array
:type d: numpy.ndarray
:return: numpy.ndarray
"""
return where(isnan(d), zeros(d.shape), d)
# ============= EOF =============================================
|
py | 7dfe6c96e332397be77a362489c4bc43282e29f1 | import time
from pymongo import MongoClient
client = MongoClient(host="server.lan")
user = 'device_user'
password = 'kyyfzx78..'
db = client['device_register']
db.authenticate(user, password)
def create_index(collection, index_list: list):
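    """Create an index from a list of (field, direction) pairs and return its name."""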
return db[collection].create_index(index_list)
if __name__ == '__main__':
# indexs = [
# # ("used", -1),
# ("create_time", -1),
# ]
# print(create_index("dy_multi", indexs))
# print(create_index("hs_multi", indexs))
# print(create_index("gua_multi", indexs))
# print(create_index("toutiao_multi", indexs))
result = db['gua_multi'].find_one_and_update({"used": 0, "remark": {'$regex': 'laozhuo'}, "use_count": {"$lt": 3}}, {"$set": {"used": 1, "user": "deng", "get_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())}})
print(result) |
py | 7dfe703f751b30c0ab9bc1321d4aa5bbcc054d14 | import logging
import os
import sys
from dotenv import load_dotenv
from logbook import Logger, StreamHandler
from logbook.compat import redirect_logging
from telegram import (
InlineKeyboardButton,
InlineKeyboardMarkup,
MessageEntity,
ForceReply,
ParseMode,
Update,
)
from telegram.ext import (
Updater,
CommandHandler,
Filters,
MessageHandler,
CallbackQueryHandler,
PreCheckoutQueryHandler,
CallbackContext,
)
from telegram.ext import messagequeue as mq
from telegram.utils.request import Request
from pdf_bot import *
load_dotenv()
APP_URL = os.environ.get("APP_URL")
PORT = int(os.environ.get("PORT", "8443"))
TELE_TOKEN = os.environ.get("TELE_TOKEN_BETA", os.environ.get("TELE_TOKEN"))
DEV_TELE_ID = int(os.environ.get("DEV_TELE_ID"))
TIMEOUT = 20
CALLBACK_DATA = "callback_data"
def main():
# Setup logging
logging.getLogger("pdfminer").setLevel(logging.WARNING)
logging.getLogger("ocrmypdf").setLevel(logging.WARNING)
redirect_logging()
format_string = "{record.level_name}: {record.message}"
StreamHandler(
sys.stdout, format_string=format_string, level="INFO"
).push_application()
log = Logger()
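    # Queue outgoing messages so the bot stays under Telegram's flood limits (bursts of 3 within 3 s)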
q = mq.MessageQueue(all_burst_limit=3, all_time_limit_ms=3000)
request = Request(con_pool_size=8)
pdf_bot = MQBot(TELE_TOKEN, request=request, mqueue=q)
# Create the EventHandler and pass it your bot's token.
updater = Updater(
bot=pdf_bot,
use_context=True,
request_kwargs={"connect_timeout": TIMEOUT, "read_timeout": TIMEOUT},
)
# Get the dispatcher to register handlers
dispatcher = updater.dispatcher
# General commands handlers
dispatcher.add_handler(CommandHandler("start", start_msg, run_async=True))
dispatcher.add_handler(CommandHandler("help", help_msg, run_async=True))
dispatcher.add_handler(CommandHandler("setlang", send_lang, run_async=True))
dispatcher.add_handler(
CommandHandler("support", send_support_options, run_async=True)
)
dispatcher.add_handler(CommandHandler("send", send_msg, Filters.user(DEV_TELE_ID)))
dispatcher.add_handler(
CommandHandler("stats", get_stats, Filters.user(DEV_TELE_ID))
)
# Callback query handler
dispatcher.add_handler(CallbackQueryHandler(process_callback_query, run_async=True))
# Payment handlers
dispatcher.add_handler(
MessageHandler(
Filters.reply & TEXT_FILTER, receive_custom_amount, run_async=True
)
)
dispatcher.add_handler(PreCheckoutQueryHandler(precheckout_check, run_async=True))
dispatcher.add_handler(
MessageHandler(Filters.successful_payment, successful_payment, run_async=True)
)
# URL handler
dispatcher.add_handler(
MessageHandler(Filters.entity(MessageEntity.URL), url_to_pdf, run_async=True)
)
# PDF commands handlers
dispatcher.add_handler(compare_cov_handler())
dispatcher.add_handler(merge_cov_handler())
dispatcher.add_handler(photo_cov_handler())
dispatcher.add_handler(text_cov_handler())
dispatcher.add_handler(watermark_cov_handler())
# PDF file handler
dispatcher.add_handler(file_cov_handler())
# Feedback handler
dispatcher.add_handler(feedback_cov_handler())
# Log all errors
dispatcher.add_error_handler(error_callback)
# Start the Bot
if APP_URL is not None:
updater.start_webhook(listen="0.0.0.0", port=PORT, url_path=TELE_TOKEN)
updater.bot.set_webhook(APP_URL + TELE_TOKEN)
log.notice("Bot started webhook")
else:
updater.start_polling()
log.notice("Bot started polling")
    # Run the bot until you press Ctrl-C or the process receives SIGINT,
# SIGTERM or SIGABRT. This should be used most of the time, since
# start_polling() is non-blocking and will stop the bot gracefully.
updater.idle()
def start_msg(update, context):
_ = set_lang(update, context)
update.effective_message.reply_text(
_(
"Welcome to PDF Bot!\n\n<b>Key features:</b>\n"
"- Compress, merge, preview, rename, split and add watermark to PDF files\n"
"- Create PDF files from text messages\n"
"- Extract images and text from PDF files\n"
"- Convert PDF files into images\n"
"- Convert webpages and images into PDF files\n"
"- Beautify handwritten notes images into PDF files\n"
"- <b><i>And more...</i></b>\n\n"
"Type /help to see how to use PDF Bot"
),
parse_mode=ParseMode.HTML,
)
# Create the user entity in Datastore
create_user(update.effective_message.from_user.id)
def help_msg(update, context):
_ = set_lang(update, context)
keyboard = [
[InlineKeyboardButton(_("Set Language"), callback_data=SET_LANG)],
[
InlineKeyboardButton(_("Join Channel"), f"https://t.me/{CHANNEL_NAME}"),
InlineKeyboardButton(_("Support PDF Bot"), callback_data=PAYMENT),
],
]
reply_markup = InlineKeyboardMarkup(keyboard)
update.effective_message.reply_text(
_(
"You can perform most of the tasks by sending me one of the followings:\n"
"- PDF files\n- Photos\n- Webpage links\n\n"
"The rest of the tasks can be performed by using the commands below:\n"
"/compare - compare PDF files\n"
"/merge - merge PDF files\n"
"/photo - convert and combine multiple photos into PDF files\n"
"/text - create PDF files from text messages\n"
"/watermark - add watermark to PDF files"
),
reply_markup=reply_markup,
)
def process_callback_query(update: Update, context: CallbackContext):
_ = set_lang(update, context)
query = update.callback_query
data = query.data
if CALLBACK_DATA not in context.user_data:
context.user_data[CALLBACK_DATA] = set()
if data not in context.user_data[CALLBACK_DATA]:
context.user_data[CALLBACK_DATA].add(data)
if data == SET_LANG:
send_lang(update, context, query)
elif data in LANGUAGES:
store_lang(update, context, query)
if data == PAYMENT:
send_support_options(update, context, query)
elif data in [THANKS, COFFEE, BEER, MEAL]:
send_payment_invoice(update, context, query)
elif data == CUSTOM:
context.bot.send_message(
query.from_user.id,
_("Send me the amount that you'll like to support PDF Bot"),
reply_markup=ForceReply(),
)
context.user_data[CALLBACK_DATA].remove(data)
query.answer()
def send_msg(update, context):
tele_id = int(context.args[0])
message = " ".join(context.args[1:])
try:
context.bot.send_message(tele_id, message)
except Exception as e:
log = Logger()
log.error(e)
        update.effective_message.reply_text("Failed to send message")
def error_callback(update, context):
log = Logger()
log.error(f'Update "{update}" caused error "{context.error}"')
if __name__ == "__main__":
main()
|
py | 7dfe709a2e9631967d5170ac00b026b2ad39f689 | import argparse
import datetime
import json
import numpy as np
import os
import time
from pathlib import Path
import torch
import torch.backends.cudnn as cudnn
from torch.utils.tensorboard import SummaryWriter
import timm
from timm.models.layers import trunc_normal_
from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import utils.lr_decay as lrd
import utils.misc as misc
from utils.pos_embed import interpolate_pos_embed
from utils.misc import NativeScalerWithGradNormCount as NativeScaler
import models.models_vit as models_vit
import models.models_swinTrans as models_swinTrans
from esc50.engine_run import train_one_epoch, evaluate
from esc50.dataset import get_training_set, get_test_set
def get_args_parser():
parser = argparse.ArgumentParser('ESC50 classification', add_help=False)
parser.add_argument('--batch_size', default=64, type=int,
help='Batch size per GPU (effective batch size is batch_size * accum_iter * # gpus')
parser.add_argument('--epochs', default=80, type=int)
parser.add_argument('--accum_iter', default=1, type=int,
help='Accumulate gradient iterations (for increasing the effective batch size under memory constraints)')
parser.add_argument('--print_freq', default=100, type=int)
# Model parameters
parser.add_argument('--model', default='vit_base_patch16', type=str, metavar='MODEL',
help='Name of model to train')
parser.add_argument('--model_type', default='vit', type=str, metavar='MODEL',
help='Type of model to train (vit or swin)')
parser.add_argument('--input_size', default=224, type=int,
help='images input size')
parser.add_argument('--drop_path', type=float, default=0.1, metavar='PCT',
help='Drop path rate (default: 0.1)')
# Optimizer parameters
parser.add_argument('--clip_grad', type=float, default=None, metavar='NORM',
help='Clip gradient norm (default: None, no clipping)')
parser.add_argument('--weight_decay', type=float, default=0.05,
help='weight decay (default: 0.05)')
parser.add_argument('--lr', type=float, default=None, metavar='LR',
help='learning rate (absolute lr)')
parser.add_argument('--blr', type=float, default=1e-3, metavar='LR',
help='base learning rate: absolute_lr = base_lr * total_batch_size / 256')
parser.add_argument('--layer_decay', type=float, default=0.75,
help='layer-wise lr decay from ELECTRA/BEiT')
parser.add_argument('--min_lr', type=float, default=1e-6, metavar='LR',
help='lower lr bound for cyclic schedulers that hit 0')
parser.add_argument('--warmup_epochs', type=int, default=5, metavar='N',
help='epochs to warmup LR')
# Augmentation parameters
parser.add_argument('--color_jitter', type=float, default=None, metavar='PCT',
help='Color jitter factor (enabled only when not using Auto/RandAug)')
    parser.add_argument('--aa', type=str, default='rand-m9-mstd0.5-inc1', metavar='NAME',
                        help='Use AutoAugment policy: "v0" or "original" (default: rand-m9-mstd0.5-inc1)')
parser.add_argument('--smoothing', type=float, default=0.1,
help='Label smoothing (default: 0.1)')
# * Random Erase params
parser.add_argument('--reprob', type=float, default=0.25, metavar='PCT',
help='Random erase prob (default: 0.25)')
parser.add_argument('--remode', type=str, default='pixel',
help='Random erase mode (default: "pixel")')
parser.add_argument('--recount', type=int, default=1,
help='Random erase count (default: 1)')
parser.add_argument('--resplit', action='store_true', default=False,
help='Do not random erase first (clean) augmentation split')
# * Finetuning params
parser.add_argument('--finetune', default='',
help='finetune from checkpoint')
parser.add_argument('--global_pool', action='store_true')
parser.set_defaults(global_pool=True)
parser.add_argument('--cls_token', action='store_false', dest='global_pool',
help='Use class token instead of global pool for classification')
# Dataset parameters
parser.add_argument('--data_path_fold1', default='./esc50/data/esc50_mp3_fold1.hdf', type=str,
help='dataset path fold1')
parser.add_argument('--data_path_fold2', default='./esc50/data/esc50_mp3_fold2.hdf', type=str,
help='dataset path fold2')
parser.add_argument('--data_path_fold3', default='./esc50/data/esc50_mp3_fold3.hdf', type=str,
help='dataset path fold3')
parser.add_argument('--data_path_fold4', default='./esc50/data/esc50_mp3_fold4.hdf', type=str,
help='dataset path fold4')
parser.add_argument('--data_path_fold5', default='./esc50/data/esc50_mp3_fold5.hdf', type=str,
help='dataset path fold5')
parser.add_argument('--norm_file', default='./esc50/mean_std_128.npy', type=str,
help='norm file path')
parser.add_argument('--sample_rate', default=32000, type=int)
parser.add_argument('--clip_length', default=10, type=int)
parser.add_argument('--augment', default=True, type=bool)
parser.add_argument('--in_mem', default=False, type=bool)
parser.add_argument('--extra_augment', default=True, type=bool)
parser.add_argument('--roll', default=True, type=bool)
parser.add_argument('--wavmix', default=True, type=bool)
parser.add_argument('--specmix', default=True, type=bool)
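    # Note: argparse's type=bool treats any non-empty string (including
    # "False") as True, so the boolean flags above are effectively
    # enable-only from the command line unless their defaults are edited.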
parser.add_argument('--mixup_alpha', default=0.3, type=float)
parser.add_argument('--nb_classes', default=50, type=int,
help='number of the classification types')
parser.add_argument('--u_patchout', default=100, type=int,
help='number of masked patches')
parser.add_argument('--target_size', default=(128,500), type=tuple,
help='target size')
parser.add_argument('--output_dir', default='./esc50/output_dir',
help='path where to save, empty for no saving')
parser.add_argument('--log_dir', default='../esc50/log_dir',
help='path where to tensorboard log')
parser.add_argument('--device', default='cuda',
help='device to use for training / testing')
parser.add_argument('--seed', default=42, type=int)
parser.add_argument('--resume', default='',
help='resume from checkpoint')
parser.add_argument('--resume_dir', default='',
help='resume dir')
parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
help='start epoch')
parser.add_argument('--eval', action='store_true',
help='Perform evaluation only')
    parser.add_argument('--dist_eval', action='store_true', default=False,
                        help='Enable distributed evaluation (recommended during training for faster monitoring)')
parser.add_argument('--num_workers', default=16, type=int)
parser.add_argument('--pin_mem', action='store_true',
help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
parser.add_argument('--no_pin_mem', action='store_false', dest='pin_mem')
parser.set_defaults(pin_mem=False)
# distributed training parameters
parser.add_argument('--world_size', default=1, type=int,
help='number of distributed processes')
parser.add_argument('--local_rank', default=-1, type=int)
parser.add_argument('--dist_on_itp', action='store_true')
parser.add_argument('--dist_url', default='env://',
help='url used to set up distributed training')
return parser
def main(args):
misc.init_distributed_mode(args)
print('job dir: {}'.format(os.path.dirname(os.path.realpath(__file__))))
print("{}".format(args).replace(', ', ',\n'))
device = torch.device(args.device)
# fix the seed for reproducibility
seed = args.seed + misc.get_rank()
torch.manual_seed(seed)
np.random.seed(seed)
if args.resume_dir and not args.resume:
tag = ''
for root, dirs, files in os.walk(args.resume_dir, topdown=False):
for name in files:
if name[-3:] == 'pth':
if not tag:
tag = os.path.join(root, name)
elif int(name.split('checkpoint-')[1].split('.pth')[0]) > int(tag.split('checkpoint-')[1].split('.pth')[0]):
tag = os.path.join(root, name)
args.resume = tag
cudnn.benchmark = True
    # Set up datasets and dataloaders
    num_tasks = misc.get_world_size()
global_rank = misc.get_rank()
train_hdf5_lists = [
args.data_path_fold1,
args.data_path_fold2,
args.data_path_fold3,
args.data_path_fold4
]
eval_hdf5 = args.data_path_fold5
dataset_train = get_training_set(
train_hdf5_lists=train_hdf5_lists,
sample_rate=args.sample_rate,
clip_length=args.clip_length,
augment=args.augment,
in_mem=args.in_mem,
extra_augment=args.extra_augment,
roll=args.roll,
wavmix=args.wavmix)
sampler_train = torch.utils.data.DistributedSampler(
dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
)
print("Sampler_train = %s" % str(sampler_train))
dataset_val = get_test_set(
eval_hdf5=eval_hdf5,
sample_rate=args.sample_rate,
clip_length=args.clip_length)
if args.dist_eval:
if len(dataset_val) % num_tasks != 0:
print('Warning: Enabling distributed evaluation with an eval dataset not divisible by process number. '
'This will slightly alter validation results as extra duplicate entries are added to achieve '
'equal num of samples per-process.')
sampler_val = torch.utils.data.DistributedSampler(
dataset_val, num_replicas=num_tasks, rank=global_rank, shuffle=True) # shuffle=True to reduce monitor bias
else:
sampler_val = torch.utils.data.SequentialSampler(dataset_val)
print("Sampler_val = %s" % str(sampler_val))
if global_rank == 0 and args.log_dir is not None and not args.eval:
os.makedirs(args.log_dir, exist_ok=True)
log_writer = SummaryWriter(log_dir=args.log_dir)
else:
log_writer = None
data_loader_train = torch.utils.data.DataLoader(
dataset_train, sampler=sampler_train,
batch_size=args.batch_size,
num_workers=args.num_workers,
pin_memory=args.pin_mem,
drop_last=True,
)
data_loader_val = torch.utils.data.DataLoader(
dataset_val, sampler=sampler_val,
batch_size=args.batch_size,
num_workers=args.num_workers,
pin_memory=args.pin_mem,
drop_last=False
)
    assert args.model_type in ('vit', 'swin'), "Only vit and swin models are supported."
if args.model_type == 'vit':
model = models_vit.__dict__[args.model](
num_classes=args.nb_classes,
drop_path_rate=args.drop_path,
global_pool=args.global_pool,
norm_file=args.norm_file,
u_patchout=args.u_patchout,
target_size=args.target_size
)
elif args.model_type == 'swin':
model = models_swinTrans.__dict__[args.model](
num_classes=args.nb_classes,
drop_path_rate=args.drop_path,
norm_file=args.norm_file
)
if args.finetune and not args.eval:
checkpoint = torch.load(args.finetune, map_location='cpu')
print("Load pre-trained checkpoint from: %s" % args.finetune)
checkpoint_model = checkpoint['model']
state_dict = model.state_dict()
for k in ['head.weight', 'head.bias']:
if k in checkpoint_model and checkpoint_model[k].shape != state_dict[k].shape:
print(f"Removing key {k} from pretrained checkpoint")
del checkpoint_model[k]
# interpolate position embedding
if args.model_type == 'vit':
interpolate_pos_embed(model, checkpoint_model)
# load pre-trained model
msg = model.load_state_dict(checkpoint_model, strict=False)
print(msg)
if args.global_pool and args.model_type == 'vit':
assert set(msg.missing_keys) == {'head.weight', 'head.bias'} or set(msg.missing_keys) == {'head.weight', 'head.bias', 'fc_norm.weight', 'fc_norm.bias'}
# manually initialize fc layer
trunc_normal_(model.head.weight, std=2e-5)
model.to(device)
model_without_ddp = model
n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
print("Model = %s" % str(model_without_ddp))
print('number of params (M): %.2f' % (n_parameters / 1.e6))
eff_batch_size = args.batch_size * args.accum_iter * misc.get_world_size()
if args.lr is None: # only base_lr is specified
args.lr = args.blr * eff_batch_size / 256
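        # Linear scaling rule: the absolute lr grows with the effective
        # batch size, normalized to a reference batch size of 256.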
print("base lr: %.2e" % (args.lr * 256 / eff_batch_size))
print("actual lr: %.2e" % args.lr)
print("accumulate grad iterations: %d" % args.accum_iter)
print("effective batch size: %d" % eff_batch_size)
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
model_without_ddp = model.module
# build optimizer with layer-wise lr decay (lrd)
param_groups = lrd.param_groups_lrd(model_without_ddp, args, args.weight_decay,
no_weight_decay_list=model_without_ddp.no_weight_decay(),
layer_decay=args.layer_decay
)
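    # Layer-wise lr decay scales each transformer block's lr by
    # layer_decay**(distance from the head), so generic early layers are
    # updated more conservatively than the classifier during fine-tuning.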
optimizer = torch.optim.AdamW(param_groups, lr=args.lr)
loss_scaler = NativeScaler()
if args.specmix or args.wavmix:
# smoothing is handled with mixup label transform
criterion = SoftTargetCrossEntropy()
elif args.smoothing > 0.:
criterion = LabelSmoothingCrossEntropy(smoothing=args.smoothing)
else:
criterion = torch.nn.CrossEntropyLoss()
print("criterion = %s" % str(criterion))
misc.load_model(args=args, model_without_ddp=model_without_ddp, optimizer=optimizer, loss_scaler=loss_scaler)
if args.eval:
test_stats = evaluate(data_loader_val, model, device)
print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
exit(0)
print(f"Start training for {args.epochs} epochs")
start_time = time.time()
max_accuracy = 0.0
for epoch in range(args.start_epoch, args.epochs):
if args.distributed:
data_loader_train.sampler.set_epoch(epoch)
train_stats = train_one_epoch(
model, criterion, data_loader_train,
optimizer, device, epoch, loss_scaler,
args.clip_grad,
log_writer=log_writer,
args=args
)
if args.output_dir and epoch % 5 == 0:
misc.save_model(
args=args, model=model, model_without_ddp=model_without_ddp, optimizer=optimizer,
loss_scaler=loss_scaler, epoch=epoch)
test_stats = evaluate(data_loader_val, model, device)
print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
max_accuracy = max(max_accuracy, test_stats["acc1"])
print(f'Max accuracy: {max_accuracy:.2f}%')
if log_writer is not None:
log_writer.add_scalar('perf/test_acc1', test_stats['acc1'], epoch)
log_writer.add_scalar('perf/test_acc5', test_stats['acc5'], epoch)
log_writer.add_scalar('perf/test_loss', test_stats['loss'], epoch)
log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
**{f'test_{k}': v for k, v in test_stats.items()},
'epoch': epoch,
'n_parameters': n_parameters}
if args.output_dir and misc.is_main_process():
if log_writer is not None:
log_writer.flush()
with open(os.path.join(args.output_dir, "log.txt"), mode="a", encoding="utf-8") as f:
f.write(json.dumps(log_stats) + "\n")
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print('Training time {}'.format(total_time_str))
if __name__ == '__main__':
args = get_args_parser()
args = args.parse_args()
if args.output_dir:
Path(args.output_dir).mkdir(parents=True, exist_ok=True)
main(args)
|
py | 7dfe70bac210ea8f979929c4765ccfaedfc4761c | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# pyre-strict
from typing import Optional
import libcst as cst
from libcst.helpers import (
ensure_type,
get_absolute_module_for_import,
get_absolute_module_for_import_or_raise,
)
from libcst.testing.utils import UnitTest, data_provider
class StatementTest(UnitTest):
@data_provider(
(
# Simple imports that are already absolute.
(None, "from a.b import c", "a.b"),
("x.y.z", "from a.b import c", "a.b"),
# Relative import that can't be resolved due to missing module.
(None, "from ..w import c", None),
# Relative import that goes past the module level.
("x", "from ...y import z", None),
("x.y.z", "from .....w import c", None),
("x.y.z", "from ... import c", None),
# Correct resolution of absolute from relative modules.
("x.y.z", "from . import c", "x.y"),
("x.y.z", "from .. import c", "x"),
("x.y.z", "from .w import c", "x.y.w"),
("x.y.z", "from ..w import c", "x.w"),
("x.y.z", "from ...w import c", "w"),
)
)
def test_get_absolute_module(
self, module: Optional[str], importfrom: str, output: Optional[str],
) -> None:
node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine)
assert len(node.body) == 1, "Unexpected number of statements!"
import_node = ensure_type(node.body[0], cst.ImportFrom)
self.assertEqual(get_absolute_module_for_import(module, import_node), output)
if output is None:
with self.assertRaises(Exception):
get_absolute_module_for_import_or_raise(module, import_node)
else:
self.assertEqual(
get_absolute_module_for_import_or_raise(module, import_node), output
)
@data_provider(
(
# Nodes without an asname
(cst.ImportAlias(name=cst.Name("foo")), "foo", None),
(
cst.ImportAlias(name=cst.Attribute(cst.Name("foo"), cst.Name("bar"))),
"foo.bar",
None,
),
# Nodes with an asname
(
cst.ImportAlias(
name=cst.Name("foo"), asname=cst.AsName(name=cst.Name("baz"))
),
"foo",
"baz",
),
(
cst.ImportAlias(
name=cst.Attribute(cst.Name("foo"), cst.Name("bar")),
asname=cst.AsName(name=cst.Name("baz")),
),
"foo.bar",
"baz",
),
)
)
def test_importalias_helpers(
self, alias_node: cst.ImportAlias, full_name: str, alias: Optional[str]
) -> None:
self.assertEqual(alias_node.evaluated_name, full_name)
self.assertEqual(alias_node.evaluated_alias, alias)
|
py | 7dfe70c8b81272d4dc11b055701467b494dc5c2f | from .executor import Executor
from .pre_request import PreRequest |
py | 7dfe70f9eea83b24282c4923e6f4f30c79eca0e0 | import tempfile
import pytest
import torch
from lhotse import CutSet
from lhotse.dataset import GlobalMVN, RandomizedSmoothing, SpecAugment
from lhotse.dataset.collation import collate_features
@pytest.fixture
def global_mvn():
cuts = CutSet.from_json('test/fixtures/ljspeech/cuts.json')
return GlobalMVN.from_cuts(cuts)
def test_global_mvn_initialization_and_stats_saving(global_mvn):
with tempfile.NamedTemporaryFile() as tf:
global_mvn.to_file(tf.name)
global_mvn2 = GlobalMVN.from_file(tf.name)
for key_item_1, key_item_2 in zip(
global_mvn.state_dict().items(),
global_mvn2.state_dict().items()
):
assert torch.equal(key_item_1[1], key_item_2[1])
@pytest.mark.parametrize(
"in_tensor", [torch.ones(10, 40), torch.ones(2, 10, 40)]
)
def test_global_mvn_shapes(global_mvn, in_tensor):
assert global_mvn(in_tensor).shape == in_tensor.shape
assert global_mvn.inverse(in_tensor).shape == in_tensor.shape
@pytest.mark.parametrize(
"in_tensor", [torch.ones(10, 40), torch.ones(2, 10, 40)]
)
def test_global_mvn_inverse(global_mvn, in_tensor):
out_tensor = global_mvn(in_tensor)
assert torch.allclose(in_tensor, global_mvn.inverse(out_tensor))
def test_global_mvn_from_cuts():
cuts = CutSet.from_json('test/fixtures/ljspeech/cuts.json')
stats1 = GlobalMVN.from_cuts(cuts)
stats2 = GlobalMVN.from_cuts(cuts, max_cuts=1)
assert isinstance(stats1, GlobalMVN)
assert isinstance(stats2, GlobalMVN)
def test_specaugment_single():
cuts = CutSet.from_json('test/fixtures/ljspeech/cuts.json')
feats = torch.from_numpy(cuts[0].load_features())
tfnm = SpecAugment(p=1.0, time_warp_factor=10)
    # SpecAugment expects a batched (3-D) input, so a single 2-D example
    # should be rejected with an AssertionError.
    with pytest.raises(AssertionError):
        tfnm(feats)
@pytest.mark.parametrize('num_feature_masks', [0, 1, 2])
@pytest.mark.parametrize('num_frame_masks', [0, 1, 2])
def test_specaugment_batch(num_feature_masks, num_frame_masks):
cuts = CutSet.from_json('test/fixtures/ljspeech/cuts.json')
feats, feat_lens = collate_features(cuts)
tfnm = SpecAugment(
p=1.0,
time_warp_factor=10,
features_mask_size=5,
frames_mask_size=20,
num_feature_masks=num_feature_masks,
num_frame_masks=num_frame_masks
)
augmented = tfnm(feats)
assert (feats != augmented).any()
@pytest.mark.parametrize('sample_sigma', [True, False])
def test_randomized_smoothing(sample_sigma):
audio = torch.zeros(64, 4000, dtype=torch.float32)
tfnm = RandomizedSmoothing(sigma=0.1, sample_sigma=sample_sigma, p=0.8)
audio_aug = tfnm(audio)
# Shapes are the same
assert audio.shape == audio_aug.shape
# All samples are different than the input audio
assert (audio != audio_aug).any()
# Different batch samples receive different augmentation:
# we sum along the time axis and compare the summed values;
# if all examples got the same augmentation,
# there would have been just one unique value.
assert len(set(audio_aug.sum(dim=1).tolist())) > 1
def test_randomized_smoothing_p1():
audio = torch.zeros(64, 4000, dtype=torch.float32)
tfnm = RandomizedSmoothing(sigma=0.1, p=1.0)
audio_aug = tfnm(audio)
# Shapes are the same
assert audio.shape == audio_aug.shape
# Some (most) samples are different than the input audio
assert (audio != audio_aug).any()
# Different batch samples receive different augmentation
assert (audio_aug[0] != audio_aug[1]).any()
def test_randomized_smoothing_p0():
audio = torch.zeros(64, 4000, dtype=torch.float32)
tfnm = RandomizedSmoothing(sigma=0.1, p=0.0)
audio_aug = tfnm(audio)
# Shapes are the same
assert audio.shape == audio_aug.shape
    # Audio is unaffected
assert (audio == audio_aug).all()
    # Audio is unaffected across batches
assert (audio_aug[0] == audio_aug[1]).all()
def test_randomized_smoothing_schedule():
audio = torch.zeros(16, 16000, dtype=torch.float32)
tfnm = RandomizedSmoothing(
sigma=[
(0, 0.01),
(100, 0.5)
],
p=0.8
)
audio_aug = tfnm(audio)
# Shapes are the same
assert audio.shape == audio_aug.shape
# All samples are different than the input audio
assert (audio != audio_aug).any()
# Different batch samples receive different augmentation:
# we sum along the time axis and compare the summed values;
# if all examples got the same augmentation,
# there would have been just one unique value.
assert len(set(audio_aug.sum(dim=1).tolist())) > 1
tfnm.step = 1000
audio_aug2 = tfnm(audio)
# The schedule kicked in and the abs magnitudes should be larger.
assert audio_aug2.abs().sum() > audio_aug.abs().sum()
|
py | 7dfe7118e38040919c01b857cfb0b4cc5e2eb408 | import os
import boto3
def lambda_handler(event, context):
STEPFUNCTION_ARN = os.environ["STEPFUNCTION_ARN"]
print(event)
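    # The event appears to be a DynamoDB stream batch; records can mix
    # INSERT/MODIFY/REMOVE, and the step function should only run when at
    # least one new item was inserted.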
insert_event = [i for i in event["Records"] if i["eventName"] == "INSERT"]
    if not insert_event:
print("Skipping trigger lambda for non INSERT event")
return
client = boto3.client('stepfunctions')
response = client.start_execution(
stateMachineArn=STEPFUNCTION_ARN
)
return True
|
py | 7dfe71e75d035a3ec9ce9f44f5ee743e56ff901a | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_selonian_f_07.iff"
result.attribute_template_id = 9
result.stfName("npc_name","selonian_base_female")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
py | 7dfe732608ae71e2465f2f8a8c33924a3d7ce344 | """
Copyright 2017-present, Airbnb Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from mock import Mock, patch
from moto import mock_ssm
from nose.tools import assert_equal, assert_false, assert_items_equal
from stream_alert.apps._apps.onelogin import OneLoginApp
from tests.unit.stream_alert_apps.test_helpers import (
get_event,
get_mock_context,
put_mock_params
)
@mock_ssm
@patch.object(OneLoginApp, 'type', Mock(return_value='type'))
class TestOneLoginApp(object):
"""Test class for the OneLoginApp"""
# pylint: disable=protected-access
@patch.dict(os.environ, {'AWS_DEFAULT_REGION': 'us-east-1'})
def setup(self):
"""Setup before each method"""
# pylint: disable=attribute-defined-outside-init
self._test_app_name = 'onelogin'
put_mock_params(self._test_app_name)
self._event = get_event(self._test_app_name)
self._context = get_mock_context(self._test_app_name)
self._app = OneLoginApp(self._event, self._context)
def set_config_values(self, region, client_id, client_secret):
"""Helper function to setup the auth values"""
self._app._config.auth['region'] = region
self._app._config.auth['client_id'] = client_id
self._app._config.auth['client_secret'] = client_secret
@patch('requests.post')
def test_generate_headers_bad_response(self, requests_mock):
"""OneLoginApp - Generate Headers, Bad Response"""
self.set_config_values('us', 'bad_id', 'bad_secret')
requests_mock.return_value = Mock(
status_code=404,
json=Mock(return_value={'message': 'something went wrong'})
)
assert_false(self._app._generate_headers())
@patch('requests.post')
def test_generate_headers_empty_response(self, requests_mock):
"""OneLoginApp - Generate Headers, Empty Response"""
requests_mock.return_value = Mock(
status_code=200,
json=Mock(return_value=None)
)
assert_false(self._app._generate_headers())
@patch('requests.post')
def test_generate_headers(self, requests_mock):
"""OneLoginApp - Generate Headers"""
requests_mock.return_value = Mock(
status_code=200,
json=Mock(return_value={'access_token': 'this_is_a_token'})
)
self._app._generate_headers()
assert_equal(self._app._auth_headers['Authorization'], 'bearer:this_is_a_token')
def test_sleep(self):
"""OneLoginApp - Sleep Seconds"""
self._app._poll_count = 1
assert_equal(self._app._sleep_seconds(), 0)
self._app._poll_count = 200
assert_equal(self._app._sleep_seconds(), 0)
def test_required_auth_info(self):
"""OneLoginApp - Required Auth Info"""
assert_items_equal(self._app.required_auth_info().keys(),
{'region', 'client_secret', 'client_id'})
@staticmethod
def _get_sample_events(count, next_link):
"""Helper function for returning sample onelogin events"""
event = {
'id': 123,
'created_at': '2017-10-05T18:11:32Z',
'account_id': 1234,
'user_id': 321,
'event_type_id': 4321,
'notes': 'Notes',
'ipaddr': '0.0.0.0',
'actor_user_id': 987,
'assuming_acting_user_id': 654,
'role_id': 456,
'app_id': 123456,
'group_id': 98765,
'otp_device_id': 11111,
'policy_id': 22222,
'actor_system': 'System',
'custom_message': 'Message',
'role_name': 'Role',
'app_name': 'App Name',
'group_name': 'Group Name',
'actor_user_name': '',
'user_name': 'username',
'policy_name': 'Policy Name',
'otp_device_name': 'OTP Device Name',
'operation_name': 'Operation Name',
'directory_sync_run_id': 7777,
'directory_id': 6666,
'resolution': 'Resolved',
'client_id': 11223344,
'resource_type_id': 44332211,
'error_description': 'ERROR ERROR'
}
data = [event] * count
return {'data': data, 'pagination': {'next_link': next_link}}
def test_get_onelogin_events_no_headers(self):
"""OneLoginApp - Get OneLogin Events, No Headers"""
assert_false(self._app._get_onelogin_events())
@patch('requests.get')
def test_get_onelogin_events_bad_response(self, requests_mock):
"""OneLoginApp - Get OneLogin Events, Bad Response"""
self._app._auth_headers = True
requests_mock.return_value = Mock(
status_code=404,
json=Mock(return_value={'message': 'something went wrong'})
)
assert_false(self._app._get_onelogin_events())
@patch('requests.get')
def test_get_onelogin_events_empty_response(self, requests_mock):
"""OneLoginApp - Get OneLogin Events, Empty Response"""
self._app._auth_headers = True
requests_mock.return_value = Mock(
status_code=200,
json=Mock(return_value=None)
)
assert_false(self._app._get_onelogin_events())
@patch('requests.get')
def test_get_onelogin_events_rate_limited(self, requests_mock):
"""OneLoginApp - Get OneLogin Events, Rate Limited"""
self._app._auth_headers = True
self._app._rate_limit_sleep = 1
err_limit_response = Mock(
status_code=400,
json=Mock(return_value={
'message': 'something went wrong',
'status': {'code': 400, 'message': 'rate_limit_exceeded'}
})
)
ok_limit_response = Mock(
status_code=200,
json=Mock(return_value={
'data': {'X-RateLimit-Reset': 123}
})
)
requests_mock.side_effect = [err_limit_response, ok_limit_response]
assert_false(self._app._get_onelogin_events())
assert_equal(self._app._rate_limit_sleep, 123)
@patch('requests.get')
def test_get_onelogin_events_empty_data(self, requests_mock):
"""OneLoginApp - Get OneLogin Events, Empty Data"""
self._app._auth_headers = True
requests_mock.return_value = Mock(
status_code=200,
json=Mock(return_value={'data': [], 'pagination': {'next_link': 'not'}})
)
assert_false(self._app._get_onelogin_events())
@patch('requests.post')
def test_gather_logs_no_headers(self, requests_mock):
"""OneLoginApp - Gather Events Entry Point, No Headers"""
self.set_config_values('us', 'bad_id', 'bad_secret')
requests_mock.return_value = Mock(
status_code=404,
json=Mock(return_value={'message': 'something went wrong'})
)
assert_false(self._app._gather_logs())
@patch('requests.get')
def test_gather_logs_no_pagination(self, requests_mock):
"""OneLoginApp - Gather Events Entry Point, No Pagination"""
logs = self._get_sample_events(5, None)
self._app._auth_headers = True
self._app._next_page_url = None
self._app._last_timestamp = 1507698237
requests_mock.return_value = Mock(
status_code=200,
json=Mock(side_effect=[logs])
)
events = self._app._gather_logs()
assert_equal(len(logs['data']), len(events))
assert_equal(logs['pagination']['next_link'], self._app._next_page_url)
@patch('requests.get')
def test_get_onelogin_get_events_without_pagination(self, requests_mock):
"""OneLoginApp - Get Events Without Pagination"""
pagination = None
logs = self._get_sample_events(2, pagination)
self._app._auth_headers = True
self._app._next_page_url = pagination
self._app._last_timestamp = 1507698237
requests_mock.return_value = Mock(
status_code=200,
json=Mock(side_effect=[logs])
)
events = self._app._get_onelogin_events()
assert_equal(len(logs['data']), len(events))
assert_equal(logs['pagination']['next_link'], self._app._next_page_url)
@patch('requests.get')
def test_get_onelogin_get_events_with_pagination(self, requests_mock):
"""OneLoginApp - Get Events With Pagination"""
next_link = 'https://next_link'
logs = self._get_sample_events(3, next_link)
self._app._auth_headers = True
self._app._next_page_url = next_link
requests_mock.return_value = Mock(
status_code=200,
json=Mock(side_effect=[logs])
)
events = self._app._get_onelogin_events()
assert_equal(len(logs['data']), len(events))
assert_equal(logs['pagination']['next_link'], self._app._next_page_url)
@patch('requests.get')
def test_set_onelogin_rate_limit_sleep(self, requests_mock):
"""OneLoginApp - Set OneLogin Rate Limit Sleep"""
self._app._auth_headers = True
self._app._rate_limit_sleep = 0
new_rate_limit_sleep = 123
requests_mock.return_value = Mock(
status_code=200,
json=Mock(return_value={'data': {'X-RateLimit-Reset': new_rate_limit_sleep}})
)
self._app._set_rate_limit_sleep()
assert_equal(self._app._rate_limit_sleep, new_rate_limit_sleep)
def test_set_onelogin_rate_limit_sleep_no_headers(self):
"""OneLoginApp - Set OneLogin Rate Limit Sleep, No Headers"""
self._app._auth_headers = None
self._app._rate_limit_sleep = 1
self._app._set_rate_limit_sleep()
assert_equal(self._app._rate_limit_sleep, 0)
@patch('requests.get')
def test_set_onelogin_rate_limit_sleep_bad_response(self, requests_mock):
"""OneLoginApp - Set OneLogin Rate Limit Sleep, Bad Response"""
self._app._auth_headers = True
self._app._rate_limit_sleep = 1
requests_mock.return_value = Mock(
status_code=403,
json=Mock(return_value={'message': 'something went wrong'})
)
self._app._set_rate_limit_sleep()
assert_equal(self._app._rate_limit_sleep, 0)
@patch('requests.get')
def test_set_onelogin_rate_limit_sleep_empty_response(self, requests_mock):
"""OneLoginApp - Set OneLogin Rate Limit Sleep, Empty Response"""
self._app._auth_headers = True
self._app._rate_limit_sleep = 1
requests_mock.return_value = Mock(
status_code=200,
json=Mock(return_value=None)
)
self._app._set_rate_limit_sleep()
assert_equal(self._app._rate_limit_sleep, 0)
def test_onelogin_events_endpoint(self):
"""OneLoginApp - Verify Events Endpoint"""
assert_equal(self._app._events_endpoint(), 'https://api.us.onelogin.com/api/1/events')
def test_onelogin_token_endpoint(self):
"""OneLoginApp - Verify Token Endpoint"""
assert_equal(self._app._token_endpoint(),
'https://api.us.onelogin.com/auth/oauth2/v2/token')
def test_onelogin_events_type():
"""OneLoginApp - Verify Events Type"""
# pylint: disable=protected-access
assert_equal(OneLoginApp._type(), 'events')
def test_onelogin_event_service():
"""OneLoginApp - Verify Service"""
assert_equal(OneLoginApp.service(), 'onelogin')
|
py | 7dfe733cff1f8c71ebce2ffd8a00f80db19e303d | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('polls/', include('polls.urls', namespace="polls")),
]
|
py | 7dfe73be42b67b759a0cc59e391a0b5ea50c8e3d |
import sys
# overwrite to redirect output to file for instance
out = sys.stdout
verbose = False
cliplines = True
def split_len(seq, length):
return [seq[i:i+length] for i in range(0, len(seq), length)]
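# For example, split_len('abcdefgh', 3) returns ['abc', 'def', 'gh'].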
def output(intro, text):
lines = text.split('\n')
n = len(intro)
m = 79 - n
if cliplines:
clippedlines = []
for l in lines:
clippedlines.extend(split_len(l, m))
else:
clippedlines = lines
out.write(intro)
out.write(clippedlines[0])
out.write('\n')
for l in clippedlines[1:]:
out.write(" "*n)
out.write(l)
out.write('\n')
def seperator():
out.write('\n')
def debug(text):
if verbose:
output("[Debug] ", text)
def info(text):
output("[Info] ", text)
def warning(text):
output("[Warning] ", text)
def error(text):
output("[Error] ", text)
class FatalError(Exception):
pass
def fatal(text):
output("[Fatal] ", text)
raise FatalError()
|
py | 7dfe743ba54d96642b7a45dfb1dd515117441992 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_param_EtherBus')).lstrip('.')
try:
return importlib.import_module(mname)
except ImportError:
return importlib.import_module('_param_EtherBus')
_param_EtherBus = swig_import_helper()
del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_param_EtherBus', [dirname(__file__)])
except ImportError:
import _param_EtherBus
return _param_EtherBus
try:
_mod = imp.load_module('_param_EtherBus', fp, pathname, description)
finally:
if fp is not None:
fp.close()
return _mod
_param_EtherBus = swig_import_helper()
del swig_import_helper
else:
import _param_EtherBus
del _swig_python_version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
object.__setattr__(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_method(set):
def set_attr(self, name, value):
if (name == "thisown"):
return self.this.own(value)
if hasattr(self, name) or (name == "this"):
set(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import m5.internal.param_EtherDump
import m5.internal.param_SimObject
import m5.internal.drain
import m5.internal.serialize
import m5.internal.param_EtherObject
class EtherBus(m5.internal.param_EtherObject.EtherObject):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
EtherBus_swigregister = _param_EtherBus.EtherBus_swigregister
EtherBus_swigregister(EtherBus)
class EtherBusParams(m5.internal.param_EtherObject.EtherObjectParams):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def create(self):
return _param_EtherBus.EtherBusParams_create(self)
dump = _swig_property(_param_EtherBus.EtherBusParams_dump_get, _param_EtherBus.EtherBusParams_dump_set)
loopback = _swig_property(_param_EtherBus.EtherBusParams_loopback_get, _param_EtherBus.EtherBusParams_loopback_set)
speed = _swig_property(_param_EtherBus.EtherBusParams_speed_get, _param_EtherBus.EtherBusParams_speed_set)
def __init__(self):
this = _param_EtherBus.new_EtherBusParams()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _param_EtherBus.delete_EtherBusParams
__del__ = lambda self: None
EtherBusParams_swigregister = _param_EtherBus.EtherBusParams_swigregister
EtherBusParams_swigregister(EtherBusParams)
|
py | 7dfe743bcee516ce6449358d595c84dea191f5da | # Copyright (c) 2015-2016 Claudiu Popa <[email protected]>
# Copyright (c) 2018 Anthony Sottile <[email protected]>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""Text formatting drivers for ureports"""
from __future__ import print_function
from pylint.reporters.ureports import BaseWriter
TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
BULLETS = ['*', '-']
class TextWriter(BaseWriter):
"""format layouts as text
(ReStructured inspiration but not totally handled yet)
"""
def begin_format(self):
super(TextWriter, self).begin_format()
self.list_level = 0
def visit_section(self, layout):
"""display a section as text
"""
self.section += 1
self.writeln()
self.format_children(layout)
self.section -= 1
self.writeln()
def visit_evaluationsection(self, layout):
"""Display an evaluation section as a text."""
self.section += 1
self.format_children(layout)
self.section -= 1
self.writeln()
def visit_title(self, layout):
title = ''.join(list(self.compute_content(layout)))
self.writeln(title)
try:
self.writeln(TITLE_UNDERLINES[self.section] * len(title))
except IndexError:
print("FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT")
def visit_paragraph(self, layout):
"""enter a paragraph"""
self.format_children(layout)
self.writeln()
def visit_table(self, layout):
"""display a table as text"""
table_content = self.get_table_content(layout)
# get columns width
cols_width = [0]*len(table_content[0])
for row in table_content:
for index, col in enumerate(row):
cols_width[index] = max(cols_width[index], len(col))
self.default_table(layout, table_content, cols_width)
self.writeln()
def default_table(self, layout, table_content, cols_width):
"""format a table"""
cols_width = [size+1 for size in cols_width]
format_strings = ' '.join(['%%-%ss'] * len(cols_width))
format_strings = format_strings % tuple(cols_width)
format_strings = format_strings.split(' ')
table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n'
headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n'
# FIXME: layout.cheaders
self.write(table_linesep)
for index, line in enumerate(table_content):
self.write('|')
for line_index, at_index in enumerate(line):
self.write(format_strings[line_index] % at_index)
self.write('|')
if index == 0 and layout.rheaders:
self.write(headsep)
else:
self.write(table_linesep)
def visit_verbatimtext(self, layout):
"""display a verbatim layout as text (so difficult ;)
"""
self.writeln('::\n')
for line in layout.data.splitlines():
self.writeln(' ' + line)
self.writeln()
def visit_text(self, layout):
"""add some text"""
self.write('%s' % layout.data)
|
py | 7dfe74d155e6ffd3d7fcb36445fa0766366322c5 | from django.shortcuts import get_object_or_404, render, redirect
from django.contrib.auth.decorators import login_required
from datetime import datetime
from core.models import Event
from core.forms import MakeEventForm
def get_quick_events(request):
"""Function to get 5 most close events to user"""
user_events = None
if request.user.is_authenticated:
user_events = Event.objects.filter(user=request.user, event_date__gte=datetime.today()).order_by('event_date')[:5]
event_num = len(user_events)
event_counter = ['One', 'Two', 'Three', 'Four', 'Five'][:event_num]
user_events = dict(zip(event_counter, user_events))
return user_events
def home(request):
return render(request, 'events/index.html', {'events': get_quick_events(request)})
@login_required
def make_event(request):
form = MakeEventForm(request.POST, request.FILES)
near_events = get_quick_events(request)
if request.method == 'GET':
return render(request, 'events/make_events.html', {'form': form, 'events': near_events})
else:
try:
form = MakeEventForm(request.POST, request.FILES)
if form.is_valid():
new_event = form.save(commit=False)
new_event.user = request.user
new_event.save()
return redirect('home')
            raise ValueError
except ValueError:
return render(request, 'events/make_events.html', {'form': form, 'events': near_events})
@login_required
def my_events(request):
if request.method == 'GET':
today = datetime.today()
events = Event.objects.filter(user=request.user, event_date__gte=today).order_by('event_date')
return render(request, 'events/my_events.html', {'events': events, 'today': today.date})
@login_required
def old_events(request):
if request.method == 'GET':
today = datetime.today()
events = Event.objects.filter(user=request.user, event_date__lt=today).order_by('-event_date')
return render(request, 'events/old_events.html', {'events': events, 'today': today.date})
@login_required
def del_event(request, event_id):
if request.method == 'POST':
Event.objects.get(id=event_id).delete()
return redirect('my_events')
@login_required
def detail_event(request, event_id):
event = get_object_or_404(Event, pk=event_id, user=request.user)
if request.method == 'GET':
return render(request, 'events/detail_event.html', {'event': event})
@login_required
def edit_event(request, event_id):
event = get_object_or_404(Event, pk=event_id, user=request.user)
if request.method == 'GET':
form = MakeEventForm(instance=event)
return render(request, 'events/edit_event.html', {'event': event, 'form': form})
else:
try:
form = MakeEventForm(request.POST, request.FILES, instance=event)
if form.is_valid():
form.save()
return redirect('my_events')
            raise ValueError
except ValueError:
return render(request, 'events/edit_event.html', {'event': event, 'form': form})
|
py | 7dfe75a6a736f0c757c4261951449b387d34ad7d | # Generated by Django 3.2.3 on 2021-05-19 21:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='CinemaHall',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='CPL',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('subtitles', models.CharField(max_length=200)),
('duration', models.IntegerField()),
('disk_size', models.IntegerField()),
],
),
migrations.CreateModel(
name='Effect',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('type', models.CharField(max_length=200)),
('duration', models.IntegerField()),
],
),
migrations.CreateModel(
name='KDM',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('not_valid_before', models.DateTimeField(blank=True)),
('not_valid_after', models.DateTimeField(blank=True)),
('KDMKey', models.IntegerField()),
],
),
migrations.CreateModel(
name='Projector',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('brand', models.CharField(max_length=200)),
('resolution', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='SoundSystem',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('technology', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='SPL',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('CPLs', models.ManyToManyField(blank=True, to='CinemaBase.CPL')),
('Effects', models.ManyToManyField(blank=True, to='CinemaBase.Effect')),
],
),
migrations.CreateModel(
name='Show',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('start_date', models.DateTimeField(blank=True)),
('SPLs', models.ManyToManyField(to='CinemaBase.SPL')),
('cinema_hall', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='CinemaBase.cinemahall')),
],
),
migrations.AddField(
model_name='cpl',
name='KDM',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='CinemaBase.kdm'),
),
migrations.AddField(
model_name='cinemahall',
name='projector',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='CinemaBase.projector'),
),
migrations.AddField(
model_name='cinemahall',
name='showings',
field=models.ManyToManyField(blank=True, null=True, to='CinemaBase.Show'),
),
migrations.AddField(
model_name='cinemahall',
name='sound_system',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='CinemaBase.soundsystem'),
),
]
|
py | 7dfe778d435bdeb2cb670be5e266c8cb19b293db | #!/usr/bin/python
# This script is a data logger for serial communications
# Note:
# You will need to use Python to install the pyserial library
# The command to do that is "pip install pyserial"
# You will need to set the serial port:
# On Windows, the serial com port will be COM3 or COM4 or something similar.
# On Linux, it's /dev/ttyUSB0 or some variation fo that
# Import Python libraries for handling datetime information and the serial library
from datetime import datetime
import serial
# Create an object linking the serial library interface to the serial communications port
serial_ob = serial.Serial(port="COM6",baudrate=9600) # Set this to your own serial COM port
# Set the file name with the date and time it was started
filename = datetime.now().strftime("datalog_%Y%m%d_%H%M%S")
# Define a function to decode the serial data
def stripline():
string = serial_ob.readline()
decoded_bytes = str(string[0:len(string)-2].decode("utf-8")) # This transforms the raw serial data to UTF-8 format
print(decoded_bytes) # Print the decoded values to the screen as well
return(decoded_bytes) # Otherwise, return the decoded serial data to original function call
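# Each CSV row written below has the form: YYYY-MM-DD HH:MM:SS,<serial line>,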
# Create a loop for reading and updating fresh data
try:
while True:
        serial_log = open(filename + '.csv', 'a') # Open the dated CSV in append mode (created if missing).
serial_log.write(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) # Use the YMD-HMS format for entries.
+str((",")) # Add a comma
+str(stripline()) # Call the stripline function
+str((",")) # Add anoter comma
+str(("\n"))) # Finish with a new line.
serial_log.close()
except KeyboardInterrupt:
pass
|
py | 7dfe785882bffd8dcb1dfcab7909f4d8c6d4f67b | from django.shortcuts import render_to_response, HttpResponse, render, redirect, get_list_or_404
from django.template import RequestContext
import json, httplib, urllib
from project.models import Projects
def index(request):
data = {
"projects" : get_list_or_404(Projects.objects.all().order_by("-submit_date"))
}
    return render(request, "index.html", data)
|
py | 7dfe78f7343f7e02c6c33bcbaeeb6cc4afeb85bb | """
WSGI config for djreact project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djreact.settings")
application = get_wsgi_application()
|
py | 7dfe7996affd02c560479c9c1004ee3d7b594397 | import os
import cmd
import sys
import spice
import random
import cStringIO
################################################################################
class Interface(cmd.Cmd):
def preloop(self):
'Setup the command prompt.'
self.prompt = '>>> '
self.intro = 'CS Profile Manager v2.1'
self.intro += '\n' + self.ruler * len(self.intro)
self.use_rawinput = False
self.cmdqueue.extend(sys.argv[1:])
try:
self.control = Profile_Manager('Profile.dat', 'save',
'Doukutsu.exe', 1478656, 260997856)
self.error = False
except Exception, reason:
self.reason = reason
self.error = True
def precmd(self, line):
'Look for Profile_Manager error.'
if self.error:
return 'quit'
return line
def postloop(self):
'Provide proper shutdown messages.'
if self.error:
self.stdout.write(self.reason.message)
else:
self.stdout.write('Goodbye.')
def do_shell(self, arg):
'shell <arg>\nPass argument to the command prompt.'
os.system(arg)
def do_save(self, arg):
'save <arg>\nSave profile by name or alias.'
try:
self.control.save(arg)
except Exception, reason:
self.stdout.write(reason.message + '\n')
def do_list(self, arg):
'list\nList profiles with their aliases.'
array = self.control.list()
if array:
for alias, name in enumerate(array):
self.stdout.write('(%s) %s\n' % (alias + 1, name))
else:
self.stdout.write('NO PROFILES LOADED\n')
def do_load(self, arg):
'load <arg>\nLoad profile by name or alias.'
try:
self.control.load(arg)
except Exception, reason:
self.stdout.write(reason.message + '\n')
def do_away(self, arg):
'away <arg>\nDelete profile by name or alias.'
try:
self.control.away(arg)
except Exception, reason:
self.stdout.write(reason.message + '\n')
def do_quit(self, arg):
'quit\nExit the profile manager.'
return True
def do_export(self, arg):
'export <arg>\nExport profiles to specified file.'
try:
self.control.export_(arg, 'Doukutsu Monogatari')
except Exception, reason:
self.stdout.write(reason.message + '\n')
def do_import(self, arg):
'import <arg>\nImport profiles from specified file.'
try:
self.control.import_(arg, 'Doukutsu Monogatari')
except Exception, reason:
self.stdout.write(reason.message + '\n')
################################################################################
class Profile_Manager:
STRING = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
def __init__(self, filename, savepath,
testfile=None, testsize=None, testhash=None):
'Initialize the Profile Manager object.'
if testfile is not None:
self.test(testfile, testsize, testhash)
self.filename = filename
self.savepath = savepath
self.autoload()
def test(self, testfile, testsize, testhash):
'Perform tests instructed by the caller.'
assert os.path.exists(testfile), '%r does not exist.' % testfile
assert os.path.isfile(testfile), '%r is not a file.' % testfile
if testsize is not None:
assert os.path.getsize(testfile) == testsize, \
'%r has an invalid size.' % testfile
if testhash is not None:
assert hash(file(testfile, 'rb').read()) == testhash, \
'%r has an invalid hash.' % testfile
def autoload(self):
'Automatically load available profiles.'
self.profiles = {}
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
else:
for path, dirs, data in os.walk(self.savepath):
for name in data:
self.autoread(os.path.join(path, name))
self.aliases = self.profiles.keys()
def autoread(self, pathname):
'Read in profiles from their pathnames.'
# Create the keys.
random.seed(int(os.path.getctime(pathname)))
major = spice.major()
minor = spice.minor()
random.seed() # Restore randomness.
# Decode the file.
string = cStringIO.StringIO()
spice.decode(file(pathname, 'rb'), string, major, minor)
string = string.getvalue()
# Extract the data.
namesize = ord(string[0]) + 2
name = string[1:namesize]
profile = string[namesize:]
# Archive the data.
assert profile, '%r has no profile data.' % pathname
self.profiles[name] = [profile, pathname, major, minor]
def save(self, arg):
'Save profile with name and archive.'
assert os.path.exists(self.filename), '%r NOT FOUND' % self.filename
assert 1 <= len(arg) <= 256, 'BAD NAME LENGTH'
arg = self.solve(arg, False)
profile = file(self.filename, 'rb').read()
if arg in self.profiles:
# Update profile and get save info.
self.profiles[arg][0] = profile
pathname, major, minor = self.profiles[arg][1:]
destination = open(pathname, 'wb')
else:
destination, major, minor = self.save_new(arg, profile)
self.save_act(arg, profile, destination, major, minor)
def save_new(self, arg, profile):
'Prepare to write a new profile.'
# Create a pathname.
name = ''.join(random.sample(self.STRING, len(self.STRING)))
pathname = os.path.join(self.savepath, name)
while os.path.exists(pathname):
name = ''.join(random.sample(self.STRING, len(self.STRING)))
pathname = os.path.join(self.savepath, name)
# Create destination and keys.
destination = open(pathname, 'wb')
random.seed(int(os.path.getctime(pathname)))
major = spice.major()
minor = spice.minor()
random.seed() # Restore randomness.
# Create a new profile entry.
self.profiles[arg] = [profile, pathname, major, minor]
self.aliases.append(arg)
return destination, major, minor
def save_act(self, arg, profile, destination, major, minor):
'Encrypt and save profile to disk.'
source = cStringIO.StringIO(chr(len(arg) - 1) + arg + profile)
spice.encode(source, destination, major, minor)
destination.close()
def list(self):
'Return an array of loaded profiles.'
return tuple(self.aliases)
def load(self, arg):
'Load an archived profile for use.'
arg = self.solve(arg)
profile = self.profiles[arg][0]
file(self.filename, 'wb').write(profile)
def away(self, arg):
        'Delete the specified profile.'
arg = self.solve(arg)
os.remove(self.profiles[arg][1])
del self.profiles[arg]
self.aliases.remove(arg)
def solve(self, arg, require=True):
'Solve profile alias if given.'
if arg not in self.profiles:
try:
index = int(arg) - 1
except:
if require:
raise Exception('%r NOT FOUND' % arg)
return arg
        assert self.aliases, 'NO PROFILES LOADED'
        try:
            assert index > -1
            return self.aliases[index]
        except:
            raise Exception('INDEX OUT OF BOUNDS')
def export_(self, arg, key):
'Encode all profiles and export them.'
try:
destination = open(arg, 'wb')
except:
raise Exception('%r CANNOT BE CREATED' % arg)
random.seed(key)
major = spice.major()
minor = spice.minor()
random.seed() # Restore randomness.
for name in self.aliases:
profile = self.profiles[name][0]
assert len(profile) <= 16777216, '%r IS TOO LARGE' % name
len_name = chr(len(name) - 1)
len_profile = self.str_(len(profile) - 1)
source = cStringIO.StringIO(len_name + len_profile + name + profile)
spice.encode(source, destination, major, minor)
destination.close()
def str_(self, number):
'Convert number into a string.'
string = ''
for byte in range(3):
string = chr(number & 0xFF) + string
number >>= 8
return string
def import_(self, arg, key):
'Import all profiles and decode them.'
# Decode the data being imported.
try:
source = open(arg, 'rb')
except:
raise Exception('%r CANNOT BE OPENED' % arg)
random.seed(key)
major = spice.major()
minor = spice.minor()
random.seed() # Restore randomness.
destination = cStringIO.StringIO()
spice.decode(source, destination, major, minor)
source.close()
destination.seek(0)
# Import the decoded profiles.
len_name = destination.read(1)
while len_name:
len_profile = destination.read(3)
assert len(len_profile) == 3, '%r IS CORRUPT' % arg
len_name = ord(len_name) + 1
name = destination.read(len_name)
assert len(name) == len_name, '%r IS CORRUPT' % arg
len_profile = self.int_(len_profile) + 1
profile = destination.read(len_profile)
assert len(profile) == len_profile, '%r IS CORRUPT' % arg
# Check for duplicate names.
if name in self.aliases:
name = name[:250]
code = ''.join(random.sample(self.STRING, 3))
temp = '%s [%s]' % (name, code)
while temp in self.aliases:
code = ''.join(random.sample(self.STRING, 3))
temp = '%s [%s]' % (name, code)
name = temp
# Save the new profile to disk.
self.save_act(name, profile, *self.save_new(name, profile))
len_name = destination.read(1)
def int_(self, string):
'Convert string into a number.'
number = 0
for character in string:
number <<= 8
number += ord(character)
return number
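# Worked example of the 3-byte big-endian length encoding used by export_
# and import_ (illustrative; `pm` stands for any Profile_Manager instance):
#     pm.str_(66303)          -> '\x01\x02\xff'
#     pm.int_('\x01\x02\xff') -> 66303
# Both the 1-byte name length and the 3-byte profile length are stored
# minus one, so the maximum sizes are 256 bytes and 2**24 bytes, matching
# the asserts in save() and export_().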
################################################################################
if __name__ == '__main__':
Interface().cmdloop()
|
py | 7dfe7a8ebcc38c39d5120a4c9db2c68e2ad0ad9a | #!python3
#encoding:utf-8
import os.path
from database.Database import Database as Db
class Editor:
def __init__(self, client, args):
self.__client = client
self.__args = args
self.__userRepo = Db().Repositories[self.__args.username]
self.__repo_name = os.path.basename(self.__args.path_dir_pj)
def Edit(self, name, description, homepage):
j = self.__client.Repositories.edit(name, description, homepage)
self.__EditDb(j)
        # If the repository rename succeeded, rename the local directory as well
if self.__repo_name != name:
import os
os.rename("../" + self.__repo_name, "../" + name)
def __EditDb(self, j):
import pytz
import datetime
repo = self.__userRepo['Repositories'].find_one(Name=self.__repo_name)
data = {}
data['Id'] = repo['Id']
data['Name'] = j['name']
if not(None is j['description'] or '' == j['description']):
data['Description'] = j['description']
if not(None is j['homepage'] or '' == j['homepage']):
data['Homepage'] = j['homepage']
data['CreatedAt']=j['created_at']
data['PushedAt']=j['pushed_at']
data['UpdatedAt']=j['updated_at']
data['CheckedAt']="{0:%Y-%m-%dT%H:%M:%SZ}".format(datetime.datetime.now(pytz.utc))
self.__userRepo['Repositories'].update(data, ['Id'])
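# Hypothetical usage (the shapes of `client` and `args` are inferred from the
# calls above; the argument values are placeholders):
#     Editor(client, args).Edit('new-name', 'short description', 'https://example.com')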
|
py | 7dfe7af5eb9152d365bd202fa5251d54bd59017b | #!/usr/bin/env python3
# Copyright © 2018 Broadcom. All Rights Reserved. The term “Broadcom” refers to
# Broadcom Inc. and/or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may also obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`port_avail_show` - PyFOS util for specific port op use case.
***********************************************************************************
The :mod:`port_avail_show` provides for specific port op use case.
This module is a stand-alone script that can be used to display available
ports.
* Inputs:
* -L=<login>: Login ID. If not provided, an interactive
prompt will request one.
* -P=<password>: Password. If not provided, an interactive
prompt will request one.
* -i=<IP address>: IP address.
* -u=<user name>: The string name to be assigned to switch.
* -f=<VFID>: VFID or -1 if VF is disabled. If unspecified,
a VFID of 128 is assumed.
* Outputs:
* List of ports that are available.
"""
import sys
from pyfos import pyfos_auth
import pyfos.pyfos_brocade_interface as pyfos_switchfcport
from pyfos import pyfos_util
from pyfos.utils import brcd_util
def usage():
print(" Script specific options:")
print("")
print(" --name=NAME name of port. [OPTIONAL]")
print("")
def id_avail_port(port, available_ports):
neighbor_list = port.peek_neighbor()
if len(neighbor_list) == 0:
name = port.peek_name()
port_type = port.peek_port_type()
available_ports.append(
{'name': name,
'port-type': pyfos_switchfcport.port_type_to_str(int(port_type))})
def main(argv):
valid_options = ["name"]
inputs = brcd_util.generic_input(argv, usage, valid_options)
session = brcd_util.getsession(inputs)
vfid = None
if 'vfid' in inputs:
vfid = inputs['vfid']
if vfid is not None:
pyfos_auth.vfid_set(session, vfid)
if "name" in inputs:
name = inputs["name"]
result = pyfos_switchfcport.fibrechannel.get(session, name)
else:
result = pyfos_switchfcport.fibrechannel.get(session)
if not pyfos_util.is_failed_resp(result):
available_ports = []
if isinstance(result, list):
for port in result:
id_avail_port(port, available_ports)
else:
id_avail_port(result, available_ports)
pyfos_util.response_print(available_ports)
else:
if "name" in inputs:
print("failed to get information on", inputs["name"])
else:
print("failed to get information on ports")
pyfos_auth.logout(session)
if __name__ == "__main__":
main(sys.argv[1:])
|
py | 7dfe7e1471cb6cb3f80f89887f1334eac8afe4b0 | ###############################################################################
# Version: 1.1
# Last modified on: 3 April, 2016
# Developers: Michael G. Epitropakis
# email: m_(DOT)_epitropakis_(AT)_lancaster_(DOT)_ac_(DOT)_uk
###############################################################################
from scipy.spatial.distance import pdist, squareform
import numpy as np
import math
from functions import *
from cfunction import *
from CF1 import *
from CF2 import *
from CF3 import *
from CF4 import *
class CEC2013(object):
__nfunc_ = -1
__functions_ = {1:five_uneven_peak_trap, 2:equal_maxima, 3:uneven_decreasing_maxima,
4:himmelblau, 5:six_hump_camel_back, 6:shubert, 7:vincent, 8:shubert, 9:vincent,
10:modified_rastrigin_all, 11:CF1, 12:CF2, 13:CF3, 14:CF3, 15:CF4, 16:CF3,
17:CF4, 18:CF3, 19:CF4, 20:CF4}
__f_ = None
__fopt_ = [200.0, 1.0, 1.0, 200.0, 1.031628453489877, 186.7309088310239, 1.0, 2709.093505572820, 1.0, -2.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]
__rho_ = [0.01, 0.01, 0.01, 0.01, 0.5, 0.5, 0.2, 0.5, 0.2, 0.01,
0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01 ]
__nopt_ = [2, 5, 1, 4, 2, 18, 36, 81, 216, 12, 6, 8, 6, 6, 8, 6, 8, 6, 8, 8 ]
__maxfes_ = [50000, 50000, 50000, 50000, 50000, 200000, 200000, 400000, 400000, 200000,
200000, 200000, 200000, 400000, 400000, 400000, 400000, 400000, 400000, 400000 ]
__dimensions_ = [1, 1, 1, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 3, 3, 5, 5, 10, 10, 20]
def __init__(self, nofunc):
assert (nofunc > 0 and nofunc <= 20)
self.__nfunc_ = nofunc
if (self.__nfunc_ > 0 and self.__nfunc_ < 11):
self.__f_ = self.__functions_[self.__nfunc_]
else:
self.__f_ = self.__functions_[self.__nfunc_]( self.get_dimension() )
def evaluate(self, x):
x_ = np.asarray(x)
assert (len(x_) == self.get_dimension())
if (self.__nfunc_ > 0 and self.__nfunc_ < 11):
return self.__f_(x_)
else:
return self.__f_.evaluate(x_)
def get_lbound(self, n):
        assert (n >= 0 and n < self.__dimensions_[self.__nfunc_-1])
result = 0
if (self.__nfunc_ == 1 or self.__nfunc_ == 2 or self.__nfunc_ == 3):
result = 0
elif (self.__nfunc_ == 4):
result = -6
elif (self.__nfunc_ == 5):
tmp = [-1.9, -1.1]
result = tmp[n]
elif (self.__nfunc_ == 6 or self.__nfunc_ == 8):
result = -10
elif (self.__nfunc_ == 7 or self.__nfunc_ == 9):
result = 0.25
elif (self.__nfunc_ == 10):
result = 0
elif (self.__nfunc_ > 10):
result = self.__f_.get_lbound(n)
return result
def get_ubound(self, n):
        assert (n >= 0 and n < self.__dimensions_[self.__nfunc_-1])
result = 0
if (self.__nfunc_ == 1):
result = 30
elif (self.__nfunc_ == 2 or self.__nfunc_ == 3):
result = 1
elif (self.__nfunc_ == 4):
result = 6
elif (self.__nfunc_ == 5):
tmp = [1.9, 1.1]
result = tmp[n]
elif (self.__nfunc_ == 6 or self.__nfunc_ == 8):
result = 10
elif (self.__nfunc_ == 7 or self.__nfunc_ == 9):
result = 10
elif (self.__nfunc_ == 10):
result = 1
elif (self.__nfunc_ > 10):
result = self.__f_.get_ubound(n)
return result
def get_fitness_goptima(self):
return self.__fopt_[self.__nfunc_-1]
def get_dimension(self):
return self.__dimensions_[self.__nfunc_-1]
def get_no_goptima(self):
return self.__nopt_[self.__nfunc_-1]
def get_rho(self):
return self.__rho_[self.__nfunc_-1]
def get_maxfes(self):
return self.__maxfes_[self.__nfunc_-1]
    def get_info(self):
        return {'fbest': self.get_fitness_goptima(),
            'dimension': self.get_dimension(),
            'nogoptima': self.get_no_goptima(),
            'maxfes': self.get_maxfes(),
            'rho': self.get_rho()}
def how_many_goptima(pop, f, accuracy):
# pop: NP, D
NP, D = pop.shape[0], pop.shape[1]
# Evaluate population
fits = np.zeros( NP )
for i in range( NP ):
fits[i] = f.evaluate(pop[i])
    # Sort indices in descending order of fitness
order = np.argsort(fits)[::-1]
# Sort population based on its fitness values
sorted_pop = pop[order,:]
spopfits = fits[order]
# find seeds in the temp population (indices!)
seeds_idx = find_seeds_indices(sorted_pop, f.get_rho() )
count = 0
goidx = []
for idx in seeds_idx:
# evaluate seed
seed_fitness = spopfits[idx] #f.evaluate(sorted_pop[idx])
# |F_seed - F_goptimum| <= accuracy
if math.fabs( seed_fitness - f.get_fitness_goptima() ) <= accuracy:
count = count + 1
goidx.append(idx)
# save time
if count == f.get_no_goptima():
break
# gather seeds
seeds = sorted_pop[goidx]
return count, seeds
def find_seeds_indices(sorted_pop, radius):
seeds = []
seeds_idx = []
# Determine the species seeds: iterate through sorted population
for i, x in enumerate(sorted_pop):
found = False
# Iterate seeds
for j, sx in enumerate(seeds):
# Calculate distance from seeds
dist = math.sqrt( sum( (x - sx)**2 ) )
# If the Euclidean distance is less than the radius
if dist <= radius:
found = True
break
if not found:
seeds.append(x)
seeds_idx.append(i)
return seeds_idx
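# Minimal usage sketch (illustrative only; assumes this module's companion
# files -- functions.py, cfunction.py, CF1..CF4 -- are importable, and the
# random population below may or may not land near every optimum):
if __name__ == '__main__':
    f = CEC2013(4)  # Himmelblau's function: 2-D, 4 global optima
    dim = f.get_dimension()
    lb = np.array([f.get_lbound(i) for i in range(dim)])
    ub = np.array([f.get_ubound(i) for i in range(dim)])
    pop = lb + (ub - lb) * np.random.rand(1000, dim)
    count, seeds = how_many_goptima(pop, f, 0.1)
    print('Found %d of %d global optima' % (count, f.get_no_goptima()))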
|
py | 7dfe7f4bf1059ed238ec78fcbf1148314ec8266f | """
Paper Chase -- game logic.
"""
import random
from gamedata import *
music.play('running_music') # So sorry... ;-)
def update_player(player):
"""
Given a player, will ensure the correct image is used
for their current state.
"""
if player.jumping:
player.image = "{}_run3".format(player.name)
elif player.kicking:
player.image = "{}_kick".format(player.name)
player.left -= PULL
else:
if player.flying:
player.left += PUSH
player.image = "{}_fly{}".format(player.name, player.frame)
else:
player.image = "{}_run{}".format(player.name, player.frame)
player.frame += 1
if player.frame > 5:
player.frame = 1
def animate_update():
"""
Update images so we have around 12 FPS.
"""
global steps
global speed
global object_frequency
global active_objects
global power_up
global END
# Increase difficulty every LEVEL_UP steps.
steps += 1
if steps % LEVEL_UP == 0:
        speed = min(40, speed + 4) # Non-player objects move faster.
# Objects appear more frequently.
object_frequency = max(50, object_frequency - 5)
# Update player images.
update_player(red)
update_player(blue)
# Scroll the floor continuously.
floor_a.left -= speed
floor_b.left -= speed
if int(floor_a.right) < 0:
floor_a.left = floor_b.right
if int(floor_b.right) < 0:
floor_b.left = floor_a.right
# Move non-player objects.
for obj in active_objects:
obj.left -= speed
# Move a power-up
if power_up:
power_up.left -= speed
if power_up.right < 0:
power_up = None
# Check for winning condition
distance_between_players = abs(red.left - blue.left)
if (distance_between_players > DISTANCE or red.right < 0 or
blue.right < 0):
END = True
else:
# Re-schedule a call to this function.
clock.schedule_unique(animate_update, 0.08)
def toggle_warning():
"""
Used to make the "Steps ahead" info flash.
"""
global WARNING
WARNING = not WARNING
clock.schedule_unique(toggle_warning, 0.5)
def jump(player, on_finished):
if not player.flying:
player.jumping = True
x, y = player.pos
animate(player, pos=(x, 204), duration=0.5,
on_finished=on_finished, tween='decelerate')
def fall(player, on_finished):
x, y = player.pos
animate(player, pos=(x, 304), duration=0.3,
on_finished=on_finished, tween='accelerate')
def fly_up(player):
if not player.landing:
x, y = player.pos
animate(player, pos=(x, max(20, y - 50)),
duration=0.1, tween='accelerate')
def fly_down(player, on_land):
if not player.landing:
x, y = player.pos
new_y = y + 50
if new_y < 290:
animate(player, pos=(x, new_y), duration=0.1,
tween='accelerate')
else:
on_land()
def kick(player, on_land):
player.kicking = True
clock.schedule_unique(on_land, 0.6)
def land(player, on_land):
player.landing = True
x, y = player.pos
animate(player, pos=(x, 304), duration=0.1, tween='accelerate',
on_finished=on_land)
def red_land():
land(red, red_reset)
def red_reset():
red.jumping = False
red.flying = False
red.kicking = False
red.landing = False
def red_jump():
jump(red, red_fall)
def red_fall():
fall(red, red_reset)
def blue_land():
land(blue, blue_reset)
def blue_jump():
jump(blue, blue_fall)
def blue_fall():
fall(blue, blue_reset)
def blue_reset():
blue.jumping = False
blue.flying = False
blue.kicking = False
blue.landing = False
def update():
"""
Update game state in light of user input.
"""
if END: # The race has finished
update_end()
elif STARTED: # The race has started
update_race()
else: # Just display the intro screen
update_intro()
def update_intro():
"""
Wait for the space bar to be pressed and then start
the race.
"""
global STARTED
if keyboard[keys.SPACE]:
STARTED = True
        # Start the race and the warning flash.
        clock.schedule_unique(animate_update, 0.08)
        clock.schedule_unique(toggle_warning, 0.5)
def update_end():
"""
Wait for the space bar to be pressed and then reset the
game to the start state.
"""
global STARTED
global END
global speed
global object_frequency
global steps
global active_objects
if keyboard[keys.SPACE]:
STARTED = True
END = False
speed = 20 # How fast non-player objects move.
object_frequency = 100 # Smaller = more frequent.
steps = 0
red.pos = (512, 304)
blue.pos = (512, 304)
red.flying = False
blue.flying = False
red.jumping = False
blue.jumping = False
red.antigravity = 0
blue.antigravity = 0
active_objects = []
        # Start the race and the warning flash.
        clock.schedule_unique(animate_update, 0.08)
        clock.schedule_unique(toggle_warning, 0.5)
def update_race():
"""
Update game state when the players are racing.
"""
global active_objects
global power_up
# RED
if keyboard[keys.RETURN] and not red.jumping:
red_jump()
if keyboard[keys.UP] and not red.jumping:
if red.antigravity > 0 and not red.flying:
red.antigravity -= 1
red.flying = True
clock.schedule_unique(red_land, FLIGHT_TIME)
if red.flying:
fly_up(red)
if keyboard[keys.DOWN]:
fly_down(red, red_land)
if (keyboard[keys.RIGHT] and not red.kicking and
not red.flying):
kick(red, red_reset)
# Blue
if keyboard[keys.SPACE] and not blue.jumping:
blue_jump()
if keyboard[keys.W] and not blue.jumping:
if blue.antigravity > 0 and not blue.flying:
blue.antigravity -= 1
blue.flying = True
clock.schedule_unique(blue_land, FLIGHT_TIME)
if blue.flying:
fly_up(blue)
if keyboard[keys.S]:
fly_down(blue, blue_land)
if (keyboard[keys.D] and not blue.kicking and
not blue.flying):
kick(blue, blue_reset)
# Check for collisions between players and non-player objects.
for obj in active_objects:
# The object has scrolled past.
if obj.right < 0:
active_objects.remove(obj)
# The object has been kicked forward.
if obj.left > 1999:
active_objects.remove(obj)
# Red player collision
if red.colliderect(obj) and not obj.red_hit:
if red.kicking:
x = random.randint(2000, 4000)
y = random.randint(0, HEIGHT)
animate(obj, pos=(x, y), duration=0.2, tween='accelerate')
else:
red.left -= FALL
obj.red_hit = True
if red.flying:
red_land()
# Blue player collision.
if blue.colliderect(obj) and not obj.blue_hit:
if blue.kicking:
x = random.randint(2000, 4000)
y = random.randint(0, HEIGHT)
animate(obj, pos=(x, y), duration=0.2, tween='accelerate')
else:
blue.left -= FALL
obj.blue_hit = True
if blue.flying:
blue_land()
# Check for collision with power-up.
if power_up:
# This may seem convoluted, but it ensures that if both players get to the
# power-up at the same time the game is "fair" in balancing up the advantage.
touching_red = (red.colliderect(power_up) and not (red.flying or red.kicking)
and red.antigravity < 3)
touching_blue = (blue.colliderect(power_up) and not (blue.flying or blue.kicking)
and blue.antigravity < 3)
if touching_blue and touching_red:
if red.antigravity > blue.antigravity:
blue.antigravity += 1
elif red.antigravity < blue.antigravity:
red.antigravity += 1
else:
if random.choice([True, False]):
red.antigravity += 1
else:
blue.antigravity += 1
power_up = None
elif touching_red:
red.antigravity += 1
power_up = None
elif touching_blue:
blue.antigravity += 1
power_up = None
if random.randint(0, object_frequency) == 0 or not active_objects:
make_obstacle(ground_objects)
if random.randint(0, object_frequency) == 0 or not active_objects:
make_obstacle(air_objects)
if not power_up and random.randint(0, powerup_frequency) == 0:
power_up = Actor('antigravity', pos=(1024, 320))
def make_obstacle(objects):
global active_objects
obj_collection = objects[random.choice(list(objects.keys()))]
low = obj_collection['pos'][0]
high = obj_collection['pos'][1]
new_object = Actor(random.choice(obj_collection['items']),
pos=(1024, random.randint(low, high)))
new_object.red_hit = False
new_object.blue_hit = False
active_objects.append(new_object)
def draw():
"""
Draw things on the screen.
"""
screen.blit('paper', (0, 0))
if END: # The race has finished
draw_end()
elif STARTED: # The race has started
draw_race()
else: # Just display the intro screen
draw_intro()
def draw_intro():
"""
Draw the intro screen with the story, keys and instructions
to press space to start the race.
"""
# Paper
screen.draw.text('Paper', (260, 10),
fontname='funsized', fontsize=56,
color=(0, 0, 255), background='None')
# Chase
screen.draw.text('Chase', (500, 10),
fontname='funsized', fontsize=56,
color=(255, 0, 0), background='None')
# Story
story = ("The intergalactic war between the red and blue factions "
"of the biro universe has reached its climax. Each world has "
"sent a stick-figure champion to race in the \"Paper chase\" "
"for ultimate victory and to decide which colour biro pen "
"teachers should use when marking work. (Get {} steps ahead "
"to win, collect up to 3 Python power-ups to import antigravity and "
"avoid all other obstacles.)").format(DISTANCE)
screen.draw.text(story, (50, 80), width=900,
fontname='rudiment', fontsize=30,
color=(0, 0, 0))
screen.draw.text('W - fly up, S - fly down\nD - kick object, SPACE - jump.', (50, 240),
fontname='rudiment', fontsize=30,
color=(0, 0, 255))
screen.draw.text('Up Arrow - fly up, Down Arrow - fly down\nRight Arrow - kick object, Enter - jump.', (500, 240),
fontname='rudiment', fontsize=30,
color=(255, 0, 0))
screen.draw.text('Press SPACE to start the race.', (270, 320),
fontname='rudiment', fontsize=38,
color=(0, 0, 0), background='None')
def draw_end():
"""
Draw the end state with the result and instructions to
press space to start again.
"""
winner = 'Red' if red.left > blue.left else 'Blue'
color = (255, 0, 0) if red.left > blue.left else (0, 0, 255)
screen.draw.text('{} won!'.format(winner), (360, 100),
fontname='funsized', fontsize=56,
color=color, background='None')
screen.draw.text('Press SPACE to restart.', (360, 250),
fontname='rudiment', fontsize=38,
color=(0, 0, 0), background='None')
def draw_race():
"""
Draw game state when players are racing.
"""
red.draw()
blue.draw()
floor_a.draw()
floor_b.draw()
for obj in active_objects:
obj.draw()
if power_up:
power_up.draw()
screen.draw.text('Antigravity: {}'.format(red.antigravity),
(800, 340), fontname='rudiment', fontsize=38,
color=(255, 0, 0), background='None')
screen.draw.text('Antigravity: {}'.format(blue.antigravity),
(580, 340), fontname='rudiment', fontsize=38,
color=(0, 0, 255), background='None')
distance_between_players = int(abs(red.left - blue.left))
distance_to_display = distance_between_players - (distance_between_players % 10)
color = (255, 0, 0) if red.left > blue.left else (0, 0, 255)
alert_margin = int((DISTANCE / 4) * 3)
if distance_to_display < alert_margin:
screen.draw.text('Steps ahead: {}'.format(distance_to_display),
(10, 340), fontname='rudiment', fontsize=38,
color=color, background='None')
elif WARNING:
screen.draw.text('Steps ahead: {}'.format(distance_to_display),
(10, 340), fontname='rudiment', fontsize=38,
color=color, background='None')
|
py | 7dfe80a9aa24979cdf83e34d2d4bc2eaf593438e | from dataclasses import dataclass
import numpy as np
import pandas as pd
from frds.data.wrds import WRDSDataset
@dataclass
class Funda(WRDSDataset):
"""Fundamentals Annual"""
data: pd.DataFrame
library = "comp"
table = "funda"
index_col = ["gvkey", "datadate"]
date_cols = ["datadate"]
def __post_init__(self):
idx = [c.upper() for c in self.index_col]
if set(self.data.index.names) != set(idx):
self.data.reset_index(inplace=True, drop=True)
self.data.rename(columns=str.upper, inplace=True)
self.data.set_index(idx, inplace=True)
# Some variables are not available
# e.g., ADD1 (address line 1) is not itself stored in FUNDA
attrs = [
varname
for varname, prop in vars(Funda).items()
if isinstance(prop, property) and varname.isupper()
]
for attr in attrs:
try:
self.__getattribute__(attr)
except KeyError:
delattr(Funda, attr)
# Automatically apply the default filtering rules
self.filter()
def filter(self):
"""Default filter applied on the FUNDA dataset"""
self.data = self.data[
np.in1d(self.data.DATAFMT, ("STD"))
& np.in1d(self.data.INDFMT, ("INDL"))
& np.in1d(self.data.POPSRC, ("D"))
& np.in1d(self.data.CONSOL, ("C"))
]
@staticmethod
def lag(series: pd.Series, lags: int = 1, *args, **kwargs):
return series.shift(lags, *args, **kwargs)
@staticmethod
def lead(series: pd.Series, leads: int = 1, *args, **kwargs):
return series.shift(-leads, *args, **kwargs)
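    # Hypothetical usage of the helpers above (assumes `funda` is a Funda
    # instance; after __post_init__ the index levels are GVKEY and DATADATE):
    #     lag_at = funda.AT.groupby(level="GVKEY").transform(Funda.lag)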
@property
def GVKEY(self) -> pd.Series:
"""GVKEY -- Global Company Key (GVKEY): string"""
return self.data["GVKEY"]
@property
def CONM(self) -> pd.Series:
"""Company Name (CONM): string"""
return self.data["CONM"]
@property
def TIC(self) -> pd.Series:
"""Ticker Symbol (TIC): string"""
return self.data["TIC"]
@property
def CUSIP(self) -> pd.Series:
"""CUSIP (CUSIP): string"""
return self.data["CUSIP"]
@property
def CIK(self) -> pd.Series:
"""CIK Number (CIK): string"""
return self.data["CIK"]
@property
def EXCHG(self) -> pd.Series:
"""Stock Exchange Code (EXCHG): double"""
return self.data["EXCHG"]
@property
def FYR(self) -> pd.Series:
"""Fiscal Year-End (FYR): double"""
return self.data["FYR"]
@property
def FIC(self) -> pd.Series:
"""Foreign Incorporation Code (FIC): string"""
return self.data["FIC"]
@property
def ADD1(self) -> pd.Series:
"""ADD1 -- Address Line 1 (ADD1): string"""
return self.data["ADD1"]
@property
def ADD2(self) -> pd.Series:
"""ADD2 -- Address Line 2 (ADD2): string"""
return self.data["ADD2"]
@property
def ADD3(self) -> pd.Series:
"""ADD3 -- Address Line 3 (ADD3): string"""
return self.data["ADD3"]
@property
def ADD4(self) -> pd.Series:
"""ADD4 -- Address Line 4 (ADD4): string"""
return self.data["ADD4"]
@property
def ADDZIP(self) -> pd.Series:
"""ADDZIP -- Postal Code (ADDZIP): string"""
return self.data["ADDZIP"]
@property
def BUSDESC(self) -> pd.Series:
"""BUSDESC -- S&P Business Description (BUSDESC): string"""
return self.data["BUSDESC"]
@property
def CITY(self) -> pd.Series:
"""CITY -- City (CITY): string"""
return self.data["CITY"]
@property
def CONML(self) -> pd.Series:
"""CONML -- Company Legal Name (CONML): string"""
return self.data["CONML"]
@property
def COUNTY(self) -> pd.Series:
"""COUNTY -- County Code (COUNTY): string"""
return self.data["COUNTY"]
@property
def DLDTE(self) -> pd.Series:
"""DLDTE -- Research Company Deletion Date (DLDTE): date"""
return self.data["DLDTE"]
@property
def DLRSN(self) -> pd.Series:
"""DLRSN -- Research Co Reason for Deletion (DLRSN): string"""
return self.data["DLRSN"]
@property
def EIN(self) -> pd.Series:
"""EIN -- Employer Identification Number (EIN): string"""
return self.data["EIN"]
@property
def FAX(self) -> pd.Series:
"""FAX -- Fax Number (FAX): string"""
return self.data["FAX"]
@property
def FYRC(self) -> pd.Series:
"""FYRC -- Current Fiscal Year End Month (FYRC): double"""
return self.data["FYRC"]
@property
def GGROUP(self) -> pd.Series:
"""GGROUP -- GIC Groups (GGROUP): string"""
return self.data["GGROUP"]
@property
def GIND(self) -> pd.Series:
"""GIND -- GIC Industries (GIND): string"""
return self.data["GIND"]
@property
def GSECTOR(self) -> pd.Series:
"""GSECTOR -- GIC Sectors (GSECTOR): string"""
return self.data["GSECTOR"]
@property
def GSUBIND(self) -> pd.Series:
"""GSUBIND -- GIC Sub-Industries (GSUBIND): string"""
return self.data["GSUBIND"]
@property
def IDBFLAG(self) -> pd.Series:
"""IDBFLAG -- International, Domestic, Both Indicator (IDBFLAG): string"""
return self.data["IDBFLAG"]
@property
def INCORP(self) -> pd.Series:
"""INCORP -- Current State/Province of Incorporation Code (INCORP): string"""
return self.data["INCORP"]
@property
def IPODATE(self) -> pd.Series:
"""IPODATE -- Company Initial Public Offering Date (IPODATE): date"""
return self.data["IPODATE"]
@property
def LOC(self) -> pd.Series:
"""LOC -- Current ISO Country Code - Headquarters (LOC): string"""
return self.data["LOC"]
@property
def NAICS(self) -> pd.Series:
"""NAICS -- North American Industry Classification Code (NAICS): string"""
return self.data["NAICS"]
@property
def PHONE(self) -> pd.Series:
"""PHONE -- Phone Number (PHONE): string"""
return self.data["PHONE"]
@property
def PRICAN(self) -> pd.Series:
"""PRICAN -- Current Primary Issue Tag - Canada (PRICAN): string"""
return self.data["PRICAN"]
@property
def PRIROW(self) -> pd.Series:
"""PRIROW -- Primary Issue Tag - Rest of World (PRIROW): string"""
return self.data["PRIROW"]
@property
def PRIUSA(self) -> pd.Series:
"""PRIUSA -- Current Primary Issue Tag - US (PRIUSA): string"""
return self.data["PRIUSA"]
@property
def SIC(self) -> pd.Series:
"""SIC -- Standard Industry Classification Code (SIC): string"""
return self.data["SIC"]
@property
def SPCINDCD(self) -> pd.Series:
"""SPCINDCD -- S&P Industry Sector Code (SPCINDCD): double"""
return self.data["SPCINDCD"]
@property
def SPCSECCD(self) -> pd.Series:
"""SPCSECCD -- S&P Economic Sector Code (SPCSECCD): double"""
return self.data["SPCSECCD"]
@property
def SPCSRC(self) -> pd.Series:
"""SPCSRC -- S&P Quality Ranking - Current (SPCSRC): string"""
return self.data["SPCSRC"]
@property
def STATE(self) -> pd.Series:
"""STATE -- State/Province (STATE): string"""
return self.data["STATE"]
@property
def STKO(self) -> pd.Series:
"""STKO -- Stock Ownership Code (STKO): double"""
return self.data["STKO"]
@property
def WEBURL(self) -> pd.Series:
"""WEBURL -- Web URL (WEBURL): string"""
return self.data["WEBURL"]
@property
def ACCTCHG(self) -> pd.Series:
"""ACCTCHG -- Adoption of Accounting Changes (ACCTCHG): string"""
return self.data["ACCTCHG"]
@property
def ACCTSTD(self) -> pd.Series:
"""ACCTSTD -- Accounting Standard (ACCTSTD): string"""
return self.data["ACCTSTD"]
@property
def ACQMETH(self) -> pd.Series:
"""ACQMETH -- Acquisition Method (ACQMETH): string"""
return self.data["ACQMETH"]
@property
def ADRR(self) -> pd.Series:
"""ADRR -- ADR Ratio (ADRR): double"""
return self.data["ADRR"]
@property
def AJEX(self) -> pd.Series:
"""AJEX -- Adjustment Factor (Company) - Cumulative by Ex-Date (AJEX): double"""
return self.data["AJEX"]
@property
def AJP(self) -> pd.Series:
"""AJP -- Adjustment Factor (Company) - Cumulative byPay-Date (AJP): double"""
return self.data["AJP"]
@property
def APDEDATE(self) -> pd.Series:
"""APDEDATE -- Actual Period End date (APDEDATE): date"""
return self.data["APDEDATE"]
@property
def BSPR(self) -> pd.Series:
"""BSPR -- Balance Sheet Presentation (BSPR): string"""
return self.data["BSPR"]
@property
def COMPST(self) -> pd.Series:
"""COMPST -- Comparability Status (COMPST): string"""
return self.data["COMPST"]
@property
def CURNCD(self) -> pd.Series:
"""CURNCD -- Native Currency Code (CURNCD): string"""
return self.data["CURNCD"]
@property
def CURRTR(self) -> pd.Series:
"""CURRTR -- Currency Translation Rate (CURRTR): double"""
return self.data["CURRTR"]
@property
def CURUSCN(self) -> pd.Series:
"""CURUSCN -- US Canadian Translation Rate (CURUSCN): double"""
return self.data["CURUSCN"]
@property
def FDATE(self) -> pd.Series:
"""FDATE -- Final Date (FDATE): date"""
return self.data["FDATE"]
@property
def FINAL(self) -> pd.Series:
"""FINAL -- Final Indicator Flag (FINAL): string"""
return self.data["FINAL"]
@property
def FYEAR(self) -> pd.Series:
"""FYEAR -- Data Year - Fiscal (FYEAR): int"""
return self.data["FYEAR"].astype("Int32")
@property
def ISMOD(self) -> pd.Series:
"""ISMOD -- Income Statement Model Number (ISMOD): double"""
return self.data["ISMOD"]
@property
def LTCM(self) -> pd.Series:
"""LTCM -- Long Term Contract Method (LTCM): string"""
return self.data["LTCM"]
@property
def OGM(self) -> pd.Series:
"""OGM -- OIL & GAS METHOD (OGM): string"""
return self.data["OGM"]
@property
def PDATE(self) -> pd.Series:
"""PDATE -- Preliminary Date (PDATE): date"""
return self.data["PDATE"]
@property
def PDDUR(self) -> pd.Series:
"""PDDUR -- Period Duration (PDDUR): double"""
return self.data["PDDUR"]
@property
def SCF(self) -> pd.Series:
"""SCF -- Cash Flow Format (SCF): double"""
return self.data["SCF"]
@property
def SRC(self) -> pd.Series:
"""SRC -- Source Document (SRC): double"""
return self.data["SRC"]
@property
def STALT(self) -> pd.Series:
"""STALT -- Status Alert (STALT): string"""
return self.data["STALT"]
@property
def UDPL(self) -> pd.Series:
"""UDPL -- Utility - Liberalized Depreciation Code (UDPL): string"""
return self.data["UDPL"]
@property
def UPD(self) -> pd.Series:
"""UPD -- Update Code (UPD): double"""
return self.data["UPD"]
@property
def ACCO(self) -> pd.Series:
"""ACCO -- Acceptances Outstanding (ACCO): double"""
return self.data["ACCO"]
@property
def ACDO(self) -> pd.Series:
"""ACDO -- Current Assets of Discontinued Operations (ACDO): double"""
return self.data["ACDO"]
@property
def ACO(self) -> pd.Series:
"""ACO -- Current Assets Other Total (ACO): double"""
return self.data["ACO"]
@property
def ACODO(self) -> pd.Series:
"""ACODO -- Other Current Assets Excl Discontinued Operations (ACODO): double"""
return self.data["ACODO"]
@property
def ACOMINC(self) -> pd.Series:
"""ACOMINC -- Accumulated Other Comprehensive Income (Loss) (ACOMINC): double"""
return self.data["ACOMINC"]
@property
def ACOX(self) -> pd.Series:
"""ACOX -- Current Assets Other Sundry (ACOX): double"""
return self.data["ACOX"]
@property
def ACOXAR(self) -> pd.Series:
"""ACOXAR -- Current Assets - Other - Total As Reported (ACOXAR): double"""
return self.data["ACOXAR"]
@property
def ACT(self) -> pd.Series:
"""ACT -- Current Assets - Total (ACT): double"""
return self.data["ACT"]
@property
def AEDI(self) -> pd.Series:
"""AEDI -- Accrued Expenses and Deferred Income (AEDI): double"""
return self.data["AEDI"]
@property
def ALDO(self) -> pd.Series:
"""ALDO -- Long-term Assets of Discontinued Operations (ALDO): double"""
return self.data["ALDO"]
@property
def AO(self) -> pd.Series:
"""AO -- Assets - Other (AO): double"""
return self.data["AO"]
@property
def AOCIDERGL(self) -> pd.Series:
"""AOCIDERGL -- Accum Other Comp Inc - Derivatives Unrealized Gain/Loss (AOCIDERGL): double"""
return self.data["AOCIDERGL"]
@property
def AOCIOTHER(self) -> pd.Series:
"""AOCIOTHER -- Accum Other Comp Inc - Other Adjustments (AOCIOTHER): double"""
return self.data["AOCIOTHER"]
@property
def AOCIPEN(self) -> pd.Series:
"""AOCIPEN -- Accum Other Comp Inc - Min Pension Liab Adj (AOCIPEN): double"""
return self.data["AOCIPEN"]
@property
def AOCISECGL(self) -> pd.Series:
"""AOCISECGL -- Accum Other Comp Inc - Unreal G/L Ret Int in Sec Assets (AOCISECGL): double"""
return self.data["AOCISECGL"]
@property
def AODO(self) -> pd.Series:
"""AODO -- Other Assets excluding Discontinued Operations (AODO): double"""
return self.data["AODO"]
@property
def AOX(self) -> pd.Series:
"""AOX -- Assets - Other - Sundry (AOX): double"""
return self.data["AOX"]
@property
def AP(self) -> pd.Series:
"""AP -- Accounts Payable - Trade (AP): double"""
return self.data["AP"]
@property
def APB(self) -> pd.Series:
"""APB -- Accounts Payable/Creditors - Brokers, Dealers, and Clearing Organizations (APB): double"""
return self.data["APB"]
@property
def APC(self) -> pd.Series:
"""APC -- Accounts Payable/Creditors - Customer (APC): double"""
return self.data["APC"]
@property
def APOFS(self) -> pd.Series:
"""APOFS -- Accounts Payable/Creditors - Other - FS (APOFS): double"""
return self.data["APOFS"]
@property
def ARB(self) -> pd.Series:
"""ARB -- Accounts Receivable/Debtors - Brokers, Dealers, and Clearing Organizations (ARB): double"""
return self.data["ARB"]
@property
def ARC(self) -> pd.Series:
"""ARC -- Accounts Receivable/Debtors - Customer (ARC): double"""
return self.data["ARC"]
@property
def ARTFS(self) -> pd.Series:
"""ARTFS -- Accounts Receivable/Debtors - Total (ARTFS): double"""
return self.data["ARTFS"]
@property
def AT(self) -> pd.Series:
"""AT -- Assets - Total (AT): double"""
return self.data["AT"]
@property
def BAST(self) -> pd.Series:
"""BAST -- Average Short-Term Borrowings (BAST): double"""
return self.data["BAST"]
@property
def BKVLPS(self) -> pd.Series:
"""BKVLPS -- Book Value Per Share (BKVLPS): double"""
return self.data["BKVLPS"]
@property
def CA(self) -> pd.Series:
"""CA -- Customers' Acceptance (CA): double"""
return self.data["CA"]
@property
def CAPS(self) -> pd.Series:
"""CAPS -- Capital Surplus/Share Premium Reserve (CAPS): double"""
return self.data["CAPS"]
@property
def CB(self) -> pd.Series:
"""CB -- Compensating Balance (CB): double"""
return self.data["CB"]
@property
def CEQ(self) -> pd.Series:
"""CEQ -- Common/Ordinary Equity - Total (CEQ): double"""
return self.data["CEQ"]
@property
def CEQL(self) -> pd.Series:
"""CEQL -- Common Equity Liquidation Value (CEQL): double"""
return self.data["CEQL"]
@property
def CEQT(self) -> pd.Series:
"""CEQT -- Common Equity Tangible (CEQT): double"""
return self.data["CEQT"]
@property
def CH(self) -> pd.Series:
"""CH -- Cash (CH): double"""
return self.data["CH"]
@property
def CHE(self) -> pd.Series:
"""CHE -- Cash and Short-Term Investments (CHE): double"""
return self.data["CHE"]
@property
def CHS(self) -> pd.Series:
"""CHS -- Cash and Deposits - Segregated (CHS): double"""
return self.data["CHS"]
@property
def CLD2(self) -> pd.Series:
"""CLD2 -- Capitalized Leases - Due in 2nd Year (CLD2): double"""
return self.data["CLD2"]
@property
def CLD3(self) -> pd.Series:
"""CLD3 -- Capitalized Leases - Due in 3rd Year (CLD3): double"""
return self.data["CLD3"]
@property
def CLD4(self) -> pd.Series:
"""CLD4 -- Capitalized Leases - Due in 4th Year (CLD4): double"""
return self.data["CLD4"]
@property
def CLD5(self) -> pd.Series:
"""CLD5 -- Capitalized Leases - Due in 5th Year (CLD5): double"""
return self.data["CLD5"]
@property
def CLFC(self) -> pd.Series:
"""CLFC -- Contingent Liabilities - Forward and Future Contracts (CLFC): double"""
return self.data["CLFC"]
@property
def CLFX(self) -> pd.Series:
"""CLFX -- Contingent Liabilities - Foreign Exchange Commitments (CLFX): double"""
return self.data["CLFX"]
@property
def CLG(self) -> pd.Series:
"""CLG -- Contingent Liabilities - Guarantees (CLG): double"""
return self.data["CLG"]
@property
def CLIS(self) -> pd.Series:
"""CLIS -- Contingent Liabilities - Interest Rate Swaps (CLIS): double"""
return self.data["CLIS"]
@property
def CLL(self) -> pd.Series:
"""CLL -- Contingent Liabilities - Letters of Credit (CLL): double"""
return self.data["CLL"]
@property
def CLLC(self) -> pd.Series:
"""CLLC -- Contingent Liabilities - Loan Commitments (CLLC): double"""
return self.data["CLLC"]
@property
def CLO(self) -> pd.Series:
"""CLO -- Contingent Liabilities - Other (CLO): double"""
return self.data["CLO"]
@property
def CLRLL(self) -> pd.Series:
"""CLRLL -- Credit Loss Reserve Allocated for LDC Loans (CLRLL): double"""
return self.data["CLRLL"]
@property
def CLT(self) -> pd.Series:
"""CLT -- Contingent Liabilities - Total (CLT): double"""
return self.data["CLT"]
@property
def CMP(self) -> pd.Series:
"""CMP -- Commercial Paper (CMP): double"""
return self.data["CMP"]
@property
def CRV(self) -> pd.Series:
"""CRV -- Consolidation Reserves (CRV): double"""
return self.data["CRV"]
@property
def CRVNLI(self) -> pd.Series:
"""CRVNLI -- Reserves for Claims (Losses) - Nonlife (Insurance) (CRVNLI): double"""
return self.data["CRVNLI"]
@property
def CSTK(self) -> pd.Series:
"""CSTK -- Common/Ordinary Stock (Capital) (CSTK): double"""
return self.data["CSTK"]
@property
def CSTKCV(self) -> pd.Series:
"""CSTKCV -- Common Stock-Carrying Value (CSTKCV): double"""
return self.data["CSTKCV"]
@property
def DC(self) -> pd.Series:
"""DC -- Deferred Charges (DC): double"""
return self.data["DC"]
@property
def DCLO(self) -> pd.Series:
"""DCLO -- Debt Capitalized Lease Obligations (DCLO): double"""
return self.data["DCLO"]
@property
def DCOM(self) -> pd.Series:
"""DCOM -- Deferred Compensation (DCOM): double"""
return self.data["DCOM"]
@property
def DCPSTK(self) -> pd.Series:
"""DCPSTK -- Convertible Debt and Preferred Stock (DCPSTK): double"""
return self.data["DCPSTK"]
@property
def DCS(self) -> pd.Series:
"""DCS -- Debt Consolidated Subsidiary (DCS): double"""
return self.data["DCS"]
@property
def DCVSR(self) -> pd.Series:
"""DCVSR -- Debt Senior Convertible (DCVSR): double"""
return self.data["DCVSR"]
@property
def DCVSUB(self) -> pd.Series:
"""DCVSUB -- Debt Subordinated Convertible (DCVSUB): double"""
return self.data["DCVSUB"]
@property
def DCVT(self) -> pd.Series:
"""DCVT -- Debt - Convertible (DCVT): double"""
return self.data["DCVT"]
@property
def DD(self) -> pd.Series:
"""DD -- Debt Debentures (DD): double"""
return self.data["DD"]
@property
def DD1(self) -> pd.Series:
"""DD1 -- Long-Term Debt Due in One Year (DD1): double"""
return self.data["DD1"]
@property
def DD2(self) -> pd.Series:
"""DD2 -- Debt Due in 2nd Year (DD2): double"""
return self.data["DD2"]
@property
def DD3(self) -> pd.Series:
"""DD3 -- Debt Due in 3rd Year (DD3): double"""
return self.data["DD3"]
@property
def DD4(self) -> pd.Series:
"""DD4 -- Debt Due in 4th Year (DD4): double"""
return self.data["DD4"]
@property
def DD5(self) -> pd.Series:
"""DD5 -- Debt Due in 5th Year (DD5): double"""
return self.data["DD5"]
@property
def DFPAC(self) -> pd.Series:
"""DFPAC -- Deferred Policy Acquisition Costs (DFPAC): double"""
return self.data["DFPAC"]
@property
def DFS(self) -> pd.Series:
"""DFS -- Debt Finance Subsidiary (DFS): double"""
return self.data["DFS"]
@property
def DLC(self) -> pd.Series:
"""DLC -- Debt in Current Liabilities - Total (DLC): double"""
return self.data["DLC"]
@property
def DLTO(self) -> pd.Series:
"""DLTO -- Other Long-term Debt (DLTO): double"""
return self.data["DLTO"]
@property
def DLTP(self) -> pd.Series:
"""DLTP -- Long-Term Debt Tied to Prime (DLTP): double"""
return self.data["DLTP"]
@property
def DLTSUB(self) -> pd.Series:
"""DLTSUB -- Long-Term Debt - Subordinated (DLTSUB): double"""
return self.data["DLTSUB"]
@property
def DLTT(self) -> pd.Series:
"""DLTT -- Long-Term Debt - Total (DLTT): double"""
return self.data["DLTT"]
@property
def DM(self) -> pd.Series:
"""DM -- Debt Mortgages & Other Secured (DM): double"""
return self.data["DM"]
@property
def DN(self) -> pd.Series:
"""DN -- Debt Notes (DN): double"""
return self.data["DN"]
@property
def DPACB(self) -> pd.Series:
"""DPACB -- Depreciation (Accumulated) Buildings (DPACB): double"""
return self.data["DPACB"]
@property
def DPACC(self) -> pd.Series:
"""DPACC -- Depreciation (Accumulated) Construction in Progress (DPACC): double"""
return self.data["DPACC"]
@property
def DPACLI(self) -> pd.Series:
"""DPACLI -- Depreciation (Accumulated) Land and Improvements (DPACLI): double"""
return self.data["DPACLI"]
@property
def DPACLS(self) -> pd.Series:
"""DPACLS -- Depreciation (Accumulated) Leases (DPACLS): double"""
return self.data["DPACLS"]
@property
def DPACME(self) -> pd.Series:
"""DPACME -- Depreciation (Accumulated) Machinery and Equipment (DPACME): double"""
return self.data["DPACME"]
@property
def DPACNR(self) -> pd.Series:
"""DPACNR -- Depreciation (Accumulated) Natural Resources (DPACNR): double"""
return self.data["DPACNR"]
@property
def DPACO(self) -> pd.Series:
"""DPACO -- Depreciation (Accumulated) Other (DPACO): double"""
return self.data["DPACO"]
@property
def DPACRE(self) -> pd.Series:
"""DPACRE -- Accumulated Depreciation of RE Property (DPACRE): double"""
return self.data["DPACRE"]
@property
def DPACT(self) -> pd.Series:
"""DPACT -- Depreciation, Depletion and Amortization (Accumulated) (DPACT): double"""
return self.data["DPACT"]
@property
def DPDC(self) -> pd.Series:
"""DPDC -- Deposits - Demand - Customer (DPDC): double"""
return self.data["DPDC"]
@property
def DPLTB(self) -> pd.Series:
"""DPLTB -- Deposits - Long-Term Time - Bank (DPLTB): double"""
return self.data["DPLTB"]
@property
def DPSC(self) -> pd.Series:
"""DPSC -- Deposits vings - Customer (DPSC): double"""
return self.data["DPSC"]
@property
def DPSTB(self) -> pd.Series:
"""DPSTB -- Deposits - Short-Term Demand - Bank (DPSTB): double"""
return self.data["DPSTB"]
@property
def DPTB(self) -> pd.Series:
"""DPTB -- Deposits - Total - Banks (DPTB): double"""
return self.data["DPTB"]
@property
def DPTC(self) -> pd.Series:
"""DPTC -- Deposits - Total - Customer (DPTC): double"""
return self.data["DPTC"]
@property
def DPTIC(self) -> pd.Series:
"""DPTIC -- Deposits - Time - Customer (DPTIC): double"""
return self.data["DPTIC"]
@property
def DPVIEB(self) -> pd.Series:
"""DPVIEB -- Depreciation (Accumulated) Ending Balance (Schedule VI) (DPVIEB): double"""
return self.data["DPVIEB"]
@property
def DPVIO(self) -> pd.Series:
"""DPVIO -- Depreciation (Accumulated) Other Changes (Schedule VI) (DPVIO): double"""
return self.data["DPVIO"]
@property
def DPVIR(self) -> pd.Series:
"""DPVIR -- Depreciation (Accumulated) Retirements (Schedule VI) (DPVIR): double"""
return self.data["DPVIR"]
@property
def DRC(self) -> pd.Series:
"""DRC -- Deferred Revenue Current (DRC): double"""
return self.data["DRC"]
@property
def DRCI(self) -> pd.Series:
"""DRCI -- Deduction From Policy and Claims Reserves for Reinsurance Ceded (DRCI): double"""
return self.data["DRCI"]
@property
def DRLT(self) -> pd.Series:
"""DRLT -- Deferred Revenue Long-term (DRLT): double"""
return self.data["DRLT"]
@property
def DS(self) -> pd.Series:
"""DS -- Debt-Subordinated (DS): double"""
return self.data["DS"]
@property
def DUDD(self) -> pd.Series:
"""DUDD -- Debt Unamortized Debt Discount and Other (DUDD): double"""
return self.data["DUDD"]
@property
def DVPA(self) -> pd.Series:
"""DVPA -- Preferred Dividends in Arrears (DVPA): double"""
return self.data["DVPA"]
@property
def DVPIBB(self) -> pd.Series:
"""DVPIBB -- Depreciation (Accumulated) Beginning Balance (Schedule VI) (DVPIBB): double"""
return self.data["DVPIBB"]
@property
def DXD2(self) -> pd.Series:
"""DXD2 -- Debt (excl Capitalized Leases) - Due in 2nd Year (DXD2): double"""
return self.data["DXD2"]
@property
def DXD3(self) -> pd.Series:
"""DXD3 -- Debt (excl Capitalized Leases) - Due in 3rd Year (DXD3): double"""
return self.data["DXD3"]
@property
def DXD4(self) -> pd.Series:
"""DXD4 -- Debt (excl Capitalized Leases) - Due in 4th Year (DXD4): double"""
return self.data["DXD4"]
@property
def DXD5(self) -> pd.Series:
"""DXD5 -- Debt (excl Capitalized Leases) - Due in 5th Year (DXD5): double"""
return self.data["DXD5"]
@property
def EA(self) -> pd.Series:
"""EA -- Exchange Adjustments (Assets) (EA): double"""
return self.data["EA"]
@property
def ESOPCT(self) -> pd.Series:
"""ESOPCT -- ESOP Obligation (Common) - Total (ESOPCT): double"""
return self.data["ESOPCT"]
@property
def ESOPDLT(self) -> pd.Series:
"""ESOPDLT -- ESOP Debt - Long Term (ESOPDLT): double"""
return self.data["ESOPDLT"]
@property
def ESOPNR(self) -> pd.Series:
"""ESOPNR -- Preferred ESOP Obligation - Non-Redeemable (ESOPNR): double"""
return self.data["ESOPNR"]
@property
def ESOPR(self) -> pd.Series:
"""ESOPR -- Preferred ESOP Obligation - Redeemable (ESOPR): double"""
return self.data["ESOPR"]
@property
def ESOPT(self) -> pd.Series:
"""ESOPT -- Preferred ESOP Obligation - Total (ESOPT): double"""
return self.data["ESOPT"]
@property
def EXCADJ(self) -> pd.Series:
"""EXCADJ -- Exchange Adjustments (Liabilities) (EXCADJ): double"""
return self.data["EXCADJ"]
@property
def FATB(self) -> pd.Series:
"""FATB -- Property, Plant, and Equipment Buildings at Cost (FATB): double"""
return self.data["FATB"]
@property
def FATC(self) -> pd.Series:
"""FATC -- Property, Plant, and Equipment Construction in Progress at Cost (FATC): double"""
return self.data["FATC"]
@property
def FATE(self) -> pd.Series:
"""FATE -- Property, Plant, and Equipment Machinery and Equipment at Cost (FATE): double"""
return self.data["FATE"]
@property
def FATL(self) -> pd.Series:
"""FATL -- Property, Plant, and Equipment Leases at Cost (FATL): double"""
return self.data["FATL"]
@property
def FATN(self) -> pd.Series:
"""FATN -- Property, Plant, and Equipment Natural Resources at Cost (FATN): double"""
return self.data["FATN"]
@property
def FATO(self) -> pd.Series:
"""FATO -- Property, Plant, and Equipment Other at Cost (FATO): double"""
return self.data["FATO"]
@property
def FATP(self) -> pd.Series:
"""FATP -- Property, Plant, and Equipment Land and Improvements at Cost (FATP): double"""
return self.data["FATP"]
@property
def FDFR(self) -> pd.Series:
"""FDFR -- Federal Funds Purchased (FDFR): double"""
return self.data["FDFR"]
@property
def FEA(self) -> pd.Series:
"""FEA -- Foreign Exchange Assets (FEA): double"""
return self.data["FEA"]
@property
def FEL(self) -> pd.Series:
"""FEL -- Foreign Exchange Liabilities (FEL): double"""
return self.data["FEL"]
@property
def FFS(self) -> pd.Series:
"""FFS -- Federal Funds Sold (FFS): double"""
return self.data["FFS"]
@property
def GDWL(self) -> pd.Series:
"""GDWL -- Goodwill (GDWL): double"""
return self.data["GDWL"]
@property
def GEQRV(self) -> pd.Series:
"""GEQRV -- Grants - Equity Reserves (GEQRV): double"""
return self.data["GEQRV"]
@property
def GOVGR(self) -> pd.Series:
"""GOVGR -- Government Grants (GOVGR): double"""
return self.data["GOVGR"]
@property
def IAEQ(self) -> pd.Series:
"""IAEQ -- Investment Assets - Equity Securities (Insurance) (IAEQ): double"""
return self.data["IAEQ"]
@property
def IAEQCI(self) -> pd.Series:
"""IAEQCI -- Investment Assets (Insurance) - Equity Securities (Cost) (IAEQCI): double"""
return self.data["IAEQCI"]
@property
def IAEQMI(self) -> pd.Series:
"""IAEQMI -- Investment Assets (Insurance) - Equity Securities (Market) (IAEQMI): double"""
return self.data["IAEQMI"]
@property
def IAFICI(self) -> pd.Series:
"""IAFICI -- Investment Assets (Insurance) - Fixed Income Securities (Cost) (IAFICI): double"""
return self.data["IAFICI"]
@property
def IAFXI(self) -> pd.Series:
"""IAFXI -- Investment Assets - Fixed Income Securities (Insurance) (IAFXI): double"""
return self.data["IAFXI"]
@property
def IAFXMI(self) -> pd.Series:
"""IAFXMI -- Investment Assets (Insurance) - Fixed Income Securities (Market) (IAFXMI): double"""
return self.data["IAFXMI"]
@property
def IALI(self) -> pd.Series:
"""IALI -- Investment Assets (Insurance) - Listed Securities-Total (IALI): double"""
return self.data["IALI"]
@property
def IALOI(self) -> pd.Series:
"""IALOI -- Investment Assets - Loans - Other (Insurance) (IALOI): double"""
return self.data["IALOI"]
@property
def IALTI(self) -> pd.Series:
"""IALTI -- Investment Assets - Loans - Total (Insurance) (IALTI): double"""
return self.data["IALTI"]
@property
def IAMLI(self) -> pd.Series:
"""IAMLI -- Investment Assets - Mortgage Loans (Insurance) (IAMLI): double"""
return self.data["IAMLI"]
@property
def IAOI(self) -> pd.Series:
"""IAOI -- Investment Assets - Other (Insurance) (IAOI): double"""
return self.data["IAOI"]
@property
def IAPLI(self) -> pd.Series:
"""IAPLI -- Investment Assets - Policy Loans (Insurance) (IAPLI): double"""
return self.data["IAPLI"]
@property
def IAREI(self) -> pd.Series:
"""IAREI -- Investment Assets - Real Estate (Insurance) (IAREI): double"""
return self.data["IAREI"]
@property
def IASCI(self) -> pd.Series:
"""IASCI -- Investment Assets (Insurance) - Securities - Sundry (Cost) (IASCI): double"""
return self.data["IASCI"]
@property
def IASMI(self) -> pd.Series:
"""IASMI -- Investment Assets (Insurance) - Securities - Sundry (Market) (IASMI): double"""
return self.data["IASMI"]
@property
def IASSI(self) -> pd.Series:
"""IASSI -- Investment Assets - Securities - Sundry (Insurance) (IASSI): double"""
return self.data["IASSI"]
@property
def IASTI(self) -> pd.Series:
"""IASTI -- Investment Assets - Securities - Total (Insurance) (IASTI): double"""
return self.data["IASTI"]
@property
def IATCI(self) -> pd.Series:
"""IATCI -- Investment Assets (Insurance) - Securities - Total (Cost) (IATCI): double"""
return self.data["IATCI"]
@property
def IATI(self) -> pd.Series:
"""IATI -- Investment Assets - Total (Insurance) (IATI): double"""
return self.data["IATI"]
@property
def IATMI(self) -> pd.Series:
"""IATMI -- Investment Assets (Insurance) - Securities - Total (Market) (IATMI): double"""
return self.data["IATMI"]
@property
def IAUI(self) -> pd.Series:
"""IAUI -- Investment Assets (Insurance) - Unlisted Securities - Total (IAUI): double"""
return self.data["IAUI"]
@property
def ICAPT(self) -> pd.Series:
"""ICAPT -- Invested Capital - Total (ICAPT): double"""
return self.data["ICAPT"]
@property
def INTAN(self) -> pd.Series:
"""INTAN -- Intangible Assets - Total (INTAN): double"""
return self.data["INTAN"]
@property
def INTANO(self) -> pd.Series:
"""INTANO -- Other Intangibles (INTANO): double"""
return self.data["INTANO"]
@property
def INVFG(self) -> pd.Series:
"""INVFG -- Inventories Finished Goods (INVFG): double"""
return self.data["INVFG"]
@property
def INVO(self) -> pd.Series:
"""INVO -- Inventories Other (INVO): double"""
return self.data["INVO"]
@property
def INVOFS(self) -> pd.Series:
"""INVOFS -- Inventory/Stock - Other (INVOFS): double"""
return self.data["INVOFS"]
@property
def INVREH(self) -> pd.Series:
"""INVREH -- Inventory/Stock - Real Estate Held for Development (INVREH): double"""
return self.data["INVREH"]
@property
def INVREI(self) -> pd.Series:
"""INVREI -- Inventory/Stock - Real Estate Under Development (INVREI): double"""
return self.data["INVREI"]
@property
def INVRES(self) -> pd.Series:
"""INVRES -- Inventory/Stock - Real Estate Held for Sale (INVRES): double"""
return self.data["INVRES"]
@property
def INVRM(self) -> pd.Series:
"""INVRM -- Inventories Raw Materials (INVRM): double"""
return self.data["INVRM"]
@property
def INVT(self) -> pd.Series:
"""INVT -- Inventories - Total (INVT): double"""
return self.data["INVT"]
@property
def INVWIP(self) -> pd.Series:
"""INVWIP -- Inventories Work In Process (INVWIP): double"""
return self.data["INVWIP"]
@property
def IP(self) -> pd.Series:
"""IP -- Investment Property (IP): double"""
return self.data["IP"]
@property
def IPC(self) -> pd.Series:
"""IPC -- Investment Property (Cost) (IPC): double"""
return self.data["IPC"]
@property
def IPV(self) -> pd.Series:
"""IPV -- Investment Property (Valuation) (IPV): double"""
return self.data["IPV"]
@property
def ISEQ(self) -> pd.Series:
"""ISEQ -- Investment Securities - Equity (ISEQ): double"""
return self.data["ISEQ"]
@property
def ISEQC(self) -> pd.Series:
"""ISEQC -- Investment Securities - Equity (Cost) (ISEQC): double"""
return self.data["ISEQC"]
@property
def ISEQM(self) -> pd.Series:
"""ISEQM -- Investment Securities - Equity (Market) (ISEQM): double"""
return self.data["ISEQM"]
@property
def ISFI(self) -> pd.Series:
"""ISFI -- Investment Securities - Fixed Income (ISFI): double"""
return self.data["ISFI"]
@property
def ISFXC(self) -> pd.Series:
"""ISFXC -- Investment Securities - Fixed Income (Cost) (ISFXC): double"""
return self.data["ISFXC"]
@property
def ISFXM(self) -> pd.Series:
"""ISFXM -- Investment Securities - Fixed Income (Market) (ISFXM): double"""
return self.data["ISFXM"]
@property
def ISLG(self) -> pd.Series:
"""ISLG -- Investment Securities - Local Governments (ISLG): double"""
return self.data["ISLG"]
@property
def ISLGC(self) -> pd.Series:
"""ISLGC -- Investment Securities - Local Governments (Cost) (ISLGC): double"""
return self.data["ISLGC"]
@property
def ISLGM(self) -> pd.Series:
"""ISLGM -- Investment Securities - Local Governments (Market) (ISLGM): double"""
return self.data["ISLGM"]
@property
def ISLT(self) -> pd.Series:
"""ISLT -- Investment Securities - Listed - Total (ISLT): double"""
return self.data["ISLT"]
@property
def ISNG(self) -> pd.Series:
"""ISNG -- Investment Securities - National Governments (ISNG): double"""
return self.data["ISNG"]
@property
def ISNGC(self) -> pd.Series:
"""ISNGC -- Investment Securities - National Governments (Cost) (ISNGC): double"""
return self.data["ISNGC"]
@property
def ISNGM(self) -> pd.Series:
"""ISNGM -- Investment Securities - National Governments (Market) (ISNGM): double"""
return self.data["ISNGM"]
@property
def ISOTC(self) -> pd.Series:
"""ISOTC -- Invetsment Securities - Other (Cost) (ISOTC): double"""
return self.data["ISOTC"]
@property
def ISOTH(self) -> pd.Series:
"""ISOTH -- Investment Securities - Other (ISOTH): double"""
return self.data["ISOTH"]
@property
def ISOTM(self) -> pd.Series:
"""ISOTM -- Invetsment Securities - Other (Market) (ISOTM): double"""
return self.data["ISOTM"]
@property
def ISSC(self) -> pd.Series:
"""ISSC -- Investment Securities - Sundry (Cost) (ISSC): double"""
return self.data["ISSC"]
@property
def ISSM(self) -> pd.Series:
"""ISSM -- Investment Securities - Sundry (Market) (ISSM): double"""
return self.data["ISSM"]
@property
def ISSU(self) -> pd.Series:
"""ISSU -- Investment Securities - Sundry (ISSU): double"""
return self.data["ISSU"]
@property
def IST(self) -> pd.Series:
"""IST -- Investment Securities -Total (IST): double"""
return self.data["IST"]
@property
def ISTC(self) -> pd.Series:
"""ISTC -- Investment Securities - Total (Cost) (ISTC): double"""
return self.data["ISTC"]
@property
def ISTM(self) -> pd.Series:
"""ISTM -- Investment Securities - Total (Market) (ISTM): double"""
return self.data["ISTM"]
@property
def ISUT(self) -> pd.Series:
"""ISUT -- Investment Securities - Unlisted - Total (ISUT): double"""
return self.data["ISUT"]
@property
def ITCB(self) -> pd.Series:
"""ITCB -- Investment Tax Credit (Balance Sheet) (ITCB): double"""
return self.data["ITCB"]
@property
def IVAEQ(self) -> pd.Series:
"""IVAEQ -- Investment and Advances - Equity (IVAEQ): double"""
return self.data["IVAEQ"]
@property
def IVAO(self) -> pd.Series:
"""IVAO -- Investment and Advances Other (IVAO): double"""
return self.data["IVAO"]
@property
def IVGOD(self) -> pd.Series:
"""IVGOD -- Investments Grants and Other Deductions (IVGOD): double"""
return self.data["IVGOD"]
@property
def IVPT(self) -> pd.Series:
"""IVPT -- Investments - Permanent - Total (IVPT): double"""
return self.data["IVPT"]
@property
def IVST(self) -> pd.Series:
"""IVST -- Short-Term Investments - Total (IVST): double"""
return self.data["IVST"]
@property
def LCABG(self) -> pd.Series:
"""LCABG -- Loans/Claims/Advances - Banks and Government - Total (LCABG): double"""
return self.data["LCABG"]
@property
def LCACL(self) -> pd.Series:
"""LCACL -- Loans/Claims/Advances - Commercial (LCACL): double"""
return self.data["LCACL"]
@property
def LCACR(self) -> pd.Series:
"""LCACR -- Loans/Claims/Advances - Consumer (LCACR): double"""
return self.data["LCACR"]
@property
def LCAG(self) -> pd.Series:
"""LCAG -- Loans/Claims/Advances - Government (LCAG): double"""
return self.data["LCAG"]
@property
def LCAL(self) -> pd.Series:
"""LCAL -- Loans/Claims/Advances - Lease (LCAL): double"""
return self.data["LCAL"]
@property
def LCALT(self) -> pd.Series:
"""LCALT -- Loans/Claims/Advances - Long-Term (Banks) (LCALT): double"""
return self.data["LCALT"]
@property
def LCAM(self) -> pd.Series:
"""LCAM -- Loans/Claims/Advances - Mortgage (LCAM): double"""
return self.data["LCAM"]
@property
def LCAO(self) -> pd.Series:
"""LCAO -- Loans/Claims/Advances - Other (LCAO): double"""
return self.data["LCAO"]
@property
def LCAST(self) -> pd.Series:
"""LCAST -- Loans/Claims/Advances - Short-Term - Banks (LCAST): double"""
return self.data["LCAST"]
@property
def LCAT(self) -> pd.Series:
"""LCAT -- Loans/Claims/Advances - Total (LCAT): double"""
return self.data["LCAT"]
@property
def LCO(self) -> pd.Series:
"""LCO -- Current Liabilities Other Total (LCO): double"""
return self.data["LCO"]
@property
def LCOX(self) -> pd.Series:
"""LCOX -- Current Liabilities Other Sundry (LCOX): double"""
return self.data["LCOX"]
@property
def LCOXAR(self) -> pd.Series:
"""LCOXAR -- Current Liabilities - Other - Total As Reported (LCOXAR): double"""
return self.data["LCOXAR"]
@property
def LCOXDR(self) -> pd.Series:
"""LCOXDR -- Current Liabilities - Other - Excluding Deferred Revenue (LCOXDR): double"""
return self.data["LCOXDR"]
@property
def LCT(self) -> pd.Series:
"""LCT -- Current Liabilities - Total (LCT): double"""
return self.data["LCT"]
@property
def LCUACU(self) -> pd.Series:
"""LCUACU -- Loans/Claims/Advances - Customer - Total (LCUACU): double"""
return self.data["LCUACU"]
@property
def LIF(self) -> pd.Series:
"""LIF -- Life Insurance in Force (LIF): double"""
return self.data["LIF"]
@property
def LIFR(self) -> pd.Series:
"""LIFR -- LIFO Reserve (LIFR): double"""
return self.data["LIFR"]
@property
def LLOML(self) -> pd.Series:
"""LLOML -- LDC Loans Outstanding - Medium and Long-Term (LLOML): double"""
return self.data["LLOML"]
@property
def LLOO(self) -> pd.Series:
"""LLOO -- LDC Loans Outstanding - Other (LLOO): double"""
return self.data["LLOO"]
@property
def LLOT(self) -> pd.Series:
"""LLOT -- LDC Loans Outstanding - Total (LLOT): double"""
return self.data["LLOT"]
@property
def LO(self) -> pd.Series:
"""LO -- Liabilities - Other - Total (LO): double"""
return self.data["LO"]
@property
def LOXDR(self) -> pd.Series:
"""LOXDR -- Liabilities - Other - Excluding Deferred Revenue (LOXDR): double"""
return self.data["LOXDR"]
@property
def LRV(self) -> pd.Series:
"""LRV -- Legal Reserves (LRV): double"""
return self.data["LRV"]
@property
def LS(self) -> pd.Series:
"""LS -- Liabilities - Other - Sundry (LS): double"""
return self.data["LS"]
@property
def LSE(self) -> pd.Series:
"""LSE -- Liabilities and Stockholders Equity - Total (LSE): double"""
return self.data["LSE"]
@property
def LT(self) -> pd.Series:
"""LT -- Liabilities - Total (LT): double"""
return self.data["LT"]
@property
def MIB(self) -> pd.Series:
"""MIB -- Minority Interest (Balance Sheet) (MIB): double"""
return self.data["MIB"]
@property
def MRC1(self) -> pd.Series:
"""MRC1 -- Rental Commitments Minimum 1st Year (MRC1): double"""
return self.data["MRC1"]
@property
def MRC2(self) -> pd.Series:
"""MRC2 -- Rental Commitments Minimum 2nd Year (MRC2): double"""
return self.data["MRC2"]
@property
def MRC3(self) -> pd.Series:
"""MRC3 -- Rental Commitments Minimum 3rd Year (MRC3): double"""
return self.data["MRC3"]
@property
def MRC4(self) -> pd.Series:
"""MRC4 -- Rental Commitments Minimum 4th Year (MRC4): double"""
return self.data["MRC4"]
@property
def MRC5(self) -> pd.Series:
"""MRC5 -- Rental Commitments Minimum 5th Year (MRC5): double"""
return self.data["MRC5"]
@property
def MRCT(self) -> pd.Series:
"""MRCT -- Rental Commitments Minimum 5 Year Total (MRCT): double"""
return self.data["MRCT"]
@property
def MRCTA(self) -> pd.Series:
"""MRCTA -- Thereafter Portion of Leases (MRCTA): double"""
return self.data["MRCTA"]
@property
def MSA(self) -> pd.Series:
"""MSA -- Marketable Securities Adjustment (MSA): double"""
return self.data["MSA"]
@property
def MSVRV(self) -> pd.Series:
"""MSVRV -- Mandatory Securities Valuation Reserve (Statutory) (MSVRV): double"""
return self.data["MSVRV"]
@property
def MTL(self) -> pd.Series:
"""MTL -- Loans From Securities Finance Companies for Margin Transactions (MTL): double"""
return self.data["MTL"]
@property
def NAT(self) -> pd.Series:
"""NAT -- Nonadmitted Assets - Total (Statutory) (NAT): double"""
return self.data["NAT"]
@property
def NP(self) -> pd.Series:
"""NP -- Notes Payable Short-Term Borrowings (NP): double"""
return self.data["NP"]
@property
def NPANL(self) -> pd.Series:
"""NPANL -- Nonperforming Assets - Nonaccrual Loans (NPANL): double"""
return self.data["NPANL"]
@property
def NPAORE(self) -> pd.Series:
"""NPAORE -- Nonperforming Assets - Other Real Estate Owned (NPAORE): double"""
return self.data["NPAORE"]
@property
def NPARL(self) -> pd.Series:
"""NPARL -- Nonperforming Assets - Restructured Loans (NPARL): double"""
return self.data["NPARL"]
@property
def NPAT(self) -> pd.Series:
"""NPAT -- Nonperforming Assets - Total (NPAT): double"""
return self.data["NPAT"]
@property
def OB(self) -> pd.Series:
"""OB -- Order Backlog (OB): double"""
return self.data["OB"]
@property
def OPTPRCCA(self) -> pd.Series:
"""OPTPRCCA -- Options Cancelled - Price (OPTPRCCA): double"""
return self.data["OPTPRCCA"]
@property
def OPTPRCEX(self) -> pd.Series:
"""OPTPRCEX -- Options Exercised - Price (OPTPRCEX): double"""
return self.data["OPTPRCEX"]
@property
def OPTPRCEY(self) -> pd.Series:
"""OPTPRCEY -- Options Outstanding End of Year - Price (OPTPRCEY): double"""
return self.data["OPTPRCEY"]
@property
def OPTPRCGR(self) -> pd.Series:
"""OPTPRCGR -- Options Granted - Price (OPTPRCGR): double"""
return self.data["OPTPRCGR"]
@property
def OPTPRCWA(self) -> pd.Series:
"""OPTPRCWA -- Options Exercisable - Weighted Avg Price (OPTPRCWA): double"""
return self.data["OPTPRCWA"]
@property
def PPEGT(self) -> pd.Series:
"""PPEGT -- Property, Plant and Equipment - Total (Gross) (PPEGT): double"""
return self.data["PPEGT"]
@property
def PPENB(self) -> pd.Series:
"""PPENB -- Property, Plant, and Equipment Buildings (Net) (PPENB): double"""
return self.data["PPENB"]
@property
def PPENC(self) -> pd.Series:
"""PPENC -- Property, Plant, and Equipment Construction in Progress (Net) (PPENC): double"""
return self.data["PPENC"]
@property
def PPENLI(self) -> pd.Series:
"""PPENLI -- Property, Plant, and Equipment Land and Improvements (Net) (PPENLI): double"""
return self.data["PPENLI"]
@property
def PPENLS(self) -> pd.Series:
"""PPENLS -- Property, Plant, and Equipment Leases (Net) (PPENLS): double"""
return self.data["PPENLS"]
@property
def PPENME(self) -> pd.Series:
"""PPENME -- Property, Plant, and Equipment Machinery and Equipment (Net) (PPENME): double"""
return self.data["PPENME"]
@property
def PPENNR(self) -> pd.Series:
"""PPENNR -- Property, Plant, and Equipment Natural Resources (Net) (PPENNR): double"""
return self.data["PPENNR"]
@property
def PPENO(self) -> pd.Series:
"""PPENO -- Property, Plant, and Equipment Other (Net) (PPENO): double"""
return self.data["PPENO"]
@property
def PPENT(self) -> pd.Series:
"""PPENT -- Property, Plant and Equipment - Total (Net) (PPENT): double"""
return self.data["PPENT"]
@property
def PPEVBB(self) -> pd.Series:
"""PPEVBB -- Property, Plant and Equipment Beginning Balance (Schedule V) (PPEVBB): double"""
return self.data["PPEVBB"]
@property
def PPEVEB(self) -> pd.Series:
"""PPEVEB -- Property, Plant, and Equipment Ending Balance (Schedule V) (PPEVEB): double"""
return self.data["PPEVEB"]
@property
def PPEVO(self) -> pd.Series:
"""PPEVO -- Property, Plant, and Equipment Other Changes (Schedule V) (PPEVO): double"""
return self.data["PPEVO"]
@property
def PPEVR(self) -> pd.Series:
"""PPEVR -- Property, Plant and Equipment Retirements (Schedule V) (PPEVR): double"""
return self.data["PPEVR"]
@property
def PRC(self) -> pd.Series:
"""PRC -- Participation Rights Certificates (PRC): double"""
return self.data["PRC"]
@property
def PRODV(self) -> pd.Series:
"""PRODV -- Proposed Dividends (PRODV): double"""
return self.data["PRODV"]
@property
def PRVT(self) -> pd.Series:
"""PRVT -- Policy Reserves - Total (Statutory) (PRVT): double"""
return self.data["PRVT"]
@property
def PSTK(self) -> pd.Series:
"""PSTK -- Preferred/Preference Stock (Capital) - Total (PSTK): double"""
return self.data["PSTK"]
@property
def PSTKC(self) -> pd.Series:
"""PSTKC -- Preferred Stock Convertible (PSTKC): double"""
return self.data["PSTKC"]
@property
def PSTKL(self) -> pd.Series:
"""PSTKL -- Preferred Stock Liquidating Value (PSTKL): double"""
return self.data["PSTKL"]
@property
def PSTKN(self) -> pd.Series:
"""PSTKN -- Preferred/Preference Stock - Nonredeemable (PSTKN): double"""
return self.data["PSTKN"]
@property
def PSTKR(self) -> pd.Series:
"""PSTKR -- Preferred/Preference Stock - Redeemable (PSTKR): double"""
return self.data["PSTKR"]
@property
def PSTKRV(self) -> pd.Series:
"""PSTKRV -- Preferred Stock Redemption Value (PSTKRV): double"""
return self.data["PSTKRV"]
@property
def PVCL(self) -> pd.Series:
"""PVCL -- Provision - Credit Losses (Balance Sheet) (PVCL): double"""
return self.data["PVCL"]
@property
def PVPL(self) -> pd.Series:
"""PVPL -- Provision - Pension Liabilities (PVPL): double"""
return self.data["PVPL"]
@property
def PVT(self) -> pd.Series:
"""PVT -- Provisions - Total (PVT): double"""
return self.data["PVT"]
@property
def RADP(self) -> pd.Series:
"""RADP -- Reinsurance Assets - Deposits and Other (Insurance) (RADP): double"""
return self.data["RADP"]
@property
def RAGR(self) -> pd.Series:
"""RAGR -- Resale Agreements (RAGR): double"""
return self.data["RAGR"]
@property
def RARI(self) -> pd.Series:
"""RARI -- Reinsurance Assets - Receivable/Debtors (Insurance) (RARI): double"""
return self.data["RARI"]
@property
def RATI(self) -> pd.Series:
"""RATI -- Reinsurance Assets - Total (Insurance) (RATI): double"""
return self.data["RATI"]
@property
def RCL(self) -> pd.Series:
"""RCL -- Reserves for Credit Losses (Assets) (RCL): double"""
return self.data["RCL"]
@property
def RDP(self) -> pd.Series:
"""RDP -- Regulatory Deposits (RDP): double"""
return self.data["RDP"]
@property
def RE(self) -> pd.Series:
"""RE -- Retained Earnings (RE): double"""
return self.data["RE"]
@property
def REA(self) -> pd.Series:
"""REA -- Retained Earnings Restatement (REA): double"""
return self.data["REA"]
@property
def REAJO(self) -> pd.Series:
"""REAJO -- Retained Earnings Other Adjustments (REAJO): double"""
return self.data["REAJO"]
@property
def RECCO(self) -> pd.Series:
"""RECCO -- Receivables - Current - Other (RECCO): double"""
return self.data["RECCO"]
@property
def RECD(self) -> pd.Series:
"""RECD -- Receivables - Estimated Doubtful (RECD): double"""
return self.data["RECD"]
@property
def RECT(self) -> pd.Series:
"""RECT -- Receivables Total (RECT): double"""
return self.data["RECT"]
@property
def RECTA(self) -> pd.Series:
"""RECTA -- Retained Earnings Cumulative Translation Adjustment (RECTA): double"""
return self.data["RECTA"]
@property
def RECTR(self) -> pd.Series:
"""RECTR -- Receivables - Trade (RECTR): double"""
return self.data["RECTR"]
@property
def RECUB(self) -> pd.Series:
"""RECUB -- Unbilled Receivables (RECUB): double"""
return self.data["RECUB"]
@property
def RET(self) -> pd.Series:
"""RET -- Total RE Property (RET): double"""
return self.data["RET"]
@property
def REUNA(self) -> pd.Series:
"""REUNA -- Retained Earnings Unadjusted (REUNA): double"""
return self.data["REUNA"]
@property
def REUNR(self) -> pd.Series:
"""REUNR -- Retained Earnings Unrestricted (REUNR): double"""
return self.data["REUNR"]
@property
def RLL(self) -> pd.Series:
"""RLL -- Reserve for Loan/Asset Losses (RLL): double"""
return self.data["RLL"]
@property
def RLO(self) -> pd.Series:
"""RLO -- Reinsurance Liabilities - Other (RLO): double"""
return self.data["RLO"]
@property
def RLP(self) -> pd.Series:
"""RLP -- Reinsurance Liabilities - Payables/Creditors (RLP): double"""
return self.data["RLP"]
@property
def RLRI(self) -> pd.Series:
"""RLRI -- Reinsurers' Liability for Reserves (Insurance) (RLRI): double"""
return self.data["RLRI"]
@property
def RLT(self) -> pd.Series:
"""RLT -- Reinsurance Liabilities - Total (RLT): double"""
return self.data["RLT"]
@property
def RPAG(self) -> pd.Series:
"""RPAG -- Repurchase Agreements (RPAG): double"""
return self.data["RPAG"]
@property
def RREPS(self) -> pd.Series:
"""RREPS -- Reversal Restructuring/Acq Basic EPS Effect (RREPS): double"""
return self.data["RREPS"]
@property
def RVBCI(self) -> pd.Series:
"""RVBCI -- Reserves for Benefits - Life - Claims (Insurance) (RVBCI): double"""
return self.data["RVBCI"]
@property
def RVBPI(self) -> pd.Series:
"""RVBPI -- Reserves for Benefits - Life - Policy (Insurance) (RVBPI): double"""
return self.data["RVBPI"]
@property
def RVBTI(self) -> pd.Series:
"""RVBTI -- Reserves for Benefits - Life - Total (Insurance) (RVBTI): double"""
return self.data["RVBTI"]
@property
def RVDO(self) -> pd.Series:
"""RVDO -- Reserves - Distributable - Other (RVDO): double"""
return self.data["RVDO"]
@property
def RVDT(self) -> pd.Series:
"""RVDT -- Reserves - Distributable - Total (RVDT): double"""
return self.data["RVDT"]
@property
def RVEQT(self) -> pd.Series:
"""RVEQT -- Equity Reserves - Total (RVEQT): double"""
return self.data["RVEQT"]
@property
def RVLRV(self) -> pd.Series:
"""RVLRV -- Revaluation Reserve (RVLRV): double"""
return self.data["RVLRV"]
@property
def RVNO(self) -> pd.Series:
"""RVNO -- Reserves - Nondistributable - Other (RVNO): double"""
return self.data["RVNO"]
@property
def RVNT(self) -> pd.Series:
"""RVNT -- Reserves - Nondistributable - Total (RVNT): double"""
return self.data["RVNT"]
@property
def RVRI(self) -> pd.Series:
"""RVRI -- Reserves - Reinsurance (Insurance) (RVRI): double"""
return self.data["RVRI"]
@property
def RVSI(self) -> pd.Series:
"""RVSI -- Reserves - Sundry (Insurance) (RVSI): double"""
return self.data["RVSI"]
@property
def RVTI(self) -> pd.Series:
"""RVTI -- Reserves - Total (RVTI): double"""
return self.data["RVTI"]
@property
def RVTXR(self) -> pd.Series:
"""RVTXR -- Reserves - Tax-Regulated (RVTXR): double"""
return self.data["RVTXR"]
@property
def RVUPI(self) -> pd.Series:
"""RVUPI -- Reserves for Unearned Premiums (Insurance) (RVUPI): double"""
return self.data["RVUPI"]
@property
def RVUTX(self) -> pd.Series:
"""RVUTX -- Reserves - Untaxed (RVUTX): double"""
return self.data["RVUTX"]
@property
def SAA(self) -> pd.Series:
"""SAA -- Separate Account Assets (SAA): double"""
return self.data["SAA"]
@property
def SAL(self) -> pd.Series:
"""SAL -- Separate Account Liabilities (SAL): double"""
return self.data["SAL"]
@property
def SBDC(self) -> pd.Series:
"""SBDC -- Securities Borrowed and Deposited by Customers (SBDC): double"""
return self.data["SBDC"]
@property
def SC(self) -> pd.Series:
"""SC -- Securities In Custody (SC): double"""
return self.data["SC"]
@property
def SCO(self) -> pd.Series:
"""SCO -- Share Capital - Other (SCO): double"""
return self.data["SCO"]
@property
def SECU(self) -> pd.Series:
"""SECU -- Securities Gains (Losses) - Unrealized (SECU): double"""
return self.data["SECU"]
@property
def SEQ(self) -> pd.Series:
"""SEQ -- Stockholders' Equity - Total (SEQ): double"""
return self.data["SEQ"]
@property
def SEQO(self) -> pd.Series:
"""SEQO -- Other Stockholders Equity Adjustments (SEQO): double"""
return self.data["SEQO"]
@property
def SRT(self) -> pd.Series:
"""SRT -- Surplus - Total (Statutory) (SRT): double"""
return self.data["SRT"]
@property
def SSNP(self) -> pd.Series:
"""SSNP -- Securities Sold Not Yet Purchased (SSNP): double"""
return self.data["SSNP"]
@property
def STBO(self) -> pd.Series:
"""STBO -- Short-Term Borrowings - Other (STBO): double"""
return self.data["STBO"]
@property
def STIO(self) -> pd.Series:
"""STIO -- Short-Term Investments - Other (STIO): double"""
return self.data["STIO"]
@property
def TDSCD(self) -> pd.Series:
"""TDSCD -- Trading/Dealing Account Securities - Corporate Debt (TDSCD): double"""
return self.data["TDSCD"]
@property
def TDSCE(self) -> pd.Series:
"""TDSCE -- Trading/Dealing Account Securities - Corporate Equity (TDSCE): double"""
return self.data["TDSCE"]
@property
def TDSLG(self) -> pd.Series:
"""TDSLG -- Trading/Dealing Account Securities - Local Governments (TDSLG): double"""
return self.data["TDSLG"]
@property
def TDSMM(self) -> pd.Series:
"""TDSMM -- Trading/Dealing Account Securities - Money Market (TDSMM): double"""
return self.data["TDSMM"]
@property
def TDSNG(self) -> pd.Series:
"""TDSNG -- Trading/Dealing Account Securities - National Governments (TDSNG): double"""
return self.data["TDSNG"]
@property
def TDSO(self) -> pd.Series:
"""TDSO -- Trading/Dealing Account Securities - Other (TDSO): double"""
return self.data["TDSO"]
@property
def TDSS(self) -> pd.Series:
"""TDSS -- Trading/Dealing Account Securities - Sundry (TDSS): double"""
return self.data["TDSS"]
@property
def TDST(self) -> pd.Series:
"""TDST -- Trading/Dealing Account Securities - Total (TDST): double"""
return self.data["TDST"]
@property
def TLCF(self) -> pd.Series:
"""TLCF -- Tax Loss Carry Forward (TLCF): double"""
return self.data["TLCF"]
@property
def TRANSA(self) -> pd.Series:
"""TRANSA -- Cumulative Translation Adjustment (TRANSA): double"""
return self.data["TRANSA"]
@property
def TSA(self) -> pd.Series:
"""TSA -- Treasury Stock (Assets) (TSA): double"""
return self.data["TSA"]
@property
def TSO(self) -> pd.Series:
"""TSO -- Treasury Stock - Other Share Capital (TSO): double"""
return self.data["TSO"]
@property
def TSTK(self) -> pd.Series:
"""TSTK -- Treasury Stock - Total (All Capital) (TSTK): double"""
return self.data["TSTK"]
@property
def TSTKC(self) -> pd.Series:
"""TSTKC -- Treasury Stock - Common (TSTKC): double"""
return self.data["TSTKC"]
@property
def TSTKME(self) -> pd.Series:
"""TSTKME -- Treasury Stock Book Value Memo Entry (TSTKME): double"""
return self.data["TSTKME"]
@property
def TSTKP(self) -> pd.Series:
"""TSTKP -- Treasury Stock - Preferrred (TSTKP): double"""
return self.data["TSTKP"]
@property
def TXDB(self) -> pd.Series:
"""TXDB -- Deferred Taxes (Balance Sheet) (TXDB): double"""
return self.data["TXDB"]
@property
def TXDBA(self) -> pd.Series:
"""TXDBA -- Deferred Tax Asset - Long Term (TXDBA): double"""
return self.data["TXDBA"]
@property
def TXDBCA(self) -> pd.Series:
"""TXDBCA -- Deferred Tax Asset - Current (TXDBCA): double"""
return self.data["TXDBCA"]
@property
def TXDBCL(self) -> pd.Series:
"""TXDBCL -- Deferred Tax Liability - Current (TXDBCL): double"""
return self.data["TXDBCL"]
@property
def TXDITC(self) -> pd.Series:
"""TXDITC -- Deferred Taxes and Investment Tax Credit (TXDITC): double"""
return self.data["TXDITC"]
@property
def TXNDB(self) -> pd.Series:
"""TXNDB -- Net Deferred Tax Asset (Liab) - Total (TXNDB): double"""
return self.data["TXNDB"]
@property
def TXNDBA(self) -> pd.Series:
"""TXNDBA -- Net Deferred Tax Asset (TXNDBA): double"""
return self.data["TXNDBA"]
@property
def TXNDBL(self) -> pd.Series:
"""TXNDBL -- Net Deferred Tax Liability (TXNDBL): double"""
return self.data["TXNDBL"]
@property
def TXNDBR(self) -> pd.Series:
"""TXNDBR -- Deferred Tax Residual (TXNDBR): double"""
return self.data["TXNDBR"]
@property
def TXP(self) -> pd.Series:
"""TXP -- Income Taxes Payable (TXP): double"""
return self.data["TXP"]
@property
def TXR(self) -> pd.Series:
"""TXR -- Income Tax Refund (TXR): double"""
return self.data["TXR"]
@property
def UAOX(self) -> pd.Series:
"""UAOX -- Other Assets - Utility (UAOX): double"""
return self.data["UAOX"]
@property
def UAPT(self) -> pd.Series:
"""UAPT -- Accounts Payable - Utility (UAPT): double"""
return self.data["UAPT"]
@property
def UCAPS(self) -> pd.Series:
"""UCAPS -- Paid in Capital - Other (UCAPS): double"""
return self.data["UCAPS"]
@property
def UCCONS(self) -> pd.Series:
"""UCCONS -- Contributions in Aid of Construction (UCCONS): double"""
return self.data["UCCONS"]
@property
def UCEQ(self) -> pd.Series:
"""UCEQ -- Common Equity Total - Utility (UCEQ): double"""
return self.data["UCEQ"]
@property
def UCUSTAD(self) -> pd.Series:
"""UCUSTAD -- Customer Advances for Construction (UCUSTAD): double"""
return self.data["UCUSTAD"]
@property
def UDCOPRES(self) -> pd.Series:
"""UDCOPRES -- Deferred Credits and Operating Reserves - Other (UDCOPRES): double"""
return self.data["UDCOPRES"]
@property
def UDD(self) -> pd.Series:
"""UDD -- Debt (Debentures) (UDD): double"""
return self.data["UDD"]
@property
def UDMB(self) -> pd.Series:
"""UDMB -- Debt (Mortgage Bonds) - Utility (UDMB): double"""
return self.data["UDMB"]
@property
def UDOLT(self) -> pd.Series:
"""UDOLT -- Debt (Other Long-Term) - Utility (UDOLT): double"""
return self.data["UDOLT"]
@property
def UDPCO(self) -> pd.Series:
"""UDPCO -- Debt (Pollution Control Obligations) - Utility (UDPCO): double"""
return self.data["UDPCO"]
@property
def UI(self) -> pd.Series:
"""UI -- Unearned Income (UI): double"""
return self.data["UI"]
@property
def UINVT(self) -> pd.Series:
"""UINVT -- Inventories - Utility (UINVT): double"""
return self.data["UINVT"]
@property
def ULCM(self) -> pd.Series:
"""ULCM -- Current Liabilities - Miscellaneous (ULCM): double"""
return self.data["ULCM"]
@property
def ULCO(self) -> pd.Series:
"""ULCO -- Current Liabilities - Other - Utility (ULCO): double"""
return self.data["ULCO"]
@property
def UNL(self) -> pd.Series:
"""UNL -- Unappropriated Net Loss (UNL): double"""
return self.data["UNL"]
@property
def UNNP(self) -> pd.Series:
"""UNNP -- Unappropriated Net Profit (Stockholders' Equity) (UNNP): double"""
return self.data["UNNP"]
@property
def UNNPL(self) -> pd.Series:
"""UNNPL -- Unappropriated Net Profit (UNNPL): double"""
return self.data["UNNPL"]
@property
def UOPRES(self) -> pd.Series:
"""UOPRES -- Operating Reserves (UOPRES): double"""
return self.data["UOPRES"]
@property
def UPMCSTK(self) -> pd.Series:
"""UPMCSTK -- Premium on Common Stock* (UPMCSTK): double"""
return self.data["UPMCSTK"]
@property
def UPMPF(self) -> pd.Series:
"""UPMPF -- Premium on Preferred Stock* (UPMPF): double"""
return self.data["UPMPF"]
@property
def UPMPFS(self) -> pd.Series:
"""UPMPFS -- Premium on Preference Stock* (UPMPFS): double"""
return self.data["UPMPFS"]
@property
def UPMSUBP(self) -> pd.Series:
"""UPMSUBP -- Premium on Subsidiary Preferred Stock* (UPMSUBP): double"""
return self.data["UPMSUBP"]
@property
def UPSTK(self) -> pd.Series:
"""UPSTK -- Preferred Stock at Carrying Value (UPSTK): double"""
return self.data["UPSTK"]
@property
def UPSTKC(self) -> pd.Series:
"""UPSTKC -- Preference Stock at Carrying Value* (UPSTKC): double"""
return self.data["UPSTKC"]
@property
def UPSTKSF(self) -> pd.Series:
"""UPSTKSF -- Preferred/Preference Stock Sinking Fund Requirement (UPSTKSF): double"""
return self.data["UPSTKSF"]
@property
def URECT(self) -> pd.Series:
"""URECT -- Receivables (Net) (URECT): double"""
return self.data["URECT"]
@property
def URECTR(self) -> pd.Series:
"""URECTR -- Accounts Receivable - Trade - Utility (URECTR): double"""
return self.data["URECTR"]
@property
def UREVUB(self) -> pd.Series:
"""UREVUB -- Accrued Unbilled Revenues (Balance Sheet) (UREVUB): double"""
return self.data["UREVUB"]
@property
def USUBPSTK(self) -> pd.Series:
"""USUBPSTK -- Subsidiary Preferred Stock at Carrying Value (USUBPSTK): double"""
return self.data["USUBPSTK"]
@property
def VPAC(self) -> pd.Series:
"""VPAC -- Investments - Permanent - Associated Companies (VPAC): double"""
return self.data["VPAC"]
@property
def VPO(self) -> pd.Series:
"""VPO -- Investments - Permanent - Other (VPO): double"""
return self.data["VPO"]
@property
def WCAP(self) -> pd.Series:
"""WCAP -- Working Capital (Balance Sheet) (WCAP): double"""
return self.data["WCAP"]
@property
def XACC(self) -> pd.Series:
"""XACC -- Accrued Expenses (XACC): double"""
return self.data["XACC"]
@property
def XPP(self) -> pd.Series:
"""XPP -- Prepaid Expenses (XPP): double"""
return self.data["XPP"]
@property
def ACCHG(self) -> pd.Series:
"""ACCHG -- Accounting Changes Cumulative Effect (ACCHG): double"""
return self.data["ACCHG"]
@property
def ADPAC(self) -> pd.Series:
"""ADPAC -- Amortization of Deferred Policy Acquisition Costs (ADPAC): double"""
return self.data["ADPAC"]
@property
def AM(self) -> pd.Series:
"""AM -- Amortization of Intangibles (AM): double"""
return self.data["AM"]
@property
def AMDC(self) -> pd.Series:
"""AMDC -- Amortization of Deferred Charges (AMDC): double"""
return self.data["AMDC"]
@property
def AMGW(self) -> pd.Series:
"""AMGW -- Amortization of Goodwill (AMGW): double"""
return self.data["AMGW"]
@property
def AQA(self) -> pd.Series:
"""AQA -- Acquisition/Merger After-tax (AQA): double"""
return self.data["AQA"]
@property
def AQD(self) -> pd.Series:
"""AQD -- Acquisition/Merger Diluted EPS Effect (AQD): double"""
return self.data["AQD"]
@property
def AQEPS(self) -> pd.Series:
"""AQEPS -- Acquisition/Merger Basic EPS Effect (AQEPS): double"""
return self.data["AQEPS"]
@property
def AQI(self) -> pd.Series:
"""AQI -- Acquisitions Income Contribution (AQI): double"""
return self.data["AQI"]
@property
def AQP(self) -> pd.Series:
"""AQP -- Acquisition/Merger Pretax (AQP): double"""
return self.data["AQP"]
@property
def AQS(self) -> pd.Series:
"""AQS -- Acquisitions Sales Contribution (AQS): double"""
return self.data["AQS"]
@property
def ARCE(self) -> pd.Series:
"""ARCE -- As Reported Core After-tax (ARCE): double"""
return self.data["ARCE"]
@property
def ARCED(self) -> pd.Series:
"""ARCED -- As Reported Core Diluted EPS Effect (ARCED): double"""
return self.data["ARCED"]
@property
def ARCEEPS(self) -> pd.Series:
"""ARCEEPS -- As Reported Core Basic EPS Effect (ARCEEPS): double"""
return self.data["ARCEEPS"]
@property
def AUTXR(self) -> pd.Series:
"""AUTXR -- Appropriations to Untaxed Reserves (AUTXR): double"""
return self.data["AUTXR"]
@property
def BALR(self) -> pd.Series:
"""BALR -- Benefits Assumed - Life (BALR): double"""
return self.data["BALR"]
@property
def BANLR(self) -> pd.Series:
"""BANLR -- Benefits Assumed - Nonlife (BANLR): double"""
return self.data["BANLR"]
@property
def BATR(self) -> pd.Series:
"""BATR -- Benefits Assumed - Total (BATR): double"""
return self.data["BATR"]
@property
def BCEF(self) -> pd.Series:
"""BCEF -- Brokerage, Clearing and Exchange Fees (BCEF): double"""
return self.data["BCEF"]
@property
def BCLR(self) -> pd.Series:
"""BCLR -- Benefits Ceded - Life (BCLR): double"""
return self.data["BCLR"]
@property
def BCLTBL(self) -> pd.Series:
"""BCLTBL -- Benefits and Claims - Total (Business Line) (BCLTBL): double"""
return self.data["BCLTBL"]
@property
def BCNLR(self) -> pd.Series:
"""BCNLR -- Benefits Ceded - Nonlife (BCNLR): double"""
return self.data["BCNLR"]
@property
def BCRBL(self) -> pd.Series:
"""BCRBL -- Benefits and Claims - Reinsurance (Business Line) (BCRBL): double"""
return self.data["BCRBL"]
@property
def BCT(self) -> pd.Series:
"""BCT -- Benefits and Claims - Total (Insurance) (BCT): double"""
return self.data["BCT"]
@property
def BCTBL(self) -> pd.Series:
"""BCTBL -- Benefits and Claims - Other (Business Line) (BCTBL): double"""
return self.data["BCTBL"]
@property
def BCTR(self) -> pd.Series:
"""BCTR -- Benefits Ceded - Total (BCTR): double"""
return self.data["BCTR"]
@property
def BLTBL(self) -> pd.Series:
"""BLTBL -- Benefits - Life - Total (Business Line) (BLTBL): double"""
return self.data["BLTBL"]
@property
def CBI(self) -> pd.Series:
"""CBI -- Claims Incurred - Insurance (CBI): double"""
return self.data["CBI"]
@property
def CDPAC(self) -> pd.Series:
"""CDPAC -- Capitalized Deferred Polcy Acquisition Costs (CDPAC): double"""
return self.data["CDPAC"]
@property
def CFBD(self) -> pd.Series:
"""CFBD -- Commissions and Fees - (Broker/Dealer) (CFBD): double"""
return self.data["CFBD"]
@property
def CFERE(self) -> pd.Series:
"""CFERE -- Commissions and Fees - (Real Estate) (CFERE): double"""
return self.data["CFERE"]
@property
def CFO(self) -> pd.Series:
"""CFO -- Commissions and Fees - Other (CFO): double"""
return self.data["CFO"]
@property
def CFPDO(self) -> pd.Series:
"""CFPDO -- Commissions and Fees Paid - Other (CFPDO): double"""
return self.data["CFPDO"]
@property
def CGA(self) -> pd.Series:
"""CGA -- Capital Gains - After-Tax (CGA): double"""
return self.data["CGA"]
@property
def CGRI(self) -> pd.Series:
"""CGRI -- Capital Gains - Realized (Insurance) (CGRI): double"""
return self.data["CGRI"]
@property
def CGTI(self) -> pd.Series:
"""CGTI -- Capital Gains - Total (Insurance) (CGTI): double"""
return self.data["CGTI"]
@property
def CGUI(self) -> pd.Series:
"""CGUI -- Capital Gains - Unrealized (Insurance) (CGUI): double"""
return self.data["CGUI"]
@property
def CIBEGNI(self) -> pd.Series:
"""CIBEGNI -- Comp Inc - Beginning Net Income (CIBEGNI): double"""
return self.data["CIBEGNI"]
@property
def CICURR(self) -> pd.Series:
"""CICURR -- Comp Inc - Currency Trans Adj (CICURR): double"""
return self.data["CICURR"]
@property
def CIDERGL(self) -> pd.Series:
"""CIDERGL -- Comp Inc - Derivative Gains/Losses (CIDERGL): double"""
return self.data["CIDERGL"]
@property
def CIOTHER(self) -> pd.Series:
"""CIOTHER -- Comp Inc - Other Adj (CIOTHER): double"""
return self.data["CIOTHER"]
@property
def CIPEN(self) -> pd.Series:
"""CIPEN -- Comp Inc - Minimum Pension Adj (CIPEN): double"""
return self.data["CIPEN"]
@property
def CISECGL(self) -> pd.Series:
"""CISECGL -- Comp Inc - Securities Gains/Losses (CISECGL): double"""
return self.data["CISECGL"]
@property
def CITOTAL(self) -> pd.Series:
"""CITOTAL -- Comprehensive Income - Total (CITOTAL): double"""
return self.data["CITOTAL"]
@property
def CNLTBL(self) -> pd.Series:
"""CNLTBL -- Claims - Non-Life - Total (Business Line) (CNLTBL): double"""
return self.data["CNLTBL"]
@property
def COGS(self) -> pd.Series:
"""COGS -- Cost of Goods Sold (COGS): double"""
return self.data["COGS"]
@property
def CPCBL(self) -> pd.Series:
"""CPCBL -- Commercial Property and Casualty Claims (Business Line) (CPCBL): double"""
return self.data["CPCBL"]
@property
def CPDOI(self) -> pd.Series:
"""CPDOI -- Claims Paid - Other (CPDOI): double"""
return self.data["CPDOI"]
@property
def CPNLI(self) -> pd.Series:
"""CPNLI -- Claims Paid - Non-Life (CPNLI): double"""
return self.data["CPNLI"]
@property
def CPPBL(self) -> pd.Series:
"""CPPBL -- Commercial Property and Casualty Premiums (Business Line) (CPPBL): double"""
return self.data["CPPBL"]
@property
def CPREI(self) -> pd.Series:
"""CPREI -- Claims Paid - Reinsurance (CPREI): double"""
return self.data["CPREI"]
@property
def CSTKE(self) -> pd.Series:
"""CSTKE -- Common Stock Equivalents - Dollar Savings (CSTKE): double"""
return self.data["CSTKE"]
@property
def DBI(self) -> pd.Series:
"""DBI -- Death Benefits - Insurance (DBI): double"""
return self.data["DBI"]
@property
def DFXA(self) -> pd.Series:
"""DFXA -- Depreciation of Tangible Fixed Assets (DFXA): double"""
return self.data["DFXA"]
@property
def DILADJ(self) -> pd.Series:
"""DILADJ -- Dilution Adjustment (DILADJ): double"""
return self.data["DILADJ"]
@property
def DILAVX(self) -> pd.Series:
"""DILAVX -- Dilution Available Excluding Extraordinary Items (DILAVX): double"""
return self.data["DILAVX"]
@property
def DO(self) -> pd.Series:
"""DO -- Discontinued Operations (DO): double"""
return self.data["DO"]
@property
def DONR(self) -> pd.Series:
"""DONR -- Nonrecurring Disc Operations (DONR): double"""
return self.data["DONR"]
@property
def DP(self) -> pd.Series:
"""DP -- Depreciation and Amortization (DP): double"""
return self.data["DP"]
@property
def DPRET(self) -> pd.Series:
"""DPRET -- Depr/Amort of Property (DPRET): double"""
return self.data["DPRET"]
@property
def DTEA(self) -> pd.Series:
"""DTEA -- Extinguishment of Debt After-tax (DTEA): double"""
return self.data["DTEA"]
@property
def DTED(self) -> pd.Series:
"""DTED -- Extinguishment of Debt Diluted EPS Effect (DTED): double"""
return self.data["DTED"]
@property
def DTEEPS(self) -> pd.Series:
"""DTEEPS -- Extinguishment of Debt Basic EPS Effect (DTEEPS): double"""
return self.data["DTEEPS"]
@property
def DTEP(self) -> pd.Series:
"""DTEP -- Extinguishment of Debt Pretax (DTEP): double"""
return self.data["DTEP"]
@property
def DVC(self) -> pd.Series:
"""DVC -- Dividends Common/Ordinary (DVC): double"""
return self.data["DVC"]
@property
def DVDNP(self) -> pd.Series:
"""DVDNP -- Dividends Declared and Not Provided (DVDNP): double"""
return self.data["DVDNP"]
@property
def DVP(self) -> pd.Series:
"""DVP -- Dividends - Preferred/Preference (DVP): double"""
return self.data["DVP"]
@property
def DVPD(self) -> pd.Series:
"""DVPD -- Cash Dividends Paid (DVPD): double"""
return self.data["DVPD"]
@property
def DVPDP(self) -> pd.Series:
"""DVPDP -- Dividends and Bonuses Paid Policyholders (DVPDP): double"""
return self.data["DVPDP"]
@property
def DVRPIV(self) -> pd.Series:
"""DVRPIV -- Dividends Received from Permanent Investments (DVRPIV): double"""
return self.data["DVRPIV"]
@property
def DVRRE(self) -> pd.Series:
"""DVRRE -- Development Revenue (Real Estate) (DVRRE): double"""
return self.data["DVRRE"]
@property
def DVSCO(self) -> pd.Series:
"""DVSCO -- Dividends - Share Capital - Other (DVSCO): double"""
return self.data["DVSCO"]
@property
def DVT(self) -> pd.Series:
"""DVT -- Dividends - Total (DVT): double"""
return self.data["DVT"]
@property
def EBIT(self) -> pd.Series:
"""EBIT -- Earnings Before Interest and Taxes (EBIT): double"""
return self.data["EBIT"]
@property
def EBITDA(self) -> pd.Series:
"""EBITDA -- Earnings Before Interest (EBITDA): double"""
return self.data["EBITDA"]
@property
def EIEA(self) -> pd.Series:
"""EIEA -- Equity in Earnings - After-Tax (EIEA): double"""
return self.data["EIEA"]
@property
def EMOL(self) -> pd.Series:
"""EMOL -- Directors' Emoluments (EMOL): double"""
return self.data["EMOL"]
@property
def EPSFI(self) -> pd.Series:
"""EPSFI -- Earnings Per Share (Diluted) Including Extraordinary Items (EPSFI): double"""
return self.data["EPSFI"]
@property
def EPSFX(self) -> pd.Series:
"""EPSFX -- Earnings Per Share (Diluted) Excluding Extraordinary Items (EPSFX): double"""
return self.data["EPSFX"]
@property
def EPSPI(self) -> pd.Series:
"""EPSPI -- Earnings Per Share (Basic) Including Extraordinary Items (EPSPI): double"""
return self.data["EPSPI"]
@property
def EPSPX(self) -> pd.Series:
"""EPSPX -- Earnings Per Share (Basic) Excluding Extraordinary Items (EPSPX): double"""
return self.data["EPSPX"]
@property
def ESUB(self) -> pd.Series:
"""ESUB -- Equity in Earnings - Unconsolidated Subsidiaries (ESUB): double"""
return self.data["ESUB"]
@property
def FATD(self) -> pd.Series:
"""FATD -- Fixed Assets and Investments - Disposals - Gain (Loss) (FATD): double"""
return self.data["FATD"]
@property
def FCA(self) -> pd.Series:
"""FCA -- Foreign Exchange Income (Loss) (FCA): double"""
return self.data["FCA"]
@property
def FFO(self) -> pd.Series:
"""FFO -- Funds From Operations (REIT) (FFO): double"""
return self.data["FFO"]
@property
def GBBL(self) -> pd.Series:
"""GBBL -- Group Benefits (Business Line) (GBBL): double"""
return self.data["GBBL"]
@property
def GDWLAM(self) -> pd.Series:
"""GDWLAM -- Goodwill Amortization (GDWLAM): double"""
return self.data["GDWLAM"]
@property
def GDWLIA(self) -> pd.Series:
"""GDWLIA -- Impairments of Goodwill After-tax (GDWLIA): double"""
return self.data["GDWLIA"]
@property
def GDWLID(self) -> pd.Series:
"""GDWLID -- Impairments of Goodwill Diluted EPS Effect (GDWLID): double"""
return self.data["GDWLID"]
@property
def GDWLIEPS(self) -> pd.Series:
"""GDWLIEPS -- Impairments of Goodwill Basic EPS Effect (GDWLIEPS): double"""
return self.data["GDWLIEPS"]
@property
def GDWLIP(self) -> pd.Series:
"""GDWLIP -- Impairments of Goodwill Pretax (GDWLIP): double"""
return self.data["GDWLIP"]
@property
def GLA(self) -> pd.Series:
"""GLA -- Gain/Loss After-tax (GLA): double"""
return self.data["GLA"]
@property
def GLCEA(self) -> pd.Series:
"""GLCEA -- Gain/Loss on Sale (Core Earnings Adjusted) After-tax (GLCEA): double"""
return self.data["GLCEA"]
@property
def GLCED(self) -> pd.Series:
"""GLCED -- Gain/Loss on Sale (Core Earnings Adjusted) Diluted EPS (GLCED): double"""
return self.data["GLCED"]
@property
def GLCEEPS(self) -> pd.Series:
"""GLCEEPS -- Gain/Loss on Sale (Core Earnings Adjusted) Basic EPS Effect (GLCEEPS): double"""
return self.data["GLCEEPS"]
@property
def GLCEP(self) -> pd.Series:
"""GLCEP -- Gain/Loss on Sale (Core Earnings Adjusted) Pretax (GLCEP): double"""
return self.data["GLCEP"]
@property
def GLD(self) -> pd.Series:
"""GLD -- Gain/Loss Diluted EPS Effect (GLD): double"""
return self.data["GLD"]
@property
def GLEPS(self) -> pd.Series:
"""GLEPS -- Gain/Loss Basic EPS Effect (GLEPS): double"""
return self.data["GLEPS"]
@property
def GLP(self) -> pd.Series:
"""GLP -- Gain/Loss Pretax (GLP): double"""
return self.data["GLP"]
@property
def GP(self) -> pd.Series:
"""GP -- Gross Profit (Loss) (GP): double"""
return self.data["GP"]
@property
def GPHBL(self) -> pd.Series:
"""GPHBL -- Group Premiums - Health (Business Line) (GPHBL): double"""
return self.data["GPHBL"]
@property
def GPLBL(self) -> pd.Series:
"""GPLBL -- Group Premiums - Life (Business Line) (GPLBL): double"""
return self.data["GPLBL"]
@property
def GPOBL(self) -> pd.Series:
"""GPOBL -- Group Premiums - Other (Business Line) (GPOBL): double"""
return self.data["GPOBL"]
@property
def GPRBL(self) -> pd.Series:
"""GPRBL -- Group Premiums - Retirement Benefits (Business Line) (GPRBL): double"""
return self.data["GPRBL"]
@property
def GPTBL(self) -> pd.Series:
"""GPTBL -- Group Premiums - Total (Business Line) (GPTBL): double"""
return self.data["GPTBL"]
@property
def GWO(self) -> pd.Series:
"""GWO -- Goodwill Written Off (GWO): double"""
return self.data["GWO"]
@property
def HEDGEGL(self) -> pd.Series:
"""HEDGEGL -- Gain/Loss on Ineffective Hedges (HEDGEGL): double"""
return self.data["HEDGEGL"]
@property
def IB(self) -> pd.Series:
"""IB -- Income Before Extraordinary Items (IB): double"""
return self.data["IB"]
@property
def IBADJ(self) -> pd.Series:
"""IBADJ -- Income Before Extraordinary Items Adjusted for Common Stock Equivalents (IBADJ): double"""
return self.data["IBADJ"]
@property
def IBBL(self) -> pd.Series:
"""IBBL -- Individual Benefits (Business Line) (IBBL): double"""
return self.data["IBBL"]
@property
def IBCOM(self) -> pd.Series:
"""IBCOM -- Income Before Extraordinary Items Available for Common (IBCOM): double"""
return self.data["IBCOM"]
@property
def IBKI(self) -> pd.Series:
"""IBKI -- Investment Banking Income (IBKI): double"""
return self.data["IBKI"]
@property
def IDIIS(self) -> pd.Series:
"""IDIIS -- Interest and Dividend Income - Investment Securities (IDIIS): double"""
return self.data["IDIIS"]
@property
def IDILB(self) -> pd.Series:
"""IDILB -- Interest and Dividend Income - Loans/Claims/Advances - Banks (IDILB): double"""
return self.data["IDILB"]
@property
def IDILC(self) -> pd.Series:
"""IDILC -- Interest and Dividend Income - Loans/Claims/Advances - Customers (IDILC): double"""
return self.data["IDILC"]
@property
def IDIS(self) -> pd.Series:
"""IDIS -- Interest and Dividend Income - Sundry (IDIS): double"""
return self.data["IDIS"]
@property
def IDIST(self) -> pd.Series:
"""IDIST -- Interest and Dividend Income - Short-Term Investments (IDIST): double"""
return self.data["IDIST"]
@property
def IDIT(self) -> pd.Series:
"""IDIT -- Interest and Related Income - Total (IDIT): double"""
return self.data["IDIT"]
@property
def IDITS(self) -> pd.Series:
"""IDITS -- Interest and Dividend Income - Trading Securities (IDITS): double"""
return self.data["IDITS"]
@property
def IIRE(self) -> pd.Series:
"""IIRE -- Investment Income (Real Estate) (IIRE): double"""
return self.data["IIRE"]
@property
def INITB(self) -> pd.Series:
"""INITB -- Income - Non-interest - Total (Bank) (INITB): double"""
return self.data["INITB"]
@property
def INTC(self) -> pd.Series:
"""INTC -- Interest Capitalized (INTC): double"""
return self.data["INTC"]
@property
def IOBD(self) -> pd.Series:
"""IOBD -- Income - Other (Broker Dealer) (IOBD): double"""
return self.data["IOBD"]
@property
def IOI(self) -> pd.Series:
"""IOI -- Income - Other (Insurance) (IOI): double"""
return self.data["IOI"]
@property
def IORE(self) -> pd.Series:
"""IORE -- Income - Other (Real Estate) (IORE): double"""
return self.data["IORE"]
@property
def IPABL(self) -> pd.Series:
"""IPABL -- Individual Premiums - Annuity (Business Line) (IPABL): double"""
return self.data["IPABL"]
@property
def IPHBL(self) -> pd.Series:
"""IPHBL -- Individual Premiums - Health (Business Line) (IPHBL): double"""
return self.data["IPHBL"]
@property
def IPLBL(self) -> pd.Series:
"""IPLBL -- Individual Premiums - Life (Business Line) (IPLBL): double"""
return self.data["IPLBL"]
@property
def IPOBL(self) -> pd.Series:
"""IPOBL -- Individual Premiums - Other (Business Line) (IPOBL): double"""
return self.data["IPOBL"]
@property
def IPTBL(self) -> pd.Series:
"""IPTBL -- Individual Premiums - Total (Business Line) (IPTBL): double"""
return self.data["IPTBL"]
@property
def IPTI(self) -> pd.Series:
"""IPTI -- Insurance Premiums - Total (Insurance) (IPTI): double"""
return self.data["IPTI"]
@property
def IREI(self) -> pd.Series:
"""IREI -- Interest and Related Income - Reinsurance (Insurance) (IREI): double"""
return self.data["IREI"]
@property
def IRENT(self) -> pd.Series:
"""IRENT -- Rental Income (IRENT): double"""
return self.data["IRENT"]
@property
def IRII(self) -> pd.Series:
"""IRII -- Interest and Related Income (Insurance) (IRII): double"""
return self.data["IRII"]
@property
def IRLI(self) -> pd.Series:
"""IRLI -- Interest and Related Income - Life (Insurance) (IRLI): double"""
return self.data["IRLI"]
@property
def IRNLI(self) -> pd.Series:
"""IRNLI -- Interest and Related Income - Non-Life (Insurance) (IRNLI): double"""
return self.data["IRNLI"]
@property
def IRSI(self) -> pd.Series:
"""IRSI -- Interest and Related Income - Sundry (Insurance) (IRSI): double"""
return self.data["IRSI"]
@property
def ISGR(self) -> pd.Series:
"""ISGR -- Investment Securities - Gain (Loss) - Realized (ISGR): double"""
return self.data["ISGR"]
@property
def ISGT(self) -> pd.Series:
"""ISGT -- Investment Securities - Gain (Loss) - Total (ISGT): double"""
return self.data["ISGT"]
@property
def ISGU(self) -> pd.Series:
"""ISGU -- Investment Securities - Gain (Loss) - Unrealized (ISGU): double"""
return self.data["ISGU"]
@property
def ITCI(self) -> pd.Series:
"""ITCI -- Investment Tax Credit (Income Account) (ITCI): double"""
return self.data["ITCI"]
@property
def IVI(self) -> pd.Series:
"""IVI -- Investment Income - Total (Insurance) (IVI): double"""
return self.data["IVI"]
@property
def LI(self) -> pd.Series:
"""LI -- Leasing Income (LI): double"""
return self.data["LI"]
@property
def LLRCI(self) -> pd.Series:
"""LLRCI -- Loan Loss Recoveries - Credited to Income (LLRCI): double"""
return self.data["LLRCI"]
@property
def LLRCR(self) -> pd.Series:
"""LLRCR -- Loan Loss Recoveries - Credited to Reserves (LLRCR): double"""
return self.data["LLRCR"]
@property
def LLWOCI(self) -> pd.Series:
"""LLWOCI -- Loan Loss Written Off - Charged to Income (LLWOCI): double"""
return self.data["LLWOCI"]
@property
def LLWOCR(self) -> pd.Series:
"""LLWOCR -- Loan Loss Written Off - Charged to Reserves (LLWOCR): double"""
return self.data["LLWOCR"]
@property
def LST(self) -> pd.Series:
"""LST -- Life Insurance Surrenders and Terminations (LST): double"""
return self.data["LST"]
@property
def MII(self) -> pd.Series:
"""MII -- Minority Interest (Income Account) (MII): double"""
return self.data["MII"]
@property
def NCO(self) -> pd.Series:
"""NCO -- Net Charge-Offs (NCO): double"""
return self.data["NCO"]
@property
def NFSR(self) -> pd.Series:
"""NFSR -- Non-Financial Services Revenue (NFSR): double"""
return self.data["NFSR"]
@property
def NI(self) -> pd.Series:
"""NI -- Net Income (Loss) (NI): double"""
return self.data["NI"]
@property
def NIADJ(self) -> pd.Series:
"""NIADJ -- Net Income Adjusted for Common/Ordinary Stock (Capital) Equivalents (NIADJ): double"""
return self.data["NIADJ"]
@property
def NIECI(self) -> pd.Series:
"""NIECI -- Net Income Effect Capitalized Interest (NIECI): double"""
return self.data["NIECI"]
@property
def NIINT(self) -> pd.Series:
"""NIINT -- Net Interest Income (NIINT): double"""
return self.data["NIINT"]
@property
def NIIT(self) -> pd.Series:
"""NIIT -- Net Interest Income (Tax Equivalent) (NIIT): double"""
return self.data["NIIT"]
@property
def NIM(self) -> pd.Series:
"""NIM -- Net Interest Margin (NIM): double"""
return self.data["NIM"]
@property
def NIO(self) -> pd.Series:
"""NIO -- Net Items - Other (NIO): double"""
return self.data["NIO"]
@property
def NIT(self) -> pd.Series:
"""NIT -- Net Item - Total (NIT): double"""
return self.data["NIT"]
@property
def NITS(self) -> pd.Series:
"""NITS -- Net Income - Total (Statutory) (NITS): double"""
return self.data["NITS"]
@property
def NOPI(self) -> pd.Series:
"""NOPI -- Nonoperating Income (Expense) (NOPI): double"""
return self.data["NOPI"]
@property
def NOPIO(self) -> pd.Series:
"""NOPIO -- Nonoperating Income (Expense) Other (NOPIO): double"""
return self.data["NOPIO"]
@property
def NRTXT(self) -> pd.Series:
"""NRTXT -- Nonrecurring Income Taxes After-tax (NRTXT): double"""
return self.data["NRTXT"]
@property
def NRTXTD(self) -> pd.Series:
"""NRTXTD -- Nonrecurring Income Tax Diluted EPS Effect (NRTXTD): double"""
return self.data["NRTXTD"]
@property
def NRTXTEPS(self) -> pd.Series:
"""NRTXTEPS -- Nonrecurring Income Tax Basic EPS Effect (NRTXTEPS): double"""
return self.data["NRTXTEPS"]
@property
def OIADP(self) -> pd.Series:
"""OIADP -- Operating Income After Depreciation (OIADP): double"""
return self.data["OIADP"]
@property
def OIBDP(self) -> pd.Series:
"""OIBDP -- Operating Income Before Depreciation (OIBDP): double"""
return self.data["OIBDP"]
@property
def OPEPS(self) -> pd.Series:
"""OPEPS -- Earnings Per Share from Operations (OPEPS): double"""
return self.data["OPEPS"]
@property
def OPILI(self) -> pd.Series:
"""OPILI -- Operating Income - Life (OPILI): double"""
return self.data["OPILI"]
@property
def OPINCAR(self) -> pd.Series:
"""OPINCAR -- Operating Income - As Reported (OPINCAR): double"""
return self.data["OPINCAR"]
@property
def OPINI(self) -> pd.Series:
"""OPINI -- Operating Income - Non-Life (OPINI): double"""
return self.data["OPINI"]
@property
def OPIOI(self) -> pd.Series:
"""OPIOI -- Operating Income - Other (OPIOI): double"""
return self.data["OPIOI"]
@property
def OPIRI(self) -> pd.Series:
"""OPIRI -- Operating Income - Reinsurance (OPIRI): double"""
return self.data["OPIRI"]
@property
def OPITI(self) -> pd.Series:
"""OPITI -- Operating Income - Total (OPITI): double"""
return self.data["OPITI"]
@property
def OPREPSX(self) -> pd.Series:
"""OPREPSX -- Earnings Per Share Diluted from Operations (OPREPSX): double"""
return self.data["OPREPSX"]
@property
def PALR(self) -> pd.Series:
"""PALR -- Premiums Assumed - Life (PALR): double"""
return self.data["PALR"]
@property
def PANLR(self) -> pd.Series:
"""PANLR -- Premiums Assumed - Nonlife (PANLR): double"""
return self.data["PANLR"]
@property
def PATR(self) -> pd.Series:
"""PATR -- Premiums Assumed - Total (PATR): double"""
return self.data["PATR"]
@property
def PCL(self) -> pd.Series:
"""PCL -- Provision - Credit Losses (Income Account) (PCL): double"""
return self.data["PCL"]
@property
def PCLR(self) -> pd.Series:
"""PCLR -- Premiums Ceded - Life (PCLR): double"""
return self.data["PCLR"]
@property
def PCNLR(self) -> pd.Series:
"""PCNLR -- Premiums Ceded - Nonlife (PCNLR): double"""
return self.data["PCNLR"]
@property
def PCTR(self) -> pd.Series:
"""PCTR -- Premiums Ceded - Total (PCTR): double"""
return self.data["PCTR"]
@property
def PI(self) -> pd.Series:
"""PI -- Pretax Income (PI): double"""
return self.data["PI"]
@property
def PIDOM(self) -> pd.Series:
"""PIDOM -- Pretax Income Domestic (PIDOM): double"""
return self.data["PIDOM"]
@property
def PIFO(self) -> pd.Series:
"""PIFO -- Pretax Income Foreign (PIFO): double"""
return self.data["PIFO"]
@property
def PLL(self) -> pd.Series:
"""PLL -- Provision for Loan/Asset Losses (PLL): double"""
return self.data["PLL"]
@property
def PLTBL(self) -> pd.Series:
"""PLTBL -- Premiums - Life - Total (Business Line) (PLTBL): double"""
return self.data["PLTBL"]
@property
def PNCA(self) -> pd.Series:
"""PNCA -- Core Pension Adjustment (PNCA): double"""
return self.data["PNCA"]
@property
def PNCAD(self) -> pd.Series:
"""PNCAD -- Core Pension Adjustment Diluted EPS Effect (PNCAD): double"""
return self.data["PNCAD"]
@property
def PNCAEPS(self) -> pd.Series:
"""PNCAEPS -- Core Pension Adjustment Basic EPS Effect (PNCAEPS): double"""
return self.data["PNCAEPS"]
@property
def PNCIA(self) -> pd.Series:
"""PNCIA -- Core Pension Interest Adjustment After-tax (PNCIA): double"""
return self.data["PNCIA"]
@property
def PNCID(self) -> pd.Series:
"""PNCID -- Core Pension Interest Adjustment Diluted EPS Effect (PNCID): double"""
return self.data["PNCID"]
@property
def PNCIEPS(self) -> pd.Series:
"""PNCIEPS -- Core Pension Interest Adjustment Basic EPS Effect (PNCIEPS): double"""
return self.data["PNCIEPS"]
@property
def PNCIP(self) -> pd.Series:
"""PNCIP -- Core Pension Interest Adjustment Pretax (PNCIP): double"""
return self.data["PNCIP"]
@property
def PNCWIA(self) -> pd.Series:
"""PNCWIA -- Core Pension w/o Interest Adjustment After-tax (PNCWIA): double"""
return self.data["PNCWIA"]
@property
def PNCWID(self) -> pd.Series:
"""PNCWID -- Core Pension w/o Interest Adjustment Diluted EPS Effect (PNCWID): double"""
return self.data["PNCWID"]
@property
def PNCWIEPS(self) -> pd.Series:
"""PNCWIEPS -- Core Pension w/o Interest Adjustment Basic EPS Effect (PNCWIEPS): double"""
return self.data["PNCWIEPS"]
@property
def PNCWIP(self) -> pd.Series:
"""PNCWIP -- Core Pension w/o Interest Adjustment Pretax (PNCWIP): double"""
return self.data["PNCWIP"]
@property
def PNLBL(self) -> pd.Series:
"""PNLBL -- Premiums - Nonlife - Total (Business Line) (PNLBL): double"""
return self.data["PNLBL"]
@property
def PNLI(self) -> pd.Series:
"""PNLI -- Premiums Written - Non-Life (PNLI): double"""
return self.data["PNLI"]
@property
def POBL(self) -> pd.Series:
"""POBL -- Premiums - Other (Business Line) (POBL): double"""
return self.data["POBL"]
@property
def PPCBL(self) -> pd.Series:
"""PPCBL -- Personal Property and Casualty Claims (Business Line) (PPCBL): double"""
return self.data["PPCBL"]
@property
def PPPABL(self) -> pd.Series:
"""PPPABL -- Personal Property and Casualty Premiums - Automobile (Business Line) (PPPABL): double"""
return self.data["PPPABL"]
@property
def PPPHBL(self) -> pd.Series:
"""PPPHBL -- Personal Property and Casualty Premiums - Homeowners (Business Line) (PPPHBL): double"""
return self.data["PPPHBL"]
@property
def PPPOBL(self) -> pd.Series:
"""PPPOBL -- Personal Property and Casualty Premiums - Other (Business Line) (PPPOBL): double"""
return self.data["PPPOBL"]
@property
def PPPTBL(self) -> pd.Series:
"""PPPTBL -- Personal Property & Casualty Premiums - Total (Business Line) (PPPTBL): double"""
return self.data["PPPTBL"]
@property
def PRCA(self) -> pd.Series:
"""PRCA -- Core Post Retirement Adjustment (PRCA): double"""
return self.data["PRCA"]
@property
def PRCAD(self) -> pd.Series:
"""PRCAD -- Core Post Retirement Adjustment Diluted EPS Effect (PRCAD): double"""
return self.data["PRCAD"]
@property
def PRCAEPS(self) -> pd.Series:
"""PRCAEPS -- Core Post Retirement Adjustment Basic EPS Effect (PRCAEPS): double"""
return self.data["PRCAEPS"]
@property
def PREBL(self) -> pd.Series:
"""PREBL -- Premiums - Reinsurance (Business Line) (PREBL): double"""
return self.data["PREBL"]
@property
def PRI(self) -> pd.Series:
"""PRI -- Premiums Written - Reinsurance (PRI): double"""
return self.data["PRI"]
@property
def PTBL(self) -> pd.Series:
"""PTBL -- Premiums - Total (Business Line) (PTBL): double"""
return self.data["PTBL"]
@property
def PTRAN(self) -> pd.Series:
"""PTRAN -- Principal Transactions (PTRAN): double"""
return self.data["PTRAN"]
@property
def PVO(self) -> pd.Series:
"""PVO -- Provision - Other (PVO): double"""
return self.data["PVO"]
@property
def PVON(self) -> pd.Series:
"""PVON -- Provisions - Other (Net) (PVON): double"""
return self.data["PVON"]
@property
def PWOI(self) -> pd.Series:
"""PWOI -- Premiums Written - Other (PWOI): double"""
return self.data["PWOI"]
@property
def RCA(self) -> pd.Series:
"""RCA -- Restructuring Costs After-tax (RCA): double"""
return self.data["RCA"]
@property
def RCD(self) -> pd.Series:
"""RCD -- Restructuring Costs Diluted EPS Effect (RCD): double"""
return self.data["RCD"]
@property
def RCEPS(self) -> pd.Series:
"""RCEPS -- Restructuring Costs Basic EPS Effect (RCEPS): double"""
return self.data["RCEPS"]
@property
def RCP(self) -> pd.Series:
"""RCP -- Restructuring Costs Pretax (RCP): double"""
return self.data["RCP"]
@property
def RDIP(self) -> pd.Series:
"""RDIP -- In Process R&D Expense (RDIP): double"""
return self.data["RDIP"]
@property
def RDIPA(self) -> pd.Series:
"""RDIPA -- In Process R&D Expense After-tax (RDIPA): double"""
return self.data["RDIPA"]
@property
def RDIPD(self) -> pd.Series:
"""RDIPD -- In Process R&D Expense Diluted EPS Effect (RDIPD): double"""
return self.data["RDIPD"]
@property
def RDIPEPS(self) -> pd.Series:
"""RDIPEPS -- In Process R&D Expense Basic EPS Effect (RDIPEPS): double"""
return self.data["RDIPEPS"]
@property
def REVT(self) -> pd.Series:
"""REVT -- Revenue - Total (REVT): double"""
return self.data["REVT"]
@property
def RIS(self) -> pd.Series:
"""RIS -- Revenue/Income - Sundry (RIS): double"""
return self.data["RIS"]
@property
def RMUM(self) -> pd.Series:
"""RMUM -- Auditors' Remuneraton (RMUM): double"""
return self.data["RMUM"]
@property
def RRA(self) -> pd.Series:
"""RRA -- Reversal Restructruring/Acquisition Aftertax (RRA): double"""
return self.data["RRA"]
@property
def RRD(self) -> pd.Series:
"""RRD -- Reversal Restructuring/Acq Diluted EPS Effect (RRD): double"""
return self.data["RRD"]
@property
def RRP(self) -> pd.Series:
"""RRP -- Reversal Restructruring/Acquisition Pretax (RRP): double"""
return self.data["RRP"]
@property
def SALE(self) -> pd.Series:
"""SALE -- Sales/Turnover (Net) (SALE): double"""
return self.data["SALE"]
@property
def SETA(self) -> pd.Series:
"""SETA -- Settlement (Litigation/Insurance) After-tax (SETA): double"""
return self.data["SETA"]
@property
def SETD(self) -> pd.Series:
"""SETD -- Settlement (Litigation/Insurance) Diluted EPS Effect (SETD): double"""
return self.data["SETD"]
@property
def SETEPS(self) -> pd.Series:
"""SETEPS -- Settlement (Litigation/Insurance) Basic EPS Effect (SETEPS): double"""
return self.data["SETEPS"]
@property
def SETP(self) -> pd.Series:
"""SETP -- Settlement (Litigation/Insurance) Pretax (SETP): double"""
return self.data["SETP"]
@property
def SPCE(self) -> pd.Series:
"""SPCE -- S&P Core Earnings (SPCE): double"""
return self.data["SPCE"]
@property
def SPCED(self) -> pd.Series:
"""SPCED -- S&P Core Earnings EPS Diluted (SPCED): double"""
return self.data["SPCED"]
@property
def SPCEEPS(self) -> pd.Series:
"""SPCEEPS -- S&P Core Earnings EPS Basic (SPCEEPS): double"""
return self.data["SPCEEPS"]
@property
def SPI(self) -> pd.Series:
"""SPI -- Special Items (SPI): double"""
return self.data["SPI"]
@property
def SPID(self) -> pd.Series:
"""SPID -- Other Special Items Diluted EPS Effect (SPID): double"""
return self.data["SPID"]
@property
def SPIEPS(self) -> pd.Series:
"""SPIEPS -- Other Special Items Basic EPS Effect (SPIEPS): double"""
return self.data["SPIEPS"]
@property
def SPIOA(self) -> pd.Series:
"""SPIOA -- Other Special Items After-tax (SPIOA): double"""
return self.data["SPIOA"]
@property
def SPIOP(self) -> pd.Series:
"""SPIOP -- Other Special Items Pretax (SPIOP): double"""
return self.data["SPIOP"]
@property
def SRET(self) -> pd.Series:
"""SRET -- Gain/Loss on Sale of Property (SRET): double"""
return self.data["SRET"]
@property
def STKCO(self) -> pd.Series:
"""STKCO -- Stock Compensation Expense (STKCO): double"""
return self.data["STKCO"]
@property
def STKCPA(self) -> pd.Series:
"""STKCPA -- After-tax stock compensation (STKCPA): double"""
return self.data["STKCPA"]
@property
def TDSG(self) -> pd.Series:
"""TDSG -- Trading/Dealing Securities - Gain (Loss) (TDSG): double"""
return self.data["TDSG"]
@property
def TF(self) -> pd.Series:
"""TF -- Trust Fees (TF): double"""
return self.data["TF"]
@property
def TIE(self) -> pd.Series:
"""TIE -- Interest Expense Total (Financial Services) (TIE): double"""
return self.data["TIE"]
@property
def TII(self) -> pd.Series:
"""TII -- Interest Income Total (Financial Services) (TII): double"""
return self.data["TII"]
@property
def TXC(self) -> pd.Series:
"""TXC -- Income Taxes - Current (TXC): double"""
return self.data["TXC"]
@property
def TXDFED(self) -> pd.Series:
"""TXDFED -- Deferred Taxes-Federal (TXDFED): double"""
return self.data["TXDFED"]
@property
def TXDFO(self) -> pd.Series:
"""TXDFO -- Deferred Taxes-Foreign (TXDFO): double"""
return self.data["TXDFO"]
@property
def TXDI(self) -> pd.Series:
"""TXDI -- Income Taxes - Deferred (TXDI): double"""
return self.data["TXDI"]
@property
def TXDS(self) -> pd.Series:
"""TXDS -- Deferred Taxes-State (TXDS): double"""
return self.data["TXDS"]
@property
def TXEQA(self) -> pd.Series:
"""TXEQA -- Tax - Equivalent Adjustment (TXEQA): double"""
return self.data["TXEQA"]
@property
def TXEQII(self) -> pd.Series:
"""TXEQII -- Tax - Equivalent Interest Income (Gross) (TXEQII): double"""
return self.data["TXEQII"]
@property
def TXFED(self) -> pd.Series:
"""TXFED -- Income Taxes Federal (TXFED): double"""
return self.data["TXFED"]
@property
def TXFO(self) -> pd.Series:
"""TXFO -- Income Taxes - Foreign (TXFO): double"""
return self.data["TXFO"]
@property
def TXO(self) -> pd.Series:
"""TXO -- Income Taxes - Other (TXO): double"""
return self.data["TXO"]
@property
def TXS(self) -> pd.Series:
"""TXS -- Income Taxes State (TXS): double"""
return self.data["TXS"]
@property
def TXT(self) -> pd.Series:
"""TXT -- Income Taxes - Total (TXT): double"""
return self.data["TXT"]
@property
def TXVA(self) -> pd.Series:
"""TXVA -- Value Added Taxes (TXVA): double"""
return self.data["TXVA"]
@property
def TXW(self) -> pd.Series:
"""TXW -- Excise Taxes (TXW): double"""
return self.data["TXW"]
@property
def UDPFA(self) -> pd.Series:
"""UDPFA -- Depreciation of Fixed Assets (UDPFA): double"""
return self.data["UDPFA"]
@property
def UDVP(self) -> pd.Series:
"""UDVP -- Preferred Dividend Requirements (UDVP): double"""
return self.data["UDVP"]
@property
def UGI(self) -> pd.Series:
"""UGI -- Gross Income (Income Before Interest Charges) (UGI): double"""
return self.data["UGI"]
@property
def UNIAMI(self) -> pd.Series:
"""UNIAMI -- Net Income before Extraordinary Items and after Minority Interest (UNIAMI): double"""
return self.data["UNIAMI"]
@property
def UNOPINC(self) -> pd.Series:
"""UNOPINC -- Nonoperating Income (Net) - Other (UNOPINC): double"""
return self.data["UNOPINC"]
@property
def UOPI(self) -> pd.Series:
"""UOPI -- Operating Income - Total - Utility (UOPI): double"""
return self.data["UOPI"]
@property
def UPDVP(self) -> pd.Series:
"""UPDVP -- Preference Dividend Requirements* (UPDVP): double"""
return self.data["UPDVP"]
@property
def USPI(self) -> pd.Series:
"""USPI -- Special Items (USPI): double"""
return self.data["USPI"]
@property
def USUBDVP(self) -> pd.Series:
"""USUBDVP -- Subsidiary Preferred Dividends (USUBDVP): double"""
return self.data["USUBDVP"]
@property
def UTME(self) -> pd.Series:
"""UTME -- Maintenance Expense - Total (UTME): double"""
return self.data["UTME"]
@property
def UTXFED(self) -> pd.Series:
"""UTXFED -- Current Taxes - Federal (Operating) (UTXFED): double"""
return self.data["UTXFED"]
@property
def UXINST(self) -> pd.Series:
"""UXINST -- Interest On Short-Term Debt - Utility (UXINST): double"""
return self.data["UXINST"]
@property
def UXINTD(self) -> pd.Series:
"""UXINTD -- Interest on Long-Term Debt* (UXINTD): double"""
return self.data["UXINTD"]
@property
def WDA(self) -> pd.Series:
"""WDA -- Writedowns After-tax (WDA): double"""
return self.data["WDA"]
@property
def WDD(self) -> pd.Series:
"""WDD -- Writedowns Diluted EPS Effect (WDD): double"""
return self.data["WDD"]
@property
def WDEPS(self) -> pd.Series:
"""WDEPS -- Writedowns Basic EPS Effect (WDEPS): double"""
return self.data["WDEPS"]
@property
def WDP(self) -> pd.Series:
"""WDP -- Writedowns Pretax (WDP): double"""
return self.data["WDP"]
@property
def XAD(self) -> pd.Series:
"""XAD -- Advertising Expense (XAD): double"""
return self.data["XAD"]
@property
def XAGO(self) -> pd.Series:
"""XAGO -- Administrative and General Expense - Other (XAGO): double"""
return self.data["XAGO"]
@property
def XAGT(self) -> pd.Series:
"""XAGT -- Administrative and General Expense - Total (XAGT): double"""
return self.data["XAGT"]
@property
def XCOM(self) -> pd.Series:
"""XCOM -- Communications Expense (XCOM): double"""
return self.data["XCOM"]
@property
def XCOMI(self) -> pd.Series:
"""XCOMI -- Commissions Expense (Insurance) (XCOMI): double"""
return self.data["XCOMI"]
@property
def XDEPL(self) -> pd.Series:
"""XDEPL -- Depletion Expense (Schedule VI) (XDEPL): double"""
return self.data["XDEPL"]
@property
def XDP(self) -> pd.Series:
"""XDP -- Depreciation Expense (Schedule VI) (XDP): double"""
return self.data["XDP"]
@property
def XDVRE(self) -> pd.Series:
"""XDVRE -- Expense - Development (Real Estate) (XDVRE): double"""
return self.data["XDVRE"]
@property
def XEQO(self) -> pd.Series:
"""XEQO -- Equipment and Occupancy Expense (XEQO): double"""
return self.data["XEQO"]
@property
def XI(self) -> pd.Series:
"""XI -- Extraordinary Items (XI): double"""
return self.data["XI"]
@property
def XIDO(self) -> pd.Series:
"""XIDO -- Extraordinary Items and Discontinued Operations (XIDO): double"""
return self.data["XIDO"]
@property
def XINDB(self) -> pd.Series:
"""XINDB -- Interest Expense - Deposits - Banks (XINDB): double"""
return self.data["XINDB"]
@property
def XINDC(self) -> pd.Series:
"""XINDC -- Interest Expense - Deposits - Customer (XINDC): double"""
return self.data["XINDC"]
@property
def XINS(self) -> pd.Series:
"""XINS -- Interest Expense - Sundry (XINS): double"""
return self.data["XINS"]
@property
def XINST(self) -> pd.Series:
"""XINST -- Interest Expense - Short-Term Borrowings (XINST): double"""
return self.data["XINST"]
@property
def XINT(self) -> pd.Series:
"""XINT -- Interest and Related Expense - Total (XINT): double"""
return self.data["XINT"]
@property
def XINTD(self) -> pd.Series:
"""XINTD -- Interest Expense - Long-Term Debt (XINTD): double"""
return self.data["XINTD"]
@property
def XINTOPT(self) -> pd.Series:
"""XINTOPT -- Implied Option Expense (XINTOPT): double"""
return self.data["XINTOPT"]
@property
def XIVI(self) -> pd.Series:
"""XIVI -- Investment Expense (Insurance) (XIVI): double"""
return self.data["XIVI"]
@property
def XIVRE(self) -> pd.Series:
"""XIVRE -- Expense - Investment (Real Estate) (XIVRE): double"""
return self.data["XIVRE"]
@property
def XLR(self) -> pd.Series:
"""XLR -- Staff Expense - Total (XLR): double"""
return self.data["XLR"]
@property
def XNBI(self) -> pd.Series:
"""XNBI -- Other Insurance Expense (XNBI): double"""
return self.data["XNBI"]
@property
def XNF(self) -> pd.Series:
"""XNF -- Non-Financial Services Expense (XNF): double"""
return self.data["XNF"]
@property
def XNINS(self) -> pd.Series:
"""XNINS -- Other Expense - Noninsurance (XNINS): double"""
return self.data["XNINS"]
@property
def XNITB(self) -> pd.Series:
"""XNITB -- Expense - Noninterest - Total (Bank) (XNITB): double"""
return self.data["XNITB"]
@property
def XOBD(self) -> pd.Series:
"""XOBD -- Expense - Other (Broker/Dealer) (XOBD): double"""
return self.data["XOBD"]
@property
def XOI(self) -> pd.Series:
"""XOI -- Expenses - Other (Insurance) (XOI): double"""
return self.data["XOI"]
@property
def XOPR(self) -> pd.Series:
"""XOPR -- Operating Expenses Total (XOPR): double"""
return self.data["XOPR"]
@property
def XOPRAR(self) -> pd.Series:
"""XOPRAR -- Operatings Expenses - As Reported (XOPRAR): double"""
return self.data["XOPRAR"]
@property
def XOPTD(self) -> pd.Series:
"""XOPTD -- Implied Option EPS Diluted (XOPTD): double"""
return self.data["XOPTD"]
@property
def XOPTEPS(self) -> pd.Series:
"""XOPTEPS -- Implied Option EPS Basic (XOPTEPS): double"""
return self.data["XOPTEPS"]
@property
def XORE(self) -> pd.Series:
"""XORE -- Expense - Other (Real Estate) (XORE): double"""
return self.data["XORE"]
@property
def XPR(self) -> pd.Series:
"""XPR -- Pension and Retirement Expense (XPR): double"""
return self.data["XPR"]
@property
def XRD(self) -> pd.Series:
"""XRD -- Research and Development Expense (XRD): double"""
return self.data["XRD"]
@property
def XRENT(self) -> pd.Series:
"""XRENT -- Rental Expense (XRENT): double"""
return self.data["XRENT"]
@property
def XS(self) -> pd.Series:
"""XS -- Expense - Sundry (XS): double"""
return self.data["XS"]
@property
def XSGA(self) -> pd.Series:
"""XSGA -- Selling, General and Administrative Expense (XSGA): double"""
return self.data["XSGA"]
@property
def XSTF(self) -> pd.Series:
"""XSTF -- Staff Expense (Income Account) (XSTF): double"""
return self.data["XSTF"]
@property
def XSTFO(self) -> pd.Series:
"""XSTFO -- Staff Expense - Other (XSTFO): double"""
return self.data["XSTFO"]
@property
def XSTFWS(self) -> pd.Series:
"""XSTFWS -- Staff Expense - Wages and Salaries (XSTFWS): double"""
return self.data["XSTFWS"]
@property
def XT(self) -> pd.Series:
"""XT -- Expense - Total (XT): double"""
return self.data["XT"]
@property
def XUW(self) -> pd.Series:
"""XUW -- Other Underwriting Expenses - Insurance (XUW): double"""
return self.data["XUW"]
@property
def XUWLI(self) -> pd.Series:
"""XUWLI -- Underwriting Expense - Life (XUWLI): double"""
return self.data["XUWLI"]
@property
def XUWNLI(self) -> pd.Series:
"""XUWNLI -- Underwriting Expense - Non-Life (XUWNLI): double"""
return self.data["XUWNLI"]
@property
def XUWOI(self) -> pd.Series:
"""XUWOI -- Underwriting Expense - Other (XUWOI): double"""
return self.data["XUWOI"]
@property
def XUWREI(self) -> pd.Series:
"""XUWREI -- Underwriting Expense - Reinsurance (XUWREI): double"""
return self.data["XUWREI"]
@property
def XUWTI(self) -> pd.Series:
"""XUWTI -- Underwriting Expense - Total (XUWTI): double"""
return self.data["XUWTI"]
@property
def AFUDCC(self) -> pd.Series:
"""AFUDCC -- Allowance for Funds Used During Construction (Cash Flow) (AFUDCC): double"""
return self.data["AFUDCC"]
@property
def AFUDCI(self) -> pd.Series:
"""AFUDCI -- Allowance for Funds Used During Construction (Investing) (Cash Flow) (AFUDCI): double"""
return self.data["AFUDCI"]
@property
def AMC(self) -> pd.Series:
"""AMC -- Amortization (Cash Flow) - Utility (AMC): double"""
return self.data["AMC"]
@property
def AOLOCH(self) -> pd.Series:
"""AOLOCH -- Assets and Liabilities Other Net Change (AOLOCH): double"""
return self.data["AOLOCH"]
@property
def APALCH(self) -> pd.Series:
"""APALCH -- Accounts Payable and Accrued Liabilities Increase/(Decrease) (APALCH): double"""
return self.data["APALCH"]
@property
def AQC(self) -> pd.Series:
"""AQC -- Acquisitions (AQC): double"""
return self.data["AQC"]
@property
def CAPX(self) -> pd.Series:
"""CAPX -- Capital Expenditures (CAPX): double"""
return self.data["CAPX"]
@property
def CAPXV(self) -> pd.Series:
"""CAPXV -- Capital Expend Property, Plant and Equipment Schd V (CAPXV): double"""
return self.data["CAPXV"]
@property
def CDVC(self) -> pd.Series:
"""CDVC -- Cash Dividends on Common Stock (Cash Flow) (CDVC): double"""
return self.data["CDVC"]
@property
def CHECH(self) -> pd.Series:
"""CHECH -- Cash and Cash Equivalents Increase/(Decrease) (CHECH): double"""
return self.data["CHECH"]
@property
def DEPC(self) -> pd.Series:
"""DEPC -- Depreciation and Depletion (Cash Flow) (DEPC): double"""
return self.data["DEPC"]
@property
def DLCCH(self) -> pd.Series:
"""DLCCH -- Current Debt Changes (DLCCH): double"""
return self.data["DLCCH"]
@property
def DLTIS(self) -> pd.Series:
"""DLTIS -- Long-Term Debt Issuance (DLTIS): double"""
return self.data["DLTIS"]
@property
def DLTR(self) -> pd.Series:
"""DLTR -- Long-Term Debt Reduction (DLTR): double"""
return self.data["DLTR"]
@property
def DPC(self) -> pd.Series:
"""DPC -- Depreciation and Amortization (Cash Flow) (DPC): double"""
return self.data["DPC"]
@property
def DV(self) -> pd.Series:
"""DV -- Cash Dividends (Cash Flow) (DV): double"""
return self.data["DV"]
@property
def ESUBC(self) -> pd.Series:
"""ESUBC -- Equity in Net Loss Earnings (ESUBC): double"""
return self.data["ESUBC"]
@property
def EXRE(self) -> pd.Series:
"""EXRE -- Exchange Rate Effect (EXRE): double"""
return self.data["EXRE"]
@property
def FIAO(self) -> pd.Series:
"""FIAO -- Financing Activities Other (FIAO): double"""
return self.data["FIAO"]
@property
def FINCF(self) -> pd.Series:
"""FINCF -- Financing Activities Net Cash Flow (FINCF): double"""
return self.data["FINCF"]
@property
def FOPO(self) -> pd.Series:
"""FOPO -- Funds from Operations Other (FOPO): double"""
return self.data["FOPO"]
@property
def FOPOX(self) -> pd.Series:
"""FOPOX -- Funds from Operations - Other excluding Option Tax Benefit (FOPOX): double"""
return self.data["FOPOX"]
@property
def FOPT(self) -> pd.Series:
"""FOPT -- Funds From Operations Total (FOPT): double"""
return self.data["FOPT"]
@property
def FSRCO(self) -> pd.Series:
"""FSRCO -- Sources of Funds Other (FSRCO): double"""
return self.data["FSRCO"]
@property
def FSRCT(self) -> pd.Series:
"""FSRCT -- Sources of Funds Total (FSRCT): double"""
return self.data["FSRCT"]
@property
def FUSEO(self) -> pd.Series:
"""FUSEO -- Uses of Funds Other (FUSEO): double"""
return self.data["FUSEO"]
@property
def FUSET(self) -> pd.Series:
"""FUSET -- Uses of Funds Total (FUSET): double"""
return self.data["FUSET"]
@property
def IBC(self) -> pd.Series:
"""IBC -- Income Before Extraordinary Items (Cash Flow) (IBC): double"""
return self.data["IBC"]
@property
def INTPN(self) -> pd.Series:
"""INTPN -- Interest Paid Net (INTPN): double"""
return self.data["INTPN"]
@property
def INVCH(self) -> pd.Series:
"""INVCH -- Inventory Decrease (Increase) (INVCH): double"""
return self.data["INVCH"]
@property
def ITCC(self) -> pd.Series:
"""ITCC -- Investment Tax Credit - Net (Cash Flow) - Utility (ITCC): double"""
return self.data["ITCC"]
@property
def IVACO(self) -> pd.Series:
"""IVACO -- Investing Activities Other (IVACO): double"""
return self.data["IVACO"]
@property
def IVCH(self) -> pd.Series:
"""IVCH -- Increase in Investments (IVCH): double"""
return self.data["IVCH"]
@property
def IVNCF(self) -> pd.Series:
"""IVNCF -- Investing Activities Net Cash Flow (IVNCF): double"""
return self.data["IVNCF"]
@property
def IVSTCH(self) -> pd.Series:
"""IVSTCH -- Short-Term Investments Change (IVSTCH): double"""
return self.data["IVSTCH"]
@property
def OANCF(self) -> pd.Series:
"""OANCF -- Operating Activities Net Cash Flow (OANCF): double"""
return self.data["OANCF"]
@property
def PDVC(self) -> pd.Series:
"""PDVC -- Cash Dividends on Preferred/Preference Stock (Cash Flow) (PDVC): double"""
return self.data["PDVC"]
@property
def PRSTKC(self) -> pd.Series:
"""PRSTKC -- Purchase of Common and Preferred Stock (PRSTKC): double"""
return self.data["PRSTKC"]
@property
def PRSTKCC(self) -> pd.Series:
"""PRSTKCC -- Purchase of Common Stock (Cash Flow) (PRSTKCC): double"""
return self.data["PRSTKCC"]
@property
def PRSTKPC(self) -> pd.Series:
"""PRSTKPC -- Purchase of Preferred/Preference Stock (Cash Flow) (PRSTKPC): double"""
return self.data["PRSTKPC"]
@property
def RECCH(self) -> pd.Series:
"""RECCH -- Accounts Receivable Decrease (Increase) (RECCH): double"""
return self.data["RECCH"]
@property
def SCSTKC(self) -> pd.Series:
"""SCSTKC -- Sale of Common Stock (Cash Flow) (SCSTKC): double"""
return self.data["SCSTKC"]
@property
def SIV(self) -> pd.Series:
"""SIV -- Sale of Investments (SIV): double"""
return self.data["SIV"]
@property
def SPPE(self) -> pd.Series:
"""SPPE -- Sale of Property (SPPE): double"""
return self.data["SPPE"]
@property
def SPPIV(self) -> pd.Series:
"""SPPIV -- Sale of Property, Plant and Equipment and Investments Gain (Loss) (SPPIV): double"""
return self.data["SPPIV"]
@property
def SPSTKC(self) -> pd.Series:
"""SPSTKC -- Sale of Preferred/Preference Stock (Cash Flow) (SPSTKC): double"""
return self.data["SPSTKC"]
@property
def SSTK(self) -> pd.Series:
"""SSTK -- Sale of Common and Preferred Stock (SSTK): double"""
return self.data["SSTK"]
@property
def TDC(self) -> pd.Series:
"""TDC -- Deferred Income Taxes - Net (Cash Flow) (TDC): double"""
return self.data["TDC"]
@property
def TSAFC(self) -> pd.Series:
"""TSAFC -- Total Sources/Applications of Funds (Cash Flow) (TSAFC): double"""
return self.data["TSAFC"]
@property
def TXACH(self) -> pd.Series:
"""TXACH -- Income Taxes Accrued Increase/(Decrease) (TXACH): double"""
return self.data["TXACH"]
@property
def TXBCO(self) -> pd.Series:
"""TXBCO -- Excess Tax Benefit Stock Options - Cash Flow Operating (TXBCO): double"""
return self.data["TXBCO"]
@property
def TXBCOF(self) -> pd.Series:
"""TXBCOF -- Excess Tax Benefit of Stock Options - Cash Flow Financing (TXBCOF): double"""
return self.data["TXBCOF"]
@property
def TXDC(self) -> pd.Series:
"""TXDC -- Deferred Taxes (Cash Flow) (TXDC): double"""
return self.data["TXDC"]
@property
def TXPD(self) -> pd.Series:
"""TXPD -- Income Taxes Paid (TXPD): double"""
return self.data["TXPD"]
@property
def UAOLOCH(self) -> pd.Series:
"""UAOLOCH -- Other Assets and Liabilities - Net Change (Statement of Cash Flows) (UAOLOCH): double"""
return self.data["UAOLOCH"]
@property
def UDFCC(self) -> pd.Series:
"""UDFCC -- Deferred Fuel - Increase (Decrease) (Statement of Cash Flows) (UDFCC): double"""
return self.data["UDFCC"]
@property
def UFRETSD(self) -> pd.Series:
"""UFRETSD -- Funds for Retirement of Securities and Short-Term Debt (Cash Flow) (UFRETSD): double"""
return self.data["UFRETSD"]
@property
def UNWCC(self) -> pd.Series:
"""UNWCC -- Working Capital (Use) - Increase (Decrease) (Cash Flow) (UNWCC): double"""
return self.data["UNWCC"]
@property
def UOIS(self) -> pd.Series:
"""UOIS -- Other Internal Sources - Net (Cash Flow) (UOIS): double"""
return self.data["UOIS"]
@property
def USTDNC(self) -> pd.Series:
"""USTDNC -- Short-Term Debt - Decrease (Increase) (Cash Flow) (USTDNC): double"""
return self.data["USTDNC"]
@property
def UTFDOC(self) -> pd.Series:
"""UTFDOC -- Total Funds From Operations (Cash Flow) (UTFDOC): double"""
return self.data["UTFDOC"]
@property
def UTFOSC(self) -> pd.Series:
"""UTFOSC -- Total Funds from Outside Sources (Cash Flow) (UTFOSC): double"""
return self.data["UTFOSC"]
@property
def UWKCAPC(self) -> pd.Series:
"""UWKCAPC -- Working Capital (Source) - Decrease (Increase) (Cash Flow) (UWKCAPC): double"""
return self.data["UWKCAPC"]
@property
def WCAPC(self) -> pd.Series:
"""WCAPC -- Working Capital Change Other Increase/(Decrease) (WCAPC): double"""
return self.data["WCAPC"]
@property
def WCAPCH(self) -> pd.Series:
"""WCAPCH -- Working Capital Change Total (WCAPCH): double"""
return self.data["WCAPCH"]
@property
def XIDOC(self) -> pd.Series:
"""XIDOC -- Extraordinary Items and Discontinued Operations (Cash Flow) (XIDOC): double"""
return self.data["XIDOC"]
@property
def ACCRT(self) -> pd.Series:
"""ACCRT -- ARO Accretion Expense (ACCRT): double"""
return self.data["ACCRT"]
@property
def ACQAO(self) -> pd.Series:
"""ACQAO -- Acquired Assets > Other Long-Term Assets (ACQAO): double"""
return self.data["ACQAO"]
@property
def ACQCSHI(self) -> pd.Series:
"""ACQCSHI -- Shares Issued for Acquisition (ACQCSHI): double"""
return self.data["ACQCSHI"]
@property
def ACQGDWL(self) -> pd.Series:
"""ACQGDWL -- Acquired Assets - Goodwill (ACQGDWL): double"""
return self.data["ACQGDWL"]
@property
def ACQIC(self) -> pd.Series:
"""ACQIC -- Acquisitions - Current Income Contribution (ACQIC): double"""
return self.data["ACQIC"]
@property
def ACQINTAN(self) -> pd.Series:
"""ACQINTAN -- Acquired Assets - Intangibles (ACQINTAN): double"""
return self.data["ACQINTAN"]
@property
def ACQINVT(self) -> pd.Series:
"""ACQINVT -- Acquired Assets - Inventory (ACQINVT): double"""
return self.data["ACQINVT"]
@property
def ACQLNTAL(self) -> pd.Series:
"""ACQLNTAL -- Acquired Loans (ACQLNTAL): double"""
return self.data["ACQLNTAL"]
@property
def ACQNIINTC(self) -> pd.Series:
"""ACQNIINTC -- Net Interest Income Contribution (ACQNIINTC): double"""
return self.data["ACQNIINTC"]
@property
def ACQPPE(self) -> pd.Series:
"""ACQPPE -- Acquired Assets > Property, Plant & Equipment (ACQPPE): double"""
return self.data["ACQPPE"]
@property
def ACQSC(self) -> pd.Series:
"""ACQSC -- Acquisitions - Current Sales Contribution (ACQSC): double"""
return self.data["ACQSC"]
@property
def ANO(self) -> pd.Series:
"""ANO -- Assets Netting & Other Adjustments (ANO): double"""
return self.data["ANO"]
@property
def AOL2(self) -> pd.Series:
"""AOL2 -- Assets Level2 (Observable) (AOL2): double"""
return self.data["AOL2"]
@property
def AQPL1(self) -> pd.Series:
"""AQPL1 -- Assets Level1 (Quoted Prices) (AQPL1): double"""
return self.data["AQPL1"]
@property
def AU(self) -> pd.Series:
"""AU -- Auditor (AU): string"""
return self.data["AU"]
@property
def AUL3(self) -> pd.Series:
"""AUL3 -- Assets Level3 (Unobservable) (AUL3): double"""
return self.data["AUL3"]
@property
def AUOP(self) -> pd.Series:
"""AUOP -- Auditor Opinion (AUOP): string"""
return self.data["AUOP"]
@property
def AUOPIC(self) -> pd.Series:
"""AUOPIC -- Auditor Opinion - Internal Control (AUOPIC): string"""
return self.data["AUOPIC"]
@property
def BASTR(self) -> pd.Series:
"""BASTR -- Average Short-Term Borrowings Rate (BASTR): double"""
return self.data["BASTR"]
@property
def BILLEXCE(self) -> pd.Series:
"""BILLEXCE -- Billings in Excess of Cost & Earnings (BILLEXCE): double"""
return self.data["BILLEXCE"]
@property
def CAPR1(self) -> pd.Series:
"""CAPR1 -- Risk-Adjusted Capital Ratio - Tier 1 (CAPR1): double"""
return self.data["CAPR1"]
@property
def CAPR2(self) -> pd.Series:
"""CAPR2 -- Risk-Adjusted Capital Ratio - Tier 2 (CAPR2): double"""
return self.data["CAPR2"]
@property
def CAPR3(self) -> pd.Series:
"""CAPR3 -- Risk-Adjusted Capital Ratio - Combined (CAPR3): double"""
return self.data["CAPR3"]
@property
def CAPSFT(self) -> pd.Series:
"""CAPSFT -- Capitalized Software (CAPSFT): double"""
return self.data["CAPSFT"]
@property
def CEIEXBILL(self) -> pd.Series:
"""CEIEXBILL -- Cost & Earnings in Excess of Billings (CEIEXBILL): double"""
return self.data["CEIEXBILL"]
@property
def CEOSO(self) -> pd.Series:
"""CEOSO -- Chief Executive Officer SOX Certification (CEOSO): string"""
return self.data["CEOSO"]
@property
def CFOSO(self) -> pd.Series:
"""CFOSO -- Chief Financial Officer SOX Certification (CFOSO): string"""
return self.data["CFOSO"]
@property
def CI(self) -> pd.Series:
"""CI -- Comprehensive Income - Total (CI): double"""
return self.data["CI"]
@property
def CIMII(self) -> pd.Series:
"""CIMII -- Comprehensive Income - Noncontrolling Interest (CIMII): double"""
return self.data["CIMII"]
@property
def CSHFD(self) -> pd.Series:
"""CSHFD -- Common Shares Used to Calc Earnings Per Share Fully Diluted (CSHFD): double"""
return self.data["CSHFD"]
@property
def CSHI(self) -> pd.Series:
"""CSHI -- Common Shares Issued (CSHI): double"""
return self.data["CSHI"]
@property
def CSHO(self) -> pd.Series:
"""CSHO -- Common Shares Outstanding (CSHO): double"""
return self.data["CSHO"]
@property
def CSHPRI(self) -> pd.Series:
"""CSHPRI -- Common Shares Used to Calculate Earnings Per Share Basic (CSHPRI): double"""
return self.data["CSHPRI"]
@property
def CSHR(self) -> pd.Series:
"""CSHR -- Common/Ordinary Shareholders (CSHR): double"""
return self.data["CSHR"]
@property
def CSHRC(self) -> pd.Series:
"""CSHRC -- Common Shares Reserved for Conversion Convertible Debt (CSHRC): double"""
return self.data["CSHRC"]
@property
def CSHRP(self) -> pd.Series:
"""CSHRP -- Common Shares Reserved for Conversion Preferred Stock (CSHRP): double"""
return self.data["CSHRP"]
@property
def CSHRSO(self) -> pd.Series:
"""CSHRSO -- Common Shares Reserved for Conversion Stock Options (CSHRSO): double"""
return self.data["CSHRSO"]
@property
def CSHRT(self) -> pd.Series:
"""CSHRT -- Common Shares Reserved for Conversion Total (CSHRT): double"""
return self.data["CSHRT"]
@property
def CSHRW(self) -> pd.Series:
"""CSHRW -- Common Shares Reserved for Conversion Warrants and Other (CSHRW): double"""
return self.data["CSHRW"]
@property
def DERAC(self) -> pd.Series:
"""DERAC -- Derivative Assets - Current (DERAC): double"""
return self.data["DERAC"]
@property
def DERALT(self) -> pd.Series:
"""DERALT -- Derivative Assets Long-Term (DERALT): double"""
return self.data["DERALT"]
@property
def DERHEDGL(self) -> pd.Series:
"""DERHEDGL -- Gains/Losses on Derivatives and Hedging (DERHEDGL): double"""
return self.data["DERHEDGL"]
@property
def DERLC(self) -> pd.Series:
"""DERLC -- Derivative Liabilities- Current (DERLC): double"""
return self.data["DERLC"]
@property
def DERLLT(self) -> pd.Series:
"""DERLLT -- Derivative Liabilities Long-Term (DERLLT): double"""
return self.data["DERLLT"]
@property
def DT(self) -> pd.Series:
"""DT -- Total Debt Including Current (DT): double"""
return self.data["DT"]
@property
def DVINTF(self) -> pd.Series:
"""DVINTF -- Dividends & Interest Receivable (Cash Flow) (DVINTF): double"""
return self.data["DVINTF"]
@property
def EMP(self) -> pd.Series:
"""EMP -- Employees (EMP): double"""
return self.data["EMP"]
@property
def FINACO(self) -> pd.Series:
"""FINACO -- Finance Division Other Current Assets, Total (FINACO): double"""
return self.data["FINACO"]
@property
def FINAO(self) -> pd.Series:
"""FINAO -- Finance Division Other Long-Term Assets, Total (FINAO): double"""
return self.data["FINAO"]
@property
def FINCH(self) -> pd.Series:
"""FINCH -- Finance Division - Cash (FINCH): double"""
return self.data["FINCH"]
@property
def FINDLC(self) -> pd.Series:
"""FINDLC -- Finance Division Long-Term Debt - Current (FINDLC): double"""
return self.data["FINDLC"]
@property
def FINDLT(self) -> pd.Series:
"""FINDLT -- Finance Division Debt - Long-Term (FINDLT): double"""
return self.data["FINDLT"]
@property
def FINIVST(self) -> pd.Series:
"""FINIVST -- Finance Division - Short-Term Investments (FINIVST): double"""
return self.data["FINIVST"]
@property
def FINLCO(self) -> pd.Series:
"""FINLCO -- Finance Division Other Current Liabilities, Total (FINLCO): double"""
return self.data["FINLCO"]
@property
def FINLTO(self) -> pd.Series:
"""FINLTO -- Finance Division Other Long Term Liabilities, Total (FINLTO): double"""
return self.data["FINLTO"]
@property
def FINNP(self) -> pd.Series:
"""FINNP -- Finance Division Notes Payable (FINNP): double"""
return self.data["FINNP"]
@property
def FINRECC(self) -> pd.Series:
"""FINRECC -- Finance Division - Current Receivables (FINRECC): double"""
return self.data["FINRECC"]
@property
def FINRECLT(self) -> pd.Series:
"""FINRECLT -- Finance Division - Long-Term Receivables (FINRECLT): double"""
return self.data["FINRECLT"]
@property
def FINREV(self) -> pd.Series:
"""FINREV -- Finance Division Revenue (FINREV): double"""
return self.data["FINREV"]
@property
def FINXINT(self) -> pd.Series:
"""FINXINT -- Finance Division Interest Expense (FINXINT): double"""
return self.data["FINXINT"]
@property
def FINXOPR(self) -> pd.Series:
"""FINXOPR -- Finance Division Operating Expense (FINXOPR): double"""
return self.data["FINXOPR"]
@property
def GLIV(self) -> pd.Series:
"""GLIV -- Gains/Losses on investments (GLIV): double"""
return self.data["GLIV"]
@property
def GOVTOWN(self) -> pd.Series:
"""GOVTOWN -- Percent of Gov't Owned (GOVTOWN): double"""
return self.data["GOVTOWN"]
@property
def IBMII(self) -> pd.Series:
"""IBMII -- Income before Extraordinary Items and Noncontrolling Interests (IBMII): double"""
return self.data["IBMII"]
@property
def LIFRP(self) -> pd.Series:
"""LIFRP -- LIFO Reserve - Prior (LIFRP): double"""
return self.data["LIFRP"]
@property
def LNO(self) -> pd.Series:
"""LNO -- Liabilities Netting & Other Adjustments (LNO): double"""
return self.data["LNO"]
@property
def LOL2(self) -> pd.Series:
"""LOL2 -- Liabilities Level2 (Observable) (LOL2): double"""
return self.data["LOL2"]
@property
def LQPL1(self) -> pd.Series:
"""LQPL1 -- Liabilities Level1 (Quoted Prices) (LQPL1): double"""
return self.data["LQPL1"]
@property
def LUL3(self) -> pd.Series:
"""LUL3 -- Liabilities Level3 (Unobservable) (LUL3): double"""
return self.data["LUL3"]
@property
def MIBN(self) -> pd.Series:
"""MIBN -- Noncontrolling Interests - Nonredeemable - Balance Sheet (MIBN): double"""
return self.data["MIBN"]
@property
def MIBT(self) -> pd.Series:
"""MIBT -- Noncontrolling Interests - Total - Balance Sheet (MIBT): double"""
return self.data["MIBT"]
@property
def NIINTPFC(self) -> pd.Series:
"""NIINTPFC -- Pro Forma Net Interest Income - Current (NIINTPFC): double"""
return self.data["NIINTPFC"]
@property
def NIINTPFP(self) -> pd.Series:
"""NIINTPFP -- Pro Forma Net Interest Income - Prior (NIINTPFP): double"""
return self.data["NIINTPFP"]
@property
def NIPFC(self) -> pd.Series:
"""NIPFC -- Pro Forma Net Income - Current (NIPFC): double"""
return self.data["NIPFC"]
@property
def NIPFP(self) -> pd.Series:
"""NIPFP -- Pro Forma Net Income - Prior (NIPFP): double"""
return self.data["NIPFP"]
@property
def OPTCA(self) -> pd.Series:
"""OPTCA -- Options - Cancelled (-) (OPTCA): double"""
return self.data["OPTCA"]
@property
def OPTDR(self) -> pd.Series:
"""OPTDR -- Dividend Rate - Assumption (%) (OPTDR): double"""
return self.data["OPTDR"]
@property
def OPTEX(self) -> pd.Series:
"""OPTEX -- Options Exercisable (000) (OPTEX): double"""
return self.data["OPTEX"]
@property
def OPTEXD(self) -> pd.Series:
"""OPTEXD -- Options - Exercised (-) (OPTEXD): double"""
return self.data["OPTEXD"]
@property
def OPTFVGR(self) -> pd.Series:
"""OPTFVGR -- Options - Fair Value of Options Granted (OPTFVGR): double"""
return self.data["OPTFVGR"]
@property
def OPTGR(self) -> pd.Series:
"""OPTGR -- Options - Granted (OPTGR): double"""
return self.data["OPTGR"]
@property
def OPTLIFE(self) -> pd.Series:
"""OPTLIFE -- Life of Options - Assumption (# yrs) (OPTLIFE): double"""
return self.data["OPTLIFE"]
@property
def OPTOSBY(self) -> pd.Series:
"""OPTOSBY -- Options Outstanding - Beg of Year (OPTOSBY): double"""
return self.data["OPTOSBY"]
@property
def OPTOSEY(self) -> pd.Series:
"""OPTOSEY -- Options Outstanding - End of Year (OPTOSEY): double"""
return self.data["OPTOSEY"]
@property
def OPTPRCBY(self) -> pd.Series:
"""OPTPRCBY -- Options Outstanding Beg of Year - Price (OPTPRCBY): double"""
return self.data["OPTPRCBY"]
@property
def OPTRFR(self) -> pd.Series:
"""OPTRFR -- Risk Free Rate - Assumption (%) (OPTRFR): double"""
return self.data["OPTRFR"]
@property
def OPTVOL(self) -> pd.Series:
"""OPTVOL -- Volatility - Assumption (%) (OPTVOL): double"""
return self.data["OPTVOL"]
@property
def PNRSHO(self) -> pd.Series:
"""PNRSHO -- Nonred Pfd Shares Outs (000) (PNRSHO): double"""
return self.data["PNRSHO"]
@property
def PRSHO(self) -> pd.Series:
"""PRSHO -- Redeem Pfd Shares Outs (000) (PRSHO): double"""
return self.data["PRSHO"]
@property
def RANK(self) -> pd.Series:
"""RANK -- Rank - Auditor (RANK): double"""
return self.data["RANK"]
@property
def RSTCHE(self) -> pd.Series:
"""RSTCHE -- Restricted Cash & Investments - Current (RSTCHE): double"""
return self.data["RSTCHE"]
@property
def RSTCHELT(self) -> pd.Series:
"""RSTCHELT -- Long-Term Restricted Cash & Investments (RSTCHELT): double"""
return self.data["RSTCHELT"]
@property
def SALEPFC(self) -> pd.Series:
"""SALEPFC -- Pro Forma Net Sales - Current Year (SALEPFC): double"""
return self.data["SALEPFC"]
@property
def SALEPFP(self) -> pd.Series:
"""SALEPFP -- Pro Forma Net Sales - Prior Year (SALEPFP): double"""
return self.data["SALEPFP"]
@property
def TEQ(self) -> pd.Series:
"""TEQ -- Stockholders Equity - Total (TEQ): double"""
return self.data["TEQ"]
@property
def TFVA(self) -> pd.Series:
"""TFVA -- Total Fair Value Assets (TFVA): double"""
return self.data["TFVA"]
@property
def TFVCE(self) -> pd.Series:
"""TFVCE -- Total Fair Value Changes including Earnings (TFVCE): double"""
return self.data["TFVCE"]
@property
def TFVL(self) -> pd.Series:
"""TFVL -- Total Fair Value Liabilities (TFVL): double"""
return self.data["TFVL"]
@property
def TSTKN(self) -> pd.Series:
"""TSTKN -- Treasury Stock Number of Common Shares (TSTKN): double"""
return self.data["TSTKN"]
@property
def TXTUBADJUST(self) -> pd.Series:
"""TXTUBADJUST -- Other Unrecog Tax Benefit Adj. (TXTUBADJUST): double"""
return self.data["TXTUBADJUST"]
@property
def TXTUBBEGIN(self) -> pd.Series:
"""TXTUBBEGIN -- Unrecog. Tax Benefits - Beg of Year (TXTUBBEGIN): double"""
return self.data["TXTUBBEGIN"]
@property
def TXTUBEND(self) -> pd.Series:
"""TXTUBEND -- Unrecog. Tax Benefits - End of Year (TXTUBEND): double"""
return self.data["TXTUBEND"]
@property
def TXTUBMAX(self) -> pd.Series:
"""TXTUBMAX -- Chg. In Unrecog. Tax Benefits - Max (TXTUBMAX): double"""
return self.data["TXTUBMAX"]
@property
def TXTUBMIN(self) -> pd.Series:
"""TXTUBMIN -- Chg. In Unrecog. Tax Benefits - Min (TXTUBMIN): double"""
return self.data["TXTUBMIN"]
@property
def TXTUBPOSDEC(self) -> pd.Series:
"""TXTUBPOSDEC -- Decrease- Current Tax Positions (TXTUBPOSDEC): double"""
return self.data["TXTUBPOSDEC"]
@property
def TXTUBPOSINC(self) -> pd.Series:
"""TXTUBPOSINC -- Increase- Current Tax Positions (TXTUBPOSINC): double"""
return self.data["TXTUBPOSINC"]
@property
def TXTUBPOSPDEC(self) -> pd.Series:
"""TXTUBPOSPDEC -- Decrease- Prior Tax Positions (TXTUBPOSPDEC): double"""
return self.data["TXTUBPOSPDEC"]
@property
def TXTUBPOSPINC(self) -> pd.Series:
"""TXTUBPOSPINC -- Increase- Prior Tax Positions (TXTUBPOSPINC): double"""
return self.data["TXTUBPOSPINC"]
@property
def TXTUBSETTLE(self) -> pd.Series:
"""TXTUBSETTLE -- Settlements with Tax Authorities (TXTUBSETTLE): double"""
return self.data["TXTUBSETTLE"]
@property
def TXTUBSOFLIMIT(self) -> pd.Series:
"""TXTUBSOFLIMIT -- Lapse of Statute of Limitations (TXTUBSOFLIMIT): double"""
return self.data["TXTUBSOFLIMIT"]
@property
def TXTUBTXTR(self) -> pd.Series:
"""TXTUBTXTR -- Impact on Effective Tax Rate (TXTUBTXTR): double"""
return self.data["TXTUBTXTR"]
@property
def TXTUBXINTBS(self) -> pd.Series:
"""TXTUBXINTBS -- Interest & Penalties Accrued - B/S (TXTUBXINTBS): double"""
return self.data["TXTUBXINTBS"]
@property
def TXTUBXINTIS(self) -> pd.Series:
"""TXTUBXINTIS -- Interest & Penalties Reconized - I/S (TXTUBXINTIS): double"""
return self.data["TXTUBXINTIS"]
@property
def XRDP(self) -> pd.Series:
"""XRDP -- Research & Development - Prior (XRDP): double"""
return self.data["XRDP"]
@property
def ADJEX_C(self) -> pd.Series:
"""ADJEX_C -- Cumulative Adjustment Factor by Ex-Date - Calendar (ADJEX_C): double"""
return self.data["ADJEX_C"]
@property
def ADJEX_F(self) -> pd.Series:
"""ADJEX_F -- Cumulative Adjustment Factor by Ex-Date - Fiscal (ADJEX_F): double"""
return self.data["ADJEX_F"]
@property
def CSHTR_C(self) -> pd.Series:
"""CSHTR_C -- Common Shares Traded - Annual - Calendar (CSHTR_C): double"""
return self.data["CSHTR_C"]
@property
def CSHTR_F(self) -> pd.Series:
"""CSHTR_F -- Common Shares Traded - Annual - Fiscal (CSHTR_F): double"""
return self.data["CSHTR_F"]
@property
def DVPSP_C(self) -> pd.Series:
"""DVPSP_C -- Dividends per Share - Pay Date - Calendar (DVPSP_C): double"""
return self.data["DVPSP_C"]
@property
def DVPSP_F(self) -> pd.Series:
"""DVPSP_F -- Dividends per Share - Pay Date - Fiscal (DVPSP_F): double"""
return self.data["DVPSP_F"]
@property
def DVPSX_C(self) -> pd.Series:
"""DVPSX_C -- Dividends per Share - Ex-Date - Calendar (DVPSX_C): double"""
return self.data["DVPSX_C"]
@property
def DVPSX_F(self) -> pd.Series:
"""DVPSX_F -- Dividends per Share - Ex-Date - Fiscal (DVPSX_F): double"""
return self.data["DVPSX_F"]
@property
def MKVALT(self) -> pd.Series:
"""MKVALT -- Market Value - Total - Fiscal (MKVALT): double"""
return self.data["MKVALT"]
@property
def NAICSH(self) -> pd.Series:
"""NAICSH -- North America Industrial Classification System - Historical (NAICSH): string"""
return self.data["NAICSH"]
@property
def PRCC_C(self) -> pd.Series:
"""PRCC_C -- Price Close - Annual - Calendar (PRCC_C): double"""
return self.data["PRCC_C"]
@property
def PRCC_F(self) -> pd.Series:
"""PRCC_F -- Price Close - Annual - Fiscal (PRCC_F): double"""
return self.data["PRCC_F"]
@property
def PRCH_C(self) -> pd.Series:
"""PRCH_C -- Price High - Annual - Calendar (PRCH_C): double"""
return self.data["PRCH_C"]
@property
def PRCH_F(self) -> pd.Series:
"""PRCH_F -- Price High - Annual - Fiscal (PRCH_F): double"""
return self.data["PRCH_F"]
@property
def PRCL_C(self) -> pd.Series:
"""PRCL_C -- Price Low - Annual - Calendar (PRCL_C): double"""
return self.data["PRCL_C"]
@property
def PRCL_F(self) -> pd.Series:
"""PRCL_F -- Price Low - Annual - Fiscal (PRCL_F): double"""
return self.data["PRCL_F"]
@property
def SICH(self) -> pd.Series:
"""SICH -- Standard Industrial Classification - Historical (SICH): double"""
return self.data["SICH"]
|
py | 7dfe81243af493e9e8e4226cc84e7284b55e4308 | from .rename import rename
|
py | 7dfe8128d8c428b6b7217a4fad902fbad536673c | class _PayrollSystem:
def __init__(self):
self._employee_policies = {
1: SalaryPolicy(1000),
2: SalaryPolicy(950),
3: CommissionPolicy(600, 100),
4: HourlyPolicy(22),
5: HourlyPolicy(16),
}
def _get_policy(self, employee_id):
policy = self._employee_policies.get(employee_id)
if not policy:
raise ValueError('invalid employee_id')
return policy
def _calculate_payroll(self, employees):
        print('Calculating Payroll')
print('====================')
for employee in employees:
print(f'Payroll for: {employee.identification} - {employee.name}')
print(f'- Check Amount: {employee.calculate_payroll()}')
if employee.address:
print('- Sent to')
print(employee.address)
print('')
class DisabilityPolicy:
def __init__(self):
self._base_policy = None
def track_work(self, hours):
self._check_base_policy()
return self._base_policy.track_work(hours)
def calculate_payroll(self):
self._check_base_policy()
base_salary = self._base_policy.calculate_payroll()
return base_salary * 0.6
def apply_to_policy(self, base_policy):
self._base_policy = base_policy
def _check_base_policy(self):
if not self._base_policy:
raise RuntimeError('Base policy missing')
class PayrollPolicy:
def __init__(self):
self.hours_worked = 0
def track_work(self, hours):
self.hours_worked += hours
class SalaryPolicy(PayrollPolicy):
def __init__(self, weekly_salary):
super().__init__()
self.weekly_salary = weekly_salary
def calculate_payroll(self):
return self.weekly_salary
class HourlyPolicy(PayrollPolicy):
def __init__(self, hourly_rate):
super().__init__()
self.hourly_rate = hourly_rate
def calculate_payroll(self):
return self.hours_worked * self.hourly_rate
class CommissionPolicy(SalaryPolicy):
def __init__(self, weekly_salary, commission_per_sale):
super().__init__(weekly_salary)
self.commission_per_sale = commission_per_sale
def commission(self):
sales = self.hours_worked / 5
return sales * self.commission_per_sale
def calculate_payroll(self):
return super().calculate_payroll() + self.commission()
payroll_system = _PayrollSystem()
# Public interface
def get_policy(employee_id):
return payroll_system._get_policy(employee_id)
def calculate_payroll(employees):
return payroll_system._calculate_payroll(employees)
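# --- Usage sketch (illustrative; not part of the original module). The public
# functions above expect employee objects exposing `identification`, `name`,
# `address` and `calculate_payroll()`; the `Employee` class below is a
# hypothetical stand-in for whatever the real employees module provides.
if __name__ == '__main__':
    class Employee:
        def __init__(self, identification, name, address=None):
            self.identification = identification
            self.name = name
            self.address = address
            self._policy = get_policy(identification)
        def work(self, hours):
            self._policy.track_work(hours)
        def calculate_payroll(self):
            return self._policy.calculate_payroll()
    employees = [Employee(1, 'Mary Poppins'), Employee(4, 'Jane Doe')]
    for employee in employees:
        employee.work(40)
    calculate_payroll(employees)  # prints checks of 1000 and 880 respectively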
|
py | 7dfe81d9dd1fc766d5f75725f7127edca94101cc | """
A non-python example, with tests for IRKernel (irkernel.github.io).
(Beware of python quoting/string escaping rules being different to the
language being tested)
"""
import os
import unittest
from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel
import jupyter_kernel_test as jkt
class IRKernelTests(jkt.KernelTests):
kernel_name = "ir"
@classmethod
def setUpClass(cls):
try:
cls.km, cls.kc = jkt.start_new_kernel(kernel_name=cls.kernel_name)
except NoSuchKernel:
raise unittest.SkipTest("No ir kernel installed")
language_name = "R"
file_extension = ".r"
code_hello_world = 'print("hello, world")'
completion_samples = [
{
'text': 'zi',
'matches': {'zip'},
},
] if os.name != 'nt' else [] # zip is not available on Windows
complete_code_samples = ['1', "print('hello, world')", "f <- function(x) {x*2}"]
incomplete_code_samples = ["print('hello", "f <- function(x) {x"]
code_generate_error = "raise"
code_display_data = [
{'code': "plot(iris)", 'mime': "image/png"},
{'code': "1+2+3", "mime": "text/plain" }
]
if __name__ == '__main__':
unittest.main()
|
py | 7dfe8212c5fdb428ee67d54b3280b02eb220dd17 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
bl_info = {
"name": "LipSync Importer & Blinker",
"author": "Yousef Harfoush - bat3a ;)",
"version": (0, 5, 1),
"blender": (2, 70, 0),
"location": "3D window > Tool Shelf",
"description": "Plots Moho (Papagayo, Jlipsync, Yolo) file "
"to frames and adds automatic blinking",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php?title=Extensions:2.6/Py/"
"Scripts/Import-Export/Lipsync Importer",
"tracker_url": "https://developer.blender.org/maniphest/task/edit/form/2/",
"category": "Import-Export"}
import bpy, re
from random import random
from bpy.props import *
from bpy.props import IntProperty, FloatProperty, StringProperty
global lastPhoneme
lastPhoneme="nothing"
# add blinking
def blinker():
scn = bpy.context.scene
obj = bpy.context.object
if scn.regMenuTypes.enumBlinkTypes == '0':
modifier = 0
elif scn.regMenuTypes.enumBlinkTypes == '1':
modifier = scn.blinkMod
#creating keys with blinkNm count
for y in range(scn.blinkNm):
frame = y * scn.blinkSp + int(random()*modifier)
createShapekey('blink', frame)
# ----------- code contributed by Dalai Felinto; adds armature support (modified by me) -----------
bone_keys = {
"AI": ('location', 0),
"E": ('location', 1),
"FV": ('location', 2),
"L": ('rotation_euler', 0),
"MBP": ('rotation_euler', 1),
"O": ('rotation_euler', 2),
"U": ('scale', 0),
"WQ": ('scale', 1),
"etc": ('scale', 2),
"rest": ('ik_stretch', -1)
}
def lipsyncerBone():
# reading imported file & creating keys
object = bpy.context.object
scene = bpy.context.scene
bone = bpy.context.active_pose_bone
resetBoneScale(bone)
f=open(scene.fpath) # importing file
    # f.readline() # reading the 1st line that we don't need
for line in f:
# removing new lines
lsta = re.split("\n+", line)
# building a list of frames & shapes indexes
# lst = re.split(":? ", lsta[0])# making a list of a frame & number
lst = re.split(":?\t", lsta[0])# making a list of a frame & number
# frame = int(lst[0])
frame = float(lst[0]) * 30.0
for key,attribute in bone_keys.items():
if lst[1] == key:
createBoneKeys(key, bone, attribute, frame)
def resetBoneScale(bone):
    # set the attributes used by papagayo to 0.0
    for attribute, index in bone_keys.values():
        if index != -1:
            # e.g. bone.location[0] = 0.0
            getattr(bone, attribute)[index] = 0.0
        else:
            setattr(bone, attribute, 0.0)
def addBoneKey(bone, data_path, index=-1, value=None, frame=0, group=""):
    # set a value and keyframe for the bone at the given frame
    # it assumes the 'bone' variable was defined before
    # and it's the current selected bone
    if value is not None:
        if index != -1:
            # e.g. bone.location[0] = value
            getattr(bone, data_path)[index] = value
        else:
            setattr(bone, data_path, value)
    # e.g. bone.keyframe_insert("location", 0, 10.0, "Lipsync")
    bone.keyframe_insert(data_path, index, frame, group)
# creating keys with offset and eases for a phoneme at the given frame
def createBoneKeys(phoneme, bone, attribute, frame):
global lastPhoneme
scene = bpy.context.scene
object = bpy.context.object
offst = scene.offset # offset value
skVlu = scene.skscale # shape key value
#in case of Papagayo format
if scene.regMenuTypes.enumFileTypes == '0' :
frmIn = scene.easeIn # ease in value
frmOut = scene.easeOut # ease out value
hldIn = scene.holdGap # holding time value
#in case of Jlipsync format or Yolo
elif scene.regMenuTypes.enumFileTypes == '1' :
frmIn = 1
frmOut = 1
hldIn = 0
    # inserting the In key only when the phoneme changes or when blinking
if lastPhoneme!=phoneme or eval(scene.regMenuTypes.enumModeTypes) == 1:
addBoneKey(bone, attribute[0], attribute[1], 0.0, offst+frame-frmIn, "Lipsync")
addBoneKey(bone, attribute[0], attribute[1], skVlu, offst+frame, "Lipsync")
addBoneKey(bone, attribute[0], attribute[1], skVlu, offst+frame+hldIn, "Lipsync")
addBoneKey(bone, attribute[0], attribute[1], 0.0, offst+frame+hldIn+frmOut, "Lipsync")
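        # the four keys above trace a trapezoid per phoneme: 0.0 at
        # frame-frmIn, skVlu at frame, held at skVlu until frame+hldIn,
        # then back to 0.0 at frame+hldIn+frmOut (ease in, hold, ease out)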
lastPhoneme=phoneme
# -------------------------------------------------------------------------------
# reading imported file & creating keys
def lipsyncer():
obj = bpy.context.object
scn = bpy.context.scene
f=open(scn.fpath) # importing file
    # f.readline() # reading the 1st line that we don't need
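    # the split pattern below implies a tab-separated "time<TAB>phoneme" input,
    # with times in seconds converted to frames at an assumed 30 fps, e.g.:
    #   0.0000<TAB>rest
    #   0.2333<TAB>MBP
    #   0.3000<TAB>AI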
for line in f:
# removing new lines
lsta = re.split("\n+", line)
# building a list of frames & shapes indexes
# lst = re.split(":? ", lsta[0])# making a list of a frame & number
lst = re.split(":?\t", lsta[0])# making a list of a frame & number
# frame = int(lst[0])
frame = float(lst[0]) * 30.0
for key in obj.data.shape_keys.key_blocks:
if lst[1] == key.name:
createShapekey(key.name, frame)
# creating keys with offset and eases for a phoneme at the frame
def createShapekey(phoneme, frame):
global lastPhoneme
scn = bpy.context.scene
obj = bpy.context.object
objSK = obj.data.shape_keys
offst = scn.offset # offset value
skVlu = scn.skscale # shape key value
#in case of Papagayo format
if scn.regMenuTypes.enumFileTypes == '0' :
frmIn = scn.easeIn # ease in value
frmOut = scn.easeOut # ease out value
hldIn = scn.holdGap # holding time value
#in case of Jlipsync format or Yolo
elif scn.regMenuTypes.enumFileTypes == '1' :
frmIn = 1
frmOut = 1
hldIn = 0
    # inserting the In key only when the phoneme changes or when blinking
if lastPhoneme!=phoneme or eval(scn.regMenuTypes.enumModeTypes) == 1:
objSK.key_blocks[phoneme].value=0.0
objSK.key_blocks[phoneme].keyframe_insert("value",
-1, offst+frame-frmIn, "Lipsync")
objSK.key_blocks[phoneme].value=skVlu
objSK.key_blocks[phoneme].keyframe_insert("value",
-1, offst+frame, "Lipsync")
objSK.key_blocks[phoneme].value=skVlu
objSK.key_blocks[phoneme].keyframe_insert("value",
-1, offst+frame+hldIn, "Lipsync")
objSK.key_blocks[phoneme].value=0.0
objSK.key_blocks[phoneme].keyframe_insert("value",
-1, offst+frame+hldIn+frmOut, "Lipsync")
lastPhoneme = phoneme
# lipsyncer operation start
class btn_lipsyncer(bpy.types.Operator):
bl_idname = 'lipsync.go'
bl_label = 'Start Processing'
    bl_description = 'Plots the voice file keys to the timeline'
def execute(self, context):
scn = context.scene
obj = context.active_object
# testing if object is valid
if obj!=None:
if obj.type=="MESH":
if obj.data.shape_keys!=None:
if scn.fpath!='': lipsyncer()
else: print ("select a Moho file")
else: print("No shape keys")
elif obj.type=="ARMATURE":
if 1:#XXX add prop test
if scn.fpath!='': lipsyncerBone()
else: print ("select a Moho file")
else: print("Create Pose properties")
else: print ("Object is not a mesh ot bone")
else: print ("Select object")
return {'FINISHED'}
# blinker operation start
class btn_blinker(bpy.types.Operator):
bl_idname = 'blink.go'
bl_label = 'Start Processing'
    bl_description = 'Add blinks at random or specific frames'
def execute(self, context):
scn = context.scene
obj = context.object
# testing if object is valid
if obj!=None:
if obj.type=="MESH":
if obj.data.shape_keys!=None:
for key in obj.data.shape_keys.key_blocks:
if key.name=='blink':
blinker()
#return
else: print("No shape keys")
else: print ("Object is not a mesh ot bone")
else: print ("Select object")
return {'FINISHED'}
# defining custom enumerators
class menuTypes(bpy.types.PropertyGroup):
enumFileTypes = EnumProperty(items =(('0', 'Papagayo', ''),
('1', 'Jlipsync Or Yolo', '')
#,('2', 'Retarget', '')
),
name = 'Choose FileType',
default = '0')
enumBlinkTypes = EnumProperty(items =(('0', 'Specific', ''),
('1', 'Random','')),
name = 'Choose BlinkType',
default = '0')
enumModeTypes = EnumProperty(items =(('0', 'Lipsyncer',''),
('1', 'Blinker','')),
name = 'Choose Mode',
default = '0')
# drawing the user interface
class LipSyncBoneUI(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_label = "Phonemes"
bl_category = 'Animation'
def draw(self, context):
layout = self.layout
col = layout.column()
bone = bpy.context.active_pose_bone
#showing the current object type
if bone: #and if scn.regMenuTypes.enumModeTypes == '0':
col.prop(bone, "location", index=0, text="AI")
col.prop(bone, "location", index=1, text="E")
col.prop(bone, "location", index=2, text="FV")
if bpy.context.scene.unit_settings.system_rotation == 'RADIANS':
col.prop(bone, "rotation_euler", index=0, text="L")
col.prop(bone, "rotation_euler", index=1, text="MBP")
col.prop(bone, "rotation_euler", index=2, text="O")
else:
row=col.row()
row.prop(bone, "rotation_euler", index=0, text="L")
row.label(text=str("%4.2f" % (bone.rotation_euler.x)))
row=col.row()
row.prop(bone, "rotation_euler", index=1, text="MBP")
row.label(text=str("%4.2f" % (bone.rotation_euler.y)))
row=col.row()
row.prop(bone, "rotation_euler", index=2, text="O")
row.label(text=str("%4.2f" % (bone.rotation_euler.z)))
col.prop(bone, "scale", index=0, text="U")
col.prop(bone, "scale", index=1, text="WQ")
col.prop(bone, "scale", index=2, text="etc")
else:
layout.label(text="No good bone is selected")
# drawing the user interface
class LipSyncUI(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "TOOL_PROPS"
bl_label = "LipSync Importer & Blinker"
newType= bpy.types.Scene
newType.fpath = StringProperty(name="Import File ", description="Select your voice file", subtype="FILE_PATH")
newType.skscale = FloatProperty(description="Smoothing shape key values", min=0.1, max=1.0, default=0.8)
newType.offset = IntProperty(description="Offset your frames", default=0)
newType.easeIn = IntProperty(description="Smoothing In curve", min=1, default=3)
newType.easeOut = IntProperty(description="Smoothing Out curve", min=1, default=3)
newType.holdGap = IntProperty(description="Holding for slow keys", min=0, default=0)
newType.blinkSp = IntProperty(description="Space between blinks", min=1, default=100)
newType.blinkNm = IntProperty(description="Number of blinks", min=1, default=10)
    newType.blinkMod = IntProperty(description="Randomizing keyframe placement", min=1, default=10)
def draw(self, context):
obj = bpy.context.active_object
scn = bpy.context.scene
layout = self.layout
col = layout.column()
# showing the current object type
if obj != None:
if obj.type == "MESH":
split = col.split(align=True)
split.label(text="The active object is: ", icon="OBJECT_DATA")
split.label(obj.name, icon="EDITMODE_HLT")
elif obj.type == "ARMATURE": # bone needs to be selected
if obj.mode == "POSE": # mode needs to be pose
split = col.split(align=True)
split.label(text="The active object is: ", icon="ARMATURE_DATA")
split.label(obj.name, icon="EDITMODE_HLT")
else:
col.label(text="You need to select Pose mode!", icon="OBJECT_DATA")
else:
col.label(text="The active object is not a Mesh or Armature!", icon="OBJECT_DATA")
else:
layout.label(text="No object is selected", icon="OBJECT_DATA")
col.row().prop(scn.regMenuTypes, 'enumModeTypes')
col.separator()
# the lipsyncer panel
if scn.regMenuTypes.enumModeTypes == '0':
# choose the file format
col.row().prop(scn.regMenuTypes, 'enumFileTypes', text = ' ', expand = True)
# Papagayo panel
if scn.regMenuTypes.enumFileTypes == '0':
col.prop(context.scene, "fpath")
split = col.split(align=True)
split.label("Key Value :")
split.prop(context.scene, "skscale")
split = col.split(align=True)
split.label("Frame Offset :")
split.prop(context.scene, "offset")
split = col.split(align=True)
split.prop(context.scene, "easeIn", "Ease In")
split.prop(context.scene, "holdGap", "Hold Gap")
split.prop(context.scene, "easeOut", "Ease Out")
col.operator('lipsync.go', text='Plot Keys to the Timeline')
# Jlipsync & Yolo panel
elif scn.regMenuTypes.enumFileTypes == '1':
col.prop(context.scene, "fpath")
split = col.split(align=True)
split.label("Key Value :")
split.prop(context.scene, "skscale")
split = col.split(align=True)
split.label("Frame Offset :")
split.prop(context.scene, "offset")
col.operator('lipsync.go', text='Plot Keys to the Timeline')
# the blinker panel
elif scn.regMenuTypes.enumModeTypes == '1':
# choose blink type
col.row().prop(scn.regMenuTypes, 'enumBlinkTypes', text = ' ', expand = True)
# specific panel
if scn.regMenuTypes.enumBlinkTypes == '0':
split = col.split(align=True)
split.label("Key Value :")
split.prop(context.scene, "skscale")
split = col.split(align=True)
split.label("Frame Offset :")
split.prop(context.scene, "offset")
split = col.split(align=True)
split.prop(context.scene, "easeIn", "Ease In")
split.prop(context.scene, "holdGap", "Hold Gap")
split.prop(context.scene, "easeOut", "Ease Out")
col.prop(context.scene, "blinkSp", "Spacing")
col.prop(context.scene, "blinkNm", "Times")
col.operator('blink.go', text='Add Keys to the Timeline')
# Random panel
elif scn.regMenuTypes.enumBlinkTypes == '1':
split = col.split(align=True)
split.label("Key Value :")
split.prop(context.scene, "skscale")
split = col.split(align=True)
split.label("Frame Start :")
split.prop(context.scene, "offset")
split = col.split(align=True)
split.prop(context.scene, "easeIn", "Ease In")
split.prop(context.scene, "holdGap", "Hold Gap")
split.prop(context.scene, "easeOut", "Ease Out")
split = col.split(align=True)
split.prop(context.scene, "blinkSp", "Spacing")
split.prop(context.scene, "blinkMod", "Random Modifier")
col.prop(context.scene, "blinkNm", "Times")
col.operator('blink.go', text='Add Keys to the Timeline')
# clearing vars
def clear_properties():
# can happen on reload
if bpy.context.scene is None:
return
props = ["fpath", "skscale", "offset", "easeIn", "easeOut", "holdGap", "blinkSp", "blinkNm", "blinkMod"]
for p in props:
if p in bpy.types.Scene.bl_rna.properties:
exec("del bpy.types.Scene."+p)
if p in bpy.context.scene:
del bpy.context.scene[p]
# registering the script
def register():
bpy.utils.register_module(__name__)
bpy.types.Scene.regMenuTypes = PointerProperty(type = menuTypes)
def unregister():
bpy.utils.unregister_module(__name__)
del bpy.types.Scene.regMenuTypes
clear_properties()
if __name__ == "__main__":
register()
|
py | 7dfe84c1f6d604d90eebba5247acea649aa72796 | #
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Makes the given object sortable and comparable
class SortableObject(object):
def __init__(self):
        # derived classes MUST set up mSortableName!
self.mSortableName = ""
def __hash__(self):
return hash(self.mSortableName)
def __eq__(self, other):
if isinstance(other, SortableObject):
return self.mSortableName == other.mSortableName
elif isinstance(other, str):
return self.mSortableName == other
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, SortableObject):
return self.mSortableName < other.mSortableName
elif isinstance(other, str):
return self.mSortableName < other
else:
return NotImplemented
def __gt__(self, other):
if isinstance(other, SortableObject):
return self.mSortableName > other.mSortableName
elif isinstance(other, str):
return self.mSortableName > other
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, SortableObject):
return not (self.mSortableName == other.mSortableName)
elif isinstance(other, str):
return not (self.mSortableName == other)
else:
return NotImplemented
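# Hedged usage sketch (not part of the original FORCE-RISCV source); the
# subclass name and values below are hypothetical.
#
#     class NamedItem(SortableObject):
#         def __init__(self, name):
#             super().__init__()
#             self.mSortableName = name
#
#     items = sorted([NamedItem("beta"), NamedItem("alpha")])
#     assert items[0] == "alpha"  # __eq__ also supports comparison with str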
|
py | 7dfe8613f5238bd474d873ed824cebf9616dd5f3 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VmCloneEvent(vim, *args, **kwargs):
    '''This is the base event for all clone operations.'''
obj = vim.client.factory.create('{urn:vim25}VmCloneEvent')
# do some validation checking...
if (len(args) + len(kwargs)) < 5:
        raise IndexError('Expected at least 5 arguments, got: %d' % (len(args) + len(kwargs)))
required = [ 'template', 'chainId', 'createdTime', 'key', 'userName' ]
optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
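# Hedged usage sketch (not generated upstream); `vim` is assumed to be a
# connected pyvisdk service instance and the argument values are hypothetical.
#
#     event = VmCloneEvent(vim, template=False, chainId=1,
#                          createdTime=datetime.now(), key=42, userName='root')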
|
py | 7dfe863a0c4edf758117d976adf456c4f968db2a | import setuptools
with open('repixelator/repixelator.py', encoding='utf-8') as f:
for line in f.readlines():
if '__version__' in line:
__version__ = line.split("'")[1].strip()
break
with open('README.md', 'r', encoding='utf-8') as f:
long_description = f.read()
setuptools.setup(
name='repixelator',
version=__version__,
author='YeongChan Lee',
author_email='[email protected]',
description='Converts resized pixel arts to their original resolution',
long_description=long_description,
long_description_content_type='text/markdown',
license='MIT License',
url='https://github.com/yclee126/RePixelator',
entry_points ={
'console_scripts': [
'repixelator = repixelator:cmd',
'repixelator-gui = repixelator:gui',
]
},
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
],
packages=['repixelator'],
package_data={'repixelator' : ['icon.ico']},
install_requires = [
'opencv-python',
'numpy',
],
python_requires='>=3.5',
) |
py | 7dfe866e5981a641492854aaada34f5070a5ca05 | # -*- coding: utf-8 -*-
'''
Tools for working with data from MLML public data portal:
http://pubdata.mlml.calstate.edu
'''
try:
# For Python 3.0 and later
from urllib.request import urlopen
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen
import re
import os
import sys
import numpy as np
from glob import glob
from datetime import datetime, timedelta
from physoce import util
try:
import pandas as pd
except ImportError:
pass
try:
import xarray as xr
except ImportError:
pass
def make_netcdf(station_dir,netcdf_file,station,download=False,overwrite=False):
"""
Create a netcdf file containing MLML historical seawater or weather data. The file will be created from csv and readme files already on disk, or they can be downloaded.
INPUT:
station_dir - string specifying the location of csv files (e.g. '/home/username/data/')
netcdf_file - string specifying the location and name of netcdf file to be created (e.g. '/home/username/data/mlml_seawater.nc')
    station - either 'seawater' or 'weather'
download - boolean specifying whether to download new files
(default: False)
overwrite - boolean specifying whether to overwrite the existing files, only used if downloading new data (default: False)
"""
# download new data, if specified
if download == True:
download_station_data(station_dir,station,overwrite)
# read data in csv files to xarray dataset
d = read_csv_data(station_dir,format='dataset')
# specify location of readme file and add metadata to dataset
readme_file = station_dir + '1_README.TXT'
_add_metadata_xarray(d,station,readme_file)
    d.attrs['history'] = d.attrs['history'] + 'netcdf file created using physoce.obs.mlml.make_netcdf(station_dir='+station_dir+',netcdf_file='+netcdf_file+',station='+station+',download='+str(download)+',overwrite='+str(overwrite)+'): ' + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ', '
# create netcdf file
d.to_netcdf(netcdf_file,mode='w')
def download_station_data(station_dir,station='seawater',overwrite=True):
'''
    Download all historical csv files for the MLML seawater intake or weather station. The latest version of the readme file is also downloaded. It is highly recommended to use different directories for seawater and weather, since the readme files have the same name. By default, new files are downloaded and existing files are overwritten.
INPUT:
station_dir - string specifying the local directory where you want to put
the data files
station - either 'seawater' or 'weather' (default: 'seawater')
overwrite - boolean specifying whether to overwrite the existing files
                (default: True)
'''
# remote directories
base_url = 'http://pubdata.mlml.calstate.edu/mlml_last/'
station_url = base_url + '/' + station + '/'
# local directory
station_dir = station_dir + '/'
# check whether a directory exists for this station
if os.path.isdir(station_dir) == False:
os.makedirs(station_dir)
# find names of csv files that exist on the web and create a list
# the csv filenames are in format yyyy-mm.csv
    urlpath = urlopen(station_url)
html_string = urlpath.read().decode()
urlpath.close()
file_pattern = '[0-9][0-9][0-9][0-9]-[0-9][0-9].csv'
csv_list = re.findall(file_pattern,html_string)
# get updated readme file
urlr = urlopen(station_url + '1_README.TXT')
fr = open(station_dir + '1_README.TXT','w')
    fr.write(urlr.read().decode('utf8'))
fr.close()
urlr.close()
# loop through each remote csv file and download if:
# the file does not exist, the overwrite option is True or it is the last
# file in the list (there may be new data in that file)
for csv_name in csv_list:
print('downloading ' + station + ': ' + csv_name)
remote_file = station_url + csv_name
local_file = station_dir + csv_name
write_conditions = [os.path.exists(local_file) == False,
overwrite == True,
csv_name == csv_list[-1]]
if any(write_conditions):
urlfile = urlopen(remote_file)
f = open(local_file,'w')
filebytes = urlfile.read()
f.write(filebytes.decode('utf8'))
f.close()
urlfile.close()
def read_csv_data(data_dir,format='dict'):
'''
Read historical text data (.csv files) from the MLML seawater intake or weather station. The data must be stored locally, and can be downloaded automatically with the download_station_data() function.
Inputs:
data_dir - Specifies the directory where the data files are located. All files with the format yyyy-mm.csv in this directory will be read.
Options:
format: output format
format = 'dict' (default): dictionary
format = 'dataframe': pandas DataFrame
format = 'dataset': xarray DataSet
Output: dictionary, pandas DataFrame or xarray DataSet with keys/variable names taken from column headers
'''
file_list = glob(data_dir+'*.csv')
# get list of variable names from header of first file
f = open(file_list[0],'r')
header = f.readline()
f.close()
header = header.strip('\r\n')
varnames = header.split(',')
    # initialize dictionary with key and empty list for each variable
    d = dict()
    for var in varnames:
        d[var] = []
# specify which columns contain numeric data
floatcols = range(2,len(varnames))
allcols = range(0,len(varnames))
strcols = list(set(allcols)-set(floatcols))
for file_name in file_list:
print('reading ' + file_name)
# get numeric data, with missing values as NaN
datamasked = np.genfromtxt(file_name,
skip_header=1,
delimiter=',',
missing_values='-99999',
usemask=True)
data = datamasked.filled(np.nan)
# get string data
datastr = np.genfromtxt(file_name,
skip_header=1,
delimiter=',',
usecols=tuple(strcols),
dtype=str)
# append data variables
if data.size != 0:
for col in floatcols:
vname = varnames[col]
d[vname] = np.append(d[vname],data[:,col])
for si,col in enumerate(strcols):
vname = varnames[col]
d[vname] = np.append(d[vname],datastr[:,si])
# create date variables
# put in a numpy array for easy indexing
# new variable for datetime
dtime = np.array(util.list2date(d['utc_time'],'%Y-%m-%dT%H:%M:%SZ'))
# remove duplicate times
ii = np.where(np.diff(dtime) > timedelta(0.))[0]
dtime = dtime[ii]
for var in varnames:
d[var] = d[var][ii]
# Try loading in pandas or xarray format if specified, default to dictionary format
if format == 'dataset':
if 'xarray' not in sys.modules:
format = 'dataframe'
print("Warning: xarray not installed, loading MLML data in pandas dataframe format instead")
if format == 'dataframe':
if 'pandas' not in sys.modules:
format = 'dict'
print("Warning: pandas not installed, loading MLML data in dictionary format instead")
    if format == 'dataframe':
# turn dictionary into pandas dataframe
d = pd.DataFrame(d,index=dtime)
d.index.name = 'time'
    elif format == 'dataset':
# turn dictionary in xarray dataset, using dataframe as intermediate format
d = pd.DataFrame(d,index=dtime)
d.index.name = 'time'
d = xr.Dataset(d)
d.attrs['history'] = 'dataset created using physoce.obs.mlml.read_csv_data: ' + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ', '
else:
# default format: dictionary containing numpy arrays
d['dtime'] = []
d['dtime'] = dtime
return d
def _add_metadata_xarray(d,station,readme_file):
"""
Add metadata to xarray dataset. Currently this adds lat and lon coordinates and puts the contents of the readme in an attribute. For the weather data, the anemometer height is also added as a coordinate.
"""
    if station == 'seawater':
d.coords['lon'] = -121.7915
d.coords['lat'] = 36.8025
    elif station == 'weather':
d.coords['lon'] = -121.78842
d.coords['lat'] = 36.80040
d.coords['z'] = 3.3
d.coords['z'].attrs['name'] = 'anemometer height'
d.coords['z'].attrs['units'] = 'meters'
with open(readme_file) as f:
contents = f.read()
d.attrs['readme'] = contents
d.attrs['history'] = d.attrs['history'] + 'attributes added to dataset using physoce.obs.mlml._add_metadata_xarray: ' + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ', '
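# Hedged usage sketch (not part of the original module); the local paths below
# are hypothetical.
#
#     from physoce.obs import mlml
#     mlml.download_station_data('/tmp/mlml/seawater/', station='seawater')
#     df = mlml.read_csv_data('/tmp/mlml/seawater/', format='dataframe')
#     mlml.make_netcdf('/tmp/mlml/seawater/', '/tmp/mlml/seawater.nc',
#                      station='seawater')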
|
py | 7dfe86d28bee75977ddcca919e18dc7aaf834288 | from typing import Any, NoReturn
from project_automation.commands import DenoCommand
from project_automation.files import TypescriptFile
from project_automation.projects import Project
class DenoProject(Project):
"""
Represents the base of a Deno project to create.
Attributes
----------
path : str
path of the parent root of the project
name : str
name of the project (used for make directory and github repo)
allow_install : bool
True if you want to automatically install the required packages, False otherwise
github_settings : dict
some github informations
errors : list of string
all occured error during project creation (not exceptions)
user : ~github.AuthenticatedUser or ~github.NamedUser
github user if ``github_settings`` is not empty
root : ~files.Folder
root folder of the project
"""
CONFIG = {
'languages': ["Deno"],
'readme_content': {
'1': ("title", "Table of contents", 2),
'2': ("paragraph", "1. [Usage of the application](#usage)"),
'3': ("title", "Usage", 2),
'4': ("code", "$ deno run --allow-net server.ts", "shell")
},
}
def __init__(self, path: str, name: str, github_settings: dict = {}, **kwargs: Any) -> NoReturn:
"""
Constructor and initializer.
Parameters
----------
path : str
path of the parent root of the project
name : str
name of the project (used for make directory and github repo)
github_settings : dict
some github informations
**kwargs : Any
other keywords parameters
"""
super().__init__(path, name, github_settings=github_settings, **kwargs)
def create(self) -> NoReturn:
"""
Create the structure of the project.
"""
super().create()
server_file = TypescriptFile(self.path, 'server')
server_file.write("""import { serve } from "https://deno.land/[email protected]/http/server.ts";
const s = serve({ port: 8000 });
console.log("http://localhost:8000/");
for await (const req of s) {
req.respond({ body: "Hello World\\n" });
}
""")
def verify_installation(self) -> NoReturn:
"""
Verify if all the required programs are installed.
See also
--------
commands.DenoCommand
"""
super().verify_installation()
DenoCommand(self.allow_install)
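# Hedged usage sketch (not part of the original package); the path and project
# name are hypothetical.
#
#     project = DenoProject('/tmp', 'hello-deno')
#     project.verify_installation()
#     project.create()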
|
py | 7dfe879af20eec210ef250cf9da094487f8e594f | from base64 import standard_b64encode
from aiohttp import ClientSession
from jinja2 import PackageLoader
from blacksheep.server import Application
from blacksheep.server.templating import use_templates
from app import controllers
from app.configuration import config
app = Application(show_error_details=config.show_error_details)
use_templates(app, PackageLoader("app", "views"))
async def configure_http_client(app):
http_client = ClientSession(config.hsp_api_url, headers={
"Accept": "application/json",
"Authorization": "Basic " + standard_b64encode(f"{config.hsp_api_username}:{config.hsp_api_password}".encode()).decode(),
"Content-Type": "application/json"
})
app.services.add_instance(http_client)
async def dispose_http_client(app):
http_client = app.services.get(ClientSession)
await http_client.close()
app.on_start += configure_http_client
app.on_stop += dispose_http_client
app.serve_files("app/static")
|
py | 7dfe88c9bd24602a77d5c6c8481a601f54273f62 | import boto3
import yaml
import sys
import traceback
with open('env-details.yml', 'r') as f:
    doc = yaml.safe_load(f)
flag = False
try:
for cmd, props in doc.iteritems():
if cmd == 'describe_security_groups':
print 'Checking Security group ' + props['Name']
ec2 = boto3.client('ec2', region_name='eu-west-1')
sg_res = ec2.describe_security_groups(Filters=[{'Name': 'group-name',
'Values': ['*' + props['Name'] + '*']}])
sg_perms = sg_res['SecurityGroups'][0]['IpPermissions'][0]
if sg_perms['IpProtocol'] != props['Properties']['IpProtocol'] or \
sg_perms['ToPort'] != props['Properties']['ToPort'] or \
sg_perms['FromPort'] != props['Properties']['FromPort'] or \
sg_perms['IpRanges'][0]['CidrIp'] != props['Properties']['Target']:
sys.exit(-1)
else:
continue
if cmd == 'get_role':
print 'Checking role ' + props['Name']
iam = boto3.client('iam', region_name='us-east-1')
role_res = iam.get_role(RoleName=props['Name'])
role_perms = role_res['Role']['AssumeRolePolicyDocument']['Statement'][0]
props_role = props['Properties']['AssumeRolePolicyDocument'][0]
if role_perms['Action'] != props_role['Action'] or \
role_perms['Effect'] != props_role['Effect'] or \
role_perms['Principal']['Service'] != props_role['Principal']['Service']:
sys.exit(-1)
else:
continue
if cmd == 'describe_load_balancers':
print 'Checking ELB ' + props['Name']
elb = boto3.client('elb', region_name='eu-west-1')
elb_res = elb.describe_load_balancers()
for lbs in elb_res['LoadBalancerDescriptions']:
if props['Name'] in lbs['LoadBalancerName']:
elb_config = lbs
elb_lstr = elb_config['ListenerDescriptions'][0]['Listener']
break
props_elb = props['Properties']['Listeners'][0]
if elb_config['Scheme'] != props['Properties']['Scheme'] or \
elb_lstr['InstancePort'] != props_elb['InstancePort'] or \
elb_lstr['LoadBalancerPort'] != props_elb['LoadBalancerPort'] or \
elb_lstr['Protocol'] != props_elb['Protocol']:
print elb_lstr
print props_elb
sys.exit(-1)
else:
continue
if cmd == 'describe_launch_configurations':
print 'Checking Launch Configuration ' + props['Name']
asg = boto3.client('autoscaling', region_name='eu-west-1')
lc_res = asg.describe_launch_configurations()
for lcs in lc_res['LaunchConfigurations']:
if props['Name'] in lcs['LaunchConfigurationName']:
lc_config = lcs
break
props_asg = props['Properties']
if lc_config['KeyName'] != props_asg['KeyName'] or \
lc_config['ImageId'] != props_asg['ImageId'] or \
lc_config['InstanceType'] != props_asg['InstanceType']:
sys.exit(-1)
else:
continue
if cmd == 'describe_auto_scaling_groups':
print 'Checking Auto Scaling group ' + props['Name']
asg = boto3.client('autoscaling', region_name='eu-west-1')
asg_res = asg.describe_auto_scaling_groups()
for asgs in asg_res['AutoScalingGroups']:
for tag in asgs['Tags']:
if 'Name' in tag.values():
if 'WebServer-test' in tag['Value']:
asg_config = asgs
flag = True
break
if flag:
break
props_asg = props['Properties']
if props_asg['LaunchConfigurationName'] not in asg_config['LaunchConfigurationName'] or \
asg_config['MinSize'] != props_asg['MinSize'] or \
asg_config['MaxSize'] != props_asg['MaxSize'] or \
props_asg['LoadBalancerNames'] not in asg_config['LoadBalancerNames'][0]:
print asg_config
print props_asg
sys.exit(-1)
else:
continue
except:
print 'Failed'
traceback.print_exc()
sys.exit(-1)
|
py | 7dfe8b2cc124c9ca281849516abb3a8880cd16d4 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.18.20
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes_asyncio.client.configuration import Configuration
class V1EventSeries(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'count': 'int',
'last_observed_time': 'datetime',
'state': 'str'
}
attribute_map = {
'count': 'count',
'last_observed_time': 'lastObservedTime',
'state': 'state'
}
def __init__(self, count=None, last_observed_time=None, state=None, local_vars_configuration=None): # noqa: E501
"""V1EventSeries - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._count = None
self._last_observed_time = None
self._state = None
self.discriminator = None
if count is not None:
self.count = count
if last_observed_time is not None:
self.last_observed_time = last_observed_time
if state is not None:
self.state = state
@property
def count(self):
"""Gets the count of this V1EventSeries. # noqa: E501
Number of occurrences in this series up to the last heartbeat time # noqa: E501
:return: The count of this V1EventSeries. # noqa: E501
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this V1EventSeries.
Number of occurrences in this series up to the last heartbeat time # noqa: E501
:param count: The count of this V1EventSeries. # noqa: E501
:type: int
"""
self._count = count
@property
def last_observed_time(self):
"""Gets the last_observed_time of this V1EventSeries. # noqa: E501
Time of the last occurrence observed # noqa: E501
:return: The last_observed_time of this V1EventSeries. # noqa: E501
:rtype: datetime
"""
return self._last_observed_time
@last_observed_time.setter
def last_observed_time(self, last_observed_time):
"""Sets the last_observed_time of this V1EventSeries.
Time of the last occurrence observed # noqa: E501
:param last_observed_time: The last_observed_time of this V1EventSeries. # noqa: E501
:type: datetime
"""
self._last_observed_time = last_observed_time
@property
def state(self):
"""Gets the state of this V1EventSeries. # noqa: E501
        State of this Series: Ongoing or Finished. Deprecated. Planned removal for 1.18 # noqa: E501
:return: The state of this V1EventSeries. # noqa: E501
:rtype: str
"""
return self._state
@state.setter
def state(self, state):
"""Sets the state of this V1EventSeries.
        State of this Series: Ongoing or Finished. Deprecated. Planned removal for 1.18 # noqa: E501
:param state: The state of this V1EventSeries. # noqa: E501
:type: str
"""
self._state = state
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1EventSeries):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1EventSeries):
return True
return self.to_dict() != other.to_dict()
|
py | 7dfe8b43a67801348ab81dd76e5faaa67b4589a1 | import numpy as np
import tensorflow as tf
import time
import logging
import os
try:
from collections.abc import Sequence as SequenceCollection
except:
from collections import Sequence as SequenceCollection
from deepchem.data import Dataset, NumpyDataset
from deepchem.metrics import Metric
from deepchem.models.losses import Loss
from deepchem.models.models import Model
from deepchem.models.optimizers import Adam, Optimizer, LearningRateSchedule
from deepchem.trans import Transformer, undo_transforms
from deepchem.utils.evaluate import GeneratorEvaluator
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple, Union
from deepchem.utils.typing import ArrayLike, LossFn, OneOrMany
from deepchem.models.wandblogger import WandbLogger
try:
import wandb
wandb.ensure_configured()
if wandb.api.api_key is None:
_has_wandb = False
wandb.termwarn(
"W&B installed but not logged in. Run `wandb login` or set the WANDB_API_KEY env variable."
)
else:
_has_wandb = True
except (ImportError, AttributeError):
_has_wandb = False
logger = logging.getLogger(__name__)
class KerasModel(Model):
"""This is a DeepChem model implemented by a Keras model.
This class provides several advantages over using the Keras
model's fitting and prediction methods directly.
1. It provides better integration with the rest of DeepChem,
such as direct support for Datasets and Transformers.
2. It defines the loss in a more flexible way. In particular,
Keras does not support multidimensional weight matrices,
which makes it impossible to implement most multitask
models with Keras.
3. It provides various additional features not found in the
Keras Model class, such as uncertainty prediction and
saliency mapping.
Here is a simple example of code that uses KerasModel to train
a Keras model on a DeepChem dataset.
>> keras_model = tf.keras.Sequential([
>> tf.keras.layers.Dense(1000, activation='tanh'),
>> tf.keras.layers.Dense(1)
>> ])
>> model = KerasModel(keras_model, loss=dc.models.losses.L2Loss())
>> model.fit(dataset)
The loss function for a model can be defined in two different
ways. For models that have only a single output and use a
standard loss function, you can simply provide a
dc.models.losses.Loss object. This defines the loss for each
sample or sample/task pair. The result is automatically
multiplied by the weights and averaged over the batch. Any
additional losses computed by model layers, such as weight
decay penalties, are also added.
For more complicated cases, you can instead provide a function
that directly computes the total loss. It must be of the form
f(outputs, labels, weights), taking the list of outputs from
the model, the expected values, and any weight matrices. It
should return a scalar equal to the value of the loss function
for the batch. No additional processing is done to the
result; it is up to you to do any weighting, averaging, adding
of penalty terms, etc.
You can optionally provide an output_types argument, which
describes how to interpret the model's outputs. This should
be a list of strings, one for each output. You can use an
arbitrary output_type for a output, but some output_types are
special and will undergo extra processing:
- 'prediction': This is a normal output, and will be returned by predict().
If output types are not specified, all outputs are assumed
to be of this type.
- 'loss': This output will be used in place of the normal
outputs for computing the loss function. For example,
models that output probability distributions usually do it
by computing unbounded numbers (the logits), then passing
them through a softmax function to turn them into
probabilities. When computing the cross entropy, it is more
numerically stable to use the logits directly rather than
the probabilities. You can do this by having the model
produce both probabilities and logits as outputs, then
specifying output_types=['prediction', 'loss']. When
predict() is called, only the first output (the
probabilities) will be returned. But during training, it is
the second output (the logits) that will be passed to the
loss function.
- 'variance': This output is used for estimating the
uncertainty in another output. To create a model that can
estimate uncertainty, there must be the same number of
'prediction' and 'variance' outputs. Each variance output
must have the same shape as the corresponding prediction
output, and each element is an estimate of the variance in
the corresponding prediction. Also be aware that if a model
supports uncertainty, it MUST use dropout on every layer,
    and dropout must be enabled during uncertainty prediction.
Otherwise, the uncertainties it computes will be inaccurate.
- other: Arbitrary output_types can be used to extract outputs
produced by the model, but will have no additional
processing performed.
"""
def __init__(self,
model: tf.keras.Model,
loss: Union[Loss, LossFn],
output_types: Optional[List[str]] = None,
batch_size: int = 100,
model_dir: Optional[str] = None,
learning_rate: Union[float, LearningRateSchedule] = 0.001,
optimizer: Optional[Optimizer] = None,
tensorboard: bool = False,
wandb: bool = False,
log_frequency: int = 100,
wandb_logger: Optional[WandbLogger] = None,
**kwargs) -> None:
"""Create a new KerasModel.
Parameters
----------
model: tf.keras.Model
the Keras model implementing the calculation
loss: dc.models.losses.Loss or function
a Loss or function defining how to compute the training loss for each
batch, as described above
output_types: list of strings
the type of each output from the model, as described above
batch_size: int
default batch size for training and evaluating
model_dir: str
the directory on disk where the model will be stored. If this is None,
a temporary directory is created.
learning_rate: float or LearningRateSchedule
the learning rate to use for fitting. If optimizer is specified, this is
ignored.
optimizer: Optimizer
the optimizer to use for fitting. If this is specified, learning_rate is
ignored.
tensorboard: bool
whether to log progress to TensorBoard during training
wandb: bool
whether to log progress to Weights & Biases during training (deprecated)
log_frequency: int
The frequency at which to log data. Data is logged using
`logging` by default. If `tensorboard` is set, data is also
logged to TensorBoard. If `wandb` is set, data is also logged
to Weights & Biases. Logging happens at global steps. Roughly,
a global step corresponds to one batch of training. If you'd
like a printout every 10 batch steps, you'd set
`log_frequency=10` for example.
wandb_logger: WandbLogger
the Weights & Biases logger object used to log data and metrics
"""
super(KerasModel, self).__init__(model=model, model_dir=model_dir, **kwargs)
self.loss = loss # not used
self.learning_rate = learning_rate # not used
self.output_types = output_types # not used
if isinstance(loss, Loss):
self._loss_fn: LossFn = _StandardLoss(model, loss)
else:
self._loss_fn = loss
self.batch_size = batch_size
if optimizer is None:
self.optimizer: Optimizer = Adam(learning_rate=learning_rate)
else:
self.optimizer = optimizer
self.tensorboard = tensorboard
# W&B flag support (DEPRECATED)
if wandb:
logger.warning(
"`wandb` argument is deprecated. Please use `wandb_logger` instead. "
"This argument will be removed in a future release of DeepChem.")
if wandb and not _has_wandb:
logger.warning(
"You set wandb to True but W&B is not installed. To use wandb logging, "
"run `pip install wandb; wandb login`")
self.wandb = wandb and _has_wandb
self.wandb_logger = wandb_logger
# If `wandb=True` and no logger is provided, initialize default logger
if self.wandb and (self.wandb_logger is None):
self.wandb_logger = WandbLogger()
# Setup and initialize W&B logging
if (self.wandb_logger is not None) and (not self.wandb_logger.initialized):
self.wandb_logger.setup()
# Update config with KerasModel params
wandb_logger_config = dict(
loss=loss,
output_types=output_types,
batch_size=batch_size,
model_dir=model_dir,
learning_rate=learning_rate,
optimizer=optimizer,
tensorboard=tensorboard,
log_frequency=log_frequency)
wandb_logger_config.update(**kwargs)
if self.wandb_logger is not None:
self.wandb_logger.update_config(wandb_logger_config)
# Backwards compatibility
if "tensorboard_log_frequency" in kwargs:
logger.warning(
"tensorboard_log_frequency is deprecated. Please use log_frequency instead. This argument will be removed in a future release of DeepChem."
)
self.log_frequency = kwargs["tensorboard_log_frequency"]
else:
self.log_frequency = log_frequency
if self.tensorboard:
self._summary_writer = tf.summary.create_file_writer(self.model_dir)
if output_types is None:
self._prediction_outputs = None
self._loss_outputs = None
self._variance_outputs = None
self._other_outputs = None
else:
self._prediction_outputs = []
self._loss_outputs = []
self._variance_outputs = []
self._other_outputs = []
for i, type in enumerate(output_types):
if type == 'prediction':
self._prediction_outputs.append(i)
elif type == 'loss':
self._loss_outputs.append(i)
elif type == 'variance':
self._variance_outputs.append(i)
else:
self._other_outputs.append(i)
if len(self._loss_outputs) == 0:
self._loss_outputs = self._prediction_outputs
self._built = False
self._inputs_built = False
self._training_ops_built = False
self._output_functions: Dict[Any, Any] = {}
self._gradient_fn_for_vars: Dict[Any, Any] = {}
def _ensure_built(self) -> None:
"""The first time this is called, create internal data structures."""
if self._built:
return
self._built = True
self._global_step = tf.Variable(0, trainable=False)
self._tf_optimizer = self.optimizer._create_tf_optimizer(self._global_step)
self._checkpoint = tf.train.Checkpoint(
optimizer=self._tf_optimizer, model=self.model)
def _create_inputs(self, example_inputs: List) -> None:
"""The first time this is called, create tensors representing the inputs and outputs."""
if self._inputs_built:
return
self._ensure_built()
self._inputs_built = True
if (self.model.inputs is not None) and len(self.model.inputs) > 0:
self._input_shapes = [t.shape for t in self.model.inputs]
self._input_dtypes = [t.dtype.as_numpy_dtype for t in self.model.inputs]
else:
self._input_shapes = [(None,) + i.shape[1:] for i in example_inputs]
self._input_dtypes = [
np.float32 if x.dtype == np.float64 else x.dtype
for x in example_inputs
]
def _create_training_ops(self,
example_batch: Tuple[List, List, List]) -> None:
"""The first time this is called, create tensors used in optimization."""
if self._training_ops_built:
return
self._create_inputs(example_batch[0])
self._training_ops_built = True
self._label_dtypes = [
np.float32 if x.dtype == np.float64 else x.dtype
for x in example_batch[1]
]
self._weights_dtypes = [
np.float32 if x.dtype == np.float64 else x.dtype
for x in example_batch[2]
]
def fit(self,
dataset: Dataset,
nb_epoch: int = 10,
max_checkpoints_to_keep: int = 5,
checkpoint_interval: int = 1000,
deterministic: bool = False,
restore: bool = False,
variables: Optional[List[tf.Variable]] = None,
loss: Optional[LossFn] = None,
callbacks: Union[Callable, List[Callable]] = [],
all_losses: Optional[List[float]] = None) -> float:
"""Train this model on a dataset.
Parameters
----------
dataset: Dataset
the Dataset to train on
nb_epoch: int
the number of epochs to train for
max_checkpoints_to_keep: int
the maximum number of checkpoints to keep. Older checkpoints are discarded.
checkpoint_interval: int
the frequency at which to write checkpoints, measured in training steps.
Set this to 0 to disable automatic checkpointing.
deterministic: bool
if True, the samples are processed in order. If False, a different random
order is used for each epoch.
restore: bool
if True, restore the model from the most recent checkpoint and continue training
from there. If False, retrain the model from scratch.
variables: list of tf.Variable
the variables to train. If None (the default), all trainable variables in
the model are used.
loss: function
a function of the form f(outputs, labels, weights) that computes the loss
for each batch. If None (the default), the model's standard loss function
is used.
callbacks: function or list of functions
one or more functions of the form f(model, step) that will be invoked after
every step. This can be used to perform validation, logging, etc.
all_losses: Optional[List[float]], optional (default None)
If specified, all logged losses are appended into this list. Note that
you can call `fit()` repeatedly with the same list and losses will
continue to be appended.
Returns
-------
The average loss over the most recent checkpoint interval
"""
return self.fit_generator(
self.default_generator(
dataset, epochs=nb_epoch,
deterministic=deterministic), max_checkpoints_to_keep,
checkpoint_interval, restore, variables, loss, callbacks, all_losses)
def fit_generator(self,
generator: Iterable[Tuple[Any, Any, Any]],
max_checkpoints_to_keep: int = 5,
checkpoint_interval: int = 1000,
restore: bool = False,
variables: Optional[List[tf.Variable]] = None,
loss: Optional[LossFn] = None,
callbacks: Union[Callable, List[Callable]] = [],
all_losses: Optional[List[float]] = None) -> float:
"""Train this model on data from a generator.
Parameters
----------
generator: generator
this should generate batches, each represented as a tuple of the form
(inputs, labels, weights).
max_checkpoints_to_keep: int
the maximum number of checkpoints to keep. Older checkpoints are discarded.
checkpoint_interval: int
the frequency at which to write checkpoints, measured in training steps.
Set this to 0 to disable automatic checkpointing.
restore: bool
if True, restore the model from the most recent checkpoint and continue training
from there. If False, retrain the model from scratch.
variables: list of tf.Variable
the variables to train. If None (the default), all trainable variables in
the model are used.
loss: function
a function of the form f(outputs, labels, weights) that computes the loss
for each batch. If None (the default), the model's standard loss function
is used.
callbacks: function or list of functions
one or more functions of the form f(model, step) that will be invoked after
every step. This can be used to perform validation, logging, etc.
all_losses: Optional[List[float]], optional (default None)
If specified, all logged losses are appended into this list. Note that
you can call `fit()` repeatedly with the same list and losses will
continue to be appended.
Returns
-------
The average loss over the most recent checkpoint interval
"""
if not isinstance(callbacks, SequenceCollection):
callbacks = [callbacks]
self._ensure_built()
if checkpoint_interval > 0:
manager = tf.train.CheckpointManager(self._checkpoint, self.model_dir,
max_checkpoints_to_keep)
avg_loss = 0.0
last_avg_loss = 0.0
averaged_batches = 0
if loss is None:
loss = self._loss_fn
var_key = None
if variables is not None:
var_key = tuple(v.ref() for v in variables)
# The optimizer creates internal variables the first time apply_gradients()
# is called for a new set of variables. If that happens inside a function
# annotated with tf.function it throws an exception, so call it once here.
zero_grads = [tf.zeros(v.shape) for v in variables]
self._tf_optimizer.apply_gradients(zip(zero_grads, variables))
if var_key not in self._gradient_fn_for_vars:
self._gradient_fn_for_vars[var_key] = self._create_gradient_fn(variables)
apply_gradient_for_batch = self._gradient_fn_for_vars[var_key]
time1 = time.time()
# Main training loop.
for batch in generator:
self._create_training_ops(batch)
if restore:
self.restore()
restore = False
inputs, labels, weights = self._prepare_batch(batch)
# Execute the loss function, accumulating the gradients.
if len(inputs) == 1:
inputs = inputs[0]
batch_loss = apply_gradient_for_batch(inputs, labels, weights, loss)
current_step = self._global_step.numpy()
avg_loss += batch_loss
# Report progress and write checkpoints.
averaged_batches += 1
should_log = (current_step % self.log_frequency == 0)
if should_log:
avg_loss = float(avg_loss) / averaged_batches
logger.info(
'Ending global_step %d: Average loss %g' % (current_step, avg_loss))
if all_losses is not None:
all_losses.append(avg_loss)
# Capture the last avg_loss in case of return since we're resetting to
# 0 now
last_avg_loss = avg_loss
avg_loss = 0.0
averaged_batches = 0
if checkpoint_interval > 0 and current_step % checkpoint_interval == checkpoint_interval - 1:
manager.save()
for c in callbacks:
c(self, current_step)
if self.tensorboard and should_log:
self._log_scalar_to_tensorboard('loss', batch_loss, current_step)
if (self.wandb_logger is not None) and should_log:
all_data = dict({'train/loss': batch_loss})
self.wandb_logger.log_data(all_data, step=current_step)
# Report final results.
if averaged_batches > 0:
avg_loss = float(avg_loss) / averaged_batches
logger.info(
'Ending global_step %d: Average loss %g' % (current_step, avg_loss))
if all_losses is not None:
all_losses.append(avg_loss)
last_avg_loss = avg_loss
if checkpoint_interval > 0:
manager.save()
time2 = time.time()
logger.info("TIMING: model fitting took %0.3f s" % (time2 - time1))
return last_avg_loss
def _create_gradient_fn(self,
variables: Optional[List[tf.Variable]]) -> Callable:
"""Create a function that computes gradients and applies them to the model.
Because of the way TensorFlow function tracing works, we need to create a
separate function for each new set of variables.
"""
@tf.function(experimental_relax_shapes=True)
def apply_gradient_for_batch(inputs, labels, weights, loss):
with tf.GradientTape() as tape:
outputs = self.model(inputs, training=True)
if tf.is_tensor(outputs):
outputs = [outputs]
if self._loss_outputs is not None:
outputs = [outputs[i] for i in self._loss_outputs]
batch_loss = loss(outputs, labels, weights)
if variables is None:
vars = self.model.trainable_variables
else:
vars = variables
grads = tape.gradient(batch_loss, vars)
self._tf_optimizer.apply_gradients(zip(grads, vars))
self._global_step.assign_add(1)
return batch_loss
return apply_gradient_for_batch
def fit_on_batch(self,
X: Sequence,
y: Sequence,
w: Sequence,
variables: Optional[List[tf.Variable]] = None,
loss: Optional[LossFn] = None,
callbacks: Union[Callable, List[Callable]] = [],
checkpoint: bool = True,
max_checkpoints_to_keep: int = 5) -> float:
"""Perform a single step of training.
Parameters
----------
X: ndarray
the inputs for the batch
y: ndarray
the labels for the batch
w: ndarray
the weights for the batch
variables: list of tf.Variable
the variables to train. If None (the default), all trainable variables in
the model are used.
loss: function
a function of the form f(outputs, labels, weights) that computes the loss
for each batch. If None (the default), the model's standard loss function
is used.
callbacks: function or list of functions
one or more functions of the form f(model, step) that will be invoked after
every step. This can be used to perform validation, logging, etc.
checkpoint: bool
if true, save a checkpoint after performing the training step
max_checkpoints_to_keep: int
the maximum number of checkpoints to keep. Older checkpoints are discarded.
Returns
-------
the loss on the batch
"""
self._ensure_built()
dataset = NumpyDataset(X, y, w)
return self.fit(
dataset,
nb_epoch=1,
max_checkpoints_to_keep=max_checkpoints_to_keep,
checkpoint_interval=self._global_step.numpy() + 2 if checkpoint else 0,
variables=variables,
loss=loss,
callbacks=callbacks)
def _predict(
self, generator: Iterable[Tuple[Any, Any, Any]],
transformers: List[Transformer], outputs: Optional[OneOrMany[tf.Tensor]],
uncertainty: bool,
other_output_types: Optional[OneOrMany[str]]) -> OneOrMany[np.ndarray]:
"""
Predict outputs for data provided by a generator.
This is the private implementation of prediction. Do not
call it directly. Instead call one of the public prediction
methods.
Parameters
----------
generator: generator
this should generate batches, each represented as a tuple of the form
(inputs, labels, weights).
transformers: list of dc.trans.Transformers
Transformers that the input data has been transformed by. The output
is passed through these transformers to undo the transformations.
outputs: Tensor or list of Tensors
The outputs to return. If this is None, the model's standard prediction
outputs will be returned. Alternatively one or more Tensors within the
model may be specified, in which case the output of those Tensors will be
returned.
uncertainty: bool
specifies whether this is being called as part of estimating uncertainty.
If True, it sets the training flag so that dropout will be enabled, and
returns the values of the uncertainty outputs.
other_output_types: list, optional
Provides a list of other output_types (strings) to predict from model.
Returns
-------
    a NumPy array if the model produces a single output, or a list of arrays
if it produces multiple outputs
"""
results: Optional[List[List[np.ndarray]]] = None
variances: Optional[List[List[np.ndarray]]] = None
if (outputs is not None) and (other_output_types is not None):
raise ValueError(
          'This model cannot compute outputs and other output_types simultaneously. '
'Please invoke one at a time.')
if uncertainty and (other_output_types is not None):
raise ValueError(
          'This model cannot compute uncertainties and other output types simultaneously. '
'Please invoke one at a time.')
if uncertainty:
assert outputs is None
if self._variance_outputs is None or len(self._variance_outputs) == 0:
raise ValueError('This model cannot compute uncertainties')
if len(self._variance_outputs) != len(self._prediction_outputs):
raise ValueError(
'The number of variances must exactly match the number of outputs')
if other_output_types:
assert outputs is None
if self._other_outputs is None or len(self._other_outputs) == 0:
raise ValueError(
'This model cannot compute other outputs since no other output_types were specified.'
)
if (outputs is not None and self.model.inputs is not None and
len(self.model.inputs) == 0):
raise ValueError(
"Cannot use 'outputs' argument with a model that does not specify its inputs."
"Note models defined in imperative subclassing style cannot specify outputs"
)
if tf.is_tensor(outputs):
outputs = [outputs]
for batch in generator:
inputs, labels, weights = batch
self._create_inputs(inputs)
inputs, _, _ = self._prepare_batch((inputs, None, None))
# Invoke the model.
if len(inputs) == 1:
inputs = inputs[0]
if outputs is not None:
outputs = tuple(outputs)
key = tuple(t.ref() for t in outputs)
if key not in self._output_functions:
self._output_functions[key] = tf.keras.backend.function(
self.model.inputs, outputs)
output_values = self._output_functions[key](inputs)
else:
output_values = self._compute_model(inputs)
if tf.is_tensor(output_values):
output_values = [output_values]
output_values = [t.numpy() for t in output_values]
      # Apply transformers and record results.
if uncertainty:
var = [output_values[i] for i in self._variance_outputs]
if variances is None:
variances = [var]
else:
for i, t in enumerate(var):
variances[i].append(t)
access_values = []
if other_output_types:
access_values += self._other_outputs
elif self._prediction_outputs is not None:
access_values += self._prediction_outputs
if len(access_values) > 0:
output_values = [output_values[i] for i in access_values]
if len(transformers) > 0:
if len(output_values) > 1:
raise ValueError(
"predict() does not support Transformers for models with multiple outputs."
)
elif len(output_values) == 1:
output_values = [undo_transforms(output_values[0], transformers)]
if results is None:
results = [[] for i in range(len(output_values))]
for i, t in enumerate(output_values):
results[i].append(t)
# Concatenate arrays to create the final results.
final_results = []
final_variances = []
if results is not None:
for r in results:
final_results.append(np.concatenate(r, axis=0))
if uncertainty and variances is not None:
for v in variances:
final_variances.append(np.concatenate(v, axis=0))
return zip(final_results, final_variances)
if len(final_results) == 1:
return final_results[0]
else:
return final_results
@tf.function(experimental_relax_shapes=True)
def _compute_model(self, inputs: Sequence):
"""Evaluate the model for a set of inputs."""
return self.model(inputs, training=False)
def predict_on_generator(
self,
generator: Iterable[Tuple[Any, Any, Any]],
transformers: List[Transformer] = [],
outputs: Optional[OneOrMany[tf.Tensor]] = None,
output_types: Optional[OneOrMany[str]] = None) -> OneOrMany[np.ndarray]:
"""
Parameters
----------
generator: generator
this should generate batches, each represented as a tuple of the form
(inputs, labels, weights).
transformers: list of dc.trans.Transformers
Transformers that the input data has been transformed by. The output
is passed through these transformers to undo the transformations.
outputs: Tensor or list of Tensors
The outputs to return. If this is None, the model's
standard prediction outputs will be returned.
Alternatively one or more Tensors within the model may be
specified, in which case the output of those Tensors will
be returned. If outputs is specified, output_types must be
None.
output_types: String or list of Strings
If specified, all outputs of this type will be retrieved
from the model. If output_types is specified, outputs must
be None.
    Returns
    -------
    a NumPy array if the model produces a single output, or a list of arrays
if it produces multiple outputs
"""
return self._predict(generator, transformers, outputs, False, output_types)
def predict_on_batch(
self,
X: ArrayLike,
transformers: List[Transformer] = [],
outputs: Optional[OneOrMany[tf.Tensor]] = None) -> OneOrMany[np.ndarray]:
"""Generates predictions for input samples, processing samples in a batch.
Parameters
----------
X: ndarray
the input data, as a Numpy array.
transformers: list of dc.trans.Transformers
Transformers that the input data has been transformed by. The output
is passed through these transformers to undo the transformations.
outputs: Tensor or list of Tensors
The outputs to return. If this is None, the model's standard prediction
outputs will be returned. Alternatively one or more Tensors within the
model may be specified, in which case the output of those Tensors will be
returned.
Returns
-------
    a NumPy array if the model produces a single output, or a list of arrays
if it produces multiple outputs
"""
dataset = NumpyDataset(X=X, y=None)
return self.predict(dataset, transformers, outputs)
def predict_uncertainty_on_batch(self, X: Sequence, masks: int = 50
) -> OneOrMany[Tuple[np.ndarray, np.ndarray]]:
"""
Predict the model's outputs, along with the uncertainty in each one.
The uncertainty is computed as described in https://arxiv.org/abs/1703.04977.
It involves repeating the prediction many times with different dropout masks.
The prediction is computed as the average over all the predictions. The
uncertainty includes both the variation among the predicted values (epistemic
uncertainty) and the model's own estimates for how well it fits the data
(aleatoric uncertainty). Not all models support uncertainty prediction.
Parameters
----------
X: ndarray
the input data, as a Numpy array.
masks: int
the number of dropout masks to average over
Returns
-------
for each output, a tuple (y_pred, y_std) where y_pred is the predicted
value of the output, and each element of y_std estimates the standard
deviation of the corresponding element of y_pred
"""
dataset = NumpyDataset(X=X, y=None)
return self.predict_uncertainty(dataset, masks)
def predict(
self,
dataset: Dataset,
transformers: List[Transformer] = [],
outputs: Optional[OneOrMany[tf.Tensor]] = None,
output_types: Optional[List[str]] = None) -> OneOrMany[np.ndarray]:
"""
Uses self to make predictions on provided Dataset object.
Parameters
----------
dataset: dc.data.Dataset
Dataset to make prediction on
transformers: list of dc.trans.Transformers
Transformers that the input data has been transformed by. The output
is passed through these transformers to undo the transformations.
outputs: Tensor or list of Tensors
The outputs to return. If this is None, the model's standard prediction
outputs will be returned. Alternatively one or more Tensors within the
model may be specified, in which case the output of those Tensors will be
returned.
output_types: String or list of Strings
If specified, all outputs of this type will be retrieved
from the model. If output_types is specified, outputs must
be None.
Returns
-------
    a NumPy array if the model produces a single output, or a list of arrays
if it produces multiple outputs
"""
generator = self.default_generator(
dataset, mode='predict', deterministic=True, pad_batches=False)
return self.predict_on_generator(
generator,
transformers=transformers,
outputs=outputs,
output_types=output_types)
def predict_embedding(self, dataset: Dataset) -> OneOrMany[np.ndarray]:
"""
Predicts embeddings created by underlying model if any exist.
An embedding must be specified to have `output_type` of
`'embedding'` in the model definition.
Parameters
----------
dataset: dc.data.Dataset
Dataset to make prediction on
Returns
-------
    a NumPy array of the embeddings the model produces, or a list
of arrays if it produces multiple embeddings
"""
generator = self.default_generator(
dataset, mode='predict', pad_batches=False)
return self._predict(generator, [], None, False, ['embedding'])
def predict_uncertainty(self, dataset: Dataset, masks: int = 50
) -> OneOrMany[Tuple[np.ndarray, np.ndarray]]:
"""
Predict the model's outputs, along with the uncertainty in each one.
The uncertainty is computed as described in https://arxiv.org/abs/1703.04977.
It involves repeating the prediction many times with different dropout masks.
The prediction is computed as the average over all the predictions. The
uncertainty includes both the variation among the predicted values (epistemic
uncertainty) and the model's own estimates for how well it fits the data
(aleatoric uncertainty). Not all models support uncertainty prediction.
Parameters
----------
dataset: dc.data.Dataset
Dataset to make prediction on
masks: int
the number of dropout masks to average over
Returns
-------
for each output, a tuple (y_pred, y_std) where y_pred is the predicted
value of the output, and each element of y_std estimates the standard
deviation of the corresponding element of y_pred
"""
sum_pred: List[np.ndarray] = []
sum_sq_pred: List[np.ndarray] = []
sum_var: List[np.ndarray] = []
for i in range(masks):
generator = self.default_generator(
dataset, mode='uncertainty', pad_batches=False)
results = self._predict(generator, [], None, True, None)
if len(sum_pred) == 0:
for p, v in results:
sum_pred.append(p)
sum_sq_pred.append(p * p)
sum_var.append(v)
else:
for j, (p, v) in enumerate(results):
sum_pred[j] += p
sum_sq_pred[j] += p * p
sum_var[j] += v
output = []
std = []
for i in range(len(sum_pred)):
p = sum_pred[i] / masks
output.append(p)
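      # Epistemic term: Var[p] = E[p^2] - E[p]^2 across the dropout masks;
      # aleatoric term: the mean of the model's own variance outputs.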
std.append(np.sqrt(sum_sq_pred[i] / masks - p * p + sum_var[i] / masks))
if len(output) == 1:
return (output[0], std[0])
else:
return list(zip(output, std))
def evaluate_generator(self,
generator: Iterable[Tuple[Any, Any, Any]],
metrics: List[Metric],
transformers: List[Transformer] = [],
per_task_metrics: bool = False):
"""Evaluate the performance of this model on the data produced by a generator.
Parameters
----------
generator: generator
this should generate batches, each represented as a tuple of the form
(inputs, labels, weights).
metric: list of deepchem.metrics.Metric
Evaluation metric
transformers: list of dc.trans.Transformers
Transformers that the input data has been transformed by. The output
is passed through these transformers to undo the transformations.
per_task_metrics: bool
If True, return per-task scores.
Returns
-------
dict
Maps tasks to scores under metric.
"""
evaluator = GeneratorEvaluator(self, generator, transformers)
return evaluator.compute_model_performance(metrics, per_task_metrics)
def compute_saliency(self, X: np.ndarray) -> OneOrMany[np.ndarray]:
"""Compute the saliency map for an input sample.
This computes the Jacobian matrix with the derivative of each output element
with respect to each input element. More precisely,
- If this model has a single output, it returns a matrix of shape
(output_shape, input_shape) with the derivatives.
- If this model has multiple outputs, it returns a list of matrices, one
for each output.
This method cannot be used on models that take multiple inputs.
Parameters
----------
X: ndarray
the input data for a single sample
Returns
-------
the Jacobian matrix, or a list of matrices
"""
input_shape = X.shape
X = np.reshape(X, [1] + list(X.shape))
self._create_inputs([X])
X, _, _ = self._prepare_batch(([X], None, None))
# Use a GradientTape to compute gradients.
X = tf.constant(X[0])
with tf.GradientTape(
persistent=True, watch_accessed_variables=False) as tape:
tape.watch(X)
outputs = self._compute_model(X)
if tf.is_tensor(outputs):
outputs = [outputs]
final_result = []
for output in outputs:
output_shape = tuple(output.shape.as_list()[1:])
output = tf.reshape(output, [-1])
result = []
for i in range(output.shape[0]):
result.append(tape.gradient(output[i], X))
final_result.append(
tf.reshape(tf.stack(result), output_shape + input_shape).numpy())
if len(final_result) == 1:
return final_result[0]
return final_result
def _prepare_batch(self,
batch: Tuple[Any, Any, Any]) -> Tuple[List, List, List]:
inputs, labels, weights = batch
inputs = [
x if x.dtype == t else x.astype(t)
for x, t in zip(inputs, self._input_dtypes)
]
if labels is not None:
labels = [
x if x.dtype == t else x.astype(t)
for x, t in zip(labels, self._label_dtypes)
]
if weights is not None:
weights = [
x if x.dtype == t else x.astype(t)
for x, t in zip(weights, self._weights_dtypes)
]
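    # Reconcile each input's rank with the model's expected input shape by
    # appending or squeezing trailing singleton dimensions.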
for i in range(len(inputs)):
shape = inputs[i].shape
dims = len(shape)
expected_dims = len(self._input_shapes[i])
if dims < expected_dims:
inputs[i] = inputs[i].reshape(shape + (1,) * (expected_dims - dims))
elif dims > expected_dims and all(d == 1 for d in shape[expected_dims:]):
inputs[i] = inputs[i].reshape(shape[:expected_dims])
return (inputs, labels, weights)
def default_generator(
self,
dataset: Dataset,
epochs: int = 1,
mode: str = 'fit',
deterministic: bool = True,
pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]:
"""Create a generator that iterates batches for a dataset.
Subclasses may override this method to customize how model inputs are
generated from the data.
Parameters
----------
dataset: Dataset
the data to iterate
epochs: int
the number of times to iterate over the full dataset
mode: str
allowed values are 'fit' (called during training), 'predict' (called
during prediction), and 'uncertainty' (called during uncertainty
prediction)
deterministic: bool
whether to iterate over the dataset in order, or randomly shuffle the
data for each epoch
pad_batches: bool
whether to pad each batch up to this model's preferred batch size
Returns
-------
a generator that iterates batches, each represented as a tuple of lists:
([inputs], [outputs], [weights])
"""
for epoch in range(epochs):
for (X_b, y_b, w_b, ids_b) in dataset.iterbatches(
batch_size=self.batch_size,
deterministic=deterministic,
pad_batches=pad_batches):
yield ([X_b], [y_b], [w_b])
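  # A minimal sketch of overriding default_generator for a model whose
  # network expects two input arrays per sample (hypothetical subclass,
  # not part of this file):
  #
  #   class TwoInputModel(KerasModel):
  #     def default_generator(self, dataset, epochs=1, mode='fit',
  #                           deterministic=True, pad_batches=True):
  #       for epoch in range(epochs):
  #         for X_b, y_b, w_b, ids_b in dataset.iterbatches(
  #             batch_size=self.batch_size, deterministic=deterministic,
  #             pad_batches=pad_batches):
  #           yield ([X_b[:, 0], X_b[:, 1]], [y_b], [w_b])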
def save_checkpoint(self,
max_checkpoints_to_keep: int = 5,
model_dir: Optional[str] = None) -> None:
"""Save a checkpoint to disk.
Usually you do not need to call this method, since fit() saves checkpoints
automatically. If you have disabled automatic checkpointing during fitting,
this can be called to manually write checkpoints.
Parameters
----------
max_checkpoints_to_keep: int
the maximum number of checkpoints to keep. Older checkpoints are discarded.
model_dir: str, default None
Model directory to save checkpoint to. If None, revert to self.model_dir
"""
self._ensure_built()
if model_dir is None:
model_dir = self.model_dir
if not os.path.exists(model_dir):
os.makedirs(model_dir)
manager = tf.train.CheckpointManager(self._checkpoint, model_dir,
max_checkpoints_to_keep)
manager.save()
def get_checkpoints(self, model_dir: Optional[str] = None):
"""Get a list of all available checkpoint files.
Parameters
----------
model_dir: str, default None
Directory to get list of checkpoints from. Reverts to self.model_dir if None
"""
if model_dir is None:
model_dir = self.model_dir
return tf.train.get_checkpoint_state(model_dir).all_model_checkpoint_paths
def restore(self,
checkpoint: Optional[str] = None,
model_dir: Optional[str] = None) -> None:
"""Reload the values of all variables from a checkpoint file.
Parameters
----------
checkpoint: str
the path to the checkpoint file to load. If this is None, the most recent
checkpoint will be chosen automatically. Call get_checkpoints() to get a
list of all available checkpoints.
model_dir: str, default None
Directory to restore checkpoint from. If None, use self.model_dir.
"""
self._ensure_built()
if model_dir is None:
model_dir = self.model_dir
if checkpoint is None:
checkpoint = tf.train.latest_checkpoint(model_dir)
if checkpoint is None:
raise ValueError('No checkpoint found')
self._checkpoint.restore(checkpoint)
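  # Typical checkpoint round trip using only the methods defined above:
  #
  #   model.save_checkpoint(max_checkpoints_to_keep=3)
  #   print(model.get_checkpoints())  # paths of the checkpoints on disk
  #   model.restore()                 # reload the most recent checkpoint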
def get_global_step(self) -> int:
"""Get the number of steps of fitting that have been performed."""
return int(self._global_step)
def _log_scalar_to_tensorboard(self, name: str, value: Any, step: int):
"""Log a scalar value to Tensorboard."""
with self._summary_writer.as_default():
tf.summary.scalar(name, value, step)
def _create_assignment_map(self,
source_model: "KerasModel",
include_top: bool = True,
**kwargs) -> Dict[Any, Any]:
"""
Creates a default assignment map between variables of source and current model.
This is used only when a custom assignment map is missing. This assumes the
model is made of different layers followed by a dense layer for mapping to
output tasks. include_top is used to control whether or not the final dense
layer is used. The default assignment map is useful in cases where the type
of task is different (classification vs regression) and/or number of tasks.
Parameters
----------
source_model: dc.models.KerasModel
Source model to copy variable values from.
include_top: bool, default True
if true, copies the last dense layer
"""
assignment_map: Dict[Any, Any] = {}
source_vars = source_model.model.trainable_variables
dest_vars = self.model.trainable_variables
if not include_top:
source_vars = source_vars[:-2]
dest_vars = dest_vars[:-2]
for source_var, dest_var in zip(source_vars, dest_vars):
assignment_map[source_var.ref()] = dest_var
return assignment_map
def _create_value_map(self, source_model: "KerasModel",
**kwargs) -> Dict[Any, Any]:
"""
Creates a value map between variables in the source model and their
current values. This is used only when a custom value map is missing, and
assumes the restore method has been called under self.session.
Parameters
----------
source_model: dc.models.KerasModel
Source model to create value map from
"""
value_map: Dict[Any, Any] = {}
source_vars = source_model.model.trainable_variables
for source_var in source_vars:
value_map[source_var.ref()] = source_var.numpy()
return value_map
def load_from_pretrained(self,
source_model: "KerasModel",
assignment_map: Optional[Dict[Any, Any]] = None,
value_map: Optional[Dict[Any, Any]] = None,
checkpoint: Optional[str] = None,
model_dir: Optional[str] = None,
include_top: bool = True,
inputs: Optional[Sequence[Any]] = None,
**kwargs) -> None:
"""Copies variable values from a pretrained model. `source_model` can either
be a pretrained model or a model with the same architecture. `value_map`
is a variable-value dictionary. If no `value_map` is provided, the variable
values are restored to the `source_model` from a checkpoint and a default
`value_map` is created. `assignment_map` is a dictionary mapping variables
from the `source_model` to the current model. If no `assignment_map` is
provided, one is made from scratch and assumes the model is composed of
several different layers, with the final one being a dense layer. include_top
is used to control whether or not the final dense layer is used. The default
assignment map is useful in cases where the type of task is different
(classification vs regression) and/or number of tasks in the setting.
Parameters
----------
source_model: dc.KerasModel, required
source_model can either be the pretrained model or a dc.KerasModel with
the same architecture as the pretrained model. It is used to restore from
a checkpoint, if value_map is None and to create a default assignment map
if assignment_map is None
assignment_map: Dict, default None
Dictionary mapping the source_model variables and current model variables
value_map: Dict, default None
Dictionary containing source_model trainable variables mapped to numpy
arrays. If value_map is None, the values are restored and a default
variable map is created using the restored values
checkpoint: str, default None
the path to the checkpoint file to load. If this is None, the most recent
checkpoint will be chosen automatically. Call get_checkpoints() to get a
list of all available checkpoints
model_dir: str, default None
Restore model from custom model directory if needed
include_top: bool, default True
if True, copies the weights and bias associated with the final dense
layer. Used only when assignment map is None
inputs: List, input tensors for model
if not None, then the weights are built for both the source and self.
This option is useful only for models that are built by
subclassing tf.keras.Model, and not using the functional API by tf.keras
"""
if inputs is not None:
# Ensure weights for both models are built.
source_model.model(inputs)
self.model(inputs)
self._ensure_built()
if value_map is None:
logger.info(
"No value map provided. Creating default value map from restored model."
)
source_model.restore(model_dir=model_dir, checkpoint=checkpoint)
value_map = self._create_value_map(source_model=source_model)
if assignment_map is None:
logger.info("No assignment map provided. Creating custom assignment map.")
assignment_map = self._create_assignment_map(
source_model=source_model, include_top=include_top)
for source_var, dest_var in assignment_map.items():
assert source_var.deref().shape == dest_var.shape
dest_var.assign(value_map[source_var])
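  # Hypothetical transfer-learning sketch; `source` and `dest` stand for any
  # two KerasModels with matching layer structure (names are placeholders):
  #
  #   dest.load_from_pretrained(source,
  #                             model_dir='/path/to/source/checkpoints',
  #                             include_top=False)  # skip the final dense layer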
class _StandardLoss(object):
"""The implements the loss function for models that use a dc.models.losses.Loss."""
def __init__(self, model: tf.keras.Model, loss: Loss) -> None:
self.model = model
self.loss = loss
def __call__(self, outputs: List, labels: List, weights: List) -> float:
if len(outputs) != 1 or len(labels) != 1 or len(weights) != 1:
raise ValueError(
"Loss functions expects exactly one each of outputs, labels, and weights"
)
losses = self.loss._compute_tf_loss(outputs[0], labels[0])
w = weights[0]
if len(w.shape) < len(losses.shape):
if tf.is_tensor(w):
shape = tuple(w.shape.as_list())
else:
shape = w.shape
shape = tuple(-1 if x is None else x for x in shape)
w = tf.reshape(w, shape + (1,) * (len(losses.shape) - len(w.shape)))
loss = losses * w
return tf.reduce_mean(loss) + sum(self.model.losses)
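# Note on the reshape in __call__ above: a per-sample weight array of shape
# (batch,) gains trailing singleton dimensions, e.g. becoming (batch, 1), so
# it broadcasts against a per-task loss of shape (batch, n_tasks).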
|
py | 7dfe8df0761e6d9cad35d0ffe2571ecee4c173a8 | #!/usr/bin/env python
import time
import ystockquote
def run(stocks):
stocks.sort()
for stock in stocks:
        print(stock.ljust(8), end=' ')
    print('')
try:
while True:
tick(stocks)
time.sleep(1)
except KeyboardInterrupt:
return
def tick(stocks):
for stock in stocks:
        print(ystockquote.get_price(stock).ljust(8), end=' ')
    print('')
|
py | 7dfe8ee7e4900970ab84089842d3eab6f522ce12 | # -*- coding: utf-8 -*-
"""Unit tests for the package module."""
import steenzout.primogen
def test_version():
"""Test version() function."""
assert steenzout.primogen.version() == steenzout.primogen.__version__
|
py | 7dfe8f3215fb56a867ad743b8fd2f511a9556323 | # ______ _ _ _ _ _ _ _
# | ___ \ | | | | (_) (_) | | (_)
# | |_/ / __ ___ | |__ __ _| |__ _| |_ ___| |_ _ ___
# | __/ '__/ _ \| '_ \ / _` | '_ \| | | / __| __| |/ __|
# | | | | | (_) | |_) | (_| | |_) | | | \__ \ |_| | (__
# \_| |_| \___/|_.__/ \__,_|_.__/|_|_|_|___/\__|_|\___|
# ___ ___ _ _
# | \/ | | | (_)
# | . . | ___ ___| |__ __ _ _ __ _ ___ ___
# | |\/| |/ _ \/ __| '_ \ / _` | '_ \| |/ __/ __|
# | | | | __/ (__| | | | (_| | | | | | (__\__ \
# \_| |_/\___|\___|_| |_|\__,_|_| |_|_|\___|___/
# _ _ _
# | | | | | |
# | | __ _| |__ ___ _ __ __ _| |_ ___ _ __ _ _
# | | / _` | '_ \ / _ \| '__/ _` | __/ _ \| '__| | | |
# | |___| (_| | |_) | (_) | | | (_| | || (_) | | | |_| |
# \_____/\__,_|_.__/ \___/|_| \__,_|\__\___/|_| \__, |
# __/ |
# |___/
#
# MIT License
#
# Copyright (c) 2019 Probabilistic Mechanics Laboratory
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
""" Train physics-informed recursive neural network
"""
import numpy as np
import pandas as pd
import os
import time
from pinn.layers import getScalingDenseLayer
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras import Sequential
from tensorflow.keras.callbacks import ModelCheckpoint, TerminateOnNaN
from tensorflow.keras import backend
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rc('font', size=14)
myLW = 2.5
myMS = 7
from bias_model import create_model
from tqdm import tqdm
# =============================================================================
# Auxiliary Functions
# =============================================================================
def arch_model(switch, input_location, input_scale):
dLInputScaling = getScalingDenseLayer(input_location, input_scale)
if switch == 1:
L1 = Dense(5, activation = 'tanh')
L2 = Dense(1, activation = 'linear')
model = Sequential([dLInputScaling,L1,L2], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
elif switch == 2:
L1 = Dense(10, activation = 'elu')
L2 = Dense(5, activation = 'elu')
L3 = Dense(1, activation = 'linear')
model = Sequential([dLInputScaling,L1,L2,L3], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
elif switch == 3:
L1 = Dense(10, activation = 'elu')
L2 = Dense(5, activation = 'sigmoid')
L3 = Dense(1, activation = 'elu')
model = Sequential([dLInputScaling,L1,L2,L3], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
elif switch == 4:
L1 = Dense(10, activation = 'tanh')
L2 = Dense(5, activation = 'tanh')
L3 = Dense(1, activation = 'elu')
model = Sequential([dLInputScaling,L1,L2,L3], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
elif switch == 5:
L1 = Dense(20, activation = 'tanh')
L2 = Dense(10, activation = 'elu')
L3 = Dense(5, activation = 'sigmoid')
L4 = Dense(1, activation = 'linear', trainable = True)
model = Sequential([dLInputScaling,L1,L2,L3,L4], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
elif switch == 6:
L1 = Dense(20, activation = 'elu')
L2 = Dense(10, activation = 'sigmoid')
L3 = Dense(5, activation = 'sigmoid')
L4 = Dense(1, activation = 'elu', trainable = True)
model = Sequential([dLInputScaling,L1,L2,L3,L4], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
elif switch == 7:
L1 = Dense(40, activation = 'elu')
L2 = Dense(20, activation = 'sigmoid')
L3 = Dense(10, activation = 'sigmoid')
L4 = Dense(1, activation = 'elu', trainable = True)
model = Sequential([dLInputScaling,L1,L2,L3,L4], name = 'bias_mlp')
model.compile(loss='mse', optimizer=RMSprop(1e-3), metrics=['mae'])
return model
if __name__ == "__main__":
start = time.time()
#--------------------------------------------------------------------------
# pre- processing
myDtype = 'float32' # defining type for the layer
    insp = 15000  # number of flights in the inspection data
dfcidx = pd.read_csv('tr_cidx_'+str(insp)+'_flights.csv', index_col = None,
dtype = myDtype) # loading corrosion index data
cidx = dfcidx.values[:,1:-1]
dfdS = pd.read_csv('tr_dS_'+str(insp)+'_flights.csv', index_col = None,
dtype = myDtype) # loading mech. load data
dS = dfdS.values[:,1:-1]
dfR = pd.read_csv('tr_R_'+str(insp)+'_flights.csv', index_col = None,
dtype = myDtype) # loading stress ratio data
R = dfR.values[:,1:-1]
    # Data dimensions: fleet size and number of load cycles
    nFleet, nCycles = np.shape(cidx)
# RNN inputs
input_array = np.dstack((dS, R))
input_array = np.dstack((input_array, cidx))
a0 = pd.read_csv('initial_crack_length.csv', index_col = None, dtype = myDtype)
a0RNN = np.zeros(input_array.shape[0])
a0RNN[:] = a0.values[:,-1] # initial crack length
a0RNN = np.reshape(a0RNN,(len(a0RNN),1))
aT = pd.read_csv('tr_crack_'+str(insp)+'_flights.csv', index_col = None, dtype = myDtype)
aTarget = np.zeros(input_array.shape[0])
    aTarget[:] = aT.values[:,-1] # crack length at inspection (target)
aTarget = np.reshape(aTarget,(len(aTarget),1))
#--------------------------------------------------------------------------
batch_input_shape = input_array.shape
#--------------------------------------------------------------------------
dfcrl = pd.read_csv('MLP_crack_tr.csv', index_col = None)
dfdSl = pd.read_csv('MLP_dS_tr.csv', index_col = None)
dfRl = pd.read_csv('MLP_R_tr.csv', index_col = None)
dfcidxl = pd.read_csv('MLP_cidx_tr.csv', index_col = None)
dfaux = pd.read_csv('MLP_bias_tr.csv', index_col = None)
F = 2.8 # stress intensity factor
beta,gamma = -1e8,.68 # Walker model customized sigmoid function parameters
Co,m = 1.1323e-10,3.859 # Walker model coefficients (similar to Paris law)
selectbias = [0,1,2,3]
selectidx = [3]
selectdk = [0,1]
selectwk = [2]
outputs = aT.values[:,-1]
EPOCHS = 15
arch = np.linspace(1,7,7,dtype = int)
planes = np.linspace(1,19,19,dtype = int)
cont = 0
for ii in tqdm(arch):
for jj in planes:
# Loading MLP info and parameters
inputs = np.column_stack((dfcrl[str(jj)],dfdSl[str(jj)]))
inputs = np.column_stack((inputs,dfRl[str(jj)]))
inputs = np.column_stack((inputs,dfcidxl[str(jj)]))
input_location = np.asarray(inputs.min(axis=0))
input_scale = np.asarray(inputs.max(axis=0)) - np.asarray(inputs.min(axis=0))
low = np.asarray(dfaux[[str(jj)]].min(axis=0))
up = np.asarray(dfaux[[str(jj)]].max(axis=0))
Bias_layer = arch_model(ii, input_location, input_scale)
checkpoint_MLP = 'training_MLP_arch_'+str(ii)+'_plane_'+str(jj)+'/cp.ckpt'
Bias_layer.load_weights(checkpoint_MLP)
Bias_layer.trainable = True
model = create_model(Bias_layer, low, up, F, beta, gamma, Co, m, a0RNN, batch_input_shape,
selectbias, selectidx, selectdk, selectwk, myDtype)
#--------------------------------------------------------------------------
before = model.predict_on_batch(input_array)
err_b = before[:,0] - outputs
mse_b = np.mean(err_b**2)
mepe_b = np.mean(np.sqrt((err_b/outputs)**2))
str_b = "before training\nLoss = %1.1e\nMSE = %1.1e" % (mepe_b, mse_b)
#--------------------------------------------------------------------------
checkpoint_path = 'rnn_arch_'+str(ii)+'_plane_'+str(jj)+'/cp.ckpt'
checkpoint_dir = os.path.dirname(checkpoint_path)
# Create checkpoint callback
cp_callback = ModelCheckpoint(checkpoint_path, save_weights_only=True, save_best_only=True, monitor = 'loss')
kill = TerminateOnNaN()
history = model.fit(input_array, aTarget, steps_per_epoch=1, epochs=EPOCHS, verbose=1,
callbacks = [cp_callback,kill])
#--------------------------------------------------------------------------
results = model.predict_on_batch(input_array)
err_a = results[:,0] - outputs
mse_a = np.mean(err_a**2)
mepe_a = np.mean(np.sqrt((err_a/outputs)**2))
str_a = "after training\nLoss = %1.1e\nMSE = %1.1e" % (mepe_a, mse_a)
#--------------------------------------------------------------------------
# Plot actual x predict
fig = plt.figure()
fig.clf()
plt.plot([0,20],[0,20],'--k')
plt.plot(outputs*1e3,before*1e3,'o', label = str_b, markersize = myMS)
plt.plot(outputs*1e3,results*1e3,'o', label = str_a, markersize = myMS)
plt.title('Training: arch_'+str(ii)+'_plane_'+str(jj))
plt.xlabel('actual (mm)')
plt.ylabel('predicted (mm)')
plt.grid(which = 'both')
plt.legend(loc='upper left', facecolor='w')
plt.tight_layout()
plt.savefig('Plots/training_arch'+str(ii)+'_plane_'+str(jj)+'.png')
plt.close()
cont+=1
backend.clear_session()
print('Elapsed time is %s seconds'%(time.time()-start))
|
py | 7dfe8f7e7d7178d9f89145453daa6aa78d97c5b4 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defination of PSLib."""
import os
import sys
from .optimizer_factory import *
from google.protobuf import text_format
import paddle.fluid as fluid
from paddle.fluid.framework import Program
from paddle.fluid.incubate.fleet.base.fleet_base import Fleet
from paddle.fluid.incubate.fleet.base.fleet_base import Mode
from paddle.fluid.incubate.fleet.base.fleet_base import DistributedOptimizer
from paddle.fluid.incubate.fleet.base.role_maker import MPISymetricRoleMaker
class PSLib(Fleet):
"""PSLib class."""
def __init__(self):
super(PSLib, self).__init__(Mode.PSLIB)
self._opt_info = None
self._local_ip = 0
self._fleet_ptr = None
self._main_programs = []
self._scopes = []
self._client2client_request_timeout_ms = 500000
self._client2client_connect_timeout_ms = 10000
self._client2client_max_retry = 3
def init(self, role_maker=None):
if role_maker is None:
role_maker = MPISymetricRoleMaker()
super(PSLib, self).init(role_maker)
self._fleet_ptr = fluid.core.Fleet()
def _set_client_communication_config(self, request_timeout_ms,
connect_timeout_ms, max_retry):
self._client2client_request_timeout_ms = request_timeout_ms
self._client2client_connect_timeout_ms = connect_timeout_ms
self._client2client_max_retry = max_retry
def set_pull_local_thread_num(self, thread_num):
self._fleet_ptr.set_pull_local_thread_num(thread_num)
def init_worker(self):
"""
init_worker(): will be called by user. When a user knows current process is_server(), he/she
should call init_worker() to initialize global information about worker and connect
worker with pserver. You should run startup program before init_worker.
Args:
executor(Executor): The executor to run for init server.
programs(Program|None): The program that need to run.
"""
if len(self._main_programs) == 0:
raise ValueError(
"You should run DistributedOptimizer.minimize() first")
if self._opt_info:
if "fleet_desc" in self._opt_info:
self._dist_desc_str = text_format.MessageToString(
self._opt_info["fleet_desc"])
self._dist_desc = self._opt_info["fleet_desc"]
else:
raise Exception(
"You should run DistributedOptimizer.minimize() first")
# barrier_all for init_server, wait for server starts
self._role_maker._barrier_all()
self.all_ips_ = self._role_maker._all_gather(self._local_ip)
        # worker_index * 2 is for compatibility with older versions of pslib
self._fleet_ptr.init_worker(self._dist_desc_str, self.all_ips_,
self._role_maker._get_size(),
self._role_maker.worker_index() * 2)
# barrier_all for init_worker
self._role_maker._barrier_all()
# prepare for client to client communication
info = self._fleet_ptr.get_clients_info()
all_info = self._role_maker._worker_gather(info[0])
self._fleet_ptr.gather_clients(all_info)
self._fleet_ptr.set_client2client_config(
self._client2client_request_timeout_ms,
self._client2client_connect_timeout_ms,
self._client2client_max_retry)
self._fleet_ptr.create_client2client_connection()
# barrier for init model
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
tables = []
for tp in self._dist_desc.trainer_param:
for i in tp.dense_table:
tables.append(i)
for prog, scope in zip(self._main_programs, self._scopes):
prog_id = str(id(prog))
prog_conf = self._opt_info['program_configs'][prog_id]
prog_tables = {}
for key in prog_conf:
if "dense" not in key:
continue
for table_id in prog_conf[key]:
prog_tables[int(table_id)] = 0
for table in tables:
if int(table.table_id) not in prog_tables:
continue
var_name_list = []
for i in range(0, len(table.dense_variable_name)):
var_name = table.dense_variable_name[i]
if scope.find_var(var_name) is None:
raise ValueError(
"var " + var_name + " not found in scope, "
+ "you should run startup program first")
var_name_list.append(var_name)
self._fleet_ptr.init_model(scope,
int(table.table_id),
var_name_list)
# barrier for init model done
self._role_maker._barrier_worker()
else:
raise NameError(
"You should run DistributedOptimizer.minimize() first")
def init_server(self, model_dir=None, **kwargs):
"""
init_server() will be called by user. It will load model from model_dir.
Args:
model_dir(str): load model path, can be local or hdfs/afs path.
kwargs: user-defined attributes, currently support following:
model(int): load model mode.
0 is for load whole model,
1 is for load delta model (load diff),
default is 0.
Example:
>>> fleet.init_server("/you/path/to/model", mode = 0)
"""
mode = kwargs.get("mode", 0)
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.load_model(model_dir, mode)
self._role_maker._barrier_worker()
def run_server(self):
"""
        run_server(): will be called by user. When a user knows current process is_server(), he/she
        should call run_server() to initialize global information about the parameter server
"""
if self._opt_info:
if "fleet_desc" in self._opt_info:
self._dist_desc_str = text_format.MessageToString(
self._opt_info["fleet_desc"])
self._dist_desc = self._opt_info["fleet_desc"]
else:
raise Exception(
"You should run DistributedOptimizer.minimize() first")
        # server_index * 2 is for compatibility with older versions of pslib
self._fleet_ptr.init_server(self._dist_desc_str,
self._role_maker.server_index() * 2)
if isinstance(self._role_maker, MPISymetricRoleMaker):
self._local_ip = self._fleet_ptr.run_server()
else:
local_endpoint = self._role_maker.get_local_endpoint()
local_endpoint = local_endpoint.split(":")
self._local_ip = self._fleet_ptr.run_server(
str(local_endpoint[0]), int(local_endpoint[1]))
# barrier_all for init_server
self._role_maker._barrier_all()
self.all_ips_ = self._role_maker._all_gather(self._local_ip)
self._fleet_ptr.gather_servers(self.all_ips_,
self._role_maker._get_size())
# barrier_all for init_worker, wait all workers start
self._role_maker._barrier_all()
else:
raise Exception(
"You should run DistributedOptimizer.minimize() first")
def stop_worker(self):
"""
stop(): will be called after a user finishes his/her training task. Fleet instance will be
destroyed when stop() is called.
"""
self._role_maker._barrier_worker()
# all worker should be finalize first
if self._role_maker.is_worker():
self._fleet_ptr.finalize_worker()
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.stop_server()
self._role_maker._barrier_worker()
self._role_maker._barrier_all()
self._role_maker._finalize()
def distributed_optimizer(self, optimizer, strategy={}):
"""
distributed_optimizer
Args:
optimizer(Optimizer): optimizer
strategy(dict): strategy
Examples:
.. code-block:: python
fleet.distributed_optimizer(optimizer)
Returns:
optimizer(DownpourOptimizer): downpour optimizer
"""
self._optimizer = DownpourOptimizer(optimizer, strategy)
return self._optimizer
def save_inference_model(self,
executor,
dirname,
feeded_var_names=None,
target_vars=None,
main_program=None,
export_for_deployment=True):
"""
save pserver model called from a worker
Args:
executor(Executor): fluid executor
dirname(str): save model path
feeded_var_names(list): default None
target_vars(list): default None
main_program(Program): default None
            export_for_deployment(bool): default True
Examples:
.. code-block:: python
fleet.save_inference_model(dirname="hdfs:/my/path")
"""
self._fleet_ptr.save_model(dirname, 0)
def print_table_stat(self, table_id):
"""
print stat info of table_id,
format: tableid, feasign size, mf size
Args:
table_id(int): the id of table
Example:
.. code-block:: python
fleet.print_table_stat(0)
"""
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.print_table_stat(table_id)
self._role_maker._barrier_worker()
def save_persistables(self, executor, dirname, main_program=None, **kwargs):
"""
save presistable parameters,
when using fleet, it will save sparse and dense feature
Args:
executor(Executor): fluid executor
dirname(str): save path. It can be hdfs/afs path or local path
main_program(Program): fluid program, default None
kwargs: use define property, current support following
mode(int): 0 means save all pserver model,
1 means save delta pserver model (save diff),
2 means save xbox base,
3 means save batch model.
Example:
.. code-block:: python
fleet.save_persistables(dirname="/you/path/to/model", mode = 0)
"""
mode = kwargs.get("mode", 0)
self._fleet_ptr.client_flush()
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.save_model(dirname, mode)
self._role_maker._barrier_worker()
def save_cache_model(self, executor, dirname, main_program=None, **kwargs):
"""
save sparse cache table,
when using fleet, it will save sparse cache table
Args:
executor(Executor): fluid executor
dirname(str): save path. It can be hdfs/afs path or local path
main_program(Program): fluid program, default None
kwargs: use define property, current support following
mode(int): define for feature extension in the future,
currently no use, will pass a default value 0
table_id(int): which table to save cache, default is 0
Returns:
feasign_num(int): cache feasign num
Example:
.. code-block:: python
fleet.save_cache_model(None, dirname="/you/path/to/model", mode = 0)
"""
mode = kwargs.get("mode", 0)
table_id = kwargs.get("table_id", 0)
self._fleet_ptr.client_flush()
self._role_maker._barrier_worker()
cache_threshold = 0.0
if self._role_maker.is_first_worker():
cache_threshold = self._fleet_ptr.get_cache_threshold(table_id)
        # check whether the cache threshold is correct
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.cache_shuffle(table_id, dirname, mode,
cache_threshold)
self._role_maker._barrier_worker()
feasign_num = -1
if self._role_maker.is_first_worker():
feasign_num = self._fleet_ptr.save_cache(table_id, dirname, mode)
self._role_maker._barrier_worker()
return feasign_num
def shrink_sparse_table(self):
"""
shrink cvm of all sparse embedding in pserver, the decay rate
is defined as "show_click_decay_rate" in fleet_desc.prototxt
Example:
>>> fleet.shrink_sparse_table()
"""
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
tables = []
for tp in self._opt_info["fleet_desc"].trainer_param:
for i in tp.sparse_table:
tables.append(i.table_id)
for i in list(set(tables)):
self._fleet_ptr.shrink_sparse_table(i)
self._role_maker._barrier_worker()
def shrink_dense_table(self, decay, emb_dim=11, scope=None, table_id=None):
"""
shrink batch_sum in pserver by multiplying by decay
Args:
decay(float): the decay rate, usually range in (0, 1)
emb_dim(int): one element's length in datanorm layer
scope(Scope): Scope object, default is fluid.global_scope()
table_id(int): table id of shrinking dense table. None means shrink all,
you should specify it when using multiple scopes,
default is None.
Example:
>>> fleet.shrink_dense_table(0.98, 11, myscope1, 1)
>>> fleet.shrink_dense_table(0.98, 11, myscope1, 2)
>>> fleet.shrink_dense_table(0.98, 11, myscope2, 3)
"""
if scope is None:
scope = fluid.global_scope()
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
for tp in self._opt_info["fleet_desc"].trainer_param:
for i in tp.dense_table:
if table_id is not None and table_id != i.table_id:
continue
var_list = [var for var in i.dense_variable_name]
skip = False
for var in var_list:
if scope.find_var(var) is None:
skip = True
break
if skip:
continue
self._fleet_ptr.shrink_dense_table(i.table_id, scope,
var_list, decay, emb_dim)
self._role_maker._barrier_worker()
def clear_one_table(self, table_id):
"""
clear_one_table() will be called by user. It will clear one table.
Args:
table_id(int): table id
Examples:
.. code-block:: python
fleet.clear_one_table(0)
"""
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.clear_one_table(table_id)
self._role_maker._barrier_worker()
def clear_model(self):
"""
clear_model() will be called by user. It will clear sparse model.
Examples:
.. code-block:: python
fleet.clear_model()
"""
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
self._fleet_ptr.clear_model()
self._role_maker._barrier_worker()
def load_one_table(self, table_id, model_path, **kwargs):
"""
load pslib model for one table or load params from paddle model
Args:
table_id(int): load table id
model_path(str): load model path, can be local or hdfs/afs path
kwargs(dict): user defined params, currently support following:
only for load pslib model for one table:
mode(int): load model mode. 0 is for load whole model, 1 is
for load delta model (load diff), default is 0.
only for load params from paddle model:
scope(Scope): Scope object
model_proto_file(str): path of program desc proto binary
file, can be local or hdfs/afs file
var_names(list): var name list
load_combine(bool): load from a file or split param files
default False.
Examples:
.. code-block:: python
# load pslib model for one table
fleet.load_one_table(0, "hdfs:/my_fleet_model/20190714/0/")
fleet.load_one_table(1, "hdfs:/xx/xxx", mode = 0)
# load params from paddle model
fleet.load_one_table(2, "hdfs:/my_paddle_model/",
scope = my_scope,
model_proto_file = "./my_program.bin",
load_combine = False)
# below is how to save proto binary file
with open("my_program.bin", "wb") as fout:
my_program = fluid.default_main_program()
fout.write(my_program.desc.serialize_to_string())
"""
self._role_maker._barrier_worker()
mode = kwargs.get("mode", 0)
scope = kwargs.get("scope", None)
model_proto_file = kwargs.get("model_proto_file", None)
var_names = kwargs.get("var_names", None)
load_combine = kwargs.get("load_combine", False)
self._role_maker._barrier_worker()
if scope is not None and model_proto_file is not None:
self._load_one_table_from_paddle_model(scope, table_id, model_path,
model_proto_file, var_names,
load_combine)
elif self._role_maker.is_first_worker():
self._fleet_ptr.load_model_one_table(table_id, model_path, mode)
self._role_maker._barrier_worker()
def _load_one_table_from_paddle_model(self,
scope,
table_id,
model_path,
model_proto_file,
var_names=None,
load_combine=False):
"""
load params from paddle model, and push params to pserver
Args:
scope(Scope): Scope object
table_id(int): the id of table to load
model_path(str): path of paddle model, can be local or hdfs/afs file
model_proto_file(str): path of program desc proto binary file,
can be local or hdfs/afs file
var_names(list): load var names
load_combine(bool): load from a file or split param files
"""
self._role_maker._barrier_worker()
if self._role_maker.is_first_worker():
# get fs config from fleet_desc
fs_name = self._opt_info["fleet_desc"].fs_client_param.uri
fs_ugi = self._opt_info["fleet_desc"].fs_client_param.user + "," + \
self._opt_info["fleet_desc"].fs_client_param.passwd
hadoop_bin = self._opt_info["fleet_desc"].fs_client_param.hadoop_bin
# download model_path if it's hdfs/afs
if model_path.startswith("hdfs:") or model_path.startswith("afs:"):
dest = "./model_for_load_table_%s" % table_id
cmd = hadoop_bin + " fs -D fs.default.name=" + fs_name + \
" -D hadoop.job.ugi=" + fs_ugi + " -get " + model_path + \
" " + dest
ret = os.system(cmd)
if ret != 0:
raise RuntimeError("download model failed")
model_path = dest
# download model_proto_file if it's hdfs/afs
if model_proto_file.startswith("hdfs:") or \
model_proto_file.startswith("afs:"):
dest = "./model_proto_file_for_load_table_%s" % table_id
cmd = hadoop_bin + " fs -D fs.default.name=" + fs_name + \
" -D hadoop.job.ugi=" + fs_ugi + " -get " + \
model_proto_file + " " + dest
ret = os.system(cmd)
if ret != 0:
raise RuntimeError("download model proto file failed")
model_proto_file = dest
for tp in self._opt_info["fleet_desc"].trainer_param:
for i in tp.dense_table:
if table_id is not None and table_id != i.table_id:
continue
table_var_names = [var for var in i.dense_variable_name]
skip = False
for var in table_var_names:
if scope.find_var(var) is None:
skip = True
break
if skip:
continue
self._fleet_ptr.load_from_paddle_model(
scope, table_id, var_names, model_path,
model_proto_file, table_var_names, load_combine)
self._role_maker._barrier_worker()
def _set_opt_info(self, opt_info):
"""
this function saves the result from DistributedOptimizer.minimize()
"""
self._opt_info = opt_info
fleet = PSLib()
def _prepare_params(input,
size,
is_sparse=False,
is_distributed=False,
padding_idx=None,
param_attr=None,
dtype='float32'):
"""
preprocess params, this interface is not for users.
Args:
input(Variable|list of Variable): Input is a Tensor<int64> Variable
size(list of int): the embedding dim
is_sparse(bool): whether input is sparse ids
is_distributed(bool): whether in distributed mode
padding_idx(int): padding idx of input
param_attr(ParamAttr): To specify the weight parameter property
dtype(str): data type of output
"""
if param_attr is None:
raise ValueError("param_attr must be set")
name = param_attr.name
if name is None:
raise ValueError("embedding name must be set")
if not isinstance(size, list) and not isinstance(size, tuple):
raise ValueError("embedding size must be list or tuple")
size = size[-1]
global FLEET_GLOBAL_DICT
FLEET_GLOBAL_DICT["enable"] = True
d_table = FLEET_GLOBAL_DICT["emb_to_table"]
d_accessor = FLEET_GLOBAL_DICT["emb_to_accessor"]
d_size = FLEET_GLOBAL_DICT["emb_to_size"]
# check embedding size
if d_size.get(name) is None:
d_size[name] = size
elif d_size[name] != size:
raise ValueError("embedding size error: %s vs %s" %
(size, d_size[name]))
# check embedding accessor
accessor = FLEET_GLOBAL_DICT["cur_accessor"]
if d_accessor.get(name) is None:
d_accessor[name] = accessor
elif d_accessor[name] != accessor:
raise ValueError("embedding size error: %s vs %s" %
(d_accessor[name], accessor))
# check embedding table id
if d_table.get(name) is None:
d_table[name] = FLEET_GLOBAL_DICT["cur_sparse_id"]
FLEET_GLOBAL_DICT["cur_sparse_id"] += 1
# check other params
if not is_sparse:
raise ValueError("is_sparse must be True")
elif not is_distributed:
raise ValueError("is_distributed must be True")
elif dtype != "float32":
raise ValueError("dtype must be float32")
def _fleet_embedding(input,
size,
is_sparse=False,
is_distributed=False,
padding_idx=None,
param_attr=None,
dtype='float32'):
"""
add fleet embedding, this interface is not for users.
Args:
input(Variable|list of Variable): Input is a Tensor<int64> Variable
size(list of int): the embedding dim
is_sparse(bool): whether input is sparse ids
is_distributed(bool): whether in distributed mode
padding_idx(int): padding idx of input
param_attr(ParamAttr): To specify the weight parameter property
dtype(str): data type of output
"""
# check and set params
_prepare_params(input, size, is_sparse, is_distributed, padding_idx,
param_attr, dtype)
name = param_attr.name
size = size[-1]
if padding_idx is None:
padding_idx = 0
global FLEET_GLOBAL_DICT
return fluid.layers.nn._pull_sparse(
input=input,
size=size,
table_id=FLEET_GLOBAL_DICT["emb_to_table"][name],
accessor_class=FLEET_GLOBAL_DICT["emb_to_accessor"][name],
name=name,
ctr_label_name=FLEET_GLOBAL_DICT["click_name"],
padding_id=padding_idx,
dtype=dtype,
scale_sparse_grad=FLEET_GLOBAL_DICT["scale_sparse_grad"])
def _fleet_embedding_v2(input,
size,
is_sparse=False,
is_distributed=False,
padding_idx=None,
param_attr=None,
dtype='float32'):
"""
add fleet embedding v2, this interface is not for users.
Args:
input(Variable|list of Variable): Input is a Tensor<int64> Variable
size(list of int): the embedding dim
is_sparse(bool): whether input is sparse ids
is_distributed(bool): whether in distributed mode
padding_idx(int): padding idx of input
param_attr(ParamAttr): To specify the weight parameter property
dtype(str): data type of output
"""
# check and set params
_prepare_params(input, size, is_sparse, is_distributed, padding_idx,
param_attr, dtype)
name = param_attr.name
size = size[-1]
if padding_idx is None:
padding_idx = 0
return fluid.layers.nn._pull_sparse_v2(
input=input,
size=size,
table_id=FLEET_GLOBAL_DICT["emb_to_table"][name],
accessor_class=FLEET_GLOBAL_DICT["emb_to_accessor"][name],
name=name,
ctr_label_name=FLEET_GLOBAL_DICT["click_name"],
padding_id=padding_idx,
dtype=dtype,
scale_sparse_grad=FLEET_GLOBAL_DICT["scale_sparse_grad"])
class fleet_embedding(object):
"""
fleet embedding class, it is used as a wrapper
Example:
.. code-block:: python
with fleet_embedding(click_name=label.name):
emb = fluid.layers.embedding(
input=var,
size=[-1, 11],
is_sparse=True,
is_distributed=True,
param_attr=fluid.ParamAttr(name="embedding"))
"""
def __init__(self, click_name, scale_sparse_grad=True):
"""Init."""
self.origin_emb = fluid.layers.embedding
self.origin_emb_v2 = fluid.embedding
# if user uses cvm layer after embedding, click_name can be None
self.click_name = "" if click_name is None else click_name
self.scale_sparse_grad = scale_sparse_grad
# it's default value, will be modified in minimize
self.accessor = "DownpourCtrAccessor"
def __enter__(self):
"""Enter."""
fluid.layers.embedding = _fleet_embedding
fluid.embedding = _fleet_embedding_v2
FLEET_GLOBAL_DICT["cur_accessor"] = self.accessor
FLEET_GLOBAL_DICT["click_name"] = self.click_name
FLEET_GLOBAL_DICT["scale_sparse_grad"] = self.scale_sparse_grad
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exit."""
fluid.layers.embedding = self.origin_emb
fluid.embedding = self.origin_emb_v2
FLEET_GLOBAL_DICT["cur_accessor"] = ""
FLEET_GLOBAL_DICT["click_name"] = ""
FLEET_GLOBAL_DICT["scale_sparse_grad"] = None
class DownpourOptimizer(DistributedOptimizer):
"""
DistributedOptimizer is a wrapper for paddle.fluid.optimizer
A user should pass a paddle.fluid.optimizer to DistributedOptimizer
minimize() function is implemented.
DistributedOptimizer is the starting point for a user who wants to
run distributed training. The optimized information will be stored in
Fleet() instance who holds the global information about current distributed
training.
Args:
optimizer(Optimizer): subclass of Optimizer.
strategy(any): config for DownpourOptimizer.
Returns:
None
"""
def __init__(self, optimizer, strategy=None):
super(DownpourOptimizer, self).__init__(optimizer, strategy)
self._optimizer = optimizer
self._optimizer_name = "Distributed%s" % optimizer.type.capitalize()
if optimizer.type != "adam":
print("Currently, distributed optimizer only support Adam"
"Will config built-in adam for you."
"We will support more functions in DistributedOptimizer",
sys.stderr)
self._optimizer_name = "DistributedAdam"
self._distributed_optimizer = globals()[self._optimizer_name](optimizer)
def backward(self,
loss,
startup_program=None,
parameter_list=None,
no_grad_set=None,
callbacks=None):
"""
Currently, backward function can not be called through DistributedOptimizer
"""
raise NotImplementedError()
def apply_gradients(self, params_grads):
"""
Currently, apply_gradients function can not be called through DistributedOptimizer
"""
raise NotImplementedError()
def minimize(self,
losses,
scopes=None,
startup_programs=None,
parameter_list=None,
no_grad_set=None):
"""
minimize a program through loss, loss can be a list in DistributedOptimizer.
        Note that in parameter server mode, a worker will not get anything about optimize_ops
        because optimizer algorithms run on the pserver side. We will make this usable in the pserver
process, but currently the optimization part is written into Fleet(). A user does not
need to care about how to startup a pserver node.
Args:
losses (Variable|Variable List): loss variable or loss variable list to run optimization.
scopes (Scope| Scope List): scope instance.
startup_programs (Program|Program List): startup_program for initializing parameters
in `parameter_list`.
parameter_list (list): list of Variables to update.
no_grad_set (set|None): set of Variables should be ignored.
Returns:
tuple: (optimize_ops, params_grads) which are, list of operators appended;
and list of (param, grad) Variables pair for optimization.
"""
if not isinstance(losses, list):
losses = [losses]
optimize_ops, param_grads, opt_info = \
self._distributed_optimizer._minimize(
losses,
startup_programs,
parameter_list,
no_grad_set,
self._strategy)
opt_info["mpi_rank"] = fleet.worker_index()
opt_info["mpi_size"] = fleet.worker_num()
fleet._set_opt_info(opt_info)
programs = [loss.block.program for loss in losses]
if scopes is None:
scopes = [fluid.global_scope()] * len(programs)
if len(scopes) != len(programs):
raise ValueError(
"You should make sure len(scopes) == len(programs) or set scopes None"
)
fleet._main_programs = programs
fleet._scopes = scopes
return [optimize_ops, param_grads]
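# Sketch of minimize() with multiple losses, mirroring the checks above
# (hypothetical variables; each program gets its own scope):
#
#   ops, grads = optimizer.minimize([loss_a, loss_b],
#                                   scopes=[scope_a, scope_b],
#                                   startup_programs=[startup_a, startup_b])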
|
py | 7dfe8f938404caca6ec4036bfb18e4251ff0bd49 | from scipy.stats import multivariate_normal as mn
from .base import Hypothesiser
from ..base import Property
from ..types.detection import MissedDetection
from ..types.hypothesis import SingleProbabilityHypothesis
from ..types.multihypothesis import MultipleHypothesis
from ..types.numeric import Probability
from ..predictor import Predictor
from ..updater import Updater
class PDAHypothesiser(Hypothesiser):
"""Hypothesiser based on Probabilistic Data Association (PDA)
Generate track predictions at detection times and calculate probabilities
for all prediction-detection pairs for single prediction and multiple
detections.
"""
predictor = Property(
Predictor,
doc="Predict tracks to detection times")
updater = Property(
Updater,
doc="Updater used to get measurement prediction")
clutter_spatial_density = Property(
float,
doc="Spatial density of clutter - tied to probability of false "
"detection")
prob_detect = Property(
Probability,
default=Probability(0.85),
doc="Target Detection Probability")
prob_gate = Property(
Probability,
default=Probability(0.95),
doc="Gate Probability - prob. gate contains true measurement "
"if detected")
def hypothesise(self, track, detections, timestamp):
r"""Evaluate and return all track association hypotheses.
For a given track and a set of N detections, return a
MultipleHypothesis with N+1 detections (first detection is
a 'MissedDetection'), each with an associated probability.
Probabilities are assumed to be exhaustive (sum to 1) and mutually
exclusive (two detections cannot be the correct association at the
same time).
Detection 0: missed detection, none of the detections are associated
with the track.
Detection :math:`i, i \in {1...N}`: detection i is associated
with the track.
The probabilities for these detections are calculated as follow:
.. math::
\beta_i(k) = \begin{cases}
\frac{\mathcal{L}_{i}(k)}{1-P_{D}P_{G}+\sum_{j=1}^{m(k)}
\mathcal{L}_{j}(k)}, \quad i=1,...,m(k) \\
\frac{1-P_{D}P_{G}}{1-P_{D}P_{G}+\sum_{j=1}^{m(k)}
\mathcal{L}_{j}(k)}, \quad i=0
\end{cases}
where
.. math::
\mathcal{L}_{i}(k) = \frac{\mathcal{N}[z_{i}(k);\hat{z}(k|k-1),
S(k)]P_{D}}{\lambda}
:math:`\lambda` is the clutter density
:math:`P_{D}` is the detection probability
:math:`P_{G}` is the gate probability
:math:`\mathcal{N}[z_{i}(k);\hat{z}(k|k-1),S(k)]` is the likelihood
ratio of the measurement :math:`z_{i}(k)` originating from the track
target rather than the clutter.
NOTE: Since all probabilities have the same denominator and are
normalized later, the denominator can be discarded.
References:
[1] "The Probabilistic Data Association Filter: Estimation in the
Presence of Measurement Origin Uncertainty" -
https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=5338565
[2] "Robotics 2 Data Association" (Lecture notes) -
http://ais.informatik.uni-freiburg.de/teaching/ws10/robotics2/pdfs/rob2-15-dataassociation.pdf
Parameters
----------
track: :class:`~.Track`
The track object to hypothesise on
detections: :class:`list`
A list of :class:`~Detection` objects, representing the available
detections.
timestamp: :class:`datetime.datetime`
A timestamp used when evaluating the state and measurement
predictions. Note that if a given detection has a non empty
timestamp, then prediction will be performed according to
the timestamp of the detection.
Returns
-------
: :class:`~.MultipleHypothesis`
A container of :class:`~SingleProbabilityHypothesis` objects
"""
hypotheses = list()
# Common state & measurement prediction
prediction = self.predictor.predict(track, timestamp=timestamp)
# Missed detection hypothesis
probability = Probability(1 - self.prob_detect*self.prob_gate)
hypotheses.append(
SingleProbabilityHypothesis(
prediction,
MissedDetection(timestamp=timestamp),
probability
))
# True detection hypotheses
for detection in detections:
# Re-evaluate prediction
prediction = self.predictor.predict(
track, timestamp=detection.timestamp)
# Compute measurement prediction and probability measure
measurement_prediction = self.updater.predict_measurement(
prediction, detection.measurement_model)
            # Calculate the difference beforehand to handle custom types (mean defaults to zero).
            # This is required as logpdf converts arrays to floats.
log_pdf = mn.logpdf(
(detection.state_vector - measurement_prediction.state_vector).ravel(),
cov=measurement_prediction.covar)
pdf = Probability(log_pdf, log_value=True)
probability = (pdf * self.prob_detect)/self.clutter_spatial_density
# True detection hypothesis
hypotheses.append(
SingleProbabilityHypothesis(
prediction,
detection,
probability,
measurement_prediction))
return MultipleHypothesis(hypotheses, normalise=True, total_weight=1)
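# Worked illustration of the weights above (hypothetical numbers): with
# P_D * P_G = 0.8 and two detections whose likelihood ratios are
# L_1 = 0.5 and L_2 = 0.3, the unnormalised weights are
# (1 - 0.8, 0.5, 0.3) = (0.2, 0.5, 0.3); MultipleHypothesis(normalise=True)
# then rescales them to sum to 1.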
|
py | 7dfe8fcca04705d1959d2b71a6a665338be95111 | from glob import glob
from collections import defaultdict
import pickle
def load_pickle(dict_dir, InpOrOut, TrainOrVal, chunk):
if InpOrOut == '/noisy/':
with open(dict_dir + InpOrOut + TrainOrVal + 'input_dict_%d' % chunk + '.pickle', 'rb') as handle:
return pickle.load(handle)
else:
with open(dict_dir + InpOrOut + TrainOrVal + 'target_dict_%d' % chunk + '.pickle', 'rb') as handle:
return pickle.load(handle)
def get_dictionaries(dict_dir, chunks, trainFlag):
input_dict = []
target_dict = []
wav_ids = []
num_frames = []
for chunk_idx, chunk in enumerate(chunks):
if trainFlag == 1:
input_dict.append(load_pickle(dict_dir, '/noisy/', 'train/', chunk))
else:
input_dict.append(load_pickle(dict_dir, '/noisy/', 'val/', chunk))
wav_ids.append(list(input_dict[chunk_idx].keys()))
# Assuming same number of frames per wav segment (which is true in the current setup)
num_frames.append(list(input_dict[chunk_idx].values())[0].shape[0])
if trainFlag == 1:
target_dict.append(load_pickle(dict_dir, '/clean/', 'train/', chunk))
else:
target_dict.append(load_pickle(dict_dir, '/clean/', 'val/', chunk))
return input_dict, target_dict, [len(wav_ids[x])*num_frames[x] for x in range(len(chunks))]
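# Hypothetical usage sketch, assuming the directory layout implied by the
# paths above (<dict_dir>/noisy/{train,val}/ and <dict_dir>/clean/{train,val}/):
#
#   inputs, targets, sizes = get_dictionaries('./dicts', chunks=[0, 1], trainFlag=1)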
|
py | 7dfe8fcdea51f97a2a9523b3d272742e7b5317a7 | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from azure.cli.core.util import sdk_no_wait
def cloud_service_role_instance_list(client,
resource_group_name,
cloud_service_name):
return client.list(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_show(client,
role_instance_name,
resource_group_name,
cloud_service_name):
return client.get(role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_delete(client,
role_instance_name,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_delete,
role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_rebuild(client,
role_instance_name,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_rebuild,
role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_reimage(client,
role_instance_name,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_reimage,
role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_restart(client,
role_instance_name,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_restart,
role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_show_instance_view(client,
role_instance_name,
resource_group_name,
cloud_service_name):
return client.get_instance_view(role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_instance_show_remote_desktop_file(client,
role_instance_name,
resource_group_name,
cloud_service_name):
return client.get_remote_desktop_file(role_instance_name=role_instance_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_list(client,
resource_group_name,
cloud_service_name):
return client.list(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_role_show(client,
role_name,
resource_group_name,
cloud_service_name):
return client.get(role_name=role_name,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_list(client,
resource_group_name):
return client.list(resource_group_name=resource_group_name)
def cloud_service_show(client,
resource_group_name,
cloud_service_name):
return client.get(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_create(client,
resource_group_name,
cloud_service_name,
location=None,
tags=None,
package_url=None,
configuration=None,
configuration_url=None,
start_cloud_service=None,
upgrade_mode=None,
extensions=None,
load_balancer_configurations=None,
id_=None,
secrets=None,
roles=None,
no_wait=False):
parameters = {}
parameters['location'] = location
parameters['tags'] = tags
parameters['properties'] = {}
parameters['properties']['package_url'] = package_url
parameters['properties']['configuration'] = configuration
parameters['properties']['configuration_url'] = configuration_url
parameters['properties']['start_cloud_service'] = start_cloud_service
parameters['properties']['upgrade_mode'] = upgrade_mode
parameters['properties']['extension_profile'] = {}
parameters['properties']['extension_profile']['extensions'] = extensions
parameters['properties']['network_profile'] = {}
parameters['properties']['network_profile']['load_balancer_configurations'] = load_balancer_configurations
parameters['properties']['network_profile']['swappable_cloud_service'] = {}
parameters['properties']['network_profile']['swappable_cloud_service']['id'] = id_
parameters['properties']['os_profile'] = {}
parameters['properties']['os_profile']['secrets'] = secrets
parameters['properties']['role_profile'] = {}
parameters['properties']['role_profile']['roles'] = roles
return sdk_no_wait(no_wait,
client.begin_create_or_update,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
parameters=parameters)
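# Hypothetical invocation sketch (the client comes from the generated SDK;
# all values are placeholders):
#
#   poller = cloud_service_create(client, 'my-rg', 'my-cs',
#                                 location='eastus',
#                                 package_url='https://.../package.cspkg',
#                                 configuration='<ServiceConfiguration ... />')
#   result = poller.result()  # block until provisioning completes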
def cloud_service_update(client,
resource_group_name,
cloud_service_name,
tags=None,
no_wait=False):
parameters = {}
parameters['tags'] = tags
return sdk_no_wait(no_wait,
client.begin_update,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
parameters=parameters)
def cloud_service_delete(client,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_delete,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_delete_instance(client,
resource_group_name,
cloud_service_name,
role_instances=None,
no_wait=False):
parameters = {}
parameters['role_instances'] = role_instances
return sdk_no_wait(no_wait,
client.begin_delete_instances,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
parameters=parameters)
def cloud_service_list_all(client):
return client.list_all()
def cloud_service_power_off(client,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_power_off,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_rebuild(client,
resource_group_name,
cloud_service_name,
role_instances=None,
no_wait=False):
parameters = {}
parameters['role_instances'] = role_instances
return sdk_no_wait(no_wait,
client.begin_rebuild,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
parameters=parameters)
def cloud_service_reimage(client,
resource_group_name,
cloud_service_name,
role_instances=None,
no_wait=False):
parameters = {}
parameters['role_instances'] = role_instances
return sdk_no_wait(no_wait,
client.begin_reimage,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
parameters=parameters)
def cloud_service_restart(client,
resource_group_name,
cloud_service_name,
role_instances=None,
no_wait=False):
parameters = {}
parameters['role_instances'] = role_instances
return sdk_no_wait(no_wait,
client.begin_restart,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
parameters=parameters)
def cloud_service_show_instance_view(client,
resource_group_name,
cloud_service_name):
return client.get_instance_view(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_start(client,
resource_group_name,
cloud_service_name,
no_wait=False):
return sdk_no_wait(no_wait,
client.begin_start,
resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_update_domain_list_update_domain(client,
resource_group_name,
cloud_service_name):
return client.list_update_domains(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name)
def cloud_service_update_domain_show_update_domain(client,
resource_group_name,
cloud_service_name,
update_domain):
return client.get_update_domain(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
update_domain=update_domain)
def cloud_service_update_domain_walk_update_domain(client,
resource_group_name,
cloud_service_name,
update_domain):
parameters = {}
return client.begin_walk_update_domain(resource_group_name=resource_group_name,
cloud_service_name=cloud_service_name,
update_domain=update_domain,
parameters=parameters)
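# --- Illustrative usage sketch (an addition, not part of the generated
# module). Assumptions: `sdk_no_wait` is imported near the top of this file,
# and `client` is normally the cloud_services operations group of the compute
# management client; the fake operations object below simply echoes the nested
# payload that cloud_service_create assembles from the flat CLI arguments.
if __name__ == '__main__':
    class _FakeCloudServicesOps(object):  # hypothetical stand-in
        def begin_create_or_update(self, resource_group_name,
                                   cloud_service_name, parameters):
            return parameters
    payload = cloud_service_create(_FakeCloudServicesOps(),
                                   resource_group_name='my-rg',
                                   cloud_service_name='my-cs',
                                   location='eastus',
                                   package_url='https://example.invalid/app.cspkg',
                                   upgrade_mode='Auto')
    assert payload['properties']['upgrade_mode'] == 'Auto'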
|
py | 7dfe906ab30609bb113beb1c5f1a69bac9078dd5 | """Database model for requests
"""
from django.core.validators import RegexValidator
from django.db import models
from cms.models.regions.region import Region
from cms.models.vehicles.vehicle import Vehicle
class Request(models.Model):
"""Object for a request
Args:
models : Database model inherit from the standard django models
"""
MALE = 'm'
FEMALE = 'w'
OTHER = 'x'
    GENDER_CHOICES = (
        (MALE, 'male'),
        (FEMALE, 'female'),
        (OTHER, 'other'),
    )
GROUPSIZE = [("0", "-")] # , *(zip(range(1, 10), (range(1, 10)))), ("11", ">10")]
for i in range(1, 10):
GROUPSIZE.append([str(i), str(i)])
GROUPSIZE.append(("11", ">10"))
# Data about person in need (pin)
pinname = models.CharField(max_length=250, default="", blank=True)
wheelchair = models.BooleanField(default=False)
gender = models.CharField(max_length=20, choices=GENDER_CHOICES, default=OTHER)
medical_needs = models.BooleanField(default=False)
    luggage = models.IntegerField(default=0, choices=[(i, i) for i in range(10)], unique=False)  # a list, not zip(): zip() is a single-use iterator on Python 3
iso_mat = models.BooleanField(default=False)
blanket = models.BooleanField(default=False)
jacket = models.BooleanField(default=False)
sleeping_bag = models.BooleanField(default=False)
children = models.BooleanField(default=False)
pets = models.BooleanField(default=False)
group = models.CharField(max_length=5, choices=GROUPSIZE, default="0", unique=False)
# Data about helper
helpername = models.CharField(max_length=250, default="", blank=True)
phone = models.CharField(max_length=20, default="0049")
# Data about location
region = models.ForeignKey(Region, related_name='requests', on_delete=models.SET_NULL, null=True)
address = models.CharField(max_length=250)
postcode = models.CharField(max_length=10, validators=[
RegexValidator(regex='^.{5}$', message='Length has to be 5', code='nomatch')])
city = models.CharField(max_length=250, default="Berlin")
country = models.CharField(max_length=250, default="Berlin")
latitude = models.FloatField(default=0.0, blank=True)
longitude = models.FloatField(default=0.0, blank=True)
# other metadata
archived = models.BooleanField(default=False)
assigned_bus = models.ForeignKey(Vehicle, related_name='assignedRequests', on_delete=models.SET_NULL, null=True, blank=True)
active_route = models.BooleanField(default=True)
@staticmethod
def vehicles():
"""
        :return: all objects of the Vehicle model
"""
return Vehicle.objects.all()
@classmethod
def get_list_view(cls):
"""Provides List of all requests
Returns:
[Request]: List of all requests
"""
return cls.objects.all()
class Meta:
default_permissions = ()
permissions = (
('manage_requests', 'Can manage Requests'),
)
ordering = ['pinname']
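# Illustrative usage (an assumption, not part of this app): with Django
# settings configured and migrations applied, a dispatcher would record a
# request roughly like this:
#
#   req = Request(pinname='Jane Doe', helpername='John', phone='0049301234567',
#                 address='Some Street 1', postcode='10115', group='3',
#                 gender=Request.FEMALE, wheelchair=True)
#   req.full_clean()   # runs the postcode RegexValidator
#   req.save()
#   open_requests = Request.get_list_view().filter(archived=False)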
|
py | 7dfe927ae825266627fc83c7856f43d93fcda3d3 | #!/usr/bin/env python3
import tkinter as tk
import tkinter.ttk as ttk
class CanvasDraw(tk.Frame):
def __init__(self, parent):
super(CanvasDraw, self).__init__(parent)
self.__init_window_ui()
def __init_window_ui(self):
self.master.title('Canvas Draw Colours')
self.pack(fill = tk.BOTH, expand = True)
self.style = ttk.Style()
self.style.configure("TFrame", background = '#333')
self.__init_window_size(640, 480)
self.__init_canvas()
def __init_canvas(self):
canvas = tk.Canvas(self)
canvas.create_oval(10, 10, 80, 80, outline = 'red', fill = 'green', width = 2)
canvas.create_oval(110, 10, 210, 80, outline = 'red', fill = 'green', width = 2)
canvas.create_rectangle(230, 10, 290, 60, outline = 'red', fill = 'green', width = 2)
canvas.create_arc(30, 200, 90, 100, start = 0, extent = 210, outline = 'red', fill = 'green', width = 2)
canvas.create_rectangle(300, 300, 400, 400)
canvas.create_oval(300, 300, 400, 400, outline = 'blue')
canvas.create_rectangle(500, 200, 600, 450)
canvas.create_oval(500, 200, 600, 450, outline = 'blue')
canvas.create_rectangle(30, 400, 90, 300)
        canvas.create_arc(30, 400, 90, 300, start = 0, extent = 90, fill = 'red') # 0 - 90 degrees
points = [150, 100, 200, 120, 240, 180, 210, 200, 150, 150, 100, 200]
canvas.create_polygon(points, outline = 'red', fill = 'green', width = 2)
canvas.pack(fill = tk.BOTH, expand = True)
def __init_window_size(self, width = None, height = None):
screen_width = self.master.winfo_screenwidth()
screen_height = self.master.winfo_screenheight()
if width is None:
width = screen_width
if height is None:
height = screen_height
x_coordinate = int((screen_width - width) / 2)
y_coordinate = int((screen_height - height) / 2)
self.master.geometry('{}x{}+{}+{}'.format(width, height, x_coordinate, y_coordinate))
if __name__ == '__main__':
root = tk.Tk()
main_frame = CanvasDraw(root)
root.mainloop()
|
py | 7dfe92bb71855e00150df5264dd35c57c8a5c56a | #A robot moves in a plane starting from the original point (0,0). The robot can move toward UP, DOWN, LEFT and RIGHT with a given steps. The trace of robot movement is shown as the following:
#UP 5
#DOWN 3
#LEFT 3
#RIGHT 2
#The numbers after the direction are steps. Please write a program to compute the distance from current position after a sequence of movement and original point. If the distance is a float, then just print the nearest integer.
#Example:
#If the following tuples are given as input to the program:
#UP 5
#DOWN 3
#LEFT 3
#RIGHT 2
#Then, the output of the program should be:
#2
import math
pos = [0,0]
while True:
s = raw_input()
if not s:
break
movement = s.split(" ")
direction = movement[0]
steps = int(movement[1])
if direction == "UP":
pos[0] += steps
elif direction == "DOWN":
pos[0] -= steps
elif direction == "LEFT":
pos[1] -= steps
elif direction == "RIGHT":
pos[1] += steps
else:
pass
print(int(round(math.sqrt(pos[1]**2+pos[0]**2))))
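# A self-contained restatement (an illustrative addition, not part of the
# original exercise) that reproduces the sample answer without reading stdin:
def distance_from_origin(moves):
    x = y = 0
    for direction, steps in moves:
        if direction == "UP":
            y += steps
        elif direction == "DOWN":
            y -= steps
        elif direction == "LEFT":
            x -= steps
        elif direction == "RIGHT":
            x += steps
    return int(round(math.sqrt(x * x + y * y)))
# The sample trace ends at x = -1, y = 2, so the rounded distance is 2.
assert distance_from_origin([("UP", 5), ("DOWN", 3), ("LEFT", 3), ("RIGHT", 2)]) == 2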
|
py | 7dfe92df2f5368ef3ad4e83345991ebdeb1e3665 | # Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""helpers methods for the cp_model module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numbers
INT_MIN = -9223372036854775808 # hardcoded to be platform independent.
INT_MAX = 9223372036854775807
INT32_MIN = -2147483648
INT32_MAX = 2147483647
def AssertIsInt64(x):
"""Asserts that x is integer and x is in [min_int_64, max_int_64]."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not an integer: %s' % x)
if x < INT_MIN or x > INT_MAX:
raise OverflowError('Does not fit in an int64_t: %s' % x)
def AssertIsInt32(x):
"""Asserts that x is integer and x is in [min_int_32, max_int_32]."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not an integer: %s' % x)
if x < INT32_MIN or x > INT32_MAX:
raise OverflowError('Does not fit in an int32_t: %s' % x)
def AssertIsBoolean(x):
"""Asserts that x is 0 or 1."""
if not isinstance(x, numbers.Integral) or x < 0 or x > 1:
    raise TypeError('Not a boolean: %s' % x)
def CapInt64(v):
"""Restrict v within [INT_MIN..INT_MAX] range."""
if v > INT_MAX:
return INT_MAX
if v < INT_MIN:
return INT_MIN
return v
def CapSub(x, y):
"""Saturated arithmetics. Returns x - y truncated to the int64_t range."""
AssertIsInt64(x)
AssertIsInt64(y)
if y == 0:
return x
if x == y:
if x == INT_MAX or x == INT_MIN:
raise OverflowError(
          'Integer NaN: subtracting INT_MAX or INT_MIN from itself')
return 0
if x == INT_MAX or x == INT_MIN:
return x
if y == INT_MAX:
return INT_MIN
if y == INT_MIN:
return INT_MAX
return CapInt64(x - y)
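# Self-contained check (an illustrative addition): CapSub saturates instead of
# wrapping at the int64 boundaries.
if __name__ == '__main__':
  assert CapSub(5, 3) == 2                    # ordinary subtraction
  assert CapSub(INT_MIN, 1) == INT_MIN        # INT_MIN is sticky
  assert CapSub(0, INT_MIN) == INT_MAX        # negating INT_MIN clamps to INT_MAX
  assert CapSub(INT_MAX - 1, -10) == INT_MAX  # overflow is clamped, not wrapped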
|
py | 7dfe9601995a457c4ddd76d83f98a82cea60a237 | # -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
#
# SPDX-License-Identifier: MIT
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
]
# Uncomment the below if you use native CircuitPython modules such as
# digitalio, micropython and busio. List the modules you use. Without it, the
# autodoc module docs will fail to generate with a warning.
autodoc_mock_imports = ["micropython", "microcontroller", "random"]
intersphinx_mapping = {
"python": ("https://docs.python.org/3.4", None),
"CircuitPython": ("https://circuitpython.readthedocs.io/en/latest/", None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Adafruit MiniMQTT Library"
copyright = "2019 Brent Rubell"
author = "Brent Rubell"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".env", "CODE_OF_CONDUCT.md"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
default_role = "any"
# If true, '()' will be appended to :func: etc. cross-reference text.
#
add_function_parentheses = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# If this is True, todo emits a warning for each TODO entries. The default is False.
todo_emit_warnings = True
napoleon_numpy_docstring = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd: # only import and set the theme if we're building docs locally
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."]
    except ImportError:
html_theme = "default"
html_theme_path = ["."]
else:
html_theme_path = ["."]
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
html_favicon = "_static/favicon.ico"
# Output file base name for HTML help builder.
htmlhelp_basename = "AdafruitMinimqttLibrarydoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"AdafruitMiniMQTTLibrary.tex",
"AdafruitMiniMQTT Library Documentation",
author,
"manual",
),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
"AdafruitMiniMQTTlibrary",
"Adafruit MiniMQTT Library Documentation",
[author],
1,
)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"AdafruitMiniMQTTLibrary",
"Adafruit MiniMQTT Library Documentation",
author,
"AdafruitMiniMQTTLibrary",
"One line description of project.",
"Miscellaneous",
),
]
|
py | 7dfe964722e8a3ee7eedec3f4173b10ddc6c6d0c | #!/usr/bin/python2.4
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a Python interface to the Twitter API'''
__author__ = '[email protected]'
__version__ = '0.8.1'
import base64
import calendar
import datetime
import httplib
import os
import rfc822
import sys
import tempfile
import textwrap
import time
import urllib
import urllib2
import urlparse
import gzip
import StringIO
import socket
try:
# Python >= 2.6
import json as simplejson
except ImportError:
try:
# Python < 2.6
import simplejson
except ImportError:
try:
# Google App Engine
from django.utils import simplejson
except ImportError:
raise ImportError, "Unable to load a json library"
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl, parse_qs
except ImportError:
from cgi import parse_qsl, parse_qs
try:
from hashlib import md5
except ImportError:
from md5 import md5
import oauth2 as oauth
CHARACTER_LIMIT = 140
# A singleton representing a lazily instantiated FileCache.
DEFAULT_CACHE = object()
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
class TwitterError(Exception):
'''Base class for Twitter errors'''
@property
def message(self):
'''Returns the first argument used to construct this error.'''
return self.args[0]
class Status(object):
'''A class representing the Status structure used by the twitter API.
The Status structure exposes the following properties:
status.created_at
status.created_at_in_seconds # read only
status.favorited
status.in_reply_to_screen_name
status.in_reply_to_user_id
status.in_reply_to_status_id
status.truncated
status.source
status.id
status.text
status.location
status.relative_created_at # read only
status.user
status.urls
status.user_mentions
status.hashtags
status.geo
status.place
status.coordinates
status.contributors
'''
def __init__(self,
created_at=None,
favorited=None,
id=None,
text=None,
location=None,
user=None,
in_reply_to_screen_name=None,
in_reply_to_user_id=None,
in_reply_to_status_id=None,
truncated=None,
source=None,
now=None,
urls=None,
user_mentions=None,
hashtags=None,
geo=None,
place=None,
coordinates=None,
contributors=None):
'''An object to hold a Twitter status message.
This class is normally instantiated by the twitter.Api class and
returned in a sequence.
Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
Args:
created_at:
The time this status message was posted. [Optional]
favorited:
Whether this is a favorite of the authenticated user. [Optional]
id:
The unique id of this status message. [Optional]
text:
The text of this status message. [Optional]
location:
the geolocation string associated with this message. [Optional]
user:
A twitter.User instance representing the person posting the
message. [Optional]
now:
        The current time, if the client chooses to set it.
Defaults to the wall clock time. [Optional]
'''
self.created_at = created_at
self.favorited = favorited
self.id = id
self.text = text
self.location = location
self.user = user
self.now = now
self.in_reply_to_screen_name = in_reply_to_screen_name
self.in_reply_to_user_id = in_reply_to_user_id
self.in_reply_to_status_id = in_reply_to_status_id
self.truncated = truncated
self.source = source
self.urls = urls
self.user_mentions = user_mentions
self.hashtags = hashtags
self.geo = geo
self.place = place
self.coordinates = coordinates
self.contributors = contributors
def GetCreatedAt(self):
'''Get the time this status message was posted.
Returns:
The time this status message was posted
'''
return self._created_at
def SetCreatedAt(self, created_at):
'''Set the time this status message was posted.
Args:
created_at:
The time this status message was created
'''
self._created_at = created_at
created_at = property(GetCreatedAt, SetCreatedAt,
doc='The time this status message was posted.')
def GetCreatedAtInSeconds(self):
'''Get the time this status message was posted, in seconds since the epoch.
Returns:
The time this status message was posted, in seconds since the epoch.
'''
return calendar.timegm(rfc822.parsedate(self.created_at))
created_at_in_seconds = property(GetCreatedAtInSeconds,
doc="The time this status message was "
"posted, in seconds since the epoch")
def GetFavorited(self):
'''Get the favorited setting of this status message.
Returns:
True if this status message is favorited; False otherwise
'''
return self._favorited
def SetFavorited(self, favorited):
'''Set the favorited state of this status message.
Args:
favorited:
boolean True/False favorited state of this status message
'''
self._favorited = favorited
favorited = property(GetFavorited, SetFavorited,
doc='The favorited state of this status message.')
def GetId(self):
'''Get the unique id of this status message.
Returns:
The unique id of this status message
'''
return self._id
def SetId(self, id):
'''Set the unique id of this status message.
Args:
id:
The unique id of this status message
'''
self._id = id
id = property(GetId, SetId,
doc='The unique id of this status message.')
def GetInReplyToScreenName(self):
return self._in_reply_to_screen_name
def SetInReplyToScreenName(self, in_reply_to_screen_name):
self._in_reply_to_screen_name = in_reply_to_screen_name
in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
doc='')
def GetInReplyToUserId(self):
return self._in_reply_to_user_id
def SetInReplyToUserId(self, in_reply_to_user_id):
self._in_reply_to_user_id = in_reply_to_user_id
in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
doc='')
def GetInReplyToStatusId(self):
return self._in_reply_to_status_id
def SetInReplyToStatusId(self, in_reply_to_status_id):
self._in_reply_to_status_id = in_reply_to_status_id
in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
doc='')
def GetTruncated(self):
return self._truncated
def SetTruncated(self, truncated):
self._truncated = truncated
truncated = property(GetTruncated, SetTruncated,
doc='')
def GetSource(self):
return self._source
def SetSource(self, source):
self._source = source
source = property(GetSource, SetSource,
doc='')
def GetText(self):
'''Get the text of this status message.
Returns:
The text of this status message.
'''
return self._text
def SetText(self, text):
'''Set the text of this status message.
Args:
text:
The text of this status message
'''
self._text = text
text = property(GetText, SetText,
doc='The text of this status message')
def GetLocation(self):
'''Get the geolocation associated with this status message
Returns:
The geolocation string of this status message.
'''
return self._location
def SetLocation(self, location):
'''Set the geolocation associated with this status message
Args:
location:
The geolocation string of this status message
'''
self._location = location
location = property(GetLocation, SetLocation,
doc='The geolocation string of this status message')
def GetRelativeCreatedAt(self):
    '''Get a human readable string representing the posting time
Returns:
A human readable string representing the posting time
'''
fudge = 1.25
delta = long(self.now) - long(self.created_at_in_seconds)
if delta < (1 * fudge):
return 'about a second ago'
elif delta < (60 * (1/fudge)):
return 'about %d seconds ago' % (delta)
elif delta < (60 * fudge):
return 'about a minute ago'
elif delta < (60 * 60 * (1/fudge)):
return 'about %d minutes ago' % (delta / 60)
elif delta < (60 * 60 * fudge) or delta / (60 * 60) == 1:
return 'about an hour ago'
elif delta < (60 * 60 * 24 * (1/fudge)):
return 'about %d hours ago' % (delta / (60 * 60))
elif delta < (60 * 60 * 24 * fudge) or delta / (60 * 60 * 24) == 1:
return 'about a day ago'
else:
return 'about %d days ago' % (delta / (60 * 60 * 24))
relative_created_at = property(GetRelativeCreatedAt,
doc='Get a human readable string representing '
'the posting time')
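  # Worked example (an illustrative note): with fudge = 1.25, a delta of 90
  # seconds is not below 60 * (1/fudge) = 48 or 60 * fudge = 75, but is below
  # 60 * 60 * (1/fudge), so the result is 'about %d minutes ago' % (90 / 60),
  # i.e. 'about 1 minutes ago' under Python 2 integer division.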
def GetUser(self):
    '''Get a twitter.User representing the entity posting this status message.
    Returns:
      A twitter.User representing the entity posting this status message
    '''
    return self._user
  def SetUser(self, user):
    '''Set a twitter.User representing the entity posting this status message.
    Args:
      user:
        A twitter.User representing the entity posting this status message
    '''
    self._user = user
  user = property(GetUser, SetUser,
                  doc='A twitter.User representing the entity posting this '
                      'status message')
def GetNow(self):
'''Get the wallclock time for this status message.
Used to calculate relative_created_at. Defaults to the time
the object was instantiated.
Returns:
Whatever the status instance believes the current time to be,
in seconds since the epoch.
'''
if self._now is None:
self._now = time.time()
return self._now
def SetNow(self, now):
'''Set the wallclock time for this status message.
Used to calculate relative_created_at. Defaults to the time
the object was instantiated.
Args:
now:
The wallclock time for this instance.
'''
self._now = now
now = property(GetNow, SetNow,
doc='The wallclock time for this status instance.')
def GetGeo(self):
return self._geo
def SetGeo(self, geo):
self._geo = geo
geo = property(GetGeo, SetGeo,
doc='')
def GetPlace(self):
return self._place
def SetPlace(self, place):
self._place = place
place = property(GetPlace, SetPlace,
doc='')
def GetCoordinates(self):
return self._coordinates
def SetCoordinates(self, coordinates):
self._coordinates = coordinates
coordinates = property(GetCoordinates, SetCoordinates,
doc='')
def GetContributors(self):
return self._contributors
def SetContributors(self, contributors):
self._contributors = contributors
contributors = property(GetContributors, SetContributors,
doc='')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.created_at == other.created_at and \
self.id == other.id and \
self.text == other.text and \
self.location == other.location and \
self.user == other.user and \
self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
self.in_reply_to_user_id == other.in_reply_to_user_id and \
self.in_reply_to_status_id == other.in_reply_to_status_id and \
self.truncated == other.truncated and \
self.favorited == other.favorited and \
self.source == other.source and \
self.geo == other.geo and \
self.place == other.place and \
self.coordinates == other.coordinates and \
self.contributors == other.contributors
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.Status instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.Status instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.Status instance.
Returns:
A JSON string representation of this twitter.Status instance
'''
return simplejson.dumps(self.AsDict(), sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.Status instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.Status instance
'''
data = {}
if self.created_at:
data['created_at'] = self.created_at
if self.id:
data['id'] = self.id
if self.text:
data['text'] = self.text
if self.location:
data['location'] = self.location
if self.user:
data['user'] = self.user.AsDict()
if self.in_reply_to_screen_name:
data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
if self.in_reply_to_user_id:
data['in_reply_to_user_id'] = self.in_reply_to_user_id
if self.in_reply_to_status_id:
data['in_reply_to_status_id'] = self.in_reply_to_status_id
if self.truncated is not None:
data['truncated'] = self.truncated
if self.favorited is not None:
data['favorited'] = self.favorited
if self.source:
data['source'] = self.source
if self.geo:
data['geo'] = self.geo
if self.place:
data['place'] = self.place
if self.coordinates:
data['coordinates'] = self.coordinates
if self.contributors:
data['contributors'] = self.contributors
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data: A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Status instance
'''
if 'user' in data:
user = User.NewFromJsonDict(data['user'])
else:
user = None
urls = None
user_mentions = None
hashtags = None
if 'entities' in data:
if 'urls' in data['entities']:
urls = [Url.NewFromJsonDict(u) for u in data['entities']['urls']]
if 'user_mentions' in data['entities']:
user_mentions = [User.NewFromJsonDict(u) for u in data['entities']['user_mentions']]
if 'hashtags' in data['entities']:
hashtags = [Hashtag.NewFromJsonDict(h) for h in data['entities']['hashtags']]
return Status(created_at=data.get('created_at', None),
favorited=data.get('favorited', None),
id=data.get('id', None),
text=data.get('text', None),
location=data.get('location', None),
in_reply_to_screen_name=data.get('in_reply_to_screen_name', None),
in_reply_to_user_id=data.get('in_reply_to_user_id', None),
in_reply_to_status_id=data.get('in_reply_to_status_id', None),
truncated=data.get('truncated', None),
source=data.get('source', None),
user=user,
urls=urls,
user_mentions=user_mentions,
hashtags=hashtags,
geo=data.get('geo', None),
place=data.get('place', None),
coordinates=data.get('coordinates', None),
contributors=data.get('contributors', None))
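# Illustrative round-trip (an addition, not part of the original library):
#
#   >>> raw = {'id': 1, 'text': 'hello', 'favorited': False,
#   ...        'entities': {'hashtags': [{'text': 'python'}]}}
#   >>> status = Status.NewFromJsonDict(raw)
#   >>> status.hashtags[0].text
#   'python'
#   >>> status.AsDict()['text']
#   'hello'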
class User(object):
'''A class representing the User structure used by the twitter API.
The User structure exposes the following properties:
user.id
user.name
user.screen_name
user.location
user.description
user.profile_image_url
user.profile_background_tile
user.profile_background_image_url
user.profile_sidebar_fill_color
user.profile_background_color
user.profile_link_color
user.profile_text_color
user.protected
user.utc_offset
user.time_zone
user.url
user.status
user.statuses_count
user.followers_count
user.friends_count
user.favourites_count
user.geo_enabled
'''
def __init__(self,
id=None,
name=None,
screen_name=None,
location=None,
description=None,
profile_image_url=None,
profile_background_tile=None,
profile_background_image_url=None,
profile_sidebar_fill_color=None,
profile_background_color=None,
profile_link_color=None,
profile_text_color=None,
protected=None,
utc_offset=None,
time_zone=None,
followers_count=None,
friends_count=None,
statuses_count=None,
favourites_count=None,
url=None,
status=None,
geo_enabled=None):
self.id = id
self.name = name
self.screen_name = screen_name
self.location = location
self.description = description
self.profile_image_url = profile_image_url
self.profile_background_tile = profile_background_tile
self.profile_background_image_url = profile_background_image_url
self.profile_sidebar_fill_color = profile_sidebar_fill_color
self.profile_background_color = profile_background_color
self.profile_link_color = profile_link_color
self.profile_text_color = profile_text_color
self.protected = protected
self.utc_offset = utc_offset
self.time_zone = time_zone
self.followers_count = followers_count
self.friends_count = friends_count
self.statuses_count = statuses_count
self.favourites_count = favourites_count
self.url = url
self.status = status
self.geo_enabled = geo_enabled
def GetId(self):
'''Get the unique id of this user.
Returns:
The unique id of this user
'''
return self._id
def SetId(self, id):
'''Set the unique id of this user.
Args:
id: The unique id of this user.
'''
self._id = id
id = property(GetId, SetId,
doc='The unique id of this user.')
def GetName(self):
'''Get the real name of this user.
Returns:
The real name of this user
'''
return self._name
def SetName(self, name):
'''Set the real name of this user.
Args:
name: The real name of this user
'''
self._name = name
name = property(GetName, SetName,
doc='The real name of this user.')
def GetScreenName(self):
'''Get the short twitter name of this user.
Returns:
The short twitter name of this user
'''
return self._screen_name
def SetScreenName(self, screen_name):
'''Set the short twitter name of this user.
Args:
screen_name: the short twitter name of this user
'''
self._screen_name = screen_name
screen_name = property(GetScreenName, SetScreenName,
doc='The short twitter name of this user.')
def GetLocation(self):
'''Get the geographic location of this user.
Returns:
The geographic location of this user
'''
return self._location
def SetLocation(self, location):
'''Set the geographic location of this user.
Args:
location: The geographic location of this user
'''
self._location = location
location = property(GetLocation, SetLocation,
doc='The geographic location of this user.')
def GetDescription(self):
'''Get the short text description of this user.
Returns:
The short text description of this user
'''
return self._description
def SetDescription(self, description):
'''Set the short text description of this user.
Args:
description: The short text description of this user
'''
self._description = description
description = property(GetDescription, SetDescription,
doc='The short text description of this user.')
def GetUrl(self):
'''Get the homepage url of this user.
Returns:
The homepage url of this user
'''
return self._url
def SetUrl(self, url):
'''Set the homepage url of this user.
Args:
url: The homepage url of this user
'''
self._url = url
url = property(GetUrl, SetUrl,
doc='The homepage url of this user.')
def GetProfileImageUrl(self):
'''Get the url of the thumbnail of this user.
Returns:
The url of the thumbnail of this user
'''
return self._profile_image_url
def SetProfileImageUrl(self, profile_image_url):
'''Set the url of the thumbnail of this user.
Args:
profile_image_url: The url of the thumbnail of this user
'''
self._profile_image_url = profile_image_url
  profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
doc='The url of the thumbnail of this user.')
def GetProfileBackgroundTile(self):
'''Boolean for whether to tile the profile background image.
Returns:
True if the background is to be tiled, False if not, None if unset.
'''
return self._profile_background_tile
def SetProfileBackgroundTile(self, profile_background_tile):
'''Set the boolean flag for whether to tile the profile background image.
Args:
profile_background_tile: Boolean flag for whether to tile or not.
'''
self._profile_background_tile = profile_background_tile
profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
doc='Boolean for whether to tile the background image.')
def GetProfileBackgroundImageUrl(self):
return self._profile_background_image_url
def SetProfileBackgroundImageUrl(self, profile_background_image_url):
self._profile_background_image_url = profile_background_image_url
profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
doc='The url of the profile background of this user.')
def GetProfileSidebarFillColor(self):
return self._profile_sidebar_fill_color
def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
self._profile_sidebar_fill_color = profile_sidebar_fill_color
profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor)
def GetProfileBackgroundColor(self):
return self._profile_background_color
def SetProfileBackgroundColor(self, profile_background_color):
self._profile_background_color = profile_background_color
profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor)
def GetProfileLinkColor(self):
return self._profile_link_color
def SetProfileLinkColor(self, profile_link_color):
self._profile_link_color = profile_link_color
profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor)
def GetProfileTextColor(self):
return self._profile_text_color
def SetProfileTextColor(self, profile_text_color):
self._profile_text_color = profile_text_color
profile_text_color = property(GetProfileTextColor, SetProfileTextColor)
def GetProtected(self):
return self._protected
def SetProtected(self, protected):
self._protected = protected
protected = property(GetProtected, SetProtected)
def GetUtcOffset(self):
return self._utc_offset
def SetUtcOffset(self, utc_offset):
self._utc_offset = utc_offset
utc_offset = property(GetUtcOffset, SetUtcOffset)
def GetTimeZone(self):
'''Returns the current time zone string for the user.
Returns:
The descriptive time zone string for the user.
'''
return self._time_zone
def SetTimeZone(self, time_zone):
'''Sets the user's time zone string.
Args:
time_zone:
The descriptive time zone to assign for the user.
'''
self._time_zone = time_zone
time_zone = property(GetTimeZone, SetTimeZone)
def GetStatus(self):
'''Get the latest twitter.Status of this user.
Returns:
The latest twitter.Status of this user
'''
return self._status
def SetStatus(self, status):
'''Set the latest twitter.Status of this user.
Args:
status:
The latest twitter.Status of this user
'''
self._status = status
status = property(GetStatus, SetStatus,
doc='The latest twitter.Status of this user.')
def GetFriendsCount(self):
'''Get the friend count for this user.
Returns:
The number of users this user has befriended.
'''
return self._friends_count
def SetFriendsCount(self, count):
'''Set the friend count for this user.
Args:
count:
The number of users this user has befriended.
'''
self._friends_count = count
friends_count = property(GetFriendsCount, SetFriendsCount,
doc='The number of friends for this user.')
def GetFollowersCount(self):
'''Get the follower count for this user.
Returns:
The number of users following this user.
'''
return self._followers_count
def SetFollowersCount(self, count):
'''Set the follower count for this user.
Args:
count:
The number of users following this user.
'''
self._followers_count = count
followers_count = property(GetFollowersCount, SetFollowersCount,
doc='The number of users following this user.')
def GetStatusesCount(self):
'''Get the number of status updates for this user.
Returns:
The number of status updates for this user.
'''
return self._statuses_count
def SetStatusesCount(self, count):
'''Set the status update count for this user.
Args:
count:
The number of updates for this user.
'''
self._statuses_count = count
statuses_count = property(GetStatusesCount, SetStatusesCount,
doc='The number of updates for this user.')
def GetFavouritesCount(self):
'''Get the number of favourites for this user.
Returns:
The number of favourites for this user.
'''
return self._favourites_count
def SetFavouritesCount(self, count):
'''Set the favourite count for this user.
Args:
count:
The number of favourites for this user.
'''
self._favourites_count = count
favourites_count = property(GetFavouritesCount, SetFavouritesCount,
doc='The number of favourites for this user.')
def GetGeoEnabled(self):
'''Get the setting of geo_enabled for this user.
Returns:
True/False if Geo tagging is enabled
'''
return self._geo_enabled
def SetGeoEnabled(self, geo_enabled):
'''Set the latest twitter.geo_enabled of this user.
Args:
geo_enabled:
True/False if Geo tagging is to be enabled
'''
self._geo_enabled = geo_enabled
geo_enabled = property(GetGeoEnabled, SetGeoEnabled,
doc='The value of twitter.geo_enabled for this user.')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.screen_name == other.screen_name and \
self.location == other.location and \
self.description == other.description and \
self.profile_image_url == other.profile_image_url and \
self.profile_background_tile == other.profile_background_tile and \
self.profile_background_image_url == other.profile_background_image_url and \
self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
self.profile_background_color == other.profile_background_color and \
self.profile_link_color == other.profile_link_color and \
self.profile_text_color == other.profile_text_color and \
self.protected == other.protected and \
self.utc_offset == other.utc_offset and \
self.time_zone == other.time_zone and \
self.url == other.url and \
self.statuses_count == other.statuses_count and \
self.followers_count == other.followers_count and \
self.favourites_count == other.favourites_count and \
self.friends_count == other.friends_count and \
self.status == other.status and \
self.geo_enabled == other.geo_enabled
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.User instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.User instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.User instance.
Returns:
A JSON string representation of this twitter.User instance
'''
return simplejson.dumps(self.AsDict(), sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.User instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.User instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.name:
data['name'] = self.name
if self.screen_name:
data['screen_name'] = self.screen_name
if self.location:
data['location'] = self.location
if self.description:
data['description'] = self.description
if self.profile_image_url:
data['profile_image_url'] = self.profile_image_url
if self.profile_background_tile is not None:
data['profile_background_tile'] = self.profile_background_tile
    if self.profile_background_image_url:
      data['profile_background_image_url'] = self.profile_background_image_url
    if self.profile_sidebar_fill_color:
      data['profile_sidebar_fill_color'] = self.profile_sidebar_fill_color
if self.profile_background_color:
data['profile_background_color'] = self.profile_background_color
if self.profile_link_color:
data['profile_link_color'] = self.profile_link_color
if self.profile_text_color:
data['profile_text_color'] = self.profile_text_color
if self.protected is not None:
data['protected'] = self.protected
if self.utc_offset:
data['utc_offset'] = self.utc_offset
if self.time_zone:
data['time_zone'] = self.time_zone
if self.url:
data['url'] = self.url
if self.status:
data['status'] = self.status.AsDict()
if self.friends_count:
data['friends_count'] = self.friends_count
if self.followers_count:
data['followers_count'] = self.followers_count
if self.statuses_count:
data['statuses_count'] = self.statuses_count
if self.favourites_count:
data['favourites_count'] = self.favourites_count
if self.geo_enabled:
data['geo_enabled'] = self.geo_enabled
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.User instance
'''
if 'status' in data:
status = Status.NewFromJsonDict(data['status'])
else:
status = None
return User(id=data.get('id', None),
name=data.get('name', None),
screen_name=data.get('screen_name', None),
location=data.get('location', None),
description=data.get('description', None),
statuses_count=data.get('statuses_count', None),
followers_count=data.get('followers_count', None),
favourites_count=data.get('favourites_count', None),
friends_count=data.get('friends_count', None),
profile_image_url=data.get('profile_image_url', None),
profile_background_tile = data.get('profile_background_tile', None),
profile_background_image_url = data.get('profile_background_image_url', None),
profile_sidebar_fill_color = data.get('profile_sidebar_fill_color', None),
profile_background_color = data.get('profile_background_color', None),
profile_link_color = data.get('profile_link_color', None),
profile_text_color = data.get('profile_text_color', None),
protected = data.get('protected', None),
utc_offset = data.get('utc_offset', None),
time_zone = data.get('time_zone', None),
url=data.get('url', None),
status=status,
geo_enabled=data.get('geo_enabled', None))
class List(object):
'''A class representing the List structure used by the twitter API.
The List structure exposes the following properties:
list.id
list.name
list.slug
list.description
list.full_name
list.mode
list.uri
list.member_count
list.subscriber_count
list.following
'''
def __init__(self,
id=None,
name=None,
slug=None,
description=None,
full_name=None,
mode=None,
uri=None,
member_count=None,
subscriber_count=None,
following=None,
user=None):
self.id = id
self.name = name
self.slug = slug
self.description = description
self.full_name = full_name
self.mode = mode
self.uri = uri
self.member_count = member_count
self.subscriber_count = subscriber_count
self.following = following
self.user = user
def GetId(self):
'''Get the unique id of this list.
Returns:
The unique id of this list
'''
return self._id
def SetId(self, id):
'''Set the unique id of this list.
Args:
id:
The unique id of this list.
'''
self._id = id
id = property(GetId, SetId,
doc='The unique id of this list.')
def GetName(self):
'''Get the real name of this list.
Returns:
The real name of this list
'''
return self._name
def SetName(self, name):
'''Set the real name of this list.
Args:
name:
The real name of this list
'''
self._name = name
name = property(GetName, SetName,
doc='The real name of this list.')
def GetSlug(self):
'''Get the slug of this list.
Returns:
The slug of this list
'''
return self._slug
def SetSlug(self, slug):
'''Set the slug of this list.
Args:
slug:
The slug of this list.
'''
self._slug = slug
slug = property(GetSlug, SetSlug,
doc='The slug of this list.')
def GetDescription(self):
'''Get the description of this list.
Returns:
The description of this list
'''
return self._description
def SetDescription(self, description):
'''Set the description of this list.
Args:
description:
The description of this list.
'''
self._description = description
description = property(GetDescription, SetDescription,
doc='The description of this list.')
def GetFull_name(self):
'''Get the full_name of this list.
Returns:
The full_name of this list
'''
return self._full_name
def SetFull_name(self, full_name):
'''Set the full_name of this list.
Args:
full_name:
The full_name of this list.
'''
self._full_name = full_name
full_name = property(GetFull_name, SetFull_name,
doc='The full_name of this list.')
def GetMode(self):
'''Get the mode of this list.
Returns:
The mode of this list
'''
return self._mode
def SetMode(self, mode):
'''Set the mode of this list.
Args:
mode:
The mode of this list.
'''
self._mode = mode
mode = property(GetMode, SetMode,
doc='The mode of this list.')
def GetUri(self):
'''Get the uri of this list.
Returns:
The uri of this list
'''
return self._uri
def SetUri(self, uri):
'''Set the uri of this list.
Args:
uri:
The uri of this list.
'''
self._uri = uri
uri = property(GetUri, SetUri,
doc='The uri of this list.')
def GetMember_count(self):
'''Get the member_count of this list.
Returns:
The member_count of this list
'''
return self._member_count
def SetMember_count(self, member_count):
'''Set the member_count of this list.
Args:
member_count:
The member_count of this list.
'''
self._member_count = member_count
member_count = property(GetMember_count, SetMember_count,
doc='The member_count of this list.')
def GetSubscriber_count(self):
'''Get the subscriber_count of this list.
Returns:
The subscriber_count of this list
'''
return self._subscriber_count
def SetSubscriber_count(self, subscriber_count):
'''Set the subscriber_count of this list.
Args:
subscriber_count:
The subscriber_count of this list.
'''
self._subscriber_count = subscriber_count
subscriber_count = property(GetSubscriber_count, SetSubscriber_count,
doc='The subscriber_count of this list.')
def GetFollowing(self):
'''Get the following status of this list.
Returns:
The following status of this list
'''
return self._following
def SetFollowing(self, following):
'''Set the following status of this list.
Args:
following:
The following of this list.
'''
self._following = following
following = property(GetFollowing, SetFollowing,
doc='The following status of this list.')
def GetUser(self):
'''Get the user of this list.
Returns:
The owner of this list
'''
return self._user
def SetUser(self, user):
'''Set the user of this list.
Args:
user:
The owner of this list.
'''
self._user = user
user = property(GetUser, SetUser,
doc='The owner of this list.')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.slug == other.slug and \
self.description == other.description and \
self.full_name == other.full_name and \
self.mode == other.mode and \
self.uri == other.uri and \
self.member_count == other.member_count and \
self.subscriber_count == other.subscriber_count and \
self.following == other.following and \
self.user == other.user
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.List instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.List instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.List instance.
Returns:
A JSON string representation of this twitter.List instance
'''
return simplejson.dumps(self.AsDict(), sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.List instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.List instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.name:
data['name'] = self.name
if self.slug:
data['slug'] = self.slug
if self.description:
data['description'] = self.description
if self.full_name:
data['full_name'] = self.full_name
if self.mode:
data['mode'] = self.mode
if self.uri:
data['uri'] = self.uri
if self.member_count is not None:
data['member_count'] = self.member_count
if self.subscriber_count is not None:
data['subscriber_count'] = self.subscriber_count
if self.following is not None:
data['following'] = self.following
if self.user is not None:
data['user'] = self.user
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.List instance
'''
if 'user' in data:
user = User.NewFromJsonDict(data['user'])
else:
user = None
return List(id=data.get('id', None),
name=data.get('name', None),
slug=data.get('slug', None),
description=data.get('description', None),
full_name=data.get('full_name', None),
mode=data.get('mode', None),
uri=data.get('uri', None),
member_count=data.get('member_count', None),
subscriber_count=data.get('subscriber_count', None),
following=data.get('following', None),
user=user)
class DirectMessage(object):
'''A class representing the DirectMessage structure used by the twitter API.
The DirectMessage structure exposes the following properties:
direct_message.id
direct_message.created_at
direct_message.created_at_in_seconds # read only
direct_message.sender_id
direct_message.sender_screen_name
direct_message.recipient_id
direct_message.recipient_screen_name
direct_message.text
'''
def __init__(self,
id=None,
created_at=None,
sender_id=None,
sender_screen_name=None,
recipient_id=None,
recipient_screen_name=None,
text=None):
'''An object to hold a Twitter direct message.
This class is normally instantiated by the twitter.Api class and
returned in a sequence.
Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
Args:
id:
The unique id of this direct message. [Optional]
created_at:
The time this direct message was posted. [Optional]
sender_id:
The id of the twitter user that sent this message. [Optional]
sender_screen_name:
The name of the twitter user that sent this message. [Optional]
      recipient_id:
        The id of the twitter user that received this message. [Optional]
      recipient_screen_name:
        The name of the twitter user that received this message. [Optional]
text:
The text of this direct message. [Optional]
'''
self.id = id
self.created_at = created_at
self.sender_id = sender_id
self.sender_screen_name = sender_screen_name
self.recipient_id = recipient_id
self.recipient_screen_name = recipient_screen_name
self.text = text
def GetId(self):
'''Get the unique id of this direct message.
Returns:
The unique id of this direct message
'''
return self._id
def SetId(self, id):
'''Set the unique id of this direct message.
Args:
id:
The unique id of this direct message
'''
self._id = id
id = property(GetId, SetId,
doc='The unique id of this direct message.')
def GetCreatedAt(self):
'''Get the time this direct message was posted.
Returns:
The time this direct message was posted
'''
return self._created_at
def SetCreatedAt(self, created_at):
'''Set the time this direct message was posted.
Args:
created_at:
The time this direct message was created
'''
self._created_at = created_at
created_at = property(GetCreatedAt, SetCreatedAt,
doc='The time this direct message was posted.')
def GetCreatedAtInSeconds(self):
'''Get the time this direct message was posted, in seconds since the epoch.
Returns:
The time this direct message was posted, in seconds since the epoch.
'''
return calendar.timegm(rfc822.parsedate(self.created_at))
created_at_in_seconds = property(GetCreatedAtInSeconds,
doc="The time this direct message was "
"posted, in seconds since the epoch")
def GetSenderId(self):
'''Get the unique sender id of this direct message.
Returns:
The unique sender id of this direct message
'''
return self._sender_id
def SetSenderId(self, sender_id):
'''Set the unique sender id of this direct message.
Args:
sender_id:
The unique sender id of this direct message
'''
self._sender_id = sender_id
sender_id = property(GetSenderId, SetSenderId,
doc='The unique sender id of this direct message.')
def GetSenderScreenName(self):
'''Get the unique sender screen name of this direct message.
Returns:
The unique sender screen name of this direct message
'''
return self._sender_screen_name
def SetSenderScreenName(self, sender_screen_name):
'''Set the unique sender screen name of this direct message.
Args:
sender_screen_name:
The unique sender screen name of this direct message
'''
self._sender_screen_name = sender_screen_name
sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
doc='The unique sender screen name of this direct message.')
def GetRecipientId(self):
'''Get the unique recipient id of this direct message.
Returns:
The unique recipient id of this direct message
'''
return self._recipient_id
def SetRecipientId(self, recipient_id):
'''Set the unique recipient id of this direct message.
Args:
recipient_id:
The unique recipient id of this direct message
'''
self._recipient_id = recipient_id
recipient_id = property(GetRecipientId, SetRecipientId,
doc='The unique recipient id of this direct message.')
def GetRecipientScreenName(self):
'''Get the unique recipient screen name of this direct message.
Returns:
The unique recipient screen name of this direct message
'''
return self._recipient_screen_name
def SetRecipientScreenName(self, recipient_screen_name):
'''Set the unique recipient screen name of this direct message.
Args:
recipient_screen_name:
The unique recipient screen name of this direct message
'''
self._recipient_screen_name = recipient_screen_name
recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
doc='The unique recipient screen name of this direct message.')
def GetText(self):
'''Get the text of this direct message.
Returns:
The text of this direct message.
'''
return self._text
def SetText(self, text):
'''Set the text of this direct message.
Args:
text:
The text of this direct message
'''
self._text = text
text = property(GetText, SetText,
doc='The text of this direct message')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.created_at == other.created_at and \
self.sender_id == other.sender_id and \
self.sender_screen_name == other.sender_screen_name and \
self.recipient_id == other.recipient_id and \
self.recipient_screen_name == other.recipient_screen_name and \
self.text == other.text
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.DirectMessage instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.DirectMessage instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.DirectMessage instance.
Returns:
A JSON string representation of this twitter.DirectMessage instance
'''
return simplejson.dumps(self.AsDict(), sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.DirectMessage instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.DirectMessage instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.created_at:
data['created_at'] = self.created_at
if self.sender_id:
data['sender_id'] = self.sender_id
if self.sender_screen_name:
data['sender_screen_name'] = self.sender_screen_name
if self.recipient_id:
data['recipient_id'] = self.recipient_id
if self.recipient_screen_name:
data['recipient_screen_name'] = self.recipient_screen_name
if self.text:
data['text'] = self.text
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.DirectMessage instance
'''
return DirectMessage(created_at=data.get('created_at', None),
recipient_id=data.get('recipient_id', None),
sender_id=data.get('sender_id', None),
text=data.get('text', None),
sender_screen_name=data.get('sender_screen_name', None),
id=data.get('id', None),
recipient_screen_name=data.get('recipient_screen_name', None))
class Hashtag(object):
''' A class representing a twitter hashtag
'''
def __init__(self,
text=None):
self.text = text
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Hashtag instance
'''
return Hashtag(text = data.get('text', None))
class Trend(object):
''' A class representing a trending topic
'''
def __init__(self, name=None, query=None, timestamp=None):
self.name = name
self.query = query
self.timestamp = timestamp
def __str__(self):
return 'Name: %s\nQuery: %s\nTimestamp: %s\n' % (self.name, self.query, self.timestamp)
@staticmethod
def NewFromJsonDict(data, timestamp = None):
'''Create a new instance based on a JSON dict
Args:
data:
A JSON dict
timestamp:
Gets set as the timestamp property of the new object
Returns:
A twitter.Trend object
'''
return Trend(name=data.get('name', None),
query=data.get('query', None),
timestamp=timestamp)
class Url(object):
'''A class representing a URL contained in a tweet'''
def __init__(self,
url=None,
expanded_url=None):
self.url = url
self.expanded_url = expanded_url
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Url instance
'''
return Url(url=data.get('url', None),
expanded_url=data.get('expanded_url', None))
class Api(object):
'''A python interface into the Twitter API
By default, the Api caches results for 1 minute.
Example usage:
To create an instance of the twitter.Api class, with no authentication:
>>> import twitter
>>> api = twitter.Api()
To fetch the most recently posted public twitter status messages:
>>> statuses = api.GetPublicTimeline()
>>> print [s.user.name for s in statuses]
[u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...
To fetch a single user's public status messages, where "user" is either
a Twitter "short name" or their user id.
>>> statuses = api.GetUserTimeline(user)
>>> print [s.text for s in statuses]
To use authentication, instantiate the twitter.Api class with a
consumer key and secret; and the oAuth key and secret:
>>> api = twitter.Api(consumer_key='twitter consumer key',
consumer_secret='twitter consumer secret',
access_token_key='the_key_given',
access_token_secret='the_key_secret')
To use a proxy server, instantiate the twitter.Api class with a
dictionary containing proxy servers.
>>> api = twitter.Api(consumer_key='twitter consumer key',
consumer_secret='twitter consumer secret',
access_token_key='the_key_given',
access_token_secret='the_key_secret',
proxy = { 'http' : 'proxy_server',
'https' : 'proxy_server' })
The value of proxy_server can include credentials and port numbers.
For example:
http://mike:[email protected]:8080
To specify a socket timeout, pass an optional parameter when
instantiating twitter.Api.
>>> api = twitter.Api(consumer_key='twitter consumer key',
consumer_secret='twitter consumer secret',
access_token_key='the_key_given',
access_token_secret='the_key_secret',
timeout=10)
To fetch your friends (after being authenticated):
>>> users = api.GetFriends()
>>> print [u.name for u in users]
To post a twitter status message (after being authenticated):
>>> status = api.PostUpdate('I love python-twitter!')
>>> print status.text
I love python-twitter!
There are many other methods, including:
>>> api.PostUpdates(status)
>>> api.PostDirectMessage(user, text)
>>> api.GetUser(user)
>>> api.GetReplies()
>>> api.GetUserTimeline(user)
>>> api.GetStatus(id)
>>> api.DestroyStatus(id)
>>> api.GetFriendsTimeline(user)
>>> api.GetFriends(user)
>>> api.GetFollowers()
>>> api.GetFeatured()
>>> api.GetDirectMessages()
>>> api.PostDirectMessage(user, text)
>>> api.DestroyDirectMessage(id)
>>> api.DestroyFriendship(user)
>>> api.CreateFriendship(user)
>>> api.GetUserByEmail(email)
>>> api.VerifyCredentials()
'''
DEFAULT_CACHE_TIMEOUT = 60 # cache for 1 minute
_API_REALM = 'Twitter API'
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
input_encoding=None,
request_headers=None,
cache=DEFAULT_CACHE,
shortner=None,
base_url=None,
use_gzip_compression=False,
debugHTTP=False,
proxy={},
timeout=0):
'''Instantiate a new twitter.Api object.
Args:
consumer_key:
Your Twitter user's consumer_key.
consumer_secret:
Your Twitter user's consumer_secret.
access_token_key:
The oAuth access token key value you retrieved
from running get_access_token.py.
access_token_secret:
The oAuth access token's secret, also retrieved
from the get_access_token.py run.
input_encoding:
The encoding used to encode input strings. [Optional]
request_headers:
A dictionary of additional HTTP request headers. [Optional]
cache:
The cache instance to use. Defaults to DEFAULT_CACHE.
Use None to disable caching. [Optional]
shortner:
The shortner instance to use. Defaults to None.
See shorten_url.py for an example shortner. [Optional]
base_url:
The base URL to use to contact the Twitter API.
Defaults to https://twitter.com. [Optional]
use_gzip_compression:
Set to True to enable gzip compression for any call
made to Twitter. Defaults to False. [Optional]
debugHTTP:
Set to True to enable debug output from urllib2 when performing
any HTTP requests. Defaults to False. [Optional]
'''
self.SetCache(cache)
self._urllib = urllib2
self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
self._input_encoding = input_encoding
self._use_gzip = use_gzip_compression
self._debugHTTP = debugHTTP
self._oauth_consumer = None
self._InitializeRequestHeaders(request_headers)
self._InitializeUserAgent()
self._InitializeDefaultParameters()
self._proxy = proxy
self._timeout = timeout
if base_url is None:
self.base_url = 'https://api.twitter.com/1'
else:
self.base_url = base_url
if consumer_key is not None and (access_token_key is None or
access_token_secret is None):
print >> sys.stderr, 'Twitter now requires an oAuth Access Token for API calls.'
print >> sys.stderr, "If you're using this library from a command line utility, please"
print >> sys.stderr, 'run the included get_access_token.py tool to generate one.'
raise TwitterError('Twitter requires oAuth Access Token for all API access')
self.SetCredentials(consumer_key, consumer_secret, access_token_key, access_token_secret)
def SetCredentials(self,
consumer_key,
consumer_secret,
access_token_key=None,
access_token_secret=None):
'''Set the consumer_key and consumer_secret for this instance
Args:
consumer_key:
The consumer_key of the twitter account.
consumer_secret:
The consumer_secret for the twitter account.
access_token_key:
The oAuth access token key value you retrieved
from running get_access_token.py.
access_token_secret:
The oAuth access token's secret, also retrieved
from the get_access_token.py run.
'''
self._consumer_key = consumer_key
self._consumer_secret = consumer_secret
self._access_token_key = access_token_key
self._access_token_secret = access_token_secret
self._oauth_consumer = None
if consumer_key is not None and consumer_secret is not None and \
access_token_key is not None and access_token_secret is not None:
self._signature_method_plaintext = oauth.SignatureMethod_PLAINTEXT()
self._signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
self._oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
self._oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
def ClearCredentials(self):
'''Clear any credentials for this instance
'''
self._consumer_key = None
self._consumer_secret = None
self._access_token_key = None
self._access_token_secret = None
self._oauth_consumer = None
def GetPublicTimeline(self,
since_id=None,
include_rts=None,
include_entities=None):
'''Fetch the sequence of public twitter.Status message for all users.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
include_rts:
If True, the timeline will contain native retweets (if they
exist) in addition to the standard stream of tweets. [Optional]
include_entities:
If True, each tweet will include a node called "entities".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message
'''
parameters = {}
if since_id:
parameters['since_id'] = since_id
if include_rts:
parameters['include_rts'] = 1
if include_entities:
parameters['include_entities'] = 1
url = '%s/statuses/public_timeline.json' % self.base_url
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def FilterPublicTimeline(self,
term,
since_id=None):
'''Filter the public twitter timeline by a given search term on
the local machine.
Args:
term:
term to search by.
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message
containing the term
'''
statuses = self.GetPublicTimeline(since_id)
results = []
for s in statuses:
if s.text.lower().find(term.lower()) != -1:
results.append(s)
return results
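# Illustrative use of the local filter above (hypothetical term; matching
# is a plain case-insensitive substring test on each status text):
#
#   >>> hits = api.FilterPublicTimeline('python')
#   >>> all('python' in s.text.lower() for s in hits)
#   True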
def GetSearch(self,
term=None,
geocode=None,
since_id=None,
per_page=15,
page=1,
lang="en",
show_user="true",
query_users=False):
'''Return twitter search results for a given term.
Args:
term:
term to search by. Optional if you include geocode.
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
geocode:
geolocation information in the form (latitude, longitude, radius)
[Optional]
per_page:
number of results to return. Default is 15 [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
lang:
language for results. Default is English [Optional]
show_user:
If "true", the sender's screen name is prefixed to the status text.
query_users:
If set to False, then all users only have screen_name and
profile_image_url available.
If set to True, all information about the users is available,
but it uses a lot of request quota, one extra request per status.
Returns:
A sequence of twitter.Status instances, one for each message containing
the term
'''
# Build request parameters
parameters = {}
if since_id:
parameters['since_id'] = since_id
if term is None and geocode is None:
return []
if term is not None:
parameters['q'] = term
if geocode is not None:
parameters['geocode'] = ','.join(map(str, geocode))
parameters['show_user'] = show_user
parameters['lang'] = lang
parameters['rpp'] = per_page
parameters['page'] = page
# Make and send requests
url = 'http://search.twitter.com/search.json'
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
results = []
for x in data['results']:
temp = Status.NewFromJsonDict(x)
if query_users:
# Build user object with new request
temp.user = self.GetUser(urllib.quote(x['from_user']))
else:
temp.user = User(screen_name=x['from_user'], profile_image_url=x['profile_image_url'])
results.append(temp)
# Return built list of statuses
return results # [Status.NewFromJsonDict(x) for x in data['results']]
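# Illustrative search calls (hypothetical values). Note that geocode is a
# (latitude, longitude, radius) tuple, joined by the method above into a
# single 'lat,long,radius' request parameter:
#
#   >>> results = api.GetSearch(term='python', per_page=5)
#   >>> nearby = api.GetSearch(geocode=(37.78, -122.39, '1mi'))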
def GetTrendsCurrent(self, exclude=None):
'''Get the current top trending topics
Args:
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 10 entries. Each entry contains a twitter.Trend object.
'''
parameters = {}
if exclude:
parameters['exclude'] = exclude
url = '%s/trends/current.json' % self.base_url
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
trends = []
for t in data['trends']:
for item in data['trends'][t]:
trends.append(Trend.NewFromJsonDict(item, timestamp = t))
return trends
def GetTrendsDaily(self, exclude=None, startdate=None):
'''Get the current top trending topics for each hour in a given day
Args:
startdate:
The start date for the report.
Should be in the format YYYY-MM-DD. [Optional]
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 24 entries. Each entry contains the twitter.Trend
elements that were trending at the corresponding hour of the day.
'''
parameters = {}
if exclude:
parameters['exclude'] = exclude
if not startdate:
startdate = time.strftime('%Y-%m-%d', time.gmtime())
parameters['date'] = startdate
url = '%s/trends/daily.json' % self.base_url
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
trends = []
for i in xrange(24):
trends.append(None)
for t in data['trends']:
idx = int(time.strftime('%H', time.strptime(t, '%Y-%m-%d %H:%M')))
trends[idx] = [Trend.NewFromJsonDict(x, timestamp = t)
for x in data['trends'][t]]
return trends
def GetTrendsWeekly(self, exclude=None, startdate=None):
'''Get the top 30 trending topics for each day in a given week.
Args:
startdate:
The start date for the report.
Should be in the format YYYY-MM-DD. [Optional]
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 7 entries. Each entry contains the twitter.Trend
elements of trending topics for the corresponding day of the week.
'''
parameters = {}
if exclude:
parameters['exclude'] = exclude
if not startdate:
startdate = time.strftime('%Y-%m-%d', time.gmtime())
parameters['date'] = startdate
url = '%s/trends/weekly.json' % self.base_url
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
trends = []
for i in xrange(7):
trends.append(None)
# use the epochs of the dates as keys for a dictionary
times = dict([(calendar.timegm(time.strptime(t, '%Y-%m-%d')),t)
for t in data['trends']])
cnt = 0
# create the resulting structure ordered by the epochs of the dates
for e in sorted(times.keys()):
trends[cnt] = [Trend.NewFromJsonDict(x, timestamp = times[e])
for x in data['trends'][times[e]]]
cnt += 1
return trends
def GetFriendsTimeline(self,
user=None,
count=None,
page=None,
since_id=None,
retweets=None,
include_entities=None):
'''Fetch the sequence of twitter.Status messages for a user's friends
The twitter.Api instance must be authenticated if the user is private.
Args:
user:
Specifies the ID or screen name of the user for whom to return
the friends_timeline. If not specified then the authenticated
user set in the twitter.Api instance will be used. [Optional]
count:
Specifies the number of statuses to retrieve. May not be
greater than 100. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
retweets:
If True, the timeline will contain native retweets. [Optional]
include_entities:
If True, each tweet will include a node called "entities".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message
'''
if not user and not self._oauth_consumer:
raise TwitterError("User must be specified if API is not authenticated.")
url = '%s/statuses/friends_timeline' % self.base_url
if user:
url = '%s/%s.json' % (url, user)
else:
url = '%s.json' % url
parameters = {}
if count is not None:
try:
if int(count) > 100:
raise TwitterError("'count' may not be greater than 100")
except ValueError:
raise TwitterError("'count' must be an integer")
parameters['count'] = count
if page is not None:
try:
parameters['page'] = int(page)
except ValueError:
raise TwitterError("'page' must be an integer")
if since_id:
parameters['since_id'] = since_id
if retweets:
parameters['include_rts'] = True
if include_entities:
parameters['include_entities'] = True
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def GetUserTimeline(self,
id=None,
user_id=None,
screen_name=None,
since_id=None,
max_id=None,
count=None,
page=None,
include_rts=None,
include_entities=None):
'''Fetch the sequence of public Status messages for a single user.
The twitter.Api instance must be authenticated if the user is private.
Args:
id:
Specifies the ID or screen name of the user for whom to return
the user_timeline. [Optional]
user_id:
Specifies the ID of the user for whom to return the
user_timeline. Helpful for disambiguating when a valid user ID
is also a valid screen name. [Optional]
screen_name:
Specifies the screen name of the user for whom to return the
user_timeline. Helpful for disambiguating when a valid screen
name is also a user ID. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than (that is, older
than) or equal to the specified ID. [Optional]
count:
Specifies the number of statuses to retrieve. May not be
greater than 200. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
include_rts:
If True, the timeline will contain native retweets (if they
exist) in addition to the standard stream of tweets. [Optional]
include_entities:
If True, each tweet will include a node called "entities".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of Status instances, one for each message up to count
'''
parameters = {}
if id:
url = '%s/statuses/user_timeline/%s.json' % (self.base_url, id)
elif user_id:
url = '%s/statuses/user_timeline.json?user_id=%d' % (self.base_url, user_id)
elif screen_name:
url = ('%s/statuses/user_timeline.json?screen_name=%s' % (self.base_url,
screen_name))
elif not self._oauth_consumer:
raise TwitterError("User must be specified if API is not authenticated.")
else:
url = '%s/statuses/user_timeline.json' % self.base_url
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if page:
try:
parameters['page'] = int(page)
except:
raise TwitterError("page must be an integer")
if include_rts:
parameters['include_rts'] = 1
if include_entities:
parameters['include_entities'] = 1
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def GetStatus(self, id):
'''Returns a single status message.
The twitter.Api instance must be authenticated if the
status message is private.
Args:
id:
The numeric ID of the status you are trying to retrieve.
Returns:
A twitter.Status instance representing that status message
'''
try:
if id:
long(id)
except:
raise TwitterError("id must be an long integer")
url = '%s/statuses/show/%s.json' % (self.base_url, id)
json = self._FetchUrl(url)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
def DestroyStatus(self, id):
'''Destroys the status specified by the required ID parameter.
The twitter.Api instance must be authenticated and the
authenticating user must be the author of the specified status.
Args:
id:
The numerical ID of the status you're trying to destroy.
Returns:
A twitter.Status instance representing the destroyed status message
'''
try:
if id:
long(id)
except:
raise TwitterError("id must be an integer")
url = '%s/statuses/destroy/%s.json' % (self.base_url, id)
json = self._FetchUrl(url, post_data={'id': id})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
def PostUpdate(self, status, in_reply_to_status_id=None):
'''Post a twitter status message from the authenticated user.
The twitter.Api instance must be authenticated.
Args:
status:
The message text to be posted.
Must be less than or equal to 140 characters.
in_reply_to_status_id:
The ID of an existing status that the status to be posted is
in reply to. This implicitly sets the in_reply_to_user_id
attribute of the resulting status to the user ID of the
message being replied to. Invalid/missing status IDs will be
ignored. [Optional]
Returns:
A twitter.Status instance representing the message posted.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/statuses/update.json' % self.base_url
if isinstance(status, unicode) or self._input_encoding is None:
u_status = status
else:
u_status = unicode(status, self._input_encoding)
if len(u_status) > CHARACTER_LIMIT:
raise TwitterError("Text must be less than or equal to %d characters. "
"Consider using PostUpdates." % CHARACTER_LIMIT)
data = {'status': status}
if in_reply_to_status_id:
data['in_reply_to_status_id'] = in_reply_to_status_id
json = self._FetchUrl(url, post_data=data)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
def PostUpdates(self, status, continuation=None, **kwargs):
'''Post one or more twitter status messages from the authenticated user.
Unlike api.PostUpdate, this method will post multiple status updates
if the message is longer than 140 characters.
The twitter.Api instance must be authenticated.
Args:
status:
The message text to be posted.
May be longer than 140 characters.
continuation:
The character string, if any, to be appended to all but the
last message. Note that Twitter strips trailing '...' strings
from messages. Consider using the unicode \u2026 character
(horizontal ellipsis) instead. [Defaults to None]
**kwargs:
See api.PostUpdate for a list of accepted parameters.
Returns:
A list of twitter.Status instances representing the messages posted.
'''
results = list()
if continuation is None:
continuation = ''
line_length = CHARACTER_LIMIT - len(continuation)
lines = textwrap.wrap(status, line_length)
for line in lines[0:-1]:
results.append(self.PostUpdate(line + continuation, **kwargs))
results.append(self.PostUpdate(lines[-1], **kwargs))
return results
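# Worked example of the wrapping behaviour above (hypothetical text;
# assumes CHARACTER_LIMIT == 140 and the suggested one-character
# horizontal-ellipsis continuation):
#
#   >>> import textwrap
#   >>> parts = textwrap.wrap('word ' * 60, 140 - len(u'\u2026'))
#   >>> all(len(p) + 1 <= 140 for p in parts)
#   True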
def GetUserRetweets(self, count=None, since_id=None, max_id=None, include_entities=False):
'''Fetch the sequence of retweets made by a single user.
The twitter.Api instance must be authenticated.
Args:
count:
The number of status messages to retrieve. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
include_entities:
If True, each tweet will include a node called "entities".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message up to count
'''
url = '%s/statuses/retweeted_by_me.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if count is not None:
try:
if int(count) > 100:
raise TwitterError("'count' may not be greater than 100")
except ValueError:
raise TwitterError("'count' must be an integer")
if count:
parameters['count'] = count
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if include_entities:
parameters['include_entities'] = True
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def GetReplies(self, since=None, since_id=None, page=None):
'''Get a sequence of status messages representing the 20 most
recent replies (status updates prefixed with @twitterID) to the
authenticating user.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
since:
Narrows the returned results to just those statuses created
after the specified HTTP-formatted date. [Optional]
Returns:
A sequence of twitter.Status instances, one for each reply to the user.
'''
url = '%s/statuses/replies.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since:
parameters['since'] = since
if since_id:
parameters['since_id'] = since_id
if page:
parameters['page'] = page
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def GetRetweets(self, statusid):
'''Returns up to 100 of the first retweets of the tweet identified
by statusid
Args:
statusid:
The ID of the tweet for which retweets should be searched for
Returns:
A list of twitter.Status instances, which are retweets of statusid
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instsance must be authenticated.")
url = '%s/statuses/retweets/%s.json?include_entities=true&include_rts=true' % (self.base_url, statusid)
parameters = {}
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(s) for s in data]
def GetFriends(self, user=None, cursor=-1):
'''Fetch the sequence of twitter.User instances, one for each friend.
The twitter.Api instance must be authenticated.
Args:
user:
The twitter name or id of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
Returns:
A sequence of twitter.User instances, one for each friend
'''
if not user and not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
if user:
url = '%s/statuses/friends/%s.json' % (self.base_url, user)
else:
url = '%s/statuses/friends.json' % self.base_url
parameters = {}
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [User.NewFromJsonDict(x) for x in data['users']]
def GetFriendIDs(self, user=None, cursor=-1):
'''Returns a list of twitter user id's for every person
the specified user is following.
Args:
user:
The id or screen_name of the user to retrieve the id list for
[Optional]
Returns:
A list of integers, one for each user id.
'''
if not user and not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
if user:
url = '%s/friends/ids/%s.json' % (self.base_url, user)
else:
url = '%s/friends/ids.json' % self.base_url
parameters = {}
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return data
def GetFollowerIDs(self, userid=None, cursor=-1):
'''Returns the list of twitter user ids for every person
that is following the specified user.
The twitter.Api instance must be authenticated.
Args:
userid:
The id of the user to retrieve the follower id list for.
[Optional]
cursor:
"page" value for paging through the follower ids.
-1 to start at the beginning. [Optional]
Returns:
The follower id data as returned by Twitter.
'''
url = 'http://twitter.com/followers/ids.json'
parameters = {}
parameters['cursor'] = cursor
if userid:
parameters['user_id'] = userid
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return data
def GetFollowers(self, page=None):
'''Fetch the sequence of twitter.User instances, one for each follower
The twitter.Api instance must be authenticated.
Args:
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
Returns:
A sequence of twitter.User instances, one for each follower
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/statuses/followers.json' % self.base_url
parameters = {}
if page:
parameters['page'] = page
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [User.NewFromJsonDict(x) for x in data]
def GetFeatured(self):
'''Fetch the sequence of twitter.User instances featured on twitter.com
The twitter.Api instance must be authenticated.
Returns:
A sequence of twitter.User instances
'''
url = '%s/statuses/featured.json' % self.base_url
json = self._FetchUrl(url)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [User.NewFromJsonDict(x) for x in data]
def UsersLookup(self, user_id=None, screen_name=None, users=None):
'''Fetch extended information for the specified users.
Users may be specified as lists of user_ids, screen_names,
or twitter.User objects. The list of users that
are queried is the union of all specified parameters.
The twitter.Api instance must be authenticated.
Args:
user_id:
A list of user_ids to retrieve extended information.
[Optional]
screen_name:
A list of screen_names to retrieve extended information.
[Optional]
users:
A list of twitter.User objects to retrieve extended information.
[Optional]
Returns:
A list of twitter.User objects for the requested users
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
if not user_id and not screen_name and not users:
raise TwitterError("Specify at least on of user_id, screen_name, or users.")
url = '%s/users/lookup.json' % self.base_url
parameters = {}
uids = list()
if user_id:
uids.extend(user_id)
if users:
uids.extend([u.id for u in users])
if len(uids):
parameters['user_id'] = ','.join(["%s" % u for u in uids])
if screen_name:
parameters['screen_name'] = ','.join(screen_name)
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [User.NewFromJsonDict(u) for u in data]
def GetUser(self, user):
'''Returns a single user.
The twitter.Api instance must be authenticated.
Args:
user: The twitter name or id of the user to retrieve.
Returns:
A twitter.User instance representing that user
'''
url = '%s/users/show/%s.json' % (self.base_url, user)
json = self._FetchUrl(url)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return User.NewFromJsonDict(data)
def GetDirectMessages(self, since=None, since_id=None, page=None):
'''Returns a list of the direct messages sent to the authenticating user.
The twitter.Api instance must be authenticated.
Args:
since:
Narrows the returned results to just those statuses created
after the specified HTTP-formatted date. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
Returns:
A sequence of twitter.DirectMessage instances
'''
url = '%s/direct_messages.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since:
parameters['since'] = since
if since_id:
parameters['since_id'] = since_id
if page:
parameters['page'] = page
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [DirectMessage.NewFromJsonDict(x) for x in data]
def PostDirectMessage(self, user, text):
'''Post a twitter direct message from the authenticated user
The twitter.Api instance must be authenticated.
Args:
user: The ID or screen name of the recipient user.
text: The message text to be posted. Must be less than 140 characters.
Returns:
A twitter.DirectMessage instance representing the message posted
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/direct_messages/new.json' % self.base_url
data = {'text': text, 'user': user}
json = self._FetchUrl(url, post_data=data)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return DirectMessage.NewFromJsonDict(data)
def DestroyDirectMessage(self, id):
'''Destroys the direct message specified in the required ID parameter.
The twitter.Api instance must be authenticated, and the
authenticating user must be the recipient of the specified direct
message.
Args:
id: The id of the direct message to be destroyed
Returns:
A twitter.DirectMessage instance representing the message destroyed
'''
url = '%s/direct_messages/destroy/%s.json' % (self.base_url, id)
json = self._FetchUrl(url, post_data={'id': id})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return DirectMessage.NewFromJsonDict(data)
def CreateFriendship(self, user):
'''Befriends the user specified in the user parameter as the authenticating user.
The twitter.Api instance must be authenticated.
Args:
user:
The ID or screen name of the user to befriend.
Returns:
A twitter.User instance representing the befriended user.
'''
url = '%s/friendships/create/%s.json' % (self.base_url, user)
json = self._FetchUrl(url, post_data={'user': user})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return User.NewFromJsonDict(data)
def DestroyFriendship(self, user):
'''Discontinues friendship with the user specified in the user parameter.
The twitter.Api instance must be authenticated.
Args:
user:
The ID or screen name of the user with whom to discontinue friendship.
Returns:
A twitter.User instance representing the discontinued friend.
'''
url = '%s/friendships/destroy/%s.json' % (self.base_url, user)
json = self._FetchUrl(url, post_data={'user': user})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return User.NewFromJsonDict(data)
def CreateFavorite(self, status):
'''Favorites the status specified in the status parameter as the authenticating user.
Returns the favorite status when successful.
The twitter.Api instance must be authenticated.
Args:
status:
The twitter.Status instance to mark as a favorite.
Returns:
A twitter.Status instance representing the newly-marked favorite.
'''
url = '%s/favorites/create/%s.json' % (self.base_url, status.id)
json = self._FetchUrl(url, post_data={'id': status.id})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
def DestroyFavorite(self, status):
'''Un-favorites the status specified in the ID parameter as the authenticating user.
Returns the un-favorited status in the requested format when successful.
The twitter.Api instance must be authenticated.
Args:
status:
The twitter.Status to unmark as a favorite.
Returns:
A twitter.Status instance representing the newly-unmarked favorite.
'''
url = '%s/favorites/destroy/%s.json' % (self.base_url, status.id)
json = self._FetchUrl(url, post_data={'id': status.id})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
def GetFavorites(self,
user=None,
page=None):
'''Return a list of Status objects representing favorited tweets.
By default, returns the (up to) 20 most recent tweets for the
authenticated user.
Args:
user:
The twitter name or id of the user whose favorites you are fetching.
If not specified, defaults to the authenticated user. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
'''
parameters = {}
if page:
parameters['page'] = page
if user:
url = '%s/favorites/%s.json' % (self.base_url, user)
elif not user and not self._oauth_consumer:
raise TwitterError("User must be specified if API is not authenticated.")
else:
url = '%s/favorites.json' % self.base_url
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def GetMentions(self,
since_id=None,
max_id=None,
page=None):
'''Returns the 20 most recent mentions (status containing @twitterID)
for the authenticating user.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than
(that is, older than) the specified ID. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
Returns:
A sequence of twitter.Status instances, one for each mention of the user.
'''
url = '%s/statuses/mentions.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if page:
parameters['page'] = page
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def CreateList(self, user, name, mode=None, description=None):
'''Creates a new list with the given name
The twitter.Api instance must be authenticated.
Args:
user:
Twitter name to create the list for
name:
New name for the list
mode:
'public' or 'private'.
Defaults to 'public'. [Optional]
description:
Description of the list. [Optional]
Returns:
A twitter.List instance representing the new list
'''
url = '%s/%s/lists.json' % (self.base_url, user)
parameters = {'name': name}
if mode is not None:
parameters['mode'] = mode
if description is not None:
parameters['description'] = description
json = self._FetchUrl(url, post_data=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return List.NewFromJsonDict(data)
def DestroyList(self, user, id):
'''Destroys the list from the given user
The twitter.Api instance must be authenticated.
Args:
user:
The user to remove the list from.
id:
The slug or id of the list to remove.
Returns:
A twitter.List instance representing the removed list.
'''
url = '%s/%s/lists/%s.json' % (self.base_url, user, id)
json = self._FetchUrl(url, post_data={'_method': 'DELETE'})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return List.NewFromJsonDict(data)
def CreateSubscription(self, owner, list):
'''Creates a subscription to a list by the authenticated user
The twitter.Api instance must be authenticated.
Args:
owner:
User name or id of the owner of the list being subscribed to.
list:
The slug or list id to subscribe the user to
Returns:
A twitter.List instance representing the list subscribed to
'''
url = '%s/%s/%s/subscribers.json' % (self.base_url, owner, list)
json = self._FetchUrl(url, post_data={'list_id': list})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return List.NewFromJsonDict(data)
def DestroySubscription(self, owner, list):
'''Destroys the subscription to a list for the authenticated user
The twitter.Api instance must be authenticated.
Args:
owner:
The user id or screen name of the user that owns the
list that is to be unsubscribed from
list:
The slug or list id of the list to unsubscribe from
Returns:
A twitter.List instance representing the removed list.
'''
url = '%s/%s/%s/subscribers.json' % (self.base_url, owner, list)
json = self._FetchUrl(url, post_data={'_method': 'DELETE', 'list_id': list})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return List.NewFromJsonDict(data)
def GetSubscriptions(self, user, cursor=-1):
'''Fetch the sequence of Lists that the given user is subscribed to
The twitter.Api instance must be authenticated.
Args:
user:
The twitter name or id of the user
cursor:
"page" value that Twitter will use to start building the
list sequence from. -1 to start at the beginning.
Twitter will return in the result the values for next_cursor
and previous_cursor. [Optional]
Returns:
A sequence of twitter.List instances, one for each list
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/%s/lists/subscriptions.json' % (self.base_url, user)
parameters = {}
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [List.NewFromJsonDict(x) for x in data['lists']]
def GetLists(self, user, cursor=-1):
'''Fetch the sequence of lists for a user.
The twitter.Api instance must be authenticated.
Args:
user:
The twitter name or id of the user whose friends you are fetching.
If the passed in user is the same as the authenticated user
then you will also receive private list data.
cursor:
"page" value that Twitter will use to start building the
list sequence from. -1 to start at the beginning.
Twitter will return in the result the values for next_cursor
and previous_cursor. [Optional]
Returns:
A sequence of twitter.List instances, one for each list
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/%s/lists.json' % (self.base_url, user)
parameters = {}
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [List.NewFromJsonDict(x) for x in data['lists']]
def GetUserByEmail(self, email):
'''Returns a single user by email address.
Args:
email:
The email of the user to retrieve.
Returns:
A twitter.User instance representing that user
'''
url = '%s/users/show.json?email=%s' % (self.base_url, email)
json = self._FetchUrl(url)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return User.NewFromJsonDict(data)
def VerifyCredentials(self):
'''Returns a twitter.User instance if the authenticating user is valid.
Returns:
A twitter.User instance representing that user if the
credentials are valid, None otherwise.
'''
if not self._oauth_consumer:
raise TwitterError("Api instance must first be given user credentials.")
url = '%s/account/verify_credentials.json' % self.base_url
try:
json = self._FetchUrl(url, no_cache=True)
except urllib2.HTTPError, http_error:
if http_error.code == httplib.UNAUTHORIZED:
return None
else:
raise http_error
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return User.NewFromJsonDict(data)
def SetCache(self, cache):
'''Override the default cache. Set to None to prevent caching.
Args:
cache:
An instance that supports the same API as the twitter._FileCache
'''
if cache == DEFAULT_CACHE:
self._cache = _FileCache()
else:
self._cache = cache
def SetUrllib(self, urllib):
'''Override the default urllib implementation.
Args:
urllib:
An instance that supports the same API as the urllib2 module
'''
self._urllib = urllib
def SetCacheTimeout(self, cache_timeout):
'''Override the default cache timeout.
Args:
cache_timeout:
Time, in seconds, that responses should be reused.
'''
self._cache_timeout = cache_timeout
def SetUserAgent(self, user_agent):
'''Override the default user agent
Args:
user_agent:
A string that should be sent to the server as the User-agent
'''
self._request_headers['User-Agent'] = user_agent
def SetXTwitterHeaders(self, client, url, version):
'''Set the X-Twitter HTTP headers that will be sent to the server.
Args:
client:
The client name as a string. Will be sent to the server as
the 'X-Twitter-Client' header.
url:
The URL of the meta.xml as a string. Will be sent to the server
as the 'X-Twitter-Client-URL' header.
version:
The client version as a string. Will be sent to the server
as the 'X-Twitter-Client-Version' header.
'''
self._request_headers['X-Twitter-Client'] = client
self._request_headers['X-Twitter-Client-URL'] = url
self._request_headers['X-Twitter-Client-Version'] = version
def SetSource(self, source):
'''Suggest the "from source" value to be displayed on the Twitter web site.
The value of the 'source' parameter must be first recognized by
the Twitter server. New source values are authorized on a case by
case basis by the Twitter development team.
Args:
source:
The source name as a string. Will be sent to the server as
the 'source' parameter.
'''
self._default_params['source'] = source
def GetRateLimitStatus(self):
'''Fetch the rate limit status for the currently authorized user.
Returns:
A dictionary containing the time the limit will reset (reset_time),
the number of remaining hits allowed before the reset (remaining_hits),
the number of hits allowed in a 60-minute period (hourly_limit), and
the time of the reset in seconds since The Epoch (reset_time_in_seconds).
'''
url = '%s/account/rate_limit_status.json' % self.base_url
json = self._FetchUrl(url, no_cache=True)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return data
def MaximumHitFrequency(self):
'''Determines the minimum number of seconds that a program must wait
before hitting the server again without exceeding the rate_limit
imposed for the currently authenticated user.
Returns:
The minimum second interval that a program must use so as to not
exceed the rate_limit imposed for the user.
'''
rate_status = self.GetRateLimitStatus()
reset_time = rate_status.get('reset_time', None)
limit = rate_status.get('remaining_hits', None)
if reset_time and limit:
# put the reset time into a datetime object; [:6] keeps only the
# year..second fields so the weekday is not passed as a microsecond value
reset = datetime.datetime(*rfc822.parsedate(reset_time)[:6])
# find the difference in time between now and the reset time + 1 hour
delta = reset + datetime.timedelta(hours=1) - datetime.datetime.utcnow()
# determine the minimum number of seconds allowed as a regular interval
max_frequency = int(delta.seconds / limit)
# return the number of seconds
return max_frequency
return 0
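# Worked example for the interval above (hypothetical numbers): with 150
# remaining hits and a reset 30 minutes away, delta is roughly 90 minutes
# (the reset time plus the extra hour added above), so the suggested
# polling interval is int(5400 / 150) == 36 seconds.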
def _BuildUrl(self, url, path_elements=None, extra_params=None):
# Break the url into its constituent parts
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
# Add any additional path elements to the path
if path_elements:
# Filter out the path elements that have a value of None
p = [i for i in path_elements if i]
if not path.endswith('/'):
path += '/'
path += '/'.join(p)
# Add any additional query parameters to the query string
if extra_params and len(extra_params) > 0:
extra_query = self._EncodeParameters(extra_params)
# Add it to the existing query
if query:
query += '&' + extra_query
else:
query = extra_query
# Return the rebuilt URL
return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
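# Illustrative call (hypothetical id; an Api instance built with no
# credentials is enough to exercise this helper):
#
#   >>> Api()._BuildUrl('https://api.twitter.com/1',
#   ...                 path_elements=['statuses', 'show'],
#   ...                 extra_params={'id': 123})
#   'https://api.twitter.com/1/statuses/show?id=123'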
def _InitializeRequestHeaders(self, request_headers):
if request_headers:
self._request_headers = request_headers
else:
self._request_headers = {}
def _InitializeUserAgent(self):
user_agent = 'Python-urllib/%s (python-twitter/%s)' % \
(self._urllib.__version__, __version__)
self.SetUserAgent(user_agent)
def _InitializeDefaultParameters(self):
self._default_params = {}
def _DecompressGzippedResponse(self, response):
raw_data = response.read()
if response.headers.get('content-encoding', None) == 'gzip':
url_data = gzip.GzipFile(fileobj=StringIO.StringIO(raw_data)).read()
else:
url_data = raw_data
return url_data
def _Encode(self, s):
if self._input_encoding:
return unicode(s, self._input_encoding).encode('utf-8')
else:
return unicode(s).encode('utf-8')
def _EncodeParameters(self, parameters):
'''Return a string in key=value&key=value form
Values of None are not included in the output string.
Args:
parameters:
A dict of (key, value) pairs, where each value is encoded as
specified by self._input_encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if parameters is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
def _EncodePostData(self, post_data):
'''Return a string in key=value&key=value form
Values are assumed to be encoded in the format specified by self._input_encoding,
and are subsequently URL encoded.
Args:
post_data:
A dict of (key, value) pairs, where each value is encoded as
specified by self._input_encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if post_data is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
def _CheckForTwitterError(self, data):
"""Raises a TwitterError if twitter returns an error message.
Args:
data:
A python dict created from the Twitter json response
Raises:
TwitterError wrapping the twitter error message if one exists.
"""
# Twitter errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'error' in data:
raise TwitterError(data['error'])
def _FetchUrl(self,
url,
post_data=None,
parameters=None,
no_cache=None,
use_gzip_compression=None):
'''Fetch a URL, optionally caching for a specified time.
Args:
url:
The URL to retrieve
post_data:
A dict of (str, unicode) key/value pairs.
If set, POST will be used.
parameters:
A dict whose key/value pairs should encoded and added
to the query string. [Optional]
no_cache:
If true, overrides the cache on the current request
use_gzip_compression:
If True, tells the server to gzip-compress the response.
It does not apply to POST requests.
Defaults to None, which will get the value to use from
the instance variable self._use_gzip [Optional]
Returns:
A string containing the body of the response.
'''
# Set a timeout if needed
if self._timeout != 0:
socketTimeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(self._timeout)
# Build the extra parameters dict
extra_params = {}
if self._default_params:
extra_params.update(self._default_params)
if parameters:
extra_params.update(parameters)
if post_data:
http_method = "POST"
else:
http_method = "GET"
if self._debugHTTP:
_debug = 1
else:
_debug = 0
http_handler = self._urllib.HTTPHandler(debuglevel=_debug)
https_handler = self._urllib.HTTPSHandler(debuglevel=_debug)
proxy_handler = self._urllib.ProxyHandler(self._proxy)
opener = self._urllib.OpenerDirector()
opener.add_handler(http_handler)
opener.add_handler(https_handler)
if self._proxy:
opener.add_handler(proxy_handler)
if use_gzip_compression is None:
use_gzip = self._use_gzip
else:
use_gzip = use_gzip_compression
# Set up compression
if use_gzip and not post_data:
opener.addheaders.append(('Accept-Encoding', 'gzip'))
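# When oAuth credentials are configured, sign the request; for POSTs the
# form fields are folded into the oAuth signature base string, and the
# signed request body is then taken from req.to_postdata() below.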
if self._oauth_consumer is not None:
if post_data and http_method == "POST":
parameters = post_data.copy()
req = oauth.Request.from_consumer_and_token(self._oauth_consumer,
token=self._oauth_token,
http_method=http_method,
http_url=url, parameters=parameters)
req.sign_request(self._signature_method_hmac_sha1, self._oauth_consumer, self._oauth_token)
headers = req.to_header()
if http_method == "POST":
encoded_post_data = req.to_postdata()
else:
encoded_post_data = None
url = req.to_url()
else:
url = self._BuildUrl(url, extra_params=extra_params)
encoded_post_data = self._EncodePostData(post_data)
# Open and return the URL immediately if we're not going to cache
if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
response = opener.open(url, encoded_post_data)
url_data = self._DecompressGzippedResponse(response)
opener.close()
else:
# Unique keys are a combination of the url and the oAuth Consumer Key
if self._consumer_key:
key = self._consumer_key + ':' + url
else:
key = url
# See if it has been cached before
last_cached = self._cache.GetCachedTime(key)
# If the cached version is outdated then fetch another and store it
if not last_cached or time.time() >= last_cached + self._cache_timeout:
try:
response = opener.open(url, encoded_post_data)
url_data = self._DecompressGzippedResponse(response)
self._cache.Set(key, url_data)
except urllib2.HTTPError, e:
print e
# fall back to any previously cached copy so url_data is always
# bound; it may still be None if nothing was cached for this key
url_data = self._cache.Get(key)
opener.close()
else:
url_data = self._cache.Get(key)
# Restore the original socket timeout
if self._timeout != 0:
socket.setdefaulttimeout(socketTimeout)
# Always return the latest version
return url_data
class _FileCacheError(Exception):
'''Base exception class for FileCache related errors'''
class _FileCache(object):
DEPTH = 3
def __init__(self,root_directory=None):
self._InitializeRootDirectory(root_directory)
def Get(self,key):
path = self._GetPath(key)
if os.path.exists(path):
return open(path).read()
else:
return None
def Set(self,key,data):
path = self._GetPath(key)
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
if not os.path.isdir(directory):
raise _FileCacheError('%s exists but is not a directory' % directory)
temp_fd, temp_path = tempfile.mkstemp()
temp_fp = os.fdopen(temp_fd, 'w')
temp_fp.write(data)
temp_fp.close()
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory))
if os.path.exists(path):
os.remove(path)
os.rename(temp_path, path)
def Remove(self,key):
path = self._GetPath(key)
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory ))
if os.path.exists(path):
os.remove(path)
def GetCachedTime(self,key):
path = self._GetPath(key)
if os.path.exists(path):
return os.path.getmtime(path)
else:
return None
def _GetUsername(self):
'''Attempt to find the username in a cross-platform fashion.'''
try:
return os.getenv('USER') or \
os.getenv('LOGNAME') or \
os.getenv('USERNAME') or \
os.getlogin() or \
'nobody'
except (IOError, OSError), e:
return 'nobody'
def _GetTmpCachePath(self):
username = self._GetUsername()
cache_directory = 'python.cache_' + username
return os.path.join(tempfile.gettempdir(), cache_directory)
def _InitializeRootDirectory(self, root_directory):
if not root_directory:
root_directory = self._GetTmpCachePath()
root_directory = os.path.abspath(root_directory)
if not os.path.exists(root_directory):
os.mkdir(root_directory)
if not os.path.isdir(root_directory):
raise _FileCacheError('%s exists but is not a directory' %
root_directory)
self._root_directory = root_directory
def _GetPath(self,key):
try:
hashed_key = md5(key).hexdigest()
except TypeError:
hashed_key = md5.new(key).hexdigest()
return os.path.join(self._root_directory,
self._GetPrefix(hashed_key),
hashed_key)
def _GetPrefix(self,hashed_key):
return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
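# Minimal usage sketch for the cache above (illustrative; entries are
# stored as files sharded DEPTH directory levels deep under a per-user
# temp directory):
#
#   >>> cache = _FileCache()
#   >>> cache.Set('some-key', 'payload')
#   >>> cache.Get('some-key')
#   'payload'
#   >>> cache.Remove('some-key')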
|
py | 7dfe96d973b0f082828f5d582483ab062169cb75 | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test suite to ensure the json data is validated correctly against the published JSONSchemas."""
import copy
from http import HTTPStatus
from registry_schemas.example_data import ANNUAL_REPORT
from legal_api.services.filings.validations import schemas
def test_validate_schema_good_ar(app):
"""Assert that a valid filing passes validation."""
# validate_schema(json_data: Dict = None) -> Tuple(int, str):
with app.app_context():
err = schemas.validate_against_schema(ANNUAL_REPORT)
assert not err
def test_validate_schema_bad_ar(app):
"""Assert that an invalid AR returns an error."""
# validate_schema(json_data: Dict = None) -> Tuple(int, str):
ar = copy.deepcopy(ANNUAL_REPORT)
ar['filing']['header'].pop('name')
with app.app_context():
err = schemas.validate_against_schema(ar)
assert err.msg == [{'error': "'name' is a required property", 'path': 'filing/header'}]
assert err.code == HTTPStatus.UNPROCESSABLE_ENTITY
|
py | 7dfe96f663be962eccb8e14fdc8187e7af538879 | import threading
import time
import pytest
from hallo.server import Server
from hallo.server_irc import ServerIRC
@pytest.mark.slow
def test_server_race_cancel_failing_connection(hallo_getter):
test_hallo = hallo_getter({})
# Create a server
server = ServerIRC(test_hallo, "example", "example.com", 80)
test_hallo.add_server(server)
server.start()
# Disconnect a server
server.disconnect()
# Check it's closed
assert server.state == Server.STATE_CLOSED
# Wait a bit
time.sleep(5)
# Check it's still closed
assert server.state == Server.STATE_CLOSED
@pytest.mark.external_integration
def test_server_race_connect_delay_disconnect(hallo_getter):
test_hallo = hallo_getter({})
# Create a server
server = ServerIRC(test_hallo, "freenode", "irc.freenode.net", 6667)
test_hallo.add_server(server)
server.start()
# Delay
time.sleep(1)
test_hallo.open = False
# Disconnect a server
server.disconnect()
# Check it's closed
assert server.state == Server.STATE_CLOSED
# Wait a bit
time.sleep(5)
# Check it's still closed
assert server.state == Server.STATE_CLOSED
@pytest.mark.external_integration
def test_server_race_connect_disconnect(hallo_getter):
test_hallo = hallo_getter({})
# Create a server
server = ServerIRC(test_hallo, "freenode", "irc.freenode.net", 6667)
test_hallo.add_server(server)
server.start()
# Disconnect a server
server.disconnect()
# Check it's closed
assert server.state == Server.STATE_CLOSED
# Wait a bit
time.sleep(5)
# Check it's still closed
assert server.state == Server.STATE_CLOSED
@pytest.mark.slow
def test_server_race_bulk_connect_fail(hallo_getter):
test_hallo = hallo_getter({})
# Create ten servers
for x in range(10):
new_server_obj = ServerIRC(test_hallo, "example" + str(x), "example.com", 80)
new_server_obj.set_auto_connect(True)
new_server_obj.nick = "hallo"
new_server_obj.prefix = None
test_hallo.add_server(new_server_obj)
# Connect to the new server object.
new_server_obj.start()
# Wait a moment
time.sleep(1)
# Disconnect them all
for server in test_hallo.server_list:
server.disconnect()
# Wait a couple seconds
time.sleep(5)
# Ensure they're all still closed
for server in test_hallo.server_list:
assert not server.is_connected()
@pytest.mark.external_integration
def test_server_thread_killed_after_disconnect(hallo_getter):
test_hallo = hallo_getter({})
thread_count = threading.active_count()
# Create a server
server = ServerIRC(test_hallo, "freenode", "irc.freenode.net", 6667)
test_hallo.add_server(server)
server.start()
# Delay
time.sleep(1)
# Disconnect a server
server.disconnect()
# Delay
time.sleep(1)
# Check thread count is back to the start count
assert threading.active_count() == thread_count
# Check it's closed
assert server.state == Server.STATE_CLOSED
|
py | 7dfe97bc3eb68e3cab98918efa87cefacab365ab | import numpy as np
import tensorflow as tf
from kerod.core.matcher import Matcher, hungarian_matching
def test_matcher():
similarity = np.array([
[[1., 1, 1, 3, 1], [2, -1, 2, 0, 4]],
[[1., 0.1, 1, 3, 0], [8, 0.4, 2, 0, 0.2]],
])
num_valid_boxes = np.array([[2], [2]], np.int32)
matcher = Matcher([0.3, 0.5], [0, -1, 1])
matches, match_labels = matcher(similarity, num_valid_boxes)
expected_matched = np.array([[1, 0, 1, 0, 1], [1, 1, 1, 0, 1]])
expected_matched_labels = np.array([[1, 1, 1, 1, 1], [1, -1, 1, 1, 0]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
def test_matcher_low_quality_matcher():
num_valid_boxes = np.array([[3], [3]], np.int32)
similarity = np.array([[
[0, 0.2, 0.49, 0.1, 0.3],
[2, -1, 0.2, 4, 0.38],
[1, 0.25, 0.3, 5, 0.37],
[0, 0, 0, 0, 0.50], # This line is not valid and should be discarded (num_valid_boxes = 3)
], [
[1, 0.3, 1, 3, 0],
[8, 0.4, 2, 0, 0.2],
[0, -1, 0.2, 0.1, 0.39],
[0, 0, 0, 0, 0], # This line is not valid and should be discarded (num_valid_boxes = 3)
]]) # yapf: disable
matcher = Matcher([0.3, 0.5], [0, -1, 1], allow_low_quality_matches=True)
matches, match_labels = matcher(similarity, num_valid_boxes)
expected_matched = np.array([[1, 2, 0, 2, 3], [1, 1, 1, 0, 2]])
# if allow_low_quality_matches was False
# [[1, 0, -1, 1, -1], [1, -1, 1, 1, 0]]
expected_matched_labels = np.array([[1, 0, 1, 1, -1], [1, -1, 1, 1, 1]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
def test_matcher_low_quality_matcher_when_the_best_box_is_undefined():
num_valid_boxes = np.array([[4], [3]], np.int32)
similarity = np.array([[
[0, 0.31, 0, 0, 0],
[0.1, 0, 0, 0, 0],
[0, 0, 0.32, 0, 0],
[0, 0, 0, 0, 0.48],
], [
[1, 0.3, 1, 3, 0],
[8, 0.4, 2, 0, 0.2],
[0, -1, 0.2, 0.1, 0.39],
[0, 0, 0, 0, 0.31],
]]) # yapf: disable
expected_matched = np.array([[1, 0, 2, 0, 3], [1, 1, 1, 0, 2]])
matcher = Matcher([0.3, 0.5], [0, -1, 1], allow_low_quality_matches=False)
matches, match_labels = matcher(similarity, num_valid_boxes)
expected_matched_labels = np.array([[0, -1, -1, 0, -1], [1, -1, 1, 1, -1]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
matcher = Matcher([0.3, 0.5], [0, -1, 1], allow_low_quality_matches=True)
matches, match_labels = matcher(similarity, num_valid_boxes)
    # Explanation of the expectations for batch[0]
# 0 -> 1 because anchor 0 has the highest IoU with gt 1 so it becomes a low quality match
# -1 -> 1 because anchor 1 has the highest IoU with gt 0 => low quality match
# -1 -> 1 because anchor 2 has the highest IoU with gt 2 => low quality match
# 0 = 0 because anchor 3 isn't close enough to any groundtruths
# -1 -> 1 because anchor 4 has the highest IoU with gt 3 => low quality match
expected_matched_labels = np.array([[1, 1, 1, 0, 1], [1, -1, 1, 1, 1]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
def test_matcher_low_quality_matcher_with_one_ground_truth():
num_valid_boxes = np.array([[1]], np.int32)
similarity = np.array([[
[0, 0.31, 0, 0, 0],
]])
expected_matched = np.array([[0, 0, 0, 0, 0]])
matcher = Matcher([0.3, 0.5], [0, -1, 1], allow_low_quality_matches=False)
matches, match_labels = matcher(similarity, num_valid_boxes)
expected_matched_labels = np.array([[0, -1, 0, 0, 0]])
assert match_labels.shape == (1, 5)
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
matcher = Matcher([0.3, 0.5], [0, -1, 1], allow_low_quality_matches=True)
matches, match_labels = matcher(similarity, num_valid_boxes)
expected_matched_labels = np.array([[0, 1, 0, 0, 0]])
assert match_labels.shape == (1, 5)
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
def test_matcher_with_one_threshold():
similarity = np.array([
[[1., 1, 1, 3, 1], [2, -1, 2, 0, 4]],
[[1., 0.1, 1, 3, 0], [8, 0.4, 2, 0, 0.2]],
])
num_valid_boxes = np.array([[2], [2]], np.int32)
matcher = Matcher([0.5], [0, 1])
matches, match_labels = matcher(similarity, num_valid_boxes)
expected_matched = np.array([[1, 0, 1, 0, 1], [1, 1, 1, 0, 1]])
expected_matched_labels = np.array([[1, 1, 1, 1, 1], [1, 0, 1, 1, 0]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
def test_hungarian_matcher_compute_cost_matrix():
similarity = np.array([
[
[0., 1, 1, 3, -1],
[2, -1, 2, 0, 4],
[0.5, 2, 2.3, 0.25, 4],
],
[
[1., 0.15, 1, 0.3, 0.16],
[0.2, 0.1, 2, 0.1, 0.2],
[0.3, 0.4, 2, 0, 0.2],
],
])
num_valid_boxes = np.array([[3], [1]], np.int32)
matches, match_labels = hungarian_matching(similarity, num_valid_boxes)
expected_matched = np.array([[0, 1, 0, 2, 0], [1, 0, 2, 0, 0]])
    # For the second batch, notice that the number of valid boxes is 1 instead of 3.
    # It means that ground-truth rows 1 and 2 are padding, so anchors matched to
    # them are flagged with -1.
expected_matched_labels = np.array([[0, 1, 0, 1, 1], [-1, 1, -1, 0, 0]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
# In this part we show that the linear assignment is not the same
    # when there are no padded ground truths
num_valid_boxes = np.array([[3], [3]], np.int32)
matches, match_labels = hungarian_matching(similarity, num_valid_boxes)
expected_matched = np.array([[0, 1, 0, 2, 0], [0, 1, 0, 2, 0]])
    # Here all three ground truths are valid in both batches, so nothing is
    # flagged as padding (-1) and both batches yield the same assignment.
expected_matched_labels = np.array([[0, 1, 0, 1, 1], [0, 1, 0, 1, 1]])
np.testing.assert_array_equal(matches, expected_matched)
np.testing.assert_array_equal(match_labels, expected_matched_labels)
|
py | 7dfe985593a8641cbc797d00ff486247afdfaba3 | import os
ls=["python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_0_ImageCompression.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_1_ImageCompression.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_2_ImageCompression.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_3_ImageCompression.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_4_ImageCompression.yml",
]
for l in ls:
os.system(l) |
py | 7dfe9a0262ac9c6d9b5508eba30cb9adb0857c03 | # coding: utf-8
from chainer import Chain
from chainer.backends import cuda
from chainer.initializers import HeNormal
import chainer.functions as F
import chainer.links as L
from architecture import ASPP, PixelShuffler
from spectral_norms import define_conv
from atrous_conv import define_atrous_conv
class ResNetDeepLab(Chain):
def __init__(self, opt):
super().__init__()
he_w = HeNormal()
with self.init_scope():
            # This ResNet101 uses a pre-trained caffemodel that can be downloaded at GitHub
# <https://github.com/KaimingHe/deep-residual-networks>.
self.resnet101 = L.ResNet101Layers()
self.use_layer = ('res3', 512)
nf = self.use_layer[1]
self.c1 = define_atrous_conv(opt)(nf, nf, ksize=3, rate=2, initialW=he_w)
self.norm1 = L.BatchNormalization(nf)
self.c2 = define_atrous_conv(opt)(nf, nf, ksize=3, rate=4, initialW=he_w)
self.norm2 = L.BatchNormalization(nf)
self.aspp = ASPP(opt, nf, input_resolution=32)
self.up1 = PixelShuffler(opt, nf, nf // 2, rate=2) # 32 -> 64
self.up2 = PixelShuffler(opt, nf // 2, nf // 4, rate=2) # 64 -> 128
self.up3 = PixelShuffler(opt, nf // 4, nf // 8, rate=2) # 128 -> 256
self.to_class = define_conv(opt)(nf // 8, opt.class_num, ksize=3, pad=1, initialW=he_w)
self.activation = F.leaky_relu
def prepare(self, variable_img):
# out = F.resize_images(variable_img, (224, 224))
out = variable_img
# out = (out + 1) * 0.5
out = out[:, ::-1, :, :]
out = F.transpose(out, (0, 2, 3, 1))
out *= 255
xp = cuda.get_array_module(variable_img.array)
out -= xp.array([103.063, 115.903, 123.152], dtype=variable_img.dtype)
out = F.transpose(out, (0, 3, 1, 2))
return out
def __call__(self, x):
x = self.prepare(x)
h = self.resnet101(x, [self.use_layer[0]])[self.use_layer[0]]
h = self.activation(h)
h = self.c1(h)
h = self.norm1(h)
h = self.activation(h)
h = self.c2(h)
h = self.norm2(h)
h = self.activation(h)
h = self.aspp(h)
h = self.up1(h)
h = self.activation(h)
h = self.up2(h)
h = self.activation(h)
h = self.up3(h)
h = self.activation(h)
out = self.to_class(h)
out = F.softmax(out, axis=1)
return out
class DilatedFCN(Chain):
def __init__(self, opt):
super().__init__()
he_w = HeNormal()
down_sampling_num = 3
ngf = opt.ngf
with self.init_scope():
# [input] 3 x 256 x 256
self.c1 = define_conv(opt)(opt.img_shape[0], ngf, ksize=4, stride=2, pad=1, initialW=he_w)
self.norm1 = L.BatchNormalization(ngf)
# [input] ngf x 128 x 128
self.c2 = define_conv(opt)(ngf, ngf * 2, ksize=4, stride=2, pad=1, initialW=he_w)
self.norm2 = L.BatchNormalization(ngf * 2)
# [input] ngf*2 x 64 x 64
self.c3 = define_conv(opt)(ngf * 2, ngf * 4, ksize=4, stride=2, pad=1, initialW=he_w)
self.norm3 = L.BatchNormalization(ngf * 4)
# [input] ngf*4 x 32 x 32
self.a1 = define_atrous_conv(opt)(ngf * 4, ngf * 4, ksize=3, rate=2, initialW=he_w)
self.norm4 = L.BatchNormalization(ngf * 4)
# [input] ngf*4 x 32 x 32
self.a2 = define_atrous_conv(opt)(ngf * 4, ngf * 4, ksize=3, rate=4, initialW=he_w)
self.norm5 = L.BatchNormalization(ngf * 4)
# [input] ngf*4 x 32 x 32
resolution = max(opt.img_shape[1], opt.img_shape[2]) // 2 ** down_sampling_num
self.aspp = ASPP(opt, ngf * 4, input_resolution=resolution)
# [input] ngf*4 x 32 x 32
self.up1 = PixelShuffler(opt, ngf * 4, ngf * 2, rate=2) # 64 -> 128
self.up2 = PixelShuffler(opt, ngf * 2, ngf, rate=2) # 128 -> 256
self.to_class = define_conv(opt)(ngf, opt.class_num, ksize=3, pad=1, initialW=he_w)
# [output] class_num x 256 x 256
self.activation = F.relu
def __call__(self, x):
h = self.c1(x)
h = self.norm1(h)
h = self.activation(h)
h = self.c2(h)
h = self.norm2(h)
h = self.activation(h)
h = self.c3(h)
h = self.norm3(h)
h = self.activation(h)
h = self.a1(h)
h = self.norm4(h)
h = self.activation(h)
h = self.a2(h)
h = self.norm5(h)
h = self.activation(h)
h = self.aspp(h)
h = self.up1(h)
h = self.activation(h)
h = self.up2(h)
h = self.activation(h)
out = self.to_class(h)
out = F.softmax(out, axis=1)
return out
class UNet(Chain):
def __init__(self, opt):
super().__init__()
he_w = HeNormal()
ngf = opt.ngf
with self.init_scope():
# Encoder
# [input] 3 x 256 x 256
self.e1 = define_conv(opt)(opt.input_ch, ngf, ksize=3, stride=1, pad=1, initialW=he_w)
self.e1_bn = L.BatchNormalization(ngf)
# [input] ngf x 256 x 256
self.e2 = define_conv(opt)(ngf, ngf * 2, ksize=4, stride=2, pad=1, initialW=he_w)
self.e2_bn = L.BatchNormalization(ngf * 2)
# [input] ngf*2 x 128 x 128
self.e3 = define_conv(opt)(ngf * 2, ngf * 4, ksize=4, stride=2, pad=1, initialW=he_w)
self.e3_bn = L.BatchNormalization(ngf * 4)
# [input] ngf*4 x 64 x 64
self.e4 = define_conv(opt)(ngf * 4, ngf * 8, ksize=4, stride=2, pad=1, initialW=he_w)
self.e4_bn = L.BatchNormalization(ngf * 8)
            # [input] ngf*8 x 32 x 32
self.e5 = define_conv(opt)(ngf * 8, ngf * 16, ksize=4, stride=2, pad=1, initialW=he_w)
self.e5_bn = L.BatchNormalization(ngf * 16)
# Decoder
# [input] ngf*16 x 16 x 16
self.d1 = L.Deconvolution2D(ngf * 16, ngf * 8, ksize=4, stride=2, pad=1, initialW=he_w)
self.d1_bn = L.BatchNormalization(ngf * 8)
# [input] ngf*8*2 x 32 x 32 (concat)
self.d2 = L.Deconvolution2D(ngf * 8 * 2, ngf * 4, ksize=4, stride=2, pad=1, initialW=he_w)
self.d2_bn = L.BatchNormalization(ngf * 4)
# [input] ngf*4*2 x 64 x 64 (concat)
self.d3 = L.Deconvolution2D(ngf * 4 * 2, ngf * 2, ksize=4, stride=2, pad=1, initialW=he_w)
self.d3_bn = L.BatchNormalization(ngf * 2)
# [input] ngf*2*2 x 128 x 128 (concat)
self.d4 = L.Deconvolution2D(ngf * 2 * 2, ngf, ksize=4, stride=2, pad=1, initialW=he_w)
self.d4_bn = L.BatchNormalization(ngf)
# [input] ngf x 256 x 256
self.to_class = define_conv(opt)(ngf, opt.nclass, ksize=3, pad=1, initialW=he_w)
# [output] nclass x 256 x 256
self.activation = F.relu
def __call__(self, x):
# Encoder
eh1 = self.e1(x)
eh1 = self.e1_bn(eh1)
eh1 = self.activation(eh1)
eh2 = self.e2(eh1)
eh2 = self.e2_bn(eh2)
eh2 = self.activation(eh2)
eh3 = self.e3(eh2)
eh3 = self.e3_bn(eh3)
eh3 = self.activation(eh3)
eh4 = self.e4(eh3)
eh4 = self.e4_bn(eh4)
eh4 = self.activation(eh4)
eh5 = self.e5(eh4)
eh5 = self.e5_bn(eh5)
eh5 = self.activation(eh5)
# Decoder
dh1 = self.d1(eh5)
dh1 = self.d1_bn(dh1)
dh1 = F.dropout(dh1)
dh1 = self.activation(dh1)
dh2 = F.concat((eh4, dh1), axis=1)
dh2 = self.d2(dh2)
dh2 = self.d2_bn(dh2)
dh2 = F.dropout(dh2)
dh2 = self.activation(dh2)
dh3 = F.concat((eh3, dh2), axis=1)
dh3 = self.d3(dh3)
dh3 = self.d3_bn(dh3)
dh3 = F.dropout(dh3)
dh3 = self.activation(dh3)
dh4 = F.concat((eh2, dh3), axis=1)
dh4 = self.d4(dh4)
dh4 = self.d4_bn(dh4)
dh4 = self.activation(dh4)
out = self.to_class(dh4)
out = F.softmax(out, axis=1)
return out
|
py | 7dfe9a38f27025f796786f0cdccdd4d4ceb0f4c3 | import tensorflow as tf
models = tf.keras.models # like 'from tensorflow.keras import models' (PyCharm import issue workaround)
layers = tf.keras.layers # like 'from tensorflow.keras import layers' (PyCharm import issue workaround)
def build_CNN(input_shape):
"""
Return a simple CNN model for image classification.
:param input_shape: image input shape (tuple), e.g. (28, 28, 1)
:return:
        model uncompiled tensorflow Keras model (call compile() before training)
"""
print("Setting up CNN")
# Set up model type
model = models.Sequential(name='CNN')
# Add layers
model.add(layers.Conv2D(filters=32, kernel_size=(5, 5), input_shape=input_shape, padding='same', strides=(2, 2),
name='conv2d_0_global'))
model.add(layers.BatchNormalization(name='batch_norm_0_global'))
model.add(layers.ReLU(name='relu_0_global'))
model.add(layers.Conv2D(filters=64, kernel_size=(5, 5), padding='same', strides=(2, 2), name='conv2d_1_global'))
model.add(layers.BatchNormalization(name='batch_norm_1_global'))
model.add(layers.ReLU(name='relu_1_global'))
model.add(layers.Conv2D(filters=128, kernel_size=(5, 5), padding='same', strides=(2, 2), name='conv2d_2_global'))
model.add(layers.BatchNormalization(name='batch_norm_2_global'))
model.add(layers.ReLU(name='relu_2_global'))
model.add(layers.MaxPooling2D(name='max_pool_2_global'))
model.add(layers.Flatten(name='flatten_0_local'))
model.add(layers.Dense(units=128, name='dense_0_local'))
model.add(layers.BatchNormalization(name='batch_norm_3_local'))
model.add(layers.ReLU(name='relu_3_local'))
model.add(layers.Dense(units=1, activation='sigmoid', name='dense_1_local'))
return model
def build_ResNet(input_shape):
"""
    Return a tensorflow model with ResNet 50 as the feature extractor and two dense layers with Relu and Sigmoid
activation respectively as classification layers.
:param input_shape: image input shape (tuple), e.g. (28, 28, 3)
:return:
model Tensorflow model
"""
print("Setting up ResNet")
base_model = tf.keras.applications.ResNet50(include_top=False, input_shape=(input_shape[0], input_shape[1], 3),
weights='imagenet')
# Freeze the pre-trained model weights
base_model.trainable = False
# Layer classification head with feature detector
model = tf.keras.Sequential([
base_model,
layers.MaxPooling2D(),
layers.Flatten(),
layers.Dense(units=128),
layers.BatchNormalization(),
layers.ReLU(),
layers.Dense(units=1, activation='sigmoid')
], name='ResNet')
return model
def build_model(input_shape, model_type):
"""
Utility function building either a simple CNN architecture or ResNet
:param input_shape: image input shape (tuple), e.g. (28, 28, 3)
:param model_type: string, either 'CNN' or "ResNet"
:return:
        Tensorflow Keras model
"""
model_type = model_type.lower()
model_types = {'cnn': build_CNN,
'resnet': build_ResNet}
return model_types[model_type](input_shape=input_shape)
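
# --- Hedged usage sketch (added for illustration, not part of the original
# module). The input shapes are assumptions; note that build_ResNet downloads
# ImageNet weights on first use, so only the CNN variant is exercised here.
if __name__ == '__main__':
    cnn = build_model(input_shape=(28, 28, 1), model_type='CNN')
    cnn.summary()
    # resnet = build_model(input_shape=(224, 224, 3), model_type='ResNet')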
|
py | 7dfe9ae997a5214ee1a582153722cf9224334bed | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeRdsSuperAccountInstancesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Drds', '2019-01-23', 'DescribeRdsSuperAccountInstances','drds')
def get_RdsInstancess(self):
return self.get_query_params().get('RdsInstancess')
def set_RdsInstancess(self,RdsInstancess):
for i in range(len(RdsInstancess)):
if RdsInstancess[i] is not None:
				self.add_query_param('RdsInstances.' + str(i + 1), RdsInstancess[i])
def get_DbInstType(self):
return self.get_query_params().get('DbInstType')
def set_DbInstType(self,DbInstType):
self.add_query_param('DbInstType',DbInstType)
def get_DrdsInstanceId(self):
return self.get_query_params().get('DrdsInstanceId')
def set_DrdsInstanceId(self,DrdsInstanceId):
self.add_query_param('DrdsInstanceId',DrdsInstanceId) |
py | 7dfe9b97955e4447e317327c66445ee050fc7682 | from .constants import np
from .internals import _init, _noise2, _noise3, _noise4, _noise2a, _noise3a, _noise4a
import time
# Why 3 (and not just 0 or something)? I ran into a bug with "overflowing int" errors while refactoring in numpy
# when using a nonzero seed value... This constant is a reminder of that.
DEFAULT_SEED = 3
"""
OpenSimplex n-dimensional gradient noise algorithm,
based on work by Kurt Spencer.
"""
def seed(seed: int = DEFAULT_SEED) -> None:
"""
Seeds the underlying permutation array (which produces different outputs),
using a 64-bit seed number.
"""
global _default
_default = OpenSimplex(seed)
def random_seed() -> None:
"""
Works just like seed(), except it uses the system time (in ns) as a seed value.
Not guaranteed to be random so use at your own risk.
"""
seed(time.time_ns())
def noise2(x: float, y: float) -> float:
"""
Generate 2D OpenSimplex noise from X,Y coordinates.
"""
return _default.noise2(x, y)
def noise2array(x: np.ndarray, y: np.ndarray) -> np.ndarray:
"""
Generates 2D OpenSimplex noise using Numpy arrays for increased performance.
:param x: numpy array of x-coords
:param y: numpy array of y-coords
:return: 2D numpy array of shape (y.size, x.size) with the generated noise for the supplied coordinates.
"""
return _default.noise2array(x, y)
def noise3(x: float, y: float, z: float) -> float:
"""
Generate 3D OpenSimplex noise from X,Y,Z coordinates.
"""
return _default.noise3(x, y, z)
def noise3array(x: np.ndarray, y: np.ndarray, z: np.ndarray) -> np.ndarray:
"""
Generates 3D OpenSimplex noise using Numpy arrays for increased performance.
:param x: numpy array of x-coords
:param y: numpy array of y-coords
:param z: numpy array of z-coords
:return: 3D numpy array of shape (z.size, y.size, x.size) with the generated noise for the supplied coordinates.
"""
return _default.noise3array(x, y, z)
def noise4(x: float, y: float, z: float, w: float) -> float:
"""
Generate 4D OpenSimplex noise from X,Y,Z,W coordinates.
"""
return _default.noise4(x, y, z, w)
def noise4array(x: np.ndarray, y: np.ndarray, z: np.ndarray, w: np.ndarray) -> np.ndarray:
"""
Generates 4D OpenSimplex noise using Numpy arrays for increased performance.
:param x: numpy array of x-coords
:param y: numpy array of y-coords
:param z: numpy array of z-coords
:param w: numpy array of w-coords
:return: 4D numpy array of shape (w.size, z.size, y.size, x.size) with the generated noise for the supplied
coordinates.
"""
return _default.noise4array(x, y, z, w)
################################################################################
# This class is provided for backwards compatibility and might disappear in the future. Use at your own risk.
class OpenSimplex(object):
def __init__(self, seed: int) -> None:
self._perm, self._perm_grad_index3 = _init(seed)
def noise2(self, x: float, y: float) -> float:
return _noise2(x, y, self._perm)
def noise2array(self, x: np.ndarray, y: np.ndarray) -> np.ndarray:
return _noise2a(x, y, self._perm)
def noise3(self, x: float, y: float, z: float) -> float:
return _noise3(x, y, z, self._perm, self._perm_grad_index3)
def noise3array(self, x: np.ndarray, y: np.ndarray, z: np.ndarray) -> np.ndarray:
return _noise3a(x, y, z, self._perm, self._perm_grad_index3)
def noise4(self, x: float, y: float, z: float, w: float) -> float:
return _noise4(x, y, z, w, self._perm)
def noise4array(self, x: np.ndarray, y: np.ndarray, z: np.ndarray, w: np.ndarray) -> np.ndarray:
return _noise4a(x, y, z, w, self._perm)
_default = OpenSimplex(DEFAULT_SEED)
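
# --- Hedged usage sketch (added for illustration, not part of the original
# module). From client code the module-level API above is used directly,
# e.g. via ``import opensimplex`` (the package name is an assumption):
#
#     import numpy as np
#     import opensimplex
#
#     opensimplex.seed(1234)
#     print(opensimplex.noise2(0.5, 0.5))           # one 2D sample
#     xs = np.arange(0, 4, 0.5)                     # 8 x-coordinates
#     ys = np.arange(0, 2, 0.5)                     # 4 y-coordinates
#     print(opensimplex.noise2array(xs, ys).shape)  # -> (4, 8), i.e. (y.size, x.size)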
|
py | 7dfe9ba9884784dc4b193e7506f99dace45d5a4e | #!/usr/bin/env python
import os
import sys
import argparse
import brambox.boxes as bbb
class StoreKwargs(argparse.Action):
""" Store keyword arguments in a dict.
This action must be used with multiple arguments.
It will parse ints and floats and leave the rest as strings.
"""
def __call__(self, parser, namespace, values, option_string=None):
d = {}
for items in values:
n, v = items.split('=')
try:
v = int(v)
except ValueError:
try:
v = float(v)
except ValueError:
pass
d[n] = v
setattr(namespace, self.dest, d)
def main():
parser = argparse.ArgumentParser(
description='Convert bounding box file(s) from one format to the other',
usage='%(prog)s inputformat inputpath outputformat outputpath [optional arguments]',
        epilog=f'Possible formats are: {list(bbb.formats.keys())}',
)
parser.add_argument('inputformat', metavar='inputformat', choices=bbb.formats.keys(), help='Input format')
parser.add_argument('inputpath', help='Bounding box file, folder or file sequence')
    parser.add_argument('outputformat', metavar='outputformat', choices=bbb.formats.keys(), help='Output format')
parser.add_argument('outputpath', help='Output file or folder')
parser.add_argument('--stride', '-s', metavar='N', type=int, default=1, help='If a sequence expression is given as input, this stride is used')
parser.add_argument('--offset', '-o', metavar='N', type=int, default=0, help='If a sequence expression is given as input, this offset is used')
parser.add_argument('--kwargs', '-k', metavar='KW=V', help='Keyword arguments for the parser', nargs='*', action=StoreKwargs, default={})
args = parser.parse_args()
# Parse arguments
indir = os.path.split(args.inputpath)[0]
if not os.path.exists(indir):
sys.exit(f'Input directory {indir} does not exist')
if os.path.splitext(args.outputpath)[1] != '':
outdir = os.path.split(args.outputpath)[0]
else:
outdir = args.outputpath
if not os.path.exists(outdir):
os.makedirs(outdir)
# Convert
bbox = bbb.parse(args.inputformat, args.inputpath, stride=args.stride, offset=args.offset, **args.kwargs)
bbb.generate(args.outputformat, bbox, args.outputpath, **args.kwargs)
print(f'Converted {len(bbox)} files')
if __name__ == '__main__':
main()
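
# --- Hedged usage example (added for illustration, not part of the original
# script). The script name, format identifiers, paths and the kwargs key are
# placeholders; valid format names are whatever ``bbb.formats`` exposes at
# runtime:
#
#     python bbconvert.py <informat> './anno/img_*.txt' <outformat> ./converted/ \
#         --stride 2 --offset 1 --kwargs some_parser_option=42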
|
py | 7dfe9c0c3c316100bc149a9177806adfc3c882ec | # coding=utf-8
from ... import options as opts
from ...charts.chart import Chart
from ...commons.types import List, Numeric, Optional, Union
from ...globals import ChartType
class TreeMap(Chart):
"""
    <<< TreeMap >>>

    A treemap is a common way to visualise hierarchical, tree-structured
    data. It mainly uses nested areas to make the important nodes at each
    level of the tree stand out.
"""
def __init__(self, init_opts: Union[opts.InitOpts, dict] = opts.InitOpts()):
super().__init__(init_opts=init_opts)
def add(
self,
series_name: str,
data: List[Union[opts.TreeItem, dict]],
*,
is_selected: bool = True,
leaf_depth: Optional[Numeric] = None,
pos_left: Optional[str] = None,
pos_right: Optional[str] = None,
pos_top: Optional[str] = None,
pos_bottom: Optional[str] = None,
drilldown_icon: str = "▶",
visual_min: Optional[Numeric] = None,
visual_max: Optional[Numeric] = None,
label_opts: Union[opts.LabelOpts, dict] = opts.LabelOpts(),
tooltip_opts: Union[opts.TooltipOpts, dict, None] = None,
itemstyle_opts: Union[opts.ItemStyleOpts, dict, None] = None,
):
if isinstance(label_opts, opts.LabelOpts):
label_opts = label_opts.opts
if isinstance(tooltip_opts, opts.TooltipOpts):
tooltip_opts = tooltip_opts.opts
if isinstance(itemstyle_opts, opts.ItemStyleOpts):
itemstyle_opts = itemstyle_opts.opts
self._append_legend(series_name, is_selected)
self.options.get("series").append(
{
"type": ChartType.TREEMAP,
"name": series_name,
"data": data,
"left": pos_left,
"right": pos_right,
"top": pos_top,
"bottom": pos_bottom,
"label": label_opts,
"leafDepth": leaf_depth,
"drillDownIcon": drilldown_icon,
"visualMin": visual_min,
"visualMax": visual_max,
"tooltip": tooltip_opts,
"itemStyle": itemstyle_opts,
}
)
return self
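
# --- Hedged usage sketch (added for illustration, not part of the original
# module). From client code the class is normally imported as
# ``from pyecharts.charts import TreeMap``; the data items follow the
# TreeItem shape (name/value/children):
#
#     data = [
#         {"name": "node A", "value": 10},
#         {"name": "node B", "value": 20,
#          "children": [{"name": "node B1", "value": 12}]},
#     ]
#     TreeMap().add("demo", data).render("treemap.html")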
|
py | 7dfe9e25cda85fb8cca68dcb18b6918e0638f0fe | import os
from pyspark.sql import SparkSession
from pyspark.sql.functions import *
from pyspark.sql.window import Window
# This method takes a DataFrame (read back from the parquet output) and returns the row(s) with the maximum screen temperature
def get_max_temp(df):
window = Window.orderBy(col("Screen_Temperature").desc())
return df.withColumn("max_temp",rank().over(window)).filter(col("max_temp")==1).drop(col("max_temp"))
# This method reads the csv files and keeps only the required fields, i.e. Region, Observation Date and Screen Temperature.
# For each (region, observation date) group it takes the maximum screen temperature,
# and the result is saved in parquet format at the output location.
def load_parquet(spark: SparkSession, output_file_path, args):
try:
df = spark.read.csv(args, header=True).select(col("Region"),
col("ObservationDate").cast("Date").alias("Observation_Date"),
col("ScreenTemperature").cast("Decimal(5,2)")) \
.groupBy(col("region"), col("Observation_Date")) \
.agg(max(col("ScreenTemperature")).alias("Screen_Temperature")) \
.select(col("region"), col("Observation_Date"), col("Screen_Temperature"))
df.coalesce(1).write.mode('overwrite').parquet(output_file_path)
except Exception as e:
        print('Error while reading csv file or while writing into Parquet - Error Details - {c}, Message: {m}'.format(c=type(e).__name__, m=str(e)))
        exit(1)  # non-zero exit code so callers can detect the failure
if __name__ == "__main__":
spark_session = (
SparkSession.builder
.master("local[2]")
.appName("DataTest")
.config("spark.executorEnv.PYTHONHASHSEED", "0")
.getOrCreate()
)
input_file_path = '/tmp/DLG/DataEngineering/Weather/Input/'
output_file_path = '/tmp/DLG/DataEngineering/Weather/Output/'
# Get the file names from the input folder and copy the file names into a list
directories = os.listdir(input_file_path)
file_names = [input_file_path + x for x in directories if 'csv' in x]
    # If any csv files were found, process them; otherwise report that there is nothing to do
if (len(file_names) > 0):
load_parquet(spark_session, output_file_path, file_names)
df_read_file = spark_session.read.parquet(output_file_path)
max_temp_date_region = get_max_temp(df_read_file)
max_temp_date_region.show()
else:
        print(f'No csv files exist in path - {input_file_path}')
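
# --- Hedged usage note (added for illustration, not part of the original
# script). The job is typically launched with spark-submit; the script file
# name below is a placeholder:
#
#     spark-submit weather_max_temp.py
#
# It reads every csv under /tmp/DLG/DataEngineering/Weather/Input/, writes the
# per-(region, observation date) maxima as parquet under .../Output/, and then
# prints the overall hottest rows via get_max_temp().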
|
py | 7dfe9f5063f608906e929cd82af137cefd897430 | """
These closures are intended to be used as a ``Mock`` object's ``side_effect``, allowing a mocked function's
return value to be specified at the same time as the expected arguments. This is a very concise way of doing
things for simple uses where the mocked function is only ever called once (or with one set of arguments) and
can also be used to raise an ``Exception`` at the actual offending call site when there is an argument mismatch,
leading to easy debugging.
>>> from unittest.mock import Mock
>>> mymock = Mock()
>>> mymock.side_effect = assert_args_and_return("two eggs", "two bottles", yards=50)
>>> mymock('two bottles', yards=50)
'two eggs'
>>> mymock('two bottles', metres=50)
Traceback (most recent call last):
...
AssertionError
>>> class EggBottleException(Exception):
... pass
>>> mymock.side_effect = assert_args_and_raise(EggBottleException, "two bottles", yards=50)
>>> mymock('two bottles', yards=50)
Traceback (most recent call last):
...
mocking.EggBottleException
>>> mymock('two bottles', metres=50)
Traceback (most recent call last):
...
AssertionError
>>> mymock.side_effect = assert_args_and_return_or_raise("two eggs", EggBottleException, "two bottles", yards=50)
>>> mymock('two bottles', yards=50)
'two eggs'
>>> mymock('two bottles', metres=50)
Traceback (most recent call last):
...
mocking.EggBottleException
"""
from typing import Callable, Iterable
def assert_args_and_return(retval, *args, **kwargs) -> Callable:
"""
Given a return value and an arbitrary set of arguments, returns a callable which will return ``retval`` when called
with arguments matching those specified here, otherwise will raise an ``AssertionError``.
"""
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
return retval
return _inner
def assert_args_and_raise(e: Exception, *args, **kwargs) -> Callable:
"""
    Given an exception ``e`` and an arbitrary set of arguments, returns a callable which will raise ``e``
    when called with arguments matching those specified here, otherwise will raise an ``AssertionError``.
"""
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
raise e
return _inner
def assert_args_and_return_or_raise(retval, e, *args, **kwargs) -> Callable:
"""
    Given a return value, an exception ``e``, and an arbitrary set of arguments, returns a callable which will return ``retval`` when called
with arguments matching those specified here, otherwise will raise the exception ``e``.
"""
def _inner(*inner_args, **inner_kwargs):
if args == inner_args and kwargs == inner_kwargs:
return retval
else:
raise e
return _inner
def assert_args_and_return_iter_over(retval: Iterable, *args, **kwargs) -> Callable:
"""
Given an iterable return value and an arbitrary set of arguments, returns a callable which will return
a fresh iterator over ``retval`` when called with arguments matching those specified here, otherwise will raise
an ``AssertionError``.
"""
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
return iter(retval)
return _inner
|
py | 7dfe9f52c86b6dccdd072dae9c7a76aeed0b749f | #!/usr/bin/env python
from __future__ import print_function
import argparse
import subprocess
import json
import os
import requests
import sys
from cli.settings import Settings
from cli.appconfig import AppConfig
from cli.containerconfig import ContainerConfig
from cli.utils import printException, printErrorMsg
requests.packages.urllib3.disable_warnings()
def describe():
return "streams the new output from the tasks's STDOUT and STDERR logs."
class RogerLogs(object):
def parse_args(self):
self.parser = argparse.ArgumentParser(
prog='roger logs', description=describe())
self.parser.add_argument('appTaskId', metavar='appTaskId',
help="first few letters of application task id. Example: 'content.5684")
self.parser.add_argument('-e', '--env', metavar='env',
help="environment to search. Example: 'dev' or 'stage'")
self.parser.add_argument('-H', '--hostname', metavar='hostname',
help="hostname to search. Example: 'daldevmesos01' or 'daldevmesos04'")
self.parser.add_argument(
'-f', '--follow', help="follow log output. Defaults to false.", action="store_true")
self.parser.add_argument(
'-t', '--timestamps', help="show timestamps. Defaults to false.", action="store_true")
self.parser.add_argument(
'-s', '--since', help="show logs since timestamp.")
self.parser.add_argument(
'-T', '--tail', help="number of lines to show from the end of the logs. If a negative number is given, it shows all.")
return self.parser
def main(self):
self.parser = self.parse_args()
args = self.parser.parse_args()
config_dir = settingObj.getConfigDir()
roger_env = appObj.getRogerEnv(config_dir)
environment = roger_env.get('default_environment', '')
if args.env is None:
if "ROGER_ENV" in os.environ:
env_var = os.environ.get('ROGER_ENV')
if env_var.strip() == '':
print(
"Environment variable $ROGER_ENV is not set.Using the default set from roger-mesos-tools.config file")
else:
print(
"Using value {} from environment variable $ROGER_ENV".format(env_var))
environment = env_var
else:
environment = args.env
if environment not in roger_env['environments']:
raise ValueError('Environment not found in roger-mesos-tools.config file.')
hostname = ''
containerId = ''
if args.hostname is None:
hostname = containerconfig.get_hostname_from_marathon(
environment, roger_env, args.appTaskId)
else:
hostname = args.hostname
if hostname != '': # Hostname maybe empty when the given appTaskId does not match any taskId from Marathon
(containerId, mesosTaskId) = containerconfig.get_containerid_mesostaskid(
args.appTaskId, hostname)
else:
print("Most likely hostname could not be retrieved with appTaskId {0}. Hostname is also \
an optional argument. See -h for usage.".format(args.appTaskId))
if containerId is not '' and containerId is not None:
print("If there are multiple containers that pattern match the given mesos task Id, \
then will log into the first one")
print("Displaying logs in docker container - {0} on host - {1} for mesosTask Id {2}".format(
containerId, hostname, mesosTaskId))
command = "docker -H tcp://{0}:4243 logs ".format(hostname)
if args.follow:
command = "{} -f=true".format(command)
else:
command = "{} -f=false".format(command)
if args.since:
command = "{} --since=\"{}\"".format(command, args.since)
if args.timestamps:
command = "{} -t".format(command, args.since)
if args.tail:
command = "{} --tail=\"{}\"".format(command, args.tail)
command = "{} {}".format(command, containerId)
try:
subprocess.check_call("{}".format(command), shell=True)
except (KeyboardInterrupt, SystemExit):
print("Exited.")
except (subprocess.CalledProcessError) as e:
printException(e)
else:
print("No Container found on host {0} with application Task Id {1}".format(hostname, args.appTaskId))
if __name__ == '__main__':
settingObj = Settings()
appObj = AppConfig()
containerconfig = ContainerConfig()
roger_logs = RogerLogs()
roger_logs.main()
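
# --- Hedged usage examples (added for illustration, not part of the original
# script). Flags mirror the argparse definitions above; task ids and host
# names are placeholders:
#
#     roger logs content.5684 -e dev -f
#     roger logs content.5684 -H daldevmesos01 --tail 100 -t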
|
py | 7dfea0a06ea8bee85bc5d30431b0299d469cbe18 | """scrapli.transport.base"""
from scrapli.transport.base.async_transport import AsyncTransport
from scrapli.transport.base.base_transport import BasePluginTransportArgs, BaseTransportArgs
from scrapli.transport.base.sync_transport import Transport
__all__ = (
"AsyncTransport",
"BaseTransportArgs",
"BasePluginTransportArgs",
"Transport",
)
|
py | 7dfea0bf2eb6f63a6511b15ed31e96dff38ea80d | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
# Copyright 2022 Northern System Service Co., Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
r"""ShapeMask で物体検出を行い、マスク画像を作成するツール
与えられた画像に対して、ShapeMask で物体検出を行う。
検出したオブジェクトごとに1枚のバイナリマスク画像を書き出す。
python /path/to/inference_via.py \
--image_size 256 \
--model=shapemask \
--checkpoint_path=/path/to/model.ckpt-30000 \
--config_file=/path/to/config.yaml \
--image_file_pattern='/path/to/images/*.JPG' \
--label_map_file=/path/to/labels.csv \
--export_to=/path/to/reslt/
マスク画像名のフォーマットは下記の通り。
{元ファイル名}_{オブジェクト連番}_conf{確信度}_label{ラベル名}.png
例) DSC0001_03_conf0.98_labelfruit.png
"""
# pylint: enable=line-too-long
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import csv
import io
import os
from tqdm import tqdm
from pathlib import Path
from absl import flags
from absl import logging
import numpy as np
from PIL import Image
import tensorflow.compat.v1 as tf
from configs import factory as config_factory
from dataloader import mode_keys
from modeling import factory as model_factory
from utils import box_utils
from utils import input_utils
from utils import mask_utils
from utils.object_detection import visualization_utils
from hyperparameters import params_dict
FLAGS = flags.FLAGS
flags.DEFINE_string(
'model', 'retinanet', 'Support `retinanet`, `mask_rcnn` and `shapemask`.')
flags.DEFINE_integer('image_size', 256, 'The image size.')
flags.DEFINE_string(
'checkpoint_path', '', 'The path to the checkpoint file.')
flags.DEFINE_string(
'config_file', '', 'The config file template.')
flags.DEFINE_string(
'params_override', '', 'The YAML file/string that specifies the parameters '
'override in addition to the `config_file`.')
flags.DEFINE_string(
'label_map_file', '',
'The label map file. See --label_map_format for the definition.')
flags.DEFINE_string(
'label_map_format', 'csv',
'The format of the label map file. Currently only support `csv` where the '
'format of each row is: `id:name`.')
flags.DEFINE_string(
'image_file_pattern', '',
'The glob that specifies the image file pattern.')
flags.DEFINE_string(
'export_to', '/path/to/export/to',
'The directory to export masks to.')
def main(unused_argv):
del unused_argv
export_to = Path(FLAGS.export_to)
if not export_to.is_dir():
export_to.mkdir()
# Load the label map.
print(' - Loading the label map...')
label_map_dict = {}
if FLAGS.label_map_format == 'csv':
with tf.gfile.Open(FLAGS.label_map_file, 'r') as csv_file:
reader = csv.reader(csv_file, delimiter=':')
for row in reader:
if len(row) != 2:
raise ValueError('Each row of the csv label map file must be in '
'`id:name` format.')
id_index = int(row[0])
name = row[1]
label_map_dict[id_index] = {
'id': id_index,
'name': name,
}
else:
raise ValueError(
        'Unsupported label map format: {}.'.format(FLAGS.label_map_format))
params = config_factory.config_generator(FLAGS.model)
if FLAGS.config_file:
params = params_dict.override_params_dict(
params, FLAGS.config_file, is_strict=True)
params = params_dict.override_params_dict(
params, FLAGS.params_override, is_strict=True)
params.validate()
params.lock()
model = model_factory.model_generator(params)
with tf.Graph().as_default():
image_input = tf.placeholder(shape=(), dtype=tf.string)
image = tf.io.decode_image(image_input, channels=3)
image.set_shape([None, None, 3])
image = input_utils.normalize_image(image)
image_size = [FLAGS.image_size, FLAGS.image_size]
image, image_info = input_utils.resize_and_crop_image(
image,
image_size,
image_size,
aug_scale_min=1.0,
aug_scale_max=1.0)
image.set_shape([image_size[0], image_size[1], 3])
# batching.
images = tf.reshape(image, [1, image_size[0], image_size[1], 3])
images_info = tf.expand_dims(image_info, axis=0)
# model inference
outputs = model.build_outputs(
images, {'image_info': images_info}, mode=mode_keys.PREDICT)
outputs['detection_boxes'] = (
outputs['detection_boxes'] / tf.tile(images_info[:, 2:3, :], [1, 1, 2]))
predictions = outputs
# Create a saver in order to load the pre-trained checkpoint.
saver = tf.train.Saver()
with tf.Session() as sess:
print(' - Loading the checkpoint...')
saver.restore(sess, FLAGS.checkpoint_path)
image_files = tf.gfile.Glob(FLAGS.image_file_pattern)
for i, image_file in tqdm(
enumerate(image_files), ascii=True, total=len(image_files)):
logging.debug(
' - Generating masks for {} ({})...'.format(i, image_file))
logging.debug(' - Opening {}...'.format(image_file))
with tf.gfile.GFile(image_file, 'rb') as f:
image_bytes = f.read()
image = Image.open(image_file)
image = image.convert('RGB') # needed for images with 4 channels.
width, height = image.size
logging.debug(' - Image size is {}.'.format(image.size))
np_image = (
np.array(image.getdata()).reshape(height, width, 3).astype(np.uint8))
predictions_np = sess.run(
predictions, feed_dict={image_input: image_bytes})
num_detections = int(predictions_np['num_detections'][0])
np_boxes = predictions_np['detection_boxes'][0, :num_detections]
np_scores = predictions_np['detection_scores'][0, :num_detections]
np_classes = predictions_np['detection_classes'][0, :num_detections]
np_classes = np_classes.astype(np.int32)
np_masks = None
if 'detection_masks' in predictions_np:
instance_masks = predictions_np['detection_masks'][0, :num_detections]
np_masks = mask_utils.paste_instance_masks(
instance_masks, box_utils.yxyx_to_xywh(np_boxes), height, width)
# np_masks is a numpy array, shape==(n, H, W)
## Export masks
mask_basename = Path(image_file).stem
      for mask_idx, np_mask in enumerate(np_masks):  # don't shadow the image index ``i`` above
        fname = \
            '{basename}_{i:02d}_conf{score:.2f}_label{label:s}.png'.format(
                basename=mask_basename, i=mask_idx, score=np_scores[mask_idx],
                label=label_map_dict[np_classes[mask_idx]]['name']
            )
mask_path = Path(FLAGS.export_to)/fname
logging.debug('Exporting {}'.format(mask_path))
im = Image.fromarray(np_mask)
im.save(mask_path)
if __name__ == '__main__':
flags.mark_flag_as_required('model')
flags.mark_flag_as_required('checkpoint_path')
flags.mark_flag_as_required('label_map_file')
flags.mark_flag_as_required('image_file_pattern')
flags.mark_flag_as_required('export_to')
logging.set_verbosity(logging.WARNING)
tf.app.run(main)
|
py | 7dfea0dc1477743398fa39bb1fb7dde3f78fb415 | import cv2
import click
import shutil
from pathlib import Path
from tqdm import tqdm
from object_removal.remover import ObjectRemover
SCRIPT_DIR = str(Path(__file__).parent)
@click.command()
@click.option("--input-image-dir", "-i", default="{}/../images".format(SCRIPT_DIR))
@click.option("--input-mask-dir", "-m", default="{}/../masks".format(SCRIPT_DIR))
@click.option("--output_dir", "-o", default="{}/../output".format(SCRIPT_DIR))
@click.option("--hifill-pb", "-pb", default="{}/../pb/hifill.pb".format(SCRIPT_DIR))
def main(input_image_dir, input_mask_dir, output_dir, hifill_pb):
input_image_dir_pathlib = Path(input_image_dir)
input_mask_dir_pathlib = Path(input_mask_dir)
output_dir_pathlib = Path(output_dir)
if output_dir_pathlib.exists():
shutil.rmtree(output_dir)
output_dir_pathlib.mkdir()
input_image_list = [str(path) for path in input_image_dir_pathlib.glob("*") if path.suffix in [".jpg", ".png"]]
remover = ObjectRemover(hifill_pb)
for input_image_pathstr in tqdm(input_image_list):
image_name = Path(input_image_pathstr).name
input_image = cv2.imread(input_image_pathstr)
        mask_image_path = input_mask_dir_pathlib.joinpath(image_name.replace(".jpg", ".png"))
        # Skip images with no corresponding mask before attempting to read one.
        if not mask_image_path.exists():
            continue
        mask_image = cv2.imread(str(mask_image_path), cv2.IMREAD_ANYDEPTH)
result = remover(input_image, mask_image)
output_image_path = output_dir_pathlib.joinpath(image_name)
cv2.imwrite(str(output_image_path), result)
cv2.waitKey(10)
if __name__ == "__main__":
main()
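
# --- Hedged usage example (added for illustration, not part of the original
# script). The script file name is a placeholder; the options mirror the
# click definitions above and default to the ../images, ../masks, ../output
# and ../pb/hifill.pb locations relative to this file:
#
#     python remove_objects.py -i ./images -m ./masks -o ./output -pb ./pb/hifill.pb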
|
py | 7dfea0ff20f74118414cfd689b2f484c899508cc | from commands.command import Command
from fbchat import Message
from fbchat import Mention
import dialogflow
import os
class talk(Command):
def run(self):
mentions = [Mention(self.author_id, length=len(self.author.first_name) + 1)]
response_text = "@" + self.author.first_name + " "
if len(self.user_params) > 0:
text = " ".join(self.user_params)
DIALOGFLOW_PROJECT_ID = 'dentaku-fyaltw'
DIALOGFLOW_LANGUAGE_CODE = 'en-US'
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = 'dentaku-dialogflow.json'
SESSION_ID = self.thread_id
session_client = dialogflow.SessionsClient()
session = session_client.session_path(DIALOGFLOW_PROJECT_ID, SESSION_ID)
text_input = dialogflow.types.TextInput(text=text, language_code=DIALOGFLOW_LANGUAGE_CODE)
query_input = dialogflow.types.QueryInput(text=text_input)
response = session_client.detect_intent(session=session, query_input=query_input)
response_text += response.query_result.fulfillment_text
else:
response_text += "Please say something"
self.client.send(
Message(text=response_text, mentions=mentions),
thread_id=self.thread_id,
thread_type=self.thread_type
)
def define_documentation(self):
self.documentation = {
"parameters": "MESSAGE",
"function": "Makes small talk."
}
|
py | 7dfea149f800779332c99932b8e1cc92bf95880c | from __future__ import absolute_import, division, print_function
from .data_stream import *
from .antenna import *
from .polyphase_filterbank import *
from .quantization import *
from .waterfall import *
from .level_utils import *
from .backend import *
|
py | 7dfea2276a728bd9987424f8a1f1f59aa273ca6a | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['DatabaseAccountMongoDBCollection']
class DatabaseAccountMongoDBCollection(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
collection_name: Optional[pulumi.Input[str]] = None,
database_name: Optional[pulumi.Input[str]] = None,
options: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
resource: Optional[pulumi.Input[pulumi.InputType['MongoDBCollectionResourceArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
An Azure Cosmos DB MongoDB collection.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: Cosmos DB database account name.
:param pulumi.Input[str] collection_name: Cosmos DB collection name.
:param pulumi.Input[str] database_name: Cosmos DB database name.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] options: A key-value pair of options to be applied for the request. This corresponds to the headers sent with the request.
:param pulumi.Input[pulumi.InputType['MongoDBCollectionResourceArgs']] resource: The standard JSON format of a MongoDB collection
:param pulumi.Input[str] resource_group_name: Name of an Azure resource group.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if account_name is None:
raise TypeError("Missing required property 'account_name'")
__props__['account_name'] = account_name
if collection_name is None:
raise TypeError("Missing required property 'collection_name'")
__props__['collection_name'] = collection_name
if database_name is None:
raise TypeError("Missing required property 'database_name'")
__props__['database_name'] = database_name
if options is None:
raise TypeError("Missing required property 'options'")
__props__['options'] = options
if resource is None:
raise TypeError("Missing required property 'resource'")
__props__['resource'] = resource
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['indexes'] = None
__props__['location'] = None
__props__['name'] = None
__props__['shard_key'] = None
__props__['tags'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:documentdb/latest:DatabaseAccountMongoDBCollection"), pulumi.Alias(type_="azure-nextgen:documentdb/v20150401:DatabaseAccountMongoDBCollection"), pulumi.Alias(type_="azure-nextgen:documentdb/v20150408:DatabaseAccountMongoDBCollection"), pulumi.Alias(type_="azure-nextgen:documentdb/v20160319:DatabaseAccountMongoDBCollection"), pulumi.Alias(type_="azure-nextgen:documentdb/v20160331:DatabaseAccountMongoDBCollection")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(DatabaseAccountMongoDBCollection, __self__).__init__(
'azure-nextgen:documentdb/v20151106:DatabaseAccountMongoDBCollection',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'DatabaseAccountMongoDBCollection':
"""
Get an existing DatabaseAccountMongoDBCollection resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return DatabaseAccountMongoDBCollection(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def indexes(self) -> pulumi.Output[Optional[Sequence['outputs.MongoIndexResponse']]]:
"""
List of index keys
"""
return pulumi.get(self, "indexes")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
The location of the resource group to which the resource belongs.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the database account.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="shardKey")
def shard_key(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A key-value pair of shard keys to be applied for the request.
"""
return pulumi.get(self, "shard_key")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB".
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of Azure resource.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
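
# --- Hedged usage sketch (added for illustration, not part of the original
# generated module). All names are placeholders, and it assumes
# MongoDBCollectionResourceArgs (imported from ._inputs above) exposes the
# collection ``id`` field:
#
#     collection = DatabaseAccountMongoDBCollection(
#         "my-collection",
#         account_name="my-cosmos-account",
#         collection_name="orders",
#         database_name="shop",
#         options={},
#         resource=MongoDBCollectionResourceArgs(id="orders"),
#         resource_group_name="my-rg")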
|
py | 7dfea255f9be0b5cd3dddbca091598b39b805172 | from django.views.generic import FormView
from django.conf import settings
from django.urls import reverse_lazy
from django.core.exceptions import NON_FIELD_ERRORS, ValidationError
from .forms import ContactForm
class ContactFormView(FormView):
template_name = "contact/form.html"
form_class = ContactForm
success_url = reverse_lazy("contact:success")
def form_valid(self, form):
try:
            # It's a bit sad, but if we sent to Freshdesk when running tests (which happens a lot),
            # Freshdesk would spam everyone with "A new ticket has been created" emails.
if not settings.TESTING:
form.send_freshdesk()
except ValidationError as e:
form.add_error(NON_FIELD_ERRORS, e.message)
return self.form_invalid(form)
return super().form_valid(form)
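
# --- Hedged usage sketch (added for illustration, not part of the original
# module). A minimal URLconf wiring for this view; the app name and the
# success template path are assumptions inferred from reverse_lazy
# ("contact:success") and the form template above:
#
#     # contact/urls.py
#     from django.urls import path
#     from django.views.generic import TemplateView
#     from .views import ContactFormView
#
#     app_name = "contact"
#     urlpatterns = [
#         path("", ContactFormView.as_view(), name="form"),
#         path("success/", TemplateView.as_view(
#             template_name="contact/success.html"), name="success"),
#     ]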
|
py | 7dfea2e68b0a7015f2787dc2077537b2b3c27ad7 | import datetime
import mock
from twisted.internet import reactor, defer
from twisted.trial import unittest
from twisted.web import server
from jasmin.managers.content import DLRContentForHttpapi, DLRContentForSmpps
from jasmin.queues.configs import AmqpConfig
from jasmin.queues.factory import AmqpFactory
from jasmin.routing.configs import DLRThrowerConfig
from jasmin.routing.proxies import RouterPBProxy
from jasmin.routing.test.http_server import TimeoutLeafServer, AckServer, NoAckServer, Error404Server
from jasmin.routing.test.test_router import SubmitSmTestCaseTools
from jasmin.routing.test.test_router_smpps import SMPPClientTestCases
from jasmin.routing.throwers import DLRThrower
from jasmin.vendor.smpp.pdu import pdu_types
@defer.inlineCallbacks
def waitFor(seconds):
# Wait seconds
waitDeferred = defer.Deferred()
reactor.callLater(seconds, waitDeferred.callback, None)
yield waitDeferred
class DLRThrowerTestCases(unittest.TestCase):
@defer.inlineCallbacks
def setUp(self):
# Initiating config objects without any filename
# will lead to setting defaults and that's what we
# need to run the tests
AMQPServiceConfigInstance = AmqpConfig()
AMQPServiceConfigInstance.reconnectOnConnectionLoss = False
self.amqpBroker = AmqpFactory(AMQPServiceConfigInstance)
yield self.amqpBroker.connect()
yield self.amqpBroker.getChannelReadyDeferred()
# Initiating config objects without any filename
# will lead to setting defaults and that's what we
# need to run the tests
DLRThrowerConfigInstance = DLRThrowerConfig()
# Lower the timeout config to pass the timeout tests quickly
DLRThrowerConfigInstance.timeout = 2
DLRThrowerConfigInstance.retry_delay = 1
DLRThrowerConfigInstance.max_retries = 2
# Launch the DLRThrower
self.DLRThrower = DLRThrower(DLRThrowerConfigInstance)
# Add the broker to the DLRThrower
yield self.DLRThrower.addAmqpBroker(self.amqpBroker)
@defer.inlineCallbacks
def tearDown(self):
yield self.amqpBroker.disconnect()
yield self.DLRThrower.stopService()
class HTTPDLRThrowerTestCase(DLRThrowerTestCases):
@defer.inlineCallbacks
def setUp(self):
yield DLRThrowerTestCases.setUp(self)
# Start http servers
self.Error404ServerResource = Error404Server()
self.Error404Server = reactor.listenTCP(0, server.Site(self.Error404ServerResource))
self.AckServerResource = AckServer()
self.AckServer = reactor.listenTCP(0, server.Site(self.AckServerResource))
self.NoAckServerResource = NoAckServer()
self.NoAckServer = reactor.listenTCP(0, server.Site(self.NoAckServerResource))
self.TimeoutLeafServerResource = TimeoutLeafServer()
self.TimeoutLeafServerResource.hangTime = 3
self.TimeoutLeafServer = reactor.listenTCP(0, server.Site(self.TimeoutLeafServerResource))
@defer.inlineCallbacks
def publishDLRContentForHttpapi(self, message_status, msgid, dlr_url, dlr_level, id_smsc='', sub='',
dlvrd='', subdate='', donedate='', err='', text='', method='POST', trycount=0):
content = DLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level, id_smsc, sub, dlvrd, subdate,
donedate, err, text, method, trycount)
yield self.amqpBroker.publish(exchange='messaging', routing_key='dlr_thrower.http', content=content)
@defer.inlineCallbacks
def tearDown(self):
yield DLRThrowerTestCases.tearDown(self)
yield self.Error404Server.stopListening()
yield self.AckServer.stopListening()
yield self.NoAckServer.stopListening()
yield self.TimeoutLeafServer.stopListening()
@defer.inlineCallbacks
def test_throwing_http_connector_with_ack(self):
self.AckServerResource.render_POST = mock.Mock(wraps=self.AckServerResource.render_POST)
dlr_url = 'http://127.0.0.1:%s/dlr' % self.AckServer.getHost().port
dlr_level = 1
msgid = 'anything'
message_status = 'DELIVRD'
self.publishDLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level)
yield waitFor(1)
# No message retries must be made since ACK was received
self.assertEqual(self.AckServerResource.render_POST.call_count, 1)
@defer.inlineCallbacks
def test_throwing_http_connector_without_ack(self):
self.NoAckServerResource.render_POST = mock.Mock(wraps=self.NoAckServerResource.render_POST)
dlr_url = 'http://127.0.0.1:%s/dlr' % self.NoAckServer.getHost().port
dlr_level = 1
msgid = 'anything'
message_status = 'DELIVRD'
self.publishDLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level)
yield waitFor(4)
# Retries must be made when ACK is not received
self.assertTrue(self.NoAckServerResource.render_POST.call_count > 1)
@defer.inlineCallbacks
def test_throwing_http_connector_timeout_retry(self):
self.TimeoutLeafServerResource.render_POST = mock.Mock(wraps=self.TimeoutLeafServerResource.render_POST)
dlr_url = 'http://127.0.0.1:%s/dlr' % self.TimeoutLeafServer.getHost().port
dlr_level = 1
msgid = 'anything'
message_status = 'DELIVRD'
self.publishDLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level)
yield waitFor(12)
self.assertEqual(self.TimeoutLeafServerResource.render_POST.call_count, 3)
@defer.inlineCallbacks
def test_throwing_http_connector_404_error_noretry(self):
"""When receiving a 404 error, no further retries shall be made
"""
self.Error404ServerResource.render_POST = mock.Mock(wraps=self.Error404ServerResource.render_POST)
dlr_url = 'http://127.0.0.1:%s/dlr' % self.Error404Server.getHost().port
dlr_level = 1
msgid = 'anything'
message_status = 'DELIVRD'
self.publishDLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level)
yield waitFor(1)
self.assertEqual(self.Error404ServerResource.render_POST.call_count, 1)
@defer.inlineCallbacks
def test_throwing_http_connector_dlr_level1(self):
self.AckServerResource.render_GET = mock.Mock(wraps=self.AckServerResource.render_GET)
dlr_url = 'http://127.0.0.1:%s/dlr' % self.AckServer.getHost().port
dlr_level = 1
msgid = 'anything'
message_status = 'DELIVRD'
self.publishDLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level, method='GET')
yield waitFor(1)
# No message retries must be made since ACK was received
self.assertEqual(self.AckServerResource.render_GET.call_count, 1)
callArgs = self.AckServerResource.render_GET.call_args_list[0][0][0].args
self.assertEqual(callArgs['message_status'][0], message_status)
self.assertEqual(callArgs['id'][0], msgid)
self.assertEqual(callArgs['level'][0], str(dlr_level))
@defer.inlineCallbacks
def test_throwing_http_connector_dlr_level2(self):
self.AckServerResource.render_GET = mock.Mock(wraps=self.AckServerResource.render_GET)
dlr_url = 'http://127.0.0.1:%s/dlr' % self.AckServer.getHost().port
dlr_level = 2
msgid = 'anything'
message_status = 'DELIVRD'
self.publishDLRContentForHttpapi(message_status, msgid, dlr_url, dlr_level, id_smsc='abc', sub='3',
dlvrd='3', subdate='anydate', donedate='anydate', err='', text='Any text',
method='GET')
yield waitFor(1)
# No message retries must be made since ACK was received
self.assertEqual(self.AckServerResource.render_GET.call_count, 1)
callArgs = self.AckServerResource.render_GET.call_args_list[0][0][0].args
self.assertEqual(callArgs['message_status'][0], message_status)
self.assertEqual(callArgs['id'][0], msgid)
self.assertEqual(callArgs['level'][0], str(dlr_level))
self.assertEqual(callArgs['id_smsc'][0], 'abc')
self.assertEqual(callArgs['sub'][0], '3')
self.assertEqual(callArgs['dlvrd'][0], '3')
self.assertEqual(callArgs['subdate'][0], 'anydate')
self.assertEqual(callArgs['donedate'][0], 'anydate')
self.assertEqual(callArgs['err'][0], '')
self.assertEqual(callArgs['text'][0], 'Any text')
class SMPPDLRThrowerTestCases(RouterPBProxy, SMPPClientTestCases, SubmitSmTestCaseTools):
@defer.inlineCallbacks
def setUp(self):
yield SMPPClientTestCases.setUp(self)
# Lower the timeout config to pass the timeout tests quickly
self.DLRThrower.config.timeout = 2
self.DLRThrower.config.retry_delay = 1
self.DLRThrower.config.max_retries = 2
@defer.inlineCallbacks
def publishDLRContentForSmppapi(self, message_status, msgid, system_id, source_addr, destination_addr,
sub_date=None,
source_addr_ton='UNKNOWN', source_addr_npi='UNKNOWN',
dest_addr_ton='UNKNOWN', dest_addr_npi='UNKNOWN'):
if sub_date is None:
sub_date = datetime.datetime.now()
content = DLRContentForSmpps(message_status, msgid, system_id, source_addr, destination_addr, sub_date,
source_addr_ton, source_addr_npi, dest_addr_ton, dest_addr_npi)
yield self.amqpBroker.publish(exchange='messaging', routing_key='dlr_thrower.smpps', content=content)
@defer.inlineCallbacks
def test_throwing_smpps_to_bound_connection_as_deliver_sm(self):
self.DLRThrower.config.dlr_pdu = 'deliver_sm'
self.DLRThrower.ackMessage = mock.Mock(wraps=self.DLRThrower.ackMessage)
self.DLRThrower.rejectMessage = mock.Mock(wraps=self.DLRThrower.rejectMessage)
self.DLRThrower.smpp_dlr_callback = mock.Mock(wraps=self.DLRThrower.smpp_dlr_callback)
# Bind
yield self.connect('127.0.0.1', self.pbPort)
yield self.prepareRoutingsAndStartConnector()
yield self.smppc_factory.connectAndBind()
# Install mocks
self.smppc_factory.lastProto.PDUDataRequestReceived = mock.Mock(
wraps=self.smppc_factory.lastProto.PDUDataRequestReceived)
sub_date = datetime.datetime.now()
yield self.publishDLRContentForSmppapi('ESME_ROK', 'MSGID', 'username', '999', '000', sub_date)
yield waitFor(1)
# Run tests
self.assertEqual(self.smppc_factory.lastProto.PDUDataRequestReceived.call_count, 1)
# the received pdu must be a DeliverSM
received_pdu_1 = self.smppc_factory.lastProto.PDUDataRequestReceived.call_args_list[0][0][0]
self.assertEqual(received_pdu_1.id, pdu_types.CommandId.deliver_sm)
self.assertEqual(received_pdu_1.params['source_addr'], '000')
self.assertEqual(received_pdu_1.params['destination_addr'], '999')
self.assertEqual(received_pdu_1.params['receipted_message_id'], 'MSGID')
self.assertEqual(str(received_pdu_1.params['message_state']), 'ACCEPTED')
self.assertEqual(received_pdu_1.params['short_message'],
'id:MSGID submit date:%s done date:%s stat:ACCEPTD err:000' % (
sub_date.strftime("%Y%m%d%H%M"),
sub_date.strftime("%Y%m%d%H%M"),
))
# Unbind & Disconnect
yield self.smppc_factory.smpp.unbindAndDisconnect()
yield self.stopSmppClientConnectors()
@defer.inlineCallbacks
def test_throwing_smpps_to_bound_connection(self):
self.DLRThrower.ackMessage = mock.Mock(wraps=self.DLRThrower.ackMessage)
self.DLRThrower.rejectMessage = mock.Mock(wraps=self.DLRThrower.rejectMessage)
self.DLRThrower.smpp_dlr_callback = mock.Mock(wraps=self.DLRThrower.smpp_dlr_callback)
# Bind
yield self.connect('127.0.0.1', self.pbPort)
yield self.prepareRoutingsAndStartConnector()
yield self.smppc_factory.connectAndBind()
yield self.publishDLRContentForSmppapi('ESME_ROK', 'MSGID', 'username', '999', '000')
yield waitFor(1)
# Run tests
self.assertEqual(self.DLRThrower.smpp_dlr_callback.call_count, 1)
self.assertEqual(self.DLRThrower.ackMessage.call_count, 1)
self.assertEqual(self.DLRThrower.rejectMessage.call_count, 0)
# Unbind & Disconnect
yield self.smppc_factory.smpp.unbindAndDisconnect()
yield self.stopSmppClientConnectors()
@defer.inlineCallbacks
def test_throwing_smpps_to_not_bound_connection(self):
self.DLRThrower.ackMessage = mock.Mock(wraps=self.DLRThrower.ackMessage)
self.DLRThrower.rejectMessage = mock.Mock(wraps=self.DLRThrower.rejectMessage)
self.DLRThrower.rejectAndRequeueMessage = mock.Mock(wraps=self.DLRThrower.rejectAndRequeueMessage)
self.DLRThrower.smpp_dlr_callback = mock.Mock(wraps=self.DLRThrower.smpp_dlr_callback)
yield self.publishDLRContentForSmppapi('ESME_ROK', 'MSGID', 'username', '999', '000')
yield waitFor(3)
# Run tests
self.assertEqual(self.DLRThrower.smpp_dlr_callback.call_count, 3)
self.assertEqual(self.DLRThrower.ackMessage.call_count, 0)
self.assertEqual(self.DLRThrower.rejectMessage.call_count, 3)
self.assertEqual(self.DLRThrower.rejectAndRequeueMessage.call_count, 2)
@defer.inlineCallbacks
def test_throwing_smpps_with_no_deliverers(self):
self.DLRThrower.ackMessage = mock.Mock(wraps=self.DLRThrower.ackMessage)
self.DLRThrower.rejectMessage = mock.Mock(wraps=self.DLRThrower.rejectMessage)
self.DLRThrower.rejectAndRequeueMessage = mock.Mock(wraps=self.DLRThrower.rejectAndRequeueMessage)
self.DLRThrower.smpp_dlr_callback = mock.Mock(wraps=self.DLRThrower.smpp_dlr_callback)
# Bind (as a transmitter so we get no deliverers for DLR)
yield self.connect('127.0.0.1', self.pbPort)
yield self.prepareRoutingsAndStartConnector()
self.smppc_config.bindOperation = 'transmitter'
yield self.smppc_factory.connectAndBind()
yield self.publishDLRContentForSmppapi('ESME_ROK', 'MSGID', 'username', '999', '000')
yield waitFor(3)
# Run tests
self.assertEqual(self.DLRThrower.smpp_dlr_callback.call_count, 3)
self.assertEqual(self.DLRThrower.ackMessage.call_count, 0)
self.assertEqual(self.DLRThrower.rejectMessage.call_count, 3)
self.assertEqual(self.DLRThrower.rejectAndRequeueMessage.call_count, 2)
# Unbind & Disconnect
yield self.smppc_factory.smpp.unbindAndDisconnect()
yield self.stopSmppClientConnectors()
@defer.inlineCallbacks
def test_throwing_smpps_without_smppsFactory(self):
self.DLRThrower.ackMessage = mock.Mock(wraps=self.DLRThrower.ackMessage)
self.DLRThrower.rejectMessage = mock.Mock(wraps=self.DLRThrower.rejectMessage)
self.DLRThrower.rejectAndRequeueMessage = mock.Mock(wraps=self.DLRThrower.rejectAndRequeueMessage)
self.DLRThrower.smpp_dlr_callback = mock.Mock(wraps=self.DLRThrower.smpp_dlr_callback)
# Remove smpps from self.DLRThrower
self.DLRThrower.smpps = None
self.DLRThrower.smpps_access = None
yield self.publishDLRContentForSmppapi('ESME_ROK', 'MSGID', 'username', '999', '000')
yield waitFor(1)
# Run tests
self.assertEqual(self.DLRThrower.smpp_dlr_callback.call_count, 1)
self.assertEqual(self.DLRThrower.ackMessage.call_count, 0)
self.assertEqual(self.DLRThrower.rejectMessage.call_count, 1)
self.assertEqual(self.DLRThrower.rejectAndRequeueMessage.call_count, 0)
|
py | 7dfea402adc440dfe3eebc49a97b2efa7fba5753 | import io
import sys
from pathlib import PurePath
from typing import Generic, TypeVar, Union
from checklisting.parser import BaseParser
R = TypeVar('R')
class ConfigurationLoader(Generic[R]):
def __init__(self, parser: BaseParser[R]) -> None:
super().__init__()
self._parser = parser
def load(self, source: Union[str, PurePath, io.BufferedIOBase, None] = None) -> R:
if not source:
return self._parser.load(sys.stdin)
if isinstance(source, io.BufferedIOBase):
return self._parser.load(source)
with open(source, 'r') as stream:
return self._parser.load(stream)
|
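A hedged usage sketch of `ConfigurationLoader`: `load()` falls back to stdin when no source is given, accepts an already-open buffered stream, or opens a path itself. The `EchoParser` below is a made-up stand-in for a concrete `BaseParser`:

# Hypothetical parser: returns the raw text of the stream. A real caller
# would supply e.g. a YAML- or JSON-backed BaseParser implementation.
class EchoParser(BaseParser[str]):
    def load(self, stream) -> str:
        return stream.read()

loader = ConfigurationLoader(EchoParser())
config = loader.load("checklist.yaml")  # str/PurePath -> opened via open(...)
# config = loader.load()                # None -> parsed from sys.stdin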
py | 7dfea5c8733a814b719aee966466cab94ea7a845 | #!C:\Users\anlan\PycharmProjects\cdut2016\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
)
|
py | 7dfea68d3e79af6eeeb0468a4c05795564b0fdc1 | def tree(cls, level=0):
yield cls.__name__, level
for sub_cls in cls.__subclasses__():
yield from tree(sub_cls, level + 1)
if __name__ == '__main__':
for cls_name, level in tree(BaseException):
indent = ' ' * 4 * level
print(f'{indent}{cls_name}')
|
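For reference, the script above walks `__subclasses__()` depth-first and prints an indented hierarchy; the opening lines look roughly like this (members and order vary across CPython versions):

BaseException
    Exception
        ArithmeticError
            FloatingPointError
            OverflowError
            ZeroDivisionError
        AssertionError
        AttributeError
    ...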
py | 7dfea741eddb0b17cffd09aef64dfc32ab13c3bc | # Copyright (c) 2020, Manfred Moitzi
# License: MIT License
import pytest
import os
import ezdxf
BASEDIR = os.path.dirname(__file__)
DATADIR = "data"
COLDFIRE = os.path.join(
ezdxf.EZDXF_TEST_FILES, "CADKitSamples/kit-dev-coldfire-xilinx_5213.dxf"
)
@pytest.mark.skipif(
not os.path.exists(COLDFIRE), reason="test data not present"
)
def test_kit_dev_coldfire():
doc = ezdxf.readfile(COLDFIRE)
auditor = doc.audit()
assert len(auditor) == 0
@pytest.fixture(params=["Leica_Disto_S910.dxf"])
def filename(request):
filename = os.path.join(BASEDIR, DATADIR, request.param)
if not os.path.exists(filename):
pytest.skip(f"File {filename} not found.")
return filename
def test_leica_disto_r12(filename):
doc = ezdxf.readfile(filename)
auditor = doc.audit()
assert len(auditor) == 0
|
py | 7dfea75969cdadcf7597d7f664a6569570ecad2b | import json
import os
import pytest
from toponym import settings
from toponym.recipes import Recipes
from toponym.utils import is_consistent_recipes
from toponym.utils import is_json
from toponym.utils import LanguageNotFoundError
@pytest.mark.parametrize("language", settings.LANGUAGE_DICT.keys())
def test_load_recipes_from_languages(language):
recipes = Recipes()
recipes.load_from_language(language=language)
assert recipes._dict
def test_recipes_load_failed_language_not_supported():
"""test load
"""
with pytest.raises(LanguageNotFoundError):
recipes_fails = Recipes()
recipes_fails.load_from_language(language="funkytown")
def test_recipes_load_with_input_dictionary():
recipe = {
"_default": {"nominative": [[""], 0], "genitive": [[""], 0]},
"i": {"nominative": [[""], 0], "genitive": [["o"], 1]},
}
recipes_test = Recipes()
recipes_test.load_from_dict(input_dict=recipe, language="test")
assert recipes_test._dict
@pytest.mark.parametrize(
"input_dict, expectation",
[
[1, pytest.raises(TypeError)],
[[1, 2, 3], pytest.raises(TypeError)],
["test", pytest.raises(TypeError)],
],
)
def test_recipes_load_with_input_dictionary_fails(input_dict, expectation):
with expectation:
recipes_test = Recipes()
recipes_test.load_from_dict(language="bla", input_dict=input_dict)
@pytest.mark.parametrize(
"language, file, expectation",
[
["test", 123, pytest.raises(TypeError)],
["test", "test", pytest.raises(FileNotFoundError)],
["test", [1, 2, 3], pytest.raises(TypeError)],
],
)
def test_recipes_load_with_input_filepath_fails(language, file, expectation):
with expectation:
recipes = Recipes()
recipes.load_from_file(language=language, filepath=file)
def test_recipes_load_file():
recipes_test = Recipes()
recipes_test.load_from_file(
language="test", filepath="./toponym/resources/_test.json"
)
assert recipes_test.is_loaded
def test_recipes_consistency():
list_dir = os.listdir(settings.PARENT_DIRECTORY + "/resources")
filepaths = [
settings.PARENT_DIRECTORY + "/resources" + "/{}".format(x)
for x in list_dir
if x.endswith(".json")
]
assert all([is_consistent_recipes(recipes) for recipes in filepaths])
def test_recipes_valid_json():
list_dir = os.listdir(settings.PARENT_DIRECTORY + "/resources")
filepaths = [
settings.PARENT_DIRECTORY + "/resources" + "/{}".format(x)
for x in list_dir
if x.endswith(".json")
]
for filepath in filepaths:
with open(filepath, "r", encoding="utf8") as f:
assert is_json(f.read())
def test_recipes_default_in_json():
list_dir = os.listdir(settings.PARENT_DIRECTORY + "/resources")
filepaths = [
settings.PARENT_DIRECTORY + "/resources" + "/{}".format(x)
for x in list_dir
if x.endswith(".json")
]
for filepath in filepaths:
with open(filepath, "r", encoding="utf8") as f:
recipes_check = json.loads(f.read())
assert isinstance(recipes_check, dict)
assert "_default" in recipes_check
|
py | 7dfea964de31d47df991bd1905f264be4a59c100 | class GreasePencilBrushes:
active = None
active_index = None
def new(self, name, set_active=False):
pass
def remove(self, brush):
pass
|
py | 7dfeaa1cd385cc2f366eb019265c35b1ebbd8869 | import OpenEXR, Imath  # openexr package (from pypi-install OpenEXR)
import numpy as np
PIXEL_TYPE = Imath.PixelType(OpenEXR.FLOAT)
def save_exr( fname, r=None, g=None, b=None, comments='' ):
r = np.array(r); assert r.ndim==2
g = np.array(g); assert g.ndim==2; assert g.shape==r.shape
b = np.array(b); assert b.ndim==2; assert b.shape==r.shape
header = OpenEXR.Header(r.shape[1], r.shape[0])
header['channels'] = {'R': Imath.Channel(PIXEL_TYPE),
'G': Imath.Channel(PIXEL_TYPE),
'B': Imath.Channel(PIXEL_TYPE),
}
header['comments'] = comments
out = OpenEXR.OutputFile(fname, header)
    # tobytes() replaces the long-deprecated tostring() (removed in modern numpy)
    data = {'R': r.astype(np.float32).tobytes(),
            'G': g.astype(np.float32).tobytes(),
            'B': b.astype(np.float32).tobytes()}
out.writePixels(data)
out.close()
def read_exr(file,full_output=False):
f = OpenEXR.InputFile(file)
dw = f.header()['dataWindow']
if 'comments' in f.header():
comments = f.header()['comments']
else:
comments = None
size = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1)
pt = Imath.PixelType(Imath.PixelType.FLOAT)
def read_chan(name):
datastr = f.channel(name, pt)
        data = np.frombuffer(datastr, dtype=np.float32)  # fromstring is deprecated for binary data
data.shape = (size[1], size[0]) # Numpy arrays are (row, col)
return data
r = read_chan('R')
g = read_chan('G')
b = read_chan('B')
f.close()
if full_output:
result = {'comments':comments,
'r':r, 'g':g, 'b':b,
}
else:
result = r,g,b
return result
|
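A hedged round-trip of the two helpers above — the file name is arbitrary, and float32 storage limits the precision of the comparison:

import numpy as np

h, w = 4, 8
r = np.random.rand(h, w)
g = np.random.rand(h, w)
b = np.random.rand(h, w)

save_exr("demo.exr", r=r, g=g, b=b, comments="round-trip test")
r2, g2, b2 = read_exr("demo.exr")
assert np.allclose(r, r2, atol=1e-6)  # stored as float32, so compare loosely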
py | 7dfeaa209c54c4d614b9723ed902f4e6c67f89d2 | #!/usr/bin/env python3
# coding: utf-8
import os
import logging
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from astropy.time import Time
from astropy import units as u
from astropy import constants as const
from astroquery.exceptions import RemoteServiceError
from astroquery.ned import Ned
from ztfquery.lightcurve import LCQuery
from astropy.table import Table
from astropy.coordinates import SkyCoord
from ztfquery.io import LOCALSOURCE
from nuztf.ampel_api import ampel_api_name
from nuztf.style import plot_dir, big_fontsize, base_width, base_height, dpi
from nuztf.utils import cosmo, is_ztf_name, is_tns_name, query_tns_by_name
from nuztf.observation_log import get_most_recent_obs
from nuztf.parse_nu_gcn import find_gcn_no, parse_gcn_circular
logger = logging.getLogger(__name__)
def format_date(t, atel=True):
t.format = "fits"
if atel:
frac_days = f"{t.mjd - int(t.mjd):.2f}"[1:]
t.out_subfmt = "date"
dt = "".join([t.value, frac_days])
else:
dt = t.value
return dt
def plot_irsa_lightcurve(
source_name: str,
nu_name: list = None,
source_coords: list = None,
source_redshift: float = None,
plot_mag: bool = False,
atel: bool = True,
plot_folder: str = plot_dir,
extra_folder: str = None,
logger=None,
check_obs: bool = True,
check_obs_lookback_weeks: float = 4,
from_cache: bool = False,
cache_dir: str = os.path.join(LOCALSOURCE, "cache/"),
expanded_labels: bool = True,
ylim: tuple = None,
):
plot_title = source_name
    if logger is None:
        logger = logging.getLogger(__name__)
# If there are no coordinates, try name resolve to get coordinates!
if source_coords is None:
# Try ampel to find ZTF coordinates
if is_ztf_name(name=source_name):
logger.info("Source name is a ZTF name.")
res = ampel_api_name(source_name, with_history=False)[0]
source_coords = [res["candidate"]["ra"], res["candidate"]["dec"]]
logger.info(f"Found ZTF coordinates for source {source_name}")
# Try TNS
elif is_tns_name(name=source_name):
logger.info("Source name is a TNS name.")
result_dict = query_tns_by_name(name=source_name, logger=logger)
if not result_dict:
logger.warning(f"{source_name} is not in TNS.")
if result_dict:
logger.info(f"Found {source_name} on TNS.")
res = result_dict["data"]["reply"]
ra = res["radeg"]
dec = res["decdeg"]
source_coords = [ra, dec]
if "redshift" in res.keys():
source_redshift = res["redshift"]
# Otherwise try NED
else:
result_table = Ned.query_object(source_name)
if len(result_table) == 0:
logger.warning(
f"Failed to resolve name {source_name} in NED. Trying to be clever instead."
)
querystring = "".join(
[
x
for x in source_name
if x in [str(i) for i in range(10)] + ["+", "-"]
]
)
            result_table = Ned.query_object(querystring)
if len(result_table) == 1:
source_coords = [result_table["RA"][0], result_table["DEC"][0]]
if "ZTF" in plot_title:
plot_title += f' ({result_table["Object Name"][0]})'
if str(result_table["Redshift"][0]) != "--":
                    source_redshift = result_table["Redshift"][0]
logger.info(
f"Using Astropy NED query result for name {source_name} ({source_coords})"
)
if source_coords is None:
sc = SkyCoord.from_name(source_name)
logger.info(
f"Using Astropy CDS query result for name {source_name} (RA={sc.ra}, Dec={sc.dec})"
)
source_coords = (sc.ra.value, sc.dec.value)
# Try to find a catalogue source nearby using coordinates
if np.logical_and("ZTF" in source_name, source_coords is not None):
c = SkyCoord(source_coords[0], source_coords[1], unit=u.deg, frame="icrs")
r = 0.5 * u.arcsecond
result_table = Ned.query_region(c, radius=r)
if len(result_table) == 1:
if "ZTF" in plot_title:
plot_title += f' ({result_table["Object Name"][0]})'
source_coords = [result_table["RA"][0], result_table["DEC"][0]]
if str(result_table["Redshift"][0]) != "--":
                source_redshift = result_table["Redshift"][0]
logger.info(
f"Found likely match to {source_name}"
f"(type = '{result_table['Type'][0]}'. "
f"distance = {result_table['Separation'][0]} arcsec')"
)
elif len(result_table) > 1:
logger.warning(
f"Found multiple possible cross-matches: {result_table['Object Name']}"
)
else:
logger.info("No NED crossmatch found.")
# Query IRSA, or load from cache
try:
os.makedirs(cache_dir)
except OSError:
pass
cache_path = os.path.join(cache_dir, f'{source_name.replace(" ", "")}.csv')
if from_cache:
logger.debug(f"Loading from {cache_path}")
df = pd.read_csv(cache_path)
else:
df = LCQuery.from_position(source_coords[0], source_coords[1], 1.0).data
logger.debug(f"Saving to {cache_path}")
df.to_csv(cache_path)
data = Table.from_pandas(df)
logger.info(f"There are a total of {len(data)} detections for {source_name}")
# Start Figure
plt.figure(figsize=(base_width, base_height), dpi=dpi)
if expanded_labels:
ax2 = plt.subplot(111)
ax = ax2.twiny()
else:
ax = plt.subplot(111)
# If you have a redshift, you can add a second y axis!
if source_redshift is None:
logger.info("Querying NED to check for a redshift")
try:
result_table = Ned.query_object(source_name)
if len(result_table["Redshift"]) == 1:
if str(result_table["Redshift"][0]) == "--":
raise RemoteServiceError
source_redshift = result_table["Redshift"][0]
logger.info(f"Found a redshift of {source_redshift}")
elif len(result_table["Redshift"]) > 1:
logger.warning(f"Found multiple redshifts: {result_table}")
else:
raise RemoteServiceError
except (RemoteServiceError, IndexError) as e:
logger.info("No redshift found")
if source_redshift is not None:
ax1b = ax.twinx()
redshift = 1.0 + source_redshift
if plot_mag:
dist_mod = 5 * (
np.log10(cosmo.luminosity_distance(z=(redshift - 1)).to(u.pc).value)
- 1.0
)
else:
conversion_factor = (
4
* np.pi
* cosmo.luminosity_distance(z=(redshift - 1)).to(u.cm) ** 2.0
/ (redshift)
)
cmap = {"zg": "g", "zr": "r", "zi": "orange"}
wl = {
"zg": 472.27,
"zr": 633.96,
"zi": 788.61,
}
markersize = 2.0
latest_index = list(data["mjd"]).index(max(data["mjd"]))
latest = data[latest_index]
dt = format_date(Time(latest["mjd"], format="mjd"), atel=atel)
logger.info(
f"Most recent detection on {dt} UT at a magnitude of "
f"{latest['filtercode'][1]}={latest['mag']:.2f}+/-{latest['magerr']:.2f}"
)
# If you want, you can check the most recent observation
if check_obs:
mro = get_most_recent_obs(
ra=source_coords[0],
dec=source_coords[1],
lookback_weeks_max=check_obs_lookback_weeks,
logger=logger,
)
if mro is not None:
ot = format_date(Time(mro["obsjd"], format="jd"), atel=atel)
logger.info(f"Most recent observation at {ot}")
else:
logger.info("No recent observation found.")
# Plot each band (g/r/i)
for fc in ["zg", "zr", "zi"]:
mask = data["filtercode"] == fc
mags = data["mag"][mask] * u.ABmag
magerrs = (data["magerr"][mask] + data["mag"][mask]) * u.ABmag
if plot_mag:
ax.errorbar(
data["mjd"][mask],
mags.value,
yerr=data["magerr"][mask],
marker="o",
linestyle=" ",
markersize=markersize,
c=cmap[fc],
label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
)
if source_redshift is not None:
ax1b.errorbar(
data["mjd"][mask],
mags.value - dist_mod,
yerr=data["magerr"][mask],
marker="o",
linestyle=" ",
markersize=markersize,
c=cmap[fc],
label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
)
else:
flux_j = mags.to(u.Jansky)
f = (const.c / (wl[fc] * u.nm)).to("Hz")
flux = (flux_j * f).to("erg cm-2 s-1")
jerrs = magerrs.to(u.Jansky)
ferrs = (jerrs * f).to("erg cm-2 s-1").value - flux.value
ax.errorbar(
data["mjd"][mask],
flux.to("erg cm-2 s-1").value,
yerr=ferrs,
marker="o",
linestyle=" ",
markersize=markersize,
c=cmap[fc],
label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
)
if source_redshift is not None:
l = flux * conversion_factor
ax1b.errorbar(
data["mjd"][mask],
l.to("erg s-1"),
marker="o",
linestyle=" ",
markersize=markersize,
c=cmap[fc],
label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
)
# You can force the y limits if you want
if ylim is not None:
ax.set_ylim(ylim)
if plot_mag:
ax.set_ylabel(r"Apparent magnitude [AB]", fontsize=big_fontsize)
ax.invert_yaxis()
if source_redshift is not None:
ax1b.set_ylabel(rf"Absolute magnitude [AB]", fontsize=big_fontsize)
y_min, y_max = ax.get_ylim()
ax1b.invert_yaxis()
ax1b.set_ylim(y_min - dist_mod, y_max - dist_mod)
else:
ax.set_ylabel(r"$\nu$F$_{\nu}$ [erg cm$^{-2}$ s$^{-1}$]", fontsize=big_fontsize)
ax.set_yscale("log")
if source_redshift is not None:
ax1b.set_ylabel(r"$\nu$L$_{\nu}$ [erg s$^{-1}$]", fontsize=big_fontsize)
ax1b.set_yscale("log")
y_min, y_max = ax.get_ylim()
ax1b.set_ylim(
y_min * conversion_factor.value, y_max * conversion_factor.value
)
ax.set_xlabel("Date [MJD]", fontsize=big_fontsize)
# Add neutrino
if nu_name is None:
nu_name = []
if not isinstance(nu_name, list):
nu_name = [nu_name]
for j, nu in enumerate(nu_name):
gcn_no = find_gcn_no(nu)
gcn_info = parse_gcn_circular(gcn_no)
ax.axvline(gcn_info["time"].mjd, linestyle=":", label=nu, color=f"C{j}")
if expanded_labels:
# Set up ISO dates
lmjd, umjd = ax.get_xlim()
lt = Time(lmjd, format="mjd")
ut = Time(umjd, format="mjd")
nt = Time.now()
nt.format = "fits"
mjds = []
labs = []
for year in range(2016, int(nt.value[:4]) + 1):
for k, month in enumerate([1, 7]):
t = Time(f"{year}-{month}-01T00:00:00.01", format="isot", scale="utc")
t.format = "fits"
t.out_subfmt = "date"
if np.logical_and(t > lt, t < ut):
mjds.append(t.mjd)
labs.append(t.value)
ax2.set_xticks(mjds)
ax2.set_xticklabels(labels=labs, rotation=80)
ax2.set_xlim(lmjd, umjd)
ax.set_title(f'ZTF Lightcurve of {plot_title.replace("J", " J")}', y=1.4)
ax2.tick_params(axis="both", which="major", labelsize=big_fontsize)
ax.tick_params(axis="both", which="major", labelsize=big_fontsize)
# plt.setp(ax2.get_yticklabels(), visible=True)
# ax.yaxis.set_tick_params(visible=True)
if source_redshift is not None:
ax1b.tick_params(axis="both", which="major", labelsize=big_fontsize)
ax.legend(
loc="upper center",
bbox_to_anchor=(0.5, 1.22 + 0.2 * float(expanded_labels)),
ncol=3 + len(nu_name),
fancybox=True,
fontsize=big_fontsize,
)
filename = f"{source_name.replace(' ', '')}_lightcurve{['_flux', ''][plot_mag]}.png"
output_path = os.path.join(plot_folder, f"{filename}")
logger.info(f"Saving to {output_path}")
plt.savefig(output_path, bbox_inches="tight", pad_inches=0.05)
if extra_folder is not None:
extra_path = os.path.join(extra_folder, f"{filename}")
logger.info(f"Saving to {extra_path}")
plt.savefig(extra_path, bbox_inches="tight", pad_inches=0.05)
|
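A hedged example call of `plot_irsa_lightcurve` — the source and neutrino names are illustrative; coordinates and redshift are resolved via Ampel/TNS/NED as coded above:

plot_irsa_lightcurve(
    source_name="ZTF19aapreis",   # illustrative ZTF name
    nu_name=["IC191001A"],        # illustrative neutrino alert
    plot_mag=False,               # plot nuF_nu flux rather than apparent magnitude
    check_obs=False,              # skip the most-recent-observation lookup
)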