Dataset schema (one table row per source file): blob_id string(40) | directory_id string(40) | path string(3 to 616) | content_id string(40) | detected_licenses sequence(0 to 112) | license_type 2 classes | repo_name string(5 to 115) | snapshot_id string(40) | revision_id string(40) | branch_name 777 classes | visit_date timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id int64 (4.92k to 681M, nullable) | star_events_count int64 (0 to 209k) | fork_events_count int64 (0 to 110k) | gha_license_id 22 classes | gha_event_created_at timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable) | gha_created_at timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable) | gha_language 149 classes | src_encoding 26 classes | language 1 class | is_vendor bool | is_generated bool | length_bytes int64 (3 to 10.2M) | extension 188 classes | content string(3 to 10.2M) | authors sequence(1) | author_id string(1 to 132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
49517e70736f11514ca7a72baad84680a1fbc737 | 2ed13136c2139e994472df9ab0a0c1eebde1dd73 | /srcs/mahjong/model/hallModel.py | b809b01afdaeb68d48303ac14d2271792bdf0902 | [] | no_license | rolllyroman/monitoring | 5a98b312ef3ac04b758a807319473366861b7fbc | 5859af04ef15b6865ef7a4fb844168cfc6d93685 | refs/heads/master | 2020-03-27T17:42:58.542132 | 2018-09-10T06:21:53 | 2018-09-10T06:21:53 | 146,868,967 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,896 | py | #-*- coding:utf-8 -*-
#!/usr/bin/python
"""
Author:$Author$
Date:$Date$
Revision:$Revision$
Description:
Hall (lobby) model
"""
from web_db_define import *
from datetime import datetime,timedelta
from wechat.wechatData import *
from admin import access_module
from config.config import *
import md5  # md5.new() is used throughout this module; assumed not re-exported by the wildcard imports above
import mahjong_pb2
import poker_pb2
import replay4proto_pb2
from mahjong.model.agentModel import getTopAgentId
from common import log_util
# import socket
# def sendRemoveIpaddr(address):
# host='183.60.133.234'
# port = 9797
# socket.setdefaulttimeout(50)
# sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
# try:
# sock.connect((host,port))
# address = address
# sock.sendall(address)
# print address
# sock.close()
# print 'done'
# except:
# print 'error'
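# Login/registration entry point. The `type` argument selects the auth flow:
#   0 = account/password, 1 = WeChat client code, 2 = cached WeChat account,
#   3/4 = WeChat web code.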
def onReg(redis, account, passwd, type, ip): # args: account, password, type; returns (account, password) on success, (None, None) on failure
curTime = datetime.now()
#print
log_util.debug('[try onReg] account[%s] passwd[%s] type[%s]'%(account,passwd,type))
    if type == 1: # WeChat client-code login
tokenMessage = checkWeixinCode(account)
if tokenMessage:
password = account
accessToken = tokenMessage["access_token"]
refreshToken = tokenMessage["refresh_token"]
openID = tokenMessage["openid"]
userData = getWeixinData(openID, accessToken)
unionid = userData['unionid']
if redis.exists(WEIXIN2ACCOUNT%(unionid)):
realAccount = redis.get(WEIXIN2ACCOUNT%(unionid))
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(realAccount)
table = redis.get(account2user_table)
redis.hmset(table, {'accessToken':accessToken, 'refreshToken':refreshToken, 'password':md5.new(password).hexdigest()})
else:
setOpenid2account(openID, accessToken, refreshToken, ip, redis, account)
redis.srem(FORMAT_LOGIN_POOL_SET,account)
return unionid, password
redis.srem(FORMAT_LOGIN_POOL_SET,account)
elif type == 2:
if redis.exists(WEIXIN2ACCOUNT%(account)):
realAccount = redis.get(WEIXIN2ACCOUNT%(account))
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(realAccount)
table = redis.get(account2user_table)
truePassword, openID, accessToken = redis.hmget(table, ('password', 'openid', 'accessToken'))
log_util.debug('type 2:passwd[%s] md5[%s] truePassword[%s]'%(md5.new(passwd).hexdigest(), passwd, truePassword))
if truePassword == md5.new(passwd).hexdigest():
userData = getWeixinData(openID, accessToken)
log_util.debug('onReg for type 2, userData:%s'%(userData))
if userData:
redis.hmset(table,
{
'nickname' : userData['nickname'],
'sex' : userData['sex'],
'headImgUrl' : userData['headimgurl']
}
)
redis.srem(FORMAT_LOGIN_POOL_SET,account)
return account, passwd
redis.srem(FORMAT_LOGIN_POOL_SET,account)
    elif type == 3 or type == 4: # WeChat web-code login (both types share one flow)
        tokenMessage = checkWeixinCodeWEB(account)
        if tokenMessage:
            password = account
            accessToken = tokenMessage["access_token"]
            refreshToken = tokenMessage["refresh_token"]
            openID = tokenMessage["openid"]
            userData = getWeixinData(openID, accessToken)
            unionid = userData['unionid']
            if redis.exists(WEIXIN2ACCOUNT%(unionid)):
                realAccount = redis.get(WEIXIN2ACCOUNT%(unionid))
                account2user_table = FORMAT_ACCOUNT2USER_TABLE%(realAccount)
                table = redis.get(account2user_table)
                redis.hmset(table, {'accessToken':accessToken, 'refreshToken':refreshToken, 'password':md5.new(password).hexdigest()})
            else:
                setOpenid2account(openID, accessToken, refreshToken, ip, redis, account)
            redis.srem(FORMAT_LOGIN_POOL_SET,account)
            return unionid, password
        redis.srem(FORMAT_LOGIN_POOL_SET,account)
elif type == 0:
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(account)
if redis.exists(account2user_table):
table = redis.get(account2user_table)
truePassword = redis.hget(table, 'password')
if truePassword == md5.new(passwd).hexdigest():
return account, passwd
redis.srem(FORMAT_LOGIN_POOL_SET,account)
return None, None
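# Fish-game variant of onReg: the same flow, but keyed on WEIXIN2ACCOUNT4FISH
# with a fallback to the hall mapping WEIXIN2ACCOUNT.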
def onRegFish(redis, account, passwd, type, ip): # args: account, password, type; returns (account, password) on success, (None, None) on failure
curTime = datetime.now()
#print
log_util.debug('[try onReg] account[%s] passwd[%s] type[%s]'%(account,passwd,type))
    if type == 1: # WeChat client-code login
tokenMessage = checkWeixinCode4fish(account)
if tokenMessage:
password = account
accessToken = tokenMessage["access_token"]
refreshToken = tokenMessage["refresh_token"]
openID = tokenMessage["openid"]
userData = getWeixinData(openID, accessToken)
unionid = userData['unionid']
if redis.exists(WEIXIN2ACCOUNT4FISH%(unionid)):# or redis.exists(WEIXIN2ACCOUNT%(unionid)):
realAccount = redis.get(WEIXIN2ACCOUNT4FISH%(unionid))
if not realAccount:
realAccount = redis.get(WEIXIN2ACCOUNT%(unionid))
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(realAccount)
table = redis.get(account2user_table)
redis.hmset(table, {'accessToken':accessToken, 'refreshToken':refreshToken, 'password':md5.new(password).hexdigest()})
else:
setOpenid2account4fish(openID, accessToken, refreshToken, ip, redis, account)
redis.srem(FORMAT_LOGIN_POOL_SET,account)
return unionid, password
redis.srem(FORMAT_LOGIN_POOL_SET,account)
elif type == 2:
if redis.exists(WEIXIN2ACCOUNT4FISH%(account)):# or redis.exists(WEIXIN2ACCOUNT%(account)):
realAccount = redis.get(WEIXIN2ACCOUNT4FISH%(account))
if not realAccount:
realAccount = redis.get(WEIXIN2ACCOUNT%(account))
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(realAccount)
table = redis.get(account2user_table)
truePassword, openID, accessToken = redis.hmget(table, ('password', 'openid', 'accessToken'))
log_util.debug('type 2:passwd[%s] md5[%s] truePassword[%s]'%(md5.new(passwd).hexdigest(), passwd, truePassword))
if truePassword == md5.new(passwd).hexdigest():
userData = getWeixinData(openID, accessToken)
log_util.debug('onReg for type 2, userData:%s'%(userData))
if userData:
redis.hmset(table,
{
'nickname' : userData['nickname'],
'sex' : userData['sex'],
'headImgUrl' : userData['headimgurl']
}
)
redis.srem(FORMAT_LOGIN_POOL_SET,account)
return account, passwd
redis.srem(FORMAT_LOGIN_POOL_SET,account)
    elif type == 3 or type == 4: # WeChat web-code login (both types share one flow)
        tokenMessage = checkWeixinCodeWEB(account)
        if tokenMessage:
            password = account
            accessToken = tokenMessage["access_token"]
            refreshToken = tokenMessage["refresh_token"]
            openID = tokenMessage["openid"]
            userData = getWeixinData(openID, accessToken)
            unionid = userData['unionid']
            if redis.exists(WEIXIN2ACCOUNT4FISH%(unionid)):# or redis.exists(WEIXIN2ACCOUNT%(unionid)):
                realAccount = redis.get(WEIXIN2ACCOUNT4FISH%(unionid))
                if not realAccount:
                    realAccount = redis.get(WEIXIN2ACCOUNT%(unionid))
                account2user_table = FORMAT_ACCOUNT2USER_TABLE%(realAccount)
                table = redis.get(account2user_table)
                redis.hmset(table, {'accessToken':accessToken, 'refreshToken':refreshToken, 'password':md5.new(password).hexdigest()})
            else:
                setOpenid2account4fish(openID, accessToken, refreshToken, ip, redis, account)
            redis.srem(FORMAT_LOGIN_POOL_SET,account)
            return unionid, password
        redis.srem(FORMAT_LOGIN_POOL_SET,account)
elif type == 0:
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(account)
if redis.exists(account2user_table):
table = redis.get(account2user_table)
truePassword = redis.hget(table, 'password')
if truePassword == md5.new(passwd).hexdigest():
return account, passwd
redis.srem(FORMAT_LOGIN_POOL_SET,account)
return None, None
def saveHotUpDateSetting(redis,settingInfo,sys="HALL"):
"""
保存热更新配置
"""
if sys == 'HALL':
hot_table = HOTUPDATE_TABLE
else:
hot_table = FISH_HOTUPDATE_TABLE
return redis.hmset(hot_table,settingInfo)
def getHotSettingField(redis,field):
"""
获取单个配置信息
"""
return redis.hget(HOTUPDATE_TABLE,field)
def getHotSettingAll(redis):
return redis.hgetall(HOTUPDATE_TABLE)
def get_fish_hall_setting(redis):
return redis.hgetall(FISH_HOTUPDATE_TABLE)
def getUserByAccount(redis, account):
"""
通过account获取玩家数据
"""
account2user_table = FORMAT_ACCOUNT2USER_TABLE%(account)
userTable = redis.get(account2user_table)
return userTable
def do_sessionExpire(redis,session,SessionTable,SESSION_TTL):
    """
    Refresh the session TTLs.
    """
    # refresh session (note: the SESSION_TTL argument is unused; TTLs are hard-coded)
    redis.expire(session['session_key'],60*60)
    redis.expire(SessionTable,60*10)
    session.expire()
def check_session_verfiy(redis,api_name,SessionTable,account,sid,verfiySid):
    '''
    Check whether the session is valid.
    return code,msg
    '''
    log_util.debug('[on refresh] account[%s] sid[%s]'%(account, sid))
    if verfiySid and sid != verfiySid:
        #session['member_account'],session['member_id'] = '',''
        return -4,'account already logged in elsewhere',False
    if not redis.exists(SessionTable):
        return -3,'sid expired',False
    user_table = getUserByAccount(redis, account)
    if not redis.exists(user_table):
        return -5,'user does not exist',False
    return 0,True,user_table
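# Copy the seated player's public info (found by chair/side) into the replay's
# selfInfo block, then re-serialize the whole replay protobuf.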
def packPrivaTeData4Game(chair, data, resp, proto):
    privateResp = proto()
    privateResp.ParseFromString(resp.privateData)
    # the loop variable used to shadow the `data` parameter; renamed to `player`
    for player in privateResp.data.gameInfo.roomInfo.playerList:
        if int(player.side) == int(chair):
            print 'replay side get,side:%s nickname:%s'%(player.side, player.nickname)
            privateResp.data.gameInfo.selfInfo.side = player.side
            privateResp.data.gameInfo.selfInfo.nickname = player.nickname
            privateResp.data.gameInfo.selfInfo.coin = player.coin
            privateResp.data.gameInfo.selfInfo.ip = player.ip
            privateResp.data.gameInfo.selfInfo.sex = player.sex
            privateResp.data.gameInfo.selfInfo.headImgUrl = player.headImgUrl
            privateResp.data.gameInfo.selfInfo.roomCards = 0
    resp.privateData = privateResp.SerializeToString()
    replayStr = resp.SerializeToString()
    return replayStr
def packPrivaTeData(chair, data):
    resp = replay4proto_pb2.ReplayData()
    resp.ParseFromString(data)
    refreshDataNameProtos = [mahjong_pb2.S_C_RefreshData, poker_pb2.S_C_RefreshData]
    # try each game's refresh-data proto until one parses; initialize the result
    # so the function cannot raise NameError when every proto fails
    replayStr = None
    for proto in refreshDataNameProtos:
        try:
            replayStr = packPrivaTeData4Game(chair, data, resp, proto)
            break
        except Exception as e:
            print 'packPrivaTeData error', e
    return replayStr
def getRuleText(rule, gameId, redis):
    ruleList = eval(rule) # NOTE: rule comes from redis; eval() of untrusted input is unsafe
    ruleText = 'Base stake: %s\n'%(max(int(ruleList[-1]), 1))
    for data in redis.lrange(USE_ROOM_CARDS_RULE%(gameId), 0, -1):
        datas = data.split(':')
        name, cards = datas[0], datas[1]
        try:
            playCount = int(datas[2])
        except:
            playCount = name
        if int(cards) == ruleList[-2]:
            ruleText += 'Rounds: %s\n'%(playCount)
    num = 0
    for ruleNum in redis.lrange(GAME2RULE%(gameId), 0, -1):
        ruleTile, ruleType, rule = redis.hmget(GAME2RULE_DATA%(gameId, ruleNum), ('title', 'type', 'rule'))
        ruleDataList = rule.split(',')
        if int(ruleType) == 1:
            #print '[on getRuleText]get ruleList[%s] num[%s]'%(ruleList, num)
            try:
                ruleText += '%s: %s\n'%(ruleTile, ruleDataList[int(ruleList[num])])
            except:
                ruleText += '%s: %s\n'%(ruleTile, ruleDataList[int(ruleList[num][0])])
        else:
            text = '%s: '%(ruleTile)
            textList = []
            for ruleData in ruleList[num]:
                textList.append(ruleDataList[ruleData])
            textData = ','.join(textList)
            text += textData
            ruleText = ruleText + text + '\n'
        num += 1
    ruleText = ruleText.decode('utf-8')
    return ruleText
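# Remove a user from an agent group: drop them from any matchmaking wait list,
# detach them from the group, and carry their room cards up to the top agent.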
def tryExitGroup(redis, userTable, account, id, groupId):
    pipe = redis.pipeline()
    key = redis.get(ACCOUNT2WAIT_JOIN_PARTY_TABLE%account)
    # for key in redis.keys(WAIT_JOIN_PARTY_ROOM_PLAYERS%('*', '*', '*')): # leave the list if still waiting to match in casual mode
    if key:
        waitJoinList = redis.lrange(key, 0, -1)
        if account in waitJoinList:
            pipe.lrem(key, account)
    pipe.srem(FORMAT_ADMIN_ACCOUNT_MEMBER_TABLE%(groupId), id) # the parent agent needs this
    pipe.hmset(userTable, {'parentAg':'', 'isVolntExitGroup':1,'lastGroup':groupId})
    # record the user's room cards onto the province-level (top) agent
    topAgId = getTopAgentId(redis,groupId)
    roomcard = redis.get(USER4AGENT_CARD%(groupId,id))
    if not roomcard:
        roomcard = 0
    print '[try exitGroup] topAgId[%s] roomCards[%s]'%(topAgId,roomcard)
    pipe.set(USER4AGENT_CARD%(topAgId,id),int(roomcard))
    pipe.execute()
def getGroupIds(redis,groupId):
    """
    Get all ancestor agent IDs.
    """
    Ids = []
    if redis.exists(AGENT_TABLE%(groupId)):
        parentId = redis.get(AGENT2PARENT%(groupId))
        if parentId:
            if int(parentId) == 1:
                return ['1']
            # include the direct parent, then recurse up the ancestor chain
            # (the original skipped intermediate parents and appended a falsy
            # parentId, which is assumed unintended)
            Ids.append(parentId)
            Ids.extend(getGroupIds(redis,parentId))
    return Ids
def getBroadcasts(redis,groupId,isNew=''):
"""
获取广播列表
"""
bIds = redis.lrange(HALL_BROADCAST_LIST,0,-1)
broadInfos = []
groupIds = getGroupIds(redis,groupId)
groupIds.append(groupId)
log_util.debug('[groupIds][%s] bids[%s]'%(groupIds,bIds))
for bid in bIds:
if redis.exists(FORMAT_BROADCAST_TABLE%(bid)):
bInfos = redis.hgetall(FORMAT_BROADCAST_TABLE%(bid))
if bInfos['ag'] in groupIds:
broadInfos.append(bInfos)
else:
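            # NOTE: ids are read from HALL_BROADCAST_LIST above, but stale ids
            # are removed from FORMAT_BROADCAST_LIST_TABLE; possibly intentional,
            # kept as-is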
redis.lrem(FORMAT_BROADCAST_LIST_TABLE,'1',bid)
broadcasts = {'broadcasts':broadInfos}
if isNew:
broadcasts['isNew'] = isNew
return broadcasts
def getHallBroadInfo(redis,group_id,broad_table,broad_belone):
    """
    Get the hall broadcast list.
    """
    play_set = redis.smembers(HALL_BRO_PLAY_SET)
    broad_list = []
    def _collect(broads):
        # append the detail of every broadcast that is currently playing
        for broad in broads:
            if broad in play_set:
                broadInfo = redis.hgetall(HALL_BRO_TABLE%(broad))
                broad_list.append({
                    'content': broadInfo['content'],
                    'repeatInterval': int(broadInfo['per_sec']),
                })
    _collect(redis.lrange(broad_table%(1),0,-1))
    _collect(redis.lrange(broad_table%(0),0,-1))
    # NOTE: an unconditional return used to sit here, which made the
    # broad_belone branches below unreachable; it is assumed unintended
    if broad_belone == 'HALL':
        _collect(redis.lrange(HALL_BRO_CONTAIN_AG_LIST%(2,group_id),0,-1))
        return broad_list
    _collect(redis.lrange(HALL_BRO_CONTAIN_AG_LIST%(3,group_id),0,-1))
    return broad_list
def extendSession(redis,session,SessionTable):
"""
延长session有效时间
"""
redis.expire(session['session_key'],60*60)
redis.expire(SessionTable,60*40)
| [
"[email protected]"
] | |
ed2d7ba12a554f73862f406384a7473d3bc00b64 | beaf759183cfb5a86aecdff12a85b572b6ad7d96 | /faro/test/test_faro_entrypoint.py | 4a49855e7bca5361034b5a94fa78c09ef3621453 | [
"MIT"
] | permissive | austinsonger/FARO | 6ad33318eb687a1be9deb725aef2edfc12d164d0 | dc075fa3af8d68ade571eeb14ff8359f28ee0083 | refs/heads/master | 2022-12-06T23:31:58.499815 | 2020-08-03T16:39:15 | 2020-08-03T16:39:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,401 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import json
import argparse
import os
from os import path
from faro.faro_entrypoint import faro_execute
INPUT_FILE = 'sensitive_data.pdf'
INPUT_FILE_OCR = 'ocr.pdf'
INPUT_FILE_PROTECTED = 'protected.pdf'
INPUT_FILE_NO_METADATA = 'no_metadata.pdf'
INPUT_FILE_SPLIT_LINES = 'split_lines.docx'
INPUT_FILE_SIGNATURE = 'signature_boe.pdf'
INPUT_FILE_TESTS_TXT = 'tests.txt'
INPUT_FILE_NO_SENSITIVE = 'lorem.rtf'
CWD = os.path.dirname(__file__)
INPUT_PATH = os.path.join(CWD, 'data')
SCORE_EXT = 'score'
ENTITY_EXT = 'entity'
SIGNATURE = ['María de los Ángeles Hernández Toribio']
MONETARY_QUANTITY = ["4,99", "49,99"]
EMAILS = ["[email protected]", "[email protected]"]
MOBILE_PHONES = ["654456654", "666444222", "651.651.651"]
DOCUMENT_ID = ["C59933143", "E-38008785", "36663760-N", "96222560J"]
FINANCIAL_DATA = ["ES6621000418401234567891", "5390700823285988", "4916697015082", "4929432390175839"]
CUSTOM_WORDS = ["confidencial", "contraseña"]
LANGUAGE_METADA = "meta:lang"
def _get_file_data(file_path):
with open(file_path, "r") as f:
file_text = f.read()
return json.loads(file_text)
def _faro_run(input_path, input_file, file_type=ENTITY_EXT):
_type = '%s.%s' % (input_file, file_type)
params = argparse.Namespace()
params.input_file = '%s/%s' % (input_path, input_file)
faro_execute(params)
faro_data = _get_file_data('%s/%s' % (input_path, _type))
if file_type == ENTITY_EXT:
faro_data = faro_data['entities']
return faro_data
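# NOTE: faro_execute is assumed to derive default '<input>.entity'/'<input>.score'
# output paths from params.input_file when they are not set explicitly; the
# helper above and several tests below rely on that behaviour.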
def _remove_output_files():
dir_list = os.listdir(INPUT_PATH)
for file_name in dir_list:
try:
if file_name.find(SCORE_EXT) != -1 or file_name.find(ENTITY_EXT) != -1:
os.remove('%s/%s' % (INPUT_PATH, file_name))
except FileNotFoundError:
pass
class FaroEntrypointTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
_remove_output_files()
cls.FARO_ENTITY_TEST_1 = _faro_run(INPUT_PATH, INPUT_FILE)
@classmethod
def tearDownClass(cls):
_remove_output_files()
def setUp(self):
""" Setting up for the test """
pass
def tearDown(self):
""" Cleaning up after the test """
pass
def test_document_id_detection(self):
faro_document_ids_entity = list(self.FARO_ENTITY_TEST_1['document_id'].keys())
self.assertTrue(len(faro_document_ids_entity) == len(DOCUMENT_ID))
diff_list = (set(faro_document_ids_entity) ^ set(DOCUMENT_ID))
self.assertTrue(len(diff_list) == 0)
def test_document_financial_data_detection(self):
faro_financial_data_entity = list(self.FARO_ENTITY_TEST_1['financial_data'].keys())
self.assertTrue(len(faro_financial_data_entity) == len(FINANCIAL_DATA))
diff_list = (set(faro_financial_data_entity) ^ set(FINANCIAL_DATA))
self.assertTrue(len(diff_list) == 0)
def test_mobile_phone_detection(self):
faro_mobile_phone_number_entity = list(self.FARO_ENTITY_TEST_1['mobile_phone_number'].keys())
for i in range(len(faro_mobile_phone_number_entity)):
faro_mobile_phone_number_entity[i] = faro_mobile_phone_number_entity[i].replace(" ", "")
self.assertTrue(len(faro_mobile_phone_number_entity) == len(MOBILE_PHONES))
diff_list = (set(faro_mobile_phone_number_entity) ^ set(MOBILE_PHONES))
self.assertTrue(len(diff_list) == 0)
def test_email_detection(self):
faro_email_entity = list(self.FARO_ENTITY_TEST_1['personal_email'].keys())
self.assertTrue(len(faro_email_entity) == len(EMAILS))
diff_list = (set(faro_email_entity) ^ set(EMAILS))
self.assertTrue(len(diff_list) == 0)
def test_monetary_quantity_detection(self):
faro_monetary_quantity_entity = list(self.FARO_ENTITY_TEST_1['monetary_quantity'].keys())
self.assertTrue(len(faro_monetary_quantity_entity) == len(MONETARY_QUANTITY))
diff_list = (set(faro_monetary_quantity_entity) ^ set(MONETARY_QUANTITY))
self.assertTrue(len(diff_list) == 0)
def test_no_metadata(self):
faro_no_metadata_score = _faro_run(INPUT_PATH, INPUT_FILE_NO_METADATA, SCORE_EXT)
self.assertTrue(faro_no_metadata_score["meta:date"] is None)
self.assertTrue(faro_no_metadata_score["meta:author"] is None)
def test_language(self):
faro_language_score = _faro_run(INPUT_PATH, INPUT_FILE, SCORE_EXT)
self.assertTrue(faro_language_score[LANGUAGE_METADA] == "es")
def test_unsupported_language(self):
faro_language_score = _faro_run(INPUT_PATH, INPUT_FILE_PROTECTED, SCORE_EXT)
self.assertTrue(faro_language_score[LANGUAGE_METADA] == "unk")
    def test_catalan_language(self):  # renamed: a second test_unsupported_language would shadow the one above
faro_language_score = _faro_run(INPUT_PATH, INPUT_FILE_TESTS_TXT, SCORE_EXT)
self.assertTrue(faro_language_score[LANGUAGE_METADA] == "ca")
def test_protected_document(self):
faro_protected_score = _faro_run(INPUT_PATH, INPUT_FILE_PROTECTED, SCORE_EXT)
self.assertTrue(faro_protected_score["meta:encrypted"] == 1)
def test_params_rename_output_files(self):
entity_file_name = 'test_entity'
score_file_name = 'test_score'
params = argparse.Namespace()
params.input_file = '%s/%s' % (INPUT_PATH, INPUT_FILE)
params.output_entity_file = '%s/%s.%s' % (INPUT_PATH, entity_file_name, ENTITY_EXT)
params.output_score_file = '%s/%s.%s' % (INPUT_PATH, score_file_name, SCORE_EXT)
faro_execute(params)
self.assertTrue(path.exists(params.output_entity_file))
self.assertTrue(path.exists(params.output_score_file))
def test_params_verbose(self):
entity_file_name = 'test_verbose_entity'
score_file_name = 'test_verbose_score'
params = argparse.Namespace()
params.input_file = '%s/%s' % (INPUT_PATH, INPUT_FILE)
params.output_entity_file = '%s/%s.%s' % (INPUT_PATH, entity_file_name, ENTITY_EXT)
params.output_score_file = '%s/%s.%s' % (INPUT_PATH, score_file_name, SCORE_EXT)
params.verbose = True
faro_execute(params)
faro_verbose = _get_file_data(params.output_entity_file)
faro_verbose_entity = faro_verbose['entities']
self.assertTrue(faro_verbose_entity['person'] is not None)
self.assertTrue(faro_verbose_entity['phone_number'] is not None)
self.assertTrue(faro_verbose_entity['probable_currency_amount'] is not None)
def test_params_split_lines(self):
params = argparse.Namespace()
params.input_file = '%s/%s' % (INPUT_PATH, INPUT_FILE_SPLIT_LINES)
params.split_lines = True
faro_execute(params)
faro_split_lines = _get_file_data(params.output_entity_file)
faro_split_lines_entity = faro_split_lines['entities']
self.assertTrue(faro_split_lines_entity.get('mobile_phone_number') is None)
def test_ocr(self):
faro_ocr = _faro_run(INPUT_PATH, INPUT_FILE_OCR)
faro_ocr_financial_data = list(faro_ocr['financial_data'].keys())
self.assertTrue(len(faro_ocr_financial_data) == len(FINANCIAL_DATA))
diff_list = (set(faro_ocr_financial_data) ^ set(FINANCIAL_DATA))
self.assertTrue(len(diff_list) == 0)
def test_signature(self):
faro_signature_score = _faro_run(INPUT_PATH, INPUT_FILE_SIGNATURE, SCORE_EXT)['signature']
faro_signature_entity = _get_file_data(os.path.join(INPUT_PATH, INPUT_FILE_SIGNATURE + "." + ENTITY_EXT))
faro_signature_entity = list(faro_signature_entity['entities']['signature'])
self.assertTrue(faro_signature_score == 1)
self.assertTrue(faro_signature_entity[0] == SIGNATURE[0])
def test_custom_words(self):
faro_custom_score = _faro_run(INPUT_PATH, INPUT_FILE_TESTS_TXT, SCORE_EXT)['custom_words']
faro_custom_entity = _get_file_data(os.path.join(INPUT_PATH, INPUT_FILE_TESTS_TXT + "." + ENTITY_EXT))
faro_entities = list(faro_custom_entity['entities']['custom_words'])
self.assertTrue(faro_custom_score == 2)
diff_list = (set(faro_entities) ^ set(CUSTOM_WORDS))
self.assertTrue(len(diff_list) == 0)
def test_corp_emails(self):
entity_file_name = 'test_corp_email_entity'
score_file_name = 'test_corp_email_score'
params = argparse.Namespace()
params.input_file = '%s/%s' % (INPUT_PATH, INPUT_FILE_TESTS_TXT)
params.output_entity_file = '%s/%s.%s' % (INPUT_PATH, entity_file_name, ENTITY_EXT)
params.output_score_file = '%s/%s.%s' % (INPUT_PATH, score_file_name, SCORE_EXT)
params.verbose = True
faro_execute(params)
faro_entities = _get_file_data(params.output_entity_file)['entities']
self.assertTrue(faro_entities['corporate_email'] is not None)
self.assertEqual(len(faro_entities['corporate_email']), 2)
def test_no_sensitive_data(self):
faro_custom_score = _faro_run(INPUT_PATH, INPUT_FILE_NO_SENSITIVE, SCORE_EXT)['score']
faro_custom_entity = _get_file_data(os.path.join(INPUT_PATH, INPUT_FILE_NO_SENSITIVE + "." + ENTITY_EXT))
faro_entities = list(faro_custom_entity['entities'])
self.assertTrue(faro_custom_score == "low")
self.assertTrue(len(faro_entities) == 0)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
926f48063b1bb48ae00d07dc888717f3e602f13d | 3451a6d056098c83ff517960d1ecef51b35d266e | /blog_02/app_usuarios/views.py | 08f0a3f2b0f0672632b0e4a96daf608083748fed | [] | no_license | luisdebia123/blog_2 | aa9e3f03ebafdbad8741d30205dd1de5c48b83d0 | dc945114b7558e03e72dc084b48692a16fd8cee2 | refs/heads/master | 2023-05-28T11:54:03.444971 | 2021-06-17T04:23:14 | 2021-06-17T04:23:14 | 377,480,832 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,570 | py | # app_detalle
from django.shortcuts import render, redirect, get_object_or_404
#---------------------------------------------#
#from .forms import CustomUserCreationForm (only if it is defined in forms.py)
from .models import Usuarios
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView
from django.views.generic import TemplateView
# Create your views here.
# app_usuarios #
#def index(request):
# return render(request,'app_categorias/index.html')
class UsuariosListView(ListView):
model = Usuarios
template_name = 'app_usuarios/index.html'
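    # ListView exposes the queryset to the template as `object_list` (and the
    # model-derived alias `usuarios_list`)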
class Create_Usuarios(TemplateView):
template_name = 'app_usuarios/crear.html' | [
"[email protected]"
] | |
c2f3056d6eedeb364d87f6a1cf4eb4ee930dfc1f | af43615e07f2bfaa908d6d96b4c90f98ce3ad47b | /rdr_service/lib_fhir/fhirclient_3_0_0/models/auditevent.py | 1c26ac3c0771d8ba65433a426deca7d5f33fe601 | [
"BSD-3-Clause"
] | permissive | all-of-us/raw-data-repository | 11aa25385543f5f8ef706663b79ce181775c1c9a | 461ae46aeda21d54de8a91aa5ef677676d5db541 | refs/heads/devel | 2023-09-01T06:47:25.710651 | 2023-09-01T01:18:56 | 2023-09-01T01:18:56 | 66,000,771 | 46 | 22 | BSD-3-Clause | 2023-09-14T21:06:38 | 2016-08-18T13:47:08 | Python | UTF-8 | Python | false | false | 13,760 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 (http://hl7.org/fhir/StructureDefinition/AuditEvent) on 2017-03-22.
# 2017, SMART Health IT.
from . import domainresource
class AuditEvent(domainresource.DomainResource):
""" Event record kept for security purposes.
A record of an event made for purposes of maintaining a security log.
Typical uses include detection of intrusion attempts and monitoring for
inappropriate usage.
"""
resource_type = "AuditEvent"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.action = None
""" Type of action performed during the event.
Type `str`. """
self.agent = None
""" Actor involved in the event.
List of `AuditEventAgent` items (represented as `dict` in JSON). """
self.entity = None
""" Data or objects used.
List of `AuditEventEntity` items (represented as `dict` in JSON). """
self.outcome = None
""" Whether the event succeeded or failed.
Type `str`. """
self.outcomeDesc = None
""" Description of the event outcome.
Type `str`. """
self.purposeOfEvent = None
""" The purposeOfUse of the event.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.recorded = None
""" Time when the event occurred on source.
Type `FHIRDate` (represented as `str` in JSON). """
self.source = None
""" Audit Event Reporter.
Type `AuditEventSource` (represented as `dict` in JSON). """
self.subtype = None
""" More specific type/id for the event.
List of `Coding` items (represented as `dict` in JSON). """
self.type = None
""" Type/identifier of event.
Type `Coding` (represented as `dict` in JSON). """
super(AuditEvent, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(AuditEvent, self).elementProperties()
js.extend([
("action", "action", str, False, None, False),
("agent", "agent", AuditEventAgent, True, None, True),
("entity", "entity", AuditEventEntity, True, None, False),
("outcome", "outcome", str, False, None, False),
("outcomeDesc", "outcomeDesc", str, False, None, False),
("purposeOfEvent", "purposeOfEvent", codeableconcept.CodeableConcept, True, None, False),
("recorded", "recorded", fhirdate.FHIRDate, False, None, True),
("source", "source", AuditEventSource, False, None, True),
("subtype", "subtype", coding.Coding, True, None, False),
("type", "type", coding.Coding, False, None, True),
])
return js
from . import backboneelement
class AuditEventAgent(backboneelement.BackboneElement):
""" Actor involved in the event.
An actor taking an active role in the event or activity that is logged.
"""
resource_type = "AuditEventAgent"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.altId = None
""" Alternative User id e.g. authentication.
Type `str`. """
self.location = None
""" Where.
Type `FHIRReference` referencing `Location` (represented as `dict` in JSON). """
self.media = None
""" Type of media.
Type `Coding` (represented as `dict` in JSON). """
self.name = None
""" Human-meaningful name for the agent.
Type `str`. """
self.network = None
""" Logical network location for application activity.
Type `AuditEventAgentNetwork` (represented as `dict` in JSON). """
self.policy = None
""" Policy that authorized event.
List of `str` items. """
self.purposeOfUse = None
""" Reason given for this user.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.reference = None
""" Direct reference to resource.
Type `FHIRReference` referencing `Practitioner, Organization, Device, Patient, RelatedPerson` (represented as `dict` in JSON). """
self.requestor = None
""" Whether user is initiator.
Type `bool`. """
self.role = None
""" Agent role in the event.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.userId = None
""" Unique identifier for the user.
Type `Identifier` (represented as `dict` in JSON). """
super(AuditEventAgent, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(AuditEventAgent, self).elementProperties()
js.extend([
("altId", "altId", str, False, None, False),
("location", "location", fhirreference.FHIRReference, False, None, False),
("media", "media", coding.Coding, False, None, False),
("name", "name", str, False, None, False),
("network", "network", AuditEventAgentNetwork, False, None, False),
("policy", "policy", str, True, None, False),
("purposeOfUse", "purposeOfUse", codeableconcept.CodeableConcept, True, None, False),
("reference", "reference", fhirreference.FHIRReference, False, None, False),
("requestor", "requestor", bool, False, None, True),
("role", "role", codeableconcept.CodeableConcept, True, None, False),
("userId", "userId", identifier.Identifier, False, None, False),
])
return js
class AuditEventAgentNetwork(backboneelement.BackboneElement):
""" Logical network location for application activity.
Logical network location for application activity, if the activity has a
network location.
"""
resource_type = "AuditEventAgentNetwork"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.address = None
""" Identifier for the network access point of the user device.
Type `str`. """
self.type = None
""" The type of network access point.
Type `str`. """
super(AuditEventAgentNetwork, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(AuditEventAgentNetwork, self).elementProperties()
js.extend([
("address", "address", str, False, None, False),
("type", "type", str, False, None, False),
])
return js
class AuditEventEntity(backboneelement.BackboneElement):
""" Data or objects used.
Specific instances of data or objects that have been accessed.
"""
resource_type = "AuditEventEntity"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.description = None
""" Descriptive text.
Type `str`. """
self.detail = None
""" Additional Information about the entity.
List of `AuditEventEntityDetail` items (represented as `dict` in JSON). """
self.identifier = None
""" Specific instance of object.
Type `Identifier` (represented as `dict` in JSON). """
self.lifecycle = None
""" Life-cycle stage for the entity.
Type `Coding` (represented as `dict` in JSON). """
self.name = None
""" Descriptor for entity.
Type `str`. """
self.query = None
""" Query parameters.
Type `str`. """
self.reference = None
""" Specific instance of resource.
Type `FHIRReference` referencing `Resource` (represented as `dict` in JSON). """
self.role = None
""" What role the entity played.
Type `Coding` (represented as `dict` in JSON). """
self.securityLabel = None
""" Security labels on the entity.
List of `Coding` items (represented as `dict` in JSON). """
self.type = None
""" Type of entity involved.
Type `Coding` (represented as `dict` in JSON). """
super(AuditEventEntity, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(AuditEventEntity, self).elementProperties()
js.extend([
("description", "description", str, False, None, False),
("detail", "detail", AuditEventEntityDetail, True, None, False),
("identifier", "identifier", identifier.Identifier, False, None, False),
("lifecycle", "lifecycle", coding.Coding, False, None, False),
("name", "name", str, False, None, False),
("query", "query", str, False, None, False),
("reference", "reference", fhirreference.FHIRReference, False, None, False),
("role", "role", coding.Coding, False, None, False),
("securityLabel", "securityLabel", coding.Coding, True, None, False),
("type", "type", coding.Coding, False, None, False),
])
return js
class AuditEventEntityDetail(backboneelement.BackboneElement):
""" Additional Information about the entity.
Tagged value pairs for conveying additional information about the entity.
"""
resource_type = "AuditEventEntityDetail"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.type = None
""" Name of the property.
Type `str`. """
self.value = None
""" Property value.
Type `str`. """
super(AuditEventEntityDetail, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(AuditEventEntityDetail, self).elementProperties()
js.extend([
("type", "type", str, False, None, True),
("value", "value", str, False, None, True),
])
return js
class AuditEventSource(backboneelement.BackboneElement):
""" Audit Event Reporter.
The system that is reporting the event.
"""
resource_type = "AuditEventSource"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.identifier = None
""" The identity of source detecting the event.
Type `Identifier` (represented as `dict` in JSON). """
self.site = None
""" Logical source location within the enterprise.
Type `str`. """
self.type = None
""" The type of source where event originated.
List of `Coding` items (represented as `dict` in JSON). """
super(AuditEventSource, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(AuditEventSource, self).elementProperties()
js.extend([
("identifier", "identifier", identifier.Identifier, False, None, True),
("site", "site", str, False, None, False),
("type", "type", coding.Coding, True, None, False),
])
return js
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import coding
except ImportError:
coding = sys.modules[__package__ + '.coding']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
| [
"[email protected]"
] | |
08116a00157fc76d178c93e63aba8866d03e1c4e | b9963ffb80aad7e057bc375edb85ac7ed5a837d0 | /adventofcode2019/02.py | 1f51e1764d11d946029f5ae774e31ffbf6f4705c | [
"MIT"
] | permissive | matslindh/codingchallenges | a2db9f4579e9f35189f5cdf74590863cf84bdf95 | a846e522f7a31e988c470cda87955ee3ef20a274 | refs/heads/main | 2022-12-23T15:56:19.776354 | 2022-12-15T21:03:37 | 2022-12-15T21:03:37 | 76,491,177 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,733 | py | def run(memory):
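    # Intcode interpreter: opcode 1 adds, opcode 2 multiplies, opcode 99 halts;
    # the operand and destination fields hold positional addresses.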
pc = 0
while memory[pc] != 99:
if memory[pc] == 1:
memory[memory[pc+3]] = memory[memory[pc+1]] + memory[memory[pc+2]]
elif memory[pc] == 2:
memory[memory[pc+3]] = memory[memory[pc+1]] * memory[memory[pc+2]]
pc += 4
return memory
def evaluate(s):
memory = [int(r.strip()) for r in s.split(',')]
return run(memory)
def find_parameters(s, goal):
memory = [int(r.strip()) for r in s.split(',')]
    # noun and verb each range over 0..99 inclusive (range(0, 99) missed 99)
    for x in range(0, 100):
        for y in range(0, 100):
mem = list(memory)
mem[1] = x
mem[2] = y
if run(mem)[0] == goal:
return x,y
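# Advent of Code 2019 day 2 part 2 scores the answer as 100 * noun + verb,
# e.g. x, y = find_parameters(program, 19690720); print(100 * x + y)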
def test_evaluate():
assert evaluate('1,0,0,0,99') == [2,0,0,0,99]
assert evaluate('2,3,0,3,99') == [2,3,0,6,99]
assert evaluate('2,4,4,5,99,0') == [2,4,4,5,99,9801]
assert evaluate('1,1,1,4,99,5,6,0,99') == [30,1,1,4,2,5,6,0,99]
if __name__ == '__main__':
print(evaluate('1,12,2,3,1,1,2,3,1,3,4,3,1,5,0,3,2,13,1,19,1,19,10,23,2,10,23,27,1,27,6,31,1,13,31,35,1,13,35,39,1,39,10,43,2,43,13,47,1,47,9,51,2,51,13,55,1,5,55,59,2,59,9,63,1,13,63,67,2,13,67,71,1,71,5,75,2,75,13,79,1,79,6,83,1,83,5,87,2,87,6,91,1,5,91,95,1,95,13,99,2,99,6,103,1,5,103,107,1,107,9,111,2,6,111,115,1,5,115,119,1,119,2,123,1,6,123,0,99,2,14,0,0'))
print(find_parameters('1,12,2,3,1,1,2,3,1,3,4,3,1,5,0,3,2,13,1,19,1,19,10,23,2,10,23,27,1,27,6,31,1,13,31,35,1,13,35,39,1,39,10,43,2,43,13,47,1,47,9,51,2,51,13,55,1,5,55,59,2,59,9,63,1,13,63,67,2,13,67,71,1,71,5,75,2,75,13,79,1,79,6,83,1,83,5,87,2,87,6,91,1,5,91,95,1,95,13,99,2,99,6,103,1,5,103,107,1,107,9,111,2,6,111,115,1,5,115,119,1,119,2,123,1,6,123,0,99,2,14,0,0', 19690720))
| [
"[email protected]"
] | |
84c5d9573226e76a809023134456aa0ebbf95103 | 639359b9cfc88e02968923c9dfc57d626cdaec9b | /boardapp/board/migrations/0004_board.py | d7726e44bbd0dfeab8ee0697fc670fc156bdeaec | [] | no_license | ElvinKim/django-angular-board-project | ee220585a1f64804dff718066ca2d00f749e8c6c | de06a560c16a4f1db66afb15e54471ad0e9d104b | refs/heads/master | 2021-01-20T21:11:51.009496 | 2019-02-09T07:04:46 | 2019-02-09T07:04:46 | 62,854,114 | 3 | 1 | null | 2018-11-07T09:46:29 | 2016-07-08T02:55:53 | Python | UTF-8 | Python | false | false | 939 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-11 05:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('board', '0003_delete_board'),
]
operations = [
migrations.CreateModel(
name='Board',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('content', models.TextField()),
('user', models.IntegerField(default=0)),
('view_cnt', models.IntegerField(default=0)),
('moddt', models.DateTimeField()),
('regdt', models.DateTimeField()),
],
options={
'db_table': 'tbl_board',
},
),
]
| [
"[email protected]"
] | |
ed41664cc42a14be55f7006ebf125f618d472233 | 4e186c81ba3ad30b98e76eb6b684b66b8a5c61a8 | /src/devilry_settings/devilry_settings/default_settings.py | 778740418c78bdd373f7ead000bb57d845e9db90 | [] | no_license | unioslo/devilry-django | 0805e9d691586f17e52cef142de5ef4c232a3665 | 29b250ba1ed843e90bc70bc50561ab09a8c96721 | refs/heads/master | 2020-12-31T07:19:03.162554 | 2014-04-10T12:14:54 | 2014-04-10T12:14:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,426 | py | # Enable the Celery task queue
import djcelery
djcelery.setup_loader()
########################################################################
#
# Defaults for django settings
# - See: https://docs.djangoproject.com/en/dev/ref/settings/
#
########################################################################
DEBUG = False
EXTJS4_DEBUG = DEBUG
TEMPLATE_DEBUG = DEBUG
TIME_ZONE = 'Europe/Oslo'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
FORMAT_MODULE_PATH = 'devilry_settings.formats'
LOGIN_URL = '/authenticate/login'
STATIC_URL = '/static/'
STATIC_ROOT = 'static'
DATABASES = {}
EMAIL_SUBJECT_PREFIX = '[Devilry] '
ROOT_URLCONF = 'devilry_settings.default_root_urlconf'
AUTH_PROFILE_MODULE = 'core.DevilryUserProfile'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
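# The console email backend writes outgoing mail to stdout; production
# deployments are expected to override this with a real backend.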
INSTALLED_APPS = ['django.contrib.markup',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.staticfiles',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin',
'djcelery',
'errortemplates',
'crispy_forms',
'djangorestframework',
'gunicorn',
'extjs4',
'haystack',
'south',
'celery_haystack',
'django_decoupled_docs',
'devilry.apps.core',
'devilry.apps.theme',
'devilry.apps.extjshelpers',
'devilry.apps.extjsux',
'devilry.apps.developertools',
'devilry.apps.jsfiledownload',
'devilry.apps.approved_gradeeditor',
'devilry.apps.manual_gradeeditor',
'devilry.apps.autograde_gradeeditor',
'devilry.apps.basicform_gradeeditor',
'devilry.apps.commentform_gradeeditor',
'devilry.apps.statistics',
'devilry.apps.markup',
'devilry.apps.student',
'devilry.apps.examiner',
'devilry.apps.administrator',
'devilry.apps.superadmin',
'devilry.apps.authenticate',
'devilry.apps.gradeeditors',
'devilry.apps.send_email_to_students',
'devilry_extjsextras',
'devilry_theme',
'devilry_usersearch',
'devilry_authenticateduserinfo',
'devilry_header',
'devilry_useradmin',
'devilry_helplinks',
'devilry_frontpage',
'devilry_student',
'devilry_i18n',
'devilry_settings',
'devilry_subjectadmin',
'devilry_nodeadmin',
'devilry_search',
'devilry_qualifiesforexam',
'devilry_qualifiesforexam_approved',
'devilry_qualifiesforexam_points',
'devilry_qualifiesforexam_select',
'devilry_mathjax',
'devilry_examiner',
'devilry_gradingsystem',
'devilry_gradingsystemplugin_points',
'devilry_gradingsystemplugin_approved',
'devilry_rest',
]
TEMPLATE_CONTEXT_PROCESSORS = ("django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.debug",
"django.core.context_processors.request",
'django.contrib.messages.context_processors.messages',
'extjs4.context_processors.extjs4',
'devilry.apps.theme.templatecontext.template_variables')
MIDDLEWARE_CLASSES = ['django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'devilry_i18n.middleware.LocaleMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'devilry.utils.logexceptionsmiddleware.TracebackLoggingMiddleware']
#######################################################################
#
# Testing
#
#######################################################################
TEST_RUNNER = 'devilry_settings.testsuiterunner.FilterableTestSuiteRunner'
TEST_FILTER = {
'exclude': [
'django.*', 'djangorestframework.*',
'devilry.apps.examiner.tests.simplified.*',
'devilry.apps.student.tests.simplified.*',
'devilry.apps.student.tests.simplified.*',
'devilry_search.tests.*', # Ignored when running all tests because they requir a fullfeatured search backend, like solr, to work
],
'include': ['devilry*']
}
##################################################################################
#
# Haystack (search)
#
##################################################################################
HAYSTACK_SITECONF = 'devilry_search.haystack_search_sites'
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://127.0.0.1:8983/solr'
########################################################################
#
# Celery
#
########################################################################
BROKER_URL = 'amqp://devilry:secret@localhost:5672/devilryhost'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
########################################################################
#
# Defaults for settings defined by Devilry.
#
########################################################################
# Make sure this does not end with / (i.e. '' means / is the main page).
# DEVILRY_URLPATH_PREFIX = '/django/devilry'
DEVILRY_URLPATH_PREFIX = ''
# The default grade-plugin:
DEVILRY_DEFAULT_GRADEEDITOR='approved'
DEVILRY_STATIC_URL = '/static' # Must not end in / (this means that '' is the server root)
DEVILRY_THEME_URL = DEVILRY_STATIC_URL + '/theme/themes/devilry'
DEVILRY_EXTJS_URL = DEVILRY_STATIC_URL + '/extjs4'
DEVILRY_MATHJAX_URL = '{}/devilry_mathjax/MathJax.js'.format(DEVILRY_STATIC_URL)
DEVILRY_LOGOUT_URL = '/authenticate/logout'
DEVILRY_HELP_URL = 'https://devilry-userdoc.readthedocs.org'
#Set max file size to 5MB. Files greater than this size are split into chunks of this size.
DEVILRY_MAX_ARCHIVE_CHUNK_SIZE = 5000000
DEVILRY_SEND_EMAIL_TO_USERS = True
DEVILRY_EMAIL_SUBJECT_PREFIX_ADMIN = '[devilry-admin] '
DEVILRY_EMAIL_SIGNATURE = "This is a message from the Devilry assignment delivery system. " \
"Please do not respond to this email."
DEVILRY_DELIVERY_STORE_BACKEND = 'devilry.apps.core.deliverystore.FsHierDeliveryStore'
DEVILRY_FSHIERDELIVERYSTORE_INTERVAL = 1000
DEVILRY_SYNCSYSTEM = 'YOUR SYNC SYSTEM HERE'
DEVILRY_EMAIL_DEFAULT_FROM = '[email protected]'
DEVILRY_SYSTEM_ADMIN_EMAIL = '[email protected]'
DEVILRY_SCHEME_AND_DOMAIN = 'https://devilry.example.com'
#: Email pattern. Set this, and add 'devilry.apps.autoset_empty_email_by_username' to INSTALLED_APPS
#: to automatically set email to "<username>@DEVILRY_DEFAULT_EMAIL_SUFFIX" if it is not set when a user is saved.
#DEVILRY_DEFAULT_EMAIL_SUFFIX = 'example.com'
#: When sorting by fullname, would you like to sort by last name? Currently
#: only affects the overview over an entire period.
DEVILRY_SORT_FULL_NAME_BY_LASTNAME = True
#: Messages that are displayed in the 3 dashboards for users with no permission to the dashboard
#: The body of each message can contain html. For example, you can add an email link using: <a href="mailto:[email protected]">[email protected]</a>
DEVILRY_STUDENT_NO_PERMISSION_MSG = {'title': 'No published assignments',
'body': 'You are not registered as a student on any assignments in Devilry. This is usually because you subject/course administrator has not published any assignments yet. Contact your subject/course administrator if this is wrong.'}
DEVILRY_EXAMINER_NO_PERMISSION_MSG = {'title': 'You are not an examiner',
'body': 'You are not registered as an examiner on any publshed assignments in Devilry. If this is wrong, please contact the subject/course administrator.'}
DEVILRY_ADMINISTRATOR_NO_PERMISSION_MSG = {'title': 'You are not an administrator',
'body': 'You are not registered as an administrator on any Node, Subject/Course, Period/Semester or Assignment in Devilry. If this is wrong, please contact the system administrator.'}
DEVILRY_QUALIFIESFOREXAM_PLUGINS = [
'devilry_qualifiesforexam_approved.all',
'devilry_qualifiesforexam_approved.subset',
'devilry_qualifiesforexam_points',
'devilry_qualifiesforexam_select',
]
CRISPY_TEMPLATE_PACK = 'bootstrap3'
#: Deadline handling method:
#:
#: 0: Soft deadlines
#: 1: Hard deadlines
DEFAULT_DEADLINE_HANDLING_METHOD = 0
#: Url where users are directed when they do not have the permissions they believe they should have.
DEVILRY_LACKING_PERMISSIONS_URL = None
#: Url where users are directed when they want to know what to do if their personal info in Devilry is wrong.
DEVILRY_WRONG_USERINFO_URL = None
#: Django apps that override the Devilry javascript translations (which is most
#: of the Devilry user interface).
DEVILRY_JAVASCRIPT_LOCALE_OVERRIDE_APPS = tuple()
#: Default language
LANGUAGE_CODE = 'en'
#: Available languages
gettext_noop = lambda s: s
LANGUAGES = [('en', gettext_noop('English')),
('nb', gettext_noop('Norwegian Bokmal'))]
#: Enable MathJax?
DEVILRY_ENABLE_MATHJAX = True
###################################################
# Setup logging using the defaults - logs to stderr
###################################################
from devilry_settings.log import create_logging_config
LOGGING = create_logging_config()
| [
"[email protected]"
] | |
e81b788920912c0d9f56722dd3d855601c8582d6 | 40b42ccf2b6959d6fce74509201781be96f04475 | /mmocr/datasets/base_dataset.py | 5a39bf46548e9d20996abe52a3d2ffdda518eaeb | [
"Apache-2.0"
] | permissive | xdxie/WordArt | 2f1414d8e4edaa89333353d0b28e5096e1f87263 | 89bf8a218881b250d0ead7a0287526c69586c92a | refs/heads/main | 2023-05-23T02:04:22.185386 | 2023-03-06T11:51:43 | 2023-03-06T11:51:43 | 515,485,694 | 106 | 12 | null | null | null | null | UTF-8 | Python | false | false | 5,469 | py | # Copyright (c) OpenMMLab. All rights reserved.
import numpy as np
from mmcv.utils import print_log
from mmdet.datasets.builder import DATASETS
from mmdet.datasets.pipelines import Compose
from torch.utils.data import Dataset
from mmocr.datasets.builder import build_loader
@DATASETS.register_module()
class BaseDataset(Dataset):
"""Custom dataset for text detection, text recognition, and their
downstream tasks.
1. The text detection annotation format is as follows:
The `annotations` field is optional for testing
(this is one line of anno_file, with line-json-str
converted to dict for visualizing only).
.. code-block:: json
{
"file_name": "sample.jpg",
"height": 1080,
"width": 960,
"annotations":
[
{
"iscrowd": 0,
"category_id": 1,
"bbox": [357.0, 667.0, 804.0, 100.0],
"segmentation": [[361, 667, 710, 670,
72, 767, 357, 763]]
}
]
}
2. The two text recognition annotation formats are as follows:
The `x1,y1,x2,y2,x3,y3,x4,y4` field is used for online crop
augmentation during training.
format1: sample.jpg hello
format2: sample.jpg 20 20 100 20 100 40 20 40 hello
Args:
ann_file (str): Annotation file path.
pipeline (list[dict]): Processing pipeline.
loader (dict): Dictionary to construct loader
to load annotation infos.
img_prefix (str, optional): Image prefix to generate full
image path.
test_mode (bool, optional): If set True, try...except will
be turned off in __getitem__.
"""
def __init__(self,
ann_file,
loader,
pipeline,
img_prefix='',
test_mode=False):
super().__init__()
self.test_mode = test_mode
self.img_prefix = img_prefix
self.ann_file = ann_file
# load annotations
loader.update(ann_file=ann_file)
self.data_infos = build_loader(loader)
# processing pipeline
self.pipeline = Compose(pipeline)
# set group flag and class, no meaning
# for text detect and recognize
self._set_group_flag()
self.CLASSES = 0
def __len__(self):
return len(self.data_infos)
def _set_group_flag(self):
"""Set flag."""
self.flag = np.zeros(len(self), dtype=np.uint8)
def pre_pipeline(self, results):
"""Prepare results dict for pipeline."""
results['img_prefix'] = self.img_prefix
def prepare_train_img(self, index):
"""Get training data and annotations from pipeline.
Args:
index (int): Index of data.
Returns:
dict: Training data and annotation after pipeline with new keys
introduced by pipeline.
"""
img_info = self.data_infos[index]
results = dict(img_info=img_info)
self.pre_pipeline(results)
return self.pipeline(results)
    def prepare_test_img(self, index):
        """Get testing data from pipeline.
        Args:
            index (int): Index of data.
        Returns:
            dict: Testing data after pipeline with new keys introduced by
                pipeline.
        """
        # the parameter was named img_info, but it is an index passed straight
        # through to prepare_train_img; renamed for clarity
        return self.prepare_train_img(index)
def _log_error_index(self, index):
"""Logging data info of bad index."""
try:
data_info = self.data_infos[index]
img_prefix = self.img_prefix
print_log(f'Warning: skip broken file {data_info} '
f'with img_prefix {img_prefix}')
except Exception as e:
print_log(f'load index {index} with error {e}')
def _get_next_index(self, index):
"""Get next index from dataset."""
self._log_error_index(index)
index = (index + 1) % len(self)
return index
def __getitem__(self, index):
"""Get training/test data from pipeline.
Args:
index (int): Index of data.
Returns:
dict: Training/test data.
"""
if self.test_mode:
return self.prepare_test_img(index)
while True:
try:
data = self.prepare_train_img(index)
if data is None:
raise Exception('prepared train data empty')
break
except Exception as e:
print_log(f'prepare index {index} with error {e}')
index = self._get_next_index(index)
return data
def format_results(self, results, **kwargs):
"""Placeholder to format result to dataset-specific output."""
pass
def evaluate(self, results, metric=None, logger=None, **kwargs):
"""Evaluate the dataset.
Args:
results (list): Testing results of the dataset.
metric (str | list[str]): Metrics to be evaluated.
logger (logging.Logger | str | None): Logger used for printing
related information during evaluation. Default: None.
Returns:
dict[str: float]
"""
raise NotImplementedError
| [
"[email protected]"
] | |
e23459a3a02e02800b2ef6783cb1df6e5086e855 | d16fbe6ec22af65fb21e31839f3565cee6c32739 | /bin/casasim_go_v20190816.py | 7d1ee42466cc0d0c2a41374581f1c2aabd515542 | [] | no_license | 1054/Crab.Toolkit.PdBI | 4320ae59fc9e0c7e4334deb8e6b65aafd57a6321 | 037a589586c61ac0b27bbe5e1b556532847e85fb | refs/heads/master | 2023-08-13T13:42:39.282276 | 2023-07-28T20:15:27 | 2023-07-28T20:15:27 | 62,636,065 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,490 | py | #!/usr/bin/env python
#
# This code needs to be ran in CASA.
#
# This code simulates ALMA data.
#
# Usage:
# casa
#   execfile('casasim_go.py') <TODO>
#
# Usage 2:
# casa
# sys.path.append('/Users/dzliu/Cloud/Github/Crab.Toolkit.PdBI/bin')
# simfreq = '339.0 GHz'
# simcell = '0.062 arcsec'
# import casasim_go
# reload(casasim_go)
# skymodel, complist = casasim_go.simulate_Gaussian(locals())
# casasim_go.simulate_Visibilities(locals())
#
# 20190723: init
#
from __future__ import print_function
import os, sys, re, json, copy, time, datetime, shutil
import numpy as np
import inspect
import random
#
# A function to get arg list of another function
#
#def get_arg_list(func_name):
# inspect__dir__ = dir(inspect)
# arg_list = []
# if 'signature' in inspect__dir__:
# arg_list = inspect.signature(func_name).parameters.keys()
# arg_list = list(arg_list)
# elif 'getfullargspec' in inspect__dir__:
# arg_list = inspect.getfullargspec(func_name)[0]
# elif 'getargspec' in inspect__dir__:
# arg_list = inspect.getargspec(func_name)[0]
# else:
# print('Error! Could not call inspect.getargspec() or inspect.getfullargspec() or inspect.signature()!')
# sys.exit()
# return arg_list
#
# Simulate a Gaussian shape source
#
def simulate_Gaussian(locals_dict):
    # minimal sketch, assuming a single Gaussian component (values taken
    # from the commented-out example further down); delegates to
    # simulate_Model() below so the usage shown in the header still works
    default_models = [{'ra': '10h00m00.0s', 'dec': '-30d00m00.0s',
                       'flux': 2.0, 'fluxunit': 'mJy', 'shape': 'Gaussian',
                       'majoraxis': '0.6arcsec', 'minoraxis': '0.4arcsec',
                       'positionangle': '0.0deg'}]
    return simulate_Model(default_models, locals_dict)
#
# Simulate sky model
#
def simulate_Model(casasim_source_models, locals_dict):
#
qa = locals_dict['qa']
cl = locals_dict['cl']
ia = locals_dict['ia']
exportfits = locals_dict['exportfits']
#
# load variables
if not ('simfreq' in locals_dict):
simfreq = '339.0GHz'
#raise ValueError('Please set simfreq!')
else:
simfreq = locals_dict['simfreq']
if not ('simcell' in locals_dict):
simcell = '0.062arcsec'
#raise ValueError('Please set simcell!')
else:
simcell = locals_dict['simcell']
if not ('simsize' in locals_dict):
simsize = 256
else:
simsize = locals_dict['simsize']
if not ('skymodel' in locals_dict):
skymodel = "casasim_Model.fits"
else:
skymodel = locals_dict['skymodel']
if skymodel == '':
raise ValueError('Error! skymodel is empty!')
return
if not ('complist' in locals_dict):
complist = "casasim_Model.cl"
else:
complist = locals_dict['complist']
if complist == '':
raise ValueError('Error! complist is empty!')
return
#
# check input models
if not (type(casasim_source_models) is list):
casasim_source_models = [casasim_source_models]
for i,casasim_source_model in enumerate(casasim_source_models):
if not (type(casasim_source_model) is dict):
raise ValueError('Error! The %d-th model of the input model list is not a dict!'%(i))
return
if not ('ra' in casasim_source_model and 'dec' in casasim_source_model and 'shape' in casasim_source_model and 'flux' in casasim_source_model):
raise ValueError('Error! The %d-th model of the input model list does not have \'ra\', \'dec\', \'shape\' or \'flux\'!'%(i))
return
if casasim_source_model['shape'] == 'Gaussian':
if not ('majoraxis' in casasim_source_model and 'minoraxis' in casasim_source_model and 'positionangle' in casasim_source_model):
if 'size' in casasim_source_model:
if not (type(casasim_source_model['size']) is list):
casasim_source_model['size'] = [float(casasim_source_model['size'])]
if len(casasim_source_model['size']) == 1:
casasim_source_models[i]['majoraxis'] = casasim_source_model['size'][0]
casasim_source_models[i]['minoraxis'] = casasim_source_model['size'][0]
casasim_source_models[i]['positionangle'] = 0.0
elif len(casasim_source_model['size']) == 2:
casasim_source_models[i]['majoraxis'] = casasim_source_model['size'][0]
casasim_source_models[i]['minoraxis'] = casasim_source_model['size'][1]
casasim_source_models[i]['positionangle'] = 0.0
elif len(casasim_source_model['size']) >= 3:
casasim_source_models[i]['majoraxis'] = casasim_source_model['size'][0]
casasim_source_models[i]['minoraxis'] = casasim_source_model['size'][1]
casasim_source_models[i]['positionangle'] = casasim_source_model['size'][2]
#
#if not ('casasim_source_models' in locals_dict):
# casasim_source_models = []
# #casasim_source_models.append({'ra':'10h00m00.0s','dec':'-30d00m00.0s','flux':2.0,'fluxunit':'mJy','shape':'Gaussian','majoraxis':'0.6arcsec','minoraxis':'0.4arcsec','positionangle':'0.0deg'})
# #<TODO># test reading ds9 regions file
# with open('ds9.reg', 'r') as fp:
# for fline in fp.readlines():
# if fline.startswith('ellipse'):
# ra, dec, majoraxis, minoraxis, positionangle = re.sub(r'ellipse\(([0-9:+-\.]+)\s*,\s*([0-9:+-\.]+)\s*,\s*([0-9Ee:+-\.\"]+)\s*,\s*([0-9Ee:+-\.\"]+)\s*,\s*([0-9Ee:+-\.]+)\s*\)\s*.*', r'\1 \2 \3 \4 \5', fline.strip()).split(' ')
# if ra.find(':')>=0: ra = re.sub(r'([0-9+-]+):([0-9+-]+):([0-9+-\.]+)', r'\1h\2m\3s', ra)
# if dec.find(':')>=0: dec = re.sub(r'([0-9+-]+):([0-9+-]+):([0-9+-\.]+)', r'\1d\2m\3s', dec)
# if majoraxis.endswith('"'): majoraxis = majoraxis.replace('"','arcsec')
# if minoraxis.endswith('"'): minoraxis = minoraxis.replace('"','arcsec')
# majoraxisvalue = qa.convert(majoraxis,'arcsec')['value']
# minoraxisvalue = qa.convert(minoraxis,'arcsec')['value']
# positionanglevalue = float(positionangle) # deg
# if majoraxisvalue < minoraxisvalue:
# majoraxis, minoraxis = minoraxis, majoraxis
# positionangle = '%s'%(positionanglevalue + 90.0)
# positionangle += 'deg'
# flux_max = 2.0 #<TODO># test random flux between 0.2 - 2.0 mJy/beam
# flux_min = 0.2 #<TODO># test random flux between 0.2 - 2.0 mJy/beam
# flux = (random.random() - 0.0) / (1.0 - 0.0) * (flux_max - flux_min) + flux_min #<TODO># test random flux
# casasim_source_models.append({'ra':ra,'dec':dec,'flux':flux,'fluxunit':'mJy','shape':'Gaussian','majoraxis':majoraxis,'minoraxis':minoraxis,'positionangle':positionangle})
# #print(casasim_source_models[-1])
#
# closes any open component lists, if any.
cl.done()
#
# backup previous result if any
    skymodelname = re.sub(r'^(.*)\.fits', r'\1', skymodel, flags=re.IGNORECASE)  # the 4th positional arg of re.sub is count, not flags
if os.path.isdir(skymodelname+'.im'):
if os.path.isdir(skymodelname+'.im'+'.backup'):
shutil.rmtree(skymodelname+'.im'+'.backup')
shutil.move(skymodelname+'.im', skymodelname+'.im'+'.backup')
if os.path.isdir(complist):
if os.path.isdir(complist+'.backup'):
shutil.rmtree(complist+'.backup')
shutil.move(complist, complist+'.backup')
#
# prepare to set sim image center RA Dec by taking the position of the first model source
simCenRA = None
simCenDec = None
#
# prepare to add components
cl_addcomponent_arg_list = ['flux','fluxunit','shape','majoraxis','minoraxis','positionangle']
for imodel, casasim_source_model in enumerate(casasim_source_models):
cl_component_properties = {}
for key in casasim_source_model.keys():
if key in cl_addcomponent_arg_list and key != 'dir' and key != 'freq':
cl_component_properties[key] = casasim_source_model[key]
print('component %d, %s = %s'%(imodel, key, cl_component_properties[key]))
cl.addcomponent(dir='J2000 %s %s'%(casasim_source_model['ra'], casasim_source_model['dec']),
freq=simfreq,
**cl_component_properties)
if simCenRA is None: simCenRA = casasim_source_model['ra']
if simCenDec is None: simCenDec = casasim_source_model['dec']
#
# print message
print('simCenRA = %s'%(simCenRA))
print('simCenDec = %s'%(simCenDec))
print('simfreq = %s'%(simfreq))
print('simcell = %s'%(simcell))
print('simsize = %s'%(simsize))
print('complist = %s'%(complist))
print('skymodel = %s'%(skymodel))
print('casasim_source_models = %s'%(str(casasim_source_models)))
#
# process image
ia.fromshape(skymodelname+".im", [simsize,simsize,1,1], overwrite=True) # image a
cs = ia.coordsys() # coordinate system
cs.setunits(['rad', 'rad', '', 'Hz'])
cell_rad = qa.convert(qa.quantity(simcell), "rad")['value']
cs.setincrement([-cell_rad,cell_rad],'direction')
cs.setreferencevalue([qa.convert(simCenRA,'rad')['value'], qa.convert(simCenDec,'rad')['value']], type="direction")
cs.setreferencevalue(qa.convert(qa.quantity(simfreq),'Hz')['value'], 'spectral')
cs.setincrement('31.25MHz','spectral')
ia.setcoordsys(cs.torecord())
ia.setbrightnessunit("Jy/pixel")
ia.modify(cl.torecord(),subtract=False)
exportfits(imagename = skymodelname+'.im', fitsimage = skymodelname+'.fits', overwrite = True)
print('Output to "%s"!'%(skymodelname+'.im'))
print('Output to "%s"!'%(skymodelname+'.fits'))
#
cl.rename(complist)
cl.done()
print('Output to "%s"!'%(complist))
return skymodel, complist
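# Usage sketch (assumes a CASA session, so locals() carries qa/cl/ia/exportfits;
# the source values below are illustrative only):
# simfreq, simcell = '339.0GHz', '0.062arcsec'
# my_models = [{'ra': '10h00m00.0s', 'dec': '-30d00m00.0s', 'shape': 'Gaussian',
#               'flux': 1.0, 'fluxunit': 'mJy',
#               'size': ['0.6arcsec', '0.4arcsec']}]
# skymodel, complist = simulate_Model(my_models, locals())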
#
#locals_dict.update({'skymodel':skymodelname+'.fits', 'complist':complist})
#
#
#
def simulate_Visibilities(locals_dict):
#
simobserve = locals_dict['simobserve']
qa = locals_dict['qa']
imhead = locals_dict['imhead']
split = locals_dict['split']
exportuvfits = locals_dict['exportuvfits']
#inp = locals_dict['inp']
#
# load variables
if not ('project' in locals_dict):
project = "casasim_Project"
else:
project = locals_dict['project']
if not ('skymodel' in locals_dict):
skymodel = "casasim_Model.fits"
else:
skymodel = locals_dict['skymodel']
if not ('complist' in locals_dict):
complist = ""
else:
complist = locals_dict['complist']
if not ('compwidth' in locals_dict):
compwidth = ''
else:
compwidth = locals_dict['compwidth']
if not ('inwidth' in locals_dict):
inwidth = ''
else:
inwidth = locals_dict['inwidth']
if not ('antennalist' in locals_dict):
antennalist = 'alma;0.1arcsec'
else:
antennalist = locals_dict['antennalist']
if not ('totaltime' in locals_dict):
totaltime = ''
else:
totaltime = locals_dict['totaltime']
if not ('integration' in locals_dict):
integration = '600s'
else:
integration = locals_dict['integration']
if not ('thermalnoise' in locals_dict):
thermalnoise = '' # 'tsys-atm' (user_pwv=XXX)
else:
thermalnoise = locals_dict['thermalnoise']
if not ('obsmode' in locals_dict):
obsmode = 'int' # 'int' or 'sd'
else:
obsmode = locals_dict['obsmode']
if not ('direction' in locals_dict):
direction = '' # "J2000 10h00m00.0s -30d00m00.0s" # If left unset, simobserve will use the center of the skymodel image. -- not working?
else:
direction = locals_dict['direction']
if not ('mapsize' in locals_dict):
mapsize = '5arcsec'
else:
mapsize = locals_dict['mapsize']
#
# print message
print('project = %s'%(project))
print('skymodel = %s'%(skymodel))
#print('complist = %s'%(complist))
#print('direction = %s'%(direction))
#
# backup previous result if any
if os.path.isdir(os.path.join(project, 'split_field_0_spw_0.ms')):
if os.path.isdir(os.path.join(project, 'split_field_0_spw_0.ms'+'.backup')):
shutil.rmtree(os.path.join(project, 'split_field_0_spw_0.ms'+'.backup'))
shutil.move(os.path.join(project, 'split_field_0_spw_0.ms'),
os.path.join(project, 'split_field_0_spw_0.ms'+'.backup'))
#
# run simobserve task
print('Running simobserve(project=\'%s\', ...)'%(project))
project_in = project
simobserve(project = project,
skymodel = skymodel,
complist = complist,
direction = direction,
mapsize = mapsize,
ptgfile = '',
compwidth = compwidth,
inwidth = inwidth,
obsmode = obsmode,
antennalist = antennalist,
integration = integration,
thermalnoise = thermalnoise,
verbose = True,
overwrite = True,
)
project = project_in # it seems 'project' variable got changed after simobserve ...
#
# check output
if antennalist.endswith('.cfg'):
outname = antennalist.replace('.cfg','').replace('alma.','alma_')
else:
outname = antennalist.replace(';','_').replace('alma.','alma_')
if not (os.path.isdir(os.path.join(project, project+'.'+outname+'.ms'))):
raise Exception('Error! Failed to run simobserve()! Could not find "%s"!'%(os.path.join(project, project+'.'+outname+'.ms')))
print('Output to "%s"!'%(os.path.join(project, project+'.'+outname+'.ms')))
#
# exportuvfits
split(vis = os.path.join(project, project+'.'+outname+'.ms'),
outputvis = os.path.join(project, 'split_field_0_spw_0.ms'),
keepmms = False,
keepflags = False,
field = '0',
spw = '0',
timebin = '30s',
)
if not (os.path.isdir(os.path.join(project, 'split_field_0_spw_0.ms'))):
raise Exception('Error! Failed to run split()! Could not find "%s"!'%(os.path.join(project, 'split_field_0_spw_0.ms')))
print('Output to "%s"!'%(os.path.join(project, 'split_field_0_spw_0.ms')))
#
# exportuvfits
exportuvfits(vis = os.path.join(project, 'split_field_0_spw_0.ms'),
fitsfile = os.path.join(project, 'split_field_0_spw_0.uvfits'),
multisource = False,
combinespw = False,
overwrite = True,
)
if not (os.path.isfile(os.path.join(project, 'split_field_0_spw_0.uvfits'))):
raise Exception('Error! Failed to run exportuvfits()! Could not find "%s"!'%(os.path.join(project, 'split_field_0_spw_0.uvfits')))
print('Output to "%s"!'%(os.path.join(project, 'split_field_0_spw_0.uvfits')))
#
#
#
#def convolve_model_image_with_beam(locals_dict):
# #
# imsmooth = locals_dict['imsmooth']
# #
# # load variables
# if not ('project' in locals_dict):
# project = "casasim_Project"
# else:
# project = locals_dict['project']
# if not ('skymodel' in locals_dict):
# skymodel = "casasim_Gaussian.fits"
# else:
# skymodel = locals_dict['skymodel']
# if not ('beam' in locals_dict):
# skymodel = "casasim_Gaussian.fits"
# else:
# skymodel = locals_dict['skymodel']
# #
# # check antennalist
# if not ('antennalist' in locals_dict):
# raise Exception('Errro! antennalist was not set!')
# else:
# antennalist = locals_dict['antennalist']
# #
# if antennalist.endswith('.cfg'):
# outname = antennalist.replace('.cfg','').replace('alma.','alma_')
# else:
# outname = antennalist.replace(';','_').replace('alma.','alma_')
# #
# if not (os.path.isdir(os.path.join(project, project+'.'+outname+'.ms'))):
# raise Exception('Error! Could not find "%s"! Please run simobserve() first!'%(os.path.join(project, project+'.'+outname+'.ms')))
# #
# print('Reading "%s"'%(os.path.join(project, project+'.'+outname+'.ms')))
# #
# # get clean beam size
# restoringbeam =
# #
# # skymodelname
# skymodelname = re.sub(r'\.fits$', r'', skymodel, re.IGNORECASE)
# #
# # output a smoothed version
# imsmooth(imagename = skymodel, kernel = 'gauss', beam = beam)
# print('Output to "%s"!'%(skymodelname+'_convolved.fits'))
| [
"[email protected]"
] | |
754c57955dc4711c3f1b16359dad22056c067dde | d97b9dc98c65ed3114a6449e0dab9c6c9bd1c01a | /tests/parser_tests.py | 7a9a7b1dc3b44895dab24b0cf46a9bec9402325c | [
"MIT"
] | permissive | blitzmann/evepaste | 19f7edbb0d15e44e898f575952d6322fe60a18ce | 78b2ca1553b0773e68978bce6858d37f445a927e | refs/heads/master | 2021-01-18T09:28:00.967532 | 2014-07-23T05:43:28 | 2014-07-23T05:43:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,523 | py | from evepaste import parse
from tests import parsers, TableChecker
ALL_TABLES = [parsers.ASSET_TABLE,
parsers.BOM_TABLE,
parsers.CARGO_SCAN_TABLE,
parsers.CHAT_TABLE,
parsers.CONTRACT_TABLE,
parsers.DSCAN_TABLE,
parsers.EFT_TABLE,
parsers.FITTING_TABLE,
parsers.KILLMAIL_TABLE,
parsers.LOOT_HISTORY_TABLE,
parsers.PI_TABLE,
parsers.SURVEY_SCANNER_TABLE,
parsers.VIEW_CONTENTS_TABLE,
parsers.WALLET_TABLE]
def test_generator():
# Perform each table test with their associated callable
for table in ALL_TABLES + [parsers.PARSE_TABLE, parsers.LISTING_TABLE]:
for i, (input_str, expected) in enumerate(table.tests):
name = ('test_%s[%s]' % (str(table.funct.__name__), i))
checker = TableChecker(table.funct, name)
yield checker, input_str, expected
# Perform each table test with parse() instead of the associated callable
for table in ALL_TABLES:
for i, (input_str, expected) in enumerate(table.tests):
if isinstance(expected, tuple) and not expected[1]:
name = 'test_parse(%s)[%s]' % (str(table.funct.__name__), i)
checker = TableChecker(parse, name)
result, bad_lines = expected
_type = table.funct.__name__.split('_', 1)[1]
yield checker, input_str, (_type, result, bad_lines)
| [
"[email protected]"
] | |
84bb6640e82b66ac5cf41c0c71d175c99bde1072 | c6123c9a071746cd72a7797e47cc6dfb6efc3f26 | com/old/nlp/user_portrait_analysis/user_portrait_analysis.py | 85717da4a2f89140eaa74aeabc34c4178850c4ee | [] | no_license | happiless/AI | 74144d842e61a9c1ef09b6e4fe4b3b09cae285c5 | 31b8bd1e890a2d0519c65958da198a504ffe2e2d | refs/heads/master | 2023-08-07T18:35:54.509458 | 2020-11-23T18:47:29 | 2020-11-23T18:47:29 | 262,650,975 | 0 | 0 | null | 2023-07-06T21:36:58 | 2020-05-09T20:13:23 | Python | UTF-8 | Python | false | false | 11,048 | py | # User portrait case study
# Tag users (age, gender, education) from their search-keyword data.
# Overall workflow:
# (1) Data preprocessing
#     character-encoding conversion
#     word segmentation of the search content
#     part-of-speech filtering
#     data sanity checks
# (2) Feature selection
#     build a word2vec embedding model
#     average the word vectors over each user's searches
# (3) Modeling and prediction
#     compare different machine-learning models
#     stacking ensemble
# Convert the raw data to UTF-8 first to avoid assorted encoding problems later.
# The raw data is large and the segmentation/filtering stage is slow, so only the first 10,000 rows are used here.
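# Rough shape of the pipeline implemented below:
#   GBK search logs -> UTF-8 CSV -> jieba POS-filtered tokens
#   -> word2vec (300-d) -> per-user mean vector -> classifiers / stacking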
import csv
import pandas as pd
# import jieba
# import jieba.posseg
import os
import sys
import time
import itertools
from gensim.models import word2vec
from gensim.models.word2vec import Word2Vec
import numpy as np
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
'''
# (1) Data preprocessing
# character-encoding conversion
data_path = './data/user_tag_query.10W.TRAIN'
csv_file = open(data_path + '-1w.csv', 'w')
writer = csv.writer(csv_file)
writer.writerow(['ID', 'Age', 'Gender', 'Education', 'QueryList'])
# convert to UTF-8 encoding
with open(data_path, 'r', encoding='gbk', errors='ignore') as f:
lines = f.readlines()
print(len(lines))
for line in lines[:10000]:
try:
data = line.strip().split('\t')
write_data = [data[0], data[1], data[2], data[3]]
querystr = ''
data[-1] = data[-1][:-1]
for d in data[4:]:
try:
cur_str = d.encode('utf8')
cur_str = cur_str.decode('utf8')
querystr += cur_str + '\t'
print(querystr)
except:
continue
querystr = querystr[:-1]
write_data.append(querystr)
writer.writerow(write_data)
except:
continue
# the encoding-converted data; only the 10k-row subset is read here
train_name = data_path + '-1w.csv'
data = pd.read_csv(train_name, encoding='gbk')
print(data.info())
data.Age.to_csv('./data/train_age.csv', index=False)
data.Gender.to_csv('./data/train_gender.csv', index=False)
data.Education.to_csv('./data/train_education.csv', index=False)
data.QueryList.to_csv('./data/train_querylist.csv', index=False)
# word-segment the users' search content
def input(train_name):
train_data = []
with open(train_name, 'rb') as f:
line = f.readline()
count = 0
while line:
try:
train_data.append(line)
count += 1
except:
print('error', count, line)
line = f.readline()
return train_data
start = time.clock()
filepath = './data/train_querylist.csv'
query_list = input(filepath)
write_path = './data/train_querylist_writefile-1w.csv'
csv_file = open(write_path, 'w')
# part-of-speech tagging
POS = {}
for i in range(len(query_list)):
if i % 2000 == 0 and i >= 1000:
print(i, 'finished')
s = []
str = ""
    words = jieba.posseg.cut(query_list[i])  # precise segmentation mode, with POS tags
allowPOS = ['n', 'v', 'j']
for word, flag in words:
print(word, flag)
# 婚外 j 女人 n 不 d 爱 v 你 r 的 uj 表现 v
POS[flag] = POS.get(flag, 0) + 1
if (flag[0] in allowPOS) and len(word) >= 2:
str += word + " "
cur_str = str.encode('utf8')
cur_str = cur_str.decode('utf8')
s.append(cur_str)
csv_file.write(" ".join(s) + "\n")
csv_file.close()
end = time.clock()
print("total time: %f s" % (end - start))
print(POS)
'''
# (2) Feature selection
# Build the word2vec embedding model
# The model is built with the Gensim library.
# Parameter notes:
# sentences: may be a list
# sg: training algorithm; 0 (default) is CBOW, sg=1 is skip-gram.
# size: dimensionality of the feature vectors, default 100. Larger sizes need
#       more training data but work better; tens to a few hundred is typical.
# window: maximum distance between the current and the predicted word within a sentence
# alpha: the learning rate
# seed: seeds the random number generator; relevant to word-vector initialisation.
# min_count: prunes the dictionary; words that occur fewer than min_count times
#            are dropped, default 5
# max_vocab_size: RAM cap while building the vocabulary; if the number of distinct
#                 words exceeds it, the least frequent ones are pruned. Roughly
#                 1 GB of RAM per ten million words. None means no limit.
# workers: number of parallel training threads.
# hs: if 1, hierarchical softmax is used; if 0 (default), negative sampling is used.
# negative: if > 0, negative sampling is used with this many noise words
# iter: number of training epochs, default 5
# reshape the data into list-of-lists format
# pip install --upgrade smart_open
train_path = './data/train_querylist_writefile-1w.csv'
save_path = "1w_word2vec_300.model"
with open(train_path, 'r') as f:
my_list = []
lines = f.readlines()
for line in lines:
cur_list = []
data = line.strip().split(' ')
for d in data:
cur_list.append(d)
my_list.append(cur_list)
model = word2vec.Word2Vec(my_list, size=300, window=10, workers=4)
model.save(save_path)
model = Word2Vec.load(save_path)
print(model.most_similar('大哥'))
print(model.most_similar('清华'))
# Average vector over all of a user's search data
# Load the trained word2vec model and average the vectors of each user's search results
with open(train_path, 'r') as f:
cur_index = 0
lines = f.readlines()
doc_cev = np.zeros((len(lines), 300))
for line in lines:
word_cev = np.zeros((1, 300))
words = line.strip().split(" ")
word_num = 0
        # average the word vectors for this user
for word in words:
if word in model:
word_num += 1
word_cev += np.array([model[word]])
doc_cev[cur_index] = word_cev / float(word_num)
cur_index += 1
print(doc_cev.shape)
print(doc_cev[0])
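# Equivalent per-user computation as a one-line sketch (the `in model` guard
# mirrors the loop above):
# doc_cev[i] = np.mean([model[w] for w in words if w in model], axis=0)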
gender_label = np.loadtxt('./data/train_gender.csv', int)
education_label = np.loadtxt('./data/train_education.csv', int)
age_label = np.loadtxt('./data/train_age.csv', int)
def remove_zero(x, y):
"""
把标签列Y为0的去除掉,对应Y为0的X矩阵的行也相应去掉
:param x: 列表包含一个个用户搜素词的平均向量
:param y: 用户性别标签列/年龄标签列/教育标签列
:return: 返回去除标签列为0的记录X和y
"""
nonzero = np.nonzero(y)
y = y[nonzero]
x = np.array(x)
x = x[nonzero]
return x, y
gender_train, gender_label = remove_zero(doc_cev, gender_label)
education_train, education_label = remove_zero(doc_cev, education_label)
age_train, age_label = remove_zero(doc_cev, age_label)
print(gender_train.shape, gender_label.shape)
print(education_train.shape, education_label.shape)
print(age_train.shape, age_label.shape)
# helper that plots a confusion matrix, to make evaluation easier to read
def plot_confusion_matrix(cm, classes,
title='Confusion matrix',
cmap=plt.cm.Blues):
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
    # add the colour gradient bar
    plt.colorbar()
    # put the class labels on the x and y tick positions (0 and 1)
    tick_marks = np.arange(len(classes))
    plt.xticks(ticks=tick_marks, labels=classes, rotation=0)
    plt.yticks(ticks=tick_marks, labels=classes)
    thresh = cm.max() / 2.
    # itertools.product(a, b): Cartesian product of the two index ranges
    # write the count into each cell of the matrix -- white text on dark
    # cells, black text on light ones
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j],
horizontalalignment='center',
color='white' if cm[i, j] > thresh else 'black')
plt.tight_layout()
plt.ylabel('True Label')
plt.xlabel('Predicted Label')
# (3) Modeling and prediction
# Compare different machine-learning models
# Build a baseline prediction model
X_train, X_test, y_train, y_test = train_test_split(gender_train, gender_label, test_size=0.2, random_state=0)
lr = LogisticRegression()  # LinearRegression's continuous output would break accuracy_score below
lr.fit(X_train, y_train)
y_pred = lr.predict(X_test)
print(accuracy_score(y_pred=y_pred, y_true=y_test))
cnf_matrix = confusion_matrix(y_true=y_test, y_pred=y_pred)
print('Test Recall metric => ', cnf_matrix[1, 1] / (cnf_matrix[1, 0] + cnf_matrix[1, 1]))
print('Test accuracy metric => ', (cnf_matrix[0, 0] + cnf_matrix[1, 1]) / cnf_matrix.sum())
class_names = [0, 1]
plt.figure()
plot_confusion_matrix(cnf_matrix, classes=class_names, title='Gender-Confusion matrix')
plt.show()
rfc = RandomForestClassifier(n_estimators=100, min_samples_split=5, max_depth=10)
rfc.fit(X_train, y_train)
y_pred = rfc.predict(X_test)
print(accuracy_score(y_true=y_test, y_pred=y_pred))
cnf_matrix = confusion_matrix(y_true=y_test, y_pred=y_pred)
print('Test Recall metric => ', cnf_matrix[1, 1] / (cnf_matrix[1, 0] + cnf_matrix[1, 1]))
print('Test accuracy metric => ', (cnf_matrix[0, 0] + cnf_matrix[1, 1]) / cnf_matrix.sum())
plt.figure()
plot_confusion_matrix(cnf_matrix,
classes=class_names,
title='Gender-Confusion Matrix')
plt.show()
# Stacking ensemble
clf1 = RandomForestClassifier(n_estimators=100, min_samples_split=5, max_depth=10)
clf2 = SVC()
clf3 = LogisticRegression()
base_model = [
['rf', clf1],
['svm', clf2],
['lr', clf3]
]
models = base_model
# store the first-stage models' predictions in S_train / S_test; they become
# the training features for the second stage
X_train, X_test, y_train, y_test = train_test_split(gender_train, gender_label, test_size=0.2, random_state=0)
S_train = np.zeros((X_train.shape[0], len(models)))
S_test = np.zeros((X_test.shape[0], len(models)))
folds = KFold(n_splits=5, shuffle=True, random_state=0)
for i, (name, clf) in enumerate(models):
    for train_idx, valid_idx in folds.split(X_train):
        X_train_cv = X_train[train_idx]
        y_train_cv = y_train[train_idx]
        X_valid = X_train[valid_idx]
        clf.fit(X_train_cv, y_train_cv)
        y_valid = clf.predict(X_valid)[:]
        # out-of-fold predictions fill the rows they were predicted for
        S_train[valid_idx, i] = y_valid
    y_pred = clf.predict(X_test)
    S_test[:, i] = y_pred
print(accuracy_score(y_true=y_test, y_pred=y_pred))
# any learner works for the second stage; a random forest is chosen here
final_rfc = RandomForestClassifier(n_estimators=100)
final_rfc.fit(S_train, y_train)
S_pred = final_rfc.predict(S_test)
print(S_pred)
print(final_rfc.score(S_test, y_test))
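# Reading the result: each column of S_test holds one base model's predictions
# on X_test, so final_rfc.score(S_test, y_test) is the stacked ensemble's test
# accuracy, directly comparable to the per-model scores printed above.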
| [
"[email protected]"
] | |
053e7c20b755a200f86dc65acb5e2982c3213c7f | 40b27bdd261a0d8a9e100bc4e83c9f76b9ef710e | /contests/ABC1-100/ABC3/a.py | ef238598da9c9f189cd50d282d535fac16896483 | [] | no_license | kouma1990/AtCoder-code-collection | 486d612ae1def6df49f4aa3632e06aae7ff73d2f | a3040a6025b43fb7dd3522945dce05a2626a92aa | refs/heads/master | 2020-04-16T22:42:39.023009 | 2019-08-29T07:05:43 | 2019-08-29T07:05:43 | 165,980,129 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | n = int(input())
s = 0
for i in range(1,n+1):
s+=i
print(s*10000/n) | [
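# closed form: s = n * (n + 1) / 2, so the printed value equals (n + 1) * 5000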
"[email protected]"
] | |
a6579d48d9a950469d219fe1691ca39caed063b2 | 1e58cee5b12b157b7b646bd06b09806f97d7fb57 | /tests/test_regex.py | acfeb0755a29804cbe490ff40b52e34bf82e8ba3 | [] | no_license | DRMacIver/FALBS | 3397612430ea8fbd60cb6c2452ac47a74e912452 | efd9231da4e64fd98c025eef86a106db72c96e80 | refs/heads/master | 2020-05-24T16:14:41.739260 | 2018-02-20T11:14:18 | 2018-02-20T11:14:18 | 84,857,328 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,111 | py | from tests.helpers import regex
import falbs.regex as rd
from hypothesis import given, assume, strategies as st, example
from pyrsistent import pset
import pytest
@given(regex())
def test_can_build_a_dfa(re):
assume(rd.has_matches(re))
rd.build_dfa(re)
def test_char_classes_1():
assert rd.character_classes(
rd.concatenate(rd.union(rd.char(0), rd.char(1)), rd.char(2))
) == pset([pset([0, 1])])
@given(regex())
def test_characters_in_same_class_produce_equivalent_expressions(re):
assume(rd.has_matches(re))
classes = rd.character_classes(re)
assume(any(len(cs) > 1 for cs in classes))
for cs in classes:
if len(cs) > 1:
derivs = [rd.derivative(re, c) for c in cs]
for a in derivs:
for b in derivs:
assert rd.equivalent(a, b)
@example(rd.star(rd.char(b'0')))
@example(rd.subtract(rd.star(rd.char(b'0')), rd.char(b'0')))
@given(regex())
def test_infinite_regex_have_more_than_one_solution(reg):
assume(rd.is_infinite(reg))
x = rd.subtract(reg, rd.literal(rd.lexmin(reg)))
assert rd.has_matches(x)
@example(rd.concatenate(rd.star(rd.char(b'\0')), rd.char(b'\1')))
@example(rd.union(rd.char(b'\0'), rd.star(rd.literal(b'\0\0'))))
@example(rd.star(rd.char(0)))
@given(regex())
def test_decompilation(re):
assume(rd.has_matches(re))
dfa = rd.build_dfa(re)
rewritten = rd.decompile_dfa(*dfa)
assert rd.equivalent(re, rewritten)
def symdiff(x, y):
return rd.union(rd.subtract(x, y), rd.subtract(y, x))
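# symdiff(x, y) matches exactly the strings on which x and y disagree, so it
# is empty iff the two regexes are equivalent -- the property the refutation
# tests below rely on.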
@example(
rd.union(
rd.char(b'\x00'), rd.subtract(rd.star(rd.char(b'\x01')), rd.Epsilon)),
rd.intersection(rd.char(b'\x00'), rd.star(rd.char(b'\x00')))
)
@example(x=rd.literal(b'01'), y=rd.literal(b'11'))
@given(regex(), regex())
def test_lexmin_of_symmetric_difference_is_refutation(x, y):
assume(not rd.equivalent(x, y))
w = rd.lexmin(symdiff(x, y))
assert w is not None
assert w == rd.witness_difference(x, y)
@example(rd.union(rd.char(b'\0'), rd.star(rd.char(b'\x00'))))
@given(regex())
def test_no_refutation_for_decompilation(re):
dec = rd.decompile_dfa(*rd.build_dfa(re))
assert rd.witness_difference(dec, re) is None
@given(regex(), st.data())
def test_lexmin_of_mutated_regex_is_refutation(x, data):
assume(rd.has_matches(x))
accepting, transitions = rd.build_dfa(x)
j = data.draw(st.integers(0, len(accepting) - 1))
assume(transitions[j])
c = data.draw(st.sampled_from(sorted(transitions[j])))
transitions[j][c] = data.draw(st.integers(0, len(accepting) - 1))
y = rd.decompile_dfa(accepting, transitions)
assume(rd.has_matches(y))
assume(not rd.equivalent(x, y))
w = rd.lexmin(symdiff(x, y))
assert w is not None
assert w == rd.witness_difference(x, y)
@example(rd.union(rd.char(b'\x00'), rd.star(rd.char(b'\x00'))), 0, 1)
@example(rd.star(rd.char(b'\x00')), 1, 1)
@given(regex(), st.integers(0, 10), st.integers(0, 10))
def test_count_below_bound_is_the_same(re, m, n):
assume(rd.has_matches(re))
m, n = sorted((m, n))
count1 = rd.LanguageCounter(*rd.build_dfa(re)).count(m)
count2 = rd.LanguageCounter(*rd.build_dfa(rd.bounded(re, n))).count(m)
assert count1 == count2
def test_clearing_caches_resets_identity():
c1 = rd.char(0)
c2 = rd.char(0)
rd.clear_caches()
c3 = rd.char(0)
assert c1 is c2 is not c3
@pytest.mark.parametrize(
'c',
[rd.Empty, rd.Epsilon, rd.char(0), rd.bounded(rd.star(rd.char(0)), 1)]
)
def test_bounded_does_not_wrap_obviously_bounded(c):
assert rd.bounded(c, 1) is c
assert rd.bounded(rd.Empty, 1) is rd.Empty
def test_basic_impossible_bounds_are_empty():
assert rd.bounded(rd.char(0), -1) is rd.Empty
assert rd.bounded(rd.char(0), 0) is rd.Empty
def test_bounds_are_not_nested():
x = rd.bounded(rd.star(rd.char(0)), 7)
y = rd.bounded(x, 5)
assert x.bound == 7
assert y.bound == 5
assert isinstance(y.child, rd.Star)
def test_bounds_propagate_through_unions():
assert isinstance(
rd.bounded(rd.union(rd.star(rd.char(0)), rd.star(rd.char(1))), 1),
rd.Union
)
def test_bounds_propagate_through_intersections():
x = rd.star(rd.char(b'\0\1'))
y = rd.star(rd.char(b'\1\2'))
assert isinstance(
rd.bounded(rd.intersection(x, y), 3),
rd.Intersection
)
def test_bounds_propagate_through_subtraction():
x = rd.star(rd.char(b'\0\1'))
y = rd.literal(b'\0\0\0\1')
z = rd.subtract(x, y)
b = rd.bounded(z, 10)
assert isinstance(b, rd.Subtraction)
assert isinstance(b.left, rd.Bounded)
@example(rd.concatenate(rd.char(b'\x00\x01'), rd.char(b'\x00')), 2)
@given(regex(), st.integers(0, 10))
def test_bounded_min_matches_bounds(re, n):
bd = rd.bounded(re, n)
assume(rd.has_matches(bd))
assert len(rd.lexmin(bd)) <= n
@example(rd.char(0))
@given(regex())
def test_non_empty_is_identity_on_non_nullable(re):
assume(not re.nullable)
assume(rd.has_matches(re))
assert rd.nonempty(re) is re
def test_star_collapses_trivial_children():
assert rd.star(rd.Empty) is rd.Epsilon
assert rd.star(rd.Epsilon) is rd.Epsilon
def test_star_collapses_stars():
x = rd.star(rd.char(0))
assert rd.star(x) is x
def test_flattens_unions():
x = rd.star(rd.char(0))
y = rd.star(rd.char(1))
z = rd.star(rd.char(2))
assert rd.union(x, rd.union(y, z)) is rd.union(rd.union(x, z), y)
def test_flattens_intersections():
x = rd.star(rd.char(b'01'))
y = rd.star(rd.char(b'02'))
z = rd.star(rd.char(b'03'))
assert rd.intersection(x, rd.intersection(y, z)) is \
rd.intersection(rd.intersection(x, z), y)
def test_removes_empty_from_unions():
c = rd.char(0)
assert c is rd.union(rd.Empty, c)
def test_union_of_empty_is_empty():
assert rd.union(rd.Empty, rd.Empty) is rd.Empty
def test_epsilon_prunes_down_intersections():
assert rd.intersection(rd.Epsilon, rd.star(rd.char(0))) is rd.Epsilon
assert rd.intersection(rd.Epsilon, rd.char(0)) is rd.Empty
def test_empty_kills_intersections():
assert rd.intersection(rd.Empty, rd.Epsilon) is rd.Empty
def test_self_intersection_is_identity():
x = rd.char(0)
assert rd.intersection(x, x, x) is x
def test_valid_starts_of_nullable_cat():
x = rd.concatenate(rd.star(rd.char(0)), rd.char(1))
assert rd.valid_starts(x) == pset([0, 1])
def test_empty_concatenation_is_epsilon():
assert rd.concatenate() is rd.Epsilon
def test_single_concatenation_is_self():
assert rd.concatenate(rd.char(0)) is rd.char(0)
def test_rebalances_concatenation():
x = rd.char(0)
y = rd.star(rd.char(1))
z = rd.char(2)
assert rd.concatenate(x, rd.concatenate(y, z)) is \
rd.concatenate(rd.concatenate(x, y), z)
def test_self_subtraction_is_empty():
x = rd.char(0)
assert rd.subtract(x, x) is rd.Empty
def test_empty_subtraction_is_identity():
x = rd.char(0)
assert rd.subtract(x, rd.Empty) is x
def test_subtraction_from_empty_is_empty():
x = rd.char(0)
assert rd.subtract(rd.Empty, x) is rd.Empty
def test_subtraction_from_epsilon_checks_nullability():
assert rd.subtract(rd.Epsilon, rd.char(0)) is rd.Epsilon
assert rd.subtract(rd.Epsilon, rd.star(rd.char(0))) is rd.Empty
def test_merges_multiple_subtracts():
x = rd.star(rd.char(b'012'))
y = rd.star(rd.char(b'0'))
z = rd.star(rd.char(b'1'))
t = rd.subtract(rd.subtract(x, y), z)
assert t is rd.subtract(x, rd.union(y, z))
t = rd.subtract(x, rd.subtract(y, z))
assert t.nullable
assert isinstance(t, rd.Union)
def test_derivatives_of_unions():
assert rd.derivative(
rd.union(rd.star(rd.char(0)), rd.star(rd.char(1))), 0
) is rd.star(rd.char(0))
def test_derivatives_of_intersection():
x = rd.star(rd.char(b'\0\1'))
y = rd.star(rd.literal(b'\0\1'))
z = rd.intersection(x, y)
d1 = rd.derivative(z, 0)
d2 = rd.derivative(d1, 1)
assert d2 is z
def test_valid_starts_of_subtraction():
x = rd.star(rd.char(b'\0\1'))
y = rd.char(b'\1')
z = rd.subtract(x, y)
assert rd.valid_starts(z) == pset([0, 1])
def test_difference_of_same_is_none():
x = rd.char(0)
assert rd.witness_difference(x, x) is None
def test_difference_of_epsilon_and_non_nullable_is_epsilon():
assert rd.witness_difference(rd.char(0), rd.Epsilon) is b''
def test_witness_difference_of_literals_is_smaller_of_two():
assert rd.witness_difference(rd.literal(b'00'), rd.literal(b'01')) == b'00'
def test_lexmin_of_star_is_empty():
assert rd.lexmin(rd.star(rd.char(b'0'))) is b''
def test_empty_is_not_infinite():
assert not rd.is_infinite(rd.Empty)
def test_basic_finite_are_not_infinite():
assert not rd.is_infinite(rd.Epsilon)
assert not rd.is_infinite(rd.char(0))
def test_union_of_infinite_and_finite_is_infinite():
assert rd.is_infinite(rd.union(rd.char(1), rd.star(rd.char(0))))
def test_can_walk_graph_for_infintiy():
assert rd.is_infinite(rd.intersection(
rd.star(rd.char(b'01')), rd.star(rd.char(b'12'))
))
def test_bounded_is_not_infinite():
assert not rd.is_infinite(rd.bounded(rd.star(rd.char(0)), 10 ** 6))
def to_basic(re):
return rd.decompile_dfa(*rd.build_dfa(re))
def test_complex_graphs_may_be_finite():
x = to_basic(rd.bounded(
rd.union(rd.star(rd.char(0)), rd.star(rd.char(1))), 20))
assert not rd.is_infinite(x)
def test_non_empty_star_dfa():
accepting, _ = rd.build_dfa(rd.nonempty(rd.star(rd.char(0))))
assert accepting == [False, True]
def test_two_phase_dfa():
re = rd.concatenate(rd.star(rd.char(0)), rd.star(rd.char(1)))
accepting, transitions = rd.build_dfa(re)
assert accepting == [True, True]
assert transitions == [{0: 0, 1: 1}, {1: 1}]
def test_lexmin_of_empty_is_none():
assert rd.lexmin(rd.Empty) is None
def test_trival_dfa_from_intersection():
assert rd.build_dfa(
rd.intersection(rd.char(b'\x00'), rd.char(b'\x00\x01'))) == (
[False, True], [{0: 1}, {}]
)
| [
"[email protected]"
] | |
812face81685d31ba6466f309191646f1a14e8ad | cf4958e16c65c4445d0c3e749bb407a452a1d722 | /tests/identify/test_identify.py | 2a4a77aacfaad76c0adf1e9794bbf74a80fecb94 | [
"Apache-2.0"
] | permissive | greschd/NodeFinder | a4775566f22c85f880bd34ea4bcbc24e4e006000 | 0204789afb13fcd1ffb86fd3c013e7debafb2590 | refs/heads/dev | 2021-10-08T04:14:51.391769 | 2020-03-13T20:37:59 | 2020-03-13T20:37:59 | 96,325,459 | 3 | 2 | Apache-2.0 | 2020-03-13T20:38:01 | 2017-07-05T14:00:37 | Python | UTF-8 | Python | false | false | 3,103 | py | # -*- coding: utf-8 -*-
# © 2017-2019, ETH Zurich, Institut für Theoretische Physik
# Author: Dominik Gresch <[email protected]>
"""
Tests for the identify step.
"""
import os
import pytest
import numpy as np
import nodefinder as nf
@pytest.fixture
def run_identify(sample):
"""
Fixture to run the identification step for a given sample file.
"""
def inner(sample_name):
search_res = nf.io.load(sample(os.path.join('search', sample_name)))
return nf.identify.run(search_res)
return inner
@pytest.fixture
def run_single_identify(run_identify): # pylint: disable=redefined-outer-name
"""
Fixture to run the identification step for a given sample file which should
contain only one cluster, and return the result for that cluster.
"""
def inner(sample_name):
res = run_identify(sample_name)
assert len(res) == 1
return res[0]
return inner
def test_point(run_single_identify): # pylint: disable=redefined-outer-name
"""
Test that a point is correctly identified.
"""
res = run_single_identify('point.hdf5')
assert res.dimension == 0
assert np.allclose(res.shape.position, [0.2, 0.4, 0.8])
def test_line(run_single_identify): # pylint: disable=redefined-outer-name
"""
Test that a line is correctly identified.
"""
res = run_single_identify('line.hdf5')
assert res.dimension == 1
assert len(res.shape.graph.nodes) > 10
assert res.shape.degree_count == dict()
def test_surface(run_single_identify): # pylint: disable=redefined-outer-name
"""
Test that a surface is correctly identified.
"""
res = run_single_identify('surface.hdf5')
assert res.dimension == 2
def test_two_lines(run_identify): # pylint: disable=redefined-outer-name
"""
Test that two 2D lines are correctly identified.
"""
res = run_identify('two_lines.hdf5')
assert len(res) == 2
for identified_object in res:
assert identified_object.shape.degree_count == dict()
assert identified_object.dimension == 1
assert len(identified_object.shape.graph.nodes) > 10
def test_cross(run_single_identify): # pylint: disable=redefined-outer-name
"""
Test that the cross is identified without holes.
"""
res = run_single_identify('cross.hdf5')
assert res.dimension == 1
assert res.shape.degree_count == {4: 1}
def test_open_line(run_single_identify): # pylint: disable=redefined-outer-name
"""
Test that an open line is identified correctly.
"""
res = run_single_identify('line_open.hdf5')
assert res.dimension == 1
assert res.shape.degree_count == {1: 2}
positions = np.array(list(res.shape.graph.nodes))
assert np.min(positions) < -0.49
assert np.max(positions) > 0.49
def test_line_short_paths(run_single_identify): # pylint: disable=redefined-outer-name
"""
Test that a short line is identified correctly, without holes.
"""
res = run_single_identify('line_1d.hdf5')
assert res.dimension == 1
assert res.shape.degree_count == dict()
| [
"[email protected]"
] | |
c5c9b79a713e8c961f1d210aadef8e76768b6c54 | adfb55c4fe2027986cabb49bf58081241bbba5cc | /SingletonPyton/SingletonPY.py | d5768bd1084ab78954806511859b90bec435b92c | [] | no_license | juanmontes1224/SingletonJavayPython | 04003061fcf0a990455689c90fa227e8e921541b | 68cd5f09709d503b8ab755676cc357bda7442077 | refs/heads/master | 2020-07-25T00:18:23.250152 | 2019-09-12T16:34:11 | 2019-09-12T16:34:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | import singleton
class PatronSingleton:
def __new__(obj):
try:
obj.instance
except AttributeError:
obj.instance = super(PatronSingleton, obj).__new__(obj)
return obj.instance
x = singleton.PatronSingleton()
y = singleton.PatronSingleton()
print("LA INSTANCIA "+str(x)+ " ")
print("ES IGUAL A LA INSTANCIA "+str(y)+" ")
| [
"[email protected]"
] | |
f9ce079989453c45688be5ac2affa1080c642420 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/calenderNotification/testcase/firstcases/testcase4_021.py | 72fdf8a65de555c26803c8d914f81dd294d2e3ae | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,600 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'com.github.quarck.calnotify',
'appActivity' : 'com.github.quarck.calnotify.ui.MainActivity',
'resetKeyboard' : True,
'androidCoverage' : 'com.github.quarck.calnotify/com.github.quarck.calnotify.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=1000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=1000)
return
def scrollToFindElement(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
elements = driver.find_elements_by_android_uiautomator(str)
if (len(elements) > 1) :
for temp in elements :
if temp.get_attribute("enabled") == "true" :
element = temp
break
except NoSuchElementException:
swipe(driver, 0.5, 0.55, 0.5, 0.2)
else :
return element
for i in range(0, 4, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
elements = driver.find_elements_by_android_uiautomator(str)
if (len(elements) > 1):
for temp in elements:
if temp.get_attribute("enabled") == "true":
element = temp
break
except NoSuchElementException:
swipe(driver, 0.5, 0.2, 0.5, 0.55)
else :
return element
return
def scrollToClickElement(driver, str) :
element = scrollToFindElement(driver, str)
if element is None :
return
else :
element.click()
def clickInList(driver, str) :
element = None
if (str is None) :
candidates = driver.find_elements_by_class_name("android.widget.CheckedTextView")
if len(candidates) >= 1 and checkWindow(driver):
element = candidates[len(candidates)-1]
else :
element = scrollToFindElement(driver, str)
if element is not None :
element.click()
else :
if checkWindow(driver) :
driver.press_keycode(4)
def clickOnCheckable(driver, str, value = "true") :
parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
for parent in parents:
try :
parent.find_element_by_android_uiautomator(str)
lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
if len(lists) == 1 :
innere = parent.find_element_by_android_uiautomator("new UiSelector().checkable(true)")
nowvalue = innere.get_attribute("checked")
if (nowvalue != value) :
innere.click()
break
except NoSuchElementException:
continue
def typeText(driver, value) :
element = getElememt(driver, "new UiSelector().className(\"android.widget.EditText\")")
element.clear()
element.send_keys(value)
enterelement = getElememt(driver, "new UiSelector().text(\"OK\")")
if (enterelement is None) :
if checkWindow(driver):
driver.press_keycode(4)
else :
enterelement.click()
def checkWindow(driver) :
dsize = driver.get_window_size()
nsize = driver.find_element_by_class_name("android.widget.FrameLayout").size
if dsize['height'] > nsize['height']:
return True
else :
return False
def testingSeekBar(driver, str, value):
try :
if(not checkWindow(driver)) :
element = seekForNearestSeekBar(driver, str)
else :
element = driver.find_element_by_class_name("android.widget.SeekBar")
if (None != element):
settingSeekBar(driver, element, value)
driver.find_element_by_android_uiautomator("new UiSelector().text(\"OK\")").click()
except NoSuchElementException:
time.sleep(1)
def seekForNearestSeekBar(driver, str):
parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
for parent in parents:
try :
parent.find_element_by_android_uiautomator(str)
lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
if len(lists) == 1 :
innere = parent.find_element_by_class_name("android.widget.SeekBar")
return innere
break
except NoSuchElementException:
continue
def settingSeekBar(driver, element, value) :
x = element.rect.get("x")
y = element.rect.get("y")
width = element.rect.get("width")
height = element.rect.get("height")
TouchAction(driver).press(None, x + 10, y + height/2).move_to(None, x + width * value,y + height/2).release().perform()
def clickInMultiList(driver, str) :
element = None
if (str is None) :
candidates = driver.find_elements_by_class_name("android.widget.CheckedTextView")
if len(candidates) >= 1 and checkWindow(driver):
element = candidates[len(candidates)-1]
else :
element = scrollToFindElement(driver, str)
if element is not None :
nowvalue = element.get_attribute("checked")
if (nowvalue != "true") :
element.click()
if checkWindow(driver) :
driver.find_element_by_android_uiautomator("new UiSelector().text(\"OK\")").click()
# testcase4_021
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememt(driver, "new UiSelector().resourceId(\"com.github.quarck.calnotify:id/card_view_event_name\").className(\"android.widget.TextView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Until specific time and date\")", "new UiSelector().className(\"android.widget.TextView\").instance(14)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"01\")", "new UiSelector().className(\"android.widget.Button\").instance(2)")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"android:id/numberpicker_input\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("Feb");
element = getElememtBack(driver, "new UiSelector().text(\"29\")", "new UiSelector().className(\"android.widget.Button\").instance(3)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Jan\")", "new UiSelector().className(\"android.widget.Button\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Oct\")", "new UiSelector().className(\"android.widget.Button\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"Oct\")", "new UiSelector().className(\"android.widget.Button\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Nov\")", "new UiSelector().className(\"android.widget.Button\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"2020\")", "new UiSelector().className(\"android.widget.Button\").instance(4)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"26\")", "new UiSelector().className(\"android.widget.Button\").instance(2)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Sep\")", "new UiSelector().className(\"android.widget.Button\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"2019\")", "new UiSelector().className(\"android.widget.Button\").instance(5)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"2020\")", "new UiSelector().className(\"android.widget.Button\").instance(5)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"2018\")", "new UiSelector().className(\"android.widget.Button\").instance(5)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"Dec\")", "new UiSelector().className(\"android.widget.Button\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Feb\")", "new UiSelector().className(\"android.widget.Button\").instance(1)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"27\")", "new UiSelector().className(\"android.widget.Button\").instance(2)")
TouchAction(driver).long_press(element).release().perform()
swipe(driver, 0.5, 0.8, 0.5, 0.2)
element = getElememtBack(driver, "new UiSelector().text(\"27\")", "new UiSelector().className(\"android.widget.Button\").instance(2)")
TouchAction(driver).long_press(element).release().perform()
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"4_021\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'com.github.quarck.calnotify'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage) | [
"[email protected]"
] | |
6a80192155d9e0511c8cc5db8443d8d3333429d0 | f70da0d011ad2d96ffd6a693e6cd36f1e1df56cb | /Proyecto2/Optimization/Instructions/Assignment.py | fd3c7e81489ded2d4756af82502cbb7a0c91433f | [] | no_license | diemorales96/OLC_Proyecto2_201503958 | aeaa2ba013f9ed643b324537cc6493710d407226 | 7779c619e635b7dc7cc2e47130a1c654ac84889a | refs/heads/main | 2023-09-02T05:11:57.732595 | 2021-11-13T04:54:04 | 2021-11-13T04:54:04 | 427,418,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | from Optimization.C3DInstruction import *
from Optimization.Expressions import *
class Assignment(C3DInstruction):
def __init__(self, place, exp, line, column):
C3DInstruction.__init__(self, line, column)
self.place = place
self.exp = exp
def selfAssignment(self):
if type(self.exp) is Literal:
aux = self.place.getCode() == self.exp.getCode()
else:
aux = self.place.getCode() == self.exp.right.getCode() or self.place.getCode() == self.exp.left.getCode()
return aux
def getCode(self):
if self.deleted:
return ''
return f'{self.place.getCode()} = {self.exp.getCode()};' | [
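
# Illustrative note (hypothetical operands): for a self-assignment such as
# "t1 = t1;", selfAssignment() returns True, so the optimizer can mark the
# instruction deleted and getCode() will then emit an empty string.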
"[email protected]"
] | |
f9265fb504e00e40640d423894f0bbbba00b6493 | 947fa6a4a6155ffce0038b11f4d743603418ad68 | /.c9/metadata/environment/ib_miniprojects_backend/covid_dashboard/tests/presenters/test_raise_invalid_username_exception.py | 016394fe848a71bbce6cc3bdafa9be4381937364 | [] | no_license | bharathi151/bharathi_diyyala | bd75e10639d7d22b332d5ce677e7799402dc4984 | 99f8657d010c790a0e4e4c9d6b57f81814784eb0 | refs/heads/master | 2022-11-21T12:43:48.401239 | 2020-07-23T09:05:52 | 2020-07-23T09:05:52 | 281,903,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,973 | py | {"filter":false,"title":"test_raise_invalid_username_exception.py","tooltip":"/ib_miniprojects_backend/covid_dashboard/tests/presenters/test_raise_invalid_username_exception.py","undoManager":{"mark":29,"position":29,"stack":[[{"start":{"row":3,"column":60},"end":{"row":3,"column":70},"action":"remove","lines":["COMMENT_ID"],"id":2}],[{"start":{"row":3,"column":60},"end":{"row":3,"column":61},"action":"insert","lines":["U"],"id":3}],[{"start":{"row":3,"column":52},"end":{"row":3,"column":61},"action":"remove","lines":["INVALID_U"],"id":4},{"start":{"row":3,"column":52},"end":{"row":3,"column":69},"action":"insert","lines":["INVALID_USER_NAME"]}],[{"start":{"row":3,"column":14},"end":{"row":3,"column":15},"action":"remove","lines":["2"],"id":5},{"start":{"row":3,"column":13},"end":{"row":3,"column":14},"action":"remove","lines":["v"]},{"start":{"row":3,"column":12},"end":{"row":3,"column":13},"action":"remove","lines":["_"]},{"start":{"row":3,"column":11},"end":{"row":3,"column":12},"action":"remove","lines":["t"]},{"start":{"row":3,"column":10},"end":{"row":3,"column":11},"action":"remove","lines":["s"]},{"start":{"row":3,"column":9},"end":{"row":3,"column":10},"action":"remove","lines":["o"]},{"start":{"row":3,"column":8},"end":{"row":3,"column":9},"action":"remove","lines":["p"]},{"start":{"row":3,"column":7},"end":{"row":3,"column":8},"action":"remove","lines":["_"]},{"start":{"row":3,"column":6},"end":{"row":3,"column":7},"action":"remove","lines":["b"]},{"start":{"row":3,"column":5},"end":{"row":3,"column":6},"action":"remove","lines":["f"]}],[{"start":{"row":3,"column":5},"end":{"row":3,"column":6},"action":"insert","lines":["c"],"id":6}],[{"start":{"row":3,"column":5},"end":{"row":3,"column":6},"action":"remove","lines":["c"],"id":7},{"start":{"row":3,"column":5},"end":{"row":3,"column":20},"action":"insert","lines":["covid_dashboard"]}],[{"start":{"row":4,"column":14},"end":{"row":4,"column":15},"action":"remove","lines":["2"],"id":8},{"start":{"row":4,"column":13},"end":{"row":4,"column":14},"action":"remove","lines":["v"]},{"start":{"row":4,"column":12},"end":{"row":4,"column":13},"action":"remove","lines":["_"]},{"start":{"row":4,"column":11},"end":{"row":4,"column":12},"action":"remove","lines":["t"]},{"start":{"row":4,"column":10},"end":{"row":4,"column":11},"action":"remove","lines":["s"]},{"start":{"row":4,"column":9},"end":{"row":4,"column":10},"action":"remove","lines":["o"]},{"start":{"row":4,"column":8},"end":{"row":4,"column":9},"action":"remove","lines":["p"]},{"start":{"row":4,"column":7},"end":{"row":4,"column":8},"action":"remove","lines":["_"]},{"start":{"row":4,"column":6},"end":{"row":4,"column":7},"action":"remove","lines":["b"]},{"start":{"row":4,"column":5},"end":{"row":4,"column":6},"action":"remove","lines":["f"]}],[{"start":{"row":4,"column":5},"end":{"row":4,"column":6},"action":"insert","lines":["c"],"id":9}],[{"start":{"row":4,"column":5},"end":{"row":4,"column":6},"action":"remove","lin
es":["c"],"id":10},{"start":{"row":4,"column":5},"end":{"row":4,"column":20},"action":"insert","lines":["covid_dashboard"]}],[{"start":{"row":9,"column":41},"end":{"row":9,"column":42},"action":"remove","lines":["D"],"id":11},{"start":{"row":9,"column":40},"end":{"row":9,"column":41},"action":"remove","lines":["I"]},{"start":{"row":9,"column":39},"end":{"row":9,"column":40},"action":"remove","lines":["_"]},{"start":{"row":9,"column":38},"end":{"row":9,"column":39},"action":"remove","lines":["T"]},{"start":{"row":9,"column":37},"end":{"row":9,"column":38},"action":"remove","lines":["N"]},{"start":{"row":9,"column":36},"end":{"row":9,"column":37},"action":"remove","lines":["E"]},{"start":{"row":9,"column":35},"end":{"row":9,"column":36},"action":"remove","lines":["M"]},{"start":{"row":9,"column":34},"end":{"row":9,"column":35},"action":"remove","lines":["M"]},{"start":{"row":9,"column":33},"end":{"row":9,"column":34},"action":"remove","lines":["O"]},{"start":{"row":9,"column":32},"end":{"row":9,"column":33},"action":"remove","lines":["C"]}],[{"start":{"row":9,"column":32},"end":{"row":9,"column":33},"action":"insert","lines":["U"],"id":12}],[{"start":{"row":9,"column":24},"end":{"row":9,"column":33},"action":"remove","lines":["INVALID_U"],"id":13},{"start":{"row":9,"column":24},"end":{"row":9,"column":41},"action":"insert","lines":["INVALID_USER_NAME"]}],[{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"remove","lines":["D"],"id":14}],[{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"insert","lines":["\\"],"id":15}],[{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"remove","lines":["\\"],"id":16},{"start":{"row":10,"column":43},"end":{"row":10,"column":44},"action":"remove","lines":["I"]},{"start":{"row":10,"column":42},"end":{"row":10,"column":43},"action":"remove","lines":["_"]},{"start":{"row":10,"column":41},"end":{"row":10,"column":42},"action":"remove","lines":["T"]},{"start":{"row":10,"column":40},"end":{"row":10,"column":41},"action":"remove","lines":["N"]},{"start":{"row":10,"column":39},"end":{"row":10,"column":40},"action":"remove","lines":["E"]},{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"remove","lines":["M"]},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"remove","lines":["M"]},{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"remove","lines":["O"]},{"start":{"row":10,"column":35},"end":{"row":10,"column":36},"action":"remove","lines":["C"]}],[{"start":{"row":10,"column":35},"end":{"row":10,"column":36},"action":"insert","lines":["U"],"id":17}],[{"start":{"row":10,"column":27},"end":{"row":10,"column":36},"action":"remove","lines":["INVALID_U"],"id":18},{"start":{"row":10,"column":27},"end":{"row":10,"column":44},"action":"insert","lines":["INVALID_USER_NAME"]}],[{"start":{"row":12,"column":23},"end":{"row":12,"column":57},"action":"remove","lines":["raise_invalid_comment_id_exception"],"id":19}],[{"start":{"row":12,"column":23},"end":{"row":12,"column":24},"action":"insert","lines":["r"],"id":20},{"start":{"row":12,"column":24},"end":{"row":12,"column":25},"action":"insert","lines":["a"]}],[{"start":{"row":12,"column":23},"end":{"row":12,"column":25},"action":"remove","lines":["ra"],"id":21},{"start":{"row":12,"column":23},"end":{"row":12,"column":55},"action":"insert","lines":["raise_invalid_username_exception"]}],[{"start":{"row":7,"column":4},"end":{"row":7,"column":43},"action":"remove","lines":["test_raise_invalid_comment_id_exception"],
"id":22}],[{"start":{"row":7,"column":4},"end":{"row":7,"column":5},"action":"insert","lines":["t"],"id":23},{"start":{"row":7,"column":5},"end":{"row":7,"column":6},"action":"insert","lines":["e"]},{"start":{"row":7,"column":6},"end":{"row":7,"column":7},"action":"insert","lines":["s"]},{"start":{"row":7,"column":7},"end":{"row":7,"column":8},"action":"insert","lines":["t"]}],[{"start":{"row":7,"column":7},"end":{"row":7,"column":8},"action":"remove","lines":["t"],"id":24},{"start":{"row":7,"column":6},"end":{"row":7,"column":7},"action":"remove","lines":["s"]},{"start":{"row":7,"column":5},"end":{"row":7,"column":6},"action":"remove","lines":["e"]},{"start":{"row":7,"column":4},"end":{"row":7,"column":5},"action":"remove","lines":["t"]}],[{"start":{"row":7,"column":4},"end":{"row":7,"column":5},"action":"insert","lines":["r"],"id":25},{"start":{"row":7,"column":5},"end":{"row":7,"column":6},"action":"insert","lines":["a"]}],[{"start":{"row":7,"column":4},"end":{"row":7,"column":6},"action":"remove","lines":["ra"],"id":26},{"start":{"row":7,"column":4},"end":{"row":7,"column":36},"action":"insert","lines":["raise_invalid_password_exception"]}],[{"start":{"row":7,"column":4},"end":{"row":7,"column":5},"action":"insert","lines":["t"],"id":27},{"start":{"row":7,"column":5},"end":{"row":7,"column":6},"action":"insert","lines":["e"]},{"start":{"row":7,"column":6},"end":{"row":7,"column":7},"action":"insert","lines":["s"]},{"start":{"row":7,"column":7},"end":{"row":7,"column":8},"action":"insert","lines":["t"]},{"start":{"row":7,"column":8},"end":{"row":7,"column":9},"action":"insert","lines":["_"]}],[{"start":{"row":7,"column":30},"end":{"row":7,"column":31},"action":"remove","lines":["d"],"id":28},{"start":{"row":7,"column":29},"end":{"row":7,"column":30},"action":"remove","lines":["r"]},{"start":{"row":7,"column":28},"end":{"row":7,"column":29},"action":"remove","lines":["o"]},{"start":{"row":7,"column":27},"end":{"row":7,"column":28},"action":"remove","lines":["w"]},{"start":{"row":7,"column":26},"end":{"row":7,"column":27},"action":"remove","lines":["s"]},{"start":{"row":7,"column":25},"end":{"row":7,"column":26},"action":"remove","lines":["s"]}],[{"start":{"row":7,"column":24},"end":{"row":7,"column":25},"action":"remove","lines":["a"],"id":29},{"start":{"row":7,"column":23},"end":{"row":7,"column":24},"action":"remove","lines":["p"]}],[{"start":{"row":7,"column":23},"end":{"row":7,"column":24},"action":"insert","lines":["u"],"id":30},{"start":{"row":7,"column":24},"end":{"row":7,"column":25},"action":"insert","lines":["s"]},{"start":{"row":7,"column":25},"end":{"row":7,"column":26},"action":"insert","lines":["e"]},{"start":{"row":7,"column":26},"end":{"row":7,"column":27},"action":"insert","lines":["r"]},{"start":{"row":7,"column":27},"end":{"row":7,"column":28},"action":"insert","lines":["n"]},{"start":{"row":7,"column":28},"end":{"row":7,"column":29},"action":"insert","lines":["a"]}],[{"start":{"row":7,"column":29},"end":{"row":7,"column":30},"action":"insert","lines":["m"],"id":31},{"start":{"row":7,"column":30},"end":{"row":7,"column":31},"action":"insert","lines":["e"]}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":16,"column":0},"end":{"row":16,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1590568234028,"hash":"b2f85c5d800a71bad9192aa8472738ff9c5be858"} | [
"[email protected]"
] | |
39887b28df78f262c8bbab74d1632da71f0305cb | 13130259156f6f9d16670cea88aa2608dd477d16 | /goeievraag/api.py | da5550ee4d634d841e456869f5e506e812c80368 | [] | no_license | fkunneman/DiscoSumo | d459251d543be5f4df38292a96f52baf4b520a0b | ed8f214834cf0c2e04a3bc429253502f7e79fbf8 | refs/heads/master | 2022-12-14T13:34:41.496963 | 2019-07-31T15:57:02 | 2019-07-31T15:57:02 | 140,422,779 | 2 | 1 | null | 2022-12-08T04:57:55 | 2018-07-10T11:36:00 | Python | UTF-8 | Python | false | false | 689 | py | import json
from flask import Flask, request
from main import GoeieVraag
app = Flask(__name__)
model = GoeieVraag()
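# A minimal client-side sketch (an assumption, not part of the original service):
# with the app served on Flask's default http://localhost:5000, the /rank
# endpoint could be exercised from the standard library, e.g.:
#   from urllib.request import urlopen
#   from urllib.parse import urlencode
#   params = urlencode({'q': 'some question text', 'method': 'ensemble'})
#   print(urlopen('http://localhost:5000/rank?' + params).read())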
@app.route("/rank", methods=['GET'])
def search():
'''
    :return: the 10 most semantically similar questions to the query, ranked by our official system
'''
questions = {'code': 400}
query, method = '', 'ensemble'
if 'q' in request.args:
query = request.args['q'].strip()
if 'method' in request.args:
method = request.args['method'].strip()
if request.method == 'GET':
questions = model(query=query.strip(), method=method)
questions = { 'code':200, 'result': questions }
return json.dumps(questions) | [
"[email protected]"
] | |
88aef09482138586ab261da2c44dcb42137269ec | e8912ed90e97730b465b1e65084c1dbcc741a73e | /기본/Cage D4/practice.py | 61641fe72db9d47335a7aec6862d65842133b0aa | [] | no_license | yhnb3/Algorithm_lecture | a0dcefc27ed17bec3cadae56d69e3cc64239cbfb | 461367e907e2b8a6a0cdc629e6a9029d9b03fba1 | refs/heads/master | 2020-12-22T05:39:05.412680 | 2020-04-10T09:16:19 | 2020-04-10T09:16:19 | 236,685,987 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | import sys
sys.stdin = open('samsung_bus.txt')
T = int(input())
for tc in range(1, T + 1):
N = int(input())
A = [list(map(int, input().split())) for _ in range(N)]
P = int(input())
    # each of the P query lines holds a single bus stop number; parsing it with
    # int(input()) keeps multi-digit stops intact (stop ids can reach 5000)
    C = [int(input()) for _ in range(P)]
    cnt = [0] * 5002
    # count how many of the N bus routes cover each stop
    for i in range(N):
        for j in range(A[i][0], A[i][1] + 1):
            cnt[j] += 1
    print('#{}'.format(tc), end='')
    for a in range(P):
        print(' {}'.format(cnt[C[a]]), end='')
print()
| [
"[email protected]"
] | |
bb6f61c1e8ff19905364983a96a89e0015f61066 | bc441bb06b8948288f110af63feda4e798f30225 | /anxin_service_sdk/model/easy_flow/target_info_pb2.py | bb13e8970970155b8556c9e7d1ab4fbeac6f2cce | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 11,758 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: target_info.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from anxin_service_sdk.model.cmdb import cluster_info_pb2 as anxin__service__sdk_dot_model_dot_cmdb_dot_cluster__info__pb2
from anxin_service_sdk.model.easy_flow import version_info_pb2 as anxin__service__sdk_dot_model_dot_easy__flow_dot_version__info__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='target_info.proto',
package='easy_flow',
syntax='proto3',
serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/easy_flow'),
serialized_pb=_b('\n\x11target_info.proto\x12\teasy_flow\x1a/anxin_service_sdk/model/cmdb/cluster_info.proto\x1a\x34\x61nxin_service_sdk/model/easy_flow/version_info.proto\"\x9b\x04\n\nTargetInfo\x12\x10\n\x08targetId\x18\x01 \x01(\t\x12\x12\n\ntargetName\x18\x02 \x01(\t\x12\x12\n\ninstanceId\x18\x03 \x01(\t\x12\"\n\x07\x63luster\x18\x04 \x01(\x0b\x32\x11.cmdb.ClusterInfo\x12\x38\n\x0cinstanceInfo\x18\x05 \x03(\x0b\x32\".easy_flow.TargetInfo.InstanceInfo\x12:\n\roperationInfo\x18\x06 \x03(\x0b\x32#.easy_flow.TargetInfo.OperationInfo\x1a\x8b\x01\n\x0cInstanceInfo\x12\x13\n\x0bversionName\x18\x01 \x01(\t\x12+\n\x0bversionInfo\x18\x02 \x01(\x0b\x32\x16.easy_flow.VersionInfo\x12\x11\n\tpackageId\x18\x03 \x01(\t\x12\x13\n\x0binstallPath\x18\x04 \x01(\t\x12\x11\n\tversionId\x18\x05 \x01(\t\x1a\xaa\x01\n\rOperationInfo\x12\x11\n\toperation\x18\x01 \x01(\t\x12-\n\rversionToInfo\x18\x02 \x01(\x0b\x32\x16.easy_flow.VersionInfo\x12/\n\x0fversionFromInfo\x18\x03 \x01(\x0b\x32\x16.easy_flow.VersionInfo\x12\x13\n\x0binstallPath\x18\x04 \x01(\t\x12\x11\n\tpackageId\x18\x05 \x01(\tBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/easy_flowb\x06proto3')
,
dependencies=[anxin__service__sdk_dot_model_dot_cmdb_dot_cluster__info__pb2.DESCRIPTOR,anxin__service__sdk_dot_model_dot_easy__flow_dot_version__info__pb2.DESCRIPTOR,])
_TARGETINFO_INSTANCEINFO = _descriptor.Descriptor(
name='InstanceInfo',
full_name='easy_flow.TargetInfo.InstanceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='versionName', full_name='easy_flow.TargetInfo.InstanceInfo.versionName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='versionInfo', full_name='easy_flow.TargetInfo.InstanceInfo.versionInfo', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packageId', full_name='easy_flow.TargetInfo.InstanceInfo.packageId', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='installPath', full_name='easy_flow.TargetInfo.InstanceInfo.installPath', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='versionId', full_name='easy_flow.TargetInfo.InstanceInfo.versionId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=363,
serialized_end=502,
)
_TARGETINFO_OPERATIONINFO = _descriptor.Descriptor(
name='OperationInfo',
full_name='easy_flow.TargetInfo.OperationInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='operation', full_name='easy_flow.TargetInfo.OperationInfo.operation', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='versionToInfo', full_name='easy_flow.TargetInfo.OperationInfo.versionToInfo', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='versionFromInfo', full_name='easy_flow.TargetInfo.OperationInfo.versionFromInfo', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='installPath', full_name='easy_flow.TargetInfo.OperationInfo.installPath', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packageId', full_name='easy_flow.TargetInfo.OperationInfo.packageId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=505,
serialized_end=675,
)
_TARGETINFO = _descriptor.Descriptor(
name='TargetInfo',
full_name='easy_flow.TargetInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='targetId', full_name='easy_flow.TargetInfo.targetId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='targetName', full_name='easy_flow.TargetInfo.targetName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceId', full_name='easy_flow.TargetInfo.instanceId', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cluster', full_name='easy_flow.TargetInfo.cluster', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceInfo', full_name='easy_flow.TargetInfo.instanceInfo', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operationInfo', full_name='easy_flow.TargetInfo.operationInfo', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TARGETINFO_INSTANCEINFO, _TARGETINFO_OPERATIONINFO, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=136,
serialized_end=675,
)
_TARGETINFO_INSTANCEINFO.fields_by_name['versionInfo'].message_type = anxin__service__sdk_dot_model_dot_easy__flow_dot_version__info__pb2._VERSIONINFO
_TARGETINFO_INSTANCEINFO.containing_type = _TARGETINFO
_TARGETINFO_OPERATIONINFO.fields_by_name['versionToInfo'].message_type = anxin__service__sdk_dot_model_dot_easy__flow_dot_version__info__pb2._VERSIONINFO
_TARGETINFO_OPERATIONINFO.fields_by_name['versionFromInfo'].message_type = anxin__service__sdk_dot_model_dot_easy__flow_dot_version__info__pb2._VERSIONINFO
_TARGETINFO_OPERATIONINFO.containing_type = _TARGETINFO
_TARGETINFO.fields_by_name['cluster'].message_type = anxin__service__sdk_dot_model_dot_cmdb_dot_cluster__info__pb2._CLUSTERINFO
_TARGETINFO.fields_by_name['instanceInfo'].message_type = _TARGETINFO_INSTANCEINFO
_TARGETINFO.fields_by_name['operationInfo'].message_type = _TARGETINFO_OPERATIONINFO
DESCRIPTOR.message_types_by_name['TargetInfo'] = _TARGETINFO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TargetInfo = _reflection.GeneratedProtocolMessageType('TargetInfo', (_message.Message,), {
'InstanceInfo' : _reflection.GeneratedProtocolMessageType('InstanceInfo', (_message.Message,), {
'DESCRIPTOR' : _TARGETINFO_INSTANCEINFO,
'__module__' : 'target_info_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.TargetInfo.InstanceInfo)
})
,
'OperationInfo' : _reflection.GeneratedProtocolMessageType('OperationInfo', (_message.Message,), {
'DESCRIPTOR' : _TARGETINFO_OPERATIONINFO,
'__module__' : 'target_info_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.TargetInfo.OperationInfo)
})
,
'DESCRIPTOR' : _TARGETINFO,
'__module__' : 'target_info_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.TargetInfo)
})
_sym_db.RegisterMessage(TargetInfo)
_sym_db.RegisterMessage(TargetInfo.InstanceInfo)
_sym_db.RegisterMessage(TargetInfo.OperationInfo)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
8207e03ca2def6f397ee315a6a5d7197f231c0b7 | e51c1abe850bacebaed50db66cf05f613db8c2f6 | /djangoevents/djangoevents/settings.py | b13380b36ea4611488a6ef4c35face2ce45651f9 | [] | no_license | zurcx/bkp_provi_djangoevents | 26a5932c170b52b2dd0e983265778d35bb9103ad | b7b4e5ec6ca3730c0c52b7e10341b10157104737 | refs/heads/master | 2021-01-19T13:33:25.967838 | 2013-06-10T12:43:08 | 2013-06-10T12:43:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,302 | py | # Django settings for djangoevents project.
import os
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'djangoevents.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Sao_Paulo'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'pt-br'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static_files')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'yg2qqmd0xlo)1ybqq)cyxz-*7w*n^3ds_oo7j3^&q=#*a3b6qa'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'djangoevents.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'djangoevents.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, 'templates')
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'core',
'events',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' | [
"[email protected]"
] | |
2b40d9cd03bf736e19b0d5cf91b979b04c27b6f2 | 4f3e9294fff97ec4d14bfa43e45501a5ba53de00 | /ver0028/py_subplot2.py | c752457f7da5ccf4fa5e05eaba84f239d2189e43 | [] | no_license | corrod/mittet_main | 6e7264df1d93d8f00437d4ab153d9c906057ce13 | 4dc60eee8ca1daf9ef70fd10ed300b6fa6a1e67d | refs/heads/master | 2021-01-06T20:38:30.150942 | 2015-02-26T05:17:44 | 2015-02-26T05:17:44 | 20,375,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,452 | py | # coding:utf-8
from pylab import *
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
f = figure()
subplots_adjust(hspace=0.001)
ax1 = subplot(411)
data1 = np.loadtxt('./ex1010.d')
x1 = data1[:,0]
y1 = data1[:,1]
plt.plot(x1,y1)
ax2 = subplot(412)
data2 = np.loadtxt('./ex1020.d')
x2 = data2[:,0]
y2 = data2[:,1]
plt.plot(x2,y2)
ax3 = subplot(413)
data3 = np.loadtxt('./ex1030.d')
x3 = data3[:,0]
y3 = data3[:,1]
plt.plot(x3,y3)
# ax4 = subplot(414)
# data4 = np.loadtxt('./ex1040.d')
# x4 = data4[:,0]
# y4 = data4[:,1]
# plt.plot(x4,y4)
# ax5 = subplot(615)
# data5 = np.loadtxt('./hz1040.d')
# x5 = data5[:,0]
# y5 = data5[:,1]
# plt.plot(x5,y5)
# ax6 = subplot(616)
# data6 = np.loadtxt('./hz1050.d')
# x6 = data6[:,0]
# y6 = data6[:,1]
# plt.plot(x6,y6)
show()
# from pylab import *
# t = arange(0.0, 2.0, 0.01)
# s1 = sin(2*pi*t)
# s2 = exp(-t)
# s3 = s1*s2
# # axes rect in relative 0,1 coords left, bottom, width, height. Turn
# # off xtick labels on all but the lower plot
# f = figure()
# subplots_adjust(hspace=0.001)
# ax1 = subplot(311)
# ax1.plot(t,s1)
# yticks(arange(-0.9, 1.0, 0.4))
# ylim(-1,1)
# ax2 = subplot(312, sharex=ax1)
# ax2.plot(t,s2)
# yticks(arange(0.1, 1.0, 0.2))
# ylim(0,1)
# ax3 = subplot(313, sharex=ax1)
# ax3.plot(t,s3)
# yticks(arange(-0.9, 1.0, 0.4))
# ylim(-1,1)
# xticklabels = ax1.get_xticklabels()+ax2.get_xticklabels()
# setp(xticklabels, visible=False)
# show()
| [
"[email protected]"
] | |
a1638f815e06cb7c87660b8bb36c0ca73466c002 | aa9f21f74196960ab57ffa0ff664b2a19de17e0b | /surrogate/files/moeaJSON.py | 18cd24118fa15aa75333f0d8743d49d67f29db5d | [
"MIT"
] | permissive | onlymezhong/Surrogate-Model | 36a73a40f38bc9e4d86123a2a12f11a369195024 | 1e47b5eaff74ee3491ef436dd604fe2bc3af8f57 | refs/heads/master | 2021-04-28T22:54:48.947536 | 2016-12-22T15:38:09 | 2016-12-22T15:38:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,651 | py | import json
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.pyplot import cm
class JSON(object):
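    """Incremental JSON writer/plotter for MOEA populations.
    writeHeader/writePareto/writeEnd build, one generation at a time, a file of
    the form {"generation": [{"variable": ..., "objective": ..., "constraint": ...}, ...]};
    plot_json reads it back and scatter-plots the objectives.
    """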
def __init__(self, fileName, numVar, numPop, numCon, numObj, numGen):
self.fileName = fileName
self.numPop = numPop
self.numVar = numVar
self.numCon = numCon
self.numObj = numObj
self.numGen = numGen
def writeHeader(self):
outFile = open(self.fileName, "wt")
outFile.write("{\n")
outFile.write("\"generation\": [\n")
outFile.close()
def writeEnd(self):
outFile = open(self.fileName, "a")
outFile.write("]\n}\n")
outFile.close()
def writePareto(self, individuals, igen):
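        """Append generation `igen` as one JSON object holding the variables,
        objectives and (when numCon > 0) constraints of all numPop individuals."""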
outFile = open(self.fileName, "a")
outFile.write(" {\n")
outFile.write(" \"variable\" : [")
outFile.write("[%f" % (individuals[0].variable[0]))
for j in range(1, self.numVar):
outFile.write(",%f" % (individuals[0].variable[j]))
outFile.write("]")
for i in range(1, self.numPop):
outFile.write(",[%f" % (individuals[i].variable[0]))
for j in range(1, self.numVar):
outFile.write(",%f" % (individuals[i].variable[j]))
outFile.write("]")
outFile.write("],\n")
outFile.write(" \"objective\" : [[")
outFile.write("[%f" % (individuals[0].fitness.values[0]))
for j in range(1, self.numObj):
outFile.write(",%f" % (individuals[0].fitness.values[j]))
outFile.write("]")
for i in range(1, self.numPop):
outFile.write(",[%f" % (individuals[i].fitness.values[0]))
for j in range(1, self.numObj):
outFile.write(",%f" % (individuals[i].fitness.values[j]))
outFile.write("]")
outFile.write("]]")
if self.numCon > 0:
outFile.write(",")
outFile.write("\n")
if self.numCon > 0:
outFile.write(" \"constraint\" : [")
outFile.write("[%f" % (individuals[0].constraint[0]))
for j in range(1, self.numCon):
outFile.write(",%f" % (individuals[0].constraint[j]))
outFile.write("]")
for i in range(1, self.numPop):
outFile.write(",[%f" % (individuals[i].constraint[0]))
for j in range(1, self.numCon):
outFile.write(",%f" % (individuals[i].constraint[j]))
outFile.write("]")
outFile.write("]")
outFile.write("\n")
outFile.write(" }")
if igen < self.numGen - 1:
outFile.write(",")
outFile.write("\n")
outFile.close()
def plot_json(self):
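        """Read the file back and scatter-plot the first two objectives of every
        generation, shading later generations progressively darker."""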
with open(self.fileName) as data_file:
data = json.load(data_file)
gen = data["generation"]
gen_tot = len(gen)
color = iter(cm.gray(np.linspace(1, 0.1, gen_tot)))
# color = iter(cm.rainbow(np.linspace(0,1,gen_tot)))
for index, item in enumerate(gen):
obj = item["objective"][0]
obj_tot = len(obj)
x = []
y = []
for iobj in obj:
x.append(iobj[0])
y.append(iobj[1])
plt.plot(x, y, '.', color=next(color), label=str(index))
plt.title('moea.json')
plt.xlabel('obj1')
# plt.xlim([0.7,1.1])
plt.ylabel('obj2')
# plt.ylim([6,9])
plt.grid(True)
# plt.legend(loc='best')
plt.show()
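# A minimal usage sketch (an assumption, not from the original module: `pop`
# stands for any list of numPop individuals exposing a `variable` sequence, a
# `fitness.values` sequence and, when numCon > 0, a `constraint` sequence):
#   writer = JSON('moea.json', numVar=2, numPop=4, numCon=0, numObj=2, numGen=10)
#   writer.writeHeader()
#   for igen in range(10):
#       writer.writePareto(pop, igen)
#   writer.writeEnd()
#   writer.plot_json()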
| [
"[email protected]"
] | |
d67369c8830125c0b3fbd5e61988f907aa13951b | f8d3f814067415485bb439d7fe92dc2bbe22a048 | /solem/examples/ch1_morphology.py | e0fe53f9ce60b8c8db29fecba431b2392b479042 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | gmonkman/python | 2f9ab8f159c01f6235c86cb0cd52062cd3fdedd3 | 9123aa6baf538b662143b9098d963d55165e8409 | refs/heads/master | 2023-04-09T15:53:29.746676 | 2022-11-26T20:35:21 | 2022-11-26T20:35:21 | 60,254,898 | 0 | 2 | null | 2023-03-24T22:58:39 | 2016-06-02T10:25:27 | Python | UTF-8 | Python | false | false | 649 | py | from __future__ import print_function
from PIL import Image
from numpy import *
from scipy.ndimage import measurements, morphology
"""
This is the morphology counting objects example in Section 1.4.
"""
# load image and threshold to make sure it is binary
im = array(Image.open('./data/houses.png').convert('L'))
im = (im < 128)
labels, nbr_objects = measurements.label(im)
print(("Number of objects:", nbr_objects))
# morphology - opening to separate objects better
im_open = morphology.binary_opening(im, ones((9, 5)), iterations=2)
labels_open, nbr_objects_open = measurements.label(im_open)
print(("Number of objects:", nbr_objects_open))
| [
"[email protected]"
] | |
65605a0e75c94747baeb0f7b40c8c21546c038ac | 6874015cb6043d1803b61f8978627ddce64963b4 | /tests/multiple_database/tests.py | 412daa7e38e768082e77c627eb9d41b4d2dd197d | [
"BSD-3-Clause",
"Python-2.0"
] | permissive | yephper/django | 25fbfb4147211d08ec87c41e08a695ac016454c6 | cdd1689fb354886362487107156978ae84e71453 | refs/heads/master | 2021-01-21T12:59:14.443153 | 2016-04-27T09:51:41 | 2016-04-27T09:51:41 | 56,134,291 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 96,272 | py | from __future__ import unicode_literals
import datetime
import pickle
from operator import attrgetter
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core import management
from django.db import DEFAULT_DB_ALIAS, connections, router, transaction
from django.db.models import signals
from django.db.utils import ConnectionRouter
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils.six import StringIO
from .models import Book, Person, Pet, Review, UserProfile
from .routers import AuthRouter, TestRouter, WriteRouter
class QueryTestCase(TestCase):
multi_db = True
def test_db_selection(self):
"Check that querysets will use the default database by default"
self.assertEqual(Book.objects.db, DEFAULT_DB_ALIAS)
self.assertEqual(Book.objects.all().db, DEFAULT_DB_ALIAS)
self.assertEqual(Book.objects.using('other').db, 'other')
self.assertEqual(Book.objects.db_manager('other').db, 'other')
self.assertEqual(Book.objects.db_manager('other').all().db, 'other')
def test_default_creation(self):
"Objects created on the default database don't leak onto other databases"
# Create a book on the default database using create()
Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
# Create a book on the default database using a save
dive = Book()
dive.title = "Dive into Python"
dive.published = datetime.date(2009, 5, 4)
dive.save()
# Check that book exists on the default database, but not on other database
try:
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on default database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('other').get(title="Pro Django")
try:
Book.objects.get(title="Dive into Python")
Book.objects.using('default').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Dive into Python" should exist on default database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('other').get(title="Dive into Python")
def test_other_creation(self):
"Objects created on another database don't leak onto the default database"
# Create a book on the second database
Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
# Create a book on the default database using a save
dive = Book()
dive.title = "Dive into Python"
dive.published = datetime.date(2009, 5, 4)
dive.save(using='other')
# Check that book exists on the default database, but not on other database
try:
Book.objects.using('other').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on other database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.get(title="Pro Django")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title="Pro Django")
try:
Book.objects.using('other').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Dive into Python" should exist on other database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.get(title="Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title="Dive into Python")
def test_refresh(self):
dive = Book()
dive.title = "Dive into Python"
        dive.published = datetime.date(2009, 5, 4)
        dive.save(using='other')
dive2 = Book.objects.using('other').get()
dive2.title = "Dive into Python (on default)"
dive2.save(using='default')
dive.refresh_from_db()
self.assertEqual(dive.title, "Dive into Python")
dive.refresh_from_db(using='default')
self.assertEqual(dive.title, "Dive into Python (on default)")
self.assertEqual(dive._state.db, "default")
def test_basic_queries(self):
"Queries are constrained to a single database"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
dive = Book.objects.using('other').get(published=datetime.date(2009, 5, 4))
self.assertEqual(dive.title, "Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(published=datetime.date(2009, 5, 4))
dive = Book.objects.using('other').get(title__icontains="dive")
self.assertEqual(dive.title, "Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title__icontains="dive")
dive = Book.objects.using('other').get(title__iexact="dive INTO python")
self.assertEqual(dive.title, "Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title__iexact="dive INTO python")
dive = Book.objects.using('other').get(published__year=2009)
self.assertEqual(dive.title, "Dive into Python")
self.assertEqual(dive.published, datetime.date(2009, 5, 4))
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(published__year=2009)
years = Book.objects.using('other').dates('published', 'year')
self.assertEqual([o.year for o in years], [2009])
years = Book.objects.using('default').dates('published', 'year')
self.assertEqual([o.year for o in years], [])
months = Book.objects.using('other').dates('published', 'month')
self.assertEqual([o.month for o in months], [5])
months = Book.objects.using('default').dates('published', 'month')
self.assertEqual([o.month for o in months], [])
def test_m2m_separation(self):
"M2M fields are constrained to a single database"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
pro.authors.set([marty])
dive.authors.set([mark])
# Inspect the m2m tables directly.
# There should be 1 entry in each database
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 1)
# Check that queries work across m2m joins
self.assertEqual(
list(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
['Pro Django']
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
['Dive into Python']
)
# Reget the objects to clear caches
dive = Book.objects.using('other').get(title="Dive into Python")
mark = Person.objects.using('other').get(name="Mark Pilgrim")
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(list(dive.authors.all().values_list('name', flat=True)),
['Mark Pilgrim'])
self.assertEqual(list(mark.book_set.all().values_list('title', flat=True)),
['Dive into Python'])
def test_m2m_forward_operations(self):
"M2M forward manipulations are all constrained to a single DB"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors.set([mark])
# Add a second author
john = Person.objects.using('other').create(name="John Smith")
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[]
)
dive.authors.add(john)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
['Dive into Python']
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
['Dive into Python']
)
# Remove the second author
dive.authors.remove(john)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
['Dive into Python']
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[]
)
# Clear all authors
dive.authors.clear()
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[]
)
# Create an author through the m2m interface
dive.authors.create(name='Jane Brown')
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Jane Brown').values_list('title', flat=True)),
['Dive into Python']
)
def test_m2m_reverse_operations(self):
"M2M reverse manipulations are all constrained to a single DB"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors.set([mark])
# Create a second book on the other database
grease = Book.objects.using('other').create(title="Greasemonkey Hacks",
published=datetime.date(2005, 11, 1))
# Add a books to the m2m
mark.book_set.add(grease)
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
['Mark Pilgrim']
)
self.assertEqual(
list(
Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
),
['Mark Pilgrim']
)
# Remove a book from the m2m
mark.book_set.remove(grease)
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
['Mark Pilgrim']
)
self.assertEqual(
list(
Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
),
[]
)
# Clear the books associated with mark
mark.book_set.clear()
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
),
[]
)
# Create a book through the m2m interface
mark.book_set.create(title="Dive into HTML5", published=datetime.date(2020, 1, 1))
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into HTML5').values_list('name', flat=True)),
['Mark Pilgrim']
)
def test_m2m_cross_database_protection(self):
"Operations that involve sharing M2M objects across databases raise an error"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Set a foreign key set with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.edited.set([pro, dive])
# Add to an m2m with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.book_set.add(dive)
# Set a m2m with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.book_set.set([pro, dive])
# Add to a reverse m2m with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='other'):
dive.authors.add(marty)
# Set a reverse m2m with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='other'):
dive.authors.set([mark, marty])
def test_m2m_deletion(self):
"Cascaded deletions of m2m relations issue queries on the right database"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
dive.authors.set([mark])
# Check the initial state
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
self.assertEqual(Person.objects.using('other').count(), 1)
self.assertEqual(Book.objects.using('other').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 1)
# Delete the object on the other database
dive.delete(using='other')
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
# The person still exists ...
self.assertEqual(Person.objects.using('other').count(), 1)
# ... but the book has been deleted
self.assertEqual(Book.objects.using('other').count(), 0)
# ... and the relationship object has also been deleted.
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Now try deletion in the reverse direction. Set up the relation again
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
dive.authors.set([mark])
# Check the initial state
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
self.assertEqual(Person.objects.using('other').count(), 1)
self.assertEqual(Book.objects.using('other').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 1)
# Delete the object on the other database
mark.delete(using='other')
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
# The person has been deleted ...
self.assertEqual(Person.objects.using('other').count(), 0)
# ... but the book still exists
self.assertEqual(Book.objects.using('other').count(), 1)
# ... and the relationship object has been deleted.
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
def test_foreign_key_separation(self):
"FK fields are constrained to a single database"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
george = Person.objects.create(name="George Vilches")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
chris = Person.objects.using('other').create(name="Chris Mills")
# Save the author's favorite books
pro.editor = george
pro.save()
dive.editor = chris
dive.save()
pro = Book.objects.using('default').get(title="Pro Django")
self.assertEqual(pro.editor.name, "George Vilches")
dive = Book.objects.using('other').get(title="Dive into Python")
self.assertEqual(dive.editor.name, "Chris Mills")
# Check that queries work across foreign key joins
self.assertEqual(
list(Person.objects.using('default').filter(edited__title='Pro Django').values_list('name', flat=True)),
['George Vilches']
)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Pro Django').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('default').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
['Chris Mills']
)
# Reget the objects to clear caches
chris = Person.objects.using('other').get(name="Chris Mills")
dive = Book.objects.using('other').get(title="Dive into Python")
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(list(chris.edited.values_list('title', flat=True)),
['Dive into Python'])
def test_foreign_key_reverse_operations(self):
"FK reverse manipulations are all constrained to a single DB"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
chris = Person.objects.using('other').create(name="Chris Mills")
# Save the author relations
dive.editor = chris
dive.save()
# Add a second book edited by chris
html5 = Book.objects.using('other').create(title="Dive into HTML5", published=datetime.date(2010, 3, 15))
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
chris.edited.add(html5)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
['Chris Mills']
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
['Chris Mills']
)
# Remove the second editor
chris.edited.remove(html5)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
['Chris Mills']
)
# Clear all edited books
chris.edited.clear()
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
[]
)
# Create an author through the m2m interface
chris.edited.create(title='Dive into Water', published=datetime.date(2010, 3, 15))
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into Water').values_list('name', flat=True)),
['Chris Mills']
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
[]
)
def test_foreign_key_cross_database_protection(self):
"Operations that involve sharing FK objects across databases raise an error"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
# Set a foreign key with an object from a different database
with self.assertRaises(ValueError):
dive.editor = marty
# Set a foreign key set with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.edited.set([pro, dive])
# Add to a foreign key set with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='default'):
marty.edited.add(dive)
def test_foreign_key_deletion(self):
"Cascaded deletions of Foreign Key relations issue queries on the right database"
mark = Person.objects.using('other').create(name="Mark Pilgrim")
Pet.objects.using('other').create(name="Fido", owner=mark)
# Check the initial state
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Pet.objects.using('default').count(), 0)
self.assertEqual(Person.objects.using('other').count(), 1)
self.assertEqual(Pet.objects.using('other').count(), 1)
# Delete the person object, which will cascade onto the pet
mark.delete(using='other')
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Pet.objects.using('default').count(), 0)
# Both the pet and the person have been deleted from the right database
self.assertEqual(Person.objects.using('other').count(), 0)
self.assertEqual(Pet.objects.using('other').count(), 0)
def test_foreign_key_validation(self):
"ForeignKey.validate() uses the correct database"
mickey = Person.objects.using('other').create(name="Mickey")
pluto = Pet.objects.using('other').create(name="Pluto", owner=mickey)
self.assertIsNone(pluto.full_clean())
def test_o2o_separation(self):
"OneToOne fields are constrained to a single database"
# Create a user and profile on the default database
alice = User.objects.db_manager('default').create_user('alice', '[email protected]')
alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')
# Create a user and profile on the other database
bob = User.objects.db_manager('other').create_user('bob', '[email protected]')
bob_profile = UserProfile.objects.using('other').create(user=bob, flavor='crunchy frog')
# Retrieve related objects; queries should be database constrained
alice = User.objects.using('default').get(username="alice")
self.assertEqual(alice.userprofile.flavor, "chocolate")
bob = User.objects.using('other').get(username="bob")
self.assertEqual(bob.userprofile.flavor, "crunchy frog")
# Check that queries work across joins
self.assertEqual(
list(
User.objects.using('default')
.filter(userprofile__flavor='chocolate').values_list('username', flat=True)
),
['alice']
)
self.assertEqual(
list(
User.objects.using('other')
.filter(userprofile__flavor='chocolate').values_list('username', flat=True)
),
[]
)
self.assertEqual(
list(
User.objects.using('default')
.filter(userprofile__flavor='crunchy frog').values_list('username', flat=True)
),
[]
)
self.assertEqual(
list(
User.objects.using('other')
.filter(userprofile__flavor='crunchy frog').values_list('username', flat=True)
),
['bob']
)
# Reget the objects to clear caches
alice_profile = UserProfile.objects.using('default').get(flavor='chocolate')
bob_profile = UserProfile.objects.using('other').get(flavor='crunchy frog')
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(alice_profile.user.username, 'alice')
self.assertEqual(bob_profile.user.username, 'bob')
def test_o2o_cross_database_protection(self):
"Operations that involve sharing FK objects across databases raise an error"
# Create a user and profile on the default database
alice = User.objects.db_manager('default').create_user('alice', '[email protected]')
# Create a user and profile on the other database
bob = User.objects.db_manager('other').create_user('bob', '[email protected]')
# Set a one-to-one relation with an object from a different database
alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')
with self.assertRaises(ValueError):
bob.userprofile = alice_profile
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
bob_profile = UserProfile.objects.using('other').create(user=bob, flavor='crunchy frog')
new_bob_profile = UserProfile(flavor="spring surprise")
# assigning a profile requires an explicit pk as the object isn't saved
charlie = User(pk=51, username='charlie', email='[email protected]')
charlie.set_unusable_password()
# initially, no db assigned
self.assertEqual(new_bob_profile._state.db, None)
self.assertEqual(charlie._state.db, None)
# old object comes from 'other', so the new object is set to use 'other'...
new_bob_profile.user = bob
charlie.userprofile = bob_profile
self.assertEqual(new_bob_profile._state.db, 'other')
self.assertEqual(charlie._state.db, 'other')
# ... but it isn't saved yet
self.assertEqual(list(User.objects.using('other').values_list('username', flat=True)),
['bob'])
self.assertEqual(list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog'])
# When saved (no using required), new objects goes to 'other'
charlie.save()
bob_profile.save()
new_bob_profile.save()
self.assertEqual(list(User.objects.using('default').values_list('username', flat=True)),
['alice'])
self.assertEqual(list(User.objects.using('other').values_list('username', flat=True)),
['bob', 'charlie'])
self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)),
['chocolate'])
self.assertEqual(list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog', 'spring surprise'])
# This also works if you assign the O2O relation in the constructor
denise = User.objects.db_manager('other').create_user('denise', '[email protected]')
denise_profile = UserProfile(flavor="tofu", user=denise)
self.assertEqual(denise_profile._state.db, 'other')
# ... but it isn't saved yet
self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)),
['chocolate'])
self.assertEqual(list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog', 'spring surprise'])
# When saved, the new profile goes to 'other'
denise_profile.save()
self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)),
['chocolate'])
self.assertEqual(list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog', 'spring surprise', 'tofu'])
def test_generic_key_separation(self):
"Generic fields are constrained to a single database"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
review1 = Review.objects.create(source="Python Monthly", content_object=pro)
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
review2 = Review.objects.using('other').create(source="Python Weekly", content_object=dive)
review1 = Review.objects.using('default').get(source="Python Monthly")
self.assertEqual(review1.content_object.title, "Pro Django")
review2 = Review.objects.using('other').get(source="Python Weekly")
self.assertEqual(review2.content_object.title, "Dive into Python")
# Reget the objects to clear caches
dive = Book.objects.using('other').get(title="Dive into Python")
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(list(dive.reviews.all().values_list('source', flat=True)),
['Python Weekly'])
def test_generic_key_reverse_operations(self):
"Generic reverse manipulations are all constrained to a single DB"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
temp = Book.objects.using('other').create(title="Temp",
published=datetime.date(2009, 5, 4))
review1 = Review.objects.using('other').create(source="Python Weekly", content_object=dive)
review2 = Review.objects.using('other').create(source="Python Monthly", content_object=temp)
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Weekly']
)
# Add a second review
dive.reviews.add(review2)
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Monthly', 'Python Weekly']
)
# Remove the first review
dive.reviews.remove(review1)
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Monthly']
)
# Clear all reviews
dive.reviews.clear()
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
# Create a review through the generic interface
dive.reviews.create(source='Python Daily')
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Daily']
)
def test_generic_key_cross_database_protection(self):
"Operations that involve sharing generic key objects across databases raise an error"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
review1 = Review.objects.create(source="Python Monthly", content_object=pro)
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
Review.objects.using('other').create(source="Python Weekly", content_object=dive)
# Set a foreign key with an object from a different database
with self.assertRaises(ValueError):
review1.content_object = dive
# Add to a foreign key set with an object from a different database
with self.assertRaises(ValueError):
with transaction.atomic(using='other'):
dive.reviews.add(review1)
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
review3 = Review(source="Python Daily")
# initially, no db assigned
self.assertEqual(review3._state.db, None)
# Dive comes from 'other', so review3 is set to use 'other'...
review3.content_object = dive
self.assertEqual(review3._state.db, 'other')
# ... but it isn't saved yet
self.assertEqual(
list(Review.objects.using('default').filter(object_id=pro.pk).values_list('source', flat=True)),
['Python Monthly']
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Weekly']
)
# When saved, review3 goes to 'other'
review3.save()
self.assertEqual(
list(Review.objects.using('default').filter(object_id=pro.pk).values_list('source', flat=True)),
['Python Monthly']
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Daily', 'Python Weekly']
)
def test_generic_key_deletion(self):
"Cascaded deletions of Generic Key relations issue queries on the right database"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
Review.objects.using('other').create(source="Python Weekly", content_object=dive)
# Check the initial state
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Review.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('other').count(), 1)
self.assertEqual(Review.objects.using('other').count(), 1)
# Delete the Book object, which will cascade onto the review
dive.delete(using='other')
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Review.objects.using('default').count(), 0)
# Both the book and the review have been deleted from the right database
self.assertEqual(Book.objects.using('other').count(), 0)
self.assertEqual(Review.objects.using('other').count(), 0)
def test_ordering(self):
"get_next_by_XXX commands stick to a single database"
Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
learn = Book.objects.using('other').create(title="Learning Python",
published=datetime.date(2008, 7, 16))
self.assertEqual(learn.get_next_by_published().title, "Dive into Python")
self.assertEqual(dive.get_previous_by_published().title, "Learning Python")
def test_raw(self):
"test the raw() method across databases"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
val = Book.objects.db_manager("other").raw('SELECT id FROM multiple_database_book')
self.assertQuerysetEqual(val, [dive.pk], attrgetter("pk"))
val = Book.objects.raw('SELECT id FROM multiple_database_book').using('other')
self.assertQuerysetEqual(val, [dive.pk], attrgetter("pk"))
def test_select_related(self):
"Database assignment is retained if an object is retrieved with select_related()"
# Create a book and author on the other database
mark = Person.objects.using('other').create(name="Mark Pilgrim")
Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
editor=mark)
# Retrieve the Person using select_related()
book = Book.objects.using('other').select_related('editor').get(title="Dive into Python")
# The editor instance should have a db state
self.assertEqual(book.editor._state.db, 'other')
def test_subquery(self):
"""Make sure as_sql works with subqueries and primary/replica."""
sub = Person.objects.using('other').filter(name='fff')
qs = Book.objects.filter(editor__in=sub)
# When you call __str__ on the query object, it doesn't know about using
# so it falls back to the default. If the subquery explicitly uses a
# different database, an error should be raised.
with self.assertRaises(ValueError):
str(qs.query)
# Evaluating the query shouldn't work, either
with self.assertRaises(ValueError):
for obj in qs:
pass
def test_related_manager(self):
"Related managers return managers, not querysets"
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# extra_arg is removed by the BookManager's implementation of
# create(); but the BookManager's implementation won't get called
# unless the related accessor returns a Manager, not a queryset
mark.book_set.create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
extra_arg=True)
mark.book_set.get_or_create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
extra_arg=True)
mark.edited.create(title="Dive into Water",
published=datetime.date(2009, 5, 4),
extra_arg=True)
mark.edited.get_or_create(title="Dive into Water",
published=datetime.date(2009, 5, 4),
extra_arg=True)
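# Illustrative sketch (assumption -- the real BookManager lives in this
# app's models module, which is not shown here): custom create() logic like
# the extra_arg stripping above only runs if the related accessor returns
# the manager itself, e.g.:
#
#   class BookManager(models.Manager):
#       def create(self, *args, **kwargs):
#           kwargs.pop('extra_arg', None)  # discard the test-only kwarg
#           return super(BookManager, self).create(*args, **kwargs)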
class ConnectionRouterTestCase(SimpleTestCase):
@override_settings(DATABASE_ROUTERS=[
'multiple_database.tests.TestRouter',
'multiple_database.tests.WriteRouter'])
def test_router_init_default(self):
connection_router = ConnectionRouter()
self.assertListEqual([r.__class__.__name__ for r in connection_router.routers],
['TestRouter', 'WriteRouter'])
def test_router_init_arg(self):
connection_router = ConnectionRouter([
'multiple_database.tests.TestRouter',
'multiple_database.tests.WriteRouter'
])
self.assertListEqual([r.__class__.__name__ for r in connection_router.routers],
['TestRouter', 'WriteRouter'])
# Init with instances instead of strings
connection_router = ConnectionRouter([TestRouter(), WriteRouter()])
self.assertListEqual([r.__class__.__name__ for r in connection_router.routers],
['TestRouter', 'WriteRouter'])
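# Illustrative sketch (assumption -- TestRouter is defined earlier in this
# module): a minimal primary/replica router producing the behavior these
# tests exercise would look roughly like:
#
#   class TestRouter(object):
#       def db_for_read(self, model, instance=None, **hints):
#           # reads go to the replica, unless pinned to an instance's db
#           return instance._state.db if instance else 'other'
#       def db_for_write(self, model, **hints):
#           return DEFAULT_DB_ALIAS
#       def allow_relation(self, obj1, obj2, **hints):
#           return obj1._state.db in ('default', 'other')
#       def allow_migrate(self, db, app_label, **hints):
#           return True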
# Make the 'other' database appear to be a replica of the 'default'
@override_settings(DATABASE_ROUTERS=[TestRouter()])
class RouterTestCase(TestCase):
multi_db = True
def test_db_selection(self):
"Check that querysets obey the router for db suggestions"
self.assertEqual(Book.objects.db, 'other')
self.assertEqual(Book.objects.all().db, 'other')
self.assertEqual(Book.objects.using('default').db, 'default')
self.assertEqual(Book.objects.db_manager('default').db, 'default')
self.assertEqual(Book.objects.db_manager('default').all().db, 'default')
def test_migrate_selection(self):
"Synchronization behavior is predictable"
self.assertTrue(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
self.assertTrue(router.allow_migrate_model('other', User))
self.assertTrue(router.allow_migrate_model('other', Book))
with override_settings(DATABASE_ROUTERS=[TestRouter(), AuthRouter()]):
# Add the auth router to the chain. TestRouter is a universal
# synchronizer, so it should have no effect.
self.assertTrue(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
self.assertTrue(router.allow_migrate_model('other', User))
self.assertTrue(router.allow_migrate_model('other', Book))
with override_settings(DATABASE_ROUTERS=[AuthRouter(), TestRouter()]):
# Now check what happens if the router order is reversed.
self.assertFalse(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
self.assertTrue(router.allow_migrate_model('other', User))
self.assertTrue(router.allow_migrate_model('other', Book))
def test_partial_router(self):
"A router can choose to implement a subset of methods"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
# First check the baseline behavior.
self.assertEqual(router.db_for_read(User), 'other')
self.assertEqual(router.db_for_read(Book), 'other')
self.assertEqual(router.db_for_write(User), 'default')
self.assertEqual(router.db_for_write(Book), 'default')
self.assertTrue(router.allow_relation(dive, dive))
self.assertTrue(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
with override_settings(DATABASE_ROUTERS=[WriteRouter(), AuthRouter(), TestRouter()]):
self.assertEqual(router.db_for_read(User), 'default')
self.assertEqual(router.db_for_read(Book), 'other')
self.assertEqual(router.db_for_write(User), 'writer')
self.assertEqual(router.db_for_write(Book), 'writer')
self.assertTrue(router.allow_relation(dive, dive))
self.assertFalse(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
def test_database_routing(self):
marty = Person.objects.using('default').create(name="Marty Alchin")
pro = Book.objects.using('default').create(title="Pro Django",
published=datetime.date(2008, 12, 16),
editor=marty)
pro.authors.set([marty])
# Create a book and author on the other database
Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
# An update query will be routed to the default database
Book.objects.filter(title='Pro Django').update(pages=200)
with self.assertRaises(Book.DoesNotExist):
# By default, the get query will be directed to 'other'
Book.objects.get(title='Pro Django')
# But the same query issued explicitly at a database will work.
pro = Book.objects.using('default').get(title='Pro Django')
# Check that the update worked.
self.assertEqual(pro.pages, 200)
# An update query with an explicit using clause will be routed
# to the requested database.
Book.objects.using('other').filter(title='Dive into Python').update(pages=300)
self.assertEqual(Book.objects.get(title='Dive into Python').pages, 300)
# Related object queries stick to the same database
# as the original object, regardless of the router
self.assertEqual(list(pro.authors.values_list('name', flat=True)), ['Marty Alchin'])
self.assertEqual(pro.editor.name, 'Marty Alchin')
# get_or_create is a special case. The get needs to be targeted at
# the write database in order to avoid potential transaction
# consistency problems
book, created = Book.objects.get_or_create(title="Pro Django")
self.assertFalse(created)
book, created = Book.objects.get_or_create(title="Dive Into Python",
defaults={'published': datetime.date(2009, 5, 4)})
self.assertTrue(created)
# Check the head count of objects
self.assertEqual(Book.objects.using('default').count(), 2)
self.assertEqual(Book.objects.using('other').count(), 1)
# If a database isn't specified, the read database is used
self.assertEqual(Book.objects.count(), 1)
# A delete query will also be routed to the default database
Book.objects.filter(pages__gt=150).delete()
# The default database has lost the book.
self.assertEqual(Book.objects.using('default').count(), 1)
self.assertEqual(Book.objects.using('other').count(), 1)
def test_invalid_set_foreign_key_assignment(self):
marty = Person.objects.using('default').create(name="Marty Alchin")
dive = Book.objects.using('other').create(
title="Dive into Python",
published=datetime.date(2009, 5, 4),
)
# Set a foreign key set with an object from a different database
msg = "<Book: Dive into Python> instance isn't saved. Use bulk=False or save the object first."
with self.assertRaisesMessage(ValueError, msg):
marty.edited.set([dive])
def test_foreign_key_cross_database_protection(self):
"Foreign keys can cross databases if they two databases have a common source"
# Create a book and author on the default database
pro = Book.objects.using('default').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.using('default').create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Set a foreign key with an object from a different database
try:
dive.editor = marty
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments of original objects haven't changed...
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# ... but they will when the affected object is saved.
dive.save()
self.assertEqual(dive._state.db, 'default')
# ...and the source database now has a copy of any object saved
try:
Book.objects.using('default').get(title='Dive into Python').delete()
except Book.DoesNotExist:
self.fail('Source database should have a copy of saved object')
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
self.assertEqual(dive._state.db, 'other')
# Set a foreign key set with an object from a different database
try:
marty.edited.set([pro, dive], bulk=False)
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Assignment implies a save, so database assignments of original objects have changed...
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'default')
self.assertEqual(mark._state.db, 'other')
# ...and the source database now has a copy of any object saved
try:
Book.objects.using('default').get(title='Dive into Python').delete()
except Book.DoesNotExist:
self.fail('Source database should have a copy of saved object')
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
self.assertEqual(dive._state.db, 'other')
# Add to a foreign key set with an object from a different database
try:
marty.edited.add(dive, bulk=False)
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Add implies a save, so database assignments of original objects have changed...
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'default')
self.assertEqual(mark._state.db, 'other')
# ...and the source database now has a copy of any object saved
try:
Book.objects.using('default').get(title='Dive into Python').delete()
except Book.DoesNotExist:
self.fail('Source database should have a copy of saved object')
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
# If you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
chris = Person(name="Chris Mills")
html5 = Book(title="Dive into HTML5", published=datetime.date(2010, 3, 15))
# initially, no db assigned
self.assertEqual(chris._state.db, None)
self.assertEqual(html5._state.db, None)
# old object comes from 'other', so the new object is set to use the
# source of 'other'...
self.assertEqual(dive._state.db, 'other')
chris.save()
dive.editor = chris
html5.editor = mark
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
self.assertEqual(chris._state.db, 'default')
self.assertEqual(html5._state.db, 'default')
# This also works if you assign the FK in the constructor
water = Book(title="Dive into Water", published=datetime.date(2001, 1, 1), editor=mark)
self.assertEqual(water._state.db, 'default')
# For the remainder of this test, create a copy of 'mark' in the
# 'default' database to prevent integrity errors on backends that
# don't defer constraint checks until the end of the transaction
mark.save(using='default')
# This moved 'mark' to the 'default' database; move it back to 'other'
mark.save(using='other')
self.assertEqual(mark._state.db, 'other')
# If you create an object through a FK relation, it will be
# written to the write database, even if the original object
# was on the read database
cheesecake = mark.edited.create(title='Dive into Cheesecake', published=datetime.date(2010, 3, 15))
self.assertEqual(cheesecake._state.db, 'default')
# Same goes for get_or_create, regardless of whether getting or creating
cheesecake, created = mark.edited.get_or_create(
title='Dive into Cheesecake',
published=datetime.date(2010, 3, 15),
)
self.assertEqual(cheesecake._state.db, 'default')
puddles, created = mark.edited.get_or_create(title='Dive into Puddles', published=datetime.date(2010, 3, 15))
self.assertEqual(puddles._state.db, 'default')
def test_m2m_cross_database_protection(self):
"M2M relations can cross databases if the database share a source"
# Create books and authors on the inverse to the usual database
pro = Book.objects.using('other').create(pk=1, title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
dive = Book.objects.using('default').create(pk=2, title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('default').create(pk=2, name="Mark Pilgrim")
# Now save back onto the usual database.
# This simulates primary/replica - the objects exist on both databases,
# but the _state.db is as it is for all other tests.
pro.save(using='default')
marty.save(using='default')
dive.save(using='other')
mark.save(using='other')
# Check that we have 2 of both types of object on both databases
self.assertEqual(Book.objects.using('default').count(), 2)
self.assertEqual(Book.objects.using('other').count(), 2)
self.assertEqual(Person.objects.using('default').count(), 2)
self.assertEqual(Person.objects.using('other').count(), 2)
# Set a m2m set with an object from a different database
try:
marty.book_set.set([pro, dive])
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 2)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Reset relations
Book.authors.through.objects.using('default').delete()
# Add to an m2m with an object from a different database
try:
marty.book_set.add(dive)
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Reset relations
Book.authors.through.objects.using('default').delete()
# Set a reverse m2m with an object from a different database
try:
dive.authors.set([mark, marty])
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 2)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Reset relations
Book.authors.through.objects.using('default').delete()
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Add to a reverse m2m with an object from a different database
try:
dive.authors.add(marty)
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# If you create an object through an M2M relation, it will be
# written to the write database, even if the original object
# was on the read database
alice = dive.authors.create(name='Alice')
self.assertEqual(alice._state.db, 'default')
# Same goes for get_or_create, regardless of whether getting or creating
alice, created = dive.authors.get_or_create(name='Alice')
self.assertEqual(alice._state.db, 'default')
bob, created = dive.authors.get_or_create(name='Bob')
self.assertEqual(bob._state.db, 'default')
def test_o2o_cross_database_protection(self):
"Operations that involve sharing FK objects across databases raise an error"
# Create a user and profile on the default database
alice = User.objects.db_manager('default').create_user('alice', '[email protected]')
# Create a user and profile on the other database
bob = User.objects.db_manager('other').create_user('bob', '[email protected]')
# Set a one-to-one relation with an object from a different database
alice_profile = UserProfile.objects.create(user=alice, flavor='chocolate')
try:
bob.userprofile = alice_profile
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments of original objects haven't changed...
self.assertEqual(alice._state.db, 'default')
self.assertEqual(alice_profile._state.db, 'default')
self.assertEqual(bob._state.db, 'other')
# ... but they will when the affected object is saved.
bob.save()
self.assertEqual(bob._state.db, 'default')
def test_generic_key_cross_database_protection(self):
"Generic Key operations can span databases if they share a source"
# Create a book and author on the default database
pro = Book.objects.using(
'default').create(title="Pro Django", published=datetime.date(2008, 12, 16))
review1 = Review.objects.using(
'default').create(source="Python Monthly", content_object=pro)
# Create a book and author on the other database
dive = Book.objects.using(
'other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
review2 = Review.objects.using(
'other').create(source="Python Weekly", content_object=dive)
# Set a generic foreign key with an object from a different database
try:
review1.content_object = dive
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments of original objects haven't changed...
self.assertEqual(pro._state.db, 'default')
self.assertEqual(review1._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(review2._state.db, 'other')
# ... but they will when the affected object is saved.
dive.save()
self.assertEqual(review1._state.db, 'default')
self.assertEqual(dive._state.db, 'default')
# ...and the source database now has a copy of any object saved
try:
Book.objects.using('default').get(title='Dive into Python').delete()
except Book.DoesNotExist:
self.fail('Source database should have a copy of saved object')
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
self.assertEqual(dive._state.db, 'other')
# Add to a generic foreign key set with an object from a different database
try:
dive.reviews.add(review1)
except ValueError:
self.fail("Assignment across primary/replica databases with a common source should be ok")
# Database assignments of original objects haven't changed...
self.assertEqual(pro._state.db, 'default')
self.assertEqual(review1._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(review2._state.db, 'other')
# ... but they will when the affected object is saved.
dive.save()
self.assertEqual(dive._state.db, 'default')
# ...and the source database now has a copy of any object saved
try:
Book.objects.using('default').get(title='Dive into Python').delete()
except Book.DoesNotExist:
self.fail('Source database should have a copy of saved object')
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
review3 = Review(source="Python Daily")
# initially, no db assigned
self.assertEqual(review3._state.db, None)
# Dive comes from 'other', so review3 is set to use the source of 'other'...
review3.content_object = dive
self.assertEqual(review3._state.db, 'default')
# If you create an object through a generic key relation, it will be
# written to the write database, even if the original object
# was on the read database
dive = Book.objects.using('other').get(title='Dive into Python')
nyt = dive.reviews.create(source="New York Times", content_object=dive)
self.assertEqual(nyt._state.db, 'default')
def test_m2m_managers(self):
"M2M relations are represented by managers, and can be controlled like managers"
pro = Book.objects.using('other').create(pk=1, title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
self.assertEqual(pro.authors.db, 'other')
self.assertEqual(pro.authors.db_manager('default').db, 'default')
self.assertEqual(pro.authors.db_manager('default').all().db, 'default')
self.assertEqual(marty.book_set.db, 'other')
self.assertEqual(marty.book_set.db_manager('default').db, 'default')
self.assertEqual(marty.book_set.db_manager('default').all().db, 'default')
def test_foreign_key_managers(self):
"FK reverse relations are represented by managers, and can be controlled like managers"
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
Book.objects.using('other').create(pk=1, title="Pro Django",
published=datetime.date(2008, 12, 16),
editor=marty)
self.assertEqual(marty.edited.db, 'other')
self.assertEqual(marty.edited.db_manager('default').db, 'default')
self.assertEqual(marty.edited.db_manager('default').all().db, 'default')
def test_generic_key_managers(self):
"Generic key relations are represented by managers, and can be controlled like managers"
pro = Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
Review.objects.using('other').create(source="Python Monthly",
content_object=pro)
self.assertEqual(pro.reviews.db, 'other')
self.assertEqual(pro.reviews.db_manager('default').db, 'default')
self.assertEqual(pro.reviews.db_manager('default').all().db, 'default')
def test_subquery(self):
"""Make sure as_sql works with subqueries and primary/replica."""
# Create a book and author on the other database
mark = Person.objects.using('other').create(name="Mark Pilgrim")
Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
editor=mark)
sub = Person.objects.filter(name='Mark Pilgrim')
qs = Book.objects.filter(editor__in=sub)
# When you call __str__ on the query object, it doesn't know about using
# so it falls back to the default. Don't let routing instructions
# force the subquery to an incompatible database.
str(qs.query)
# If you evaluate the query, it should work, running on 'other'
self.assertEqual(list(qs.values_list('title', flat=True)), ['Dive into Python'])
def test_deferred_models(self):
mark_def = Person.objects.using('default').create(name="Mark Pilgrim")
mark_other = Person.objects.using('other').create(name="Mark Pilgrim")
orig_b = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
editor=mark_other)
b = Book.objects.using('other').only('title').get(pk=orig_b.pk)
self.assertEqual(b.published, datetime.date(2009, 5, 4))
b = Book.objects.using('other').only('title').get(pk=orig_b.pk)
b.editor = mark_def
b.save(using='default')
self.assertEqual(Book.objects.using('default').get(pk=b.pk).published,
datetime.date(2009, 5, 4))
@override_settings(DATABASE_ROUTERS=[AuthRouter()])
class AuthTestCase(TestCase):
multi_db = True
def test_auth_manager(self):
"The methods on the auth manager obey database hints"
# Create one user using default allocation policy
User.objects.create_user('alice', '[email protected]')
# Create another user, explicitly specifying the database
User.objects.db_manager('default').create_user('bob', '[email protected]')
# The first user only exists on the other database
alice = User.objects.using('other').get(username='alice')
self.assertEqual(alice.username, 'alice')
self.assertEqual(alice._state.db, 'other')
with self.assertRaises(User.DoesNotExist):
User.objects.using('default').get(username='alice')
# The second user only exists on the default database
bob = User.objects.using('default').get(username='bob')
self.assertEqual(bob.username, 'bob')
self.assertEqual(bob._state.db, 'default')
with self.assertRaises(User.DoesNotExist):
User.objects.using('other').get(username='bob')
# That is... there is one user on each database
self.assertEqual(User.objects.using('default').count(), 1)
self.assertEqual(User.objects.using('other').count(), 1)
def test_dumpdata(self):
"Check that dumpdata honors allow_migrate restrictions on the router"
User.objects.create_user('alice', '[email protected]')
User.objects.db_manager('default').create_user('bob', '[email protected]')
# Check that dumping the default database doesn't try to include auth
# because allow_migrate prohibits auth on default
new_io = StringIO()
management.call_command('dumpdata', 'auth', format='json', database='default', stdout=new_io)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, '[]')
# Check that dumping the other database does include auth
new_io = StringIO()
management.call_command('dumpdata', 'auth', format='json', database='other', stdout=new_io)
command_output = new_io.getvalue().strip()
self.assertIn('"email": "[email protected]"', command_output)
class AntiPetRouter(object):
# A router that only expresses an opinion on migrate,
# passing pets to the 'other' database
def allow_migrate(self, db, app_label, model_name=None, **hints):
if db == 'other':
return model_name == 'pet'
else:
return model_name != 'pet'
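# Illustrative sketch (assumption): migrate and loaddata consult
# allow_migrate per database, so with AntiPetRouter installed Pet rows can
# only land on 'other':
#
#   router = ConnectionRouter([AntiPetRouter()])
#   router.allow_migrate('other', 'multiple_database', model_name='pet')    # True
#   router.allow_migrate('default', 'multiple_database', model_name='pet')  # False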
class FixtureTestCase(TestCase):
multi_db = True
fixtures = ['multidb-common', 'multidb']
@override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
def test_fixture_loading(self):
"Multi-db fixtures are loaded correctly"
# Check that "Pro Django" exists on the default database, but not on other database
try:
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on default database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('other').get(title="Pro Django")
# Check that "Dive into Python" exists on the default database, but not on other database
try:
Book.objects.using('other').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Dive into Python" should exist on other database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.get(title="Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title="Dive into Python")
# Check that "Definitive Guide" exists on the both databases
try:
Book.objects.get(title="The Definitive Guide to Django")
Book.objects.using('default').get(title="The Definitive Guide to Django")
Book.objects.using('other').get(title="The Definitive Guide to Django")
except Book.DoesNotExist:
self.fail('"The Definitive Guide to Django" should exist on both databases')
@override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
def test_pseudo_empty_fixtures(self):
"""
A fixture can contain entries, but lead to nothing in the database;
this shouldn't raise an error (#14068).
"""
new_io = StringIO()
management.call_command('loaddata', 'pets', stdout=new_io, stderr=new_io)
command_output = new_io.getvalue().strip()
# No objects will actually be loaded
self.assertEqual(command_output, "Installed 0 object(s) (of 2) from 1 fixture(s)")
class PickleQuerySetTestCase(TestCase):
multi_db = True
def test_pickling(self):
for db in connections:
Book.objects.using(db).create(title='Dive into Python', published=datetime.date(2009, 5, 4))
qs = Book.objects.all()
self.assertEqual(qs.db, pickle.loads(pickle.dumps(qs)).db)
class DatabaseReceiver(object):
"""
Used in the tests for the database argument in signals (#13552)
"""
def __call__(self, signal, sender, **kwargs):
self._database = kwargs['using']
class WriteToOtherRouter(object):
"""
A router that sends all writes to the other database.
"""
def db_for_write(self, model, **hints):
return "other"
class SignalTests(TestCase):
multi_db = True
def override_router(self):
return override_settings(DATABASE_ROUTERS=[WriteToOtherRouter()])
def test_database_arg_save_and_delete(self):
"""
Tests that the pre/post_save signal contains the correct database.
(#13552)
"""
# Make some signal receivers
pre_save_receiver = DatabaseReceiver()
post_save_receiver = DatabaseReceiver()
pre_delete_receiver = DatabaseReceiver()
post_delete_receiver = DatabaseReceiver()
# Make model and connect receivers
signals.pre_save.connect(sender=Person, receiver=pre_save_receiver)
signals.post_save.connect(sender=Person, receiver=post_save_receiver)
signals.pre_delete.connect(sender=Person, receiver=pre_delete_receiver)
signals.post_delete.connect(sender=Person, receiver=post_delete_receiver)
p = Person.objects.create(name='Darth Vader')
# Save and test receivers got calls
p.save()
self.assertEqual(pre_save_receiver._database, DEFAULT_DB_ALIAS)
self.assertEqual(post_save_receiver._database, DEFAULT_DB_ALIAS)
# Delete, and test
p.delete()
self.assertEqual(pre_delete_receiver._database, DEFAULT_DB_ALIAS)
self.assertEqual(post_delete_receiver._database, DEFAULT_DB_ALIAS)
# Save again to a different database
p.save(using="other")
self.assertEqual(pre_save_receiver._database, "other")
self.assertEqual(post_save_receiver._database, "other")
# Delete, and test
p.delete(using="other")
self.assertEqual(pre_delete_receiver._database, "other")
self.assertEqual(post_delete_receiver._database, "other")
signals.pre_save.disconnect(sender=Person, receiver=pre_save_receiver)
signals.post_save.disconnect(sender=Person, receiver=post_save_receiver)
signals.pre_delete.disconnect(sender=Person, receiver=pre_delete_receiver)
signals.post_delete.disconnect(sender=Person, receiver=post_delete_receiver)
def test_database_arg_m2m(self):
"""
Test that the m2m_changed signal has a correct database arg (#13552)
"""
# Make a receiver
receiver = DatabaseReceiver()
# Connect it
signals.m2m_changed.connect(receiver=receiver)
# Create the models that will be used for the tests
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
# Create a copy of the models on the 'other' database to prevent
# integrity errors on backends that don't defer constraint checks
Book.objects.using('other').create(pk=b.pk, title=b.title,
published=b.published)
Person.objects.using('other').create(pk=p.pk, name=p.name)
# Test addition
b.authors.add(p)
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
b.authors.add(p)
self.assertEqual(receiver._database, "other")
# Test removal
b.authors.remove(p)
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
b.authors.remove(p)
self.assertEqual(receiver._database, "other")
# Test addition in reverse
p.book_set.add(b)
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
p.book_set.add(b)
self.assertEqual(receiver._database, "other")
# Test clearing
b.authors.clear()
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
b.authors.clear()
self.assertEqual(receiver._database, "other")
class AttributeErrorRouter(object):
"A router to test the exception handling of ConnectionRouter"
def db_for_read(self, model, **hints):
raise AttributeError
def db_for_write(self, model, **hints):
raise AttributeError
class RouterAttributeErrorTestCase(TestCase):
multi_db = True
def override_router(self):
return override_settings(DATABASE_ROUTERS=[AttributeErrorRouter()])
def test_attribute_error_read(self):
"Check that the AttributeError from AttributeErrorRouter bubbles up"
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
with self.override_router():
with self.assertRaises(AttributeError):
Book.objects.get(pk=b.pk)
def test_attribute_error_save(self):
"Check that the AttributeError from AttributeErrorRouter bubbles up"
dive = Book()
dive.title = "Dive into Python"
dive.published = datetime.date(2009, 5, 4)
with self.override_router():
with self.assertRaises(AttributeError):
dive.save()
def test_attribute_error_delete(self):
"Check that the AttributeError from AttributeErrorRouter bubbles up"
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
b.authors.set([p])
b.editor = p
with self.override_router():
with self.assertRaises(AttributeError):
b.delete()
def test_attribute_error_m2m(self):
"Check that the AttributeError from AttributeErrorRouter bubbles up"
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
with self.override_router():
with self.assertRaises(AttributeError):
b.authors.set([p])
class ModelMetaRouter(object):
"A router to ensure model arguments are real model classes"
def db_for_write(self, model, **hints):
if not hasattr(model, '_meta'):
raise ValueError
@override_settings(DATABASE_ROUTERS=[ModelMetaRouter()])
class RouterModelArgumentTestCase(TestCase):
multi_db = True
def test_m2m_collection(self):
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
# test add
b.authors.add(p)
# test remove
b.authors.remove(p)
# test clear
b.authors.clear()
# test setattr
b.authors.set([p])
# test M2M collection
b.delete()
def test_foreignkey_collection(self):
person = Person.objects.create(name='Bob')
Pet.objects.create(owner=person, name='Wart')
# test related FK collection
person.delete()
class SyncOnlyDefaultDatabaseRouter(object):
def allow_migrate(self, db, app_label, **hints):
return db == DEFAULT_DB_ALIAS
class MigrateTestCase(TestCase):
available_apps = [
'multiple_database',
'django.contrib.auth',
'django.contrib.contenttypes'
]
multi_db = True
def test_migrate_to_other_database(self):
"""Regression test for #16039: migrate with --database option."""
cts = ContentType.objects.using('other').filter(app_label='multiple_database')
count = cts.count()
self.assertGreater(count, 0)
cts.delete()
management.call_command('migrate', verbosity=0, interactive=False, database='other')
self.assertEqual(cts.count(), count)
def test_migrate_to_other_database_with_router(self):
"""Regression test for #16039: migrate with --database option."""
cts = ContentType.objects.using('other').filter(app_label='multiple_database')
cts.delete()
with override_settings(DATABASE_ROUTERS=[SyncOnlyDefaultDatabaseRouter()]):
management.call_command('migrate', verbosity=0, interactive=False, database='other')
self.assertEqual(cts.count(), 0)
class RouterUsed(Exception):
WRITE = 'write'
def __init__(self, mode, model, hints):
self.mode = mode
self.model = model
self.hints = hints
class RouteForWriteTestCase(TestCase):
multi_db = True
class WriteCheckRouter(object):
def db_for_write(self, model, **hints):
raise RouterUsed(mode=RouterUsed.WRITE, model=model, hints=hints)
def override_router(self):
return override_settings(DATABASE_ROUTERS=[RouteForWriteTestCase.WriteCheckRouter()])
def test_fk_delete(self):
owner = Person.objects.create(name='Someone')
pet = Pet.objects.create(name='fido', owner=owner)
try:
with self.override_router():
pet.owner.delete()
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': owner})
def test_reverse_fk_delete(self):
owner = Person.objects.create(name='Someone')
to_del_qs = owner.pet_set.all()
try:
with self.override_router():
to_del_qs.delete()
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Pet)
self.assertEqual(e.hints, {'instance': owner})
def test_reverse_fk_get_or_create(self):
owner = Person.objects.create(name='Someone')
try:
with self.override_router():
owner.pet_set.get_or_create(name='fido')
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Pet)
self.assertEqual(e.hints, {'instance': owner})
def test_reverse_fk_update(self):
owner = Person.objects.create(name='Someone')
Pet.objects.create(name='fido', owner=owner)
try:
with self.override_router():
owner.pet_set.update(name='max')
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Pet)
self.assertEqual(e.hints, {'instance': owner})
def test_m2m_add(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
try:
with self.override_router():
book.authors.add(auth)
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_clear(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
book.authors.clear()
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_delete(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
book.authors.all().delete()
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_get_or_create(self):
Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
try:
with self.override_router():
book.authors.get_or_create(name='Someone else')
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_remove(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
book.authors.remove(auth)
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_update(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
book.authors.all().update(name='Different')
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': book})
def test_reverse_m2m_add(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
try:
with self.override_router():
auth.book_set.add(book)
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_clear(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
auth.book_set.clear()
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_delete(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
auth.book_set.all().delete()
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_get_or_create(self):
auth = Person.objects.create(name='Someone')
Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
try:
with self.override_router():
auth.book_set.get_or_create(title="New Book", published=datetime.datetime.now())
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_remove(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
auth.book_set.remove(book)
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_update(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
try:
with self.override_router():
auth.book_set.all().update(title='Different')
self.fail('db_for_write() not invoked on router')
except RouterUsed as e:
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book)
self.assertEqual(e.hints, {'instance': auth})
| [
"[email protected]"
] | |
1ff723030c57bb8f1374e9cf2945a6d7a1a2132d | fd8f429f146ae2e4455b5f9a19dfdfec9dcaaabf | /utils/fairseq_mod/fairseq_mod/modules/multi_head_attention_temp.py | 206260b43ac447601c45434adb4acedd15a7021b | [
"MIT"
] | permissive | mayank-k-jha/Knowledge-Distillation-Toolkit | 274448a10b7f9108bf886120481137e77eb6c348 | 2d4a2b87c757462a980f0ed0c2a7a9ccea0be683 | refs/heads/main | 2023-04-02T21:50:51.083071 | 2021-03-19T21:22:12 | 2021-03-19T21:22:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,605 | py | """
We simplified the logic in torch.nn.functional.multi_head_attention_forward and kept only the operations that are relevant to wav2vec 2.0.
"""
from __future__ import division
import torch
from torch._jit_internal import Optional
from torch._overrides import has_torch_function, handle_torch_function
from torch.nn.functional import linear, softmax, dropout
Tensor = torch.Tensor
def multi_head_attention_forward_temp(query: Tensor,
key: Tensor,
value: Tensor,
embed_dim_to_check: int,
num_heads: int,
in_proj_weight: Tensor,
in_proj_bias: Tensor,
bias_k: Optional[Tensor],
bias_v: Optional[Tensor],
add_zero_attn: bool,
dropout_p: float,
out_proj_weight: Tensor,
out_proj_bias: Tensor,
training: bool = True,
key_padding_mask: Optional[Tensor] = None,
need_weights: bool = True,
attn_mask: Optional[Tensor] = None,
use_separate_proj_weight: bool = False,
q_proj_weight: Optional[Tensor] = None,
k_proj_weight: Optional[Tensor] = None,
v_proj_weight: Optional[Tensor] = None,
static_k: Optional[Tensor] = None,
static_v: Optional[Tensor] = None,
q_proj_bias = None,
k_proj_bias = None,
v_proj_bias = None
):
if not torch.jit.is_scripting():
tens_ops = (query, key, value, in_proj_weight, in_proj_bias, bias_k, bias_v,
out_proj_weight, out_proj_bias)
if any([type(t) is not Tensor for t in tens_ops]) and has_torch_function(tens_ops):
return handle_torch_function(
multi_head_attention_forward_temp, tens_ops, query, key, value,
embed_dim_to_check, num_heads, in_proj_weight, in_proj_bias,
bias_k, bias_v, add_zero_attn, dropout_p, out_proj_weight,
out_proj_bias, training=training, key_padding_mask=key_padding_mask,
need_weights=need_weights, attn_mask=attn_mask,
use_separate_proj_weight=use_separate_proj_weight,
q_proj_weight=q_proj_weight, k_proj_weight=k_proj_weight,
v_proj_weight=v_proj_weight, static_k=static_k, static_v=static_v)
tgt_len, bsz, embed_dim = query.size()
assert embed_dim == embed_dim_to_check
# allow MHA to have different sizes for the feature dimension
assert key.size(0) == value.size(0) and key.size(1) == value.size(1)
head_dim = embed_dim // num_heads
assert head_dim * num_heads == embed_dim, "embed_dim must be divisible by num_heads"
scaling = float(head_dim) ** -0.5
# !!! Assume use_separate_proj_weight = True
q_proj_weight_non_opt = torch.jit._unwrap_optional(q_proj_weight)
len1, len2 = q_proj_weight_non_opt.size()
assert len1 == embed_dim and len2 == query.size(-1)
k_proj_weight_non_opt = torch.jit._unwrap_optional(k_proj_weight)
len1, len2 = k_proj_weight_non_opt.size()
assert len1 == embed_dim and len2 == key.size(-1)
v_proj_weight_non_opt = torch.jit._unwrap_optional(v_proj_weight)
len1, len2 = v_proj_weight_non_opt.size()
assert len1 == embed_dim and len2 == value.size(-1)
# !!! Did not indexing in_proj_bias because biases are supplied separately
q = linear(query, q_proj_weight_non_opt, q_proj_bias)
k = linear(key, k_proj_weight_non_opt, k_proj_bias)
v = linear(value, v_proj_weight_non_opt, v_proj_bias)
q = q * scaling
# !!! Assumed that attn_mask is None
# !!! Assumed that key_padding_mask.dtype = torch.Bool
# !!! Assumed that bias_k and bias_v are None
q = q.contiguous().view(tgt_len, bsz * num_heads, head_dim).transpose(0, 1)
if k is not None:
k = k.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)
if v is not None:
v = v.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)
if static_k is not None:
assert static_k.size(0) == bsz * num_heads
assert static_k.size(2) == head_dim
k = static_k
if static_v is not None:
assert static_v.size(0) == bsz * num_heads
assert static_v.size(2) == head_dim
v = static_v
src_len = k.size(1)
if key_padding_mask is not None:
assert key_padding_mask.size(0) == bsz
assert key_padding_mask.size(1) == src_len
# !!! Assumed add_zero_attn is False
attn_output_weights = torch.bmm(q, k.transpose(1, 2))
assert list(attn_output_weights.size()) == [bsz * num_heads, tgt_len, src_len]
if key_padding_mask is not None:
attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)
attn_output_weights = attn_output_weights.masked_fill(
key_padding_mask.unsqueeze(1).unsqueeze(2),
float('-inf'),
)
attn_output_weights = attn_output_weights.view(bsz * num_heads, tgt_len, src_len)
attn_output_weights = softmax(
attn_output_weights, dim=-1)
attn_output_weights = dropout(attn_output_weights, p=dropout_p, training=training)
attn_output = torch.bmm(attn_output_weights, v)
assert list(attn_output.size()) == [bsz * num_heads, tgt_len, head_dim]
attn_output = attn_output.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)
attn_output = linear(attn_output, out_proj_weight, out_proj_bias)
if need_weights:
# average attention weights over heads
attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)
return attn_output, attn_output_weights.sum(dim=1) / num_heads
else:
return attn_output, None
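# Illustrative usage sketch (assumption, not part of the original module):
# drive the simplified forward pass with separate per-projection weights,
# the only path this implementation supports.
if __name__ == "__main__":
    tgt_len, bsz, embed_dim, num_heads = 4, 2, 16, 4
    x = torch.randn(tgt_len, bsz, embed_dim)

    def w():
        return torch.randn(embed_dim, embed_dim)

    def b():
        return torch.zeros(embed_dim)

    out, attn = multi_head_attention_forward_temp(
        x, x, x, embed_dim, num_heads,
        in_proj_weight=None, in_proj_bias=None,
        bias_k=None, bias_v=None, add_zero_attn=False,
        dropout_p=0.0, out_proj_weight=w(), out_proj_bias=b(),
        training=False, use_separate_proj_weight=True,
        q_proj_weight=w(), k_proj_weight=w(), v_proj_weight=w(),
        q_proj_bias=b(), k_proj_bias=b(), v_proj_bias=b())
    # self-attention keeps the (tgt_len, bsz, embed_dim) layout
    assert out.shape == (tgt_len, bsz, embed_dim)
    # per-head weights are averaged when need_weights=True (the default)
    assert attn.shape == (bsz, tgt_len, tgt_len)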
| [
"[email protected]"
] | |
76f9d4a607fd502c71c1b37b1e0d92d48b6e5780 | 6c14069181f313e84eeb524dd495e3882156ef50 | /samples/basic/crud/models/cisco-ios-xr/Cisco-IOS-XR-ip-domain-cfg/nc-read-xr-ip-domain-cfg-10-ydk.py | f4b17cee4430379ce46492911c842f1526de80e5 | [
"Apache-2.0"
] | permissive | decolnz/ydk-py-samples | dde0fd64fd4df12a215588766a0f1fb8baf07fcd | 7fa3f53c4d458c3332d372fb2fe3c46c5e036f07 | refs/heads/master | 2021-01-19T03:24:19.877929 | 2017-04-04T17:16:46 | 2017-04-04T17:16:46 | 87,310,389 | 1 | 0 | null | 2017-04-05T13:06:57 | 2017-04-05T13:06:57 | null | UTF-8 | Python | false | false | 2,723 | py | #!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Read all data for model Cisco-IOS-XR-ip-domain-cfg.
usage: nc-read-xr-ip-domain-cfg-10-ydk.py [-h] [-v] device
positional arguments:
device NETCONF device (ssh://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ip_domain_cfg \
as xr_ip_domain_cfg
import logging
def process_ip_domain(ip_domain):
"""Process data in ip_domain object."""
pass
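# Illustrative sketch (assumption -- leaf names follow the ydk-gen bindings
# for Cisco-IOS-XR-ip-domain-cfg and may differ): processing could walk the
# per-VRF domain configuration, e.g.:
#
#   def process_ip_domain(ip_domain):
#       for vrf in ip_domain.vrfs.vrf:
#           print(vrf.vrf_name, vrf.name)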
if __name__ == "__main__":
"""Execute main program."""
parser = ArgumentParser()
parser.add_argument("-v", "--verbose", help="print debugging messages",
action="store_true")
parser.add_argument("device",
help="NETCONF device (ssh://user:password@host:port)")
args = parser.parse_args()
device = urlparse(args.device)
# log debug messages if verbose argument specified
if args.verbose:
logger = logging.getLogger("ydk")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter(("%(asctime)s - %(name)s - "
"%(levelname)s - %(message)s"))
handler.setFormatter(formatter)
logger.addHandler(handler)
# create NETCONF provider
provider = NetconfServiceProvider(address=device.hostname,
port=device.port,
username=device.username,
password=device.password,
protocol=device.scheme)
# create CRUD service
crud = CRUDService()
ip_domain = xr_ip_domain_cfg.IpDomain() # create object
# read data from NETCONF device
    ip_domain = crud.read(provider, ip_domain)
process_ip_domain(ip_domain) # process object data
provider.close()
exit()
# End of script
| [
"[email protected]"
] | |
f312c517f23c163fd877ebd3e6cd379560543899 | 4df3712caff818c0554e7fbe4b97dee5fcfd8675 | /testCase/is/test_registerUsingPOST.py | b24a2919f9393b559958aa146eef8ac54bfcca4d | [] | no_license | Qingyaya/interface | 456057a740bd77ba6c38eda27dd1aef658e0add9 | 3ae37816f52ad8c45e192596a854848d8e546b14 | refs/heads/master | 2020-03-22T07:16:04.171904 | 2018-12-05T05:20:25 | 2018-12-05T05:20:25 | 139,690,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,229 | py |
#-*-coding:utf-8-*-
import unittest
import paramunittest
from common import myHttp
from common.get_csv import *
from common.Log import Log
from common.checkResult import checkResult
import os
apifile,datapath,casename=get_dataname(os.path.abspath(__file__))
load_csv=get_testdata(datapath)
package=get_package(os.path.abspath(__file__))
@paramunittest.parametrized(*load_csv)
class test_registerUsingPOST(unittest.TestCase):
def setParameters(self,caseId,caseName,assertKey,assertValue,params):
self.caseId=caseId
self.caseName=caseName
self.assertKey=assertKey
self.assertValue=assertValue
self.params=eval(params)
def setUp(self):
self.url=get_url(apifile,casename)
self.log=Log()
self.log.build_start_line(self.caseId+ ":"+ self.caseName)
def test_registerUsingPOST(self):
u"""eps_云超市用户注册"""
self.re=myHttp.post(self.url,self.params,package)
checkResult().ck(self.caseId,self.caseName,self.assertKey,self.assertValue,self.params,self.url,self.re)
def tearDown(self):
self.log.build_end_line(self.caseId +":"+ self.caseName)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
22e90031b1c24c768d56487151773a8de59326f2 | 88e03e66109adb6325ccace96f37b31e15c5e86c | /docopt/example1.py | 91b2d16a7304e135fafe8a1dc1496d307bbc53e8 | [] | no_license | abevieiramota/learning-python | 53ee5d158af33f627c65a7d3960083a1242713ed | c9dfa37e5dd547ab03d1ff67932ff28be70bfbeb | refs/heads/master | 2021-01-10T21:05:48.021976 | 2014-07-11T20:28:03 | 2014-07-11T20:28:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 542 | py | """Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored|--drifting]
naval_fate.py -h | --help
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine."""
from docopt import docopt
args = docopt(__doc__, version='1.2.3.4')
print args
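# For example, a hypothetical invocation:
#   $ python example1.py ship Guardian move 10 50 --speed=20
# prints roughly:
#   {'--drifting': False, '--help': False, '--moored': False, '--speed': '20',
#    '--version': False, '<name>': ['Guardian'], '<x>': '10', '<y>': '50',
#    'mine': False, 'move': True, 'new': False, 'remove': False, 'set': False,
#    'ship': True, 'shoot': False}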
| [
"[email protected]"
] | |
a01ada960931c5a2d450ca04cd612c3e4ced1d26 | 4d2a789ccc9fac1380b80b558569e29a71980c0f | /backend/model/open_chat/chatterbot_chat/chattrbot_chat.py | ef256414eafbdae32a642ceff4bfb9c5f2f4a4f0 | [] | no_license | zhuxianzhong/libot | d493853ca847498b7ea12fc22b62c8a4f831dd62 | 59f70c4e41e36f3743bd659d0fb038bedb40be72 | refs/heads/master | 2020-07-13T12:53:11.850328 | 2019-08-28T12:41:16 | 2019-08-28T12:41:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,581 | py | # -*- coding: utf-8 -*-
# File: chatterbot_chat.py
# Author: Hualong Zhang <[email protected]>
# CreateDate: 19-03-07
import os
import sys
# Module imports are resolved relative to the Libbot project root
project_path = os.path.abspath(os.path.join(os.getcwd(), "../.."))
sys.path.append(project_path)
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
import logging
# logging.basicConfig(level=logging.INFO)
class ChatterbotChat():
@classmethod
def create_chatterbot(cls):
"""
        Train a chatbot on the corpus.
:return:
"""
cn_chatter = ChatBot("National Lib Chatter",
storage_adapter='chatterbot.storage.SQLStorageAdapter',
input_adapter='chatterbot.input.TerminalAdapter',
output_adapter='chatterbot.output.TerminalAdapter',
logic_adapters=[
'chatterbot.logic.BestMatch',
'chatterbot.logic.MathematicalEvaluation',
],
database='./db.sqlite3'
)
trainer = ChatterBotCorpusTrainer(cn_chatter)
trainer.train('./xiaohuangji_chatterbot_corpus.json')
# trainer.export_for_training('./my_export.json')
return cn_chatter
@classmethod
def load_chatterbot(cls):
"""
        Load the trained bot.
:return:
"""
cn_chatterbot = ChatBot('National Lib Chatter',
storage_adapter='chatterbot.storage.SQLStorageAdapter',
input_adapter = 'chatterbot.input.TerminalAdapter',
output_adapter = 'chatterbot.output.TerminalAdapter',
logic_adapters = [
'chatterbot.logic.BestMatch',
'chatterbot.logic.MathematicalEvaluation',
],
database = './db.sqlite3'
)
return cn_chatterbot
if __name__ == '__main__':
    # Train once to build db.sqlite3, then load the trained bot
    ChatterbotChat.create_chatterbot()
    test_chatter = ChatterbotChat.load_chatterbot()
while True:
try:
user_input = input('USER:')
response = test_chatter.get_response(user_input)
print('BOT:', response)
        # exit only on Ctrl-C or Ctrl-D
except (KeyboardInterrupt, EOFError, SystemExit):
break
| [
"[email protected]"
] | |
fafe53a39da8ee583dc0c6eb3fcc4cee0ba2e479 | 6df76f8a6fcdf444c3863e3788a2f4b2c539c22c | /django code/p44/p44/settings.py | d6f5008e8a2e60f7f24dd5e91f4f61217b442cc7 | [] | no_license | basantbhandari/DjangoProjectsAsDocs | 068e4a704fade4a97e6c40353edb0a4299bd9678 | 594dbb560391eaf94bb6db6dc07702d127010b88 | refs/heads/master | 2022-12-18T22:33:23.902228 | 2020-09-22T13:11:01 | 2020-09-22T13:11:01 | 297,651,728 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,134 | py | """
Django settings for p44 project.
Generated by 'django-admin startproject' using Django 3.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^o920jad3pr1kw$gy+1hy(pgw_#wmrbwq-)*2x&$bsp5*j=s%h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# user app
'enroll',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'p44.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'p44.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
d845a2d41d812594b7e5c1c64f2625373db1d58c | 028e5d9ee8cc476459372843e759730faf4d7b73 | /Package/CONFIG.py | 12cf50ec1d6032598f68c4c67df58c27f64a788b | [
"Apache-2.0"
] | permissive | YuanYuLin/libiopccmd_client | a3635d59036ade6dfeacc0e556edb1fa16c7d65b | b9f320c28b631bb3ecea6289a2d50421a1561fd0 | refs/heads/master | 2021-01-11T19:05:30.422275 | 2017-04-23T04:54:37 | 2017-04-23T04:54:37 | 79,315,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | import ops
def MAIN_ENV(args):
env = {}
return env
def MAIN(args):
print "libiopccmd_client"
| [
"[email protected]"
] | |
190a82839258ab6b37fcae6033559509b366b1d2 | ea57ef44636ce151b3ef5322466cdfcb02482515 | /tests/formatting/test_formatter.py | 14106396206bd4fd77b1e2c26fe6f06344ede303 | [
"MIT"
] | permissive | Sn3akyP3t3/pendulum | acb3dc5067576c4569a08b1d8a8ecfce918b4724 | 7ce170bdc64199d74e09e347402983f1bb015f63 | refs/heads/master | 2020-03-22T01:15:01.160870 | 2018-07-01T15:49:09 | 2018-07-01T15:49:09 | 139,292,657 | 0 | 0 | MIT | 2018-07-01T01:46:00 | 2018-07-01T01:46:00 | null | UTF-8 | Python | false | false | 7,274 | py | # -*- coding: utf-8 -*-
import pytest
import pendulum
from pendulum.formatting import Formatter
from pendulum.locales.locale import Locale
@pytest.fixture(autouse=True)
def setup():
Locale._cache['dummy'] = {}
yield
del Locale._cache['dummy']
def test_year_tokens():
d = pendulum.datetime(2009, 1, 14, 15, 25, 50, 123456)
f = Formatter()
assert f.format(d, 'YYYY') == '2009'
assert f.format(d, 'YY') == '09'
assert f.format(d, 'Y') == '2009'
def test_quarter_tokens():
f = Formatter()
d = pendulum.datetime(1985, 1, 4)
assert f.format(d, 'Q') == '1'
d = pendulum.datetime(2029, 8, 1)
assert f.format(d, 'Q') == '3'
d = pendulum.datetime(1985, 1, 4)
assert f.format(d, 'Qo') == '1st'
d = pendulum.datetime(2029, 8, 1)
assert f.format(d, 'Qo') == '3rd'
d = pendulum.datetime(1985, 1, 4)
assert f.format(d, 'Qo', locale='fr') == '1er'
d = pendulum.datetime(2029, 8, 1)
assert f.format(d, 'Qo', locale='fr') == '3e'
def test_month_tokens():
f = Formatter()
d = pendulum.datetime(2016, 3, 24)
assert f.format(d, 'MM') == '03'
assert f.format(d, 'M') == '3'
assert f.format(d, 'MMM') == 'Mar'
assert f.format(d, 'MMMM') == 'March'
assert f.format(d, 'Mo') == '3rd'
assert f.format(d, 'MMM', locale='fr') == 'mars'
assert f.format(d, 'MMMM', locale='fr') == 'mars'
assert f.format(d, 'Mo', locale='fr') == '3e'
def test_day_tokens():
f = Formatter()
d = pendulum.datetime(2016, 3, 7)
assert f.format(d, 'DD') == '07'
assert f.format(d, 'D') == '7'
assert f.format(d, 'Do') == '7th'
assert f.format(d.first_of('month'), 'Do') == '1st'
assert f.format(d, 'Do', locale='fr') == '7e'
assert f.format(d.first_of('month'), 'Do', locale='fr') == '1er'
def test_day_of_year():
f = Formatter()
d = pendulum.datetime(2016, 8, 28)
assert f.format(d, 'DDDD') == '241'
assert f.format(d, 'DDD') == '241'
assert f.format(d.start_of('year'), 'DDDD') == '001'
assert f.format(d.start_of('year'), 'DDD') == '1'
assert f.format(d, 'DDDo') == '241st'
assert f.format(d.add(days=3), 'DDDo') == '244th'
assert f.format(d, 'DDDo', locale='fr') == '241e'
assert f.format(d.add(days=3), 'DDDo', locale='fr') == '244e'
def test_week_of_year():
f = Formatter()
d = pendulum.datetime(2016, 8, 28)
assert f.format(d, 'wo') == '34th'
def test_day_of_week():
f = Formatter()
d = pendulum.datetime(2016, 8, 28)
assert f.format(d, 'd') == '0'
assert f.format(d, 'dd') == 'Sun'
assert f.format(d, 'ddd') == 'Sun'
assert f.format(d, 'dddd') == 'Sunday'
assert f.format(d, 'dd', locale='fr') == 'dim.'
assert f.format(d, 'ddd', locale='fr') == 'dim.'
assert f.format(d, 'dddd', locale='fr') == 'dimanche'
assert f.format(d, 'do') == '0th'
def test_am_pm():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 23)
assert f.format(d, 'A') == 'PM'
assert f.format(d.set(hour=11), 'A') == 'AM'
def test_hour():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7)
assert f.format(d, 'H') == '7'
assert f.format(d, 'HH') == '07'
d = pendulum.datetime(2016, 8, 28, 0)
assert f.format(d, 'h') == '12'
assert f.format(d, 'hh') == '12'
def test_minute():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3)
assert f.format(d, 'm') == '3'
assert f.format(d, 'mm') == '03'
def test_second():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6)
assert f.format(d, 's') == '6'
assert f.format(d, 'ss') == '06'
def test_fractional_second():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123456)
assert f.format(d, 'S') == '1'
assert f.format(d, 'SS') == '12'
assert f.format(d, 'SSS') == '123'
assert f.format(d, 'SSSS') == '1234'
assert f.format(d, 'SSSSS') == '12345'
assert f.format(d, 'SSSSSS') == '123456'
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 0)
assert f.format(d, 'S') == '0'
assert f.format(d, 'SS') == '00'
assert f.format(d, 'SSS') == '000'
assert f.format(d, 'SSSS') == '0000'
assert f.format(d, 'SSSSS') == '00000'
assert f.format(d, 'SSSSSS') == '000000'
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123)
assert f.format(d, 'S') == '0'
assert f.format(d, 'SS') == '00'
assert f.format(d, 'SSS') == '000'
assert f.format(d, 'SSSS') == '0001'
assert f.format(d, 'SSSSS') == '00012'
assert f.format(d, 'SSSSSS') == '000123'
def test_timezone():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123456, tz='Europe/Paris')
assert f.format(d, 'zz') == 'CEST'
assert f.format(d, 'z') == 'Europe/Paris'
d = pendulum.datetime(2016, 1, 28, 7, 3, 6, 123456, tz='Europe/Paris')
assert f.format(d, 'zz') == 'CET'
assert f.format(d, 'z') == 'Europe/Paris'
def test_timezone_offset():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123456, tz='Europe/Paris')
assert f.format(d, 'ZZ') == '+0200'
assert f.format(d, 'Z') == '+02:00'
d = pendulum.datetime(2016, 1, 28, 7, 3, 6, 123456, tz='Europe/Paris')
assert f.format(d, 'ZZ') == '+0100'
assert f.format(d, 'Z') == '+01:00'
d = pendulum.datetime(2016, 1, 28, 7, 3, 6, 123456, tz='America/Guayaquil')
assert f.format(d, 'ZZ') == '-0500'
assert f.format(d, 'Z') == '-05:00'
def test_timestamp():
f = Formatter()
d = pendulum.datetime(1970, 1, 1)
assert f.format(d, 'X') == '0'
assert f.format(d.add(days=1), 'X') == '86400'
def test_date_formats():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123456)
assert f.format(d, 'LT') == '7:03 AM'
assert f.format(d, 'LTS') == '7:03:06 AM'
assert f.format(d, 'L') == '08/28/2016'
assert f.format(d, 'LL') == 'August 28, 2016'
assert f.format(d, 'LLL') == 'August 28, 2016 7:03 AM'
assert f.format(d, 'LLLL') == 'Sunday, August 28, 2016 7:03 AM'
assert f.format(d, 'LT', locale='fr') == '07:03'
assert f.format(d, 'LTS', locale='fr') == '07:03:06'
assert f.format(d, 'L', locale='fr') == '28/08/2016'
assert f.format(d, 'LL', locale='fr') == u'28 août 2016'
assert f.format(d, 'LLL', locale='fr') == u'28 août 2016 07:03'
assert f.format(d, 'LLLL', locale='fr') == u'dimanche 28 août 2016 07:03'
def test_escape():
f = Formatter()
d = pendulum.datetime(2016, 8, 28)
assert f.format(d, '[YYYY] YYYY \[YYYY\]') == 'YYYY 2016 [2016]'
assert f.format(d, '\D D \\\D') == 'D 28 \\28'
def test_date_formats_missing():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123456)
assert f.format(d, 'LT', locale='dummy') == '7:03 AM'
assert f.format(d, 'LTS', locale='dummy') == '7:03:06 AM'
assert f.format(d, 'L', locale='dummy') == '08/28/2016'
assert f.format(d, 'LL', locale='dummy') == 'August 28, 2016'
assert f.format(d, 'LLL', locale='dummy') == 'August 28, 2016 7:03 AM'
assert f.format(d, 'LLLL', locale='dummy') == 'Sunday, August 28, 2016 7:03 AM'
def test_unknown_token():
f = Formatter()
d = pendulum.datetime(2016, 8, 28, 7, 3, 6, 123456)
assert f.format(d, 'J') == 'J'
| [
"[email protected]"
] | |
8bb167bb9814265161c1b8eeddb4dcacc76b6bbf | 87879af0c48875acc14d7b31842f403cb29d93e3 | /isitfit/cli/core.py | 74f848a438e7cb075a711fcbda411d875f9bc413 | [
"Apache-2.0"
] | permissive | pgupta35/isitfit | bfcec5e03778837c7040a45fbc710c7d8dcc5366 | db69f0501486346e3794f1013ed0c57ce3895b8c | refs/heads/master | 2020-09-14T16:50:23.454724 | 2019-11-20T17:39:40 | 2019-11-20T17:39:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,582 | py | # RuntimeError: Click will abort further execution because Python 3 was configured to use ASCII as encoding for the environment.
# Consult https://click.palletsprojects.com/en/7.x/python3/ for mitigation steps.
#
# Edit 2019-10-08: whatsapp's wadebug uses "click.disable_unicode_literals_warning = True"
# Ref: https://github.com/WhatsApp/WADebug/blob/958ac37be804cc732ae514d4872b93d19d197a5c/wadebug/cli.py#L23
from ..utils import mysetlocale
mysetlocale()
import logging
logger = logging.getLogger('isitfit')
import click
from .. import isitfit_version
# For the --share-email "multiple options"
# https://click.palletsprojects.com/en/7.x/options/#multiple-options
@click.group(invoke_without_command=True)
@click.option('--debug', is_flag=True, help='Display more details to help with debugging')
@click.option('--optimize', is_flag=True, help='DEPRECATED: use "isitfit cost optimize" instead')
@click.option('--version', is_flag=True, help='DEPRECATED: use "isitfit version" instead')
@click.option('--share-email', multiple=True, help='Share result to email address')
@click.option('--skip-check-upgrade', is_flag=True, help='Skip step for checking for upgrade of isitfit')
@click.pass_context
def cli_core(ctx, debug, optimize, version, share_email, skip_check_upgrade):
logLevel = logging.DEBUG if debug else logging.INFO
ch = logging.StreamHandler()
ch.setLevel(logLevel)
logger.addHandler(ch)
logger.setLevel(logLevel)
if debug:
logger.debug("Enabled debug level")
logger.debug("-------------------")
# After adding the separate command for "cost" (i.e. `isitfit cost analyze`)
# putting a note here to notify user of new usage
# Ideally, this code would be deprecated though
if ctx.invoked_subcommand is None:
# if still used without subcommands, notify user of new usage
#from .cost import analyze as cost_analyze, optimize as cost_optimize
#if optimize:
# ctx.invoke(cost_optimize, filter_tags=filter_tags, n=n)
#else:
# ctx.invoke(cost_analyze, filter_tags=filter_tags)
from click.exceptions import UsageError
if optimize:
raise UsageError("As of version 0.11, please use `isitfit cost optimize` instead of `isitfit --optimize`.")
elif version:
# ctx.invoke(cli_version)
raise UsageError("As of version 0.11, please use `isitfit version` instead of `isitfit --version`.")
else:
raise UsageError("As of version 0.11, please use `isitfit cost analyze` instead of `isitfit` to calculate the cost-weighted utilization.")
# make sure that context is a dict
ctx.ensure_object(dict)
# check if emailing requested
if share_email is not None:
max_n_recipients = 3
if len(share_email) > max_n_recipients:
from click.exceptions import BadParameter
raise BadParameter("Maximum allowed number of email recipients is %i. Received %i"%(max_n_recipients, len(share_email)), param_hint="--share-email")
ctx.obj['share_email'] = share_email
# check if current version is out-of-date
if not skip_check_upgrade:
from ..utils import prompt_upgrade
is_outdated = prompt_upgrade('isitfit', isitfit_version)
ctx.obj['is_outdated'] = is_outdated
from .tags import tags as cli_tags
from .cost import cost as cli_cost
from .version import version as cli_version
cli_core.add_command(cli_version)
cli_core.add_command(cli_cost)
cli_core.add_command(cli_tags)
#-----------------------
if __name__ == '__main__':
cli_core()
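# Example invocations (hypothetical; subcommands and flags are defined above):
#   isitfit version
#   isitfit --debug cost analyze
#   isitfit --share-email [email protected] cost optimize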
| [
"[email protected]"
] | |
4f1e75369d5f0420fdc4f28eb1188153e869a7d9 | a34ec07c3464369a88e68c9006fa1115f5b61e5f | /B_HashTable/Basic/L0_1282_Group_the_People_Given_the_Group_Size_They_Belong_To.py | 92ea13493ced4cb225285f580300bf6fa20a0bcb | [] | no_license | 824zzy/Leetcode | 9220f2fb13e03d601d2b471b5cfa0c2364dbdf41 | 93b7f4448a366a709214c271a570c3399f5fc4d3 | refs/heads/master | 2023-06-27T02:53:51.812177 | 2023-06-16T16:25:39 | 2023-06-16T16:25:39 | 69,733,624 | 14 | 3 | null | 2022-05-25T06:48:38 | 2016-10-01T10:56:07 | Python | UTF-8 | Python | false | false | 440 | py | """ https://leetcode.com/problems/group-the-people-given-the-group-size-they-belong-to/
"""
from typing import List
from collections import defaultdict

class Solution:
def groupThePeople(self, groupSizes: List[int]) -> List[List[int]]:
cnt = defaultdict(list)
for i, x in enumerate(groupSizes):
cnt[x].append(i)
ans = []
for k, v in cnt.items():
for i in range(0, len(v), k):
ans.append(v[i:i+k])
        return ans
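# Quick sanity check (example from the problem statement; any ordering of the
# groups is accepted by the judge):
#   Solution().groupThePeople([3, 3, 3, 3, 3, 1, 3])
#   # -> [[0, 1, 2], [3, 4, 6], [5]]
| [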
"[email protected]"
] | |
4a59b66d1aab3d4e8c93955f5bad4b91c49bb18f | b0a350df2e6eef86b753e2eff9e57b21bd113019 | /users/models.py | 9ea4638742a4c278f8a25a672944e04bdd4c7bab | [] | no_license | fazer1929/Go-Collab | 8ba92ce926bb163e50ffcf3eb4eecb29d8e0a94b | 75efb243ee6143603026dee45ca73aabf8d9ddff | refs/heads/main | 2023-02-24T01:08:25.440449 | 2021-01-30T21:50:08 | 2021-01-30T21:50:08 | 334,199,821 | 0 | 0 | null | 2021-01-30T21:30:58 | 2021-01-29T16:24:56 | HTML | UTF-8 | Python | false | false | 655 | py | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE,null=True,blank=True)
bio = models.TextField(max_length=500, blank=True)
skills = models.CharField(max_length=40,blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    instance.profile.save()
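# Sketch of the effect of the two receivers above (hypothetical usage; assumes
# the default auth User model):
#   user = User.objects.create_user(username='demo')  # create_user_profile fires
#   user.profile.bio = 'hello'
#   user.save()                                       # save_user_profile persists it
| [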
"[email protected]"
] | |
69ac7f2c16d34374a622fb6417163aa64a013b79 | 1df82fa8ef888b74fb9095c9ade89e16895366b1 | /14.Lambdas and Buit In Functions - Exercise/06. Unique Numbers.py | 6980fd7dc051a806142c78f6ce701771e8ffbacc | [] | no_license | filipov73/python_advanced_january_2020 | 868eb4bc365f7774c373183760e7ac584e1bd20c | a5e24190ee08bd1a0534dc04f91a5ba1927d1b19 | refs/heads/master | 2020-11-26T14:07:12.423309 | 2020-02-23T15:20:13 | 2020-02-23T15:20:13 | 229,097,988 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | numbers = [float(x) for x in input().split()]
round_numbers = sorted(set(map(round, numbers)))
print(min(round_numbers))
print(max(round_numbers))
mul_x_3 = [x * 3 for x in round_numbers]
print(" ".join(map(str, mul_x_3)))
# Task: print the min and max of the unique rounded numbers, then print those
# numbers multiplied by 3, in ascending order, separated by spaces.
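# Example (hypothetical input): "2.5 2.49 8 8.1" rounds to {2, 8}, so the
# program prints 2, then 8, then "6 24".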
| [
"[email protected]"
] | |
39079db82e3a8b93fef53ad1103579bf166f8128 | a99bf471ebc6b3753a17bfe74b711185e72d9194 | /map/cats.py | 737f1b41deff42b9cd67e2245c1de437339c47ed | [] | no_license | andrewpilon/decals-web | efd6ecaa00196636f7c2979b308eab1c96bfd971 | 50602dea0696f3e81e57f47cab435b3f1433e092 | refs/heads/master | 2020-04-03T13:26:11.428079 | 2018-10-24T11:13:51 | 2018-10-24T11:13:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,308 | py | from __future__ import print_function
import os
import fitsio
if __name__ == '__main__':
import sys
sys.path.insert(0, 'django-1.9')
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'viewer.settings'
import django
django.setup()
from django.http import HttpResponse, StreamingHttpResponse
from viewer import settings
try:
from django.core.urlresolvers import reverse
except:
# django 2.0
from django.urls import reverse
from map.utils import send_file, trymakedirs, get_tile_wcs, oneyear
debug = print
if not settings.DEBUG_LOGGING:
def debug(*args, **kwargs):
pass
catversions = {
'decals-dr7': [1,],
'mzls+bass-dr6': [1,],
'decals-dr5': [1,],
'mzls+bass-dr4': [1,],
'decals-dr2': [2,],
'decals-dr3': [1,],
'ngc': [1,],
'lslga': [1,],
'spec': [1,],
'spec-deep2': [1,],
'bright': [1,],
'tycho2': [1,],
'targets-dr2': [1,],
'targets-dr45': [1,],
'targets-dr56': [1,],
'targets-bgs-dr56': [1,],
'targets-dr67': [1,],
'targets-bgs-dr67': [1,],
'targets-sky-dr67': [1,],
'targets-bright-dr67': [1,],
'targets-dark-dr67': [1,],
'gaia-dr1': [1,],
'gaia-dr2': [1,],
'sdss-cat': [1,],
'phat-clusters': [1,],
'ps1': [1,],
}

galaxycat = None  # cached galaxy table used by get_random_galaxy()
def cat_phat_clusters(req, ver):
import json
from astrometry.util.fits import fits_table, merge_tables
tag = 'phat-clusters'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
cat = fits_table(os.path.join(settings.DATA_DIR, 'phat-clusters.fits'))
cat.cut((cat.ra >= ralo ) * (cat.ra <= rahi) *
(cat.dec >= declo) * (cat.dec <= dechi))
return HttpResponse(json.dumps(dict(
name=[str(s.strip()) for s in cat.name],
rd=[(float(o.ra),float(o.dec)) for o in cat],
mag=[float(o.mag) for o in cat],
young=[bool(o.young) for o in cat],
velocity=[float(o.velocity) for o in cat],
metallicity=[float(o.metallicity) for o in cat],
)),
content_type='application/json')
def cat_gaia_dr1(req, ver):
import json
from legacypipe.gaiacat import GaiaCatalog
tag = 'gaia-dr1'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
os.environ['GAIA_CAT_DIR'] = settings.GAIA_DR1_CAT_DIR
gaia = GaiaCatalog()
cat = gaia.get_catalog_radec_box(ralo, rahi, declo, dechi)
return HttpResponse(json.dumps(dict(
rd=[(float(o.ra),float(o.dec)) for o in cat],
gmag=[float(o.phot_g_mean_mag) for o in cat],
)),
content_type='application/json')
def cat_gaia_dr2(req, ver):
import json
from legacypipe.gaiacat import GaiaCatalog
import numpy as np
tag = 'gaia-dr2'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
os.environ['GAIA_CAT_DIR'] = settings.GAIA_DR2_CAT_DIR
gaia = GaiaCatalog()
cat = gaia.get_catalog_radec_box(ralo, rahi, declo, dechi)
for c in ['ra','dec','phot_g_mean_mag','phot_bp_mean_mag', 'phot_rp_mean_mag',
'pmra','pmdec','parallax']:
val = cat.get(c)
val[np.logical_not(np.isfinite(val))] = 0.
cat.set(c, val)
return HttpResponse(json.dumps(dict(
rd=[(float(o.ra),float(o.dec)) for o in cat],
sourceid=[int(o.source_id) for o in cat],
gmag=[float(o.phot_g_mean_mag) for o in cat],
bpmag=[float(o.phot_bp_mean_mag) for o in cat],
rpmag=[float(o.phot_rp_mean_mag) for o in cat],
pmra=[float(o.pmra) for o in cat],
pmdec=[float(o.pmdec) for o in cat],
parallax=[float(o.parallax) for o in cat],
)),
content_type='application/json')
def cat_sdss(req, ver):
import json
import numpy as np
from astrometry.util.starutil_numpy import degrees_between, radectoxyz, xyztoradec
from map.views import sdss_ccds_near
from astrometry.util.fits import fits_table, merge_tables
tag = 'sdss-cat'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
rad = degrees_between(ralo, declo, rahi, dechi) / 2.
xyz1 = radectoxyz(ralo, declo)
xyz2 = radectoxyz(rahi, dechi)
xyz = (xyz1 + xyz2)
xyz /= np.sqrt(np.sum(xyz**2))
rc,dc = xyztoradec(xyz)
rad = rad + np.hypot(10.,14.)/2./60.
ccds = sdss_ccds_near(rc[0], dc[0], rad)
if ccds is None:
print('No SDSS CCDs nearby')
return HttpResponse(json.dumps(dict(rd=[])),
content_type='application/json')
print(len(ccds), 'SDSS CCDs')
T = []
for ccd in ccds:
# env/BOSS_PHOTOOBJ/301/2073/3/photoObj-002073-3-0088.fits
fn = os.path.join(settings.SDSS_BASEDIR, 'env', 'BOSS_PHOTOOBJ',
str(ccd.rerun), str(ccd.run), str(ccd.camcol),
'photoObj-%06i-%i-%04i.fits' % (ccd.run, ccd.camcol, ccd.field))
print('Reading', fn)
T.append(fits_table(fn, columns='ra dec objid mode objc_type objc_flags objc_flags nchild tai expflux devflux psfflux cmodelflux fracdev mjd'.split()))
T = merge_tables(T)
T.cut((T.dec >= declo) * (T.dec <= dechi))
# FIXME
T.cut((T.ra >= ralo) * (T.ra <= rahi))
# primary
T.cut(T.mode == 1)
types = ['P' if t == 6 else 'C' for t in T.objc_type]
fluxes = [p if t == 6 else c for t,p,c in zip(T.objc_type, T.psfflux, T.cmodelflux)]
return HttpResponse(json.dumps(dict(
rd=[(float(o.ra),float(o.dec)) for o in T],
sourcetype=types,
fluxes = [dict(u=float(f[0]), g=float(f[1]), r=float(f[2]),
i=float(f[3]), z=float(f[4])) for f in fluxes],
)),
content_type='application/json')
def upload_cat(req):
import tempfile
from astrometry.util.fits import fits_table
from django.http import HttpResponseRedirect
from map.views import index
if req.method != 'POST':
return HttpResponse('POST only')
print('Files:', req.FILES)
cat = req.FILES['catalog']
dirnm = settings.USER_QUERY_DIR
if not os.path.exists(dirnm):
try:
os.makedirs(dirnm)
except:
pass
f,tmpfn = tempfile.mkstemp(suffix='.fits', dir=dirnm)
os.close(f)
os.unlink(tmpfn)
print('Saving to', tmpfn)
with open(tmpfn, 'wb+') as destination:
for chunk in cat.chunks():
destination.write(chunk)
print('Wrote', tmpfn)
try:
T = fits_table(tmpfn)
except:
return HttpResponse('Must upload FITS format catalog including "RA", "Dec", optionally "Name" columns')
cols = T.columns()
if not (('ra' in cols) and ('dec' in cols)):
return HttpResponse('Must upload catalog including "RA", "Dec", optionally "Name" columns')
ra,dec = T.ra[0], T.dec[0]
catname = tmpfn.replace(dirnm, '').replace('.fits', '')
if catname.startswith('/'):
catname = catname[1:]
try:
import fitsio
primhdr = fitsio.read_header(tmpfn)
name = primhdr.get('CATNAME', '')
color = primhdr.get('CATCOLOR', '')
if len(name):
catname = catname + '-n%s' % name.strip().replace(' ','_')
if len(color):
catname = catname + '-c%s' % color.strip()
except:
pass
return HttpResponseRedirect(reverse(index) +
'?ra=%.4f&dec=%.4f&catalog=%s' % (ra, dec, catname))
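# Sketch of building a catalog that upload_cat() accepts (RA/Dec columns are
# required; the CATNAME/CATCOLOR primary-header cards are optional). All
# values below are hypothetical:
#   from astrometry.util.fits import fits_table
#   import numpy as np
#   T = fits_table()
#   T.ra = np.array([18.6595]); T.dec = np.array([-1.0210])
#   T.name = np.array(['NGC 442'])
#   T.writeto('/tmp/upload.fits')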
def get_random_galaxy(layer=None):
import numpy as np
    global galaxycat
    from map.views import layer_to_survey_name
    if layer is not None:
        layer = layer_to_survey_name(layer)
if layer == 'mzls+bass-dr4':
galfn = os.path.join(settings.DATA_DIR, 'galaxies-in-dr4.fits')
drnum = 4
elif layer == 'mzls+bass-dr6':
galfn = os.path.join(settings.DATA_DIR, 'galaxies-in-dr6.fits')
drnum = 4
elif layer == 'decals-dr7':
galfn = os.path.join(settings.DATA_DIR, 'galaxies-in-dr7.fits')
drnum = 7
elif layer == 'decals-dr5':
galfn = os.path.join(settings.DATA_DIR, 'galaxies-in-dr5.fits')
drnum = 5
else:
galfn = os.path.join(settings.DATA_DIR, 'galaxies-in-dr3.fits')
drnum = 3
if galaxycat is None and not os.path.exists(galfn):
if settings.CREATE_GALAXY_CATALOG:
try:
create_galaxy_catalog(galfn, drnum)
except:
import traceback
traceback.print_exc()
pass
if not os.path.exists(galfn):
if drnum == 4:
return 147.1744, 44.0812, 'NGC 2998'
else:
return 18.6595, -1.0210, 'NGC 442'
if galaxycat is None:
from astrometry.util.fits import fits_table
galaxycat = fits_table(galfn)
i = np.random.randint(len(galaxycat))
ra = float(galaxycat.ra[i])
dec = float(galaxycat.dec[i])
name = galaxycat.name[i].strip()
return ra,dec,name
def create_galaxy_catalog(galfn, drnum):
import astrometry.catalogs
from astrometry.util.fits import fits_table, merge_tables
import fitsio
from astrometry.util.util import Tan
from astrometry.libkd.spherematch import match_radec
import numpy as np
fn = os.path.join(os.path.dirname(astrometry.catalogs.__file__), 'ngc2000.fits')
NGC = fits_table(fn)
print(len(NGC), 'NGC objects')
NGC.name = np.array(['NGC %i' % n for n in NGC.ngcnum])
NGC.delete_column('ngcnum')
fn = os.path.join(os.path.dirname(astrometry.catalogs.__file__), 'ic2000.fits')
IC = fits_table(fn)
print(len(IC), 'IC objects')
IC.name = np.array(['IC %i' % n for n in IC.icnum])
IC.delete_column('icnum')
# fn = os.path.join(settings.DATA_DIR, 'ugc.fits')
# UGC = fits_table(fn)
# print(len(UGC), 'UGC objects')
# UGC.name = np.array(['UGC %i' % n for n in UGC.ugcnum])
# UGC.delete_column('ugcnum')
#T = merge_tables([NGC, IC, UGC])
#T.writeto(os.path.join(settings.DATA_DIR, 'galaxy-cats.fits'))
T = merge_tables([NGC, IC])
T.writeto(os.path.join('/tmp/galaxy-cats.fits'))
keep = np.zeros(len(T), bool)
from map.views import get_survey
bricks = None
if drnum == 3:
bricks = fits_table(os.path.join(settings.DATA_DIR, 'decals-dr3',
'decals-bricks-in-dr3.fits'))
bricks.cut(bricks.has_g * bricks.has_r * bricks.has_z)
print(len(bricks), 'bricks with grz')
survey = get_survey('decals-dr3')
elif drnum == 4:
survey = get_survey('mzls+bass-dr4')
bricks = fits_table(os.path.join(settings.DATA_DIR, 'survey-bricks-in-dr4.fits'))
elif drnum == 6:
survey = get_survey('mzls+bass-dr6')
bricks = survey.get_bricks()
bricks.cut(bricks.has_g * bricks.has_r * bricks.has_z)
elif drnum == 5:
survey = get_survey('decals-dr5')
elif drnum == 7:
survey = get_survey('decals-dr7')
if bricks is None:
bricks = survey.get_bricks()
I,J,d = match_radec(bricks.ra, bricks.dec, T.ra, T.dec, 0.25, nearest=True)
print('Matched', len(I), 'bricks near NGC objects')
bricks.cut(I)
# bricks = fits_table(os.path.join(settings.DATA_DIR, 'survey-bricks-dr4.fits'))
# bricks.cut((bricks.nexp_g > 0) *
# (bricks.nexp_r > 0) *
# (bricks.nexp_z > 0))
# print(len(bricks), 'bricks with grz')
#
# sbricks = survey.get_bricks()
# binds = dict([(b,i) for i,b in enumerate(sbricks.brickname)])
# I = np.array([binds[b] for b in bricks.brickname])
# bricks.ra1 = sbricks.ra1[I]
# bricks.ra2 = sbricks.ra2[I]
# bricks.dec1 = sbricks.dec1[I]
# bricks.dec2 = sbricks.dec2[I]
#
# fn = '/tmp/survey-bricks-in-dr4.fits'
# bricks.writeto(fn)
# print('Wrote', fn)
for brick in bricks:
fn = survey.find_file('nexp', brick=brick.brickname, band='r')
if not os.path.exists(fn):
print('Does not exist:', fn)
continue
I = np.flatnonzero((T.ra >= brick.ra1 ) * (T.ra < brick.ra2 ) *
(T.dec >= brick.dec1) * (T.dec < brick.dec2))
print('Brick', brick.brickname, 'has', len(I), 'galaxies')
if len(I) == 0:
continue
nn,hdr = fitsio.read(fn, header=True)
h,w = nn.shape
#imgfn = survey.find_file('image', brick=brick.brickname, band='r')
#wcs = Tan(imgfn)
print('file', fn)
wcs = Tan(hdr)
ok,x,y = wcs.radec2pixelxy(T.ra[I], T.dec[I])
x = np.clip((x-1).astype(int), 0, w-1)
y = np.clip((y-1).astype(int), 0, h-1)
n = nn[y,x]
keep[I[n > 0]] = True
T.cut(keep)
fn = '/tmp/galaxies-in-dr%i.fits' % drnum
T.writeto(fn)
print('Wrote', fn)
T.writeto(galfn)
def cat_targets_dr2(req, ver):
import json
tag = 'targets-dr2'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
from astrometry.util.fits import fits_table, merge_tables
import numpy as np
from cat.models import DR2_Target as Target
from astrometry.util.starutil_numpy import radectoxyz, xyztoradec, degrees_between
xyz1 = radectoxyz(ralo, declo)
xyz2 = radectoxyz(rahi, dechi)
xyz = (xyz1 + xyz2)/2.
xyz /= np.sqrt(np.sum(xyz**2))
rc,dc = xyztoradec(xyz)
rc = rc[0]
dc = dc[0]
rad = degrees_between(rc, dc, ralo, declo)
objs = Target.objects.extra(where=[
'q3c_radial_query(target.ra, target.dec, %.4f, %.4f, %g)'
% (rc, dc, rad * 1.01)])
print('Got', objs.count(), 'targets')
print('types:', np.unique([o.type for o in objs]))
print('versions:', np.unique([o.version for o in objs]))
return HttpResponse(json.dumps(dict(
rd=[(float(o.ra),float(o.dec)) for o in objs],
name=[o.type for o in objs],
)),
content_type='application/json')
def cat_targets_dr45(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr5-0.20.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr4-0.20.0.kd.fits'),
], tag = 'targets-dr45')
def cat_targets_dr56(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr5-0.20.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr6-0.22.0.kd.fits'),
], tag = 'targets-dr56')
def cat_targets_bgs_dr56(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr5-0.20.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr6-0.22.0.kd.fits'),
], tag = 'targets-bgs-dr56', bgs=True)
def cat_targets_dr67(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr6-0.22.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr7.1-0.23.0.kd.fits'),
], tag = 'targets-dr67')
def cat_targets_bgs_dr67(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr6-0.22.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr7.1-0.23.0.kd.fits'),
], tag = 'targets-bgs-dr67', bgs=True)
def cat_targets_sky_dr67(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'skies-dr6-0.22.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'skies-dr7.1-0.22.0.kd.fits'),
], tag = 'targets-sky-dr67', sky=True)
def cat_targets_bright_dr67(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr6-0.22.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr7.1-0.23.0.kd.fits'),
], tag = 'targets-bright-dr67', bright=True)
def cat_targets_dark_dr67(req, ver):
return cat_targets_drAB(req, ver, cats=[
os.path.join(settings.DATA_DIR, 'targets-dr6-0.22.0.kd.fits'),
os.path.join(settings.DATA_DIR, 'targets-dr7.1-0.23.0.kd.fits'),
], tag = 'targets-dark-dr67', dark=True)
def cat_targets_drAB(req, ver, cats=[], tag='', bgs=False, sky=False, bright=False, dark=False):
import json
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
from astrometry.util.fits import fits_table, merge_tables
from astrometry.libkd.spherematch import tree_open, tree_search_radec
import numpy as np
from astrometry.util.starutil_numpy import radectoxyz, xyztoradec, degrees_between
xyz1 = radectoxyz(ralo, declo)
xyz2 = radectoxyz(rahi, dechi)
xyz = (xyz1 + xyz2)/2.
xyz /= np.sqrt(np.sum(xyz**2))
rc,dc = xyztoradec(xyz)
rc = rc[0]
dc = dc[0]
rad = degrees_between(rc, dc, ralo, declo)
'''
startree -i /project/projectdirs/desi/target/catalogs/targets-dr4-0.20.0.fits -o data/targets-dr4-0.20.0.kd.fits -P -k -T
'''
TT = []
for fn in cats:
kd = tree_open(fn)
I = tree_search_radec(kd, rc, dc, rad)
print('Matched', len(I), 'from', fn)
if len(I) == 0:
continue
T = fits_table(fn, rows=I)
TT.append(T)
if len(TT) == 0:
return HttpResponse(json.dumps(dict(rd=[], name=[])),
content_type='application/json')
T = merge_tables(TT)
if bgs:
T.cut(T.bgs_target > 0)
if bright:
T.cut(np.logical_or(T.bgs_target > 0, T.mws_target > 0))
if dark:
T.cut(T.desi_target > 0)
names = []
colors = []
for t in T:
desibits = []
bgsbits = []
mwsbits = []
for bit in range(64):
if (1 << bit) & int(t.desi_target):
desibits.append(bit)
if (1 << bit) & int(t.bgs_target):
bgsbits.append(bit)
if (1 << bit) & int(t.mws_target):
mwsbits.append(bit)
# https://github.com/desihub/desitarget/blob/master/py/desitarget/data/targetmask.yaml
desinames = [{
0: 'LRG',
1: 'ELG',
2: 'QSO',
# 8: 'LRG_NORTH',
# 9: 'ELG_NORTH',
# 10: 'QSO_NORTH',
# 16: 'LRG_SOUTH',
# 17: 'ELG_SOUTH',
# 18: 'QSO_SOUTH',
32: 'SKY',
33: 'STD_FSTAR',
34: 'STD_WD',
35: 'STD_BRIGHT',
36: 'BADSKY',
50: 'BRIGHT_OBJECT',
51: 'IN_BRIGHT_OBJECT',
52: 'NEAR_BRIGHT_OBJECT',
# 60: 'BGS_ANY',
# 61: 'MWS_ANY',
62: 'ANCILLARY_ANY',
}.get(b) for b in desibits]
bgsnames = [{
0: 'BGS_FAINT',
1: 'BGS_BRIGHT',
# 8: 'BGS_FAINT_NORTH',
# 9: 'BGS_BRIGHT_NORTH',
# 16: 'BGS_FAINT_SOUTH',
# 17: 'BGS_BRIGHT_SOUTH',
40: 'BGS_KNOWN_ANY',
41: 'BGS_KNOWN_COLLIDED',
42: 'BGS_KNOWN_SDSS',
43: 'BGS_KNOWN_BOSS',
}.get(b) for b in bgsbits]
mwsnames = [{
0: 'MWS_MAIN',
1: 'MWS_WD',
2: 'MWS_NEARBY',
16: 'MWS_MAIN_VERY_FAINT',
}.get(b) for b in mwsbits]
bitnames = [n for n in desinames + bgsnames + mwsnames if n is not None]
names.append(', '.join(bitnames))
nn = ' '.join(bitnames)
cc = 'white'
if 'QSO' in nn:
cc = 'cyan'
elif 'LRG' in nn:
cc = 'red'
elif 'ELG' in nn:
cc = 'gray'
elif 'BGS' in nn:
cc = 'orange'
colors.append(cc)
if sky:
fluxes = [dict(g=float(g), r=float(r), z=float(z))
for (g,r,z) in zip(T.apflux_g[:,0], T.apflux_r[:,0], T.apflux_z[:,0])]
nobs = None
else:
fluxes = [dict(g=float(g), r=float(r), z=float(z),
W1=float(W1), W2=float(W2))
for (g,r,z,W1,W2)
in zip(T.flux_g, T.flux_r, T.flux_z, T.flux_w1, T.flux_w2)]
        nobs = [dict(g=int(g), r=int(r), z=int(z)) for g, r, z
                in zip(T.nobs_g, T.nobs_r, T.nobs_z)]
rtn = dict(rd=[(t.ra, t.dec) for t in T],
name=names,
targetid=[int(t) for t in T.targetid],
fluxes=fluxes,
color=colors,
)
if nobs is not None:
rtn.update(nobs=nobs)
return HttpResponse(json.dumps(rtn),
content_type='application/json')
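# Illustration of the bitmask decoding used in cat_targets_drAB above
# (hypothetical mask value; bit names follow the desitarget yaml linked there):
#   desi_target = (1 << 0) | (1 << 2)                    # LRG + QSO
#   [b for b in range(64) if (1 << b) & desi_target]     # -> [0, 2]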
def cat_lslga(req, ver):
import json
fn = os.path.join(settings.DATA_DIR, 'lslga', 'LSLGA-v1.0.kd.fits')
tag = 'lslga'
T = cat_kd(req, ver, tag, fn)
if T is None:
return HttpResponse(json.dumps(dict(rd=[], name=[], mjd=[], fiber=[],plate=[])),
content_type='application/json')
rd = list((float(r),float(d)) for r,d in zip(T.ra, T.dec))
names = [t.strip() for t in T.galaxy]
radius = [d * 60./2. for d in T.d25]
return HttpResponse(json.dumps(dict(rd=rd, name=names, radiusArcsec=radius)),
content_type='application/json')
def cat_spec(req, ver):
import json
fn = os.path.join(settings.DATA_DIR, 'sdss', 'specObj-dr14-trimmed.kd.fits')
tag = 'spec'
T = cat_kd(req, ver, tag, fn)
if T is None:
return HttpResponse(json.dumps(dict(rd=[], name=[], mjd=[], fiber=[],plate=[])),
content_type='application/json')
plate = req.GET.get('plate', None)
if plate is not None:
plate = int(plate, 10)
T.cut(T.plate == plate)
rd = list((float(r),float(d)) for r,d in zip(T.ra, T.dec))
names = [t.strip() for t in T.label]
# HACK
#names = [t.split()[0] for t in names]
mjd = [int(x) for x in T.mjd]
fiber = [int(x) for x in T.fiberid]
plate = [int(x) for x in T.plate]
return HttpResponse(json.dumps(dict(rd=rd, name=names, mjd=mjd, fiber=fiber, plate=plate)),
content_type='application/json')
def cat_kd(req, ver, tag, fn):
tag = 'spec'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
import numpy as np
from astrometry.util.fits import fits_table, merge_tables
from astrometry.libkd.spherematch import tree_open, tree_search_radec
from astrometry.util.starutil_numpy import radectoxyz, xyztoradec, degrees_between
xyz1 = radectoxyz(ralo, declo)
xyz2 = radectoxyz(rahi, dechi)
xyz = (xyz1 + xyz2)/2.
xyz /= np.sqrt(np.sum(xyz**2))
rc,dc = xyztoradec(xyz)
rc = rc[0]
dc = dc[0]
rad = degrees_between(rc, dc, ralo, declo)
kd = tree_open(fn)
I = tree_search_radec(kd, rc, dc, rad)
print('Matched', len(I), 'from', fn)
if len(I) == 0:
return None
T = fits_table(fn, rows=I)
debug(len(T), 'spectra')
if ralo > rahi:
# RA wrap
T.cut(np.logical_or(T.ra > ralo, T.ra < rahi) * (T.dec > declo) * (T.dec < dechi))
else:
T.cut((T.ra > ralo) * (T.ra < rahi) * (T.dec > declo) * (T.dec < dechi))
debug(len(T), 'in cut')
return T
def cat_spec_deep2(req, ver):
import json
tag = 'spec-deep2'
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
from astrometry.util.fits import fits_table, merge_tables
import numpy as np
T = fits_table(os.path.join(settings.DATA_DIR, 'deep2-zcat-dr4-uniq.fits'))
debug(len(T), 'spectra')
if ralo > rahi:
# RA wrap
T.cut(np.logical_or(T.ra > ralo, T.ra < rahi) * (T.dec > declo) * (T.dec < dechi))
else:
T.cut((T.ra > ralo) * (T.ra < rahi) * (T.dec > declo) * (T.dec < dechi))
debug(len(T), 'in cut')
rd = list((float(r),float(d)) for r,d in zip(T.ra, T.dec))
names = []
classes = T.get('class')
subclasses = T.subclass
zbests = T.zbest
zq = T.zquality
for i in range(len(T)):
clazz = classes[i]
clazz = clazz[0] + clazz[1:].lower()
#if zq[i] >= 3:
nm = clazz
sc = subclasses[i].strip()
if sc != 'NONE':
nm += ' ' + sc
if not (zq[i] == -1 and clazz.strip() == 'Star'):
nm += ' z=%.2f, q=%i' % (zbests[i], zq[i])
names.append(nm)
return HttpResponse(json.dumps(dict(rd=rd, name=names)),
content_type='application/json')
def cat_user(req, ver):
from astrometry.util.fits import fits_table
import json
import re
cat = str(req.GET.get('cat'))
    if not re.match(r'^\w+$', cat):
print('Catalog "%s" did not match regex' % cat)
return
haverd = False
havei = False
if ('ralo' in req.GET and 'rahi' in req.GET and
'declo' in req.GET and 'dechi' in req.GET):
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
haverd = True
elif ('start' in req.GET and 'N' in req.GET):
start = int(req.GET['start'])
N = int(req.GET['N'])
havei = True
else:
return HttpResponse('need {ra,dec}{lo,hi} or start,N')
fn = os.path.join(settings.USER_QUERY_DIR, cat+'.fits')
if not os.path.exists(fn):
print('Does not exist:', fn)
return
cat = fits_table(fn)
if haverd:
if ralo > rahi:
import numpy as np
# RA wrap
cat.cut(np.logical_or(cat.ra > ralo, cat.ra < rahi) *
(cat.dec > declo) * (cat.dec < dechi))
else:
cat.cut((cat.ra > ralo) * (cat.ra < rahi) *
(cat.dec > declo) * (cat.dec < dechi))
print(len(cat), 'user catalog sources after RA,Dec cut')
elif havei:
cat = cat[start:start+N]
    rd = [(float(r), float(d)) for r, d in zip(cat.ra, cat.dec)]  # plain floats for JSON
D = dict(rd=rd)
cols = cat.columns()
if 'name' in cols:
D.update(names=cat.name.tolist())
if 'type' in cols:
try:
v = list([t[0] for t in cat.get('type')])
json.dumps(v)
D.update(sourcetype=v)
except:
print('failed to convert column "type". Traceback:')
import traceback
traceback.print_exc()
if 'g' in cols and 'r' in cols and 'z' in cols:
D.update(fluxes=[dict(g=float(g), r=float(r), z=float(z))
for g,r,z in zip(10.**((cat.g - 22.5)/-2.5),
10.**((cat.r - 22.5)/-2.5),
10.**((cat.z - 22.5)/-2.5))])
if 'gnobs' in cols and 'rnobs' in cols and 'znobs' in cols:
D.update(nobs=[dict(g=int(g), r=int(r), z=int(z))
for g,r,z in zip(cat.gnobs, cat.rnobs, cat.znobs)])
    if 'objid' in cols:
        D.update(objids=[int(x) for x in cat.objid])
if 'brickname' in cols:
D.update(bricknames=cat.brickname.tolist())
if 'radius' in cols:
D.update(radius=list([float(r) for r in cat.radius]))
if 'color' in cols:
D.update(color=list([c.strip() for c in cat.color]))
#for k,v in D.items():
# print('Cat', k, v)
return HttpResponse(json.dumps(D).replace('NaN','null'),
content_type='application/json')
def cat_bright(req, ver):
return cat(req, ver, 'bright',
os.path.join(settings.DATA_DIR, 'bright.fits'))
def cat_tycho2(req, ver):
return cat(req, ver, 'tycho2',
os.path.join(settings.DATA_DIR, 'tycho2.fits'))
def cat_gals(req, ver):
return cat(req, ver, 'ngc',
os.path.join(settings.DATA_DIR,'galaxy-cats.fits'))
def cat_ps1(req, ver):
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
# We have the EDR region and a block around 0,0
    if (rahi > 241) and (ralo < 246) and (dechi >= 6.5) and (declo < 11.5):
return cat(req, ver, 'ps1',
os.path.join(settings.DATA_DIR,'ps1-cat-edr.fits'))
return cat(req, ver, 'ps1',
os.path.join(settings.DATA_DIR,'ps1-cat.fits'))
def cat(req, ver, tag, fn):
import json
ralo = float(req.GET['ralo'])
rahi = float(req.GET['rahi'])
declo = float(req.GET['declo'])
dechi = float(req.GET['dechi'])
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
from astrometry.util.fits import fits_table
import numpy as np
TT = []
T = fits_table(fn)
debug(len(T), 'catalog objects')
if ralo > rahi:
# RA wrap
T.cut(np.logical_or(T.ra > ralo, T.ra < rahi) * (T.dec > declo) * (T.dec < dechi))
else:
T.cut((T.ra > ralo) * (T.ra < rahi) * (T.dec > declo) * (T.dec < dechi))
debug(len(T), 'in cut')
rd = list((float(r),float(d)) for r,d in zip(T.ra, T.dec))
rtn = dict(rd=rd)
# PS1
if 'ndetections' in T.columns():
T.name = np.array(['%i' % n for n in T.ndetections])
if 'name' in T.columns():
names = [t.strip() for t in T.name]
rtn['name'] = names
# bright stars
if 'alt_name' in T.columns():
rtn.update(altname = [t.strip() for t in T.alt_name])
if 'radius' in T.columns():
rtn.update(radiusArcsec=list(float(f) for f in T.radius * 3600.))
return HttpResponse(json.dumps(rtn), content_type='application/json')
def cat_decals_dr2(req, ver, zoom, x, y, tag='decals-dr2'):
return cat_decals(req, ver, zoom, x, y, tag=tag, docache=False)
def cat_decals_dr3(req, ver, zoom, x, y, tag='decals-dr3'):
return cat_decals(req, ver, zoom, x, y, tag=tag, docache=False)
def cat_mobo_dr4(req, ver, zoom, x, y, tag='mzls+bass-dr4'):
return cat_decals(req, ver, zoom, x, y, tag=tag, docache=False)
def cat_decals_dr5(req, ver, zoom, x, y, tag='decals-dr5'):
return cat_decals(req, ver, zoom, x, y, tag=tag, docache=False)
def cat_mobo_dr6(req, ver, zoom, x, y, tag='mzls+bass-dr6'):
return cat_decals(req, ver, zoom, x, y, tag=tag, docache=False)
def cat_decals_dr7(req, ver, zoom, x, y, tag='decals-dr7'):
return cat_decals(req, ver, zoom, x, y, tag=tag, docache=False)
def cat_decals(req, ver, zoom, x, y, tag='decals', docache=True):
import json
zoom = int(zoom)
if zoom < 12:
return HttpResponse(json.dumps(dict(rd=[])),
content_type='application/json')
from astrometry.util.fits import fits_table, merge_tables
import numpy as np
try:
wcs, W, H, zoomscale, zoom,x,y = get_tile_wcs(zoom, x, y)
except RuntimeError as e:
print('e:', e)
return HttpResponse(e.strerror)
ver = int(ver)
if not ver in catversions[tag]:
raise RuntimeError('Invalid version %i for tag %s' % (ver, tag))
basedir = settings.DATA_DIR
sendfile_kwargs = dict()
if docache:
cachefn = os.path.join(basedir, 'cats-cache', tag,
'%i/%i/%i/%i.cat.json' % (ver, zoom, x, y))
if os.path.exists(cachefn):
return send_file(cachefn, 'application/json',
modsince=req.META.get('HTTP_IF_MODIFIED_SINCE'),
expires=oneyear)
sendfile_kwargs.update(expires=oneyear)
else:
import tempfile
f,cachefn = tempfile.mkstemp(suffix='.json')
os.close(f)
sendfile_kwargs.update(unlink=True)
cat,hdr = _get_decals_cat(wcs, tag=tag)
if cat is None:
rd = []
types = []
fluxes = []
bricknames = []
objids = []
nobs = []
else:
rd = list(zip(cat.ra, cat.dec))
types = list([t[0] for t in cat.get('type')])
if 'decam_flux' in cat.get_columns():
fluxes = [dict(g=float(g), r=float(r), z=float(z))
for g,r,z in zip(cat.decam_flux[:,1], cat.decam_flux[:,2],
cat.decam_flux[:,4])]
nobs = [dict(g=int(g), r=int(r), z=int(z))
for g,r,z in zip(cat.decam_nobs[:,1], cat.decam_nobs[:,2],
cat.decam_nobs[:,4])]
else:
# DR4+
fluxes = [dict(g=float(g), r=float(r), z=float(z))
for g,r,z in zip(cat.flux_g, cat.flux_r, cat.flux_z)]
nobs = [dict(g=int(g), r=int(r), z=int(z))
for g,r,z in zip(cat.nobs_g, cat.nobs_r, cat.nobs_z)]
bricknames = list(cat.brickname)
objids = [int(x) for x in cat.objid]
    json_data = json.dumps(dict(rd=rd, sourcetype=types, fluxes=fluxes, nobs=nobs,
                                bricknames=bricknames, objids=objids))
    if docache:
        trymakedirs(cachefn)
        f = open(cachefn, 'w')
        f.write(json_data)
f.close()
return send_file(cachefn, 'application/json', **sendfile_kwargs)
def _get_decals_cat(wcs, tag='decals'):
from astrometry.util.fits import fits_table, merge_tables
from map.views import get_survey
basedir = settings.DATA_DIR
H,W = wcs.shape
X = wcs.pixelxy2radec([1,1,1,W/2,W,W,W,W/2],
[1,H/2,H,H,H,H/2,1,1])
r,d = X[-2:]
#catpat = os.path.join(basedir, 'cats', tag, '%(brickname).3s',
# 'tractor-%(brickname)s.fits')
survey = get_survey(tag)
B = survey.get_bricks_readonly()
I = survey.bricks_touching_radec_box(B, r.min(), r.max(), d.min(), d.max())
#print(len(I), 'bricks touching RA,Dec box', r.min(),r.max(), d.min(),d.max())
cat = []
hdr = None
for brickname in B.brickname[I]:
catfn = survey.find_file('tractor', brick=brickname)
if not os.path.exists(catfn):
print('Does not exist:', catfn)
continue
debug('Reading catalog', catfn)
T = fits_table(catfn)
T.cut(T.brick_primary)
print('File', catfn, 'cut to', len(T), 'primary')
ok,xx,yy = wcs.radec2pixelxy(T.ra, T.dec)
T.cut((xx > 0) * (yy > 0) * (xx < W) * (yy < H))
cat.append(T)
if hdr is None:
hdr = T.get_header()
if len(cat) == 0:
cat = None
else:
cat = merge_tables(cat, columns='fillzero')
return cat,hdr
if __name__ == '__main__':
#print('Random galaxy:', get_random_galaxy(layer='mzls+bass-dr4'))
#create_galaxy_catalog('/tmp/dr6.fits', 6)
#specObj-dr14.fits
#T = fits_table('/project/projectdirs/cosmo/data/sdss/dr14/sdss/spectro/redux/specObj-dr14.fits')
from django.test import Client
c = Client()
c.get('/usercatalog/1/cat.json?ralo=200.2569&rahi=200.4013&declo=47.4930&dechi=47.5823&cat=tmpajwai3dx')
import sys
sys.exit(0)
T=fits_table('/project/projectdirs/cosmo/data/sdss/dr14/sdss/spectro/redux/specObj-dr14.fits',
columns=['plate','mjd','fiberid','plug_ra','plug_dec','class','subclass','z','zwarning'])
T.rename('plug_ra', 'ra')
T.rename('plug_dec','dec')
labels = []
for t in T:
sub = t.subclass
sub = sub.split()
sub = ' '.join([s for s in sub if s[0] != '('])
cla = t.get('class').strip()
txt = cla
if len(sub):
txt += ' (' + sub + ')'
if cla in ['GALAXY', 'QSO']:
txt += ' z=%.3f' % t.z
labels.append(txt)
T.label = np.array(labels)
T.writeto('specObj-dr14-trimmed.fits', columns=['ra','dec','plate','mjd','fiberid','z','zwarning','label'])
# startree -i data/specObj-dr14-trimmed.fits -o data/specObj-dr14-trimmed.kd.fits -T -k -P
| [
"[email protected]"
] | |
6256100f755fc35f9df3f524d14acea287224c27 | 5ea2ff68ba98c8034d2f21d69752af45e9bf61dd | /imodels/rulefit/rulefit.py | 02990962779f4af1465bece5a0f7bb3dca8abaf8 | [] | permissive | Pacmed/interpretability-implementations-demos | 160181d03a0fd8c7276f602e33afa22a5138eb41 | 4b5f21da2ca8eb17e38185c00621df3fa1308695 | refs/heads/master | 2021-07-04T04:56:30.558899 | 2020-12-21T10:55:03 | 2020-12-21T10:55:03 | 201,427,811 | 1 | 1 | MIT | 2019-11-11T16:58:43 | 2019-08-09T08:40:32 | Jupyter Notebook | UTF-8 | Python | false | false | 21,997 | py | """Linear model of tree-based decision rules
This method implement the RuleFit algorithm
The module structure is the following:
- ``RuleCondition`` implements a binary feature transformation
- ``Rule`` implements a Rule composed of ``RuleConditions``
- ``RuleEnsemble`` implements an ensemble of ``Rules``
- ``RuleFit`` implements the RuleFit algorithm
"""
import pandas as pd
import numpy as np
from sklearn.base import BaseEstimator
from sklearn.base import TransformerMixin
from sklearn.ensemble import GradientBoostingRegressor, GradientBoostingClassifier, RandomForestRegressor, RandomForestClassifier
from sklearn.linear_model import LassoCV,LogisticRegressionCV
from functools import reduce
class RuleCondition():
"""Class for binary rule condition
Warning: this class should not be used directly.
"""
def __init__(self,
feature_index,
threshold,
operator,
support,
feature_name = None):
self.feature_index = feature_index
self.threshold = threshold
self.operator = operator
self.support = support
self.feature_name = feature_name
def __repr__(self):
return self.__str__()
def __str__(self):
if self.feature_name:
feature = self.feature_name
else:
feature = self.feature_index
return "%s %s %s" % (feature, self.operator, self.threshold)
def transform(self, X):
"""Transform dataset.
Parameters
----------
X: array-like matrix, shape=(n_samples, n_features)
Returns
-------
X_transformed: array-like matrix, shape=(n_samples, 1)
"""
if self.operator == "<=":
res = 1 * (X[:,self.feature_index] <= self.threshold)
elif self.operator == ">":
res = 1 * (X[:,self.feature_index] > self.threshold)
return res
def __eq__(self, other):
return self.__hash__() == other.__hash__()
def __hash__(self):
return hash((self.feature_index, self.threshold, self.operator, self.feature_name))
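# Tiny illustration (hypothetical data): the condition "feature 0 <= 2.0"
# maps each row to a binary indicator.
#   import numpy as np
#   cond = RuleCondition(0, 2.0, "<=", support=0.5)
#   cond.transform(np.array([[1.0], [3.0]]))   # -> array([1, 0])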
class Winsorizer():
"""Performs Winsorization 1->1*
Warning: this class should not be used directly.
"""
def __init__(self,trim_quantile=0.0):
self.trim_quantile=trim_quantile
self.winsor_lims=None
def train(self,X):
# get winsor limits
self.winsor_lims=np.ones([2,X.shape[1]])*np.inf
self.winsor_lims[0,:]=-np.inf
if self.trim_quantile>0:
for i_col in np.arange(X.shape[1]):
lower=np.percentile(X[:,i_col],self.trim_quantile*100)
upper=np.percentile(X[:,i_col],100-self.trim_quantile*100)
self.winsor_lims[:,i_col]=[lower,upper]
def trim(self,X):
X_=X.copy()
X_=np.where(X>self.winsor_lims[1,:],np.tile(self.winsor_lims[1,:],[X.shape[0],1]),np.where(X<self.winsor_lims[0,:],np.tile(self.winsor_lims[0,:],[X.shape[0],1]),X))
return X_
class FriedScale():
"""Performs scaling of linear variables according to Friedman et al. 2005 Sec 5
Each variable is first Winsorized l->l*, then standardised as 0.4 x l* / std(l*)
Warning: this class should not be used directly.
"""
def __init__(self, winsorizer = None):
self.scale_multipliers=None
self.winsorizer = winsorizer
def train(self,X):
# get multipliers
if self.winsorizer != None:
X_trimmed= self.winsorizer.trim(X)
else:
X_trimmed = X
scale_multipliers=np.ones(X.shape[1])
for i_col in np.arange(X.shape[1]):
num_uniq_vals=len(np.unique(X[:,i_col]))
if num_uniq_vals>2: # don't scale binary variables which are effectively already rules
scale_multipliers[i_col]=0.4/(1.0e-12 + np.std(X_trimmed[:,i_col]))
self.scale_multipliers=scale_multipliers
def scale(self,X):
if self.winsorizer != None:
return self.winsorizer.trim(X)*self.scale_multipliers
else:
return X*self.scale_multipliers
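# Illustrative sketch (assumption: the two classes are chained exactly as in
# RuleFit.fit further below; X is any numeric 2-D array):
#   win = Winsorizer(trim_quantile=0.025)
#   win.train(X)                 # learn per-column percentile limits
#   fs = FriedScale(winsorizer=win)
#   fs.train(X)                  # multiplier = 0.4 / std(winsorized column),
#                                # binary columns are left unscaled
#   X_scaled = fs.scale(X)       # trim, then scale column-wise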
class Rule():
"""Class for binary Rules from list of conditions
Warning: this class should not be used directly.
"""
def __init__(self,
rule_conditions,prediction_value):
self.conditions = set(rule_conditions)
self.support = min([x.support for x in rule_conditions])
self.prediction_value=prediction_value
self.rule_direction=None
def transform(self, X):
"""Transform dataset.
Parameters
----------
X: array-like matrix
Returns
-------
X_transformed: array-like matrix, shape=(n_samples, 1)
"""
rule_applies = [condition.transform(X) for condition in self.conditions]
return reduce(lambda x,y: x * y, rule_applies)
def __str__(self):
return " & ".join([x.__str__() for x in self.conditions])
def __repr__(self):
return self.__str__()
def __hash__(self):
return sum([condition.__hash__() for condition in self.conditions])
def __eq__(self, other):
return self.__hash__() == other.__hash__()
def extract_rules_from_tree(tree, feature_names=None):
"""Helper to turn a tree into as set of rules
"""
rules = set()
def traverse_nodes(node_id=0,
operator=None,
threshold=None,
feature=None,
conditions=[]):
if node_id != 0:
if feature_names is not None:
feature_name = feature_names[feature]
else:
feature_name = feature
rule_condition = RuleCondition(feature_index=feature,
threshold=threshold,
operator=operator,
support = tree.n_node_samples[node_id] / float(tree.n_node_samples[0]),
feature_name=feature_name)
new_conditions = conditions + [rule_condition]
else:
new_conditions = []
## if not terminal node
if tree.children_left[node_id] != tree.children_right[node_id]:
feature = tree.feature[node_id]
threshold = tree.threshold[node_id]
left_node_id = tree.children_left[node_id]
traverse_nodes(left_node_id, "<=", threshold, feature, new_conditions)
right_node_id = tree.children_right[node_id]
traverse_nodes(right_node_id, ">", threshold, feature, new_conditions)
else: # a leaf node
if len(new_conditions)>0:
new_rule = Rule(new_conditions,tree.value[node_id][0][0])
rules.update([new_rule])
else:
pass #tree only has a root node!
return None
traverse_nodes()
return rules
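# Illustrative sketch (assumes scikit-learn; the data names are made up):
#   from sklearn.tree import DecisionTreeRegressor
#   dt = DecisionTreeRegressor(max_leaf_nodes=4).fit(X_train, y_train)
#   rules = extract_rules_from_tree(dt.tree_, feature_names=feature_names)
#   for rule in rules:
#       print(rule)              # e.g. 'f3 <= 0.5 & f1 > 2.0'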
class RuleEnsemble():
"""Ensemble of binary decision rules
This class implements an ensemble of decision rules that extracts rules from
an ensemble of decision trees.
Parameters
----------
tree_list: List or array of DecisionTreeClassifier or DecisionTreeRegressor
Trees from which the rules are created
feature_names: List of strings, optional (default=None)
Names of the features
Attributes
----------
rules: List of Rule
The ensemble of rules extracted from the trees
"""
def __init__(self,
tree_list,
feature_names=None):
self.tree_list = tree_list
self.feature_names = feature_names
self.rules = set()
## TODO: Move this out of __init__
self._extract_rules()
self.rules=list(self.rules)
def _extract_rules(self):
"""Recursively extract rules from each tree in the ensemble
"""
for tree in self.tree_list:
rules = extract_rules_from_tree(tree[0].tree_,feature_names=self.feature_names)
self.rules.update(rules)
def filter_rules(self, func):
        self.rules = [rule for rule in self.rules if func(rule)]
def filter_short_rules(self, k):
self.filter_rules(lambda x: len(x.conditions) > k)
def transform(self, X,coefs=None):
"""Transform dataset.
Parameters
----------
X: array-like matrix, shape=(n_samples, n_features)
coefs: (optional) if supplied, this makes the prediction
slightly more efficient by setting rules with zero
coefficients to zero without calling Rule.transform().
Returns
-------
X_transformed: array-like matrix, shape=(n_samples, n_out)
Transformed dataset. Each column represents one rule.
"""
rule_list=list(self.rules)
if coefs is None :
return np.array([rule.transform(X) for rule in rule_list]).T
else: # else use the coefs to filter the rules we bother to interpret
res= np.array([rule_list[i_rule].transform(X) for i_rule in np.arange(len(rule_list)) if coefs[i_rule]!=0]).T
res_=np.zeros([X.shape[0],len(rule_list)])
res_[:,coefs!=0]=res
return res_
def __str__(self):
        return str([str(rule) for rule in self.rules])
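# Illustrative sketch (assumption: gbr is a fitted GradientBoostingRegressor,
# whose estimators_ already has the nested [tree] shape expected here):
#   ensemble = RuleEnsemble(tree_list=gbr.estimators_, feature_names=names)
#   X_rules = ensemble.transform(X)   # one 0/1 column per extracted rule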
class RuleFit(BaseEstimator, TransformerMixin):
"""Rulefit class
Parameters
----------
tree_size: Number of terminal nodes in generated trees. If exp_rand_tree_size=True,
this will be the mean number of terminal nodes.
sample_fract: fraction of randomly chosen training observations used to produce each tree.
FP 2004 (Sec. 2)
max_rules: approximate total number of rules generated for fitting. Note that actual
number of rules will usually be lower than this due to duplicates.
memory_par: scale multiplier (shrinkage factor) applied to each new tree when
sequentially induced. FP 2004 (Sec. 2)
rfmode: 'regress' for regression or 'classify' for binary classification.
lin_standardise: If True, the linear terms will be standardised as per Friedman Sec 3.2
by multiplying the winsorised variable by 0.4/stdev.
lin_trim_quantile: If lin_standardise is True, this quantile will be used to trim linear
terms before standardisation.
exp_rand_tree_size: If True, each boosted tree will have a different maximum number of
terminal nodes based on an exponential distribution about tree_size.
(Friedman Sec 3.3)
model_type: 'r': rules only; 'l': linear terms only; 'rl': both rules and linear terms
random_state: Integer to initialise random objects and provide repeatability.
tree_generator: Optional: this object will be used as provided to generate the rules.
This will override almost all the other properties above.
Must be GradientBoostingRegressor or GradientBoostingClassifier, optional (default=None)
Attributes
----------
rule_ensemble: RuleEnsemble
The rule ensemble
feature_names: list of strings, optional (default=None)
The names of the features (columns)
"""
def __init__(self,tree_size=4,sample_fract='default',max_rules=2000,
memory_par=0.01,
tree_generator=None,
rfmode='regress',lin_trim_quantile=0.025,
lin_standardise=True, exp_rand_tree_size=True,
model_type='rl',Cs=None,cv=3,random_state=None):
self.tree_generator = tree_generator
self.rfmode=rfmode
self.lin_trim_quantile=lin_trim_quantile
self.lin_standardise=lin_standardise
self.winsorizer=Winsorizer(trim_quantile=lin_trim_quantile)
self.friedscale=FriedScale(self.winsorizer)
self.stddev = None
self.mean = None
self.exp_rand_tree_size=exp_rand_tree_size
self.max_rules=max_rules
self.sample_fract=sample_fract
self.max_rules=max_rules
self.memory_par=memory_par
self.tree_size=tree_size
self.random_state=random_state
self.model_type=model_type
self.cv=cv
self.Cs=Cs
def fit(self, X, y=None, feature_names=None, verbose=False):
"""Fit and estimate linear combination of rule ensemble
"""
if type(X) == pd.DataFrame:
X = X.values
if type(y) in [pd.DataFrame, pd.Series]:
y = y.values
## Enumerate features if feature names not provided
N=X.shape[0]
if feature_names is None:
self.feature_names = ['feature_' + str(x) for x in range(0, X.shape[1])]
else:
self.feature_names=feature_names
if 'r' in self.model_type:
## initialise tree generator
if self.tree_generator is None:
n_estimators_default=int(np.ceil(self.max_rules/self.tree_size))
self.sample_fract_=min(0.5,(100+6*np.sqrt(N))/N)
if self.rfmode=='regress':
self.tree_generator = GradientBoostingRegressor(n_estimators=n_estimators_default, max_leaf_nodes=self.tree_size, learning_rate=self.memory_par,subsample=self.sample_fract_,random_state=self.random_state,max_depth=100)
else:
self.tree_generator =GradientBoostingClassifier(n_estimators=n_estimators_default, max_leaf_nodes=self.tree_size, learning_rate=self.memory_par,subsample=self.sample_fract_,random_state=self.random_state,max_depth=100)
if self.rfmode=='regress':
if type(self.tree_generator) not in [GradientBoostingRegressor,RandomForestRegressor]:
raise ValueError("RuleFit only works with RandomForest and BoostingRegressor")
else:
if type(self.tree_generator) not in [GradientBoostingClassifier,RandomForestClassifier]:
raise ValueError("RuleFit only works with RandomForest and BoostingClassifier")
## fit tree generator
if not self.exp_rand_tree_size: # simply fit with constant tree size
self.tree_generator.fit(X, y)
else: # randomise tree size as per Friedman 2005 Sec 3.3
np.random.seed(self.random_state)
tree_sizes=np.random.exponential(scale=self.tree_size-2,size=int(np.ceil(self.max_rules*2/self.tree_size)))
tree_sizes=np.asarray([2+np.floor(tree_sizes[i_]) for i_ in np.arange(len(tree_sizes))],dtype=int)
i=int(len(tree_sizes)/4)
while np.sum(tree_sizes[0:i])<self.max_rules:
i=i+1
tree_sizes=tree_sizes[0:i]
self.tree_generator.set_params(warm_start=True)
curr_est_=0
for i_size in np.arange(len(tree_sizes)):
size=tree_sizes[i_size]
self.tree_generator.set_params(n_estimators=curr_est_+1)
self.tree_generator.set_params(max_leaf_nodes=size)
random_state_add = self.random_state if self.random_state else 0
                    self.tree_generator.set_params(random_state=i_size+random_state_add) # warm_start=True seems to reset random_state, such that the trees are highly correlated, unless we manually change the random_state here.
self.tree_generator.get_params()['n_estimators']
self.tree_generator.fit(np.copy(X, order='C'), np.copy(y, order='C'))
curr_est_=curr_est_+1
self.tree_generator.set_params(warm_start=False)
tree_list = self.tree_generator.estimators_
if isinstance(self.tree_generator, RandomForestRegressor) or isinstance(self.tree_generator, RandomForestClassifier):
tree_list = [[x] for x in self.tree_generator.estimators_]
## extract rules
self.rule_ensemble = RuleEnsemble(tree_list = tree_list,
feature_names=self.feature_names)
## concatenate original features and rules
X_rules = self.rule_ensemble.transform(X)
## standardise linear variables if requested (for regression model only)
if 'l' in self.model_type:
## standard deviation and mean of winsorized features
self.winsorizer.train(X)
winsorized_X = self.winsorizer.trim(X)
self.stddev = np.std(winsorized_X, axis = 0)
self.mean = np.mean(winsorized_X, axis = 0)
if self.lin_standardise:
self.friedscale.train(X)
X_regn=self.friedscale.scale(X)
else:
X_regn=X.copy()
## Compile Training data
X_concat=np.zeros([X.shape[0],0])
if 'l' in self.model_type:
X_concat = np.concatenate((X_concat,X_regn), axis=1)
if 'r' in self.model_type:
if X_rules.shape[0] >0:
X_concat = np.concatenate((X_concat, X_rules), axis=1)
## fit Lasso
if self.rfmode=='regress':
            if self.Cs is None:  # use defaults
n_alphas= 100
alphas=None
elif hasattr(self.Cs, "__len__"):
n_alphas= None
alphas=1./self.Cs
else:
n_alphas= self.Cs
alphas=None
self.lscv = LassoCV(n_alphas=n_alphas,alphas=alphas,cv=self.cv,random_state=self.random_state)
self.lscv.fit(X_concat, y)
self.coef_=self.lscv.coef_
self.intercept_=self.lscv.intercept_
else:
Cs=10 if self.Cs is None else self.Cs
self.lscv=LogisticRegressionCV(Cs=Cs,cv=self.cv,penalty='l1',random_state=self.random_state,solver='liblinear')
self.lscv.fit(X_concat, y)
self.coef_=self.lscv.coef_[0]
self.intercept_=self.lscv.intercept_[0]
return self
def predict(self, X):
"""Predict outcome for X
"""
if type(X) == pd.DataFrame:
X = X.values.astype(np.float32)
X_concat=np.zeros([X.shape[0],0])
if 'l' in self.model_type:
if self.lin_standardise:
X_concat = np.concatenate((X_concat,self.friedscale.scale(X)), axis=1)
else:
X_concat = np.concatenate((X_concat,X), axis=1)
if 'r' in self.model_type:
rule_coefs=self.coef_[-len(self.rule_ensemble.rules):]
if len(rule_coefs)>0:
X_rules = self.rule_ensemble.transform(X,coefs=rule_coefs)
if X_rules.shape[0] >0:
X_concat = np.concatenate((X_concat, X_rules), axis=1)
return self.lscv.predict(X_concat)
def transform(self, X=None, y=None):
"""Transform dataset.
Parameters
----------
X : array-like matrix, shape=(n_samples, n_features)
Input data to be transformed. Use ``dtype=np.float32`` for maximum
efficiency.
Returns
-------
X_transformed: matrix, shape=(n_samples, n_out)
Transformed data set
"""
return self.rule_ensemble.transform(X)
def get_rules(self, exclude_zero_coef=False, subregion=None):
"""Return the estimated rules
Parameters
----------
        exclude_zero_coef: If True, returns only the rules with an estimated
            coefficient not equal to zero.
subregion: If None (default) returns global importances (FP 2004 eq. 28/29), else returns importance over
subregion of inputs (FP 2004 eq. 30/31/32).
Returns
-------
rules: pandas.DataFrame with the rules. Column 'rule' describes the rule, 'coef' holds
the coefficients and 'support' the support of the rule in the training
data set (X)
"""
n_features= len(self.coef_) - len(self.rule_ensemble.rules)
rule_ensemble = list(self.rule_ensemble.rules)
output_rules = []
## Add coefficients for linear effects
for i in range(0, n_features):
if self.lin_standardise:
coef=self.coef_[i]*self.friedscale.scale_multipliers[i]
else:
coef=self.coef_[i]
if subregion is None:
importance = abs(coef)*self.stddev[i]
else:
subregion = np.array(subregion)
importance = sum(abs(coef)* abs([ x[i] for x in self.winsorizer.trim(subregion) ] - self.mean[i]))/len(subregion)
output_rules += [(self.feature_names[i], 'linear',coef, 1, importance)]
## Add rules
for i in range(0, len(self.rule_ensemble.rules)):
rule = rule_ensemble[i]
coef=self.coef_[i + n_features]
if subregion is None:
importance = abs(coef)*(rule.support * (1-rule.support))**(1/2)
else:
rkx = rule.transform(subregion)
importance = sum(abs(coef) * abs(rkx - rule.support))/len(subregion)
output_rules += [(rule.__str__(), 'rule', coef, rule.support, importance)]
rules = pd.DataFrame(output_rules, columns=["rule", "type","coef", "support", "importance"])
if exclude_zero_coef:
            rules = rules[rules.coef != 0]
return rules | [
"[email protected]"
] | |
73ce286191945ea0d7b3750e084b44c850201d99 | bbf7787d94e97d4e0c9bceb46203c08939e6e67d | /python_test_case/Data.py | de2ca81bcab87030d2f956c769b3e395ba99f41f | [] | no_license | llanoxdewa/python | 076e6fa3ed2128c21cdd26c1be6bc82ee6917f9c | 6586170c5f48827a5e1bcb35656870b5e4eed732 | refs/heads/main | 2023-06-16T05:31:52.494796 | 2021-07-09T09:04:30 | 2021-07-09T09:04:30 | 362,782,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 534 | py |
class Siswa:
def __init__(self,nama,kelas,nilai):
self.__nama = nama
self.__kelas = kelas
self.__nilai = nilai
def hasil(self):
return {
'nama':self.__nama,
'kelas':self.__kelas,
'nilai':self.__nilai
}
def getNama(self):
return self.__nama
def getKelas(self):
return self.__kelas
def getNilai(self):
return self.__nilai
if __name__=='__main__':
siswa1 = Siswa('llano','11 elektro 1',100)
siswa2 = Siswa('ujang','10 Las 3',95)
print(siswa1.hasil())
print(siswa2.hasil())
| [
"[email protected]"
] | |
802d6402d865c869adb1e04cefcef3152c37917e | 357fefa288745c9ab3bc276a7ef0bc815f3fec2a | /src/gui/descriptive_statistics.py | e5768c5bf81b94911ecfc0e7ac1bc4dd97c8ef83 | [
"MIT"
] | permissive | jdvelasq/techminer | 61da47f44719e462732627edcc1094fab6c173f1 | 7a34a9fd684ce56cfbab583fa1bb71c1669035f9 | refs/heads/main | 2023-03-15T23:26:22.876051 | 2023-03-13T21:47:24 | 2023-03-13T21:47:24 | 204,352,276 | 0 | 1 | MIT | 2019-12-09T02:37:11 | 2019-08-25T21:34:19 | Jupyter Notebook | UTF-8 | Python | false | false | 9,149 | py | import numpy as np
import pandas as pd
import ipywidgets as widgets
from ipywidgets import GridspecLayout, Layout
from IPython.display import display
from techminer.core.filter_records import filter_records
##
##
## Term extraction and count
##
##
def _extract_terms(x, column):
x = x.copy()
x[column] = x[column].map(
lambda w: w.split(";") if not pd.isna(w) and isinstance(w, str) else w
)
x = x.explode(column)
x[column] = x[column].map(lambda w: w.strip() if isinstance(w, str) else w)
x = pd.unique(x[column].dropna())
x = np.sort(x)
return pd.DataFrame({column: x})
def _count_terms(x, column):
return len(_extract_terms(x, column))
class App:
def __init__(self) -> None:
self.app_layout = GridspecLayout(9, 4, height="870px")
self.output = widgets.Output().add_class("output_color")
self.app_layout[0:, 0:] = widgets.VBox(
[self.output],
layout=Layout(margin="10px 10px 10px 10px", border="1px solid gray"),
)
def run(self):
x = filter_records(pd.read_csv("corpus.csv"))
##
## General information
##
general = {}
general["Documents:"] = str(len(x))
if "Year" in x.columns:
general["Years:"] = str(min(x.Year)) + "-" + str(max(x.Year))
n = max(x.Year) - min(x.Year) + 1
Po = len(x.Year[x.Year == min(x.Year)])
Pn = len(x)
cagr = str(round(100 * (np.power(Pn / Po, 1 / n) - 1), 2)) + " %"
general["Compound annual growth rate:"] = cagr
if "Global_Citations" in x.columns:
general["Average citations per document:"] = "{:4.2f}".format(
x["Global_Citations"].mean()
)
if "Global_Citations" in x.columns and "Year" in x.columns:
general["Average citations per document per year:"] = "{:4.2f}".format(
x["Global_Citations"].sum()
/ (len(x) * (x.Year.max() - x.Year.min() + 1))
)
if "Global_References" in x.columns:
general["Total references:"] = round(_count_terms(x, "Global_References"))
general["Average global references per document:"] = round(
_count_terms(x, "Global_References") / len(x)
)
if "Source_title" in x.columns:
general["Source titles:"] = round(_count_terms(x, "Source_title"))
general["Average documents per Source title:"] = round(
len(x) / _count_terms(x, "Source_title")
)
x.pop("Source_title")
if "Abb_Source_Title" in x.columns:
general["Abbreviated Source titles:"] = round(
_count_terms(x, "Abb_Source_Title")
)
x.pop("Abb_Source_Title")
##
## Document types
##
document_types = {}
if "Document_Type" in x.columns:
z = x[["Document_Type"]].groupby("Document_Type").size()
for index, value in zip(z.index, z):
document_types[index + ":"] = value
x.pop("Document_Type")
##
## Authors
##
authors = {}
if "Authors" in x.columns:
authors["Authors:"] = _count_terms(x, "Authors")
m = x.Authors
m = m.dropna()
m = m.map(lambda w: w.split(";"), na_action="ignore")
m = m.explode()
authors["Author appearances:"] = len(m)
authors["Documents per author:"] = round(
len(x) / _count_terms(x, "Authors"), 2
)
authors["Authors per document:"] = round(
_count_terms(x, "Authors") / len(x), 2
)
if "Num_Authors" in x.columns:
authors["Single-authored documents:"] = len(x[x["Num_Authors"] == 1])
authors["Multi-authored documents:"] = len(x[x["Num_Authors"] > 1])
authors["Co-authors per document:"] = round(x["Num_Authors"].mean(), 2)
authors["Collaboration index:"] = round(
_count_terms(x[x.Num_Authors > 1], "Authors")
/ len(x[x.Num_Authors > 1]),
2,
)
if "Institutions" in x.columns:
authors["Institutions:"] = _count_terms(x, "Institutions")
x.pop("Institutions")
if "Institution_1st_Author" in x.columns:
authors["Institutions (1st author):"] = _count_terms(
x, "Institution_1st_Author"
)
x.pop("Institution_1st_Author")
if "Countries" in x.columns:
authors["Countries:"] = _count_terms(x, "Countries")
if "Countries" in x.columns:
x.pop("Countries")
if "Country_1st_Author" in x.columns:
authors["Countries (1st author):"] = _count_terms(x, "Country_1st_Author")
x.pop("Country_1st_Author")
##
## Keywords
##
keywords = {}
if "Author_Keywords" in x.columns:
keywords["Author Keywords (raw):"] = round(
_count_terms(x, "Author_Keywords")
)
x.pop("Author_Keywords")
if "Author_Keywords_CL" in x.columns:
keywords["Author Keywords (cleaned):"] = round(
_count_terms(x, "Author_Keywords_CL")
)
x.pop("Author_Keywords_CL")
if "Index_Keywords" in x.columns:
keywords["Index Keywords (raw):"] = round(_count_terms(x, "Index_Keywords"))
x.pop("Index_Keywords")
if "Index_Keywords_CL" in x.columns:
keywords["Index Keywords (cleaned):"] = round(
_count_terms(x, "Index_Keywords_CL")
)
x.pop("Index_Keywords_CL")
if "Keywords_CL" in x.columns:
keywords["Keywords (cleaned):"] = round(_count_terms(x, "Keywords_CL"))
x.pop("Keywords_CL")
if "Title_words" in x.columns:
keywords["Title words (raw):"] = round(_count_terms(x, "Title_words"))
x.pop("Title_words")
if "Title_words_CL" in x.columns:
keywords["Title words (cleaned):"] = round(
_count_terms(x, "Title_words_CL")
)
x.pop("Title_words_CL")
if "Abstract_words" in x.columns:
keywords["Abstract words (raw)"] = round(_count_terms(x, "Abstract_words"))
x.pop("Abstract_words")
if "Abstract_words_CL" in x.columns:
keywords["Abstract words (cleaned)"] = round(
_count_terms(x, "Abstract_words_CL")
)
x.pop("Abstract_words_CL")
##
## Report
##
if "Frac_Num_Documents" in x.columns:
x.pop("Frac_Num_Documents")
if "Historiograph_ID" in x.columns:
x.pop("Historiograph_ID")
d = []
d += [key for key in general.keys()]
d += [key for key in document_types.keys()]
d += [key for key in authors.keys()]
d += [key for key in keywords.keys()]
v = []
v += [general[key] for key in general.keys()]
v += [document_types[key] for key in document_types.keys()]
v += [authors[key] for key in authors.keys()]
v += [keywords[key] for key in keywords.keys()]
##
## Other columns in the dataset
##
others = {}
for column in sorted(x.columns):
if column + ":" in d or column in [
"Abstract",
"Abstract_Author_Keywords",
"Abstract_Author_Keywords_CL",
"Abstract_Index_Keywords",
"Abstract_Index_Keywords_CL",
"Abstract_Keywords",
"Abstract_Keywords_CL",
"Authors_ID",
"Bradford_Law_Zone",
"Global_Citations",
"Global_References",
"ID",
"Keywords",
"Local_Citations",
"Local_References",
"Num_Authors",
"Title",
"Year",
"Affiliations",
]:
continue
others[column] = round(_count_terms(x, column))
if len(others):
d += [key for key in others.keys()]
v += [others[key] for key in others.keys()]
with self.output:
display(
pd.DataFrame(
v,
columns=["value"],
index=pd.MultiIndex.from_arrays(
[
["GENERAL"] * len(general)
+ ["DOCUMENT TYPES"] * len(document_types)
+ ["AUTHORS"] * len(authors)
+ ["KEYWORDS"] * len(keywords)
+ ["OTHERS"] * len(others),
d,
],
names=["Category", "Item"],
),
)
)
return self.app_layout
| [
"[email protected]"
] | |
8bd5f7297672cd1bcc6f7bac4d676841b7226b48 | f5c3fde6a1f0825ef03c73c659ad61a9ca15eac9 | /backend/course/api/v1/urls.py | d3fda365b5a60402915d8df5400dd7132d425b80 | [] | no_license | crowdbotics-apps/meira-20558 | 654cf2c71b7c1bb22a419dc4eb20f41391ca20bb | 2ff22fb39ca7c380389524805c21b70bf4c53611 | refs/heads/master | 2022-12-25T00:22:10.427658 | 2020-09-22T12:23:05 | 2020-09-22T12:23:05 | 297,630,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .viewsets import (
RecordingViewSet,
EventViewSet,
SubscriptionViewSet,
CourseViewSet,
GroupViewSet,
ModuleViewSet,
PaymentMethodViewSet,
SubscriptionTypeViewSet,
EnrollmentViewSet,
LessonViewSet,
CategoryViewSet,
)
router = DefaultRouter()
router.register("paymentmethod", PaymentMethodViewSet)
router.register("category", CategoryViewSet)
router.register("module", ModuleViewSet)
router.register("lesson", LessonViewSet)
router.register("subscriptiontype", SubscriptionTypeViewSet)
router.register("enrollment", EnrollmentViewSet)
router.register("event", EventViewSet)
router.register("subscription", SubscriptionViewSet)
router.register("recording", RecordingViewSet)
router.register("group", GroupViewSet)
router.register("course", CourseViewSet)
urlpatterns = [
path("", include(router.urls)),
]
| [
"[email protected]"
] | |
f030a759d8cdbb9ea7ca151b2e0defbfaa9762d6 | 48894ae68f0234e263d325470178d67ab313c73e | /sa/profiles/DLink/DGS3100/get_interface_status.py | c9e8246ac2d799ba19927cd9ff87b7157fcd836d | [
"BSD-3-Clause"
] | permissive | DreamerDDL/noc | 7f949f55bb2c02c15ac2cc46bc62d957aee43a86 | 2ab0ab7718bb7116da2c3953efd466757e11d9ce | refs/heads/master | 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null | UTF-8 | Python | false | false | 2,088 | py | # -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## DLink.DGS3100.get_interface_status
##----------------------------------------------------------------------
## Copyright (C) 2007-2014 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
"""
"""
from noc.sa.script import Script as NOCScript
from noc.sa.interfaces import IGetInterfaceStatus
import re
class Script(NOCScript):
name = "DLink.DGS3100.get_interface_status"
implements = [IGetInterfaceStatus]
rx_line = re.compile(
r"^\s*(?P<interface>\S+)\s+(Enabled|Disabled)\s+\S+\s+"
r"(?P<status>.+)\s+(Enabled|Disabled)\s*$",
re.IGNORECASE | re.MULTILINE)
def execute(self, interface=None):
# Not tested. Must be identical in different vendors
if self.snmp and self.access_profile.snmp_ro:
try:
# Get interface status
r = []
# IF-MIB::ifName, IF-MIB::ifOperStatus
for n, s in self.snmp.join_tables(
"1.3.6.1.2.1.31.1.1.1.1",
"1.3.6.1.2.1.2.2.1.8", bulk=True):
if not n.startswith("802.1Q Encapsulation Tag"):
                        if interface is None or interface == n:
                            r += [{"interface": n, "status": int(s) == 1}]
return r
except self.snmp.TimeOutError:
pass
# Fallback to CLI
if interface is None:
interface = "all"
try:
s = self.cli("show ports %s" % interface)
except self.CLISyntaxError:
raise self.NotSupportedError()
r = []
for match in self.rx_line.finditer(s):
r += [{
"interface": match.group("interface"),
"status": match.group("status").strip() != "Link Down"
}]
return r
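# Illustrative note (assumption about the result shape): both the SNMP and
# CLI paths return a list of dicts such as
#   [{"interface": "1:1", "status": True}, {"interface": "1:2", "status": False}]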
| [
"[email protected]"
] | |
72d33eb12312e3ec1ce19ff5b0ab6f6eae075df6 | 1d01149498a26385eb0e47b35a1045d37a1584b4 | /pyntcloud/io/ply.py | 938cdcd52aabd2ce9263e5e594e767cec8d040fe | [
"Unlicense"
] | permissive | iindovina/pyntcloud | f54c0dc528385bbeb1d37952abbfe631594e7060 | e2499f7f1a897d3ff0b526a12b88f3914a663067 | refs/heads/master | 2021-07-21T07:35:19.129049 | 2017-10-31T14:34:23 | 2017-10-31T14:34:23 | 107,396,333 | 0 | 0 | null | 2017-10-18T11:03:46 | 2017-10-18T11:03:46 | null | UTF-8 | Python | false | false | 7,055 | py | # HAKUNA MATATA
import sys
import numpy as np
import pandas as pd
from collections import defaultdict
ply_dtypes = dict([
(b'int8', 'i1'),
(b'char', 'i1'),
(b'uint8', 'u1'),
    (b'uchar', 'u1'),
(b'int16', 'i2'),
(b'short', 'i2'),
(b'uint16', 'u2'),
(b'ushort', 'u2'),
(b'int32', 'i4'),
(b'int', 'i4'),
(b'uint32', 'u4'),
(b'uint', 'u4'),
(b'float32', 'f4'),
(b'float', 'f4'),
(b'float64', 'f8'),
(b'double', 'f8')
])
valid_formats = {'ascii': '', 'binary_big_endian': '>',
'binary_little_endian': '<'}
def read_ply(filename):
""" Read a .ply (binary or ascii) file and store the elements in pandas DataFrame
Parameters
----------
filename: str
        Path to the file
Returns
-------
data: dict
Elements as pandas DataFrames; comments and ob_info as list of string
"""
with open(filename, 'rb') as ply:
if b'ply' not in ply.readline():
            raise ValueError('The file does not start with the word ply')
# get binary_little/big or ascii
fmt = ply.readline().split()[1].decode()
# get extension for building the numpy dtypes
ext = valid_formats[fmt]
line = []
dtypes = defaultdict(list)
count = 2
points_size = None
mesh_size = None
while b'end_header' not in line and line != b'':
line = ply.readline()
if b'element' in line:
line = line.split()
name = line[1].decode()
size = int(line[2])
if name == "vertex":
points_size = size
elif name == "face":
mesh_size = size
elif b'property' in line:
line = line.split()
# element mesh
if b'list' in line:
mesh_names = ['n_points', 'v1', 'v2', 'v3']
if fmt == "ascii":
# the first number has different dtype than the list
dtypes[name].append(
(mesh_names[0], ply_dtypes[line[2]]))
# rest of the numbers have the same dtype
dt = ply_dtypes[line[3]]
else:
# the first number has different dtype than the list
dtypes[name].append(
(mesh_names[0], ext + ply_dtypes[line[2]]))
# rest of the numbers have the same dtype
dt = ext + ply_dtypes[line[3]]
for j in range(1, 4):
dtypes[name].append((mesh_names[j], dt))
else:
if fmt == "ascii":
dtypes[name].append(
(line[2].decode(), ply_dtypes[line[1]]))
else:
dtypes[name].append(
(line[2].decode(), ext + ply_dtypes[line[1]]))
count += 1
# for bin
end_header = ply.tell()
data = {}
if fmt == 'ascii':
top = count
bottom = 0 if mesh_size is None else mesh_size
names = [x[0] for x in dtypes["vertex"]]
data["points"] = pd.read_csv(filename, sep=" ", header=None, engine="python",
skiprows=top, skipfooter=bottom, usecols=names, names=names)
for n, col in enumerate(data["points"].columns):
data["points"][col] = data["points"][col].astype(
dtypes["vertex"][n][1])
if mesh_size is not None:
top = count + points_size
names = [x[0] for x in dtypes["face"]][1:]
usecols = [1, 2, 3]
data["mesh"] = pd.read_csv(
filename, sep=" ", header=None, engine="python", skiprows=top, usecols=usecols, names=names)
for n, col in enumerate(data["mesh"].columns):
data["mesh"][col] = data["mesh"][col].astype(
dtypes["face"][n + 1][1])
else:
with open(filename, 'rb') as ply:
ply.seek(end_header)
data["points"] = pd.DataFrame(np.fromfile(
ply, dtype=dtypes["vertex"], count=points_size))
if mesh_size is not None:
data["mesh"] = pd.DataFrame(np.fromfile(
ply, dtype=dtypes["face"], count=mesh_size))
data["mesh"].drop('n_points', axis=1, inplace=True)
return data
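# Illustrative usage sketch (the file name is made up):
#   data = read_ply("bunny.ply")
#   points = data["points"]      # DataFrame with one column per property
#   mesh = data.get("mesh")      # DataFrame with v1, v2, v3 if faces exist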
def write_ply(filename, points=None, mesh=None, as_text=False):
"""
Parameters
----------
filename: str
The created file will be named with this
points: ndarray
mesh: ndarray
as_text: boolean
Set the write mode of the file. Default: binary
Returns
-------
boolean
True if no problems
"""
if not filename.endswith('ply'):
filename += '.ply'
# open in text mode to write the header
with open(filename, 'w') as ply:
header = ['ply']
if as_text:
header.append('format ascii 1.0')
else:
header.append('format binary_' + sys.byteorder + '_endian 1.0')
if points is not None:
header.extend(describe_element('vertex', points))
if mesh is not None:
mesh = mesh.copy()
mesh.insert(loc=0, column="n_points", value=3)
mesh["n_points"] = mesh["n_points"].astype("u1")
header.extend(describe_element('face', mesh))
header.append('end_header')
for line in header:
ply.write("%s\n" % line)
if as_text:
if points is not None:
points.to_csv(filename, sep=" ", index=False, header=False, mode='a',
encoding='ascii')
if mesh is not None:
mesh.to_csv(filename, sep=" ", index=False, header=False, mode='a',
encoding='ascii')
else:
# open in binary/append to use tofile
with open(filename, 'ab') as ply:
if points is not None:
points.to_records(index=False).tofile(ply)
if mesh is not None:
mesh.to_records(index=False).tofile(ply)
return True
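# Illustrative round-trip sketch (assumes points/mesh are DataFrames shaped
# like the ones produced by read_ply above):
#   write_ply("out.ply", points=points, mesh=mesh, as_text=True)
#   data = read_ply("out.ply")   # should recover the same elements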
def describe_element(name, df):
""" Takes the columns of the dataframe and builds a ply-like description
Parameters
----------
name: str
df: pandas DataFrame
Returns
-------
element: list[str]
"""
property_formats = {'f': 'float', 'u': 'uchar', 'i': 'int'}
element = ['element ' + name + ' ' + str(len(df))]
if name == 'face':
element.append("property list uchar int vertex_indices")
else:
for i in range(len(df.columns)):
# get first letter of dtype to infer format
f = property_formats[str(df.dtypes[i])[0]]
element.append('property ' + f + ' ' + df.columns.values[i])
return element
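# Illustrative output sketch: for a 100-row DataFrame with float columns
# x and y, describe_element('vertex', df) returns
#   ['element vertex 100', 'property float x', 'property float y']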
| [
"[email protected]"
] | |
e913c4515484c39cb110b8eeb50f63d53599c617 | cec916f882afbd09fe68f6b88879e68eaea976f6 | /bigmler/options/multilabel.py | c7cad984064814fe938c6fa487987c77e151eef4 | [
"Apache-2.0"
] | permissive | jaor/bigmler | d86db6d7950768d7ba3e21b5f29bc265467f4cad | bbf221e41ef04e8d37a511a35a63216b64689449 | refs/heads/master | 2023-04-26T12:07:49.428263 | 2023-04-12T15:22:20 | 2023-04-12T15:22:20 | 15,663,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,530 | py | # -*- coding: utf-8 -*-
#
# Copyright 2014-2023 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Options for BigMLer multi-label processing
"""
def get_multi_label_options(defaults=None):
"""Multi-label-related options
"""
if defaults is None:
defaults = {}
options = {
# Multi-label labels. If set, only the given labels are expanded
'--labels': {
'action': 'store',
'dest': 'labels',
'default': defaults.get('labels', None),
'help': ("Comma-separated list of the labels"
" to be expanded from a multi-label field.")},
# Multi-label label separator. Separator used when splitting labels in
# the objective field.
'--label-separator': {
'action': 'store',
'dest': 'label_separator',
'default': defaults.get('label_separator', None),
'help': ("Separator used when splitting labels in the"
" objective field.")},
# Multi-label fields. Comma-separated list of fields that should be
# treated as being multi-label fields.
# Either its name or column number.
'--multi-label-fields': {
'action': 'store',
'dest': 'multi_label_fields',
'default': defaults.get('multi_label_fields', None),
'help': ("Comma-separated list of the fields"
" to be expanded as being multi-label. Name"
" or column number.")},
# Label-aggregates. Comma-separated list of aggregation functions
# for the multi-label fields.
'--label-aggregates': {
'action': 'store',
'dest': 'label_aggregates',
'default': defaults.get('label_aggregates', None),
'help': ("Comma-separated list of aggregation functions "
"for the multi-label field labels."
" Allowed aggregates: count, first and last")}}
return options
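# Illustrative sketch (hypothetical wiring, not part of BigMLer itself): each
# returned dict is shaped like argparse keyword arguments, e.g.
#   for name, kwargs in get_multi_label_options().items():
#       parser.add_argument(name, **kwargs)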
| [
"[email protected]"
] | |
e4f471f5de8a7768f739889d5f7bdae7996aecb4 | de75304d96e433f67dba3438f2456dd3dbb2ce08 | /scriptsLinAlg/06_matrix-properties.py | dd6fc05f9695dbbf6cadf91808dc93466ce05c6f | [] | no_license | dalerxli/slides_linear-algebra-intro | ef7486a2779d5cd6633222662c629eae0ee59997 | 9bdbafeecd620a13e2c152bc3eb331543a5d7674 | refs/heads/master | 2023-07-14T14:35:24.395828 | 2021-08-10T17:55:04 | 2021-08-10T17:55:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | #!/usr/bin/env python3
# _*_ coding: utf-8 _*_
import numpy as np
A = np.array([
[7, 1],
[2, -3],
[4, 8],
])
B = np.array([
[ 1, 6 ],
[-2, 3 ],
])
C = np.array([
[4, 1],
[7, 3],
])
# Associative
ABC = A.dot(B.dot(C))
AB_C = A.dot(B).dot(C)
# Distributive
D = A.dot(B + C)
E = A.dot(B) + A.dot(C)
# Commutative
print('\n', B.dot(C))
print('\n', C.dot(B))
print('\n', B.dot(C) == C.dot(B))
v1 = np.array([[3],
[8],
[1],
])
v2 = np.array([[4],
[8],
[3],
])
print('\n', v1.T.dot(v2))
print('\n', v2.T.dot(v1)) | [
"[email protected]"
] | |
da6b586298b34518a6dec9ed0a3e1211f0ea97b6 | cda2c95ee6167a34ce9ba3ea25707469a2c357ca | /neural_augmented_simulator/old-code/simple_joints_lstm/dataset_ergoreachersimple_v2.py | ef2f878e4c98cdf6b58abc25f8daf9dec3c33b7a | [] | no_license | fgolemo/neural-augmented-simulator | f43d9a88187fbef478aba9b4399eaa59d8795746 | eb02f20d92e6775824dbac221771f8b8c6dda582 | refs/heads/master | 2020-06-23T15:50:35.957495 | 2020-03-06T21:16:34 | 2020-03-06T21:16:34 | 198,666,041 | 0 | 1 | null | 2019-12-04T22:19:27 | 2019-07-24T15:49:48 | Jupyter Notebook | UTF-8 | Python | false | false | 2,498 | py | import os
from torch.utils.data import Dataset
import numpy as np
class DatasetErgoreachersimpleV2(Dataset):
def __init__(self, path="~/data/sim2real/data-ergoreachersimple-v2.npz", train=True, nosim=False):
super().__init__()
ds = np.load(os.path.expanduser(path))
self.nosim = nosim
self.curr_real = ds["state_current_real"]
self.next_real = ds["state_next_real"]
self.next_sim = ds["state_next_sim"]
self.action = ds["actions"]
if train:
self.curr_real = self.curr_real[:900]
self.next_real = self.next_real[:900]
self.next_sim = self.next_sim[:900]
self.action = self.action[:900]
else:
self.curr_real = self.curr_real[900:]
self.next_real = self.next_real[900:]
self.next_sim = self.next_sim[900:]
self.action = self.action[900:]
def __len__(self):
return len(self.curr_real)
def format_data(self, idx):
if not self.nosim:
diff = self.next_real[idx] - self.next_sim[idx]
else:
diff = self.next_real[idx] - self.curr_real[idx]
return (
np.hstack((self.next_sim[idx], self.curr_real[idx], self.action[idx])),
diff
)
def __getitem__(self, idx):
x, y = self.format_data(idx)
return {"x": x, "y": y}
if __name__ == '__main__':
dsr = DatasetErgoreachersimpleV2(train=False)
print("len test", len(dsr))
print(dsr[10])
dsr = DatasetErgoreachersimpleV2(train=True)
print("len train", len(dsr))
print(dsr[10]["x"].shape, dsr[10]["y"].shape)
for i in range(10, 20):
print("real t1:", dsr[0]["x"][i, 8:16].round(2))
print("sim_ t2:", dsr[0]["x"][i, :8].round(2))
print("action_:", dsr[0]["x"][i, 16:].round(2))
print("real t2:", (dsr[0]["x"][i, :8] + dsr[0]["y"][i]).round(2))
print("delta__:", dsr[0]["y"][i].round(2))
print("===")
max_x = -np.inf
min_x = +np.inf
max_y = -np.inf
min_y = +np.inf
for item in dsr:
if item["x"].max() > max_x:
max_x = item["x"].max()
if item["y"].max() > max_y:
max_y = item["y"].max()
if item["x"].min() < min_x:
min_x = item["x"].min()
if item["y"].min() < min_y:
min_y = item["y"].min()
print("min x {}, max x {}\n"
"min y {}, max y {}".format(min_x, max_x, min_y, max_y))
| [
"[email protected]"
] | |
b5aed2abd39b21240499c9a0569177c51db73fb1 | 436290052fdd3a7c8cabfdaffd9f8bf7ff6d325b | /analysis/generate_dendrogram_2.py | f04b5eb96a4f539e9f55923e418a54389b453ece | [] | no_license | mf1832146/bi-tbcnn | bcf7abc3bb8c6022c1b21aac481e6fe0ac21b40f | 309f5f16c3211f922fa04eed32fa7d6c58978faf | refs/heads/master | 2020-07-30T05:48:35.117150 | 2019-03-23T14:22:54 | 2019-03-23T14:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,937 | py | from scipy.cluster.hierarchy import dendrogram, linkage
import random
from matplotlib.pyplot import show
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import fcluster
from matplotlib import pyplot as plt
import numpy as np
import matplotlib.colors as colors
from sklearn.decomposition import PCA
from collections import Counter
max_d = 31
with open("./test_vectors/cpp_vectors_bitbcnn_def_use.txt", "r") as f:
cpp_data = f.readlines()
with open("./test_vectors/java_vectors_bitbcnn_def_use.txt", "r") as f:
java_data = f.readlines()
labels = []
bubblesort_vectors = []
mergesort_vectors = []
quicksort_vectors = []
linkedlist_vectors = []
queue_vectors = []
stack_vectors = []
heap_vectors = []
bfs_vectors = []
dfs_vectors = []
knapsack_vectors = []
cpp_bubblesort_vectors = []
cpp_mergesort_vectors = []
cpp_quicksort_vectors = []
cpp_linkedlist_vectors = []
cpp_queue_vectors = []
cpp_stack_vectors = []
cpp_heap_vectors = []
cpp_bfs_vectors = []
cpp_dfs_vectors = []
cpp_knapsack_vectors = []
java_bubblesort_vectors = []
java_mergesort_vectors = []
java_quicksort_vectors = []
java_linkedlist_vectors = []
java_queue_vectors = []
java_stack_vectors = []
java_heap_vectors = []
java_bfs_vectors = []
java_dfs_vectors = []
java_knapsack_vectors = []
vectors_all = []
labels_all = []
label_index = []
lang_index = []
cpp_vectors = []
java_vectors = []
lang_labels = []
cpp_label_index = []
java_label_index = []
cpp_labels_all = []
java_labels_all = []
for line in cpp_data:
l = line.replace("\n","").strip()
splits = l.split(" ")
label = splits[0]
vector = splits[1:len(splits)]
vector_f = [float(i) for i in vector]
# labels.append(label)
if label == "bfs":
bfs_vectors.append(vector_f)
label_index.append(1)
cpp_label_index.append(1)
cpp_bfs_vectors.append(vector_f)
if label == "dfs":
dfs_vectors.append(vector_f)
label_index.append(2)
cpp_label_index.append(2)
cpp_dfs_vectors.append(vector_f)
if label == "bubblesort":
bubblesort_vectors.append(vector_f)
cpp_bubblesort_vectors.append(vector_f)
label_index.append(3)
cpp_label_index.append(3)
if label == "quicksort":
quicksort_vectors.append(vector_f)
cpp_quicksort_vectors.append(vector_f)
label_index.append(4)
cpp_label_index.append(4)
if label == "mergesort":
mergesort_vectors.append(vector_f)
cpp_mergesort_vectors.append(vector_f)
label_index.append(5)
cpp_label_index.append(5)
if label == "heap":
heap_vectors.append(vector_f)
cpp_heap_vectors.append(vector_f)
label_index.append(6)
cpp_label_index.append(6)
if label == "linkedlist":
linkedlist_vectors.append(vector_f)
cpp_linkedlist_vectors.append(vector_f)
label_index.append(7)
cpp_label_index.append(7)
if label == "queue":
queue_vectors.append(vector_f)
cpp_queue_vectors.append(vector_f)
label_index.append(8)
cpp_label_index.append(8)
if label == "stack":
stack_vectors.append(vector_f)
cpp_stack_vectors.append(vector_f)
label_index.append(9)
cpp_label_index.append(9)
if label == "knapsack":
knapsack_vectors.append(vector_f)
cpp_knapsack_vectors.append(vector_f)
label_index.append(10)
cpp_label_index.append(10)
cpp_vectors.append(vector_f)
# if label != "knapsack":
vectors_all.append(vector_f)
labels_all.append(label+"_cpp")
lang_index.append(1)
cpp_labels_all.append(label)
lang_labels.append("cpp")
for line in java_data:
l = line.replace("\n","").strip()
splits = l.split(" ")
label = splits[0]
vector = splits[1:len(splits)]
vector_f = [float(i) for i in vector]
# labels.append(label)
if label == "bfs":
bfs_vectors.append(vector_f)
java_bfs_vectors.append(vector_f)
label_index.append(11)
java_label_index.append(11)
if label == "dfs":
dfs_vectors.append(vector_f)
java_dfs_vectors.append(vector_f)
label_index.append(12)
java_label_index.append(12)
if label == "bubblesort":
bubblesort_vectors.append(vector_f)
java_bubblesort_vectors.append(vector_f)
label_index.append(13)
java_label_index.append(13)
if label == "quicksort":
quicksort_vectors.append(vector_f)
java_quicksort_vectors.append(vector_f)
label_index.append(14)
java_label_index.append(14)
if label == "mergesort":
mergesort_vectors.append(vector_f)
java_mergesort_vectors.append(vector_f)
label_index.append(15)
java_label_index.append(15)
if label == "heap":
heap_vectors.append(vector_f)
java_heap_vectors.append(vector_f)
label_index.append(16)
java_label_index.append(16)
if label == "linkedlist":
linkedlist_vectors.append(vector_f)
java_linkedlist_vectors.append(vector_f)
label_index.append(17)
java_label_index.append(17)
if label == "queue":
queue_vectors.append(vector_f)
java_queue_vectors.append(vector_f)
label_index.append(18)
java_label_index.append(18)
if label == "stack":
stack_vectors.append(vector_f)
java_stack_vectors.append(vector_f)
label_index.append(19)
java_label_index.append(19)
if label == "knapsack":
knapsack_vectors.append(vector_f)
java_knapsack_vectors.append(vector_f)
label_index.append(20)
java_label_index.append(20)
java_vectors.append(vector_f)
# if label != "knapsack":
vectors_all.append(vector_f)
labels_all.append(label+"_java")
lang_index.append(2)
java_labels_all.append(label)
lang_labels.append("java")
print("length java vectors : " + str(len(java_vectors)))
print("length cpp vectors : " + str(len(cpp_vectors)))
print("length all vectors : " + str(len(vectors_all)))
print("length cpp bfs vectors : " + str(len(cpp_bfs_vectors)))
print("length cpp dfs vectors : " + str(len(cpp_dfs_vectors)))
print("length cpp bubblesort vectors : " + str(len(cpp_bubblesort_vectors)))
print("length cpp quicksort vectors : " + str(len(cpp_quicksort_vectors)))
print("length cpp mergesort vectors : " + str(len(cpp_mergesort_vectors)))
print("length cpp heap vectors : " + str(len(cpp_heap_vectors)))
print("length cpp linkedlist vectors : " + str(len(cpp_linkedlist_vectors)))
print("length cpp queue vectors : " + str(len(cpp_queue_vectors)))
print("length cpp stack vectors : " + str(len(cpp_stack_vectors)))
print("length cpp knapsack vectors : " + str(len(cpp_knapsack_vectors)))
print("length java bfs vectors : " + str(len(java_bfs_vectors)))
print("length java dfs vectors : " + str(len(java_dfs_vectors)))
print("length java bubblesort vectors : " + str(len(java_bubblesort_vectors)))
print("length java quicksort vectors : " + str(len(java_quicksort_vectors)))
print("length java mergesort vectors : " + str(len(java_mergesort_vectors)))
print("length java heap vectors : " + str(len(java_heap_vectors)))
print("length java linkedlist vectors : " + str(len(java_linkedlist_vectors)))
print("length java queue vectors : " + str(len(java_queue_vectors)))
print("length java stack vectors : " + str(len(java_stack_vectors)))
print("length java knapsack vectors : " + str(len(java_knapsack_vectors)))
vectors_all_append = []
vectors_all_append.extend(cpp_bfs_vectors)
vectors_all_append.extend(cpp_dfs_vectors)
vectors_all_append.extend(cpp_bubblesort_vectors)
vectors_all_append.extend(cpp_quicksort_vectors)
vectors_all_append.extend(cpp_mergesort_vectors)
vectors_all_append.extend(cpp_heap_vectors)
vectors_all_append.extend(cpp_linkedlist_vectors)
vectors_all_append.extend(cpp_queue_vectors)
vectors_all_append.extend(cpp_stack_vectors)
vectors_all_append.extend(cpp_knapsack_vectors)
vectors_all_append.extend(java_bfs_vectors)
vectors_all_append.extend(java_dfs_vectors)
vectors_all_append.extend(java_bubblesort_vectors)
vectors_all_append.extend(java_quicksort_vectors)
vectors_all_append.extend(java_mergesort_vectors)
vectors_all_append.extend(java_heap_vectors)
vectors_all_append.extend(java_linkedlist_vectors)
vectors_all_append.extend(java_queue_vectors)
vectors_all_append.extend(java_stack_vectors)
vectors_all_append.extend(java_knapsack_vectors)
algo_lang_labels_cpp_ordered = ["bfs_cpp","dfs_cpp","bubblesort_cpp","quicksort_cpp","mergesort_cpp","heap_cpp","linkedlist_cpp","queue_cpp","stack_cpp","knapsack_cpp"]
algo_lang_labels_java_ordered = ["bfs_java","dfs_java","bubblesort_java","quicksort_java","mergesort_java","heap_java","linkedlist_java","queue_java","stack_java","knapsack_java"]
algo_lang_labels = algo_lang_labels_cpp_ordered
algo_lang_labels.extend(algo_lang_labels_java_ordered)
pca = PCA(n_components=2)
vectors_all_pca = pca.fit_transform(cpp_vectors)
# cpp_all_pca = pca.fit_transform(java_vectors)
# java_all_pca = pca.fit_transform(cpp_vectors)
# data_langs = (cpp_all_pca, java_all_pca)
# colors = ("red", "green")
groups = ["cpp", "java"]
label_color_dict = {label:idx for idx,label in enumerate(np.unique(lang_labels))}
cvec = [label_color_dict[label] for label in lang_labels]
cpp_label_dict = {
1: "bfs_cpp",
2: "dfs_cpp",
3: "bubblesort_cpp",
4: "quicksort_cpp",
5: "mergesort_cpp",
6: "heap_cpp",
7: "linkedlist_cpp",
8: "queue_cpp",
9: "stack_cpp",
10: "knapsack_cpp",
}
java_label_dict = {
11: "bfs_java",
12: "dfs_java",
13: "bubblesort_java",
14: "quicksort_java",
15: "mergesort_java",
16: "heap_java",
17: "linkedlist_java",
18: "queue_java",
19: "stack_java",
20: "knapsack_java"
}
label_dict = {
1: "bfs_cpp",
2: "dfs_cpp",
3: "bubblesort_cpp",
4: "quicksort_cpp",
5: "mergesort_cpp",
6: "heap_cpp",
7: "linkedlist_cpp",
8: "queue_cpp",
9: "stack_cpp",
10: "knapsack_cpp",
11: "bfs_java",
12: "dfs_java",
13: "bubblesort_java",
14: "quicksort_java",
15: "mergesort_java",
16: "heap_java",
17: "linkedlist_java",
18: "queue_java",
19: "stack_java",
20: "knapsack_java"
}
print(lang_index)
print(cvec)
# fig, ax = plt.subplots()
# cmap = plt.get_cmap('jet', 20)
# cmap.set_under('gray')
# fig, ax = plt.subplots()
# cax = ax.scatter(vectors_all_pca[:,0],vectors_all_pca[:,1], c= label_index, s=50, cmap=cmap)
# fig.colorbar(cax, extend='min')
# plt.show()
# Add a colorbar. Move the ticks up by 0.5, so they are centred on the colour.
# cb=plt.colorbar(ticks=np.array(label_index)+0.5)
# cb.set_ticklabels(label_index)
# plt.scatter(vectors_all_pca[0:108,0],vectors_all_pca[0:108,1],c="r",label="bfs_cpp")
# plt.scatter(vectors_all_pca[108:217,0],vectors_all_pca[108:217,1],c="g",label="dfs_cpp")
# plt.scatter(vectors_all_pca[108:217,0],vectors_all_pca[108:217,1],c="g",label="dfs_cpp")
# plt.scatter(vectors_all_pca[108:217,0],vectors_all_pca[108:217,1],c="g",label="dfs_cpp")
# plt.scatter(vectors_all_pca[108:217,0],vectors_all_pca[108:217,1],c="g",label="dfs_cpp")
# plt.scatter(vectors_all_pca[108:217,0],vectors_all_pca[108:217,1],c="g",label="dfs_cpp")
# plt.scatter(vectors_all_pca[108:217,0],vectors_all_pca[108:217,1],c="g",label="dfs_cpp")
label_index = cpp_label_index
unique = list(set(label_index))
colors_c = [plt.cm.jet(float(i)/max(unique)) for i in unique]
for i, u in enumerate(unique):
xi = [vectors_all_pca[:,0][j] for j in range(len(vectors_all_pca[:,0])) if label_index[j] == u]
yi = [vectors_all_pca[:,1][j] for j in range(len(vectors_all_pca[:,1])) if label_index[j] == u]
plt.scatter(xi, yi, c=colors_c[i], label=cpp_label_dict[u])
plt.legend(bbox_to_anchor=(1,1), loc=1, ncol=5)
# for i, vec in enumerate(vectors_all_pca):
# # print label_index[i]
# plt.scatter(vec[0],vec[1],c=colors_c[i],label=str(label_index[i]))
# ax.scatter(cpp_all_pca[:,0], cpp_all_pca[:,1], color='r')
# ax.scatter(java_all_pca[:,0], java_all_pca[:,1], color='g')
# plt.show()
# for color,group in zip(colors, groups):
# plt.scatter(vectors_all_pca[:,0],vectors_all_pca[:,1],c=cvec)
# plt.scatter(vectors_all_pca[0:1040,0],vectors_all_pca[0:1040,1],c="r",label="cpp")
# plt.scatter(vectors_all_pca[1040:1539,0],vectors_all_pca[1040:1539,1],c="g",label="java")
plt.title('Visualization of cpp programs (vectors extracted from Bi-DTBCNN)')
# plt.legend(loc=2)
plt.show()
# labels = ["bfs","dfs","bublesort","quicksort","mergesort","heap","linkedlist", "queue","stack","knapsack"]
vectors = []
print("aaaaa : " + str(len(dfs_vectors)))
# vectors.append(random.choice(bfs_vectors))
# vectors.append(random.choice(dfs_vectors))
# vectors.append(random.choice(bubblesort_vectors))
# vectors.append(random.choice(quicksort_vectors))
# vectors.append(random.choice(mergesort_vectors))
# vectors.append(random.choice(heap_vectors))
# vectors.append(random.choice(linkedlist_vectors))
# vectors.append(random.choice(queue_vectors))
# vectors.append(random.choice(stack_vectors))
# vectors.append(random.choice(knapsack_vectors))
samples = 10
vectors.extend(random.sample(bfs_vectors,samples))
vectors.extend(random.sample(dfs_vectors,samples))
vectors.extend(random.sample(bubblesort_vectors,samples))
vectors.extend(random.sample(quicksort_vectors,samples))
vectors.extend(random.sample(mergesort_vectors,samples))
vectors.extend(random.sample(heap_vectors,samples))
vectors.extend(random.sample(linkedlist_vectors,samples))
vectors.extend(random.sample(queue_vectors,samples))
vectors.extend(random.sample(stack_vectors,samples))
vectors.extend(random.sample(knapsack_vectors,samples))
labels.extend(["bfs" for i in range(0,samples)])
labels.extend(["dfs" for i in range(0,samples)])
labels.extend(["bublesort" for i in range(0,samples)])
labels.extend(["quicksort" for i in range(0,samples)])
labels.extend(["mergesort" for i in range(0,samples)])
labels.extend(["heap" for i in range(0,samples)])
labels.extend(["linkedlist" for i in range(0,samples)])
labels.extend(["queue" for i in range(0,samples)])
labels.extend(["stack" for i in range(0,samples)])
labels.extend(["knapsack" for i in range(0,samples)])
print(labels)
print("length of labels : " + str(len(labels)))
# print vectors
def augmented_dendrogram(*args, **kwargs):
ddata = dendrogram(*args, **kwargs)
if not kwargs.get('no_plot', False):
for i, d in zip(ddata['icoord'], ddata['dcoord']):
x = 0.5 * sum(i[1:3])
y = d[1]
plt.plot(x, y, 'ro')
plt.annotate("%.3g" % y, (x, y), xytext=(0, -8),
textcoords='offset points',
va='top', ha='center')
return ddata
def fancy_dendrogram(*args, **kwargs):
max_d = kwargs.pop('max_d', None)
if max_d and 'color_threshold' not in kwargs:
kwargs['color_threshold'] = max_d
annotate_above = kwargs.pop('annotate_above', 0)
ddata = dendrogram(*args, **kwargs)
if not kwargs.get('no_plot', False):
# plt.title('Hierarchical Clustering Dendrogram (10 program each class)')
plt.title('Hierarchical Clustering Dendrogram (all cpp program) - vectors are extracted from D-TBCNN')
plt.xlabel('sample index or (cluster size)')
plt.ylabel('distance')
for i, d, c in zip(ddata['icoord'], ddata['dcoord'], ddata['color_list']):
x = 0.5 * sum(i[1:3])
y = d[1]
if y > annotate_above:
plt.plot(x, y, 'o', c=c)
plt.annotate("%.3g" % y, (x, y), xytext=(0, -5),
textcoords='offset points',
va='top', ha='center')
if max_d:
plt.axhline(y=max_d, c='k')
return ddata
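# Illustrative note: fancy_dendrogram wraps scipy's dendrogram, annotating
# merge distances above `annotate_above` and drawing a horizontal cut line at
# `max_d`, matching the fcluster(Z, max_d, criterion='distance') cut below.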
print("leng of vectors : " + str(len(vectors)))
Z = linkage(cpp_vectors, "ward")
# print(Z)
# fancy_dendrogram(Z, labels= labels)
# dendrogram(Z,truncate_mode='lastp',p=12,labels = labels, show_contracted=True)
ddata = fancy_dendrogram(Z,truncate_mode='lastp',labels = cpp_labels_all,max_d=max_d)
# Retrieving cluster
clusters = fcluster(Z, max_d, criterion='distance')
print(clusters)
print("length of cluster : " + str(len(clusters)))
# cluster_indexes = [x for x in range(0,11)]
from collections import defaultdict
result = defaultdict(list)
for k, v in zip(clusters, labels_all):
result[k].append(v)
for k,v in result.items():
print("---------------------------")
print("Number of programs : " + str(len(v)))
print("Cluster %s - %s" % (str(k), str(v)))
print("Counter : " + str(Counter(v)))
show()
| [
"[email protected]"
] | |
bc7b26f53ee58f08c0235b1f7d00ab1788620dcd | 3899dd3debab668ef0c4b91c12127e714bdf3d6d | /venv/Lib/site-packages/tensorflow/python/util/decorator_utils.py | 4b4ce90e1210574d10a44de4deb88c68fb84c884 | [] | no_license | SphericalPotatoInVacuum/CNNDDDD | b2f79521581a15d522d8bb52f81b731a3c6a4db4 | 03c5c0e7cb922f53f31025b7dd78287a19392824 | refs/heads/master | 2020-04-21T16:10:25.909319 | 2019-02-08T06:04:42 | 2019-02-08T06:04:42 | 169,691,960 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,741 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions for writing decorators (which modify docstrings)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
def get_qualified_name(function):
# Python 3
if hasattr(function, '__qualname__'):
return function.__qualname__
# Python 2
if hasattr(function, 'im_class'):
return function.im_class.__name__ + '.' + function.__name__
return function.__name__
def _normalize_docstring(docstring):
"""Normalizes the docstring.
Replaces tabs with spaces, removes leading and trailing blanks lines, and
removes any indentation.
Copied from PEP-257:
https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
Args:
docstring: the docstring to normalize
Returns:
The normalized docstring
"""
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# (we use sys.maxsize because sys.maxint doesn't exist in Python 3)
indent = sys.maxsize
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed)
def add_notice_to_docstring(
doc, instructions, no_doc_str, suffix_str, notice):
"""Adds a deprecation notice to a docstring."""
if not doc:
lines = [no_doc_str]
else:
lines = _normalize_docstring(doc).splitlines()
lines[0] += ' ' + suffix_str
notice = [''] + notice + ([instructions] if instructions else [])
if len(lines) > 1:
# Make sure that we keep our distance from the main body
if lines[1].strip():
notice.append('')
lines[1:1] = notice
else:
lines += notice
return '\n'.join(lines)
def validate_callable(func, decorator_name):
if not hasattr(func, '__call__'):
raise ValueError(
'%s is not a function. If this is a property, make sure'
' @property appears before @%s in your source code:'
'\n\n@property\n@%s\ndef method(...)' % (
func, decorator_name, decorator_name))
class classproperty(object): # pylint: disable=invalid-name
"""Class property decorator.
Example usage:
class MyClass(object):
@classproperty
def value(cls):
return '123'
> print MyClass.value
123
"""
def __init__(self, func):
self._func = func
def __get__(self, owner_self, owner_cls):
return self._func(owner_cls)
| [
"[email protected]"
] | |
dd950c1d86e56494571de4f4f7db8c6f7925e6d9 | a5fcf5efa26615922ad3a8169d4f8911ab6cefe7 | /candidate/controllers.py | e034d3d0337471541f7c4b5a8e18b4d5aa910037 | [
"MIT"
] | permissive | eternal44/WeVoteServer | c0bc5ad96c0c72c6b4b3198a91ef44b6f347cc93 | acaae24d7cb0ec34ec4470247ea1072ee34510cb | refs/heads/master | 2021-01-15T12:44:19.988036 | 2016-04-28T05:03:42 | 2016-04-28T05:03:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,527 | py | # candidate/controllers.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from .models import CandidateCampaignList, CandidateCampaignManager
from ballot.models import CANDIDATE
from config.base import get_environment_variable
from django.contrib import messages
from django.http import HttpResponse
from exception.models import handle_exception
from import_export_vote_smart.controllers import retrieve_and_match_candidate_from_vote_smart, \
retrieve_candidate_photo_from_vote_smart
from import_export_vote_smart.models import VoteSmartCandidateManager
import json
from office.models import ContestOfficeManager
import wevote_functions.admin
from wevote_functions.functions import positive_value_exists
logger = wevote_functions.admin.get_logger(__name__)
WE_VOTE_API_KEY = get_environment_variable("WE_VOTE_API_KEY")
CANDIDATE_CAMPAIGNS_URL = get_environment_variable("CANDIDATE_CAMPAIGNS_URL")
def candidates_import_from_sample_file(request=None, load_from_uri=False):
"""
Get the json data, and either create new entries or update existing
:return:
"""
# if load_from_uri:
# # Request json file from We Vote servers
# messages.add_message(request, messages.INFO, "Loading CandidateCampaign IDs from We Vote Master servers")
# request = requests.get(CANDIDATE_CAMPAIGNS_URL, params={
# "key": WE_VOTE_API_KEY, # This comes from an environment variable
# })
# structured_json = json.loads(request.text)
# else:
# Load saved json from local file
# messages.add_message(request, messages.INFO, "Loading CandidateCampaigns from local file")
with open("candidate/import_data/candidate_campaigns_sample.json") as json_data:
structured_json = json.load(json_data)
candidate_campaign_manager = CandidateCampaignManager()
candidates_saved = 0
candidates_updated = 0
candidates_not_processed = 0
for candidate in structured_json:
candidate_name = candidate['candidate_name'] if 'candidate_name' in candidate else ''
we_vote_id = candidate['we_vote_id'] if 'we_vote_id' in candidate else ''
google_civic_election_id = \
candidate['google_civic_election_id'] if 'google_civic_election_id' in candidate else ''
ocd_division_id = candidate['ocd_division_id'] if 'ocd_division_id' in candidate else ''
contest_office_we_vote_id = \
candidate['contest_office_we_vote_id'] if 'contest_office_we_vote_id' in candidate else ''
# This routine imports from another We Vote server, so a contest_office_id doesn't come from import
# Look it up for this local database. If we don't find it, then we know the contest_office hasn't been imported
# from another server yet, so we fail out.
contest_office_manager = ContestOfficeManager()
contest_office_id = contest_office_manager.fetch_contest_office_id_from_we_vote_id(
contest_office_we_vote_id)
if positive_value_exists(candidate_name) and positive_value_exists(google_civic_election_id) \
and positive_value_exists(we_vote_id) and positive_value_exists(contest_office_id):
proceed_to_update_or_create = True
# elif positive_value_exists(candidate_name) and positive_value_exists(google_civic_election_id) \
# and positive_value_exists(we_vote_id) and positive_value_exists(ocd_division_id) \
# and positive_value_exists(contest_office_we_vote_id):
# proceed_to_update_or_create = True
else:
proceed_to_update_or_create = False
if proceed_to_update_or_create:
updated_candidate_campaign_values = {
# Values we search against
'google_civic_election_id': google_civic_election_id,
'ocd_division_id': ocd_division_id,
'contest_office_we_vote_id': contest_office_we_vote_id,
'candidate_name': candidate_name,
# The rest of the values
'we_vote_id': we_vote_id,
'maplight_id': candidate['maplight_id'] if 'maplight_id' in candidate else None,
'contest_office_id': contest_office_id,
'politician_we_vote_id':
candidate['politician_we_vote_id'] if 'politician_we_vote_id' in candidate else '',
'state_code': candidate['state_code'] if 'state_code' in candidate else '',
'party': candidate['party'] if 'party' in candidate else '',
'order_on_ballot': candidate['order_on_ballot'] if 'order_on_ballot' in candidate else 0,
'candidate_url': candidate['candidate_url'] if 'candidate_url' in candidate else '',
'photo_url': candidate['photo_url'] if 'photo_url' in candidate else '',
'photo_url_from_maplight':
candidate['photo_url_from_maplight'] if 'photo_url_from_maplight' in candidate else '',
'facebook_url': candidate['facebook_url'] if 'facebook_url' in candidate else '',
'twitter_url': candidate['twitter_url'] if 'twitter_url' in candidate else '',
'google_plus_url': candidate['google_plus_url'] if 'google_plus_url' in candidate else '',
'youtube_url': candidate['youtube_url'] if 'youtube_url' in candidate else '',
'google_civic_candidate_name':
candidate['google_civic_candidate_name'] if 'google_civic_candidate_name' in candidate else '',
'candidate_email': candidate['candidate_email'] if 'candidate_email' in candidate else '',
'candidate_phone': candidate['candidate_phone'] if 'candidate_phone' in candidate else '',
}
results = candidate_campaign_manager.update_or_create_candidate_campaign(
we_vote_id, google_civic_election_id, ocd_division_id,
contest_office_id, contest_office_we_vote_id,
candidate_name, updated_candidate_campaign_values)
else:
candidates_not_processed += 1
results = {
'success': False,
'status': 'Required value missing, cannot update or create'
}
if results['success']:
if results['new_candidate_created']:
candidates_saved += 1
else:
candidates_updated += 1
else:
candidates_not_processed += 1
if candidates_not_processed < 5 and request is not None:
messages.add_message(request, messages.ERROR,
results['status'] + "candidate_name: {candidate_name}"
", google_civic_election_id: {google_civic_election_id}"
", we_vote_id: {we_vote_id}"
", contest_office_id: {contest_office_id}"
", contest_office_we_vote_id: {contest_office_we_vote_id}"
"".format(
candidate_name=candidate_name,
google_civic_election_id=google_civic_election_id,
we_vote_id=we_vote_id,
contest_office_id=contest_office_id,
contest_office_we_vote_id=contest_office_we_vote_id,
))
candidates_results = {
'saved': candidates_saved,
'updated': candidates_updated,
'not_processed': candidates_not_processed,
}
return candidates_results
def candidate_retrieve_for_api(candidate_id, candidate_we_vote_id):
"""
Used by the api
:param candidate_id:
:param candidate_we_vote_id:
:return:
"""
# NOTE: Candidates retrieve is independent of *who* wants to see the data. Candidates retrieve never triggers
# a ballot data lookup from Google Civic, like voterBallotItems does
if not positive_value_exists(candidate_id) and not positive_value_exists(candidate_we_vote_id):
status = 'VALID_CANDIDATE_ID_AND_CANDIDATE_WE_VOTE_ID_MISSING'
json_data = {
'status': status,
'success': False,
'kind_of_ballot_item': CANDIDATE,
'id': candidate_id,
'we_vote_id': candidate_we_vote_id,
'google_civic_election_id': 0,
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
candidate_manager = CandidateCampaignManager()
if positive_value_exists(candidate_id):
results = candidate_manager.retrieve_candidate_campaign_from_id(candidate_id)
success = results['success']
status = results['status']
elif positive_value_exists(candidate_we_vote_id):
results = candidate_manager.retrieve_candidate_campaign_from_we_vote_id(candidate_we_vote_id)
success = results['success']
status = results['status']
else:
status = 'VALID_CANDIDATE_ID_AND_CANDIDATE_WE_VOTE_ID_MISSING_2' # It should be impossible to reach this
json_data = {
'status': status,
'success': False,
'kind_of_ballot_item': CANDIDATE,
'id': candidate_id,
'we_vote_id': candidate_we_vote_id,
'google_civic_election_id': 0,
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
if success:
candidate_campaign = results['candidate_campaign']
json_data = {
'status': status,
'success': True,
'kind_of_ballot_item': CANDIDATE,
'id': candidate_campaign.id,
'we_vote_id': candidate_campaign.we_vote_id,
'ballot_item_display_name': candidate_campaign.candidate_name,
'candidate_photo_url': candidate_campaign.candidate_photo_url(),
'order_on_ballot': candidate_campaign.order_on_ballot,
'google_civic_election_id': candidate_campaign.google_civic_election_id,
'maplight_id': candidate_campaign.maplight_id,
'contest_office_id': candidate_campaign.contest_office_id,
'contest_office_we_vote_id': candidate_campaign.contest_office_we_vote_id,
'politician_id': candidate_campaign.politician_id,
'politician_we_vote_id': candidate_campaign.politician_we_vote_id,
# 'google_civic_candidate_name': candidate_campaign.google_civic_candidate_name,
'party': candidate_campaign.party_display(),
'ocd_division_id': candidate_campaign.ocd_division_id,
'state_code': candidate_campaign.state_code,
'candidate_url': candidate_campaign.candidate_url,
'facebook_url': candidate_campaign.facebook_url,
'twitter_url': candidate_campaign.twitter_url,
'twitter_handle': candidate_campaign.fetch_twitter_handle(),
'twitter_description': candidate_campaign.twitter_description,
'twitter_followers_count': candidate_campaign.twitter_followers_count,
'google_plus_url': candidate_campaign.google_plus_url,
'youtube_url': candidate_campaign.youtube_url,
'candidate_email': candidate_campaign.candidate_email,
'candidate_phone': candidate_campaign.candidate_phone,
}
else:
json_data = {
'status': status,
'success': False,
'kind_of_ballot_item': CANDIDATE,
'id': candidate_id,
'we_vote_id': candidate_we_vote_id,
'google_civic_election_id': 0,
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
def candidates_retrieve_for_api(office_id, office_we_vote_id):
"""
Used by the api
:param office_id:
:param office_we_vote_id:
:return:
"""
# NOTE: Candidates retrieve is independent of *who* wants to see the data. Candidates retrieve never triggers
# a ballot data lookup from Google Civic, like voterBallotItems does
if not positive_value_exists(office_id) and not positive_value_exists(office_we_vote_id):
status = 'VALID_OFFICE_ID_AND_OFFICE_WE_VOTE_ID_MISSING'
json_data = {
'status': status,
'success': False,
'office_id': office_id,
'office_we_vote_id': office_we_vote_id,
'google_civic_election_id': 0,
'candidate_list': [],
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
candidate_list = []
candidates_to_display = []
google_civic_election_id = 0
try:
candidate_list_object = CandidateCampaignList()
results = candidate_list_object.retrieve_all_candidates_for_office(office_id, office_we_vote_id)
success = results['success']
status = results['status']
candidate_list = results['candidate_list']
except Exception as e:
status = 'FAILED candidates_retrieve. ' \
'{error} [type: {error_type}]'.format(error=e.message, error_type=type(e))
handle_exception(e, logger=logger, exception_message=status)
success = False
if success:
# Reset office_we_vote_id and office_id so we are sure that it matches what we pull from the database
office_id = 0
office_we_vote_id = ''
for candidate in candidate_list:
one_candidate = {
'id': candidate.id,
'we_vote_id': candidate.we_vote_id,
'ballot_item_display_name': candidate.candidate_name,
'candidate_photo_url': candidate.candidate_photo_url(),
'party': candidate.party_display(),
'order_on_ballot': candidate.order_on_ballot,
'kind_of_ballot_item': CANDIDATE,
}
candidates_to_display.append(one_candidate.copy())
# Capture the office_we_vote_id and google_civic_election_id so we can return
if not positive_value_exists(office_id) and candidate.contest_office_id:
office_id = candidate.contest_office_id
if not positive_value_exists(office_we_vote_id) and candidate.contest_office_we_vote_id:
office_we_vote_id = candidate.contest_office_we_vote_id
if not positive_value_exists(google_civic_election_id) and candidate.google_civic_election_id:
google_civic_election_id = candidate.google_civic_election_id
if len(candidates_to_display):
status = 'CANDIDATES_RETRIEVED'
else:
status = 'NO_CANDIDATES_RETRIEVED'
json_data = {
'status': status,
'success': True,
'office_id': office_id,
'office_we_vote_id': office_we_vote_id,
'google_civic_election_id': google_civic_election_id,
'candidate_list': candidates_to_display,
}
else:
json_data = {
'status': status,
'success': False,
'office_id': office_id,
'office_we_vote_id': office_we_vote_id,
'google_civic_election_id': google_civic_election_id,
'candidate_list': [],
}
return HttpResponse(json.dumps(json_data), content_type='application/json')
def retrieve_candidate_photos(we_vote_candidate, force_retrieve=False):
vote_smart_candidate_exists = False
vote_smart_candidate_just_retrieved = False
vote_smart_candidate_photo_exists = False
vote_smart_candidate_photo_just_retrieved = False
# Has this candidate already been linked to a Vote Smart candidate?
candidate_retrieve_results = retrieve_and_match_candidate_from_vote_smart(we_vote_candidate, force_retrieve)
if positive_value_exists(candidate_retrieve_results['vote_smart_candidate_id']):
# Bring out the object that now has vote_smart_id attached
we_vote_candidate = candidate_retrieve_results['we_vote_candidate']
# Reach out to Vote Smart and retrieve photo URL
photo_retrieve_results = retrieve_candidate_photo_from_vote_smart(we_vote_candidate)
status = photo_retrieve_results['status']
success = photo_retrieve_results['success']
vote_smart_candidate_exists = True
vote_smart_candidate_just_retrieved = candidate_retrieve_results['vote_smart_candidate_just_retrieved']
if success:
vote_smart_candidate_photo_exists = photo_retrieve_results['vote_smart_candidate_photo_exists']
vote_smart_candidate_photo_just_retrieved = \
photo_retrieve_results['vote_smart_candidate_photo_just_retrieved']
else:
status = candidate_retrieve_results['status'] + ' '
status += 'RETRIEVE_CANDIDATE_PHOTOS_NO_CANDIDATE_MATCH'
success = False
results = {
'success': success,
'status': status,
'vote_smart_candidate_exists': vote_smart_candidate_exists,
'vote_smart_candidate_just_retrieved': vote_smart_candidate_just_retrieved,
'vote_smart_candidate_photo_just_retrieved': vote_smart_candidate_photo_just_retrieved,
'vote_smart_candidate_photo_exists': vote_smart_candidate_photo_exists,
}
return results
| [
"[email protected]"
] | |
4abad0338b29831728ac721fc4205a6c61efa18c | 48c4bb95c2d49ca9dca1e6356e61784d6f36a01d | /analysis/summary.py | b14e5896cb708693292283e6c2a96ccbab90bb1e | [] | no_license | afcarl/SpatialMoney | 2a5e5caf91ed7b0925317d4ec949d9ca4752f416 | 6c9339ae556c842f228cd1cce65842b5b346f060 | refs/heads/master | 2020-03-16T16:11:36.835280 | 2018-03-19T20:31:31 | 2018-03-19T20:31:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,032 | py | import numpy as np
import enum
import os
import matplotlib.pyplot as plt
class MoneyAnalysis:
def __init__(self, m0, m1, m2, interruptions):
self.m0 = m0
self.m1 = m1
self.m2 = m2
self.interruptions = interruptions
class MoneyAnalyst(object):
money_threshold = .75
@classmethod
def _test_for_money_state(cls, direct_exchange, indirect_exchange):
money = -1
# Money = 0?
# type '0' should use direct exchange
cond0 = direct_exchange[0] > cls.money_threshold
# type '1' should use indirect exchange
cond1 = indirect_exchange[1] > cls.money_threshold
# type '2' should use direct exchange
cond2 = direct_exchange[2] > cls.money_threshold
if (cond0 * cond1 * cond2) == 1:
money = 0
else:
# Money = 1?
cond0 = direct_exchange[0] > cls.money_threshold
cond1 = direct_exchange[1] > cls.money_threshold
cond2 = indirect_exchange[2] > cls.money_threshold
if (cond0 * cond1 * cond2) == 1:
money = 1
else:
# Money = 2?
cond0 = indirect_exchange[0] > cls.money_threshold
cond1 = direct_exchange[1] > cls.money_threshold
cond2 = direct_exchange[2] > cls.money_threshold
if (cond0 * cond1 * cond2) == 1:
money = 2
return money
@classmethod
def run(cls, direct_exchange, indirect_exchange, t_max):
money_time_line = np.zeros(t_max)
money = {0: 0, 1: 0, 2: 0, -1: 0}
interruptions = 0
for t in range(t_max):
money_t = cls._test_for_money_state(
direct_exchange=direct_exchange[t],
indirect_exchange=indirect_exchange[t])
money_time_line[t] = money_t
money[money_t] += 1
if t > 0:
cond0 = money_t == -1
cond1 = money_time_line[t-1] != -1
interruptions += cond0 * cond1
return MoneyAnalysis(
m0=money[0],
m1=money[1],
m2=money[2],
interruptions=interruptions)
def plot(data):
class X(enum.Enum):
alpha = enum.auto()
tau = enum.auto()
vision_area = enum.auto()
x = enum.auto()
x = {
X.alpha: [],
X.tau: [],
X.vision_area: [],
X.x: []
}
y = []
for d in data.data:
a = MoneyAnalyst.run(
t_max=data.parameters.t_max,
direct_exchange=d.direct_exchanges_proportions,
indirect_exchange=d.indirect_exchanges_proportions
)
x[X.vision_area].append(d.parameters.vision_area)
x[X.tau].append(d.parameters.tau)
x[X.alpha].append(d.parameters.alpha)
x[X.x].append(d.parameters.x0 + d.parameters.x1 + d.parameters.x2)
y.append(
a.m0 + a.m1 + a.m2
)
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(221)
ax.scatter(x[X.tau], y, c="black", alpha=0.4, s=15)
ax.set_ylabel("n monetary states")
ax.set_xlabel(r"$\tau$")
ax = fig.add_subplot(222)
ax.scatter(x[X.alpha], y, c="black", alpha=0.4, s=15)
ax.set_ylabel("n monetary states")
ax.set_xlabel(r"$\alpha$")
ax = fig.add_subplot(223)
ax.scatter(x[X.vision_area], y, c="black", alpha=0.4, s=15)
ax.set_ylabel("n monetary states")
ax.set_xlabel(r"vision area")
ax = fig.add_subplot(224)
ax.scatter(x[X.x], y, c="black", alpha=0.4, s=15)
ax.set_ylabel("n monetary states")
ax.set_xlabel(r"n agents")
ax.text(0.005, 0.005, data.file_name, transform=fig.transFigure,
fontsize='x-small', color='0.5')
plt.tight_layout()
file_path = "figures/{}/{}_summary.pdf".format(
data.file_name,
data.file_name)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
plt.savefig(file_path)
plt.close(fig)
| [
"[email protected]"
] | |
ede1ced08cc6a341318a155c9fb090799f49fafd | 7a54cb77f8a900335834291a452b57c24df6faff | /docs/src/LectureNotes/_build/jupyter_execute/chapter6.py | 9f94eb16969599dd00331b16cc90c82d6cf42a4a | [] | no_license | CompPhysics/InsightPhysicsImmersionWeek | 76e016b8637aa60be0f92405769c6fffd8556d60 | 3d7d58f30ef906f2c05b52edc507ab4e11ac34a7 | refs/heads/main | 2023-06-08T23:16:03.597940 | 2021-06-25T13:36:57 | 2021-06-25T13:36:57 | 374,204,873 | 2 | 2 | null | 2021-06-15T13:56:22 | 2021-06-05T20:18:26 | null | UTF-8 | Python | false | false | 108,524 | py | # Two-body problems
## Introduction and Definitions
Central forces are forces which are directed towards or away from a
reference point. A familiar example is the gravitational
force, with the motion of our Earth around the Sun as a classic case. The Sun, being
approximately six orders of magnitude heavier than the Earth, serves
as our origin. A force like the gravitational force is a function of the
relative distance $\boldsymbol{r}=\boldsymbol{r}_1-\boldsymbol{r}_2$ only, where
$\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ are the positions relative to a defined
origin for object one and object two, respectively.
These forces depend on the spatial degrees of freedom only (the
positions of the interacting objects/particles). As discussed earlier, from such forces we can infer
that the total internal energy, the total linear momentum and the total angular momentum are so-called constants of the motion, that is, they stay constant over time. We say that energy, linear and angular momentum are conserved.
With a scalar potential $V(\boldsymbol{r})$ we define the force as the gradient of the potential
$$
\boldsymbol{F}(\boldsymbol{r})=-\boldsymbol{\nabla}V(\boldsymbol{r}).
$$
In general these potentials depend only on the magnitude of the
relative position and we will write the potential as $V(r)$ where $r$
is defined as,
$$
r = |\boldsymbol{r}_1-\boldsymbol{r}_2|.
$$
In three dimensions our vectors are defined as (for a given object/particle $i$)
$$
\boldsymbol{r}_i = x_i\boldsymbol{e}_1+y_i\boldsymbol{e}_2+z_i\boldsymbol{e}_3,
$$
while in two dimensions we have
$$
\boldsymbol{r}_i = x_i\boldsymbol{e}_1+y_i\boldsymbol{e}_2.
$$
In two dimensions the radius $r$ is defined as
$$
r = |\boldsymbol{r}_1-\boldsymbol{r}_2|=\sqrt{(x_1-x_2)^2+(y_1-y_2)^2}.
$$
If we consider the gravitational potential involving two masses $1$ and $2$, we have
$$
V_{12}(r)=V(r)=-\frac{Gm_1m_2}{|\boldsymbol{r}_1-\boldsymbol{r}_2|}=-\frac{Gm_1m_2}{r}.
$$
Calculating the gradient of this potential we obtain the force
$$
\boldsymbol{F}(\boldsymbol{r})=-\frac{Gm_1m_2}{|\boldsymbol{r}_1-\boldsymbol{r}_2|^2}\hat{\boldsymbol{r}}_{12}=-\frac{Gm_1m_2}{r^2}\hat{\boldsymbol{r}},
$$
where we have the unit vector
$$
\hat{\boldsymbol{r}}=\hat{\boldsymbol{r}}_{12}=\frac{\boldsymbol{r}_2-\boldsymbol{r}_1}{|\boldsymbol{r}_1-\boldsymbol{r}_2|}.
$$
Here $G=6.67\times 10^{-11}$ Nm$^2$/kg$^2$, and $\boldsymbol{F}$ is the force
on $2$ due to $1$. By inspection, one can see that the force on $2$
due to $1$ and the force on $1$ due to $2$ are equal and opposite. The
net potential energy for a large number of masses would be
$$
V=\sum_{i<j}V_{ij}=\frac{1}{2}\sum_{i\ne j}V_{ij}.
$$
In general, the central forces that we will study can be written mathematically as
$$
\boldsymbol{F}(\boldsymbol{r})=f(r)\hat{r},
$$
where $f(r)$ is a scalar function. For the above gravitational force this scalar term is
$-Gm_1m_2/r^2$.
In general we will simply write this scalar function $f(r)=\alpha/r^2$ where $\alpha$ is a constant that can be either negative or positive. We will also see examples of other types of potentials in the examples below.
Besides general expressions for the potentials/forces, we will discuss
in detail different types of motion that arise, from circular to
elliptical, parabolic, or hyperbolic. By transforming to either polar
coordinates or spherical coordinates, we will be able to obtain
analytical solutions for the equations of motion and thereby obtain
new insights about the properties of a system. Where possible, we will
compare our analytical equations with numerical studies.
However, before we arrive at these lovely insights, we need to
introduce some mathematical manipulations and definitions.
## Center of Mass and Relative Coordinates
Thus far, we have considered the trajectory as if the force is
centered around a fixed point. For two bodies interacting only with
one another, both masses circulate around the center of mass. One
might think that solutions would become more complex when both
particles move, but we will see here that the problem can be reduced
to one with a single body moving according to a fixed force by
expressing the trajectories for $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ into the
center-of-mass coordinate $\boldsymbol{R}$ and the relative
coordinate $\boldsymbol{r}$. We define the center-of-mass (CoM) coordinate as
$$
\boldsymbol{R}\equiv\frac{m_1\boldsymbol{r}_1+m_2\boldsymbol{r}_2}{m_1+m_2},
$$
and the relative coordinate as
$$
\boldsymbol{r}\equiv\boldsymbol{r}_1-\boldsymbol{r}_2.
$$
We can then rewrite $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ in terms of the relative and CoM coordinates as
$$
\boldsymbol{r}_1=\boldsymbol{R}+\frac{m_2}{M}\boldsymbol{r},
$$
and
$$
\boldsymbol{r}_2=\boldsymbol{R}-\frac{m_1}{M}\boldsymbol{r}.
$$
### Conservation of total Linear Momentum
In our discussions on conservative forces we defined
the total linear momentum as
$$
\boldsymbol{P}=\sum_{i=1}^Nm_i\frac{d\boldsymbol{r}_i}{dt},
$$
where $N=2$ in our case. With the above definition of the center of mass position, we see that we can rewrite the total linear momentum as (multiplying the CoM coordinate with $M$)
$$
\boldsymbol{P}=M\frac{d\boldsymbol{R}}{dt}=M\dot{\boldsymbol{R}}.
$$
The net force acting on the system is given by the time derivative of the linear momentum (assuming mass is time independent)
and we have
$$
\boldsymbol{F}^{\mathrm{net}}=\dot{\boldsymbol{P}}=M\ddot{\boldsymbol{R}}.
$$
The net force acting on the system is given by the sum of the forces acting on the two bodies, that is we have
$$
\boldsymbol{F}^{\mathrm{net}}=\boldsymbol{F}_1+\boldsymbol{F}_2=\dot{\boldsymbol{P}}=M\ddot{\boldsymbol{R}}.
$$
In our case the forces are given by the internal forces only. The force acting on object $1$ is thus $\boldsymbol{F}_{12}$ and the one acting on object $2$ is $\boldsymbol{F}_{21}$. We have also defined that $\boldsymbol{F}_{12}=-\boldsymbol{F}_{21}$. This means that we have
$$
\boldsymbol{F}_1+\boldsymbol{F}_2=\boldsymbol{F}_{12}+\boldsymbol{F}_{21}=0=\dot{\boldsymbol{P}}=M\ddot{\boldsymbol{R}}.
$$
We could alternatively have written this as
$$
\ddot{\boldsymbol{R}}_{\rm cm}=\frac{1}{m_1+m_2}\left\{m_1\ddot{\boldsymbol{r}}_1+m_2\ddot{\boldsymbol{r}}_2\right\}=\frac{1}{m_1+m_2}\left\{\boldsymbol{F}_{12}+\boldsymbol{F}_{21}\right\}=0.
$$
This has the important consequence that the CoM velocity is a constant
of the motion. And since the total linear momentum is given by the
time-derivative of the CoM coordinate times the total mass
$M=m_1+m_2$, it means that linear momentum is also conserved.
Stated differently, the center-of-mass coordinate
$\boldsymbol{R}$ moves at a fixed velocity.
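We can illustrate this numerically. The following small code (an added sketch, not part of the original derivation, with arbitrarily chosen masses and initial conditions and $G$ scaled to one) integrates two gravitating bodies with a simple Euler-Cromer scheme and shows that the center-of-mass velocity does not change.
import numpy as np
# Two-body demo: the center-of-mass velocity is a constant of the motion.
# All parameter values below are arbitrary illustration values.
G = 1.0
m1, m2 = 1.0, 2.0
r1 = np.array([1.0, 0.0]); r2 = np.array([-0.5, 0.0])
v1 = np.array([0.0, 0.5]); v2 = np.array([0.0, -0.25])
dt = 0.001
Vcm_start = (m1*v1+m2*v2)/(m1+m2)
for step in range(10000):
    rrel = r1 - r2
    F12 = -G*m1*m2*rrel/np.linalg.norm(rrel)**3   # force on body 1 from body 2
    v1 += dt*F12/m1
    v2 -= dt*F12/m2                               # Newton's third law
    r1 += dt*v1
    r2 += dt*v2
Vcm_end = (m1*v1+m2*v2)/(m1+m2)
print(Vcm_start, Vcm_end)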
The conservation of the total linear momentum has another important consequence for our forces. If we
assume that our force depends only on the relative coordinate, it
means that the gradient of the potential with respect to the center of
mass position is zero, that is
$$
M\ddot{\boldsymbol{R}}=-\boldsymbol{\nabla}_{\boldsymbol{R}}V =0.
$$
If we now switch to the equation of motion for the relative coordinate, we have
$$
\ddot{\boldsymbol{r}}=\ddot{\boldsymbol{r}}_1-\ddot{\boldsymbol{r}}_2=\left(\frac{\boldsymbol{F}_{12}}{m_1}-\frac{\boldsymbol{F}_{21}}{m_2}\right)=\left(\frac{1}{m_1}+\frac{1}{m_2}\right)\boldsymbol{F}_{12},
$$
which we can rewrite in terms of the reduced mass
$$
\mu=\frac{m_1m_2}{m_1+m_2},
$$
as
$$
\mu \ddot{\boldsymbol{r}}=\boldsymbol{F}_{12}.
$$
This has a very important consequence for our coming analysis of the equations of motion for the two-body problem.
Since the acceleration for the CoM coordinate is zero, we can now
treat the trajectory as a one-body problem where the mass is given by
the reduced mass $\mu$ plus a second trivial problem for the center of
mass. The reduced mass is especially convenient when one is
considering forces that depend only on the relative coordinate (like the Gravitational force or the electrostatic force between two charges) because then for say the gravitational force we have
$$
\mu \ddot{\boldsymbol{r}}=-\frac{Gm_1m_2}{r^2}\hat{\boldsymbol{r}}=-\frac{GM\mu}{r^2}\hat{\boldsymbol{r}},
$$
where we have defined $M= m_1+m_2$. It means that the acceleration of the relative coordinate is
$$
\ddot{\boldsymbol{r}}=-\frac{GM}{r^2}\hat{\boldsymbol{r}},
$$
and we have that for the gravitational problem, the reduced mass then falls out and the
trajectory depends only on the total mass $M$.
The standard strategy is to transform into the center of mass frame,
then treat the problem as one of a single particle of mass $\mu$
undergoing a force $\boldsymbol{F}_{12}$.
Before we proceed to our definition of the CoM frame we need to set up the expression for the energy in terms of the relative and CoM coordinates.
### Kinetic and total Energy
The kinetic energy and momenta also have analogues in center-of-mass
coordinates.
We have defined the total linear momentum as
$$
\boldsymbol{P}=\sum_{i=1}^Nm_i\frac{d\boldsymbol{r}_i}{dt}=M\dot{\boldsymbol{R}}.
$$
For the relative momentum $\boldsymbol{q}$, we have that the time derivative of $\boldsymbol{r}$ is
$$
\dot{\boldsymbol{r}} =\dot{\boldsymbol{r}}_1-\dot{\boldsymbol{r}}_2,
$$
We know also that the momenta $\boldsymbol{p}_1=m_1\dot{\boldsymbol{r}}_1$ and
$\boldsymbol{p}_2=m_2\dot{\boldsymbol{r}}_2$. Using these expressions we can rewrite
$$
\dot{\boldsymbol{r}} =\frac{\boldsymbol{p}_1}{m_1}-\frac{\boldsymbol{p}_2}{m_2},
$$
which gives
$$
\dot{\boldsymbol{r}} =\frac{m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2}{m_1m_2},
$$
and dividing both sides with $M$ we have
$$
\frac{m_1m_2}{M}\dot{\boldsymbol{r}} =\frac{m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2}{M}.
$$
Introducing the reduced mass $\mu=m_1m_2/M$ we have finally
$$
\mu\dot{\boldsymbol{r}} =\frac{m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2}{M}.
$$
And $\mu\dot{\boldsymbol{r}}$ defines the relative momentum $\boldsymbol{q}=\mu\dot{\boldsymbol{r}}$.
With these definitions we can then calculate the kinetic energy in terms of the relative and CoM coordinates.
We have that
$$
K=\frac{p_1^2}{2m_1}+\frac{p_2^2}{2m_2},
$$
and with $\boldsymbol{p}_1=m_1\dot{\boldsymbol{r}}_1$ and $\boldsymbol{p}_2=m_2\dot{\boldsymbol{r}}_2$ and using
$$
\dot{\boldsymbol{r}}_1=\dot{\boldsymbol{R}}+\frac{m_2}{M}\dot{\boldsymbol{r}},
$$
and
$$
\dot{\boldsymbol{r}}_2=\dot{\boldsymbol{R}}-\frac{m_1}{M}\dot{\boldsymbol{r}},
$$
we obtain after squaring the expressions for $\dot{\boldsymbol{r}}_1$ and $\dot{\boldsymbol{r}}_2$
$$
K=\frac{(m_1+m_2)\dot{\boldsymbol{R}}^2}{2}+\frac{(m_1+m_2)m_1m_2\dot{\boldsymbol{r}}^2}{2M^2},
$$
which we simplify to
$$
K=\frac{\boldsymbol{P}^2}{2M}+\frac{\boldsymbol{q}^2}{2\mu}.
$$
Below we will define a reference frame, the so-called CoM-frame, where
$\boldsymbol{R}=0$. This is going to simplify our equations further.
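As a small numerical sanity check (an addition to the text, with arbitrarily chosen velocities), we can verify that the kinetic energy computed from the individual momenta agrees with the expression in terms of $\boldsymbol{P}$ and $\boldsymbol{q}$.
import numpy as np
m1, m2 = 1.0, 3.0
M = m1+m2
mu = m1*m2/M
v1 = np.array([0.3, -0.2]); v2 = np.array([-0.1, 0.4])
K_direct = 0.5*m1*v1@v1+0.5*m2*v2@v2
P = m1*v1+m2*v2        # total linear momentum
q = mu*(v1-v2)         # relative momentum
K_com = P@P/(2*M)+q@q/(2*mu)
print(K_direct, K_com) # the two values agree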
### Conservation of Angular Momentum
The angular momentum (the total one) is the sum of the individual angular momenta. In our case we have two bodies only, meaning that our angular momentum is defined as
$$
\boldsymbol{L} = \boldsymbol{r}_1 \times \boldsymbol{p}_1+\boldsymbol{r}_2 \times \boldsymbol{p}_2,
$$
and using that $m_1\dot{\boldsymbol{r}}_1=\boldsymbol{p}_1$ and $m_2\dot{\boldsymbol{r}}_2=\boldsymbol{p}_2$ we have
$$
\boldsymbol{L} = m_1\boldsymbol{r}_1 \times \dot{\boldsymbol{r}}_1+m_2\boldsymbol{r}_2 \times \dot{\boldsymbol{r}}_2.
$$
We define now the CoM-Frame where we set $\boldsymbol{R}=0$. This means that the equations
for $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ in terms of the relative motion simplify and we have
$$
\boldsymbol{r}_1=\frac{m_2}{M}\boldsymbol{r},
$$
and
$$
\boldsymbol{r}_2=-\frac{m_1}{M}\boldsymbol{r}.
$$
resulting in
$$
\boldsymbol{L} = m_1 \frac{m_2}{M}\boldsymbol{r}\times\frac{m_2}{M}\dot{\boldsymbol{r}} +m_2 \frac{m_1}{M}\boldsymbol{r} \times \frac{m_1}{M}\dot{\boldsymbol{r}}.
$$
We see that we can rewrite this equation as
$$
\boldsymbol{L}=\boldsymbol{r}\times \mu\dot{\boldsymbol{r}}=\mu\boldsymbol{r}\times \dot{\boldsymbol{r}}.
$$
If we now use a central force, Newton's second law for the relative coordinate reads
$$
\mu\ddot{\boldsymbol{r}}=\boldsymbol{F}(\boldsymbol{r})=f(r)\hat{\boldsymbol{r}},
$$
and taking the time derivative of the angular momentum we have
$$
\frac{d\boldsymbol{L}}{dt}=\mu\dot{\boldsymbol{r}}\times \dot{\boldsymbol{r}}+\boldsymbol{r}\times \mu\ddot{\boldsymbol{r}}=\boldsymbol{r}\times f(r)\hat{\boldsymbol{r}},
$$
which equals zero since $\dot{\boldsymbol{r}}\times\dot{\boldsymbol{r}}=0$ and the cross product of
$\boldsymbol{r}$ with the parallel vector $\hat{\boldsymbol{r}}$ vanishes. Angular momentum is thus conserved and, in
addition to the total linear momentum being conserved, we know that
energy is also conserved for forces that depend on the relative
coordinate only.
Since angular momentum is conserved, we can idealize
the motion of our two objects as two bodies moving in a plane spanned by the
relative coordinate and the relative momentum. The angular
momentum is perpendicular to the plane spanned by these two vectors.
It means also, since $\boldsymbol{L}$ is conserved, that we can reduce our
problem to a motion in say the $xy$-plane. What we have done then is to
reduce a two-body problem in three-dimensions with six degrees of
freedom (the six coordinates of the two objects) to a problem defined
entirely by the relative coordinate in two dimensions. We have thus
moved from a problem with six degrees of freedom to one with two degrees of freedom only.
Since we deal with central forces that depend only on the
relative coordinate, we will show below that, by transforming to polar
coordinates, we can find an analytical solution to
the equation of motion
$$
\mu\ddot{\boldsymbol{r}}=\boldsymbol{F}(\boldsymbol{r})=f(r)\hat{\boldsymbol{r}}.
$$
Note the boldfaced symbols for the relative position $\boldsymbol{r}$. Our vector $\boldsymbol{r}$ is defined as
$$
\boldsymbol{r}=x\boldsymbol{e}_1+y\boldsymbol{e}_2
$$
and introducing polar coordinates $r\in[0,\infty)$ and $\phi\in [0,2\pi]$ and the transformation
$$
r=\sqrt{x^2+y^2},
$$
and $x=r\cos\phi$ and $y=r\sin\phi$, we will rewrite our equation of motion by transforming from Cartesian to polar coordinates. By so doing, we end up with two differential equations which can be solved analytically, depending on the form of the potential.
What follows now is a rewrite of these equations and the introduction of Kepler's laws as well.
## Deriving Elliptical Orbits
Kepler's laws state that a gravitational orbit should be an ellipse
with the source of the gravitational field at one focus. Deriving this
is surprisingly messy. To do this, we first use angular momentum
conservation to transform the equations of motion so that it is in
terms of $r$ and $\phi$ instead of $r$ and $t$. The overall strategy
is to
1. Find equations of motion for $r$ and $t$ with no angle ($\phi$) mentioned, i.e. $d^2r/dt^2=\cdots$. Angular momentum conservation will be used, and the equation will involve the angular momentum $L$.
2. Use angular momentum conservation to find an expression for $\dot{\phi}$ in terms of $r$.
3. Use the chain rule to convert the equation of motion for $r$, an expression involving $r,\dot{r}$ and $\ddot{r}$, to one involving $r,dr/d\phi$ and $d^2r/d\phi^2$. This is quite complicated because the expressions will also involve a substitution $u=1/r$ so that one finds an expression in terms of $u$ and $\phi$.
4. Once $u(\phi)$ is found, you need to show that this can be converted to the familiar form for an ellipse.
We will now rewrite the above equation of motion (note the boldfaced vector $\boldsymbol{r}$)
$$
\mu \ddot{\boldsymbol{r}}=\boldsymbol{F}(\boldsymbol{r}),
$$
in polar coordinates.
What follows here is a repeated application of the chain rule for derivatives.
We start with the derivative of $r$ as a function of time in a Cartesian basis
<!-- Equation labels as ordinary links -->
<div id="eq:radialeqofmotion"></div>
$$
\begin{eqnarray}
\label{eq:radialeqofmotion} \tag{1}
\frac{d}{dt}r^2&=&\frac{d}{dt}(x^2+y^2)=2x\dot{x}+2y\dot{y}=2r\dot{r},\\
\nonumber
\dot{r}&=&\frac{x}{r}\dot{x}+\frac{y}{r}\dot{y},\\
\nonumber
\ddot{r}&=&\frac{x}{r}\ddot{x}+\frac{y}{r}\ddot{y}
+\frac{\dot{x}^2+\dot{y}^2}{r}
-\frac{\dot{r}^2}{r}.
\end{eqnarray}
$$
Note that there are no vectors involved here.
Recognizing that the numerator of the third term is the velocity squared, and that it can be written in polar coordinates,
<!-- Equation labels as ordinary links -->
<div id="_auto1"></div>
$$
\begin{equation}
v^2=\dot{x}^2+\dot{y}^2=\dot{r}^2+r^2\dot{\phi}^2,
\label{_auto1} \tag{2}
\end{equation}
$$
one can write $\ddot{r}$ as
<!-- Equation labels as ordinary links -->
<div id="eq:radialeqofmotion2"></div>
$$
\begin{equation}
\label{eq:radialeqofmotion2} \tag{3}
\ddot{r}=\frac{F_x\cos\phi+F_y\sin\phi}{m}+\frac{\dot{r}^2+r^2\dot{\phi}^2}{r}-\frac{\dot{r}^2}{r}
\end{equation}
$$
$$
\nonumber
=\frac{F}{m}+\frac{r^2\dot{\phi}^2}{r}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto2"></div>
$$
\begin{equation}
\label{_auto2} \tag{4}
\end{equation}
$$
or
$$
m\ddot{r}=F+\frac{L^2}{mr^3}.
$$
This derivation used the fact that the force was radial,
$F=F_r=F_x\cos\phi+F_y\sin\phi$, and that angular momentum is
$L=mrv_{\phi}=mr^2\dot{\phi}$. The term $L^2/mr^3=mv^2/r$ behaves
like an additional force. Sometimes this is referred to as a
centrifugal force, but it is not a force. Instead, it is the
consequence of considering the motion in a rotating (and therefore
accelerating) frame.
Now, we switch to the particular case of an attractive inverse square
force, $F=-\alpha/r^2$, and show that the trajectory, $r(\phi)$, is
an ellipse. To do this we transform derivatives w.r.t. time to
derivatives w.r.t. $\phi$ using the chain rule combined with angular
momentum conservation, $\dot{\phi}=L/mr^2$.
<!-- Equation labels as ordinary links -->
<div id="eq:rtotheta"></div>
$$
\begin{eqnarray}
\label{eq:rtotheta} \tag{5}
\dot{r}&=&\frac{dr}{d\phi}\dot{\phi}=\frac{dr}{d\phi}\frac{L}{mr^2},\\
\nonumber
\ddot{r}&=&\frac{d^2r}{d\phi^2}\dot{\phi}^2
+\frac{dr}{d\phi}\left(\frac{d}{dr}\frac{L}{mr^2}\right)\dot{r}\\
\nonumber
&=&\frac{d^2r}{d\phi^2}\left(\frac{L}{mr^2}\right)^2
-2\frac{dr}{d\phi}\frac{L}{mr^3}\dot{r}\\
\nonumber
&=&\frac{d^2r}{d\phi^2}\left(\frac{L}{mr^2}\right)^2
-\frac{2}{r}\left(\frac{dr}{d\phi}\right)^2\left(\frac{L}{mr^2}\right)^2
\end{eqnarray}
$$
Equating the two expressions for $\ddot{r}$ in Eqs. ([3](#eq:radialeqofmotion2)) and ([5](#eq:rtotheta)) eliminates all the derivatives w.r.t. time, and provides a differential equation with only derivatives w.r.t. $\phi$,
<!-- Equation labels as ordinary links -->
<div id="eq:rdotdot"></div>
$$
\begin{equation}
\label{eq:rdotdot} \tag{6}
\frac{d^2r}{d\phi^2}\left(\frac{L}{mr^2}\right)^2
-\frac{2}{r}\left(\frac{dr}{d\phi}\right)^2\left(\frac{L}{mr^2}\right)^2
=\frac{F}{m}+\frac{L^2}{m^2r^3},
\end{equation}
$$
that when solved yields the trajectory, i.e. $r(\phi)$. Up to this
point the expressions work for any radial force, not just forces that
fall as $1/r^2$.
The trick to simplifying this differential equation for the inverse
square problems is to make a substitution, $u\equiv 1/r$, and rewrite
the differential equation for $u(\phi)$.
$$
\begin{eqnarray}
r&=&1/u,\\
\nonumber
\frac{dr}{d\phi}&=&-\frac{1}{u^2}\frac{du}{d\phi},\\
\nonumber
\frac{d^2r}{d\phi^2}&=&\frac{2}{u^3}\left(\frac{du}{d\phi}\right)^2-\frac{1}{u^2}\frac{d^2u}{d\phi^2}.
\end{eqnarray}
$$
Plugging these expressions into Eq. ([6](#eq:rdotdot)) gives an
expression in terms of $u$, $du/d\phi$, and $d^2u/d\phi^2$. After
some tedious algebra,
<!-- Equation labels as ordinary links -->
<div id="_auto3"></div>
$$
\begin{equation}
\frac{d^2u}{d\phi^2}=-u-\frac{F m}{L^2u^2}.
\label{_auto3} \tag{7}
\end{equation}
$$
For the attractive inverse square law force, $F=-\alpha u^2$,
<!-- Equation labels as ordinary links -->
<div id="_auto4"></div>
$$
\begin{equation}
\frac{d^2u}{d\phi^2}=-u+\frac{m\alpha}{L^2}.
\label{_auto4} \tag{8}
\end{equation}
$$
The solution has two arbitrary constants, $A$ and $\phi_0$,
<!-- Equation labels as ordinary links -->
<div id="eq:Ctrajectory"></div>
$$
\begin{eqnarray}
\label{eq:Ctrajectory} \tag{9}
u&=&\frac{m\alpha}{L^2}+A\cos(\phi-\phi_0),\\
\nonumber
r&=&\frac{1}{(m\alpha/L^2)+A\cos(\phi-\phi_0)}.
\end{eqnarray}
$$
The radius will be at a minimum when $\phi=\phi_0$ and at a
maximum when $\phi=\phi_0+\pi$. The constant $A$ is related to the
eccentricity of the orbit. When $A=0$ the radius is a constant
$r=L^2/(m\alpha)$, and the motion is circular. If one solved the
expression $mv^2/r=\alpha/r^2$ for a circular orbit, using the
substitution $v=L/(mr)$, one would reproduce the expression
$r=L^2/(m\alpha)$.
The form describing the elliptical trajectory in
Eq. ([9](#eq:Ctrajectory)) can be identified as an ellipse with one
focus being the center of the ellipse by considering the definition of
an ellipse as being the points such that the sum of the two distances
between the two foci are a constant. Making that distance $2D$, the
distance between the two foci as $2a$, and putting one focus at the
origin,
$$
\begin{eqnarray}
2D&=&r+\sqrt{(r\cos\phi-2a)^2+r^2\sin^2\phi},\\
\nonumber
4D^2+r^2-4Dr&=&r^2+4a^2-4ar\cos\phi,\\
\nonumber
r&=&\frac{D^2-a^2}{D-a\cos\phi}=\frac{1}{D/(D^2-a^2)-a\cos\phi/(D^2-a^2)}.
\end{eqnarray}
$$
By inspection, this is the same form as Eq. ([9](#eq:Ctrajectory)) with $D/(D^2-a^2)=m\alpha/L^2$ and $a/(D^2-a^2)=A$.
Let us remind ourselves about what an ellipse is before we proceed.
%matplotlib inline
import numpy as np
from matplotlib import pyplot as plt
from math import pi
u=1. #x-position of the center
v=0.5 #y-position of the center
a=2. #radius on the x-axis
b=1.5 #radius on the y-axis
t = np.linspace(0, 2*pi, 100)
plt.plot( u+a*np.cos(t) , v+b*np.sin(t) )
plt.grid(color='lightgray',linestyle='--')
plt.show()
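We can also plot the trajectory of Eq. ([9](#eq:Ctrajectory)) directly. The short snippet below (added here for illustration, with $m=\alpha=L=1$ and $\phi_0=0$) converts $r(\phi)$ to Cartesian coordinates for a few values of $A$; $A=0$ gives a circle, while $0<A<m\alpha/L^2$ gives ellipses with one focus at the origin.
import numpy as np
import matplotlib.pyplot as plt
phivals = np.linspace(0, 2*np.pi, 400)
for A in [0.0, 0.3, 0.6]:
    rvals = 1.0/(1.0+A*np.cos(phivals))   # Eq. (9) with m*alpha/L^2 = 1
    plt.plot(rvals*np.cos(phivals), rvals*np.sin(phivals), label='A=%g' % A)
plt.scatter([0.0],[0.0], marker='*', color='black')  # the focus at the origin
plt.legend()
plt.axis('equal')
plt.show()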
## Effective or Centrifugal Potential
The total energy of a particle is
$$
\begin{eqnarray}
E&=&V(r)+\frac{1}{2}mv_\phi^2+\frac{1}{2}m\dot{r}^2\\
\nonumber
&=&V(r)+\frac{1}{2}mr^2\dot{\phi}^2+\frac{1}{2}m\dot{r}^2\\
\nonumber
&=&V(r)+\frac{L^2}{2mr^2}+\frac{1}{2}m\dot{r}^2.
\end{eqnarray}
$$
The second term then contributes to the energy like an additional
repulsive potential. The term is sometimes referred to as the
"centrifugal" potential, even though it is actually the kinetic energy
of the angular motion. Combined with $V(r)$, it is sometimes referred
to as the "effective" potential,
$$
\begin{eqnarray}
V_{\rm eff}(r)&=&V(r)+\frac{L^2}{2mr^2}.
\end{eqnarray}
$$
Note that if one treats the effective potential like a real potential, one would expect to be able to generate an effective force,
$$
\begin{eqnarray}
F_{\rm eff}&=&-\frac{d}{dr}V(r) -\frac{d}{dr}\frac{L^2}{2mr^2}\\
\nonumber
&=&F(r)+\frac{L^2}{mr^3}=F(r)+m\frac{v_\perp^2}{r},
\end{eqnarray}
$$
which indeed matches the form for $m\ddot{r}$ in Eq. ([3](#eq:radialeqofmotion2)), which included the **centrifugal** force.
The following code plots this effective potential for a simple choice of parameters, with a standard gravitational potential $-\alpha/r$. Here we have chosen $L=m=\alpha=1$.
# Common imports
import numpy as np
from math import *
import matplotlib.pyplot as plt
Deltax = 0.01
#set up arrays
xinitial = 0.3
xfinal = 5.0
alpha = 1.0 # strength of the gravitational potential
m = 1.0 # mass, you can change these
AngMom = 1.0 # The angular momentum
n = ceil((xfinal-xinitial)/Deltax)
x = np.zeros(n)
for i in range(n):
x[i] = xinitial+i*Deltax
V = np.zeros(n)
V = -alpha/x+0.5*AngMom*AngMom/(m*x*x)
# Plot potential
fig, ax = plt.subplots()
ax.set_xlabel('r[m]')
ax.set_ylabel('V[J]')
ax.plot(x, V)
fig.tight_layout()
plt.show()
### Gravitational force example
Using the above parameters, we can now study the evolution of the system using for example the velocity Verlet method.
This is done in the code here for an initial radius equal to the minimum of the potential well. We see then that the radius stays the same and the motion corresponds to a circle (the radius is constant in time).
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os
# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
if not os.path.exists(PROJECT_ROOT_DIR):
os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
os.makedirs(DATA_ID)
def image_path(fig_id):
return os.path.join(FIGURE_ID, fig_id)
def data_path(dat_id):
return os.path.join(DATA_ID, dat_id)
def save_fig(fig_id):
plt.savefig(image_path(fig_id) + ".png", format='png')
# Simple Gravitational Force -alpha/r
DeltaT = 0.01
#set up arrays
tfinal = 100.0
n = ceil(tfinal/DeltaT)
# set up arrays for t, v and r
t = np.zeros(n)
v = np.zeros(n)
r = np.zeros(n)
# Constants of the model, setting all variables to one for simplicity
alpha = 1.0
AngMom = 1.0 # The angular momentum
m = 1.0 # scale mass to one
c1 = AngMom*AngMom/(m*m)
c2 = AngMom*AngMom/m
rmin = (AngMom*AngMom/m/alpha)
# Initial conditions
r0 = rmin
v0 = 0.0
r[0] = r0
v[0] = v0
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up acceleration
a = -alpha/(r[i]**2)+c1/(r[i]**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -alpha/(r[i+1]**2)+c1/(r[i+1]**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
# Plot position as function of time
fig, ax = plt.subplots(2,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius')
ax[0].plot(t,r)
ax[1].set_xlabel('time')
ax[1].set_ylabel('Velocity')
ax[1].plot(t,v)
save_fig("RadialGVV")
plt.show()
Changing the value of the initial position to a value where the energy is positive, leads to an increasing radius with time, a so-called unbound orbit. Choosing on the other hand an initial radius that corresponds to a negative energy and different from the minimum value leads to a radius that oscillates back and forth between two values.
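As a quick illustration of the bound, oscillating case (a small added variation of the cell above, reusing its arrays and constants), we can rerun the same loop with an initial radius away from the potential minimum.
# Rerun with an initial radius away from the minimum; the energy is still
# negative, so the radius oscillates between two turning points.
r[0] = 1.5*rmin
v[0] = 0.0
for i in range(n-1):
    a = -alpha/(r[i]**2)+c1/(r[i]**3)
    r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
    anew = -alpha/(r[i+1]**2)+c1/(r[i+1]**3)
    v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
plt.plot(t, r)
plt.xlabel('time')
plt.ylabel('radius')
plt.show()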
### Harmonic Oscillator in two dimensions
Consider a particle of mass $m$ in a 2-dimensional harmonic oscillator with potential
$$
V=\frac{1}{2}kr^2=\frac{1}{2}k(x^2+y^2).
$$
If the orbit has angular momentum $L$, we can find the radius and angular velocity of the circular orbit as well as the angular frequency of small radial perturbations.
We consider the effective potential. The radius of a circular orbit is at the minimum of the potential (where the effective force is zero).
The potential is plotted here with the parameters $k=m=1.0$ and $L=1.0$.
# Common imports
import numpy as np
from math import *
import matplotlib.pyplot as plt
Deltax = 0.01
#set up arrays
xinitial = 0.5
xfinal = 3.0
k = 1.0 # spring constant
m = 1.0 # mass, you can change these
AngMom = 1.0 # The angular momentum
n = ceil((xfinal-xinitial)/Deltax)
x = np.zeros(n)
for i in range(n):
x[i] = xinitial+i*Deltax
V = np.zeros(n)
V = 0.5*k*x*x+0.5*AngMom*AngMom/(m*x*x)
# Plot potential
fig, ax = plt.subplots()
ax.set_xlabel('r[m]')
ax.set_ylabel('V[J]')
ax.plot(x, V)
fig.tight_layout()
plt.show()
$$
\begin{eqnarray*}
V_{\rm eff}&=&\frac{1}{2}kr^2+\frac{L^2}{2mr^2}
\end{eqnarray*}
$$
The effective potential looks like that of a harmonic oscillator for
large $r$, but for small $r$, the centrifugal potential repels the
particle from the origin. The combination of the two potentials has a
minimum at some radius $r_{\rm min}$.
$$
\begin{eqnarray*}
0&=&kr_{\rm min}-\frac{L^2}{mr_{\rm min}^3},\\
r_{\rm min}&=&\left(\frac{L^2}{mk}\right)^{1/4},\\
\dot{\phi}&=&\frac{L}{mr_{\rm min}^2}=\sqrt{k/m}.
\end{eqnarray*}
$$
For particles at $r_{\rm min}$ with $\dot{r}=0$, the particle does not
accelerate and $r$ stays constant, i.e. a circular orbit. The radius
of the circular orbit can be adjusted by changing the angular momentum
$L$.
For the above parameters this minimum is at $r_{\rm min}=1$.
Now consider small vibrations about $r_{\rm min}$. The effective spring constant is the curvature of the effective potential.
$$
\begin{eqnarray*}
k_{\rm eff}&=&\left.\frac{d^2}{dr^2}V_{\rm eff}(r)\right|_{r=r_{\rm min}}=k+\frac{3L^2}{mr_{\rm min}^4}\\
&=&4k,\\
\omega&=&\sqrt{k_{\rm eff}/m}=2\sqrt{k/m}=2\dot{\phi}.
\end{eqnarray*}
$$
Here, the second step used the circular-orbit result derived
above. Because the radius oscillates with twice the angular frequency,
the orbit has two places where $r$ reaches a minimum in one
cycle. This differs from the inverse-square force where there is one
minimum in an orbit. One can show that the orbit for the harmonic
oscillator is also elliptical, but in this case the center of the
potential is at the center of the ellipse, not at one of the foci.
The solution is also simple to write down exactly in Cartesian coordinates. The $x$ and $y$ equations of motion separate,
$$
\begin{eqnarray*}
\ddot{x}&=&-\frac{k}{m}x,\\
\ddot{y}&=&-\frac{k}{m}y.
\end{eqnarray*}
$$
So the general solution can be expressed as
$$
\begin{eqnarray*}
x&=&A\cos\omega_0 t+B\sin\omega_0 t,\\
y&=&C\cos\omega_0 t+D\sin\omega_0 t.
\end{eqnarray*}
$$
The code here finds the solution for $x$ and $y$ using the code we developed in homework 5 and 6 and the midterm. Note that this code is tailored to run in Cartesian coordinates. There is thus no angular momentum dependent term.
DeltaT = 0.01
#set up arrays
tfinal = 10.0
n = ceil(tfinal/DeltaT)
# set up arrays
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
radius = np.zeros(n)
# Constants of the model
k = 1.0 # spring constant
m = 1.0 # mass, you can change these
omega02 = k/m # squared angular frequency omega_0^2
AngMom = 1.0 # The angular momentum
rmin = (AngMom*AngMom/k/m)**0.25
# Initial conditions as compact 2-dimensional arrays
x0 = rmin-0.5; y0= sqrt(rmin*rmin-x0*x0)
r0 = np.array([x0,y0])
v0 = np.array([0.0,0.0])
r[0] = r0
v[0] = v0
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up the acceleration
a = -r[i]*omega02
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -r[i+1]*omega02
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
# Plot position as function of time
radius = np.sqrt(r[:,0]**2+r[:,1]**2)
fig, ax = plt.subplots(3,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius squared')
ax[0].plot(t,r[:,0]**2+r[:,1]**2)
ax[1].set_xlabel('time')
ax[1].set_ylabel('x position')
ax[1].plot(t,r[:,0])
ax[2].set_xlabel('time')
ax[2].set_ylabel('y position')
ax[2].plot(t,r[:,1])
fig.tight_layout()
save_fig("2DimHOVV")
plt.show()
With some work using double angle formulas, one can calculate
$$
\begin{eqnarray*}
r^2&=&x^2+y^2\\
\nonumber
&=&(A^2+C^2)\cos^2(\omega_0t)+(B^2+D^2)\sin^2(\omega_0t)+2(AB+CD)\cos(\omega_0t)\sin(\omega_0t)\\
\nonumber
&=&\alpha+\beta\cos 2\omega_0 t+\gamma\sin 2\omega_0 t,\\
\alpha&=&\frac{A^2+B^2+C^2+D^2}{2},~~\beta=\frac{A^2-B^2+C^2-D^2}{2},~~\gamma=AB+CD,\\
r^2&=&\alpha+(\beta^2+\gamma^2)^{1/2}\cos(2\omega_0 t-\delta),~~~\delta=\arctan(\gamma/\beta),
\end{eqnarray*}
$$
and see that radius oscillates with frequency $2\omega_0$. The
factor of two comes because the oscillation $x=A\cos\omega_0t$ has two
maxima for $x^2$, one at $t=0$ and one a half period later.
The following code shows first how we can solve this problem using the radial degrees of freedom only.
DeltaT = 0.01
#set up arrays
tfinal = 10.0
n = ceil(tfinal/DeltaT)
# set up arrays for t, v and r
t = np.zeros(n)
v = np.zeros(n)
r = np.zeros(n)
E = np.zeros(n)
# Constants of the model
AngMom = 1.0 # The angular momentum
m = 1.0
k = 1.0
omega02 = k/m
c1 = AngMom*AngMom/(m*m)
c2 = AngMom*AngMom/m
rmin = (AngMom*AngMom/k/m)**0.25
# Initial conditions
r0 = rmin
v0 = 0.0
r[0] = r0
v[0] = v0
E[0] = 0.5*m*v0*v0+0.5*k*r0*r0+0.5*c2/(r0*r0)
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up acceleration
a = -r[i]*omega02+c1/(r[i]**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -r[i+1]*omega02+c1/(r[i+1]**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
E[i+1] = 0.5*m*v[i+1]*v[i+1]+0.5*k*r[i+1]*r[i+1]+0.5*c2/(r[i+1]*r[i+1])
# Plot position as function of time
fig, ax = plt.subplots(2,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius')
ax[0].plot(t,r)
ax[1].set_xlabel('time')
ax[1].set_ylabel('Energy')
ax[1].plot(t,E)
save_fig("RadialHOVV")
plt.show()
## Stability of Orbits
The effective force can be extracted from the effective potential, $V_{\rm eff}$. Beginning from the equations of motion, Eq. ([1](#eq:radialeqofmotion)), for $r$,
$$
\begin{eqnarray}
m\ddot{r}&=&F+\frac{L^2}{mr^3}\\
\nonumber
&=&F_{\rm eff}\\
\nonumber
&=&-\partial_rV_{\rm eff},\\
\nonumber
F_{\rm eff}&=&-\partial_r\left[V(r)+(L^2/2mr^2)\right].
\end{eqnarray}
$$
For a circular orbit, the radius must be fixed as a function of time,
so one must be at a maximum or a minimum of the effective
potential. However, if one is at a maximum of the effective potential
the radius will be unstable. For the attractive Coulomb force the
effective potential will be dominated by the $-\alpha/r$ term for
large $r$ because the centrifugal part falls off more quickly, $\sim
1/r^2$. At low $r$ the centrifugal piece wins and the effective
potential is repulsive. Thus, the potential must have a minimum
somewhere with negative potential. The circular orbits are then stable
to perturbation.
The effective potential is sketched for two cases, a $1/r$ attractive
potential and a $1/r^3$ attractive potential. The $1/r$ case has a
stable minimum, whereas the circular orbit in the $1/r^3$ case is
unstable.
If one considers a potential that falls as $1/r^3$, the situation is
reversed and the point where $\partial_rV_{\rm eff}$ vanishes will be a local
maximum rather than a local minimum, as the code example below illustrates.
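The following code (added in place of the missing figure, with $L=m=\alpha=1$ as illustrative values) plots the effective potential for the two cases.
import numpy as np
import matplotlib.pyplot as plt
rvals = np.linspace(0.5, 5.0, 200)
Veff1 = -1.0/rvals+0.5/rvals**2      # attractive 1/r potential plus centrifugal term
Veff3 = -1.0/rvals**3+0.5/rvals**2   # attractive 1/r^3 potential plus centrifugal term
plt.plot(rvals, Veff1, label=r'$V(r)=-\alpha/r$')
plt.plot(rvals, Veff3, label=r'$V(r)=-\alpha/r^3$')
plt.xlabel('r')
plt.ylabel(r'$V_{\rm eff}$')
plt.legend()
plt.show()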
In the $1/r^3$ case, the repulsive centrifugal piece dominates at large $r$ and the attractive
$1/r^3$ piece wins out at small $r$. The circular orbit is then at a
maximum of the effective potential and the orbits are unstable. It is
then clear that for potentials that fall as $r^n$, one must have
$n>-2$ for the orbits to be stable.
Consider a potential $V(r)=\beta r$. For a particle of mass $m$ with
angular momentum $L$, find the angular frequency of a circular
orbit. Then find the angular frequency for small radial perturbations.
For the circular orbit you search for the position $r_{\rm min}$ where the effective potential is minimized,
$$
\begin{eqnarray*}
\partial_r\left\{\beta r+\frac{L^2}{2mr^2}\right\}&=&0,\\
\beta&=&\frac{L^2}{mr_{\rm min}^3},\\
r_{\rm min}&=&\left(\frac{L^2}{\beta m}\right)^{1/3},\\
\dot{\phi}&=&\frac{L}{mr_{\rm min}^2}=\frac{\beta^{2/3}}{(mL)^{1/3}}
\end{eqnarray*}
$$
Now, we can find the angular frequency of small perturbations about the circular orbit. To do this we find the effective spring constant for the effective potential,
$$
\begin{eqnarray*}
k_{\rm eff}&=&\partial_r^2 \left.V_{\rm eff}\right|_{r_{\rm min}}\\
&=&\frac{3L^2}{mr_{\rm min}^4},\\
\omega&=&\sqrt{\frac{k_{\rm eff}}{m}}\\
&=&\frac{\beta^{2/3}}{(mL)^{1/3}}\sqrt{3}.
\end{eqnarray*}
$$
If the two frequencies, $\dot{\phi}$ and $\omega$, differ by an
integer factor, the orbit's trajectory will repeat itself each time
around. This is the case for the inverse-square force,
$\omega=\dot{\phi}$, and for the harmonic oscillator,
$\omega=2\dot{\phi}$. In this case, $\omega=\sqrt{3}\dot{\phi}$,
and the angles at which the maxima and minima occur change with each
orbit.
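We can check the ratio $\omega/\dot{\phi}=\sqrt{3}$ numerically. The sketch below (an addition to the text, with the illustrative values $\beta=m=L=1$) integrates the radial equation of motion $m\ddot{r}=-\beta+L^2/(mr^3)$ for a small perturbation around $r_{\rm min}$ and extracts the radial frequency from successive outer turning points.
import numpy as np
beta = 1.0; m = 1.0; L = 1.0
rmin = (L*L/(beta*m))**(1.0/3.0)
dt = 0.0001
rr, vr = 1.01*rmin, 0.0
turningpoints = []
for step in range(200000):
    a = (-beta+L*L/(m*rr**3))/m
    # velocity Verlet step for the radial coordinate
    rnew = rr + dt*vr+0.5*dt*dt*a
    anew = (-beta+L*L/(m*rnew**3))/m
    vnew = vr + 0.5*dt*(a+anew)
    if vr > 0 and vnew <= 0:    # passed an outer turning point
        turningpoints.append((step+1)*dt)
    rr, vr = rnew, vnew
omega = 2*np.pi/(turningpoints[1]-turningpoints[0])
phidot = L/(m*rmin**2)
print(omega/phidot, np.sqrt(3))  # the ratio is close to sqrt(3)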
### Code example with gravitational force
The code example here is meant to illustrate how we can make a plot of the final orbit. We solve the equations in polar coordinates (the example here uses the minimum of the potential as initial value) and then we transform back to cartesian coordinates and plot $x$ versus $y$. We see that we get a perfect circle when we place ourselves at the minimum of the potential energy, as expected.
# Simple Gravitational Force -alpha/r
DeltaT = 0.01
#set up arrays
tfinal = 8.0
n = ceil(tfinal/DeltaT)
# set up arrays for t, v and r
t = np.zeros(n)
v = np.zeros(n)
r = np.zeros(n)
phi = np.zeros(n)
x = np.zeros(n)
y = np.zeros(n)
# Constants of the model, setting all variables to one for simplicity
alpha = 1.0
AngMom = 1.0 # The angular momentum
m = 1.0 # scale mass to one
c1 = AngMom*AngMom/(m*m)
c2 = AngMom*AngMom/m
rmin = (AngMom*AngMom/m/alpha)
# Initial conditions, place yourself at the potential min
r0 = rmin
v0 = 0.0 # starts at rest
r[0] = r0
v[0] = v0
phi[0] = 0.0
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up acceleration
a = -alpha/(r[i]**2)+c1/(r[i]**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -alpha/(r[i+1]**2)+c1/(r[i+1]**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
    # integrate dphi/dt = L/(m r^2); for this circular orbit r stays at r0
    phi[i+1] = phi[i] + DeltaT*AngMom/(m*r[i+1]**2)
# Find cartesian coordinates for easy plot
x = r*np.cos(phi)
y = r*np.sin(phi)
fig, ax = plt.subplots(3,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius')
ax[0].plot(t,r)
ax[1].set_xlabel('time')
ax[1].set_ylabel('Angle $\cos{\phi}$')
ax[1].plot(t,np.cos(phi))
ax[2].set_ylabel('y')
ax[2].set_xlabel('x')
ax[2].plot(x,y)
save_fig("Phasespace")
plt.show()
Try to change the initial value for $r$ and see what kind of orbits you get.
In order to test different energies, it can be useful to look at the plot of the effective potential discussed above.
However, for orbits different from a circle the above code would need modifications in order to allow us to display, say, an ellipse. For the latter, it is much easier to run our code in Cartesian coordinates, as done here. In this code we also test energy conservation and see that it is conserved to numerical precision. The code here is a simple extension of the code we developed for homework 4.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
DeltaT = 0.01
#set up arrays
tfinal = 10.0
n = ceil(tfinal/DeltaT)
# set up arrays
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
E = np.zeros(n)
# Constants of the model
m = 1.0 # mass, you can change these
alpha = 1.0
# Initial conditions as compact 2-dimensional arrays
x0 = 0.5; y0= 0.
r0 = np.array([x0,y0])
v0 = np.array([0.0,1.0])
r[0] = r0
v[0] = v0
rabs = sqrt(sum(r[0]*r[0]))
E[0] = 0.5*m*(v[0,0]**2+v[0,1]**2)-alpha/rabs
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up the acceleration
rabs = sqrt(sum(r[i]*r[i]))
a = -alpha*r[i]/(rabs**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
rabs = sqrt(sum(r[i+1]*r[i+1]))
anew = -alpha*r[i+1]/(rabs**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
E[i+1] = 0.5*m*(v[i+1,0]**2+v[i+1,1]**2)-alpha/rabs
t[i+1] = t[i] + DeltaT
# Plot position as function of time
fig, ax = plt.subplots(3,1)
ax[0].set_ylabel('y')
ax[0].set_xlabel('x')
ax[0].plot(r[:,0],r[:,1])
ax[1].set_xlabel('time')
ax[1].set_ylabel('y position')
ax[1].plot(t,r[:,0])
ax[2].set_xlabel('time')
ax[2].set_ylabel('y position')
ax[2].plot(t,r[:,1])
fig.tight_layout()
save_fig("2DimGravity")
plt.show()
print(E)
## Exercises
### The Earth-Sun System
We start with the simpler case of the Earth-Sun system in two dimensions only. The gravitational force $F_G$ on the Earth from the Sun is
$$
\boldsymbol{F}_G=-\frac{GM_{\odot}M_E}{r^3}\boldsymbol{r},
$$
where $G$ is the gravitational constant,
$$
M_E=6\times 10^{24}\mathrm{kg},
$$
the mass of Earth,
$$
M_{\odot}=2\times 10^{30}\mathrm{kg},
$$
the mass of the Sun and
$$
r=1.5\times 10^{11}\mathrm{m},
$$
is the distance between Earth and the Sun. The latter defines what we call an astronomical unit **AU**.
From Newton's second law we have then for the $x$ direction
$$
\frac{d^2x}{dt^2}=\frac{F_{x}}{M_E},
$$
and
$$
\frac{d^2y}{dt^2}=\frac{F_{y}}{M_E},
$$
for the $y$ direction.
Here we will use that $x=r\cos{(\theta)}$, $y=r\sin{(\theta)}$ and
$$
r = \sqrt{x^2+y^2}.
$$
We can rewrite these equations
$$
F_{x}=-\frac{GM_{\odot}M_E}{r^2}\cos{(\theta)}=-\frac{GM_{\odot}M_E}{r^3}x,
$$
and
$$
F_{y}=-\frac{GM_{\odot}M_E}{r^2}\sin{(\theta)}=-\frac{GM_{\odot}M_E}{r^3}y,
$$
as four first-order coupled differential equations
$$
\frac{dv_x}{dt}=-\frac{GM_{\odot}}{r^3}x,
$$
and
$$
\frac{dx}{dt}=v_x,
$$
and
$$
\frac{dv_y}{dt}=-\frac{GM_{\odot}}{r^3}y,
$$
and
$$
\frac{dy}{dt}=v_y.
$$
These four coupled differential equations can be turned into dimensionless form, or we can introduce astronomical units with $1$ AU $= 1.5\times 10^{11}$ m.
Using the equations from circular motion (with $r =1\mathrm{AU}$)
$$
\frac{M_E v^2}{r} = F = \frac{GM_{\odot}M_E}{r^2},
$$
we have
$$
GM_{\odot}=v^2r,
$$
and using that the velocity of Earth (assuming circular motion) is
$v = 2\pi r/\mathrm{yr}=2\pi\mathrm{AU}/\mathrm{yr}$, we have
$$
GM_{\odot}= v^2r = 4\pi^2 \frac{(\mathrm{AU})^3}{\mathrm{yr}^2}.
$$
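As a small sketch, we can check this value numerically with the (rounded) constants quoted above; the numbers for $G$ and the length of a year are standard values, not given in the text:

from math import pi
G = 6.674e-11    # gravitational constant in m^3 kg^-1 s^-2
Msun = 2e30      # mass of the Sun in kg
AU = 1.5e11      # astronomical unit in m
yr = 3.156e7     # one year in seconds
GMsun = G*Msun/(AU**3/yr**2)   # G*Msun expressed in AU^3/yr^2
print(GMsun, 4*pi*pi)          # both are close to 39.4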
The four coupled differential equations can then be discretized using Euler's method as (with step length $h$)
$$
v_{x,i+1}=v_{x,i}-h\frac{4\pi^2}{r_i^3}x_i,
$$
and
$$
x_{i+1}=x_i+hv_{x,i},
$$
and
$$
v_{y,i+1}=v_{y,i}-h\frac{4\pi^2}{r_i^3}y_i,
$$
and
$$
y_{i+1}=y_i+hv_{y,i}.
$$
The code here implements Euler's method for the Earth-Sun system using a more compact way of representing the vectors. Alternatively, you could have spelled out all the variables $v_x$, $v_y$, $x$ and $y$ as one-dimensional arrays.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os
# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
if not os.path.exists(PROJECT_ROOT_DIR):
os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
os.makedirs(DATA_ID)
def image_path(fig_id):
return os.path.join(FIGURE_ID, fig_id)
def data_path(dat_id):
return os.path.join(DATA_ID, dat_id)
def save_fig(fig_id):
plt.savefig(image_path(fig_id) + ".png", format='png')
DeltaT = 0.01
#set up arrays
tfinal = 10 # in years
n = ceil(tfinal/DeltaT)
# set up arrays for t, a, v, and x
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# Initial conditions as compact 2-dimensional arrays
r0 = np.array([1.0,0.0])
v0 = np.array([0.0,2*pi])
r[0] = r0
v[0] = v0
Fourpi2 = 4*pi*pi
# Start integrating using Euler's method
for i in range(n-1):
# Set up the acceleration
# Here you could have defined your own function for this
rabs = sqrt(sum(r[i]*r[i]))
a = -Fourpi2*r[i]/(rabs**3)
# update velocity, time and position using Euler's forward method
v[i+1] = v[i] + DeltaT*a
r[i+1] = r[i] + DeltaT*v[i]
t[i+1] = t[i] + DeltaT
# Plot position as function of time
fig, ax = plt.subplots()
#ax.set_xlim(0, tfinal)
ax.set_xlabel('x[AU]')
ax.set_ylabel('y[AU]')
ax.plot(r[:,0], r[:,1])
fig.tight_layout()
save_fig("EarthSunEuler")
plt.show()
We notice here that Euler's method does not give a stable orbit for, say, $\Delta t =0.01$. This
means that we cannot trust Euler's method for this type of problem. Euler's method does not conserve energy. It is an
example of an integrator which is not
[symplectic](https://en.wikipedia.org/wiki/Symplectic_integrator).
We therefore present two methods which, with simple changes, allow us
to avoid these pitfalls. The simplest possible extension is the
so-called Euler-Cromer method. The changes we need to make to our
code are indeed marginal. We simply need to replace
r[i+1] = r[i] + DeltaT*v[i]
in the above code with the velocity at the new time $t_{i+1}$
r[i+1] = r[i] + DeltaT*v[i+1]
With this simple change we get stable orbits. Below we derive the
Euler-Cromer method as well as one of the most utilized algorithms for
solving the above type of problems, the so-called Velocity-Verlet
method.
Let us repeat Euler's method.
We have a differential equation
<!-- Equation labels as ordinary links -->
<div id="_auto5"></div>
$$
\begin{equation}
y'(t_i)=f(t_i,y_i)
\label{_auto5} \tag{10}
\end{equation}
$$
and if we truncate at the first derivative, we have from the Taylor expansion
<!-- Equation labels as ordinary links -->
<div id="eq:euler"></div>
$$
\begin{equation}
y_{i+1}=y(t_i) + (\Delta t) f(t_i,y_i) + O(\Delta t^2), \label{eq:euler} \tag{11}
\end{equation}
$$
which when complemented with $t_{i+1}=t_i+\Delta t$ forms
the algorithm for the well-known Euler method.
Note that at every step we make an approximation error
of the order of $O(\Delta t^2)$, however the total error is the sum over all
steps $N=(b-a)/(\Delta t)$ for $t\in [a,b]$, yielding thus a global error which goes like
$NO(\Delta t^2)\approx O(\Delta t)$.
To make Euler's method more precise we can obviously
decrease $\Delta t$ (increase $N$), but this can lead to loss of numerical precision.
Euler's method is not recommended for precision calculations,
although it is handy to use in order to get a first
view of how a solution may look.
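We can illustrate this global error scaling with a minimal sketch for the simple test equation $y'=y$, $y(0)=1$, whose exact solution is $e^t$ (this test equation is our own illustrative choice, not part of the orbit problem):

from math import exp
def euler_error(dt, tfinal=1.0):
    # integrate y' = y with Euler's method and return the error at tfinal
    n = int(tfinal/dt)
    y = 1.0
    for _ in range(n):
        y += dt*y
    return abs(y - exp(tfinal))
for dt in [0.1, 0.05, 0.025]:
    print(dt, euler_error(dt))   # the error is roughly halved when dt is halved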
Euler's method is asymmetric in time, since it uses information about the derivative at the beginning
of the time interval. This means that we evaluate the position $y_1$ using the velocity
$v_0$. A simple variation is to determine $y_{n+1}$ using the velocity
$v_{n+1}$, that is (in a slightly more generalized form)
<!-- Equation labels as ordinary links -->
<div id="_auto6"></div>
$$
\begin{equation}
y_{n+1}=y_{n}+ (\Delta t) v_{n+1}+O(\Delta t^2)
\label{_auto6} \tag{12}
\end{equation}
$$
and
<!-- Equation labels as ordinary links -->
<div id="_auto7"></div>
$$
\begin{equation}
v_{n+1}=v_{n}+(\Delta t) a_{n}+O(\Delta t^2).
\label{_auto7} \tag{13}
\end{equation}
$$
The acceleration $a_n$ is a function $a_n(y_n, v_n, t_n)$ and needs to be evaluated
as well. This is the Euler-Cromer method. It is easy to change the above code and see that with the same
time step we get stable results.
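As a minimal sketch, the complete Euler-Cromer loop for the Earth-Sun problem (same scaled units and initial conditions as in the code above) reads:

import numpy as np
from math import ceil, pi, sqrt
DeltaT = 0.01; tfinal = 10.0
n = ceil(tfinal/DeltaT)
t = np.zeros(n); v = np.zeros((n,2)); r = np.zeros((n,2))
r[0] = np.array([1.0, 0.0]); v[0] = np.array([0.0, 2*pi])
Fourpi2 = 4*pi*pi
for i in range(n-1):
    rabs = sqrt(sum(r[i]*r[i]))
    a = -Fourpi2*r[i]/(rabs**3)
    v[i+1] = v[i] + DeltaT*a        # update the velocity first
    r[i+1] = r[i] + DeltaT*v[i+1]   # then the position, with the new velocity
    t[i+1] = t[i] + DeltaT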
Let us stay with $x$ (position) and $v$ (velocity) as the quantities we are interested in.
We have the Taylor expansion for the position given by
$$
x_{i+1} = x_i+(\Delta t)v_i+\frac{(\Delta t)^2}{2}a_i+O((\Delta t)^3).
$$
The corresponding expansion for the velocity is
$$
v_{i+1} = v_i+(\Delta t)a_i+\frac{(\Delta t)^2}{2}v^{(2)}_i+O((\Delta t)^3).
$$
Via Newton's second law we have normally an analytical expression for the derivative of the velocity, namely
$$
a_i= \frac{d^2 x}{dt^2}\vert_{i}=\frac{d v}{dt}\vert_{i}= \frac{F(x_i,v_i,t_i)}{m}.
$$
If we add to this the corresponding expansion for the derivative of the velocity
$$
v^{(1)}_{i+1} = a_{i+1}= a_i+(\Delta t)v^{(2)}_i+O((\Delta t)^2),
$$
and retain only terms up to the second derivative of the velocity since our error goes as $O(h^3)$, we have
$$
(\Delta t)v^{(2)}_i\approx a_{i+1}-a_i.
$$
We can then rewrite the Taylor expansion for the velocity as
$$
v_{i+1} = v_i+\frac{(\Delta t)}{2}\left( a_{i+1}+a_{i}\right)+O((\Delta t)^3).
$$
Our final equations for the position and the velocity become then
$$
x_{i+1} = x_i+(\Delta t)v_i+\frac{(\Delta t)^2}{2}a_{i}+O((\Delta t)^3),
$$
and
$$
v_{i+1} = v_i+\frac{(\Delta t)}{2}\left(a_{i+1}+a_{i}\right)+O((\Delta t)^3).
$$
Note well that the term $a_{i+1}$ depends on the position $x_{i+1}$. This means that you need to calculate
the position at the updated time $t_{i+1}$ before computing the next velocity. Note also that the derivative of the velocity at the time
$t_i$ used in the updating of the position can be reused in the calculation of the velocity update as well.
We can now easily add the Verlet method to our original code as
DeltaT = 0.01
#set up arrays
tfinal = 10
n = ceil(tfinal/DeltaT)
# set up arrays for t, a, v, and x
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# Initial conditions as compact 2-dimensional arrays
r0 = np.array([1.0,0.0])
v0 = np.array([0.0,2*pi])
r[0] = r0
v[0] = v0
Fourpi2 = 4*pi*pi
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
    # Set up the acceleration; note that we need the norm of the position vector
# Here you could have defined your own function for this
rabs = sqrt(sum(r[i]*r[i]))
a = -Fourpi2*r[i]/(rabs**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
rabs = sqrt(sum(r[i+1]*r[i+1]))
anew = -4*(pi**2)*r[i+1]/(rabs**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
# Plot position as function of time
fig, ax = plt.subplots()
ax.set_xlabel('x[AU]')
ax.set_ylabel('y[AU]')
ax.plot(r[:,0], r[:,1])
fig.tight_layout()
save_fig("EarthSunVV")
plt.show()
You can easily generalize the calculation of the forces by defining a function
which takes in as input the various variables. We leave this as a challenge to you.
Running the above code for various time steps we see that the Velocity-Verlet is fully stable for various time steps.
We can also play around with different initial conditions in order to find the escape velocity from an orbit around the sun with distance one astronomical unit, 1 AU. The theoretical value for the escape velocity, is given by
$$
v = \sqrt{\frac{8\pi^2}{r}},
$$
and with $r=1$ AU, this means that the escape velocity is $2\pi\sqrt{2}$ AU/yr. To obtain this we required that the kinetic energy of Earth equals the potential energy given by the gravitational force.
Setting
$$
\frac{1}{2}M_{\mathrm{Earth}}v^2=\frac{GM_{\odot}M_{\mathrm{Earth}}}{r},
$$
and with $GM_{\odot}=4\pi^2$ we obtain the above relation for the velocity. Setting an initial velocity of, say, $9$ AU/yr in the code below yields a planet (Earth) which escapes a stable orbit around the Sun.
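As a quick numerical check (a minimal sketch) that $9$ AU/yr indeed exceeds the escape speed:

from math import pi, sqrt
vesc = 2*pi*sqrt(2)   # escape speed in AU/yr with GM_sun = 4*pi^2 and r = 1 AU
print(vesc)           # about 8.886, so an initial speed of 9 AU/yr is above it

The full simulation with this initial velocity follows.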
DeltaT = 0.01
#set up arrays
tfinal = 100
n = ceil(tfinal/DeltaT)
# set up arrays for t, a, v, and x
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# Initial conditions as compact 2-dimensional arrays
r0 = np.array([1.0,0.0])
# setting initial velocity larger than escape velocity
v0 = np.array([0.0,9.0])
r[0] = r0
v[0] = v0
Fourpi2 = 4*pi*pi
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
    # Set up the acceleration; note that we need the norm of the position vector
# Here you could have defined your own function for this
rabs = sqrt(sum(r[i]*r[i]))
a = -Fourpi2*r[i]/(rabs**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
rabs = sqrt(sum(r[i+1]*r[i+1]))
anew = -4*(pi**2)*r[i+1]/(rabs**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
# Plot position as function of time
fig, ax = plt.subplots()
ax.set_xlabel('x[AU]')
ax.set_ylabel('y[AU]')
ax.plot(r[:,0], r[:,1])
fig.tight_layout()
save_fig("EscapeEarthSunVV")
plt.show()
### Testing Energy conservation
The code here implements Euler's method for the Earth-Sun system using
a more compact way of representing the vectors. Alternatively, you
could have spelled out all the variables $v_x$, $v_y$, $x$ and $y$ as
one-dimensional arrays. It tests conservation of potential and
kinetic energy as functions of time, in addition to the total energy,
again as function of time
**Note**: in all codes we have used scaled equations so that the gravitational constant times the mass of the Sun is given by $4\pi^2$ and the mass of the Earth is set to **one** in the calculations of kinetic and potential energies. Else, we would get very large numbers.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os
# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
if not os.path.exists(PROJECT_ROOT_DIR):
os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
os.makedirs(DATA_ID)
def image_path(fig_id):
return os.path.join(FIGURE_ID, fig_id)
def data_path(dat_id):
return os.path.join(DATA_ID, dat_id)
def save_fig(fig_id):
plt.savefig(image_path(fig_id) + ".png", format='png')
# Initial values, time step, positions and velocites
DeltaT = 0.0001
#set up arrays
tfinal = 100 # in years
n = ceil(tfinal/DeltaT)
# set up arrays for t, a, v, and x
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# setting up the kinetic, potential and total energy, note only functions of time
EKinetic = np.zeros(n)
EPotential = np.zeros(n)
ETotal = np.zeros(n)
# Initial conditions as compact 2-dimensional arrays
r0 = np.array([1.0,0.0])
v0 = np.array([0.0,2*pi])
r[0] = r0
v[0] = v0
Fourpi2 = 4*pi*pi
# Setting up variables for the calculation of energies
# distance that defines rabs in potential energy
rabs0 = sqrt(sum(r[0]*r[0]))
# Initial kinetic energy. Note that we skip the mass of the Earth here, that is MassEarth=1 in all codes
EKinetic[0] = 0.5*sum(v0*v0)
# Initial potential energy (note negative sign, why?)
EPotential[0] = -4*pi*pi/rabs0
# Initial total energy
ETotal[0] = EPotential[0]+EKinetic[0]
# Start integrating using Euler's method
for i in range(n-1):
# Set up the acceleration
# Here you could have defined your own function for this
rabs = sqrt(sum(r[i]*r[i]))
a = -Fourpi2*r[i]/(rabs**3)
# update Energies, velocity, time and position using Euler's forward method
v[i+1] = v[i] + DeltaT*a
r[i+1] = r[i] + DeltaT*v[i]
t[i+1] = t[i] + DeltaT
EKinetic[i+1] = 0.5*sum(v[i+1]*v[i+1])
EPotential[i+1] = -4*pi*pi/sqrt(sum(r[i+1]*r[i+1]))
ETotal[i+1] = EPotential[i+1]+EKinetic[i+1]
# Plot energies as functions of time
fig, axs = plt.subplots(3, 1)
axs[0].plot(t, EKinetic)
axs[0].set_xlim(0, tfinal)
axs[0].set_ylabel('Kinetic energy')
axs[1].plot(t, EPotential)
axs[1].set_ylabel('Potential Energy')
axs[2].plot(t, ETotal)
axs[2].set_xlabel('Time [yr]')
axs[2].set_ylabel('Total Energy')
fig.tight_layout()
save_fig("EarthSunEuler")
plt.show()
We see very clearly that Euler's method does not conserve energy!! Try to reduce the time step $\Delta t$. What do you see?
With the Euler-Cromer method, the only thing we need is to update the
position at a time $t+1$ with the update velocity from the same
time. Thus, the change in the code is extremely simple, and **energy is
suddenly conserved**. Note that the error runs like $O(\Delta t)$ and
this is why we see the larger oscillations. But within this
oscillating energy envelope, we see that the energies swing between a
max and a min value and never exceed these values.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os
# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
if not os.path.exists(PROJECT_ROOT_DIR):
os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
os.makedirs(DATA_ID)
def image_path(fig_id):
return os.path.join(FIGURE_ID, fig_id)
def data_path(dat_id):
return os.path.join(DATA_ID, dat_id)
def save_fig(fig_id):
plt.savefig(image_path(fig_id) + ".png", format='png')
# Initial values, time step, positions and velocites
DeltaT = 0.0001
#set up arrays
tfinal = 100 # in years
n = ceil(tfinal/DeltaT)
# set up arrays for t, a, v, and x
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# setting up the kinetic, potential and total energy, note only functions of time
EKinetic = np.zeros(n)
EPotential = np.zeros(n)
ETotal = np.zeros(n)
# Initial conditions as compact 2-dimensional arrays
r0 = np.array([1.0,0.0])
v0 = np.array([0.0,2*pi])
r[0] = r0
v[0] = v0
Fourpi2 = 4*pi*pi
# Setting up variables for the calculation of energies
# distance that defines rabs in potential energy
rabs0 = sqrt(sum(r[0]*r[0]))
# Initial kinetic energy. Note that we skip the mass of the Earth here, that is MassEarth=1 in all codes
EKinetic[0] = 0.5*sum(v0*v0)
# Initial potential energy
EPotential[0] = -4*pi*pi/rabs0
# Initial total energy
ETotal[0] = EPotential[0]+EKinetic[0]
# Start integrating using Euler's method
for i in range(n-1):
# Set up the acceleration
# Here you could have defined your own function for this
rabs = sqrt(sum(r[i]*r[i]))
a = -Fourpi2*r[i]/(rabs**3)
# update velocity, time and position using Euler's forward method
v[i+1] = v[i] + DeltaT*a
# Only change when we add the Euler-Cromer method
r[i+1] = r[i] + DeltaT*v[i+1]
t[i+1] = t[i] + DeltaT
EKinetic[i+1] = 0.5*sum(v[i+1]*v[i+1])
EPotential[i+1] = -4*pi*pi/sqrt(sum(r[i+1]*r[i+1]))
ETotal[i+1] = EPotential[i+1]+EKinetic[i+1]
# Plot energies as functions of time
fig, axs = plt.subplots(3, 1)
axs[0].plot(t, EKinetic)
axs[0].set_xlim(0, tfinal)
axs[0].set_ylabel('Kinetic energy')
axs[1].plot(t, EPotential)
axs[1].set_ylabel('Potential Energy')
axs[2].plot(t, ETotal)
axs[2].set_xlabel('Time [yr]')
axs[2].set_ylabel('Total Energy')
fig.tight_layout()
save_fig("EarthSunEulerCromer")
plt.show()
### Adding the velocity Verlet method
Our final equations for the position and the velocity become then
$$
x_{i+1} = x_i+(\Delta t)v_i+\frac{(\Delta t)^2}{2}a_{i}+O((\Delta t)^3),
$$
and
$$
v_{i+1} = v_i+\frac{(\Delta t)}{2}\left(a_{i+1}+a_{i}\right)+O((\Delta t)^3).
$$
Note well that the term $a_{i+1}$ depends on the position $x_{i+1}$. This means that you need to calculate
the position at the updated time $t_{i+1}$ before computing the next velocity. Note also that the derivative of the velocity at the time
$t_i$ used in the updating of the position can be reused in the calculation of the velocity update as well.
We can now easily add the Verlet method to our original code as
DeltaT = 0.001
#set up arrays
tfinal = 100
n = ceil(tfinal/DeltaT)
# set up arrays for t, a, v, and x
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# Initial conditions as compact 2-dimensional arrays
r0 = np.array([1.0,0.0])
v0 = np.array([0.0,2*pi])
r[0] = r0
v[0] = v0
Fourpi2 = 4*pi*pi
# setting up the kinetic, potential and total energy, note only functions of time
EKinetic = np.zeros(n)
EPotential = np.zeros(n)
ETotal = np.zeros(n)
# Setting up variables for the calculation of energies
# distance that defines rabs in potential energy
rabs0 = sqrt(sum(r[0]*r[0]))
# Initial kinetic energy. Note that we skip the mass of the Earth here, that is MassEarth=1 in all codes
EKinetic[0] = 0.5*sum(v0*v0)
# Initial potential energy
EPotential[0] = -4*pi*pi/rabs0
# Initial total energy
ETotal[0] = EPotential[0]+EKinetic[0]
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
    # Set up the acceleration; note that we need the norm of the position vector
# Here you could have defined your own function for this
rabs = sqrt(sum(r[i]*r[i]))
a = -Fourpi2*r[i]/(rabs**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
rabs = sqrt(sum(r[i+1]*r[i+1]))
anew = -4*(pi**2)*r[i+1]/(rabs**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
EKinetic[i+1] = 0.5*sum(v[i+1]*v[i+1])
EPotential[i+1] = -4*pi*pi/sqrt(sum(r[i+1]*r[i+1]))
ETotal[i+1] = EPotential[i+1]+EKinetic[i+1]
# Plot energies as functions of time
fig, axs = plt.subplots(3, 1)
axs[0].plot(t, EKinetic)
axs[0].set_xlim(0, tfinal)
axs[0].set_ylabel('Kinetic energy')
axs[1].plot(t, EPotential)
axs[1].set_ylabel('Potential Energy')
axs[2].plot(t, ETotal)
axs[2].set_xlabel('Time [yr]')
axs[2].set_ylabel('Total Energy')
fig.tight_layout()
save_fig("EarthSunVelocityVerlet")
plt.show()
And we see that, due to the smaller truncation error, energy conservation is improved as a function of time.
Try out different time steps $\Delta t$ and see if the results improve or worsen.
### Exercise: Center-of-Mass and Relative Coordinates and Reference Frames
We define the two-body center-of-mass coordinate and relative coordinate by expressing the trajectories for
$\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ into the center-of-mass coordinate
$\boldsymbol{R}_{\rm cm}$
$$
\boldsymbol{R}_{\rm cm}\equiv\frac{m_1\boldsymbol{r}_1+m_2\boldsymbol{r}_2}{m_1+m_2},
$$
and the relative coordinate
$$
\boldsymbol{r}\equiv\boldsymbol{r}_1-\boldsymbol{r_2}.
$$
Here, we assume the two particles interact only with one another, so $\boldsymbol{F}_{12}=-\boldsymbol{F}_{21}$ (where $\boldsymbol{F}_{ij}$ is the force on $i$ due to $j$).
* 2a (5pt) Show that the equations of motion then become $\ddot{\boldsymbol{R}}_{\rm cm}=0$ and $\mu\ddot{\boldsymbol{r}}=\boldsymbol{F}_{12}$, with the reduced mass $\mu=m_1m_2/(m_1+m_2)$.
The first expression simply states that the center of mass coordinate $\boldsymbol{R}_{\rm cm}$ moves at a fixed velocity. The second expression can be rewritten in terms of the reduced mass $\mu$.
Let us first start with some basic definitions. We have the center of mass coordinate $\boldsymbol{R}$ defined as (for two particles)
$$
\boldsymbol{R}=\frac{m_1\boldsymbol{r}_1+m_2\boldsymbol{r}_2}{M},
$$
where $m_1$ and $m_2$ are the masses of the two objects and $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ their respective positions defined according to a chosen origin. Here $M=m_1+m_2$ is the total mass.
The relative position is defined as
$$
\boldsymbol{r} =\boldsymbol{r}_1-\boldsymbol{r}_2,
$$
and we then define $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ in terms of the relative and center of mass positions as
$$
\boldsymbol{r}_1=\boldsymbol{R}+\frac{m_2}{M}\boldsymbol{r},
$$
and
$$
\boldsymbol{r}_2=\boldsymbol{R}-\frac{m_1}{M}\boldsymbol{r},
$$
The total linear momentum is then defined as
$$
\boldsymbol{P}=\sum_{i=1}^Nm_i\frac{d\boldsymbol{r}_i}{dt},
$$
where $N=2$ in our case. With the above definition of the center of mass position, we see that we can rewrite the total linear momentum as (multiplying the center of mass position with $M$)
$$
\boldsymbol{P}=M\frac{d\boldsymbol{R}}{dt}=M\dot{\boldsymbol{R}}.
$$
This result is also an answer to a part of exercise 2b, see below.
The net force acting on the system is given by the time derivative of the linear momentum (assuming mass is time independent)
and we have
$$
\boldsymbol{F}^{\mathrm{net}}=\dot{\boldsymbol{P}}=M\ddot{\boldsymbol{R}}.
$$
The net force acting on the system is given by the sum of the forces acting on the two object, that is we have
$$
\boldsymbol{F}^{\mathrm{net}}=\boldsymbol{F}_1+\boldsymbol{F}_2=\dot{\boldsymbol{P}}=M\ddot{\boldsymbol{R}}.
$$
In our case the forces are given by the internal forces only. The force acting on object $1$ is thus $\boldsymbol{F}_{12}$ and the one acting on object $2$ is $\boldsymbol{F}_{21}$. We have also defined that $\boldsymbol{F}_{12}=-\boldsymbol{F}_{21}$. This means that we have
$$
\boldsymbol{F}_1+\boldsymbol{F}_2=\boldsymbol{F}_{12}+\boldsymbol{F}_{21}=0=\dot{\boldsymbol{P}}=M\ddot{\boldsymbol{R}},
$$
which is what we wanted to show. The center of mass velocity is thus a constant of the motion. We could also define the so-called center of mass reference frame where we simply set $\boldsymbol{R}=0$.
This has also another important consequence for our forces. If we assume that our force depends only on the positions, it means that the gradient of the potential with respect to the center of mass position is zero, that is
$$
M\ddot{\boldsymbol{R}}=-\boldsymbol{\nabla}_{\boldsymbol{R}}V = 0.
$$
An alternative way is
$$
\begin{eqnarray}
\ddot{\boldsymbol{R}}_{\rm cm}&=&\frac{1}{m_1+m_2}\left\{m_1\ddot{\boldsymbol{r}}_1+m_2\ddot{\boldsymbol{r}}_2\right\}\\
\nonumber
&=&\frac{1}{m_1+m_2}\left\{\boldsymbol{F}_{12}+\boldsymbol{F}_{21}\right\}=0.\\
\ddot{\boldsymbol{r}}&=&\ddot{\boldsymbol{r}}_1-\ddot{\boldsymbol{r}}_2=\left(\frac{\boldsymbol{F}_{12}}{m_1}-\frac{\boldsymbol{F}_{21}}{m_2}\right)\\
\nonumber
&=&\left(\frac{1}{m_1}+\frac{1}{m_2}\right)\boldsymbol{F}_{12}.
\end{eqnarray}
$$
The first expression simply states that the center of mass coordinate
$\boldsymbol{R}_{\rm cm}$ moves at a fixed velocity. The second expression
can be rewritten in terms of the reduced mass $\mu$.
$$
\begin{eqnarray}
\mu \ddot{\boldsymbol{r}}&=&\boldsymbol{F}_{12},\\
\frac{1}{\mu}&=&\frac{1}{m_1}+\frac{1}{m_2},~~~~\mu=\frac{m_1m_2}{m_1+m_2}.
\end{eqnarray}
$$
Thus, one can treat the trajectory as a one-body problem where the
reduced mass is $\mu$, and a second trivial problem for the center of
mass. The reduced mass is especially convenient when one is
considering gravitational problems, as we have seen during the lectures of weeks 11-13.
* 2b (5pt) Show that the linear momenta for the center-of-mass $\boldsymbol{P}$ motion and the relative motion $\boldsymbol{q}$ are given by $\boldsymbol{P}=M\dot{\boldsymbol{R}}_{\rm cm}$ with $M=m_1+m_2$ and $\boldsymbol{q}=\mu\dot{\boldsymbol{r}}$. The linear momentum of the relative motion is defined $\boldsymbol{q} = (m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2)/(m_1+m_2)$.
In 2a we showed, as an intermediate step that the total linear momentum is given by
$$
\boldsymbol{P}=\sum_{i=1}^Nm_i\frac{d\boldsymbol{r}_i}{dt}=M\dot{\boldsymbol{R}}.
$$
For the relative momentum $\boldsymbol{q}$, we have that the time derivative of $\boldsymbol{r}$ is
$$
\dot{\boldsymbol{r}} =\dot{\boldsymbol{r}}_1-\dot{\boldsymbol{r}}_2,
$$
We use also that the momenta are $\boldsymbol{p}_1=m_1\dot{\boldsymbol{r}}_1$ and
$\boldsymbol{p}_2=m_2\dot{\boldsymbol{r}}_2$. Using these expressions we can rewrite
$$
\dot{\boldsymbol{r}} =\frac{\boldsymbol{p}_1}{m_1}-\frac{\boldsymbol{p}_2}{m_2},
$$
which we can rewrite as
$$
\dot{\boldsymbol{r}} =\frac{m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2}{m_1m_2},
$$
and dividing both sides with $M$ we have
$$
\frac{m_1m_2}{M}\dot{\boldsymbol{r}} =\frac{m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2}{M}.
$$
Introducing the reduced mass $\mu=m_1m_2/M$ we have finally
$$
\mu\dot{\boldsymbol{r}} =\frac{m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2}{M}.
$$
And $\mu\dot{\boldsymbol{r}}$ defines the relative momentum $\boldsymbol{q}=\mu\dot{\boldsymbol{r}}$.
When we introduce the Lagrangian formalism we will see that it is much easier to derive these equations.
* 2c (5pt) Show then that the kinetic energy for two objects can then be written as
$$
K=\frac{P^2}{2M}+\frac{q^2}{2\mu}.
$$
Here we just need to use our definitions of kinetic energy in terms of the coordinates $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$.
We have that
$$
K=\frac{p_1^2}{2m_1}+\frac{p_2^2}{2m_2},
$$
and with $\boldsymbol{p}_1=m_1\dot{\boldsymbol{r}}_1$ and $\boldsymbol{p}_2=m_2\dot{\boldsymbol{r}}_2$ and using
$$
\dot{\boldsymbol{r}}_1=\dot{\boldsymbol{R}}+\frac{m_2}{M}\dot{\boldsymbol{r}},
$$
and
$$
\dot{\boldsymbol{r}}_2=\dot{\boldsymbol{R}}-\frac{m_1}{M}\dot{\boldsymbol{r}},
$$
we obtain (after squaring the expressions for $\dot{\boldsymbol{r}}_1$ and $\dot{\boldsymbol{r}}_2$)
$$
K=\frac{(m_1+m_2)\dot{\boldsymbol{R}}^2}{2}+\frac{(m_1+m_2)m_1m_2\dot{\boldsymbol{r}}^2}{2M^2},
$$
which we simplify to
$$
K=\frac{\boldsymbol{P}^2}{2M}+\frac{\boldsymbol{q}^2}{2\mu},
$$
which is what we wanted to show.
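A short numerical sanity check of these relations, for arbitrary (hand-picked, purely illustrative) masses and velocities:

import numpy as np
m1, m2 = 2.0, 3.0                    # illustrative masses
M = m1 + m2; mu = m1*m2/M
v1 = np.array([0.4, -1.1]); v2 = np.array([0.7, 0.2])  # illustrative velocities
p1, p2 = m1*v1, m2*v2
P = p1 + p2                          # total momentum
q = (m2*p1 - m1*p2)/M                # relative momentum
K = 0.5*m1*v1@v1 + 0.5*m2*v2@v2      # kinetic energy of the two particles
print(np.allclose(q, mu*(v1 - v2)))            # True
print(np.isclose(K, P@P/(2*M) + q@q/(2*mu)))   # True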
* 2d (5pt) Show that the total angular momentum for two-particles in the center-of-mass frame $\boldsymbol{R}=0$, is given by
$$
\boldsymbol{L}=\boldsymbol{r}\times \mu\dot{\boldsymbol{r}}.
$$
Here we need again that
$$
\boldsymbol{r} =\boldsymbol{r}_1-\boldsymbol{r}_2,
$$
and we then define $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ in terms of the relative and center of mass positions with $\boldsymbol{R}=0$
$$
\boldsymbol{r}_1=\frac{m_2}{M}\boldsymbol{r},
$$
and
$$
\boldsymbol{r}_2=-\frac{m_1}{M}\boldsymbol{r},
$$
The angular momentum (the total one) is the sum of the individual angular momenta (see homework 4) and we have
$$
\boldsymbol{L} = \boldsymbol{r}_1 \times \boldsymbol{p}_1+\boldsymbol{r}_2 \times \boldsymbol{p}_2,
$$
and using that $m_1\dot{\boldsymbol{r}}_1=\boldsymbol{p}_1$ and $m_2\dot{\boldsymbol{r}}_2=\boldsymbol{p}_2$ we have
$$
\boldsymbol{L} = m_1\boldsymbol{r}_1 \times \dot{\boldsymbol{r}}_1+m_2\boldsymbol{r}_2 \times \dot{\boldsymbol{r}}_2.
$$
Inserting the equations for $\boldsymbol{r}_1$ and $\boldsymbol{r}_2$ in terms of the relative motion, we have
$$
\boldsymbol{L} = m_1 \frac{m_2}{M}\boldsymbol{r}\times\frac{m_2}{M}\dot{\boldsymbol{r}} +m_2 \left(-\frac{m_1}{M}\boldsymbol{r}\right) \times \left(-\frac{m_1}{M}\dot{\boldsymbol{r}}\right).
$$
We see that we can rewrite this equation as
$$
\boldsymbol{L}=\boldsymbol{r}\times \mu\dot{\boldsymbol{r}},
$$
which is what we wanted to derive.
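The same kind of small numerical sketch checks the angular momentum result, using the $z$-component of the cross product in two dimensions (the masses and vectors below are illustrative choices):

import numpy as np
def cross2(a, b):
    # z-component of the cross product of two 2D vectors
    return a[0]*b[1] - a[1]*b[0]
m1, m2 = 2.0, 3.0
M = m1 + m2; mu = m1*m2/M
rvec = np.array([1.3, -0.5]); rdot = np.array([0.2, 0.9])
# positions and velocities in the center-of-mass frame, R = 0
r1, r2 = (m2/M)*rvec, -(m1/M)*rvec
v1, v2 = (m2/M)*rdot, -(m1/M)*rdot
Lz = m1*cross2(r1, v1) + m2*cross2(r2, v2)
print(np.isclose(Lz, mu*cross2(rvec, rdot)))   # True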
### Exercise: Conservation of Energy
The equations of motion in the center-of-mass frame in two dimensions, with $x=r\cos{(\phi)}$, $y=r\sin{(\phi)}$,
$r\in [0,\infty)$, $\phi\in [0,2\pi]$ and $r=\sqrt{x^2+y^2}$, are given by
$$
\mu \ddot{r}=-\frac{dV(r)}{dr}+\mu r\dot{\phi}^2,
$$
and
$$
\dot{\phi}=\frac{L}{\mu r^2}.
$$
Here $V(r)$ is any central force which depends only on the relative coordinate.
* 1a (5pt) Show that you can rewrite the radial equation in terms of an effective potential $V_{\mathrm{eff}}(r)=V(r)+L^2/(2\mu r^2)$.
Here we use that
$$
\dot{\phi}=\frac{L}{\mu r^2}.
$$
and rewrite the above equation of motion as
$$
\mu \ddot{r}=-\frac{dV(r)}{dr}+\frac{L^2}{\mu r^3}.
$$
If we now define an effective potential
$$
V_{\mathrm{eff}}=V(r)+\frac{L^2}{2\mu r^2},
$$
we can rewrite our equation of motion in terms of
$$
\mu \ddot{r}=-\frac{dV_{\mathrm{eff}}(r)}{dr}=-\frac{dV(r)}{dr}+\frac{L^2}{\mu r^3}.
$$
The addition due to the angular momentum comes from the kinetic energy
when we rewrote it in terms of polar coordinates. It introduces a
so-called centrifugal barrier due to the angular momentum. This
centrifugal barrier pushes the object farther away from the origin.
Alternatively,
<!-- Equation labels as ordinary links -->
<div id="_auto8"></div>
$$
\begin{equation}
-\frac{dV_{\text{eff}}(r)}{dr} = \mu \ddot{r} =-\frac{dV(r)}{dr}+\mu\dot{\phi}^2r
\label{_auto8} \tag{14}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto9"></div>
$$
\begin{equation}
-\frac{dV_{\text{eff}}(r)}{dr} = -\frac{dV(r)}{dr}+\mu\left( \frac{L}{\mu r^2}\right) ^2r
\label{_auto9} \tag{15}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto10"></div>
$$
\begin{equation}
= -\frac{dV(r)}{dr}+\mu\frac{L^2}{\mu}r^{-3}
\label{_auto10} \tag{16}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto11"></div>
$$
\begin{equation}
= -\frac{d\left( V(r)+\frac{1}{2} \frac{L^2}{\mu r^2}\right) }{dr}.
\label{_auto11} \tag{17}
\end{equation}
$$
Integrating we obtain
<!-- Equation labels as ordinary links -->
<div id="_auto12"></div>
$$
\begin{equation}
V_{\text{eff}}(r) = V(r) + \frac{L^2}{2\mu r^2} + C
\label{_auto12} \tag{18}
\end{equation}
$$
Imposing the extra condition that $V_{\text{eff}}(r\rightarrow \infty) = V(r\rightarrow \infty)$,
<!-- Equation labels as ordinary links -->
<div id="_auto13"></div>
$$
\begin{equation}
V_{\text{eff}}(r) = V(r) + \frac{L^2}{2\mu r^2}
\label{_auto13} \tag{19}
\end{equation}
$$
Write out the final differential equation for the radial degrees of freedom when we specify that $V(r)=-\alpha/r$. Plot the effective potential. You can choose values for $\alpha$ and $L$ and discuss (see Taylor section 8.4 and example 8.2) the physics of the system for two energies, one larger than zero and one smaller than zero. This is similar to what you did in the first midterm, except that the potential is different.
We insert now the explicit potential form $V(r)=-\alpha/r$. This gives us the following equation of motion
$$
\mu \ddot{r}=-\frac{dV_{\mathrm{eff}}(r)}{dr}=-\frac{d(-\alpha/r)}{dr}+\frac{L^2}{\mu r^3}=-\frac{\alpha}{r^2}+\frac{L^2}{\mu r^3}.
$$
The following code plots this effective potential for a simple choice of parameters, with a standard gravitational potential $-\alpha/r$. Here we have chosen $L=m=\alpha=1$.
# Common imports
import numpy as np
from math import *
import matplotlib.pyplot as plt
Deltax = 0.01
#set up arrays
xinitial = 0.3
xfinal = 5.0
alpha = 1.0 # spring constant
m = 1.0 # mass, you can change these
AngMom = 1.0 # The angular momentum
n = ceil((xfinal-xinitial)/Deltax)
x = np.zeros(n)
for i in range(n):
x[i] = xinitial+i*Deltax
V = np.zeros(n)
V = -alpha/x+0.5*AngMom*AngMom/(m*x*x)
# Plot potential
fig, ax = plt.subplots()
ax.set_xlabel('r[m]')
ax.set_ylabel('V[J]')
ax.plot(x, V)
fig.tight_layout()
plt.show()
If we select a total energy below zero (and not necessarily one
which corresponds to the minimum point), the object will oscillate
between two values of $r$, a value $r_{\mathrm{min}}$ and a value
$r_{\mathrm{max}}$. At these two turning points the radial kinetic energy
is zero. The object will thus oscillate back and
forth between these two points. As we will see in connection with the
solution of the equations of motion, this case corresponds to
elliptical orbits. If we select $r$ equal to the minimum of the
potential and use initial conditions for the velocity that correspond
to circular motion, the object will have a constant value of $r$ given
by the value at the minimum and the orbit is a circle.
If we select a total energy larger than zero, then, since the
kinetic energy is always larger or equal to zero, the object will move
away from the origin. See also the discussion in Taylor, sections 8.4-8.6.
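To make this discussion concrete, here is a small sketch that finds the two turning points numerically for a bound orbit. It assumes SciPy is available, and the energy $E=-0.3$ (with $L=m=\alpha=1$) is just an illustrative choice:

from scipy.optimize import brentq
alpha = m = AngMom = 1.0
def Veff(r):
    return -alpha/r + 0.5*AngMom**2/(m*r*r)
E = -0.3   # any bound energy between the minimum Veff = -0.5 and 0 works
# the minimum of Veff is at r = L^2/(m*alpha) = 1, so we bracket on each side
rmin = brentq(lambda r: E - Veff(r), 0.3, 1.0)
rmax = brentq(lambda r: E - Veff(r), 1.0, 10.0)
print(rmin, rmax)   # about 0.61 and 2.72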
### Exercise: Harmonic oscillator again
Consider a particle of mass $m$ in a $2$-dimensional harmonic oscillator with potential
$$
V(r)=\frac{1}{2}kr^2=\frac{1}{2}k(x^2+y^2).
$$
We assume the orbit has a final non-zero angular momentum $L$. The
effective potential looks like that of a harmonic oscillator for large
$r$, but for small $r$, the centrifugal potential repels the particle
from the origin. The combination of the two potentials has a minimum
at some radius $r_{\rm min}$.
Set up the effective potential and plot it. Find $r_{\rm min}$ and $\dot{\phi}$. Show that the latter is given by $\dot{\phi}=\sqrt{k/m}$. At $r_{\rm min}$ the particle does not accelerate and $r$ stays constant and the motion is circular. With fixed $k$ and $m$, which parameter can we adjust to change the value of $r$ at $r_{\rm min}$?
We consider the effective potential. The radius of a circular orbit is at the minimum of the potential (where the effective force is zero).
The potential is plotted here with the parameters $k=m=1.0$ and $L=1.0$.
# Common imports
import numpy as np
from math import *
import matplotlib.pyplot as plt
Deltax = 0.01
#set up arrays
xinitial = 0.5
xfinal = 3.0
k = 1.0 # spring constant
m = 1.0 # mass, you can change these
AngMom = 1.0 # The angular momentum
n = ceil((xfinal-xinitial)/Deltax)
x = np.zeros(n)
for i in range(n):
x[i] = xinitial+i*Deltax
V = np.zeros(n)
V = 0.5*k*x*x+0.5*AngMom*AngMom/(m*x*x)
# Plot potential
fig, ax = plt.subplots()
ax.set_xlabel('r[m]')
ax.set_ylabel('V[J]')
ax.plot(x, V)
fig.tight_layout()
plt.show()
We have an effective potential
$$
\begin{eqnarray*}
V_{\rm eff}&=&\frac{1}{2}kr^2+\frac{L^2}{2mr^2}
\end{eqnarray*}
$$
The effective potential looks like that of a harmonic oscillator for
large $r$, but for small $r$, the centrifugal potential repels the
particle from the origin. The combination of the two potentials has a
minimum at some radius $r_{\rm min}$.
$$
\begin{eqnarray*}
0&=&kr_{\rm min}-\frac{L^2}{mr_{\rm min}^3},\\
r_{\rm min}&=&\left(\frac{L^2}{mk}\right)^{1/4},\\
\dot{\phi}&=&\frac{L}{mr_{\rm min}^2}=\sqrt{k/m}.
\end{eqnarray*}
$$
For particles at $r_{\rm min}$ with $\dot{r}=0$, the particle does not
accelerate and $r$ stays constant, i.e. a circular orbit. The radius
of the circular orbit can be adjusted by changing the angular momentum
$L$.
For the above parameters this minimum is at $r_{\rm min}=1$.
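A quick numerical check of these expressions, with the same parameters as in the plot above:

from math import sqrt
k = m = AngMom = 1.0
rmin = (AngMom**2/(m*k))**0.25   # minimum of the effective potential
phidot = AngMom/(m*rmin**2)      # angular frequency of the circular orbit
print(rmin, phidot, sqrt(k/m))   # rmin = 1.0 and the two frequencies agree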
Now consider small vibrations about $r_{\rm min}$. The effective spring constant is the curvature of the effective potential. Use the curvature at $r_{\rm min}$ to find the effective spring constant (hint, look at exercise 4 in homework 6) $k_{\mathrm{eff}}$. Show also that $\omega=\sqrt{k_{\mathrm{eff}}/m}=2\dot{\phi}$
$$
\begin{eqnarray*}
k_{\rm eff}&=&\left.\frac{d^2}{dr^2}V_{\rm eff}(r)\right|_{r=r_{\rm min}}=k+\frac{3L^2}{mr_{\rm min}^4}\\
&=&4k,\\
\omega&=&\sqrt{k_{\rm eff}/m}=2\sqrt{k/m}=2\dot{\phi}.
\end{eqnarray*}
$$
Because the radius oscillates with twice the angular frequency,
the orbit has two places where $r$ reaches a minimum in one
cycle. This differs from the inverse-square force where there is one
minimum in an orbit. One can show that the orbit for the harmonic
oscillator is also elliptical, but in this case the center of the
potential is at the center of the ellipse, not at one of the foci.
The solution to the equations of motion in Cartesian coordinates is simple. The $x$ and $y$ equations of motion separate, and we have $\ddot{x}=-kx/m$ and $\ddot{y}=-ky/m$. The harmonic oscillator is indeed a system where the degrees of freedom separate and we can find analytical solutions. Define a natural frequency $\omega_0=\sqrt{k/m}$ and show that (where $A$, $B$, $C$ and $D$ are arbitrary constants defined by the initial conditions)
$$
\begin{eqnarray*}
x&=&A\cos\omega_0 t+B\sin\omega_0 t,\\
y&=&C\cos\omega_0 t+D\sin\omega_0 t.
\end{eqnarray*}
$$
The solution is also simple to write down exactly in Cartesian coordinates. The $x$ and $y$ equations of motion separate,
$$
\begin{eqnarray*}
\ddot{x}&=&-kx/m,\\
\ddot{y}&=&-ky/m.
\end{eqnarray*}
$$
We know from our studies of the harmonic oscillator that the general solution can be expressed as
$$
\begin{eqnarray*}
x&=&A\cos\omega_0 t+B\sin\omega_0 t,\\
y&=&C\cos\omega_0 t+D\sin\omega_0 t.
\end{eqnarray*}
$$
With the solutions for $x$ and $y$, and $r^2=x^2+y^2$ and the definitions $\alpha=\frac{A^2+B^2+C^2+D^2}{2}$, $\beta=\frac{A^2-B^2+C^2-D^2}{2}$ and $\gamma=AB+CD$, show that
$$
r^2=\alpha+(\beta^2+\gamma^2)^{1/2}\cos(2\omega_0 t-\delta),
$$
with
$$
\delta=\arctan(\gamma/\beta).
$$
We start with $r^2 = x^2+y^2$, square the above analytical solutions and, after some **exciting algebraic manipulations**, arrive at
<!-- Equation labels as ordinary links -->
<div id="_auto14"></div>
$$
\begin{equation}
r^2 = x^2+y^2
\label{_auto14} \tag{20}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto15"></div>
$$
\begin{equation}
= \left( A\cos\omega_0 t+B\sin\omega_0 t\right) ^2 + \left( C\cos\omega_0 t+D\sin\omega_0 t\right) ^2
\label{_auto15} \tag{21}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto16"></div>
$$
\begin{equation}
= A^2\cos^2\omega_0 t+B^2\sin^2\omega_0 t + 2AB\sin\omega_0 t \cos\omega_0 t + C^2\cos^2\omega_0 t+D^2\sin^2\omega_0 t + 2CD\sin\omega_0 t \cos\omega_0 t
\label{_auto16} \tag{22}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto17"></div>
$$
\begin{equation}
= (A^2+C^2)\cos^2\omega_0 t + (B^2+D^2)\sin^2\omega_0 t + 2(AB + CD)\sin\omega_0 t \cos\omega_0 t
\label{_auto17} \tag{23}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto18"></div>
$$
\begin{equation}
= (B^2+D^2) + (A^2+C^2-B^2-D^2)\cos^2\omega_0 t + 2(AB + CD)\sin\omega_0 t\cos\omega_0 t
\label{_auto18} \tag{24}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto19"></div>
$$
\begin{equation}
= (B^2+D^2) + (A^2+C^2-B^2-D^2)\frac{1+\cos{2\omega_0 t}}{2} + 2(AB + CD)\frac{1}{2}\sin2\omega_0 t
\label{_auto19} \tag{25}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto20"></div>
$$
\begin{equation}
= \frac{2B^2+2D^2+A^2+C^2-B^2-D^2}{2} + (A^2+C^2-B^2-D^2)\frac{\cos{2\omega_0 t}}{2} + (AB + CD)\sin2\omega_0 t
\label{_auto20} \tag{26}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto21"></div>
$$
\begin{equation}
= \frac{B^2+D^2+A^2+C^2}{2} + \frac{A^2+C^2-B^2-D^2}{2}\cos{2\omega_0 t} + (AC + BD)\sin2\omega_0 t
\label{_auto21} \tag{27}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto22"></div>
$$
\begin{equation}
= \alpha + \beta\cos{2\omega_0 t} + \gamma\sin2\omega_0 t
\label{_auto22} \tag{28}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto23"></div>
$$
\begin{equation}
= \alpha + \sqrt{\beta^2+\gamma^2}\left( \frac{\beta}{\sqrt{\beta^2+\gamma^2}}\cos{2\omega_0 t} + \frac{\gamma}{\sqrt{\beta^2+\gamma^2}}\sin2\omega_0 t\right)
\label{_auto23} \tag{29}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto24"></div>
$$
\begin{equation}
= \alpha + \sqrt{\beta^2+\gamma^2}\left( \cos{\delta}\cos{2\omega_0 t} + \sin{\delta}\sin2\omega_0 t\right)
\label{_auto24} \tag{30}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto25"></div>
$$
\begin{equation}
= \alpha + \sqrt{\beta^2+\gamma^2}\cos{\left( 2\omega_0 t - \delta\right) },
\label{_auto25} \tag{31}
\end{equation}
$$
which is what we wanted to show.
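Since the algebra above is lengthy, a small numerical sketch is a useful check. The constants $A$, $B$, $C$, $D$ and $\omega_0$ below are arbitrary illustrative choices, and we use arctan2 instead of arctan so that $\delta$ lands in the correct quadrant automatically:

import numpy as np
A, B, C, D = 0.7, -1.2, 0.4, 0.9   # arbitrary constants
w0 = 1.3                           # illustrative natural frequency
t = np.linspace(0.0, 10.0, 201)
x = A*np.cos(w0*t) + B*np.sin(w0*t)
y = C*np.cos(w0*t) + D*np.sin(w0*t)
alpha = (A*A + B*B + C*C + D*D)/2
beta  = (A*A - B*B + C*C - D*D)/2
gamma = A*B + C*D
delta = np.arctan2(gamma, beta)
r2 = alpha + np.sqrt(beta**2 + gamma**2)*np.cos(2*w0*t - delta)
print(np.allclose(x*x + y*y, r2))   # True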
### Exercise: Numerical Solution of the Harmonic Oscillator
Using the code we developed in homeworks 5 and/or 6 for the Earth-Sun system, we can solve the above harmonic oscillator problem in two dimensions using our code from this homework. We need however to change the acceleration from the gravitational force to the one given by the harmonic oscillator potential.
* 3a (20pt) Use for example the code in the exercise set to set up the acceleration and use the initial conditions fixed by for example $r_{\rm min}$ from exercise 2. Which value should the initial velocity take if you place yourself at $r_{\rm min}$ and you require a circular motion? Hint: see the first midterm, part 2. There you used the centripetal acceleration.
Instead of solving the equations in the cartesian frame we will now rewrite the above code in terms of the radial degrees of freedom only. Our differential equation is now
$$
\mu \ddot{r}=-\frac{dV(r)}{dr}+\mu r\dot{\phi}^2,
$$
and
$$
\dot{\phi}=\frac{L}{\mu r^2}.
$$
* 3b (20pt) We will use $r_{\rm min}$ to fix a value of $L$, as seen in exercise 2. This fixes also $\dot{\phi}$. Write a code which now implements the radial equation for $r$ using the same $r_{\rm min}$ as you did in 3a. Compare the results with those from 3a with the same initial conditions. Do they agree? Use only one set of initial conditions.
The code here finds the solution for $x$ and $y$ using the code we
developed in homework 5 and 6 and the midterm. Note that this code is
tailored to run in Cartesian coordinates. There is thus no angular
momentum dependent term.
Here we have chosen initial conditions that
correspond to the minimum of the effective potential
$r_{\mathrm{min}}$. We have chosen $x_0=r_{\mathrm{min}}$ and
$y_0=0$. Similarly, we use the centripetal acceleration to determine
the initial velocity so that we have a circular motion (see back to the
last question of the midterm). This means that we set the centripetal
acceleration $v^2/r$ equal to the force from the harmonic oscillator $-k\boldsymbol{r}$. Taking the
magnitude of $\boldsymbol{r}$ we have then
$v^2/r=kr/m$, which gives $v=\pm\omega_0r$.
Since the code here solves the equations of motion in cartesian
coordinates and the harmonic oscillator potential leads to forces in
the $x$- and $y$-directions that are decoupled, we have to select the initial velocities and positions so that we do not end up with, for example, $y(t)=0$ for all times.
We set $x_0$ to be different from zero and $v_{y0}$ to be different from zero.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os
# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
if not os.path.exists(PROJECT_ROOT_DIR):
os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
os.makedirs(DATA_ID)
def image_path(fig_id):
return os.path.join(FIGURE_ID, fig_id)
def data_path(dat_id):
return os.path.join(DATA_ID, dat_id)
def save_fig(fig_id):
plt.savefig(image_path(fig_id) + ".png", format='png')
DeltaT = 0.001
#set up arrays
tfinal = 10.0
n = ceil(tfinal/DeltaT)
# set up arrays
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
radius = np.zeros(n)
# Constants of the model
k = 1.0 # spring constant
m = 1.0 # mass, you can change these
omega02 = k/m # Frequency
AngMom = 1.0 # The angular momentum
# Potential minimum
rmin = (AngMom*AngMom/k/m)**0.25
# Initial conditions as compact 2-dimensional arrays, x0=rmin and y0 = 0
x0 = rmin; y0= 0.0
r0 = np.array([x0,y0])
vy0 = sqrt(omega02)*rmin; vx0 = 0.0
v0 = np.array([vx0,vy0])
r[0] = r0
v[0] = v0
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up the acceleration
a = -r[i]*omega02
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -r[i+1]*omega02
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
# Plot position as function of time
radius = np.sqrt(r[:,0]**2+r[:,1]**2)
fig, ax = plt.subplots(3,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius squared')
ax[0].plot(t,r[:,0]**2+r[:,1]**2)
ax[1].set_xlabel('time')
ax[1].set_ylabel('x position')
ax[1].plot(t,r[:,0])
ax[2].set_xlabel('time')
ax[2].set_ylabel('y position')
ax[2].plot(t,r[:,1])
fig.tight_layout()
save_fig("2DimHOVV")
plt.show()
We see that, to within numerical precision, we obtain a constant radius.
The following code shows first how we can solve this problem using the radial degrees of freedom only.
Here we need to add the explicit centrifugal barrier. Note that the variable $r$ depends only on time. There is no $x$ and $y$ directions
since we have transformed the equations to polar coordinates.
DeltaT = 0.01
#set up arrays
tfinal = 10.0
n = ceil(tfinal/DeltaT)
# set up arrays for t, v and r
t = np.zeros(n)
v = np.zeros(n)
r = np.zeros(n)
E = np.zeros(n)
# Constants of the model
AngMom = 1.0 # The angular momentum
m = 1.0
k = 1.0
omega02 = k/m
c1 = AngMom*AngMom/(m*m)
c2 = AngMom*AngMom/m
rmin = (AngMom*AngMom/k/m)**0.25
# Initial conditions
r0 = rmin
v0 = 0.0
r[0] = r0
v[0] = v0
E[0] = 0.5*m*v0*v0+0.5*k*r0*r0+0.5*c2/(r0*r0)
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up acceleration
a = -r[i]*omega02+c1/(r[i]**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -r[i+1]*omega02+c1/(r[i+1]**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
E[i+1] = 0.5*m*v[i+1]*v[i+1]+0.5*k*r[i+1]*r[i+1]+0.5*c2/(r[i+1]*r[i+1])
# Plot position as function of time
fig, ax = plt.subplots(2,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius')
ax[0].plot(t,r)
ax[1].set_xlabel('time')
ax[1].set_ylabel('Energy')
ax[1].plot(t,E)
save_fig("RadialHOVV")
plt.show()
### Exercise: Equations for an ellipse
Consider an ellipse defined by the sum of the distances from the two foci being $2D$, which, expressed in Cartesian coordinates with the center of the ellipse at the origin, becomes
$$
\sqrt{(x-a)^2+y^2}+\sqrt{(x+a)^2+y^2}=2D.
$$
Here the two foci are at $(a,0)$ and $(-a,0)$. Show that this can be written as
$$
\frac{x^2}{D^2}+\frac{y^2}{D^2-a^2}=1.
$$
We start by squaring the two sides and, again, after some **exciting algebraic manipulations** we arrive at
$$
\sqrt{(x-a)^2+y^2}+\sqrt{(x+a)^2+y^2} =2D
\\ (x-a)^2 + y^2 + (x+a)^2 + y^2 + 2\sqrt{(x-a)^2 + y^2}\sqrt{(x+a)^2+y^2} = 4D^2
\\ 2y^2 + 2x^2 + 2a^2 + 2\sqrt{(x-a)^2(x+a)^2 + y^4 + y^2[(x-a)^2+(x+a)^2]} = 4D^2
\\ y^2 + x^2 + a^2 + \sqrt{(x^2-a^2)^2 + y^4 + y^2(2x^2+2a^2)} = 2D^2
\\ \sqrt{(x^2-a^2)^2 + y^4 + y^2(2x^2+2a^2)} = 2D^2 -( y^2 + x^2 + a^2 )
\\ (x^2-a^2)^2 + y^4 + y^2(2x^2+2a^2) = 4D^4 + y^4 + x^4 + a^4 - 4D^2( y^2 + x^2 + a^2 ) + 2(y^2x^2+y^2a^2+x^2a^2)
\\ x^4-2x^2a^2+a^4 + y^4 + 2y^2x^2+2y^2a^2 = 4D^4 + y^4 + x^4 + a^4 - 4D^2y^2 -4D^2 x^2 -4D^2 a^2 + 2y^2x^2+2y^2a^2+2x^2a^2
\\ 4D^4 - 4D^2y^2 -4D^2 x^2 -4D^2 a^2 +4x^2a^2 = 0
\\ D^4 - D^2y^2 -D^2 x^2 -D^2 a^2 +x^2a^2 = 0
\\ D^2(D^2-a^2) - x^2(D^2-a^2) = D^2y^2
\\ D^2 - x^2 = \frac{D^2y^2}{D^2-a^2}
\\ 1 - \frac{x^2}{D^2} = \frac{y^2}{D^2-a^2}
\\ \frac{x^2}{D^2} + \frac{y^2}{D^2-a^2} = 1,
$$
where the last line is indeed the equation for an ellipse.
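A short numerical sketch confirms the result. With the illustrative values $D=2$ and $a=1$ we parametrize the ellipse and check that the sum of the distances to the two foci is $2D$ everywhere:

import numpy as np
D, a = 2.0, 1.0   # illustrative values with D > a
t = np.linspace(0.0, 2*np.pi, 400)
x = D*np.cos(t)
y = np.sqrt(D*D - a*a)*np.sin(t)
s = np.sqrt((x - a)**2 + y**2) + np.sqrt((x + a)**2 + y**2)
print(np.allclose(s, 2*D))   # True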
### Exercise: Attractive Potential
Consider a particle in an attractive potential
$$
U(r)=-\alpha/r.
$$
The quantity $r$ is the absolute value of the relative position. We
will use the reduced mass $\mu$ and the angular momentum $L$, as
discussed during the lectures. With the transformation of a two-body
problem to the center-of-mass frame, the actual equations look like an
*effective* one-body problem. The energy of the system is $E$ and the
minimum of the effective potential is $r_{\rm min}$.
The analytical solution to the radial equation of motion is
$$
r(\phi) = \frac{1}{\frac{\mu\alpha}{L^2}+A\cos{(\phi)}}.
$$
Find the value of $A$. Hint: Use the fact that at $r_{\rm min}$
there is no radial kinetic energy and $E=-\alpha/r_{\rm min}+L^2/2mr_{\rm min}^2$.
At $r_{\mathrm{min}}$ and $r_{\mathrm{max}}$, all the kinetic energy is stored in the velocity in the direction perpendicular to $r$, since the radial velocity is zero there. We can calculate this using the angular momentum and, from there, find $A$ in terms of the energy $E$, which is constant. But first, we need to find $r_{\mathrm{min}}$ from the conservation of energy (noting that the radial velocity $\dot{r}$ at the minimum is zero):
$$
E = U(r) + \frac{1}{2} \mu(\dot{r}^2 + (r\dot{\phi})^2)
\\
E = \frac{-\alpha}{r_{\min}} + \frac{1}{2} \mu\left( \frac{L}{\mu r_{\min}}\right) ^2
\\
E r_{\min}^2 - \frac{1}{2}\mu\left( \frac{L}{\mu}\right) ^2 + \alpha r_{\min} = 0
\\
r_{\min}^2 + \frac{\alpha}{E} r_{\min} - \frac{L^2}{2E\mu} = 0
\\
r_{\min} = - \frac{\alpha}{2E} \pm \frac{1}{2} \sqrt{\frac{\alpha^2}{E^2} + 2\frac{L^2}{E\mu}}
$$
Since we are looking for the minimum, the $\pm$ sign must be negative (then $r_{\min}$ will not be negative, since $E<0$). Therefore, we have
$$
\frac{1}{\frac{\mu\alpha}{L^2}+A} = -\frac{\alpha}{2E} - \frac{1}{2} \sqrt{\frac{\alpha^2}{E^2} + 2\frac{L^2}{E\mu}}
\\
A = - \frac{\mu\alpha}{L^2} - \frac{2E}{\alpha - \sqrt{\alpha^2 + 2\frac{L^2E}{\mu}}} = \frac{\mu}{L^2}\sqrt{\alpha^2 + 2\frac{L^2E}{\mu}}
$$
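A numerical check of this expression (the values $\mu=\alpha=L=1$ and $E=-0.3$ are illustrative choices for a bound orbit):

from math import sqrt
mu = alpha = L = 1.0
E = -0.3   # bound orbit, E < 0
rmin = -alpha/(2*E) - 0.5*sqrt(alpha**2/E**2 + 2*L**2/(E*mu))
A = (mu/L**2)*sqrt(alpha**2 + 2*L**2*E/mu)
print(abs(1.0/(mu*alpha/L**2 + A) - rmin) < 1e-12)   # True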
### Exercise: Inverse-square force
Consider again the same effective potential as in the previous exercise. This leads to an attractive inverse-square-law force, $F=-\alpha/r^2$. Consider a particle of mass $m$ with angular momentum $L$. Taylor sections 8.4-8.7 are relevant background material. See also the harmonic oscillator potential from hw8. The equations of motion for the radial degree of freedom (see also hw8) in the center-of-mass frame in two dimensions, with $x=r\cos{(\phi)}$ and $y=r\sin{(\phi)}$,
$r\in [0,\infty)$, $\phi\in [0,2\pi]$ and $r=\sqrt{x^2+y^2}$, are given by
$$
\ddot{r}=-\frac{1}{m}\frac{dV(r)}{dr}+r\dot{\phi}^2,
$$
and
$$
\dot{\phi}=\frac{L}{m r^2}.
$$
Here $V(r)$ is any central force which depends only on the relative coordinate.
Find the radius of a circular orbit by solving for the position of the minimum of the effective potential.
<!-- Equation labels as ordinary links -->
<div id="_auto26"></div>
$$
\begin{equation}
\frac{1}{m}\frac{dV(r)}{dr} = r\dot{\phi}^2
\label{_auto26} \tag{32}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto27"></div>
$$
\begin{equation} \frac{1}{m}\left( -\frac{-\alpha}{r^2}\right) = r \frac{L^2}{m^2r^4}
\label{_auto27} \tag{33}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto28"></div>
$$
\begin{equation} \frac{\alpha}{mr^2} = \frac{L^2}{m^2r^3}
\label{_auto28} \tag{34}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto29"></div>
$$
\begin{equation} r = \frac{L^2}{m\alpha}
\label{_auto29} \tag{35}
\end{equation}
$$
At the minimum, the radial velocity is zero and it is only the [centripetal velocity](https://en.wikipedia.org/wiki/Centripetal_force) which is nonzero. This implies that $\ddot{r}=0$. What is the angular frequency, $\dot{\theta}$, of the orbit? Solve this by setting $\ddot{r}=0=F/m+\dot{\theta}^2r$.
<!-- Equation labels as ordinary links -->
<div id="_auto30"></div>
$$
\begin{equation}
\dot{\theta}^2 r = - \frac{F}{m}
\label{_auto30} \tag{36}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto31"></div>
$$
\begin{equation} \dot{\theta}^2 r = - \frac{-\frac{\alpha}{r^2}}{m}
\label{_auto31} \tag{37}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto32"></div>
$$
\begin{equation} \dot{\theta}^2 = \frac{\alpha}{mr^3}
\label{_auto32} \tag{38}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto33"></div>
$$
\begin{equation} \dot{\theta} = \pm \sqrt{\frac{\alpha}{mr^3}}
\label{_auto33} \tag{39}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto34"></div>
$$
\begin{equation} \dot{\theta} = \pm \sqrt{\frac{\alpha}{m\frac{L^6}{m^3\alpha^3}}}
\label{_auto34} \tag{40}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto35"></div>
$$
\begin{equation} \dot{\theta} = \pm \sqrt{\frac{\alpha^4m^2}{L^6}}
\label{_auto35} \tag{41}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto36"></div>
$$
\begin{equation} \dot{\theta} = \pm \frac{\alpha^2m}{L^3}
\label{_auto36} \tag{42}
\end{equation}
$$
Find the effective spring constant for the particle at the minimum.
We have shown in class that, from the Taylor expansion, we have
$$
k = \frac{d^2V_{\text{eff}}}{dr^2}
$$
Therefore, all we have to do is find the second derivative of $V_{\text{eff}}$ around the minimum point of $V_{\text{eff}}$ where $\dot{r} = \ddot{r} = 0$.
<!-- Equation labels as ordinary links -->
<div id="_auto37"></div>
$$
\begin{equation}
k = \frac{d^2V_{\text{eff}}}{dr^2}
\label{_auto37} \tag{43}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto38"></div>
$$
\begin{equation} = \frac{d^2\left( -\frac{\alpha}{r} + \frac{1}{2} \frac{L^2}{mr^2}\right) }{dr^2}
\label{_auto38} \tag{44}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto39"></div>
$$
\begin{equation} = -\frac{2\alpha}{r^3} + \frac{3L^2}{mr^4}
\label{_auto39} \tag{45}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto40"></div>
$$
\begin{equation} = -\frac{2\alpha}{\frac{L^6}{m^3\alpha^3}} + \frac{3L^2}{m\frac{L^8}{m^4\alpha^4}}
\label{_auto40} \tag{46}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto41"></div>
$$
\begin{equation} = -\frac{2m^3\alpha^4}{L^6} + \frac{3m^3\alpha^4}{L^6}
\label{_auto41} \tag{47}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto42"></div>
$$
\begin{equation} = \frac{m^3\alpha^4}{L^6}
\label{_auto42} \tag{48}
\end{equation}
$$
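Here is a small sympy sketch (an addition, not from the original notes) confirming the second derivative evaluated at $r=L^2/(m\alpha)$:

import sympy as sp
m, alpha, L, r = sp.symbols('m alpha L r', positive=True)
Veff = -alpha/r + L**2/(2*m*r**2)       # effective potential
k = sp.diff(Veff, r, 2).subs(r, L**2/(m*alpha))
print(sp.simplify(k))                   # alpha**4*m**3/L**6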
What is the angular frequency for small vibrations about the minimum? How does this compare with the answer to (3b)?
For small deviations $\delta r$ of $r$,
$$
m\frac{d^2\left( \delta r \right) }{dt^2} = -k \delta r
$$
The solution of this differential equation is of the form
$$
\delta r = A \cos(\omega t + \phi)
$$
where
<!-- Equation labels as ordinary links -->
<div id="_auto43"></div>
$$
\begin{equation}
\omega = \sqrt{\frac{k}{m}}
\label{_auto43} \tag{49}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto44"></div>
$$
\begin{equation} = \sqrt{\frac{m^2\alpha^4}{L^6}}
\label{_auto44} \tag{50}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto45"></div>
$$
\begin{equation} = \frac{m\alpha^2}{L^3}
\label{_auto45} \tag{51}
\end{equation}
$$
This is in fact equal to the expression for $\dot{\theta}$. This means that small perturbations oscillate in sync with the orbit and this traces out an ellipse with a very small eccentricity, a very nice physical result.
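A tiny numerical illustration of this statement, with $L=m=\alpha=1$ (a convenience choice we make here; the example code further below uses the same convention):

import numpy as np
m = alpha = L = 1.0
r_min = L**2/(m*alpha)                   # circular-orbit radius
omega = m*alpha**2/L**3                  # small-oscillation frequency
thetadot = np.sqrt(alpha/(m*r_min**3))   # orbital angular frequency
print(omega, thetadot)                   # both 1.0: the perturbed orbit closes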
### Exercise: Inverse-square force again
Consider again a particle of mass $m$ in the same attractive potential, $U(r)=-\alpha/r$, with angular momentum $L$ and just the right energy so that
$$
A=m\alpha/L^2
$$
where $A$ comes from the expression
$$
r=\frac{1}{(m\alpha/L^2)+A\cos{(\phi)}}.
$$
The trajectory can then be rewritten as
$$
r=\frac{2r_0}{1+\cos\theta},~~~r_0=\frac{L^2}{2m\alpha}.
$$
Show that for this case the total energy $E$ approaches zero.
<!-- Equation labels as ordinary links -->
<div id="_auto46"></div>
$$
\begin{equation}
E = - \frac{\alpha}{r} + \frac{1}{2} m \left( (\dot{\theta}r)^2+\dot{r}^2\right)
\label{_auto46} \tag{52}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto47"></div>
$$
\begin{equation} = - \frac{\alpha}{r} + \frac{1}{2} m \left[ \left( \frac{L}{mr^2}r\right) ^2+\left( \frac{dr}{d\theta}\dot{\theta}\right) ^2\right]
\label{_auto47} \tag{53}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto48"></div>
$$
\begin{equation} = - \frac{\alpha}{2r_0}(1+\cos\theta) + \frac{1}{2} m \left[ \left( \frac{L(1+\cos\theta)}{2mr_0}\right) ^2+\left( 2r_0\frac{-1}{(1+\cos\theta)^2}(-\sin\theta)\frac{L}{mr^2}\right) ^2\right]
\label{_auto48} \tag{54}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto49"></div>
$$
\begin{equation} = - \frac{\alpha}{2r_0}(1+\cos\theta) + \frac{1}{2} m \left[ \left( \frac{L(1+\cos\theta)}{2mr_0}\right) ^2+\left( 2r_0\frac{-1}{(1+\cos\theta)^2}(-\sin\theta)\frac{L(1+\cos\theta)^2}{4mr_0^2}\right) ^2\right]
\label{_auto49} \tag{55}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto50"></div>
$$
\begin{equation} = - \frac{\alpha}{2r_0}(1+\cos\theta) +
\frac{1}{2} m \left[ \left( \frac{L(1+\cos\theta)}{2mr_0}\right) ^2+\left( \sin\theta\frac{L}{2mr_0}\right) ^2\right]
\label{_auto50} \tag{56}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto52"></div>
$$
\begin{equation} = - \frac{\alpha}{2r_0}(1+\cos\theta) +
\frac{1}{2} m \frac{L^2}{4m^2r_0^2} \left[ \left( 1+\cos\theta\right) ^2+\left( \sin\theta\right) ^2\right]
\label{_auto52} \tag{58}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto53"></div>
$$
\begin{equation} = - \frac{\alpha}{2r_0}(1+\cos\theta) +
\frac{1}{2} \frac{L^2}{4mr_0^2} \left( 1 + \cos^2\theta + 2\cos \theta + \sin^2\theta\right)
\label{_auto53} \tag{59}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto54"></div>
$$
\begin{equation} = - \frac{\alpha}{2r_0}(1+\cos\theta) +
\frac{1}{2} \frac{L^2}{4mr_0^2} \left( 2 + 2\cos \theta \right)
\label{_auto54} \tag{60}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto55"></div>
$$
\begin{equation} = (1+\cos\theta) \left( - \frac{\alpha}{2r_0} + \frac{L^2}{4mr_0^2}\right)
\label{_auto55} \tag{61}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto56"></div>
$$
\begin{equation} = (1+\cos\theta) \left( - \frac{\alpha}{2\frac{L^2}{2m\alpha}} + \frac{L^2}{4m\frac{L^4}{4m^2\alpha^2}}\right)
\label{_auto56} \tag{62}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto57"></div>
$$
\begin{equation} = (1+\cos\theta) \left( - \frac{m\alpha^2}{L^2} + \frac{m\alpha^2}{L^2}\right)
\label{_auto57} \tag{63}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto58"></div>
$$
\begin{equation} = 0
\label{_auto58} \tag{64}
\end{equation}
$$
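We can also confirm this result numerically (an added check, not part of the original derivation), evaluating $E$ along the parabolic trajectory with $L=m=\alpha=1$:

import numpy as np
m = alpha = L = 1.0
r0 = L**2/(2*m*alpha)
theta = np.linspace(0.2, 2.8, 5)              # stay away from theta = pi
r = 2*r0/(1 + np.cos(theta))
thetadot = L/(m*r**2)                         # from L = m r**2 thetadot
drdtheta = 2*r0*np.sin(theta)/(1 + np.cos(theta))**2
rdot = drdtheta*thetadot                      # chain rule: rdot = dr/dtheta * thetadot
E = -alpha/r + 0.5*m*((r*thetadot)**2 + rdot**2)
print(np.max(np.abs(E)))                      # ~1e-16: E vanishes along the orbit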
With zero energy $E=0$, write this trajectory in a more recognizable parabolic form, that is express $x_0$ and $R$ in terms of $r_0$ using
$$
x=x_0-\frac{y^2}{R}.
$$
We have that
<!-- Equation labels as ordinary links -->
<div id="_auto59"></div>
$$
\begin{equation}
x = r \cos\theta
\label{_auto59} \tag{65}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto60"></div>
$$
\begin{equation}
y = r \sin \theta.
\label{_auto60} \tag{66}
\end{equation}
$$
Using the general solution with eccentricity $\epsilon=1$, we have
$$
r(\theta)=\frac{c}{1+\cos\theta},
$$
and multiplying both sides by $1+\cos\theta$ and using that $x=r\cos\theta$,
$$
r = c -x,
$$
and squaring both sides while using that $r^2=x^2+y^2$,
$$
r^2 = x^2+y^2=c^2 +x^2-2cx,
$$
leading to
$$
y^2=c^2-2cx,
$$
and using that we defined
$$
c=2r_0=\frac{L^2}{m\alpha},
$$
we divide by $2c$, solve for $x$, and get the final answer
$$
x = r_0 - \frac{y^2}{4r_0}
$$
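A short numerical sanity check (our addition) that the parametric orbit indeed satisfies this parabola, for an assumed scale $r_0=1$:

import numpy as np
r0 = 1.0
theta = np.linspace(-2.5, 2.5, 9)             # avoid theta = +-pi where r diverges
r = 2*r0/(1 + np.cos(theta))
x, y = r*np.cos(theta), r*np.sin(theta)
print(np.max(np.abs(x - (r0 - y**2/(4*r0)))))  # ~1e-16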
### Exercise: Parabolic and hyperbolic orbits
The solution to the radial function for an inverse-square-law force, see for example Taylor equation (8.59) or the equation above, is
$$
r(\phi) = \frac{c}{1+\epsilon\cos{(\phi)}}.
$$
For $\epsilon=1$ (or the energy $E=0$) the orbit reduces to a parabola as we saw in the previous exercise,
while for $\epsilon > 1$ (or energy positive) the orbit becomes a hyperbola. The equation for a hyperbola in Cartesian coordinates is
$$
\frac{(x-\delta)^2}{\alpha^2}-\frac{y^2}{\beta^2}=1.
$$
For a hyperbola, identify the constants $\alpha$, $\beta$ and $\delta$ in terms of the constants $c$ and $\epsilon$ for $r(\phi)$.
<!-- Equation labels as ordinary links -->
<div id="_auto61"></div>
$$
\begin{equation}
x = r\cos\phi
\label{_auto61} \tag{67}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto62"></div>
$$
\begin{equation} = \frac{c\cos\phi}{1+\epsilon\cos\phi}
\label{_auto62} \tag{68}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto63"></div>
$$
\begin{equation}
y = r\sin\phi
\label{_auto63} \tag{69}
\end{equation}
$$
<!-- Equation labels as ordinary links -->
<div id="_auto64"></div>
$$
\begin{equation} = \frac{c\sin\phi}{1+\epsilon\cos\phi}
\label{_auto64} \tag{70}
\end{equation}
$$
Here $\epsilon>1$. We use our equation for $r$, multiply both sides by the denominator $1+\epsilon\cos\phi$, and have
$$
r(1+\epsilon\cos\phi)=c,
$$
use $x=r\cos\phi$, square both sides, use that $r^2=x^2+y^2$, and obtain
$$
r^2=x^2+y^2=c^2+\epsilon^2x^2-2cx\epsilon,
$$
and reorder
$$
x^2(\epsilon^2-1)-y^2-2cx\epsilon= -c^2.
$$
We complete the square in $x$ by adding and subtracting on both sides $\epsilon^2c^2/(\epsilon^2-1)$
and we obtain
$$
(\epsilon^2-1)(x-\delta)^2-y^2= -c^2+\frac{\epsilon^2c^2}{\epsilon^2-1}.
$$
The right-hand side simplifies to $c^2/(\epsilon^2-1)$; dividing both sides by it brings the equation to standard form. Here we have defined
$$
\delta = \frac{c\epsilon}{\epsilon^2-1},
$$
and introducing the constants
$$
\alpha = \frac{c}{\epsilon^2-1},
$$
and
$$
\beta = \frac{c}{\sqrt{\epsilon^2-1}},
$$
we can rewrite the above equation as
$$
\frac{(x-\delta)^2}{\alpha^2}-\frac{y^2}{\beta^2}=1,
$$
which is nothing but the equation for a hyperbola.
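As a check of the identifications above (an addition to the original text), we can test them numerically for sample values $c=1$, $\epsilon=1.5$:

import numpy as np
c, eps = 1.0, 1.5                             # eps > 1: hyperbolic case
phi = np.linspace(0.1, 1.2, 5)                # points on one branch
r = c/(1 + eps*np.cos(phi))
x, y = r*np.cos(phi), r*np.sin(phi)
delta = c*eps/(eps**2 - 1)
a = c/(eps**2 - 1)                            # the alpha of the hyperbola
b = c/np.sqrt(eps**2 - 1)                     # the beta of the hyperbola
print(np.max(np.abs((x - delta)**2/a**2 - y**2/b**2 - 1)))  # ~1e-15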
### Exercise: Testing orbit types
In this exercise we can use the program for $r(\phi)$ we developed in hw8. We will use an inverse-square-law force as in the previous four exercises. The aim is to see that the orbits we get for $E<0$ become ellipses (or circles), parabola for $E=0$ and hyperbola for $E>0$. An example code is shown here.
Here we have defined the constants $L=m=\alpha=1$. Feel free to set new values. **You also need to set the initial conditions** in order to study the different types of orbits. It may be useful to plot the potential here and find the values for the initial conditions that give $E<0$, $E=0$ and $E>0$.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
# Simple Gravitational Force -alpha/r
DeltaT = 0.01
#set up arrays
tfinal = 100.0
n = ceil(tfinal/DeltaT)
# set up arrays for t, v and r
t = np.zeros(n)
v = np.zeros(n)
r = np.zeros(n)
# Constants of the model, setting all variables to one for simplicity
alpha = 1.0
AngMom = 1.0 # The angular momentum
m = 1.0 # scale mass to one
c1 = AngMom*AngMom/(m*m)
c2 = AngMom*AngMom/m
# You need to specify the initial conditions
# Here we have chosen the conditions which lead to circular orbit and thereby a constant r
r0 = (AngMom*AngMom/m/alpha)
v0 = 0.0
r[0] = r0
v[0] = v0
# Start integrating using the Velocity-Verlet method
for i in range(n-1):
# Set up acceleration
a = -alpha/(r[i]**2)+c1/(r[i]**3)
# update velocity, time and position using the Velocity-Verlet method
r[i+1] = r[i] + DeltaT*v[i]+0.5*(DeltaT**2)*a
anew = -alpha/(r[i+1]**2)+c1/(r[i+1]**3)
v[i+1] = v[i] + 0.5*DeltaT*(a+anew)
t[i+1] = t[i] + DeltaT
# Plot position as function of time
fig, ax = plt.subplots(2,1)
ax[0].set_xlabel('time')
ax[0].set_ylabel('radius')
ax[0].plot(t,r)
ax[1].set_xlabel('time')
ax[1].set_ylabel('Velocity')
ax[1].plot(t,v)
plt.show()
Run your code and study and discuss the situations where you have
elliptical, parabolic and hyperbolic orbits. Discuss the physics of
these cases. The results from the four previous exercises may be useful
here. In the code here we have chosen initial conditions which correspond to circular motion.
This corresponds to
$$
r_{\mathrm{min}} = \frac{L^2}{m\alpha}.
$$
Note well that the velocity is now the radial velocity. If we want to
study the angular velocity we would need to add the equations for this
quantity. The solutions to exercises 1-4 give you the minimum $r$
values needed to find the elliptical, parabolic and hyperbolic
orbits. Starting with $v_0=0$, so that the initial radius is the
perihelion $r_{\mathrm{min}}$, elliptical orbits ($E<0$) require
$\frac{L^2}{2m\alpha} < r_{\mathrm{min}} \le \frac{L^2}{m\alpha}$, parabolic orbits
($E=0$) require $r_{\mathrm{min}} =\frac{L^2}{2m\alpha}$, and for hyperbolic
orbits ($E>0$) we have $0<r_{\mathrm{min}} <\frac{L^2}{2m\alpha}$. Try out these
different initial conditions in order to test these different types of
motion. A concrete set of choices is sketched below.
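For concreteness, here is one possible set of starting radii (our suggestion, not prescribed by the exercise) together with the resulting energies, using $L=m=\alpha=1$ and $v_0=0$:

L = m = alpha = 1.0
r_parabolic = L**2/(2*m*alpha)     # E = 0
r_elliptic = 0.75                  # between L**2/(2 m alpha) and L**2/(m alpha): E < 0
r_hyperbolic = 0.25                # below L**2/(2 m alpha): E > 0
for r0 in (r_elliptic, r_parabolic, r_hyperbolic):
    E = -alpha/r0 + 0.5*L**2/(m*r0**2)   # energy with v0 = 0
    print(r0, E)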
### Exercise: New reference frame
Show that if one transforms to a reference frame where the total
momentum is zero, $\boldsymbol{p}_1=-\boldsymbol{p}_2$, that the relative momentum
$\boldsymbol{q}$ corresponds to either $\boldsymbol{p}_1$ or $-\boldsymbol{p}_2$. This
means that in this frame the magnitude of $\boldsymbol{q}$ is one half the
magnitude of $\boldsymbol{p}_1-\boldsymbol{p}_2$.
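A minimal componentwise check (our addition), assuming the standard definition of the relative momentum $\boldsymbol{q}=(m_2\boldsymbol{p}_1-m_1\boldsymbol{p}_2)/(m_1+m_2)$:

import sympy as sp
m1, m2, p1 = sp.symbols('m1 m2 p1', positive=True)
p2 = -p1                                  # frame with zero total momentum
q = (m2*p1 - m1*p2)/(m1 + m2)             # relative momentum (standard definition)
print(sp.simplify(q - p1))                # 0, hence q = p1 = -p2 and |q| = |p1 - p2|/2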
### Exercise: Center of mass and relative coordinates
Given the center of mass and relative coordinates $\boldsymbol{R}$ and $\boldsymbol{r}$, respectively, for
particles of mass $m_1$ and $m_2$, find the coordinates $\boldsymbol{r}_1$
and $\boldsymbol{r}_2$ in terms of the masses, $\boldsymbol{R}$ and $\boldsymbol{r}$.
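For reference (not part of the original exercise statement), the expected inversion with $M=m_1+m_2$ and $\boldsymbol{r}=\boldsymbol{r}_1-\boldsymbol{r}_2$ is $\boldsymbol{r}_1=\boldsymbol{R}+\frac{m_2}{M}\boldsymbol{r}$ and $\boldsymbol{r}_2=\boldsymbol{R}-\frac{m_1}{M}\boldsymbol{r}$; a componentwise sympy check:

import sympy as sp
m1, m2, r1, r2 = sp.symbols('m1 m2 r1 r2', positive=True)
M = m1 + m2
R = (m1*r1 + m2*r2)/M                     # center-of-mass coordinate
r = r1 - r2                               # relative coordinate
print(sp.simplify(R + m2/M*r - r1))       # 0
print(sp.simplify(R - m1/M*r - r2))       # 0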
### Exercise: Two-body problems
Consider a particle of mass $m$ moving in a potential
$$
V(r)=\alpha\ln(r/\alpha),
$$
where $\alpha$ is a constant.
* (a) If the particle is moving in a circular orbit of radius $R$, find the angular frequency $\dot{\theta}$. Solve this by setting $F=-m\dot{\theta}^2r$ (force and acceleration point inward).
* (b) Express the angular momentum $L$ in terms of the constant $\alpha$, the mass $m$ and the radius $R$. Also express $R$ in terms of $L$, $\alpha$ and $m$.
* (c) Sketch the effective radial potential, $V_{\rm eff}(r)$, for a particle with angular momentum $L$. (No longer necessarily moving in a circular orbit.)
* (d) Find the position of the minimum of $V_{\rm eff}$ in terms of $L$, $\alpha$ and $m$, then compare to the result of (b).
* (e) What is the effective spring constant for a particle at the minimum of $V_{\rm eff}$? Express your answer in terms of $L$, $m$ and $\alpha$.
* (f) What is the angular frequency, $\omega$, for small oscillations of $r$ about the $R_{\rm min}$? Express your answer in terms of $\dot{\theta}$ from part (3a). | [
"[email protected]"
] | |
41c19da94508fae06a1c001e18307c47af389914 | 556403cb93b2fdd464c3aef4cba4f1c3dc42e9d7 | /Python/venv/Scripts/easy_install-script.py | 7f846650c43e425c3d8dd49ca82ecb1272cbbe96 | [] | no_license | msivakumarm/PycharmProjects | 4d90a0105f334f2393d30fe46dc650808002b4fd | 7d84194a576f9ec8356ff272642d07dbddc48d42 | refs/heads/master | 2020-09-06T14:42:12.945424 | 2019-11-08T11:42:14 | 2019-11-08T11:42:14 | 219,989,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 448 | py | #!C:\Users\SIVA\PycharmProjects\Python\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
| [
"[email protected]"
] | |
b042fdcacc6b466da8f278faceace2ce9a4f2584 | ae81b16cf4242d329dfcb055e85fafe87262cc7f | /leetcode/509斐波那切数列.py | ec74e05e99dfed630718815437601a4785cd97d8 | [] | no_license | coquelin77/PyProject | 3d2d3870b085c4b7ff41bd200fe025630969ab8e | 58e84ed8b3748c6e0f78184ab27af7bff3778cb8 | refs/heads/master | 2023-03-18T19:14:36.441967 | 2019-06-19T02:44:22 | 2019-06-19T02:44:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,009 | py | '''斐波那契数,通常用 F(n) 表示,形成的序列称为斐波那契数列。该数列由 0 和 1 开始,后面的每一项数字都是前面两项数字的和。也就是:
F(0) = 0, F(1) = 1
F(N) = F(N - 1) + F(N - 2), 其中 N > 1.
给定 N,计算 F(N)。
示例 1:
输入:2
输出:1
解释:F(2) = F(1) + F(0) = 1 + 0 = 1.
示例 2:
输入:3
输出:2
解释:F(3) = F(2) + F(1) = 1 + 1 = 2.
示例 3:
输入:4
输出:3
解释:F(4) = F(3) + F(2) = 2 + 1 = 3.'''
class Solution(object):
    def fib(self, N):
        """
        :type N: int
        :rtype: int
        """
        # Build the sequence bottom-up; avoid shadowing the built-in `list`.
        if N < 2:
            return N
        fibs = [0, 1]
        for i in range(2, N + 1):
            fibs.append(fibs[i - 1] + fibs[i - 2])
        return fibs[-1]
if __name__ == '__main__':
    a = Solution()
    print(a.fib(0))
"[email protected]"
] | |
31e4b5de766ec1ab842af725b94e2c8ce8339e03 | 7c04a4dd2159284fe1a1dfef1e88a53757abf843 | /orm/sqlalchemy复习/s17.py | bce14ab6016578d3fbc969a6a6996fd70150afbe | [] | no_license | RelaxedDong/flask_advance | f6c24c38e4f3143bb49f3c461011bcad15ea03ae | 1cc42cb6c7607f35903f1a2edc3d11a34a5046e1 | refs/heads/master | 2022-01-07T18:26:00.010508 | 2019-04-13T09:51:17 | 2019-04-13T09:51:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,193 | py | from sqlalchemy import create_engine, Column, Integer, String, func
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine("mysql+pymysql://root:[email protected]/ormreview?charset=utf8",
encoding='utf8')
session = sessionmaker(engine)()
Base = declarative_base(engine)
class Aticle(Base):
__tablename__ = "article"
id = Column(Integer, primary_key=True, autoincrement=True)
title = Column(String(100), nullable=False)
def reset():
Base.metadata.drop_all()
Base.metadata.create_all()
    print('--- reset done ---')
def query():
    # query the distinct titles
#article = session.query(Aticle.title).distinct().all()
#update
#session.query(Aticle).filter(Aticle.id==1).update({"title":'updated title'})
#delete
# session.query(Aticle).filter(Aticle.id==1).delete()
import time
start = time.time()
articles = session.query(func.count(Aticle.id)).all()
end = time.time()
    print('%f' % (end-start))  # with index: 0.028020
    # without index: 0.388275
print(articles)
if __name__ == '__main__':
# reset()
query()
| [
"[email protected]"
] | |
079d5a036fab015463d7d3b7689f744e6925af06 | 2781ffdb7dd131c43d5777d33ee002643c839c28 | /WebScraping/DatasetScriping.py | 949a2f2d7b0afe8876a8d4464e6de15688ba91fb | [] | no_license | AbuBakkar32/Python-Essential-Practice | b5e820d2e27e557b04848b5ec63dd78ae5b554c4 | 8659cf5652441d32476dfe1a8d90184a9ae92b3b | refs/heads/master | 2022-11-13T23:07:16.829075 | 2020-06-27T18:46:52 | 2020-06-27T18:46:52 | 275,432,093 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | import requests
from bs4 import BeautifulSoup
# Fetch the data.gov landing page and read the dataset counters from the
# <small> elements; skip any <small> tag without an <a> child.
link = requests.get('https://www.data.gov/')
soup = BeautifulSoup(link.text, 'html.parser')
for small_tag in soup.find_all('small'):
    anchor = small_tag.find('a')
    if anchor is None:
        continue
    print('This website has: {}'.format(anchor.text))
| [
"[email protected]"
] | |
74de4d362f296692eacb457593fecbd74f4e209d | 214bfc26b3982c5c662b50a35756d7070e1664a2 | /initial/lm/trainAttention/ARCHIVE_September_2021/matchData_EYE.py | dc65bdc172a3dff3c1273fc2132c6cade23b6b2c | [] | no_license | m-hahn/forgetting-model | cf3f51e40fb53a49e9dfa0c648cd296803eec7ed | 4ee0f06c6cd01ffff579cfbca67287cc32fac66c | refs/heads/master | 2022-09-03T02:29:56.607656 | 2022-08-27T13:51:36 | 2022-08-27T13:51:36 | 252,456,546 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,054 | py | import os
import sys
import random
from collections import defaultdict
with open("/u/scr/mhahn/Dundee/DundeeTreebankTokenized.csv", "r") as inFile:
dundee = [x.split("\t") for x in inFile.read().strip().split("\n")]
header = dundee[0]
header = dict(zip(header, list(range(len(header)))))
dundee = dundee[1:]
calibrationSentences = []
for i in range(len(dundee)):
line = dundee[i]
Itemno, WNUM, SentenceID, ID, WORD, Token = line
SentenceID = (SentenceID)
if i == 0 or SentenceID != dundee[i-1][header["SentenceID"]]:
calibrationSentences.append([])
print(SentenceID, dundee[i-1][header["SentenceID"]])
if i > 0 and SentenceID == dundee[i-1][header["SentenceID"]] and ID == dundee[i-1][header["ID"]]:
continue
else:
calibrationSentences[-1].append((WORD.strip(".").strip(",").strip("?").strip(":").strip(";").replace("’", "'").strip("!").lower(), line))
# else:
if True:
numberOfSamples = 12
with open("analyze_EYE/"+__file__+".tsv", "w") as outFile:
# print("\t".join([str(w) for w in [sentenceID, regions[i], remainingInput[i][0]] + remainingInput[i][1] ]), file=outFile) #, file=outFile)
# Itemno, WNUM, SentenceID, ID, WORD, Token = line
print("\t".join(["Sentence", "Region", "Word", "Itemno", "WNUM", "SentenceID", "ID", "WORD", "Token"]), file=outFile)
for sentenceID in range(len(calibrationSentences)):
sentence = calibrationSentences[sentenceID] #.lower().replace(".", "").replace(",", "").replace("n't", " n't").split(" ")
print(sentence)
context = sentence[0]
remainingInput = sentence[1:]
regions = range(len(sentence))
print("INPUT", context, remainingInput)
if len(sentence) < 2:
continue
assert len(remainingInput) > 0
for i in range(len(remainingInput)):
print("\t".join([str(w) for w in [sentenceID, regions[i], remainingInput[i][0]] + remainingInput[i][1] ]), file=outFile) #, file=outFile)
| [
"[email protected]"
] | |
9648e10ec3c1b48598944069a8af287b4bf97fe1 | 2a1cbd7570ade00e6efb5c39ca9246f05c7f500b | /21/00/0.py | 9db06c49841196fdd6336ffd86da7b9713ae110a | [
"CC0-1.0"
] | permissive | pylangstudy/201707 | dd5faf446cb0cc3d95f7f9db30f47e0f15400258 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | refs/heads/master | 2020-12-03T02:19:42.341198 | 2017-07-31T00:11:35 | 2017-07-31T00:11:35 | 95,927,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | from abc import ABCMeta
class MyABC(metaclass=ABCMeta): pass
MyABC.register(tuple)
assert issubclass(tuple, MyABC)
assert isinstance((), MyABC)
| [
"[email protected]"
] | |
ccf77c6dbb63b159be375a0549f4330ed4b77ada | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_phalanges.py | 95a07958093f090cc6658575ecaed636093c7c0e | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py |
from xai.brain.wordbase.nouns._phalanx import _PHALANX
#calss header
class _PHALANGES(_PHALANX, ):
def __init__(self,):
_PHALANX.__init__(self)
self.name = "PHALANGES"
self.specie = 'nouns'
self.basic = "phalanx"
self.jsondata = {}
| [
"[email protected]"
] | |
df3851986643d89986f27702969b422b0c5e4585 | 17d05858112c2aa73feab0c054457ce9808797df | /setup.py | 6bcb2919589ac0e9e4988473f5290cdcb93a9395 | [] | no_license | invisibleroads/invisibleroads-macros-descriptor | ffad4b56c1b9191131a1d85e16a82ea19801ca30 | de428ead5cea757ea2f800ace556ded069e53586 | refs/heads/master | 2023-08-11T17:33:00.458008 | 2020-11-30T03:48:54 | 2020-11-30T03:48:54 | 248,628,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | from os.path import abspath, dirname, join
from setuptools import find_packages, setup
ENTRY_POINTS = '''
'''
APP_CLASSIFIERS = [
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
]
APP_REQUIREMENTS = [
]
TEST_REQUIREMENTS = [
'pytest',
'pytest-cov',
]
FOLDER = dirname(abspath(__file__))
DESCRIPTION = '\n\n'.join(open(join(FOLDER, x)).read().strip() for x in [
'README.md', 'CHANGES.md'])
setup(
name='invisibleroads-macros-descriptor',
version='1.0.2',
description='Shortcut functions for descriptor operations',
long_description=DESCRIPTION,
long_description_content_type='text/markdown',
classifiers=APP_CLASSIFIERS,
author='Roy Hyunjin Han',
author_email='[email protected]',
url=(
'https://github.com/invisibleroads/'
'invisibleroads-macros-descriptor'),
keywords='invisibleroads',
packages=find_packages(),
include_package_data=True,
zip_safe=True,
extras_require={'test': TEST_REQUIREMENTS},
install_requires=APP_REQUIREMENTS,
entry_points=ENTRY_POINTS)
| [
"[email protected]"
] | |
d62ec5ded84e385a5a6f52da4618333cd3e5cec7 | 037877a31670a85fa78b61df9ceabe981cfdfbf6 | /sympy/concrete/gosper.py | 76f439ce5d72929e5d79906a18fcaefc30adc036 | [] | no_license | certik/sympy_gamma | 6343b02e5d6d1c7d511a3329bbbd27cd11cd7ec8 | b0e555ca03f8476533cb1c19575f4461533837de | refs/heads/master | 2020-12-25T03:52:40.132034 | 2010-02-15T08:02:31 | 2010-02-15T08:02:31 | 344,391 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,661 | py | """
"""
from sympy.core.basic import Basic, S
from sympy.core.symbol import Symbol
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core import sympify
from sympy.polys import gcd, quo, roots, resultant
def normal(f, g, n=None):
"""Given relatively prime univariate polynomials 'f' and 'g',
rewrite their quotient to a normal form defined as follows:
       f(n)     A(n) C(n+1)
       ---- = Z -----------
       g(n)     B(n) C(n)
where Z is arbitrary constant and A, B, C are monic
polynomials in 'n' with follwing properties:
(1) gcd(A(n), B(n+h)) = 1 for all 'h' in N
(2) gcd(B(n), C(n+1)) = 1
(3) gcd(A(n), C(n)) = 1
This normal form, or rational factorization in other words,
is crucial step in Gosper's algorithm and in difference
equations solving. It can be also used to decide if two
hypergeometric are similar or not.
This procedure will return return triple containig elements
of this factorization in the form (Z*A, B, C). For example:
>>> from sympy import Symbol
>>> n = Symbol('n', integer=True)
>>> normal(4*n+5, 2*(4*n+1)*(2*n+3), n)
(1/4, 3/2 + n, 1/4 + n)
"""
f, g = map(sympify, (f, g))
p = f.as_poly(n)
q = g.as_poly(n)
a, p = p.LC, p.as_monic()
b, q = q.LC, q.as_monic()
A = p.as_basic()
B = q.as_basic()
C, Z = S.One, a / b
h = Symbol('h', dummy=True)
res = resultant(A, B.subs(n, n+h), n)
nni_roots = roots(res, h, domain='Z',
predicate=lambda r: r >= 0).keys()
if not nni_roots:
return (f, g, S.One)
else:
for i in sorted(nni_roots):
d = gcd(A, B.subs(n, n+i), n)
A = quo(A, d, n)
B = quo(B, d.subs(n, n-i), n)
C *= Mul(*[ d.subs(n, n-j) for j in xrange(1, i+1) ])
return (Z*A, B, C)
def gosper(term, k, a, n):
    from sympy.solvers import rsolve_poly
    from sympy.simplify import simplify
    # Gosper's algorithm applies only to hypergeometric terms, i.e. terms
    # whose ratio term(k+1)/term(k) is a rational function of k. The two
    # lines below are a minimal completion of this otherwise unfinished
    # function; is_rational_function assumes a sympy version providing it.
    expr = simplify(term.subs(k, k + 1)/term)
    hyper = expr.is_rational_function(k)
    if not hyper:
        return None
    else:
        p, q = expr.as_numer_denom()
A, B, C = normal(p, q, k)
B = B.subs(k, k-1)
R = rsolve_poly([-B, A], C, k)
symbol = []
if not (R is None or R is S.Zero):
if symbol != []:
symbol = symbol[0]
W = R.subs(symbol, S.Zero)
if W is S.Zero:
R = R.subs(symbol, S.One)
else:
R = W
Z = B*R*term/C
return simplify(Z.subs(k, n+1) - Z.subs(k, a))
else:
return None
| [
"[email protected]"
] | |
64d9dc0646e47648a14378c7f64e13b43bf794aa | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/fc/receivefeccountershist.py | 0e8130b82af1eba3932723f59a0bef420d485acf | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,036 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ReceiveFecCountersHist(Mo):
meta = StatsClassMeta("cobra.model.fc.ReceiveFecCountersHist", "Fec counters")
counter = CounterMeta("uncorrected", CounterCategory.COUNTER, "frames", "Uncorrected counters")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "uncorrectedCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "uncorrectedPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "uncorrectedMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "uncorrectedMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "uncorrectedAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "uncorrectedSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "uncorrectedThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "uncorrectedTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "uncorrectedRate"
meta._counters.append(counter)
counter = CounterMeta("corrected", CounterCategory.COUNTER, "frames", "Corrected counters")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "correctedCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "correctedPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "correctedMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "correctedMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "correctedAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "correctedSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "correctedThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "correctedTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "correctedRate"
meta._counters.append(counter)
meta.isAbstract = True
meta.moClassName = "fcReceiveFecCountersHist"
meta.moClassName = "fcReceiveFecCountersHist"
meta.rnFormat = ""
meta.category = MoCategory.STATS_HISTORY
meta.label = "historical Fec counters stats"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Hist")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist1mo")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist1h")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist1year")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist1d")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist1qtr")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist1w")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist5min")
meta.concreteSubClasses.add("cobra.model.fc.ReceiveFecCountersHist15min")
meta.rnPrefixes = [
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "correctedAvg", "correctedAvg", 45182, PropCategory.IMPLICIT_AVG)
prop.label = "Corrected counters average value"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedAvg", prop)
prop = PropMeta("str", "correctedCum", "correctedCum", 45178, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Corrected counters cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedCum", prop)
prop = PropMeta("str", "correctedMax", "correctedMax", 45181, PropCategory.IMPLICIT_MAX)
prop.label = "Corrected counters maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedMax", prop)
prop = PropMeta("str", "correctedMin", "correctedMin", 45180, PropCategory.IMPLICIT_MIN)
prop.label = "Corrected counters minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedMin", prop)
prop = PropMeta("str", "correctedPer", "correctedPer", 45179, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Corrected counters periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedPer", prop)
prop = PropMeta("str", "correctedRate", "correctedRate", 45186, PropCategory.IMPLICIT_RATE)
prop.label = "Corrected counters rate"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedRate", prop)
prop = PropMeta("str", "correctedSpct", "correctedSpct", 45183, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Corrected counters suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedSpct", prop)
prop = PropMeta("str", "correctedThr", "correctedThr", 45184, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Corrected counters thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("correctedThr", prop)
prop = PropMeta("str", "correctedTr", "correctedTr", 45185, PropCategory.IMPLICIT_TREND)
prop.label = "Corrected counters trend"
prop.isOper = True
prop.isStats = True
meta.props.add("correctedTr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "index", "index", 115, PropCategory.REGULAR)
prop.label = "History Index"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("index", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "uncorrectedAvg", "uncorrectedAvg", 45203, PropCategory.IMPLICIT_AVG)
prop.label = "Uncorrected counters average value"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedAvg", prop)
prop = PropMeta("str", "uncorrectedCum", "uncorrectedCum", 45199, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Uncorrected counters cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedCum", prop)
prop = PropMeta("str", "uncorrectedMax", "uncorrectedMax", 45202, PropCategory.IMPLICIT_MAX)
prop.label = "Uncorrected counters maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedMax", prop)
prop = PropMeta("str", "uncorrectedMin", "uncorrectedMin", 45201, PropCategory.IMPLICIT_MIN)
prop.label = "Uncorrected counters minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedMin", prop)
prop = PropMeta("str", "uncorrectedPer", "uncorrectedPer", 45200, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Uncorrected counters periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedPer", prop)
prop = PropMeta("str", "uncorrectedRate", "uncorrectedRate", 45207, PropCategory.IMPLICIT_RATE)
prop.label = "Uncorrected counters rate"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedRate", prop)
prop = PropMeta("str", "uncorrectedSpct", "uncorrectedSpct", 45204, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Uncorrected counters suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedSpct", prop)
prop = PropMeta("str", "uncorrectedThr", "uncorrectedThr", 45205, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Uncorrected counters thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("uncorrectedThr", prop)
prop = PropMeta("str", "uncorrectedTr", "uncorrectedTr", 45206, PropCategory.IMPLICIT_TREND)
prop.label = "Uncorrected counters trend"
prop.isOper = True
prop.isStats = True
meta.props.add("uncorrectedTr", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
bee39884f4e19d05af666738a1bb2b126d605f33 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/insights/v20170401/outputs.py | 7ca157a0564561fd13512aa45f207d8e08294d4e | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 21,889 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'ActivityLogAlertActionGroupResponse',
'ActivityLogAlertActionListResponse',
'ActivityLogAlertAllOfConditionResponse',
'ActivityLogAlertLeafConditionResponse',
'AutomationRunbookReceiverResponse',
'AzureAppPushReceiverResponse',
'EmailReceiverResponse',
'ItsmReceiverResponse',
'SmsReceiverResponse',
'WebhookReceiverResponse',
]
@pulumi.output_type
class ActivityLogAlertActionGroupResponse(dict):
"""
A pointer to an Azure Action Group.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "actionGroupId":
suggest = "action_group_id"
elif key == "webhookProperties":
suggest = "webhook_properties"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ActivityLogAlertActionGroupResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ActivityLogAlertActionGroupResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ActivityLogAlertActionGroupResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
action_group_id: str,
webhook_properties: Optional[Mapping[str, str]] = None):
"""
A pointer to an Azure Action Group.
:param str action_group_id: The resourceId of the action group. This cannot be null or empty.
:param Mapping[str, str] webhook_properties: the dictionary of custom properties to include with the post operation. These data are appended to the webhook payload.
"""
pulumi.set(__self__, "action_group_id", action_group_id)
if webhook_properties is not None:
pulumi.set(__self__, "webhook_properties", webhook_properties)
@property
@pulumi.getter(name="actionGroupId")
def action_group_id(self) -> str:
"""
The resourceId of the action group. This cannot be null or empty.
"""
return pulumi.get(self, "action_group_id")
@property
@pulumi.getter(name="webhookProperties")
def webhook_properties(self) -> Optional[Mapping[str, str]]:
"""
the dictionary of custom properties to include with the post operation. These data are appended to the webhook payload.
"""
return pulumi.get(self, "webhook_properties")
@pulumi.output_type
class ActivityLogAlertActionListResponse(dict):
"""
A list of activity log alert actions.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "actionGroups":
suggest = "action_groups"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ActivityLogAlertActionListResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ActivityLogAlertActionListResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ActivityLogAlertActionListResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
action_groups: Optional[Sequence['outputs.ActivityLogAlertActionGroupResponse']] = None):
"""
A list of activity log alert actions.
:param Sequence['ActivityLogAlertActionGroupResponse'] action_groups: The list of activity log alerts.
"""
if action_groups is not None:
pulumi.set(__self__, "action_groups", action_groups)
@property
@pulumi.getter(name="actionGroups")
def action_groups(self) -> Optional[Sequence['outputs.ActivityLogAlertActionGroupResponse']]:
"""
The list of activity log alerts.
"""
return pulumi.get(self, "action_groups")
@pulumi.output_type
class ActivityLogAlertAllOfConditionResponse(dict):
"""
An Activity Log alert condition that is met when all its member conditions are met.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "allOf":
suggest = "all_of"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ActivityLogAlertAllOfConditionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ActivityLogAlertAllOfConditionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ActivityLogAlertAllOfConditionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
all_of: Sequence['outputs.ActivityLogAlertLeafConditionResponse']):
"""
An Activity Log alert condition that is met when all its member conditions are met.
:param Sequence['ActivityLogAlertLeafConditionResponse'] all_of: The list of activity log alert conditions.
"""
pulumi.set(__self__, "all_of", all_of)
@property
@pulumi.getter(name="allOf")
def all_of(self) -> Sequence['outputs.ActivityLogAlertLeafConditionResponse']:
"""
The list of activity log alert conditions.
"""
return pulumi.get(self, "all_of")
@pulumi.output_type
class ActivityLogAlertLeafConditionResponse(dict):
"""
An Activity Log alert condition that is met by comparing an activity log field and value.
"""
def __init__(__self__, *,
equals: str,
field: str):
"""
An Activity Log alert condition that is met by comparing an activity log field and value.
:param str equals: The field value will be compared to this value (case-insensitive) to determine if the condition is met.
:param str field: The name of the field that this condition will examine. The possible values for this field are (case-insensitive): 'resourceId', 'category', 'caller', 'level', 'operationName', 'resourceGroup', 'resourceProvider', 'status', 'subStatus', 'resourceType', or anything beginning with 'properties.'.
"""
pulumi.set(__self__, "equals", equals)
pulumi.set(__self__, "field", field)
@property
@pulumi.getter
def equals(self) -> str:
"""
The field value will be compared to this value (case-insensitive) to determine if the condition is met.
"""
return pulumi.get(self, "equals")
@property
@pulumi.getter
def field(self) -> str:
"""
The name of the field that this condition will examine. The possible values for this field are (case-insensitive): 'resourceId', 'category', 'caller', 'level', 'operationName', 'resourceGroup', 'resourceProvider', 'status', 'subStatus', 'resourceType', or anything beginning with 'properties.'.
"""
return pulumi.get(self, "field")
@pulumi.output_type
class AutomationRunbookReceiverResponse(dict):
"""
The Azure Automation Runbook notification receiver.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "automationAccountId":
suggest = "automation_account_id"
elif key == "isGlobalRunbook":
suggest = "is_global_runbook"
elif key == "runbookName":
suggest = "runbook_name"
elif key == "webhookResourceId":
suggest = "webhook_resource_id"
elif key == "serviceUri":
suggest = "service_uri"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AutomationRunbookReceiverResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AutomationRunbookReceiverResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AutomationRunbookReceiverResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
automation_account_id: str,
is_global_runbook: bool,
runbook_name: str,
webhook_resource_id: str,
name: Optional[str] = None,
service_uri: Optional[str] = None):
"""
The Azure Automation Runbook notification receiver.
:param str automation_account_id: The Azure automation account Id which holds this runbook and authenticate to Azure resource.
:param bool is_global_runbook: Indicates whether this instance is global runbook.
:param str runbook_name: The name for this runbook.
:param str webhook_resource_id: The resource id for webhook linked to this runbook.
:param str name: Indicates name of the webhook.
:param str service_uri: The URI where webhooks should be sent.
"""
pulumi.set(__self__, "automation_account_id", automation_account_id)
pulumi.set(__self__, "is_global_runbook", is_global_runbook)
pulumi.set(__self__, "runbook_name", runbook_name)
pulumi.set(__self__, "webhook_resource_id", webhook_resource_id)
if name is not None:
pulumi.set(__self__, "name", name)
if service_uri is not None:
pulumi.set(__self__, "service_uri", service_uri)
@property
@pulumi.getter(name="automationAccountId")
def automation_account_id(self) -> str:
"""
The Azure automation account Id which holds this runbook and authenticate to Azure resource.
"""
return pulumi.get(self, "automation_account_id")
@property
@pulumi.getter(name="isGlobalRunbook")
def is_global_runbook(self) -> bool:
"""
Indicates whether this instance is global runbook.
"""
return pulumi.get(self, "is_global_runbook")
@property
@pulumi.getter(name="runbookName")
def runbook_name(self) -> str:
"""
The name for this runbook.
"""
return pulumi.get(self, "runbook_name")
@property
@pulumi.getter(name="webhookResourceId")
def webhook_resource_id(self) -> str:
"""
The resource id for webhook linked to this runbook.
"""
return pulumi.get(self, "webhook_resource_id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Indicates name of the webhook.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="serviceUri")
def service_uri(self) -> Optional[str]:
"""
The URI where webhooks should be sent.
"""
return pulumi.get(self, "service_uri")
@pulumi.output_type
class AzureAppPushReceiverResponse(dict):
"""
The Azure mobile App push notification receiver.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "emailAddress":
suggest = "email_address"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureAppPushReceiverResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureAppPushReceiverResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureAppPushReceiverResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
email_address: str,
name: str):
"""
The Azure mobile App push notification receiver.
:param str email_address: The email address registered for the Azure mobile app.
:param str name: The name of the Azure mobile app push receiver. Names must be unique across all receivers within an action group.
"""
pulumi.set(__self__, "email_address", email_address)
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="emailAddress")
def email_address(self) -> str:
"""
The email address registered for the Azure mobile app.
"""
return pulumi.get(self, "email_address")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the Azure mobile app push receiver. Names must be unique across all receivers within an action group.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class EmailReceiverResponse(dict):
"""
An email receiver.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "emailAddress":
suggest = "email_address"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in EmailReceiverResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
EmailReceiverResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
EmailReceiverResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
email_address: str,
name: str,
status: str):
"""
An email receiver.
:param str email_address: The email address of this receiver.
:param str name: The name of the email receiver. Names must be unique across all receivers within an action group.
:param str status: The receiver status of the e-mail.
"""
pulumi.set(__self__, "email_address", email_address)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="emailAddress")
def email_address(self) -> str:
"""
The email address of this receiver.
"""
return pulumi.get(self, "email_address")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the email receiver. Names must be unique across all receivers within an action group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def status(self) -> str:
"""
The receiver status of the e-mail.
"""
return pulumi.get(self, "status")
@pulumi.output_type
class ItsmReceiverResponse(dict):
"""
An Itsm receiver.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "connectionId":
suggest = "connection_id"
elif key == "ticketConfiguration":
suggest = "ticket_configuration"
elif key == "workspaceId":
suggest = "workspace_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ItsmReceiverResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ItsmReceiverResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ItsmReceiverResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
connection_id: str,
name: str,
region: str,
ticket_configuration: str,
workspace_id: str):
"""
An Itsm receiver.
:param str connection_id: Unique identification of ITSM connection among multiple defined in above workspace.
:param str name: The name of the Itsm receiver. Names must be unique across all receivers within an action group.
:param str region: Region in which workspace resides. Supported values:'centralindia','japaneast','southeastasia','australiasoutheast','uksouth','westcentralus','canadacentral','eastus','westeurope'
:param str ticket_configuration: JSON blob for the configurations of the ITSM action. CreateMultipleWorkItems option will be part of this blob as well.
:param str workspace_id: OMS LA instance identifier.
"""
pulumi.set(__self__, "connection_id", connection_id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "region", region)
pulumi.set(__self__, "ticket_configuration", ticket_configuration)
pulumi.set(__self__, "workspace_id", workspace_id)
@property
@pulumi.getter(name="connectionId")
def connection_id(self) -> str:
"""
Unique identification of ITSM connection among multiple defined in above workspace.
"""
return pulumi.get(self, "connection_id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the Itsm receiver. Names must be unique across all receivers within an action group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def region(self) -> str:
"""
Region in which workspace resides. Supported values:'centralindia','japaneast','southeastasia','australiasoutheast','uksouth','westcentralus','canadacentral','eastus','westeurope'
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="ticketConfiguration")
def ticket_configuration(self) -> str:
"""
JSON blob for the configurations of the ITSM action. CreateMultipleWorkItems option will be part of this blob as well.
"""
return pulumi.get(self, "ticket_configuration")
@property
@pulumi.getter(name="workspaceId")
def workspace_id(self) -> str:
"""
OMS LA instance identifier.
"""
return pulumi.get(self, "workspace_id")
@pulumi.output_type
class SmsReceiverResponse(dict):
"""
An SMS receiver.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "countryCode":
suggest = "country_code"
elif key == "phoneNumber":
suggest = "phone_number"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SmsReceiverResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SmsReceiverResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SmsReceiverResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
country_code: str,
name: str,
phone_number: str,
status: str):
"""
An SMS receiver.
:param str country_code: The country code of the SMS receiver.
:param str name: The name of the SMS receiver. Names must be unique across all receivers within an action group.
:param str phone_number: The phone number of the SMS receiver.
:param str status: The status of the receiver.
"""
pulumi.set(__self__, "country_code", country_code)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "phone_number", phone_number)
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="countryCode")
def country_code(self) -> str:
"""
The country code of the SMS receiver.
"""
return pulumi.get(self, "country_code")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the SMS receiver. Names must be unique across all receivers within an action group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="phoneNumber")
def phone_number(self) -> str:
"""
The phone number of the SMS receiver.
"""
return pulumi.get(self, "phone_number")
@property
@pulumi.getter
def status(self) -> str:
"""
The status of the receiver.
"""
return pulumi.get(self, "status")
@pulumi.output_type
class WebhookReceiverResponse(dict):
"""
A webhook receiver.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "serviceUri":
suggest = "service_uri"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in WebhookReceiverResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
WebhookReceiverResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
WebhookReceiverResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
name: str,
service_uri: str):
"""
A webhook receiver.
:param str name: The name of the webhook receiver. Names must be unique across all receivers within an action group.
:param str service_uri: The URI where webhooks should be sent.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "service_uri", service_uri)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the webhook receiver. Names must be unique across all receivers within an action group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="serviceUri")
def service_uri(self) -> str:
"""
The URI where webhooks should be sent.
"""
return pulumi.get(self, "service_uri")
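# A minimal usage sketch (not part of the generated SDK; the receiver values
# below are hypothetical). These output types subclass dict and, assuming the
# usual pulumi output_type behaviour, store values under the wire-format
# camelCase keys, so dict-style access still works but routes through
# __key_warning, which logs a hint pointing at the snake_case property getter:
#
#   receiver = WebhookReceiverResponse(name="ops-hook",
#                                      service_uri="https://example.invalid/hook")
#   receiver.service_uri      # preferred access via the property getter
#   receiver["serviceUri"]    # still works, but emits a pulumi.log.warn hint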
| [
"[email protected]"
] | |
81efde228ef6de285080c846e3a219b46e2322e7 | d5a4fe6a8d466dce3c131c9678e86398b17b27c5 | /Finite Difference Computing with Exponential Decay_HPL/SF_FDCED/softeng/decay_flat.py | 26557b62d82049ce8043028498525738a4bb08ef | [] | no_license | AaronCHH/B_PYTHON_Numerical | d8fe3ec843dc1b1b35a407134980027ac5516721 | d369d73f443cc9afdb0a10912b539bc8c48f7d4a | refs/heads/master | 2021-01-19T10:14:57.903055 | 2017-06-21T05:21:21 | 2017-06-21T05:21:21 | 87,845,435 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | from numpy import *
from matplotlib.pyplot import *
I = 1
a = 2
T = 4
dt = 0.2
Nt = int(round(T/dt)) # no of time intervals
u = zeros(Nt+1) # array of u[n] values
t = linspace(0, T, Nt+1) # time mesh
theta = 1 # Backward Euler method
u[0] = I # assign initial condition
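# The update below is the theta-rule applied to u' = -a*u:
#   (u[n+1] - u[n])/dt = -a*(theta*u[n+1] + (1-theta)*u[n])
# Solving for u[n+1] gives the amplification factor used in the loop,
#   u[n+1] = (1 - (1-theta)*a*dt)/(1 + theta*a*dt) * u[n],
# with theta=0 Forward Euler, theta=1 Backward Euler, theta=0.5 Crank-Nicolson.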
for n in range(0, Nt): # n=0,1,...,Nt-1
u[n+1] = (1 - (1-theta)*a*dt)/(1 + theta*dt*a)*u[n]
# Compute norm of the error
u_e = I*exp(-a*t)        # exact u at the mesh points
error = u_e - u
E = sqrt(dt*sum(error**2))
print('Norm of the error: %.3E' % E)
# Compare numerical (u) and exact solution (u_e) in a plot
plot(t, u, 'r--o')
t_e = linspace(0, T, 1001) # very fine mesh for u_e
u_e = I*exp(-a*t_e)
plot(t_e, u_e, 'b-')
legend(['numerical, theta=%g' % theta, 'exact'])
xlabel('t')
ylabel('u')
show()
| [
"[email protected]"
] | |
8a569eaf12f58d883faecc822a171a6d31d01b88 | de9fe69314068eea0e66500144c382de149e1882 | /backend/bruhcult_18193/wsgi.py | f972466a75ac627b92d1a22c6584755e6c690ba6 | [] | no_license | crowdbotics-apps/bruhcult-18193 | 35d7ee7c14fb92ae8d52b9f4a5688ef725421735 | c82fd627b8e28155292a073e2c1fd8d2422cf72f | refs/heads/master | 2022-11-08T12:18:49.426568 | 2020-06-18T19:15:10 | 2020-06-18T19:15:10 | 273,281,748 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for bruhcult_18193 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bruhcult_18193.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
aa609bf28d3a0cdc57bab21c53b69728615ad8d5 | 93e55f080779f16f47a7382a3fb0b29a4189e074 | /convertor/huawei/te/tik/tik_lib/tik_conf_.py | 8a618ab5b76cc48f38139fb0c205cb300d15e6f9 | [] | no_license | jizhuoran/caffe-huawei-atlas-convertor | b00cfdec3888da3bb18794f52a41deea316ada67 | 148511a31bfd195df889291946c43bb585acb546 | refs/heads/master | 2022-11-25T13:59:45.181910 | 2020-07-31T07:37:02 | 2020-07-31T07:37:02 | 283,966,371 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 10,844 | py | """
Copyright (R) @huawei.com, all rights reserved
-*- coding:utf-8 -*-
FILE: tik_conf_.py
DESC: configuration of tik
CREATED: 2019-04-18 18:53:42
MODIFIED: 2019-07-25 09:59:30
"""
from te.platform.cce_conf import te_set_version
from te.platform.cce_conf import cceProduct
from te.platform.cce_conf import get_soc_spec
from te.platform.cce_params import scope_cbuf
from te.platform.cce_params import scope_ubuf
from te.platform.cce_params import scope_ca
from te.platform.cce_params import scope_cb
from te.platform.cce_params import scope_cc
from te.platform.cce_params import scope_smask
from te.platform.cce_params import AIC
from te.platform.cce_params import HI3796CV300ESAIC
from te.platform.cce_params import VEC
from te.platform.cce_params import ASCEND_310AIC
from te.platform.cce_params import ASCEND_910AIC
from .tik_params import KB_SIZE, L1_BUFFER, UB_BUFFER,\
L0A_BUFFER, L0B_BUFFER, LOC_BUFFER, SMASK_BUFFER, \
AI_CORE_VERSION_MAP_TO_PRODUCT
from .tik_source_info import TikSourceInfo, source_info_decorator
from .tik_check_util import TikCheckUtil
from ..common.tik_get_soc_name import get_soc_name
from ..common.tik_get_soc_name import get_soc_core_type
def _get_buffers_parameter(buffer_arch_list):
"""
    According to the D AI Core information, the buffer parameters can be determined.
    Parameters
    ----------
    buffer_arch_list: D AI Core buffer information
Returns
----------
return:buffers_map
buffers_map: L0A/B/C,L1 and UB buffer memory size
"""
buffer_names = [L1_BUFFER, UB_BUFFER,
L0A_BUFFER, L0B_BUFFER, LOC_BUFFER, SMASK_BUFFER]
buffer_arch = {}
for buffer_params in buffer_arch_list:
for buffer_name in buffer_names:
if buffer_params.find(buffer_name) == 0:
buffer_paras = buffer_params.split(' ')
if buffer_name == SMASK_BUFFER:
buffer_size = buffer_paras[-1].split("B")[0]
buffer_arch[buffer_name] = int(buffer_size)
else:
buffer_size = buffer_paras[-1].split("KB")[0]
buffer_arch[buffer_name] = int(buffer_size)*KB_SIZE
return buffer_arch
class Dprofile1():
"""
ai_core profile explanation
"""
_SMASK_MAP = {
AIC: 256,
VEC: 0,
HI3796CV300ESAIC: 256,
ASCEND_310AIC: 0,
ASCEND_910AIC: 0
}
def __init__(self, ai_core_arch=None, ai_core_version=None, ddk_version=None):
"""
ai_core profile initialization
Parameters
----------
ai_core_arch : ai_core architecture
ai_core_version: ai_core version
Returns
-------
"""
# need TikSourceInfo() init function to set source_info None first
self.source_info = TikSourceInfo()
self.source_info.register_source_info(depth=2)
if ddk_version is not None:
self.ddk_version = ddk_version
cceProduct(self.ddk_version)
elif SetProductFlag.is_set_product_version or \
(ai_core_arch is None or ai_core_version is None):
# ge do the init!
pass
else:
TikCheckUtil.check_type_match(ai_core_arch, str,
"ai_core_arch should be str")
TikCheckUtil.check_type_match(ai_core_version, str,
"ai_core_version should be str")
self.ai_core_arch = ai_core_arch.lower()
self.ai_core_version = ai_core_version.lower()
# version
self.ddk_version = _gen_version(self.ai_core_arch,
self.ai_core_version)
cceProduct(self.ddk_version)
# we will use cce product params to represent ai core arch and version.
# map for save device buffer info
self.ai_core_buffer = {}
self.registe()
self.source_info.clear_source_info()
def registe(self):
"""
ai_core register configure
Parameters
----------
Returns
-------
return:no return
"""
# get the device buffer info
_smask_map = {
AIC: 256,
VEC: 0,
HI3796CV300ESAIC: 256,
ASCEND_310AIC: 0,
ASCEND_910AIC: 0
}
l1_buffer = _get_l1_size()
ub_buffer = _get_ub_size()
l0a_buffer = _get_l0a_size()
l0b_buffer = _get_l0b_size()
l0c_buffer = _get_l0c_size()
smask_buffer = _smask_map[get_soc_name() + get_soc_core_type()]
key_name = get_soc_name() + get_soc_core_type()
# save the device buffer info into the map
self.ai_core_buffer[key_name] = ["L1_Buffer: " +
str(l1_buffer // KB_SIZE) + "KB",
"Unified_Buffer: " +
str(ub_buffer // KB_SIZE) + "KB",
"L0A_Buffer: " +
str(l0a_buffer // KB_SIZE) + "KB",
"L0B_Buffer: " +
str(l0b_buffer // KB_SIZE) + "KB",
"L0C_Buffer: " +
str(l0c_buffer // KB_SIZE) + "KB",
"SMASK_Buffer: " +
str(smask_buffer) + "B"
]
@source_info_decorator()
def get_aicore_num(self):
"""
return ai_core number for specify ai core
Parameters
----------
Returns
-------
return:ai_core number
"""
return get_soc_spec("CORE_NUM")
def buffer_size_query(self, buffer_scope=None):
"""
        according to the AI core params, get the buffer params,
        for example: L0A/B/C, L1, UB buffer
        Parameters
        ----------
        buffer_scope: one of scope_cbuf, scope_ubuf, scope_ca,
                      scope_cb, scope_cc, scope_smask
Returns
-------
return:ai_core buffer params
"""
key_map = {scope_cbuf: L1_BUFFER,
scope_ubuf: UB_BUFFER,
scope_ca: L0A_BUFFER,
scope_cb: L0B_BUFFER,
scope_cc: LOC_BUFFER,
scope_smask: SMASK_BUFFER}
key_name = get_soc_name() + get_soc_core_type()
buffer_arch_list = self.ai_core_buffer[key_name]
buffer_arch = _get_buffers_parameter(buffer_arch_list)
if buffer_scope is None:
buffer_map = {scope_cbuf: 0, scope_ubuf: 0, scope_ca: 0,
scope_cb: 0, scope_cc: 0, scope_smask: 0}
for scope in key_map:
buffer_map[scope] = buffer_arch[key_map[scope]]
return buffer_map
TikCheckUtil.check_in_range(
buffer_scope, key_map.keys(), "buffer_scope value is not correct!")
return buffer_arch[key_map[buffer_scope]]
@source_info_decorator()
def get_l1_buffer_size(self):
"""
return l1 buffer size for specify ai core
Parameters
----------
Returns
-------
return:l1_buffer_size
"""
return _get_l1_size()
@source_info_decorator()
def get_l0a_buffer_size(self):
"""
return l0a_buffer buffer size for specify ai core
Parameters
----------
Returns
-------
return:l0a_buffer_size
"""
return _get_l0a_size()
@source_info_decorator()
def get_l0b_buffer_size(self):
"""
return l0b_buffer buffer size for specify ai core
Parameters
----------
Returns
-------
return:l0a_buffer_size
"""
return _get_l0b_size()
@source_info_decorator()
def get_l0c_buffer_size(self):
"""
return l0c_buffer buffer size for specify ai core
Parameters
----------
Returns
-------
return:l0c_buffer_size
"""
return _get_l0c_size()
@source_info_decorator()
def get_product_name(self):
"""
return product_name for specify ai core
Parameters
----------
Returns
-------
return:product_name
"""
return _get_product_name()
def _get_product_name():
"""
    Return the product name for the current SoC.
    Parameters
    ----------
    None
    Returns
    -------
    return: the product entry mapped from the current SoC name and core type
"""
product_name = get_soc_name() + get_soc_core_type()
if product_name in AI_CORE_VERSION_MAP_TO_PRODUCT:
return AI_CORE_VERSION_MAP_TO_PRODUCT[product_name]
return TikCheckUtil.raise_error(
"Not valid product version for tik:" + product_name)
def set_product_version(version):
"""set version info
Parameters
----------
version : str
product name
format: <major>.<middle>.<minor>.<point>
major: 1xx or 2xx or 3xx
"""
# need TikSourceInfo() init function to set source_info None first
TikSourceInfo().register_source_info()
te_set_version(version)
SetProductFlag.is_set_product_version = True
TikSourceInfo.clear_source_info()
def unset_product_version():
"""unset SetProductFlag to false
"""
# need TikSourceInfo() init function to set source_info None first
TikSourceInfo().register_source_info()
SetProductFlag.is_set_product_version = False
TikSourceInfo.clear_source_info()
class SetProductFlag():
"""
use to show whether has set product version
"""
is_set_product_version = False
def __str__(self):
pass
def __hash__(self):
pass
def _get_ub_size():
return get_soc_spec("UB_SIZE")
def _get_l0a_size():
return get_soc_spec("L0A_SIZE")
def _get_l0b_size():
return get_soc_spec("L0B_SIZE")
def _get_l0c_size():
return get_soc_spec("L0C_SIZE")
def _get_l1_size():
return get_soc_spec("L1_SIZE")
def _gen_version(arch, version):
"""
gen version
:param arch:
:param version:
:return: string ddk version
"""
if arch == "v100":
if version == "mini":
return "1.1.xxx.xxx"
if version == "cloud":
return "1.60.xxx.xxx"
TikCheckUtil.raise_error("Error chip!")
elif arch == "v200":
if version == "aic":
return "2.10.xxx.xxx"
if version in ("hisi-es", "hisi-cs"):
return "5.10.xxx.xxx"
if version == "vec":
return "3.20.xxx.xxx"
TikCheckUtil.raise_error("Error chip!")
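# A minimal usage sketch (commented out; assumes the Huawei te/tbe runtime is
# installed and that the arch/version strings are supported by _gen_version):
#
#   profile = Dprofile1("v100", "mini")   # maps to ddk version 1.1.xxx.xxx
#   profile.get_aicore_num()              # core count via get_soc_spec
#   profile.buffer_size_query()           # dict keyed by the scope_* constants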
| [
"[email protected]"
] | |
460b60c4a8cdb5b8753f5e3e2789318b3fea73b4 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /jmZe7R4ibXkrQbogr_0.py | 46152937d3897c2b43d87c5e4b43b440156a655f | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 847 | py | """
Write a **regular expression** that checks to see if a password is valid. For
a password to be valid, it must meet the following requirments:
1. The password must contain at least one uppercase character.
2. The password must contain at least one lowercase character.
3. The password must contain at least one number.
4. The password must contain at least one special character `! ? * #`
5. The password must be at least 8 characters in length.
### Examples
"Password*12" ➞ True
"passWORD12!" ➞ True
"Pass" ➞ False
### Notes
* The lowercase char, uppercase char, special char, and number can appear at any part of the password.
* **You will only be writing a regular expression; do not write a function.**
"""
import re
r="^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[!*#?])[A-Za-z\d!#*?]{8,}$"
| [
"[email protected]"
] | |
fa23bb740b0242e213687ec80b61153c25233105 | 1cadec31e3f5c71407a67b4676fdc9e1ab9891bf | /modoboa/extensions/admin/forms/__init__.py | 5bfbed71114c50c36aac8fb32d3ac51b92833677 | [
"ISC"
] | permissive | SonRiab/modoboa | 9b840b639e6e05a8e6145bffd45ddd1ce90a2b8b | 97db0811c089aa477b21f28f318ab631d20cf108 | refs/heads/master | 2020-12-25T09:09:27.286185 | 2014-08-13T09:58:22 | 2014-08-13T09:58:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | from .domain import DomainFormGeneral, DomainFormOptions, DomainForm
from .account import (
AccountFormGeneral, AccountFormMail, AccountPermissionsForm,
AccountForm
)
from .alias import AliasForm
from .forward import ForwardForm
from .import_ import ImportDataForm, ImportIdentitiesForm
from .export import ExportDataForm, ExportDomainsForm, ExportIdentitiesForm
__all__ = [
'DomainFormGeneral', 'DomainFormOptions', 'DomainForm',
'AccountFormGeneral', 'AccountFormMail', 'AccountPermissionsForm',
'AccountForm', 'AliasForm', 'ImportDataForm', 'ImportIdentitiesForm',
'ExportDataForm', 'ExportDomainsForm', 'ExportIdentitiesForm',
'ForwardForm'
]
| [
"[email protected]"
] | |
7e2e79ddfb16ee42977e799c3cf15480e2b87a9f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p04046/s526215955.py | 321044d84612a39406e8d438749ed1e56e37746f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 200 | py | m=10**9+7
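# Counts lattice paths from the top-left to the bottom-right of an h*w grid
# that avoid the bottom-left a*b block, mod 1e9+7:
#   answer = sum_{i=0}^{h-a-1} C(b-1+i, i) * C(w-b-1 + h-1-i, h-1-i)
# The first loop builds the i=0 term, C(w+h-b-2, h-1); the second advances
# term i-1 to term i via the ratio (b-1+i)*(h-i) / (i*(w+h-b-1-i)), with
# pow(x, m-2, m) (Fermat's little theorem) supplying the modular inverses.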
h,w,a,b=map(int,input().split())
d=c=1
for i in range(h-1):
d=c=c*(w+h-b-2-i)*pow(i+1,m-2,m)%m
for i in range(1,h-a):
c=c*(b-1+i)*(h-i)*pow(i*(w+h-b-1-i),m-2,m)%m
d+=c
print(d%m) | [
"[email protected]"
] | |
471f4a97647808b279d4134c0ea9d027ab9c2229 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /place/different_eye/new_time.py | c6abfe47f36c42f4f14c519a34724907fd677864 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py |
#! /usr/bin/env python
def way_or_child(str_arg):
week(str_arg)
print('case_or_week')
def week(str_arg):
print(str_arg)
if __name__ == '__main__':
way_or_child('last_government_and_next_fact')
| [
"[email protected]"
] | |
55bfa04f5f0c6a8178be290cfe54075f6133963e | e2969ce860cba03add07d3b6412397d337b8b0a1 | /algorithms/sorting/2_median_of_medians/solution/solution.py | af924bcc4a21f05025bc57bdf3318b334ac67747 | [] | no_license | AshRavi7/Exercises | cfe16ef1d7a248b1fead12e3fb4dd0b26205de07 | 0a2738771e03b36e353cc25edfc7d8b8a0e4b2fe | refs/heads/master | 2022-12-20T08:30:04.104476 | 2020-09-11T15:58:46 | 2020-09-11T15:58:46 | 290,986,365 | 0 | 0 | null | 2020-08-28T07:56:53 | 2020-08-28T07:56:52 | null | UTF-8 | Python | false | false | 36 | py | def median_of_medians(lst):
pass | [
"[email protected]"
] | |
ca7a2aa1310b5cf5fe2bc79a782b7d9bba4daaed | 46ac378de81b13359ceff567b8ec1b04cda1d4d5 | /luddite/app.py | dc79b95750eafccdf985da0f64d4caa7ea8b20f0 | [
"MIT"
] | permissive | ntoll/luddite | f54f8c4564590e57bdc97b7f3ee9847f7e940849 | e234a955db16391d9cd9b4cd97c214c954e5697d | refs/heads/master | 2021-01-11T22:10:52.120319 | 2017-01-23T09:53:29 | 2017-01-23T09:53:29 | 78,933,221 | 7 | 2 | null | 2017-01-20T20:46:05 | 2017-01-14T10:15:20 | Python | UTF-8 | Python | false | false | 871 | py | """
Luddite - the anti-browser. :-)
"""
import os
import logging
from luddite import __version__
from luddite.browser import Browser
def setup_logging():
"""
This could probably be more elegant.
"""
home = os.path.expanduser('~')
log_dir = os.path.join(home, 'luddite')
log_file = os.path.join(log_dir, 'luddite.log')
if not os.path.exists(log_dir):
os.makedirs(log_dir)
log_fmt = '%(asctime)s - %(name)s(%(funcName)s) %(levelname)s: %(message)s'
logging.basicConfig(filename=log_file, filemode='w', format=log_fmt,
level=logging.DEBUG)
print('Logging to {}'.format(log_file))
def run(urls):
setup_logging()
logging.info('Starting Luddite {}'.format(__version__))
logging.info(urls)
browser = Browser()
for url in urls:
browser.create_tab(url)
browser.mainloop()
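# Example invocation (hypothetical URLs):
#
#   run(['http://example.com', 'http://example.org'])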
| [
"[email protected]"
] | |
8f181a38345a7cd75b67289817a44b5871fe08f4 | 000d7a149b59b9e23ca31a34cd07ce62bb7bcae4 | /models/linktracker.py | 83e9e4d6130876b20bb212924e6914ad9e00e321 | [
"MIT"
] | permissive | Lumiare1/tyggbot | 37ce0f2d2a697b0e673b83b02eb032c43deced3a | 5a1e09504304f966f73344ca6ee27d7f4c20fc6e | refs/heads/master | 2021-01-16T17:59:14.656659 | 2015-10-04T11:03:45 | 2015-10-04T11:03:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,792 | py | import logging
import datetime
import pymysql
from urllib.parse import urlsplit
log = logging.getLogger('tyggbot')
class LinkTrackerLink:
@classmethod
def load(cls, cursor, url):
link = cls()
cursor.execute('SELECT * FROM `tb_link_data` WHERE `url`=%s', [url])
row = cursor.fetchone()
if row:
# We found a link matching this URL in the database!
link.id = row['id']
link.url = row['url']
link.times_linked = row['times_linked']
link.first_linked = row['first_linked']
link.last_linked = row['last_linked']
link.needs_sync = False
else:
# No link was found with this URL, create a new one!
link.id = -1
link.url = url
link.times_linked = 0
link.first_linked = datetime.datetime.now()
link.last_linked = datetime.datetime.now()
link.needs_sync = False
return link
def increment(self):
self.times_linked += 1
self.last_linked = datetime.datetime.now()
self.needs_sync = True
def sync(self, cursor):
_first_linked = self.first_linked.strftime('%Y-%m-%d %H:%M:%S')
_last_linked = self.last_linked.strftime('%Y-%m-%d %H:%M:%S')
if self.id == -1:
cursor.execute('INSERT INTO `tb_link_data` (`url`, `times_linked`, `first_linked`, `last_linked`) VALUES (%s, %s, %s, %s)',
[self.url, self.times_linked, _first_linked, _last_linked])
self.id = cursor.lastrowid
else:
cursor.execute('UPDATE `tb_link_data` SET `times_linked`=%s, `last_linked`=%s WHERE `id`=%s',
                           [self.times_linked, _last_linked, self.id])
        self.needs_sync = False
class LinkTracker:
def __init__(self, sqlconn):
self.sqlconn = sqlconn
self.links = {}
def add(self, url):
url_data = urlsplit(url)
if url_data.netloc[:4] == 'www.':
netloc = url_data.netloc[4:]
else:
netloc = url_data.netloc
if url_data.path.endswith('/'):
path = url_data.path[:-1]
else:
path = url_data.path
if len(url_data.query) > 0:
query = '?' + url_data.query
else:
query = ''
url = netloc + path + query
if url not in self.links:
self.links[url] = LinkTrackerLink.load(self.sqlconn.cursor(pymysql.cursors.DictCursor), url)
self.links[url].increment()
def sync(self):
self.sqlconn.autocommit(False)
cursor = self.sqlconn.cursor()
for link in [link for k, link in self.links.items() if link.needs_sync]:
link.sync(cursor)
cursor.close()
self.sqlconn.autocommit(True)
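# A minimal usage sketch (commented out; the connection parameters are
# hypothetical and a tb_link_data table must already exist):
#
#   conn = pymysql.connect(host='localhost', user='tyggbot', password='...',
#                          db='tyggbot', charset='utf8mb4')
#   tracker = LinkTracker(conn)
#   tracker.add('https://www.example.com/some/page/?q=1')  # normalises the URL
#   tracker.sync()                                         # flushes to MySQL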
| [
"[email protected]"
] | |
518672544e321b6bed89cfb0a084186f20f524b8 | 38422c3edeb269926502fed31a0761aff8dd3d3b | /Swanepoel_analysis/Swanepoel_analysis/config_files/ek036-4_config.py | 79e82e0ebd6416bc263e9bebbc4bb486c1ccf30f | [] | no_license | vfurtula/Alle-projekter | 2dab3ccbf7ddb6be3ee09f9f5e87085f354dd84a | da3d7c9611088043e2aea5d844f1ae6056215e04 | refs/heads/master | 2022-06-07T05:17:35.327228 | 2020-04-30T10:28:48 | 2020-04-30T10:28:48 | 260,180,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | loadSubOlis=["/home/vfurtula/Documents/Projects/Swanepoel_analysis/data/sapphirejune16.asc",True]
loadSubFilmOlis=["/home/vfurtula/Documents/Projects/Swanepoel_analysis/data/ek036-4.asc",True]
loadSubFTIR=["/home/vfurtula/Documents/Projects/Swanepoel_analysis/data/SAPPHIREJUNE16.DPT",True]
loadSubFilmFTIR=["/home/vfurtula/Documents/Projects/Swanepoel_analysis/data/EK036-4.DPT",True]
fit_linear_spline="spline"
gaussian_factors=[7.25, 1.0, 0.75, 0.0]
gaussian_borders=[0.225, 0.6, 0.8, 1.6, 3.25]
ignore_data_pts=45
corr_slit=1
fit_poly_order=4
fit_poly_ranges=[[0.6, 0.95, 1.5, 2.5],True]
filename="save_to_file"
folder="save_to_folde"
timestr="180223-1627"
save_figs=True
plot_X="eV" | [
"[email protected]"
] | |
49ad7fbd531626b902b58ec3a7df9d440d97e36b | a518141ca3ba2b6fa63a7961b51936d9438ff022 | /401 - Palindromes.py | c489ec9ee8ebc9e83818d1be9f90a8116897be74 | [] | no_license | jlhung/UVA-Python | ec93b2c98e04c753e8356f3e4825584fae4a8663 | 7a0db4fecffd7ac4f377f93da41291a8e998ee9b | refs/heads/master | 2022-11-28T04:47:49.270187 | 2020-08-10T13:19:58 | 2020-08-10T13:19:58 | 116,969,745 | 19 | 9 | null | null | null | null | UTF-8 | Python | false | false | 655 | py | '''
20180203 jlhung v1.0
'''
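# Mirror table: index 0-25 covers letters A-Z and index 26-34 covers digits
# 1-9 (see cal() below); entry i is the mirror image of that character, with
# '0' marking characters that have no valid mirror (e.g. E<->3, S<->2, J<->L).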
a = "A000300HIL0JM0O0002TUVMXY51SE0Z0080"
def cal(z):
if z >= 65:
return z - 65
else:
return z - 49 + 26
while True:
try:
n = input()
except EOFError:
break
if n == n[::-1]:
p = 1
else:
p = 0
m = 1
for i in range(len(n)//2 + 1):
x = cal(ord(n[i]))
if a[x] != n[len(n)-1-i]:
m = 0
break
if m == 1 and p == 1:
print("{} -- is a mirrored palindrome.".format(n))
elif m == 0 and p == 1:
print("{} -- is a regular palindrome.".format(n))
elif m == 1 and p == 0:
print("{} -- is a mirrored string.".format(n))
else:
print("{} -- is not a palindrome.".format(n))
print()
| [
"[email protected]"
] | |
d172f7350f6aaeb3f780cbf04830ffcac3896f82 | deae8bc2da1936c4f9afbd8e9412af8df39e96a2 | /src/spaceone/inventory/connector/__init__.py | c89ed55465d6f21041cb30667531f80727c4f47b | [
"Apache-2.0"
] | permissive | choonho/inventory | 11a3ef0ec327214cc26260ace049305021c892cf | cc89757490d28fecb7ffccdfd6f89d4c0aa40da5 | refs/heads/master | 2023-04-17T05:51:20.373738 | 2020-06-16T07:14:27 | 2020-06-16T07:14:27 | 273,175,186 | 0 | 0 | null | 2020-06-18T07:49:19 | 2020-06-18T07:49:18 | null | UTF-8 | Python | false | false | 394 | py | from spaceone.inventory.connector.identity_connector import IdentityConnector
from spaceone.inventory.connector.plugin_connector import PluginConnector
from spaceone.inventory.connector.secret_connector import SecretConnector
from spaceone.inventory.connector.collector_connector import CollectorPluginConnector
from spaceone.inventory.connector.repository_connector import RepositoryConnector
| [
"[email protected]"
] | |
87c88e8b5fe16b76cb62ec4af876bdc38c0526de | a74cabbe1b11fc8ef575ea86f2543cd95db78ec9 | /python_program/q1348_Tweet_Counts_Per_Frequency.py | 0ebd6ba5dc2c6918e1155392e2fb98e3ccab9546 | [] | no_license | tszandy/leetcode | 87e3ccf291b2879637d2d8238935a455b401a78a | f1f4361541dcffbb291285663c8820d7ffb37d2f | refs/heads/master | 2023-04-06T15:34:04.847875 | 2023-03-26T12:22:42 | 2023-03-26T12:22:42 | 204,069,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | from typing import List
from collections import Counter,defaultdict
from math import *
from functools import reduce,lru_cache
import numpy as np
from heapq import *
from bisect import bisect_left
class TweetCounts:
def __init__(self):
self.counter = Counter()
def recordTweet(self, tweetName: str, time: int) -> None:
self.counter[(tweetName,time)]+=1
def getTweetCountsPerFrequency(self, freq: str, tweetName: str, startTime: int, endTime: int) -> List[int]:
freq_to_num = {"minute":60,"hour":3600,"day":86400}
return_list = []
for i in range(startTime,endTime+1,freq_to_num[freq]):
return_list.append(0)
for j in range(min(freq_to_num[freq],endTime-i+1)):
time = i+j
return_list[-1]+=self.counter[(tweetName,time)]
return return_list
# Your TweetCounts object will be instantiated and called as such:
# obj = TweetCounts()
# obj.recordTweet(tweetName,time)
# param_2 = obj.getTweetCountsPerFrequency(freq,tweetName,startTime,endTime)
# demo driver exercising the LeetCode sample input
obj = TweetCounts()
obj.recordTweet("tweet3", 0)
obj.recordTweet("tweet3", 60)
obj.recordTweet("tweet3", 10)
print(obj.getTweetCountsPerFrequency("minute", "tweet3", 0, 59))   # [2]
print(obj.getTweetCountsPerFrequency("minute", "tweet3", 0, 60))   # [2, 1]
obj.recordTweet("tweet3", 120)
print(obj.getTweetCountsPerFrequency("hour", "tweet3", 0, 210))    # [4]
| [
"[email protected]"
] | |
0996721394223768eb7b2c15f84c02d6f59230e9 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/vaster.py | a727153d233f607502258f30f0c7640bac736fa4 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 45 | py | ii = [('BentJDO2.py', 1), ('SadlMLP2.py', 1)] | [
"[email protected]"
] | |
844ef7dcae523873a814431086047825c6846d7b | cde6893942b906bc7388e2af69dd33c2cca6bb8b | /docs/source/conf.py | d2cb29be5dd0a759ae77be5f9c5a2db66da1b5d9 | [
"MIT"
] | permissive | samirelanduk/fuzz | 9b482d51436caf3ca576ae57639031f61da498c3 | c2b75009686ca9775d8bfaa4e489e7e1db5ac22b | refs/heads/master | 2020-12-03T08:13:05.763707 | 2020-12-02T15:57:08 | 2020-12-02T15:57:08 | 95,669,452 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,796 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# fuzz documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 18 15:49:06 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'fuzz'
copyright = '2017, Sam Ireland'
author = 'Sam Ireland'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'fuzzdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'fuzz.tex', 'fuzz Documentation',
'Sam Ireland', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'fuzz', 'fuzz Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'fuzz', 'fuzz Documentation',
author, 'fuzz', 'One line description of project.',
'Miscellaneous'),
]
autodoc_member_order = 'bysource'
autodoc_docstring_signature = True
| [
"[email protected]"
] | |
d1a69c770860c71d4ea8cbe1f0d096cc33ed146f | 9e36b3a0a609f862aa2894a1473896c8465c41a1 | /arelle/ViewWinGrid.py | b2cea677258415d91d3d4774b0643e666a8e869e | [
"Apache-2.0"
] | permissive | marado/Arelle | 7cd74a66d19be174c9f1fe66f788dd53447bffac | 7ca2bf09c852787cd7a38d68b13c11d5e33e72a2 | refs/heads/master | 2020-04-08T00:27:54.212337 | 2011-10-28T17:49:49 | 2011-10-28T17:49:49 | 2,658,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,703 | py | '''
Created on Oct 9, 2010
@author: Mark V Systems Limited
(c) Copyright 2010 Mark V Systems Limited, All rights reserved.
'''
from tkinter import *
from tkinter.ttk import *
from arelle.CntlrWinTooltip import ToolTip
from arelle.UiUtil import (scrolledHeaderedFrame, scrolledFrame)
class ViewGrid:
def __init__(self, modelXbrl, tabWin, tabTitle, hasToolTip=False, lang=None):
self.tabWin = tabWin
#self.viewFrame = Frame(tabWin)
#self.viewFrame.grid(row=0, column=0, sticky=(N, S, E, W))
'''
paneWin = PanedWindow(self.viewFrame, orient=VERTICAL)
paneWin.grid(row=1, column=0, sticky=(N, S, E, W))
self.zGrid = scrollgrid(paneWin)
self.zGrid.grid(row=0, column=0, sticky=(N, S, E, W))
self.xyGrid = scrollgrid(paneWin)
self.xyGrid.grid(row=1, column=0, sticky=(N, S, E, W))
'''
'''
self.gridBody = scrollgrid(self.viewFrame)
self.gridBody.grid(row=0, column=0, sticky=(N, S, E, W))
'''
self.viewFrame = scrolledHeaderedFrame(tabWin)
self.gridTblHdr = self.viewFrame.tblHdrInterior
self.gridColHdr = self.viewFrame.colHdrInterior
self.gridRowHdr = self.viewFrame.rowHdrInterior
self.gridBody = self.viewFrame.bodyInterior
'''
self.viewFrame = scrolledFrame(tabWin)
self.gridTblHdr = self.gridRowHdr = self.gridColHdr = self.gridBody = self.viewFrame.interior
'''
tabWin.add(self.viewFrame,text=tabTitle)
self.modelXbrl = modelXbrl
self.hasToolTip = hasToolTip
self.toolTipText = StringVar()
if hasToolTip:
self.gridBody.bind("<Motion>", self.motion, '+')
self.gridBody.bind("<Leave>", self.leave, '+')
self.toolTipText = StringVar()
self.toolTip = ToolTip(self.gridBody,
textvariable=self.toolTipText,
wraplength=480,
follow_mouse=True,
state="disabled")
self.toolTipColId = None
self.toolTipRowId = None
self.modelXbrl = modelXbrl
self.contextMenuClick = self.modelXbrl.modelManager.cntlr.contextMenuClick
self.gridTblHdr.contextMenuClick = self.contextMenuClick
self.gridColHdr.contextMenuClick = self.contextMenuClick
self.gridRowHdr.contextMenuClick = self.contextMenuClick
self.gridBody.contextMenuClick = self.contextMenuClick
self.lang = lang
if modelXbrl:
modelXbrl.views.append(self)
if not lang:
self.lang = modelXbrl.modelManager.defaultLang
def close(self):
self.tabWin.forget(self.viewFrame)
self.modelXbrl.views.remove(self)
self.modelXbrl = None
def select(self):
self.tabWin.select(self.viewFrame)
def leave(self, *args):
self.toolTipColId = None
self.toolTipRowId = None
def motion(self, *args):
'''
tvColId = self.gridBody.identify_column(args[0].x)
tvRowId = self.gridBody.identify_row(args[0].y)
if tvColId != self.toolTipColId or tvRowId != self.toolTipRowId:
self.toolTipColId = tvColId
self.toolTipRowId = tvRowId
newValue = None
if tvRowId and len(tvRowId) > 0:
try:
col = int(tvColId[1:])
if col == 0:
newValue = self.gridBody.item(tvRowId,"text")
else:
values = self.gridBody.item(tvRowId,"values")
if col <= len(values):
newValue = values[col - 1]
except ValueError:
pass
self.setToolTip(newValue, tvColId)
'''
def setToolTip(self, text, colId="#0"):
self.toolTip._hide()
if isinstance(text,str) and len(text) > 0:
width = self.gridBody.column(colId,"width")
if len(text) * 8 > width or '\n' in text:
self.toolTipText.set(text)
self.toolTip.configure(state="normal")
self.toolTip._schedule()
else:
self.toolTipText.set("")
self.toolTip.configure(state="disabled")
else:
self.toolTipText.set("")
self.toolTip.configure(state="disabled")
def contextMenu(self):
try:
return self.menu
except AttributeError:
self.menu = Menu( self.viewFrame, tearoff = 0 )
self.gridBody.bind( self.contextMenuClick, self.popUpMenu )
if not self.gridTblHdr.bind(self.contextMenuClick):
self.gridTblHdr.bind( self.contextMenuClick, self.popUpMenu )
if not self.gridColHdr.bind(self.contextMenuClick):
self.gridColHdr.bind( self.contextMenuClick, self.popUpMenu )
if not self.gridRowHdr.bind(self.contextMenuClick):
self.gridRowHdr.bind( self.contextMenuClick, self.popUpMenu )
return self.menu
def popUpMenu(self, event):
self.menu.post( event.x_root, event.y_root )
def menuAddLangs(self):
langsMenu = Menu(self.viewFrame, tearoff=0)
self.menu.add_cascade(label=_("Language"), menu=langsMenu, underline=0)
for lang in sorted(self.modelXbrl.langs):
langsMenu.add_cascade(label=lang, underline=0, command=lambda l=lang: self.setLang(l))
def setLang(self, lang):
self.lang = lang
self.view()
| [
"[email protected]"
] | |
0b5a6d6033e0072d7b2378fe1573de7aa7606581 | 9673db0d489c0cfa0a304844b9ff5ba8c6024621 | /cwmipermanentevents/win_cwmieventlogconsumer.py | 319871554f23f8852e9a806d3457eb0744bf007b | [] | no_license | daveres/Ansible-Auto-Generated-Modules | a9ec1ad2f9ff9c741b77f5d411b927f1a48c6ce3 | a91b484171be12b9e2bc7c0a9c23bdd767877e38 | refs/heads/master | 2020-12-25T03:29:37.751104 | 2015-12-22T19:24:01 | 2015-12-22T19:24:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,930 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# <COPYRIGHT>
# <CODEGENMETA>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_cwmieventlogconsumer
version_added:
short_description: Generated from DSC module cwmipermanentevents version 1.1 at 22.12.2015 20.11.12
description:
- DSC Resources for managing WMI permanent events
options:
EventID:
description:
-
required: True
default:
aliases: []
Name:
description:
-
required: True
default:
aliases: []
Category:
description:
-
required: False
default:
aliases: []
Ensure:
description:
-
required: False
default:
aliases: []
choices:
- Absent
- Present
EventType:
description:
-
required: False
default:
aliases: []
choices:
- AuditFailure
- AuditSuccess
- Error
- Information
- Success
- Warning
InsertionStringTemplates:
description:
-
required: False
default:
aliases: []
NumberOfInsertionStrings:
description:
-
required: False
default:
aliases: []
PsDscRunAsCredential_username:
description:
-
required: False
default:
aliases: []
PsDscRunAsCredential_password:
description:
-
required: False
default:
aliases: []
SourceName:
description:
-
required: False
default:
aliases: []
UNCServerName:
description:
-
required: False
default:
aliases: []
AutoInstallModule:
description:
- If true, the required dsc resource/module will be auto-installed using the Powershell package manager
required: False
default: false
aliases: []
choices:
- true
- false
AutoConfigureLcm:
description:
- If true, LCM will be auto-configured for directly invoking DSC resources (which is a one-time requirement for Ansible DSC modules)
required: False
default: false
aliases: []
choices:
- true
- false
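'''

EXAMPLES = '''
# A hypothetical playbook task for this generated module; the parameter
# values are illustrative only and not taken from the DSC resource docs.
- name: Ensure a WMI event-log consumer is present
  win_cwmieventlogconsumer:
    Name: MyEventLogConsumer
    EventID: 1000
    Ensure: Present
'''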
| [
"[email protected]"
] | |
de2ae8f2273970279ea9cb452dd78cf5bfdf252b | 9645bdfbb15742e0d94e3327f94471663f32061a | /Python/394 - Decode String/394_decode-string.py | 1f0ffd16417ab8710b0220c574306888c52a0085 | [] | no_license | aptend/leetcode-rua | f81c080b2260adb2da677612e5c437eda256781d | 80e44f4e9d3a5b592fdebe0bf16d1df54e99991e | refs/heads/master | 2023-06-22T00:40:05.533424 | 2021-03-17T13:51:28 | 2021-03-17T13:51:28 | 186,434,133 | 2 | 0 | null | 2023-06-21T22:12:51 | 2019-05-13T14:17:27 | HTML | UTF-8 | Python | false | false | 1,013 | py | from leezy import Solution, solution
class Q394(Solution):
@solution
def decodeString(self, s):
# 20ms 49.53%
i = 0
while i < len(s) and s[i].isalpha():
i += 1
if i == len(s): # pure string
return s
lead_str = s[:i]
# s[i] is digit or [
cnt = 0
        while s[i].isdigit(): # no boundary check when we have trusted input
cnt = cnt * 10 + int(s[i])
i += 1
# now s[i] is '['
open_ = 1
j = i
while j < len(s) and open_:
j += 1
if s[j] == '[':
open_ += 1
elif s[j] == ']':
open_ -= 1
return lead_str + cnt * self.decodeString(s[i+1:j]) + self.decodeString(s[j+1:])
@solution
def decode_str(self, s):
pass
def main():
q = Q394()
q.add_args('3[a]2[bc]')
q.add_args('3[a2[c]]')
q.add_args('2[abc]3[cd]ef')
q.run()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
322d450662d582c2c1f19c213517f29168d4ec15 | a9f676c06bacee1f8b27e08d3c411c89a69cfd40 | /falmer/events/migrations/0031_auto_20180928_1223.py | f7d76238b0effcfd8ed14b357d772ca4cd902f86 | [
"MIT"
] | permissive | sussexstudent/falmer | 1b877c3ac75a0477f155ce1a9dee93a5ada686d6 | ae735bd9d6177002c3d986e5c19a78102233308f | refs/heads/master | 2022-12-11T19:40:12.232488 | 2020-03-20T13:01:47 | 2020-03-20T13:01:47 | 88,043,958 | 2 | 3 | MIT | 2022-12-08T03:17:26 | 2017-04-12T11:24:02 | Python | UTF-8 | Python | false | false | 537 | py | # Generated by Django 2.0.8 on 2018-09-28 11:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0030_brandingperiod_override_listings_root'),
]
operations = [
migrations.AlterField(
model_name='event',
name='ticket_type',
field=models.CharField(choices=[('NA', 'n/a'), ('NT', 'Native'), ('EB', 'Eventbrite'), ('AC', 'ACCA'), ('GN', 'Generic'), ('MSL', 'MSL')], default='NA', max_length=3),
),
]
| [
"[email protected]"
] | |
eaa42c766189d48ffb00f361d854aead4aac7002 | 1534531d248728e583310214c84cd329cfeb243b | /accelerator/examples/a_dsexample_multipledatasets.py | 1539fa2b787d90fd2240e9cfee47148b70491c9b | [
"Apache-2.0"
] | permissive | eBay/accelerator | 415a006d18283940661c0f3cbae2c311acc1ffaa | 8376d289e39cd90562de7dc2e3cdaa0bf080587b | refs/heads/master | 2023-03-10T11:08:58.828517 | 2022-07-14T19:15:46 | 2022-07-14T19:15:46 | 130,265,539 | 146 | 30 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | def prepare(job):
dw1 = job.datasetwriter(name='first')
dw2 = job.datasetwriter(name='second')
dw3 = job.datasetwriter(name='third')
dw1.add('col1', 'int64')
dw2.add('col1', 'json')
dw3.add('col1', 'number')
dw3.add('col2', 'ascii')
dw3.add('col3', 'bool')
return dw1, dw2, dw3
def analysis(sliceno, prepare_res):
dw1, dw2, dw3 = prepare_res
dw1.write(sliceno)
dw2.write({'sliceno': sliceno})
dw3.write(sliceno, str(sliceno), sliceno % 2 == 0)
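# Running this method yields three datasets on the job: "first" (one int64
# column), "second" (one json column) and "third" (number/ascii/bool columns),
# each receiving one row per analysis slice.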
| [
"[email protected]"
] | |
1ad148f1ed603386468eba8abf94ca4498c06ff7 | 70cdf0741a22c678401a306229003bf036ffe5a6 | /ocbind/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/__init__.py | c7c0b7216ecbdb740eabfcde08d780c3cf6e5192 | [] | no_license | zsblevins/nanog81-hackathon | 5001e034339d6b0c6452ae2474f06916bcd715cf | 1b64fd207dd69837f947094fbd6d6c1cea3a1070 | refs/heads/main | 2023-03-03T09:39:28.460000 | 2021-02-15T13:41:38 | 2021-02-15T13:41:38 | 336,698,856 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49,081 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-common - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
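# Illustrative use of the generated container above (a sketch, not part of the
# generated bindings; the leaf names on the nested config class, e.g.
# max_prefixes, are assumptions based on the OpenConfig prefix-limit definition
# and are not defined in this file):
#
#   pl = prefix_limit()
#   pl.config.max_prefixes = 100        # populate a writable config leaf
#   pl._path()                          # ['bgp', 'peer-groups', ..., 'prefix-limit']
#   pl._set_state(state.state())        # backends populate the read-only state
#                                       # side via _set_state(), per the docstrings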
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-common-multiprotocol - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-common-structure - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-peer-group - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-neighbor - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
from . import config
from . import state
class prefix_limit(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-global - based on the path /bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv4-labeled-unicast/prefix-limit. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
__slots__ = ('_path_helper', '_extmethods', '__config','__state',)
_yang_name = 'prefix-limit'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv4-labeled-unicast', 'prefix-limit']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the prefix
limit for the AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv4_labeled_unicast/prefix_limit/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
| [
"[email protected]"
] | |
41423559ea1814593584b0719e067271b835e2f5 | 36de14c6b188886df6a284ee9ce4a464a5ded433 | /Solutions/0481/0481.py | 9cb5c5f805a1753547fb9a793374fca17c61eb5e | [] | no_license | washing1127/LeetCode | 0dca0f3caa5fddd72b299e6e8f59b5f2bf76ddd8 | b910ddf32c7e727373449266c9e3167c21485167 | refs/heads/main | 2023-03-04T23:46:40.617866 | 2023-02-21T03:00:04 | 2023-02-21T03:00:04 | 319,191,720 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | # -*- coding:utf-8 -*-
# Author: washing
# DateTime: 2022/10/31 07:56
# File: 0481.py
# Desc: LeetCode 481. Magical String -- count the 1s among the first n elements.
class Solution:
    def magicalString(self, n: int) -> int:
        # The magical string "1221121221221121122..." reproduces itself from its
        # own run lengths; count the 1s among its first n elements.
        if n <= 3: return 1  # the first three elements "122" contain exactly one 1
        l = [1,2,2]
        gai = 1  # value of the next run to append (alternates between 1 and 2)
        idx = 2  # read head: l[idx] gives the length of the next run
        for _ in range(2, n):
            l.extend([gai] * l[idx])
            idx += 1
            if gai == 1: gai = 2
            else: gai = 1
            if len(l) >= n:
                return sum([i%2 for i in l[:n]])  # the 1s are the odd entries
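# Small illustrative self-check; expected values follow the LeetCode examples
# (the first six elements "122112" contain three 1s).
if __name__ == "__main__":
    assert Solution().magicalString(6) == 3
    assert Solution().magicalString(1) == 1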
| [
"[email protected]"
] | |
4c27d86afe78672f295177ebcb30ea11b71ae563 | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/models/run_locally.runfiles/pypi__tensorboard_1_12_1/tensorboard/backend/event_processing/db_import_multiplexer.py | b3af8b8b84de188f8bd2edf64b42541f54c25b77 | [
"Apache-2.0"
] | permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 172 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorboard_1_12_1/tensorboard/backend/event_processing/db_import_multiplexer.py | [
"[email protected]"
] | |
6fc9932f4c30db04bb30d77843208976772512e4 | ca4b23956a888c0c26106eafb665bf60d067161e | /stepik-union/src/test/resources/samples/python3/expected_replaced/5.py | 7d90a37efa7a26919641cef8f49997350f660afa | [] | no_license | shanedecamp/intellij-plugins | c9f332ca20eaecd6736e6582641b7b725a025d8c | 27475af7baa78de7c9506b06046287f163bebae3 | refs/heads/master | 2020-04-13T18:22:17.996572 | 2017-11-23T12:08:08 | 2017-11-23T12:08:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37 | py | replaced
# Stepik code: end
some
text | [
"[email protected]"
] | |
174372ef8d2ca43068f5360b308aef75060ce3fb | 4766d241bbc736e070f79a6ae6a919a8b8bb442d | /archives/20190519python/0977. Squares of a Sorted Array.py | 31e803b0f0a7a1799a41547fd8cdb2db90103d5c | [] | no_license | yangzongwu/leetcode | f7a747668b0b5606050e8a8778cc25902dd9509b | 01f2edd79a1e922bfefecad69e5f2e1ff3a479e5 | refs/heads/master | 2021-07-08T06:45:16.218954 | 2020-07-18T10:20:24 | 2020-07-18T10:20:24 | 165,957,437 | 10 | 8 | null | null | null | null | UTF-8 | Python | false | false | 1,211 | py | class Solution:
    from typing import List  # class-level import so the annotations below resolve outside LeetCode's harness
    def sortedSquares(self, A: List[int]) -> List[int]:
if not A:
return A
if A[0]>=0:
return self.getSquares(A)
if A[0]<0:
if A[-1]<0:
A=self.getSquares(A)
return A[::-1]
        # Mixed signs: find the first non-negative index k.
        k=0
        while k<len(A) and A[k]<0:
            k+=1
        # Squares of the (reversed) negative prefix and of the non-negative
        # suffix are both ascending; merge the two sorted runs.
        nums1=self.getSquares(A[:k][::-1])
        nums2=self.getSquares(A[k:])
        return self.getsortedArray(nums1,nums2)
def getsortedArray(self,nums1,nums2):
if not nums1:
return nums2
if not nums2:
return nums1
rep=[]
k1,k2=0,0
while k1<len(nums1) and k2<len(nums2):
if nums1[k1]<nums2[k2]:
rep.append(nums1[k1])
k1+=1
else:
rep.append(nums2[k2])
k2+=1
while k1<len(nums1):
rep.append(nums1[k1])
k1+=1
while k2<len(nums2):
rep.append(nums2[k2])
k2+=1
return rep
    def getSquares(self,A):  # squares each element in place and returns the same list
for k in range(len(A)):
A[k]=A[k]*A[k]
return A
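# Small illustrative self-check; expected values follow the LeetCode examples.
if __name__ == "__main__":
    assert Solution().sortedSquares([-4, -1, 0, 3, 10]) == [0, 1, 9, 16, 100]
    assert Solution().sortedSquares([-7, -3, 2, 3, 11]) == [4, 9, 9, 49, 121]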
| [
"[email protected]"
] | |
f2751c6ffd13b7ca049b44caf699c03881be8ee1 | 6e34d59a5220d42b8baa39bd5bc49d69f77103b6 | /timelapse_stack.py | faa41545ac02dbba0480be8e54f29d69fd136595 | [] | no_license | pbmanis/timelapse | 034dc6633fa98d43cba03faf68eb9d2636da8120 | 6a989545cbf83e2a40e2d8f87120860104ee24b6 | refs/heads/master | 2021-06-29T18:38:41.726219 | 2020-07-14T01:33:03 | 2020-07-14T01:33:03 | 66,783,357 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,952 | py | """
Script used to convert timelapse+zstack data into a max-filtered video.
Luke Campagnola and Paul Manis, 4-2015 and 3, 4-2016.
Input data structures:
1. 'auto': ImageSequence_nnn has a number of image_nnn.ma files; each of those files is a single
time point in the sequence.
2. 'manual': Each ImageSequence_nnn has a single image_000.ma file; the ImageSequence itself is
the individual time point; the slice directory holds the collection of time points.
"""
from acq4.util.metaarray import MetaArray
import acq4.util.DataManager as DataManager
import imreg_dft
import scipy.stats
import re
import os
import numpy as np
import pyqtgraph as pg
from collections import OrderedDict
import argparse
parser = argparse.ArgumentParser(description='Analyze time lapse stacks')
parser.add_argument('Experiment', type=int,
help='Select an Experiment number')
args = parser.parse_args()
expt = args.Experiment
app = pg.mkQApp()
# basedir = '/Volumes/Backup2B/Sullivan_Chelsea/Chelsea/'  # superseded by the path below
basedir = '/Volumes/Promise Pegasus/ManisLab_Data3/Sullivan_Chelsea/'
# man.setBaseDir(basedir)
#
# Analysis is driven by the filelist data structure
#
# 'filelist' is a dictionary, which contains a dict of parameters to guide the analysis.
# 'refframes' is a list of the matching frames from each of the z-stacks in the successive
# time points
# 'mode' is either 'auto', or 'manual'. If the data are collected as a time-lapse sequence of
# z stacks, and appear as a set of "ImageSequence_000" directories, then the mode should be
# 'auto'. If the time-lapse points were manually collected, but the stacks are automatic,
# then the mode should be 'manual'.
# 'datalist' is a list of the records to include. If 'datalist' is set to None, then all
# recordings will be included. Note that if mode is "auto", then datalist should be None.
#
filelist = OrderedDict([('2015.04.17_000/slice_001/ImageSequence_000',
{'refframes': [40, 37, 33, 30, 28, 26, 23, 21, 19, 17, 16, 14],
'mode': 'auto', 'datalist': None}),
('2016.03.22_000/slice_000',
{'refframes': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
'mode': 'manual', 'datalist': None}),
('2016.03.23_000',
{'refframes': [0]*39,
'mode': 'manual', 'datalist': None}),
('2016.03.28_000/slice_000',
{'refframes': [0]*len(range(0, 9)),
'mode': 'manual', 'datalist': range(0, 9)}),
('2016.04.11_000/slice_000',
{'refframes': [0]*len(range(0, 13)),
'mode': 'manual', 'datalist': range(0, 13)}),
('2016.04.13_000/slice_000',
{'refframes': [0]*len(range(2, 16)),
'mode': 'manual', 'datalist': range(2, 16)}),
('2016.04.15_000/slice_000',
{'refframes': [0]*len(range(14, 34)),
'mode': 'manual', 'datalist': range(14, 34)}),
])
# select a dataset to analyze:
ff = filelist.keys()[expt] # gets the dataset name
fullpath = os.path.join(basedir, ff)
print 'File: ', fullpath
dh = DataManager.getDirHandle(fullpath, create=False)
# collect all data with depth corrected
#dh = man.currentFile
found = False
for n in filelist.keys():
if n in dh.name():
found = True
break
if not found:
raise ValueError('Unknown file: %s' % dh.name())
print 'Dataset found.'
indexes = filelist[n]['refframes']
if filelist[n]['mode'] == 'auto':
z_length = len(dh.info()['zStackValues'])
offsets = [-min(indexes), z_length - max(indexes)]
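    # Each stack is cropped to indexes[i]+offsets[0] : indexes[i]+offsets[1]; every
    # time point then keeps the same number of planes, with its reference frame at
    # the same position (min(indexes)) in the crop, so depths align across time.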
print 'Analyzing in Auto mode'
print '\tTimes in timelapse: ', z_length
print '\tIndexes: ', indexes
print '\tOffsets: ', offsets
data = [dh['image_%03d.ma'%i].read()[indexes[i]+offsets[0]:indexes[i]+offsets[1]].
asarray()[np.newaxis, ...] for i in range(len(indexes))]
elif filelist[n]['mode'] == 'manual':
nframes = dh['ImageSequence_%03d' % filelist[n]['datalist'][0]]['image_000.ma'].read().shape[0]
ts = []
    if filelist[n]['datalist'] is not None:
sequence = filelist[n]['datalist']
else:
sequence = range(len(indexes))
for i in sequence:
th = dh['ImageSequence_%03d'%i]['image_000.ma']
if th.exists() and th.read().shape[0] == nframes:
ts.append(i)
z_length = len(ts)
offsets = [-min(indexes), z_length - max(indexes)]
print 'Analyzing in Manual mode'
print '\t# of depths in timelapse: ', z_length
print '\t# of frames in each: ', nframes
print '\tIndexes: ', indexes
print '\tOffsets: ', offsets
try:
print indexes
print offsets
print 'list of indexes reading: ', [[indexes[i]+offsets[0],indexes[i]+offsets[1]] for i in ts]
data = [dh['ImageSequence_%03d'%i]['image_000.ma'].read()[indexes[i]+offsets[0]:indexes[i]+offsets[1]].
asarray()[np.newaxis, ...] for i in range(len(ts[:-2]))]
    except Exception:
print 'error'
print 'len ts: ', len(ts)
print 'ts: ', ts
print 'i: ', i
print 'index[i], o: ', indexes[i], offsets[0], offsets[1]
raise ValueError('Indexing error for ImageSequence image data set %d' % i)
else:
raise ValueError('Unknown data mode: %s' % filelist[n]['mode'])
print 'data shape: ', [len(k) for k in data]
data = np.concatenate(data, axis=0)
# print 'data shape (t, z, x, y): ', data.shape
# dim edges to avoid artifacts at the edges of depth range
dim = data.copy()
dim[:,0] *= 0.33
dim[:,1] *= 0.66
dim[:,-1] *= 0.33
dim[:,-2] *= 0.66
# flatten stacks
m = dim.max(axis=1)
nreg = m.shape[0]
ireg = int(nreg/2) # get one near the middle of the sequence.
# correct for lateral motion
off = [imreg_dft.translation(m[ireg], m[i])[0] for i in range(0, m.shape[0])]
offt = np.array(off).T
# find boundaries of outer rectangle including all images as registered
minx = np.min(offt[0])
maxx = np.max(offt[0])
miny = np.min(offt[1])
maxy = np.max(offt[1])
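# e.g. x-offsets spanning [minx, maxx] widen the canvas by (maxx - minx) so every
# shifted frame fits; frame i is pasted in at (off_x - minx, off_y - miny) below.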
# build canvas
canvas = np.zeros(shape=(m.shape[0], m.shape[1]-minx+maxx,
m.shape[2]-miny+maxy), dtype=m.dtype)
# set initial image (offsets were computed relative to this, so it has no offset)
# canvas[0, -minx:-minx+m.shape[1], -miny:-miny+m.shape[2]] = m[0]
for i in range(0, m.shape[0]):
ox = offt[0][i] - minx
oy = offt[1][i] - miny
canvas[i, ox:(ox+m.shape[1]), oy:(oy+m.shape[2])] = m[i]
# print 'canvas %d set' % i
# correct for bleaching
# per-frame level: median of each frame's brightest ~5% of pixels
levels = np.array([np.median(m[i][m[i] > scipy.stats.scoreatpercentile(m[i], 95)]) for i in range(m.shape[0])])
norm = canvas / levels[:, np.newaxis, np.newaxis]
w = pg.image()
w.setImage(norm)
# write the resulting compressed z-stacks to a file in the original directory.
ma = MetaArray(norm, info=[{'name': 'Time'}, {'name': 'X'}, {'name': 'Y'}, {}])
ma.write(dh.name() + '/max_stack.ma')
pg.show()
import sys
if sys.flags.interactive == 0:
app.exec_() | [
"[email protected]"
] | |
a2912aa8082dd0c86f50d80a953f898ece522e01 | e733d07a1492f6e9b762d9ca496ec59668aedb95 | /qcloudsdkcvm/InquiryInstancePriceHourRequest.py | 906b50e9717e953b7d3cb28273d3cd7f2de595fb | [
"Apache-2.0"
] | permissive | QcloudApi/qcloudcli | 1f67d8467b81ac8964362491cd4f3104f8e59161 | ba16161f65df5f621d9f1c5587b9900dca600cb5 | refs/heads/master | 2023-08-15T01:51:05.236254 | 2018-07-11T08:07:29 | 2018-07-11T08:07:29 | 100,922,202 | 8 | 6 | null | 2018-03-29T11:57:26 | 2017-08-21T06:55:45 | Python | UTF-8 | Python | false | false | 2,262 | py | # -*- coding: utf-8 -*-
from qcloudsdkcore.request import Request
class InquiryInstancePriceHourRequest(Request):
def __init__(self):
super(InquiryInstancePriceHourRequest, self).__init__(
'cvm', 'qcloudcliV1', 'InquiryInstancePriceHour', 'cvm.api.qcloud.com')
def get_bandwidth(self):
return self.get_params().get('bandwidth')
def set_bandwidth(self, bandwidth):
self.add_param('bandwidth', bandwidth)
def get_bandwidthType(self):
return self.get_params().get('bandwidthType')
def set_bandwidthType(self, bandwidthType):
self.add_param('bandwidthType', bandwidthType)
def get_cpu(self):
return self.get_params().get('cpu')
def set_cpu(self, cpu):
self.add_param('cpu', cpu)
def get_goodsNum(self):
return self.get_params().get('goodsNum')
def set_goodsNum(self, goodsNum):
self.add_param('goodsNum', goodsNum)
def get_imageId(self):
return self.get_params().get('imageId')
def set_imageId(self, imageId):
self.add_param('imageId', imageId)
def get_imageType(self):
return self.get_params().get('imageType')
def set_imageType(self, imageType):
self.add_param('imageType', imageType)
def get_instanceModel(self):
return self.get_params().get('instanceModel')
def set_instanceModel(self, instanceModel):
self.add_param('instanceModel', instanceModel)
def get_mem(self):
return self.get_params().get('mem')
def set_mem(self, mem):
self.add_param('mem', mem)
def get_rootSize(self):
return self.get_params().get('rootSize')
def set_rootSize(self, rootSize):
self.add_param('rootSize', rootSize)
def get_storageSize(self):
return self.get_params().get('storageSize')
def set_storageSize(self, storageSize):
self.add_param('storageSize', storageSize)
def get_storageType(self):
return self.get_params().get('storageType')
def set_storageType(self, storageType):
self.add_param('storageType', storageType)
def get_zoneId(self):
return self.get_params().get('zoneId')
def set_zoneId(self, zoneId):
self.add_param('zoneId', zoneId)
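# Illustrative use of the generated request (a sketch; the parameter values are
# placeholders and request dispatch lives in qcloudsdkcore, not shown here):
#   req = InquiryInstancePriceHourRequest()
#   req.set_zoneId(100002)
#   req.set_cpu(2)
#   req.set_mem(4)
#   req.get_params()  # -> the accumulated {'zoneId': ..., 'cpu': ..., 'mem': ...}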
| [
"[email protected]"
] | |
24de9783acf09079e0e372ead54d08b82db5e567 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /storagegateway_write_f/smb-file-share_update.py | 3357133385bf1dc97e35672040d499ce1326752f | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/storagegateway/update-smb-file-share.html
if __name__ == '__main__':
"""
create-smb-file-share : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/storagegateway/create-smb-file-share.html
describe-smb-file-shares : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/storagegateway/describe-smb-file-shares.html
"""
write_parameter("storagegateway", "update-smb-file-share") | [
"[email protected]"
] | |
2dd74bd70f39713191de8dc6a0ece7478c6387db | 6e158a54409937515b14676730adfadfd457d4ae | /gaussian_spheres/pwl.py | 829e513a721f8fe6245d003f3e220aab7e410ea8 | [] | no_license | Tjstretchalot/machinelearning | e2b277efd99f6e45005cb92a0cc17e90bf7d37e4 | 5a3b17c49211a63f71cdf40ca35e00a3af4b198a | refs/heads/master | 2020-05-02T09:25:25.032430 | 2019-07-25T14:37:43 | 2019-07-25T14:37:43 | 177,871,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,665 | py | """A gaussian spheres input technique. Randomly places cluster centers in an n-dimensional cube.
Then assigns each cluster a label. To generate points, a cluster is selected uniformly at random,
then a radius is selected from a normal distribution, then a point is selected uniformly from
within a sphere centered at the cluster center with the given radius."""
import torch
import typing
import scipy.spatial.distance as distance
import numpy as np
from shared.pwl import PointWithLabelProducer, PointWithLabel
class GaussianSpheresPWLP(PointWithLabelProducer):
"""Produces points selected from gaussian spheres. Marks are ignored.
Attributes:
clusters (list[PointWithLabel])
radius_dist (torch.distributions.distribution.Distribution)
"""
def __init__(self, epoch_size: int, input_dim: int, output_dim: int,
clusters: typing.List[PointWithLabel], std_dev: float, mean: float):
super().__init__(epoch_size, input_dim, output_dim)
self.clusters = clusters
self.radius_dist = torch.distributions.normal.Normal(
torch.tensor([float(mean)]), torch.tensor([float(std_dev)])) #pylint: disable=not-callable
@classmethod
def create(cls, epoch_size: int, input_dim: int, output_dim: int, cube_half_side_len: float,
num_clusters: int, std_dev: float, mean: float, min_sep: float,
force_split: bool = False):
"""Creates a new gaussian spheres pwlp, pulling points from the cube with a side length
of 2*cube_half_side_len centered at the origin
Arguments:
epoch_size (int): the number of points we will consider 1 epoch
input_dim (int): the input dimension (i.e., number of coordinates per point)
output_dim (int): the output dimension (i.e., number of unique labels)
cube_half_side_len (float): if '1', each coordinate is uniform from [-1, 1]
num_clusters (int): the number of clusters
std_dev (float): standard deviation of the radius
mean (float): mean of the radius
min_sep (float): minimum separation between points
force_split (bool, optional): if True then there will be an even as possible
distribution of cluster labels. if False then there will be a multinomial
distribution of cluster labels with the same probability for each
"""
# rejection sampling
clust_centers = np.zeros((num_clusters, input_dim), dtype='double')
clusters = []
if force_split:
next_label = 0
for i in range(num_clusters):
rejections = 0
center = torch.zeros((input_dim,), dtype=torch.double)
while True:
torch.rand(input_dim, out=center)
center = (center - 0.5) * 2 * cube_half_side_len
                distances = distance.cdist(center.reshape(1, -1).numpy(), clust_centers)
                # note: rows for clusters not yet placed are still zeros, so
                # candidates within min_sep of the origin are rejected as well
if np.min(distances) < min_sep:
rejections += 1
if rejections > 10000:
raise ValueError('rejected too many points!')
else:
break
clust_centers[i, :] = center.numpy()
if force_split:
clust_label = next_label
next_label = (next_label + 1) % output_dim
else:
clust_label = torch.randint(output_dim, (1,)).item()
clusters.append(PointWithLabel(point=center, label=clust_label))
return cls(epoch_size, input_dim, output_dim, clusters, std_dev, mean)
def _fill_with_clusters(self, points: torch.tensor, labels: torch.tensor,
cluster_inds: torch.tensor):
        vec = torch.zeros((self.input_dim,), dtype=torch.double)
        for i in range(points.shape[0]):
            clust = self.clusters[cluster_inds[i].item()]
            radius = torch.abs(self.radius_dist.sample()).double()
            # draw a random direction and scale it to the sampled radius, placing
            # the point at exactly that distance from the cluster center
            torch.randn(self.input_dim, out=vec)
            vec *= (radius / torch.norm(vec))
            labels[i] = clust.label
            points[i, :] = clust.point + vec
def _fill(self, points: torch.tensor, labels: torch.tensor):
batch_size = points.shape[0]
cluster_inds = torch.randint(len(self.clusters), (batch_size,), dtype=torch.long)
self._fill_with_clusters(points, labels, cluster_inds)
def fill_uniform(self, points: torch.tensor, labels: torch.tensor):
"""Fills the specified points and labels such that the labels are spread
evenly"""
batch_size = points.shape[0]
num_per_label = batch_size // self.output_dim
if num_per_label * self.output_dim != batch_size:
raise ValueError(f'cannot fill {batch_size} uniformly when output dim is {self.output_dim}')
cluster_lbls = np.zeros(len(self.clusters), dtype='int32')
for ind, clust in enumerate(self.clusters):
cluster_lbls[ind] = clust.label
cluster_inds = torch.zeros(0, dtype=torch.long)
for lbl in range(self.output_dim):
mask = cluster_lbls == lbl
viable_clust_inds = np.arange(len(self.clusters), dtype='int64')[mask]
lbl_clust_inds = np.random.choice(viable_clust_inds, (num_per_label,))
cluster_inds = torch.cat((cluster_inds, torch.from_numpy(lbl_clust_inds)))
self._fill_with_clusters(points, labels, cluster_inds)
def _position(self, pos: int):
pass
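# Illustrative usage (a sketch; the numeric values are placeholders and
# shared.pwl is this repository's own module):
#   pwlp = GaussianSpheresPWLP.create(epoch_size=1000, input_dim=2, output_dim=4,
#                                     cube_half_side_len=1.0, num_clusters=12,
#                                     std_dev=0.04, mean=0.1, min_sep=0.3,
#                                     force_split=True)  # every label gets clusters
#   points = torch.zeros((32, 2), dtype=torch.double)
#   labels = torch.zeros(32, dtype=torch.long)
#   pwlp._fill(points, labels)         # points around randomly chosen clusters
#   pwlp.fill_uniform(points, labels)  # 8 points per label (32 divisible by 4)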
| [
"[email protected]"
] | |
e9ad0d1f8948db6b00d4c77f6d1d720bfbf254d9 | 6b699b7763a0ff8c32b85014d96f6faf02514a2e | /models/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor.py | 96586e17f65a9ead1cb9d1c93540cd46477f3b2f | [
"Apache-2.0"
] | permissive | leizeling/Base_tensorflow-object_detection_2Dcord | df7c195685fed21fd456f1dd79881a198cf8b6e0 | d07418eb68543adc2331211ccabbc27137c8676e | refs/heads/master | 2020-03-19T11:51:57.961688 | 2018-06-07T14:47:16 | 2018-06-07T14:47:16 | 136,481,479 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,185 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Embedded-friendly SSDFeatureExtractor for MobilenetV1 features."""
import tensorflow as tf
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import feature_map_generators
from object_detection.utils import context_manager
from object_detection.utils import ops
from nets import mobilenet_v1
slim = tf.contrib.slim
class EmbeddedSSDMobileNetV1FeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
"""Embedded-friendly SSD Feature Extractor using MobilenetV1 features.
This feature extractor is similar to SSD MobileNetV1 feature extractor, and
it fixes input resolution to be 256x256, reduces the number of feature maps
used for box prediction and ensures convolution kernel to be no larger
than input tensor in spatial dimensions.
This feature extractor requires support of the following ops if used in
embedded devices:
- Conv
- DepthwiseConv
- Relu6
All conv/depthwiseconv use SAME padding, and no additional spatial padding is
needed.
"""
def __init__(self,
is_training,
depth_multiplier,
min_depth,
pad_to_multiple,
conv_hyperparams_fn,
reuse_weights=None,
use_explicit_padding=False,
use_depthwise=False,
override_base_feature_extractor_hyperparams=False):
"""MobileNetV1 Feature Extractor for Embedded-friendly SSD Models.
Args:
is_training: whether the network is in training mode.
depth_multiplier: float depth multiplier for feature extractor.
min_depth: minimum feature extractor depth.
pad_to_multiple: the nearest multiple to zero pad the input height and
width dimensions to. For EmbeddedSSD it must be set to 1.
conv_hyperparams_fn: A function to construct tf slim arg_scope for conv2d
and separable_conv2d ops in the layers that are added on top of the
base feature extractor.
reuse_weights: Whether to reuse variables. Default is None.
use_explicit_padding: Whether to use explicit padding when extracting
features. Default is False.
use_depthwise: Whether to use depthwise convolutions. Default is False.
override_base_feature_extractor_hyperparams: Whether to override
hyperparameters of the base feature extractor with the one from
`conv_hyperparams_fn`.
Raises:
ValueError: upon invalid `pad_to_multiple` values.
"""
if pad_to_multiple != 1:
raise ValueError('Embedded-specific SSD only supports `pad_to_multiple` '
'of 1.')
super(EmbeddedSSDMobileNetV1FeatureExtractor, self).__init__(
is_training, depth_multiplier, min_depth, pad_to_multiple,
conv_hyperparams_fn, reuse_weights, use_explicit_padding, use_depthwise,
override_base_feature_extractor_hyperparams)
def preprocess(self, resized_inputs):
"""SSD preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
"""
    return (2.0 / 255.0) * resized_inputs - 1.0  # 0 -> -1.0, 255 -> 1.0
def extract_features(self, preprocessed_inputs):
"""Extract features from preprocessed inputs.
Args:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
feature_maps: a list of tensors where the ith tensor has shape
[batch, height_i, width_i, depth_i]
Raises:
ValueError: if image height or width are not 256 pixels.
"""
image_shape = preprocessed_inputs.get_shape()
image_shape.assert_has_rank(4)
image_height = image_shape[1].value
image_width = image_shape[2].value
if image_height is None or image_width is None:
shape_assert = tf.Assert(
tf.logical_and(tf.equal(tf.shape(preprocessed_inputs)[1], 256),
tf.equal(tf.shape(preprocessed_inputs)[2], 256)),
['image size must be 256 in both height and width.'])
with tf.control_dependencies([shape_assert]):
preprocessed_inputs = tf.identity(preprocessed_inputs)
elif image_height != 256 or image_width != 256:
raise ValueError('image size must be = 256 in both height and width;'
' image dim = %d,%d' % (image_height, image_width))
feature_map_layout = {
'from_layer': [
'Conv2d_11_pointwise', 'Conv2d_13_pointwise', '', '', ''
],
'layer_depth': [-1, -1, 512, 256, 256],
'conv_kernel_size': [-1, -1, 3, 3, 2],
'use_explicit_padding': self._use_explicit_padding,
'use_depthwise': self._use_depthwise,
}
with tf.variable_scope('MobilenetV1',
reuse=self._reuse_weights) as scope:
with slim.arg_scope(
mobilenet_v1.mobilenet_v1_arg_scope(is_training=None)):
with (slim.arg_scope(self._conv_hyperparams_fn())
if self._override_base_feature_extractor_hyperparams
else context_manager.IdentityContextManager()):
# TODO(skligys): Enable fused batch norm once quantization supports it.
with slim.arg_scope([slim.batch_norm], fused=False):
_, image_features = mobilenet_v1.mobilenet_v1_base(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple),
final_endpoint='Conv2d_13_pointwise',
min_depth=self._min_depth,
depth_multiplier=self._depth_multiplier,
use_explicit_padding=self._use_explicit_padding,
scope=scope)
with slim.arg_scope(self._conv_hyperparams_fn()):
# TODO(skligys): Enable fused batch norm once quantization supports it.
with slim.arg_scope([slim.batch_norm], fused=False):
feature_maps = feature_map_generators.multi_resolution_feature_maps(
feature_map_layout=feature_map_layout,
depth_multiplier=self._depth_multiplier,
min_depth=self._min_depth,
insert_1x1_conv=True,
image_features=image_features)
return feature_maps.values()
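
# A minimal smoke-test sketch. It assumes a TF1 graph-mode environment with
# the Object Detection API and slim on the path; `_dummy_conv_hyperparams_fn`
# is a hypothetical stand-in for the arg_scope that the config framework's
# hyperparams builder would normally supply.
def _dummy_conv_hyperparams_fn():
  with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                      activation_fn=tf.nn.relu6,
                      normalizer_fn=slim.batch_norm) as sc:
    return sc


if __name__ == '__main__':
  extractor = EmbeddedSSDMobileNetV1FeatureExtractor(
      is_training=False,
      depth_multiplier=1.0,
      min_depth=16,
      pad_to_multiple=1,  # the embedded variant requires exactly 1
      conv_hyperparams_fn=_dummy_conv_hyperparams_fn)
  images = tf.placeholder(tf.float32, shape=[1, 256, 256, 3])
  features = extractor.extract_features(extractor.preprocess(images))
  for feature_map in features:
    print(feature_map)  # five maps, from 16x16 down to 1x1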
| [
"[email protected]"
] | |
8f1f76c2428f1baf72a698618a67511bdb315528 | fb8909f8ca2418e3ee25469073fa06636c3d294b | /src/unity/python/turicreate/toolkits/regression/decision_tree_regression.py | cde30984bb969b7d260d1210461c4ede7f907451 | [
"BSD-3-Clause"
] | permissive | fossabot/turicreate | a1e9d95fc1f68e7074d1d8024ffd64de82463d39 | a500d5e52143ad15ebdf771d9f74198982c7c45c | refs/heads/master | 2020-12-02T21:45:09.045339 | 2019-12-31T17:50:02 | 2019-12-31T17:50:02 | 231,127,861 | 0 | 0 | BSD-3-Clause | 2019-12-31T17:50:01 | 2019-12-31T17:50:00 | null | UTF-8 | Python | false | false | 18,183 | py | # -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
"""
This package contains the decision tree model class and the create function.
"""
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
import turicreate as _turicreate
from turicreate.toolkits._supervised_learning import SupervisedLearningModel as _SupervisedLearningModel
import turicreate.toolkits._supervised_learning as _sl
import turicreate.toolkits._main as _toolkits_main
from turicreate.toolkits._internal_utils import _toolkit_repr_print
from turicreate.toolkits._internal_utils import _raise_error_evaluation_metric_is_valid
from turicreate.toolkits._internal_utils import _raise_error_if_column_exists
from turicreate.toolkits._tree_model_mixin import TreeModelMixin as _TreeModelMixin
from turicreate.toolkits._internal_utils import _raise_error_if_not_sframe
from turicreate.toolkits._internal_utils import _map_unity_proxy_to_object
_DECISION_TREE_MODEL_PARAMS_KEYS = ['max_depth', 'min_child_weight',
'min_loss_reduction']
_DECISION_TREE_TRAINING_PARAMS_KEYS = ['objective', 'training_time',
'training_error', 'validation_error', 'evaluation_metric']
_DECISION_TREE_TRAINING_DATA_PARAMS_KEYS = ['target', 'features',
'num_features', 'num_examples', 'num_validation_examples']
class DecisionTreeRegression(_SupervisedLearningModel, _TreeModelMixin):
"""
    The prediction is based on a single base learner, a `regression tree
    <http://en.wikipedia.org/wiki/Decision_tree_learning>`_; the algorithm is
    the special case of boosted trees regression with the number of trees set
    to 1. Unlike linear models such as linear regression, a tree model can
    capture non-linear interactions between the features and the target. It
    handles numerical features and categorical features with tens of
    categories well, but is less suitable for highly sparse features such as
    text data.
This model cannot be constructed directly. Instead, use
:func:`turicreate.decision_tree_regression.create` to create an instance of
this model. A detailed list of parameter options and code samples are
available in the documentation for the create function.
See Also
--------
create
"""
def __init__(self, proxy):
"""__init__(self)"""
self.__proxy__ = proxy
self.__name__ = self.__class__._native_name()
@classmethod
def _native_name(cls):
return "decision_tree_regression"
def __str__(self):
"""
Return a string description of the model to the ``print`` method.
Returns
-------
out : string
A description of the model.
"""
return self.__repr__()
def _get_summary_struct(self):
"""
Returns a structured description of the model, including (where relevant)
the schema of the training data, description of the training data,
training statistics, and model hyperparameters.
Returns
-------
sections : list (of list of tuples)
A list of summary sections.
Each section is a list.
Each item in a section list is a tuple of the form:
('<label>','<field>')
section_titles: list
A list of section titles.
The order matches that of the 'sections' object.
"""
data_fields = [
('Number of examples', 'num_examples'),
('Number of feature columns', 'num_features'),
('Number of unpacked features', 'num_unpacked_features')]
training_fields = [
("Max tree depth", 'max_depth'),
("Train RMSE", 'training_rmse'),
("Validation RMSE", 'validation_rmse'),
("Training time (sec)", 'training_time')]
return ( [data_fields, training_fields], ['Schema', 'Settings'])
def __repr__(self):
"""
Print a string description of the model, when the model name is entered
in the terminal.
"""
(sections, section_titles) = self._get_summary_struct()
return _toolkit_repr_print(self, sections, section_titles, width=30)
def _get(self, field):
"""
Get the value of a given field. The list of all queryable fields is
detailed below, and can be obtained programmatically using the
:func:`~turicreate.decision_tree_regression._list_fields` method.
+-------------------------+--------------------------------------------------------------------------------+
| Field | Description |
+=========================+================================================================================+
| column_subsample | Percentage of the columns for training each individual tree |
+-------------------------+--------------------------------------------------------------------------------+
| features | Names of the feature columns |
+-------------------------+--------------------------------------------------------------------------------+
| max_depth | The maximum depth of individual trees |
+-------------------------+--------------------------------------------------------------------------------+
        | min_child_weight        | Minimum weight required on the leaf nodes                                      |
+-------------------------+--------------------------------------------------------------------------------+
        | min_loss_reduction      | Minimum loss reduction required for splitting a node                           |
+-------------------------+--------------------------------------------------------------------------------+
| num_features | Number of features in the model |
+-------------------------+--------------------------------------------------------------------------------+
| num_unpacked_features | Number of features in the model (including unpacked dict/list type columns) |
+-------------------------+--------------------------------------------------------------------------------+
| num_examples | Number of training examples |
+-------------------------+--------------------------------------------------------------------------------+
| num_validation_examples | Number of validation examples |
+-------------------------+--------------------------------------------------------------------------------+
| target | Name of the target column |
+-------------------------+--------------------------------------------------------------------------------+
| training_error | Error on training data |
+-------------------------+--------------------------------------------------------------------------------+
| training_time | Time spent on training the model in seconds |
+-------------------------+--------------------------------------------------------------------------------+
| trees_json | Tree encoded using JSON |
+-------------------------+--------------------------------------------------------------------------------+
        | validation_error        | Error on validation data                                                       |
+-------------------------+--------------------------------------------------------------------------------+
| unpacked_features | Feature names (including expanded list/dict features) |
+-------------------------+--------------------------------------------------------------------------------+
| random_seed | Seed for row and column subselection |
+-------------------------+--------------------------------------------------------------------------------+
| metric | Performance metric(s) that are tracked during training |
+-------------------------+--------------------------------------------------------------------------------+
Parameters
----------
field : string
Name of the field to be retrieved.
Returns
-------
out : [various]
The current value of the requested field.
Examples
--------
>>> m.get('training_error')
"""
return super(DecisionTreeRegression, self)._get(field)
def evaluate(self, dataset, metric='auto', missing_value_action='auto'):
"""
Evaluate the model on the given dataset.
Parameters
----------
dataset : SFrame
Dataset in the same format used for training. The columns names and
types of the dataset must be the same as that used in training.
metric : str, optional
Name of the evaluation metric. Can be one of:
- 'auto': Compute all metrics.
            - 'rmse': Root mean squared error.
- 'max_error': Maximum error.
missing_value_action : str, optional
Action to perform when missing values are encountered. Can be
one of:
            - 'auto': By default the model will treat missing values as is.
- 'impute': Proceed with evaluation by filling in the missing
values with the mean of the training data. Missing
values are also imputed if an entire column of data is
missing during evaluation.
- 'error': Do not proceed with evaluation and terminate with
an error message.
Returns
-------
out : dict
A dictionary containing the evaluation result.
See Also
----------
create, predict
Examples
--------
..sourcecode:: python
>>> results = model.evaluate(test_data, 'rmse')
"""
_raise_error_evaluation_metric_is_valid(
metric, ['auto', 'rmse', 'max_error'])
return super(DecisionTreeRegression, self).evaluate(dataset,
missing_value_action=missing_value_action,
metric=metric)
def export_coreml(self, filename):
"""
Export the model in Core ML format.
Parameters
----------
filename: str
A valid filename where the model can be saved.
Examples
--------
>>> model.export_coreml("MyModel.mlmodel")
"""
from turicreate.toolkits import _coreml_utils
display_name = "decision tree regression"
short_description = _coreml_utils._mlmodel_short_description(display_name)
context = {"mode" : "regression",
"model_type" : "decision_tree",
"version": _turicreate.__version__,
"class": self.__class__.__name__,
"short_description": short_description}
self._export_coreml_impl(filename, context)
def predict(self, dataset, missing_value_action='auto'):
"""
Predict the target column of the given dataset.
The target column is provided during
:func:`~turicreate.decision_tree_regression.create`. If the target column is in the
`dataset` it will be ignored.
Parameters
----------
dataset : SFrame
A dataset that has the same columns that were used during training.
If the target column exists in ``dataset`` it will be ignored
while making predictions.
missing_value_action : str, optional
Action to perform when missing values are encountered. Can be
one of:
            - 'auto': By default the model will treat missing values as is.
- 'impute': Proceed with evaluation by filling in the missing
values with the mean of the training data. Missing
values are also imputed if an entire column of data is
missing during evaluation.
- 'error': Do not proceed with evaluation and terminate with
an error message.
Returns
-------
out : SArray
Predicted target value for each example (i.e. row) in the dataset.
See Also
----------
create, predict
Examples
--------
>>> m.predict(testdata)
"""
return super(DecisionTreeRegression, self).predict(dataset, output_type='margin',
missing_value_action=missing_value_action)
@classmethod
def _get_queryable_methods(cls):
'''Returns a list of method names that are queryable through Predictive
Service'''
methods = _SupervisedLearningModel._get_queryable_methods()
methods['extract_features'] = {'dataset': 'sframe'}
return methods
def create(dataset, target,
features=None,
validation_set='auto',
max_depth=6,
min_loss_reduction=0.0, min_child_weight=0.1,
verbose=True,
random_seed = None,
metric = 'auto',
**kwargs):
"""
Create a :class:`~turicreate.decision_tree_regression.DecisionTreeRegression` to predict
a scalar target variable using one or more features. In addition to standard
numeric and categorical types, features can also be extracted automatically
from list- or dictionary-type SFrame columns.
Parameters
----------
dataset : SFrame
A training dataset containing feature columns and a target column.
        Only numerically typed (int, float) target columns are allowed.
target : str
The name of the column in ``dataset`` that is the prediction target.
This column must have a numeric type.
features : list[str], optional
A list of columns names of features used for training the model.
Defaults to None, using all columns.
validation_set : SFrame, optional
        The validation set that is used to track the validation result as
        training progresses.
max_depth : float, optional
Maximum depth of a tree. Must be at least 1.
min_loss_reduction : float, optional (non-negative)
Minimum loss reduction required to make a further partition/split a
node during the tree learning phase. Larger (more positive) values
can help prevent overfitting by avoiding splits that do not
sufficiently reduce the loss function.
min_child_weight : float, optional (non-negative)
Controls the minimum weight of each leaf node. Larger values result in
more conservative tree learning and help prevent overfitting.
Formally, this is minimum sum of instance weights (hessians) in each
node. If the tree learning algorithm results in a leaf node with the
sum of instance weights less than `min_child_weight`, tree building
will terminate.
verbose : boolean, optional
If True, print progress information during training.
random_seed: int, optional
        Seeds random operations such as column and row subsampling, so that
        results are reproducible.
metric : str or list[str], optional
Performance metric(s) that are tracked during training. When specified,
the progress table will display the tracked metric(s) on training and
validation set.
Supported metrics are: {'rmse', 'max_error'}
Returns
-------
out : DecisionTreeRegression
A trained decision tree model
References
----------
- `Wikipedia - Gradient tree boosting
<http://en.wikipedia.org/wiki/Gradient_boosting#Gradient_tree_boosting>`_
- `Trevor Hastie's slides on Boosted Trees and Random Forest
<http://jessica2.msri.org/attachments/10778/10778-boost.pdf>`_
See Also
--------
DecisionTreeRegression, turicreate.linear_regression.LinearRegression, turicreate.regression.create
Examples
--------
Setup the data:
>>> url = 'https://static.turi.com/datasets/xgboost/mushroom.csv'
>>> data = turicreate.SFrame.read_csv(url)
>>> data['label'] = data['label'] == 'p'
Split the data into training and test data:
>>> train, test = data.random_split(0.8)
Create the model:
>>> model = turicreate.decision_tree_regression.create(train, target='label')
Make predictions and evaluate the model:
>>> predictions = model.predict(test)
>>> results = model.evaluate(test)
"""
if random_seed is not None:
kwargs['random_seed'] = random_seed
model = _sl.create(dataset = dataset,
target = target,
features = features,
model_name = 'decision_tree_regression',
validation_set = validation_set,
max_depth = max_depth,
min_loss_reduction = min_loss_reduction,
min_child_weight = min_child_weight,
verbose = verbose, **kwargs)
return DecisionTreeRegression(model.__proxy__)
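
# A quick end-to-end sketch on a toy SFrame (illustration only; it mirrors
# the docstring examples above and assumes a working turicreate install).
if __name__ == '__main__':
    sf = _turicreate.SFrame({'x1': [0.1, 0.5, 0.9, 1.3],
                             'x2': [1.0, 0.0, 1.0, 0.0],
                             'y': [1.1, 1.9, 3.2, 3.8]})
    model = create(sf, target='y', max_depth=3, validation_set=None)
    print(model.predict(sf))                  # per-row predictions (SArray)
    print(model.evaluate(sf, metric='rmse'))  # {'rmse': ...}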
| [
"[email protected]"
] | |
1bcc583b18dbe1c149df61636316daf19ebb3da8 | bc3bd7601fa427d638f872b4ddfdebe4ce23a25c | /bitbucketopenapi/models/branching_model_all_of_branch_types.py | 5c19cc4a2bfc8439d01eead784bcecbd8c6be7a7 | [] | no_license | magmax/bitbucket-openapi | 59ef55ab3aa42940c8211d3ecd16ef7d6fc74c21 | 836ae762735ae5b1ececcee5287fa271d7d8de5b | refs/heads/master | 2020-07-28T16:10:32.736169 | 2019-09-19T04:17:09 | 2019-09-19T04:17:09 | 209,460,884 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,858 | py | # coding: utf-8
"""
Bitbucket API
Code against the Bitbucket API to automate simple tasks, embed Bitbucket data into your own site, build mobile or desktop apps, or even add custom UI add-ons into Bitbucket itself using the Connect framework. # noqa: E501
The version of the OpenAPI document: 2.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class BranchingModelAllOfBranchTypes(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'kind': 'str',
'prefix': 'str'
}
attribute_map = {
'kind': 'kind',
'prefix': 'prefix'
}
def __init__(self, kind=None, prefix=None): # noqa: E501
"""BranchingModelAllOfBranchTypes - a model defined in OpenAPI""" # noqa: E501
self._kind = None
self._prefix = None
self.discriminator = None
self.kind = kind
self.prefix = prefix
@property
def kind(self):
"""Gets the kind of this BranchingModelAllOfBranchTypes. # noqa: E501
The kind of branch. # noqa: E501
:return: The kind of this BranchingModelAllOfBranchTypes. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this BranchingModelAllOfBranchTypes.
The kind of branch. # noqa: E501
:param kind: The kind of this BranchingModelAllOfBranchTypes. # noqa: E501
:type: str
"""
if kind is None:
raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501
allowed_values = ["feature", "bugfix", "release", "hotfix"] # noqa: E501
if kind not in allowed_values:
raise ValueError(
"Invalid value for `kind` ({0}), must be one of {1}" # noqa: E501
.format(kind, allowed_values)
)
self._kind = kind
@property
def prefix(self):
"""Gets the prefix of this BranchingModelAllOfBranchTypes. # noqa: E501
The prefix for this branch type. A branch with this prefix will be classified as per `kind`. The prefix must be a valid prefix for a branch and must always exist. It cannot be blank, empty or `null`. # noqa: E501
:return: The prefix of this BranchingModelAllOfBranchTypes. # noqa: E501
:rtype: str
"""
return self._prefix
@prefix.setter
def prefix(self, prefix):
"""Sets the prefix of this BranchingModelAllOfBranchTypes.
The prefix for this branch type. A branch with this prefix will be classified as per `kind`. The prefix must be a valid prefix for a branch and must always exist. It cannot be blank, empty or `null`. # noqa: E501
:param prefix: The prefix of this BranchingModelAllOfBranchTypes. # noqa: E501
:type: str
"""
if prefix is None:
raise ValueError("Invalid value for `prefix`, must not be `None`") # noqa: E501
self._prefix = prefix
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BranchingModelAllOfBranchTypes):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
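
# A small usage sketch (illustrative, not part of the generated client):
# construction validates the `kind` enum, and `to_dict` mirrors `openapi_types`.
if __name__ == '__main__':
    branch_type = BranchingModelAllOfBranchTypes(kind='release', prefix='release/')
    print(branch_type.to_dict())  # {'kind': 'release', 'prefix': 'release/'}
    try:
        BranchingModelAllOfBranchTypes(kind='trunk', prefix='trunk/')
    except ValueError as err:
        print(err)  # `kind` must be one of feature, bugfix, release, hotfix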
| [
"[email protected]"
] | |
00d30d1d0c99da6ea3aafe35fc7e3c3e88eb6f3e | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/ZMGOCycleFlexConfig.py | a346c3b220c6ea3812a72dfb56fd69a64303cdc3 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 2,868 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class ZMGOCycleFlexConfig(object):
def __init__(self):
self._cycle_flex_withhold_fee_name = None
self._cycle_flex_withhold_max_price = None
self._cycle_flex_withhold_total_period_count = None
@property
def cycle_flex_withhold_fee_name(self):
return self._cycle_flex_withhold_fee_name
@cycle_flex_withhold_fee_name.setter
def cycle_flex_withhold_fee_name(self, value):
self._cycle_flex_withhold_fee_name = value
@property
def cycle_flex_withhold_max_price(self):
return self._cycle_flex_withhold_max_price
@cycle_flex_withhold_max_price.setter
def cycle_flex_withhold_max_price(self, value):
self._cycle_flex_withhold_max_price = value
@property
def cycle_flex_withhold_total_period_count(self):
return self._cycle_flex_withhold_total_period_count
@cycle_flex_withhold_total_period_count.setter
def cycle_flex_withhold_total_period_count(self, value):
self._cycle_flex_withhold_total_period_count = value
def to_alipay_dict(self):
params = dict()
if self.cycle_flex_withhold_fee_name:
if hasattr(self.cycle_flex_withhold_fee_name, 'to_alipay_dict'):
params['cycle_flex_withhold_fee_name'] = self.cycle_flex_withhold_fee_name.to_alipay_dict()
else:
params['cycle_flex_withhold_fee_name'] = self.cycle_flex_withhold_fee_name
if self.cycle_flex_withhold_max_price:
if hasattr(self.cycle_flex_withhold_max_price, 'to_alipay_dict'):
params['cycle_flex_withhold_max_price'] = self.cycle_flex_withhold_max_price.to_alipay_dict()
else:
params['cycle_flex_withhold_max_price'] = self.cycle_flex_withhold_max_price
if self.cycle_flex_withhold_total_period_count:
if hasattr(self.cycle_flex_withhold_total_period_count, 'to_alipay_dict'):
params['cycle_flex_withhold_total_period_count'] = self.cycle_flex_withhold_total_period_count.to_alipay_dict()
else:
params['cycle_flex_withhold_total_period_count'] = self.cycle_flex_withhold_total_period_count
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = ZMGOCycleFlexConfig()
if 'cycle_flex_withhold_fee_name' in d:
o.cycle_flex_withhold_fee_name = d['cycle_flex_withhold_fee_name']
if 'cycle_flex_withhold_max_price' in d:
o.cycle_flex_withhold_max_price = d['cycle_flex_withhold_max_price']
if 'cycle_flex_withhold_total_period_count' in d:
o.cycle_flex_withhold_total_period_count = d['cycle_flex_withhold_total_period_count']
return o
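
# Round-trip sketch (illustrative; the field values below are placeholders):
# `from_alipay_dict` and `to_alipay_dict` invert each other for plain
# truthy values such as these strings.
if __name__ == '__main__':
    payload = {
        'cycle_flex_withhold_fee_name': 'monthly-membership-fee',
        'cycle_flex_withhold_max_price': '2000',
        'cycle_flex_withhold_total_period_count': '12',
    }
    config = ZMGOCycleFlexConfig.from_alipay_dict(payload)
    assert config.to_alipay_dict() == payload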
| [
"[email protected]"
] | |
33142582fc29000c271ed5a172c7f98479c6dbda | 2f09a5d75343702a0aecf10112b77b00c2063816 | /setup.py | 466ca0e3e205500ff3041ffbb29c2b4101f5c4d1 | [
"Apache-2.0"
] | permissive | tracer0tong/statsitemap | 8ac963d03ab53a61c942eeb7c1d63d4fb03c0c24 | 0e0cc4387b98cd91ffc717f5494e0a2168127992 | refs/heads/master | 2016-09-10T21:36:22.734268 | 2015-04-21T22:02:49 | 2015-04-21T22:02:49 | 34,352,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 524 | py | from distutils.core import setup
setup(
name='statsitemap',
packages=['statsitemap'],
version='0.1',
description='Library for building statistical graph (sitemap) from called URI/referer pairs',
author='Yury Leonychev (@tracer0tong)',
author_email='[email protected]',
url='https://github.com/tracer0tong/statsitemap',
download_url='https://github.com/tracer0tong/statsitemap/tarball/0.1',
keywords=['statsitemap', 'graph', 'nginx', 'apache', 'accesslog'],
classifiers=[],
)
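
# Typical local workflow for this script (for reference):
#   python setup.py sdist    # build a source distribution under dist/
#   pip install .            # install the package into the active environment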
| [
"[email protected]"
] | |
c8f7add0004abb00bdec5a84216d5e250182acc9 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/agc007/B/4738230.py | 5ff3a4324a3c6b6e9adee2311585322f49bfc888 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | n = int(input())
x = list(map(int, input().split()))
a = [20001*i for i in range(1,n+1)]
b = [20001*(n+1-i) for i in range(1,n+1)]
for i in range(n):
b[x[i]-1] += i
for x in a:print(x, end=' ')
print()
for x in b:print(x, end=' ') | [
"[email protected]"
] |