blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fe54aba99af7334fa816a9c0282b522dea6e026c | 8d55d41a4f5c0b89331cac714c1525e9581d9720 | /WalkingFoot/main_RevisionExamples2.py | 18a2303fe1a6b91bde48556986a9ae98c1b31b48 | [
"Apache-2.0"
] | permissive | hpgit/HumanFoot | 8cf35ceeeb35a0371e03eaf19d6da58dc01487eb | f9a1a341b7c43747bddcd5584b8c98a0d1ac2973 | refs/heads/master | 2022-04-13T23:38:19.072203 | 2019-12-06T06:36:10 | 2019-12-06T06:36:10 | 41,348,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 43,724 | py | from fltk import *
import copy, os.path, cPickle, time
import numpy as np
import sys
if '../PyCommon/modules' not in sys.path:
sys.path.append('../PyCommon/modules')
import Math.mmMath as mm
import Math.csMath as cm
import Math.ysFunctionGraph as yfg
import Renderer.ysRenderer as yr
import Renderer.csVpRenderer as cvr
import Simulator.csVpWorld as cvw
import Simulator.csVpModel as cvm
import Simulator.ysVpUtil as yvu
import GUI.ysSimpleViewer as ysv
import GUI.ysMultiViewer as ymv
import ArticulatedBody.ysControl as yct
import ArticulatedBody.ysReferencePoints as yrp
import Motion.ysMotionAnalysis as yma
import Motion.ysBipedAnalysis as yba
import Motion.ysMotion as ym
import Motion.ysMotionBlend as ymb
import Motion.ysMotionExtend as ymt
import Motion.ysSkeletonEdit as yhe
import Motion.mmAnalyticIK as aik
import Util.ysMatplotEx as ymp
import Resource.ysMotionLoader as yf
import Simulator.ysPhysConfig as ypc
# RGB colors (0-255) used by the renderers below.
MOTION_COLOR = (213,111,162)     # reference motion model
CHARACTER_COLOR = (20,166,188)   # simulated control model
#BOX_COLOR = (255,204,153)
BOX_COLOR = (235,184,133)
def push_simbicon_mass():
    """Reference table of SIMBICON character body masses (kg).

    Documentation-only stub: as written the function had no executable
    statement (comments only), which is a syntax error; a `pass` is added
    so the module parses. Returns None.

    Trunk   29.27
    Head     5.89
    Pelvis  16.61
    Thigh    8.35
    Shank    4.16
    Foot     1.34
    Arm      2.79
    Forearm  1.21
    Hand     0.55
    """
    pass
class ForceInfo:
    """Description of one external push applied to the character."""
    def __init__(self, startFrame, duration, force):
        # frame index at which the push begins
        self.startFrame = startFrame
        # push duration in seconds
        self.duration = duration
        # force vector in Newtons
        self.force = force
        # target body index; filled in later (all pushes target the spine)
        self.targetBody = None
#===============================================================================
# load motion
#===============================================================================
# Viewer / debugging switches.
MULTI_VIEWER = False
CAMERA_TRACKING = True
TORQUE_PLOT = False

# global parameters
# Kt/Dt: joint tracking gains; Ks/Ds: penalty-contact gains.
# D = 2*sqrt(K), i.e. critically damped for a unit mass.
Kt = 60.; Dt = 2*(Kt**.5)
Ks = 4000.; Ds = 2*(Ks**.5)
K_stb_vel = .1
mu = 2.  # ground friction coefficient

# constants (walking-controller tuning values; commented lines are alternatives)
c_min_contact_vel = 100.
# c_min_contact_vel = 2.
c_min_contact_time = .7   # min normalized segment time before contact ends a swing
c_landing_duration = .2
c_taking_duration = .3
# c_swf_mid_offset = .0
c_swf_mid_offset = .02    # extra swing-foot height at mid-swing (sine-shaped)
c_swf_stability = .5
c_locking_vel = .05
# c_swf_offset = .0
c_swf_offset = .01
# c_swf_offset = .005
K_stp_pos = 0.
# c5 = .5; c6 = .01
c5 = .5; c6 = .02         # swing-foot height position (c5) / velocity (c6) gains
# c5 = .5; c6 = .05
# c5 = 1.; c6 = .05
# c5 = .0; c6 = .0
K_stb_vel = .1            # NOTE: re-assigns K_stb_vel set a few lines above
K_stb_pos = .1
OLD_SWING_HEIGHT = False
# OLD_SWING_HEIGHT = True
HIGHER_OFFSET = True
# HIGHER_OFFSET = False
dir = './ppmotion/'  # motion data directory (NOTE: shadows the builtin `dir`)

# max push
# forceInfos = []
# maximum
# forceInfos = [ForceInfo(4*i*30, .4, (160,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30, .4, (-130,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30, .4, (0,0,80)) for i in range(2,12)]
# Active setting: push (0,0,-105) N for .4 s every 120 frames, starting at frame 241.
forceInfos = [ForceInfo(4*i*30+1, .4, (0,0,-105)) for i in range(2,12)]
# # maximum with more checking
# forceInfos = [ForceInfo(4*i*30, .4, (145,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30+1, .4, (-120,0,0)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30+1, .4, (0,0,80)) for i in range(2,12)]
# forceInfos = [ForceInfo(4*i*30, .4, (0,0,-105)) for i in range(2,12)]
# # for video
# forceInfos = [ForceInfo(4*i*30+2, .4, (160,0,0)) for i in range(2,4)] \
#            + [ForceInfo(4*i*30+2, .4, (0,0,-105)) for i in range(4,6)] \
#            + [ForceInfo(4*i*30+2, .4, (-130,0,0)) for i in range(6,8)] \
#            + [ForceInfo(4*i*30+2, .4, (0,0,80)) for i in range(8,10)]

# Parameter overrides for the push experiment (replace the defaults above).
# Kt = 40.; Dt = 2*(Kt**.5)
# Ks = 3000.; Ds = 2*(Ks**.5)
# mu = 1.
# c_swf_mid_offset = .04
# K_swp_vel_sag = .0; K_swp_vel_sag_faster = .0;
# K_swp_pos_sag = 1.5; K_swp_pos_sag_faster = .1;
# K_swp_vel_cor = .25; K_swp_pos_cor = .3
# K_stp_pos = 0.
# K_stb_vel = .02
# K_stb_pos = .15
Kt = 40.; Dt = 2*(Kt**.5)
Ks = 3000.; Ds = 2*(Ks**.5)
mu = 1.5
c_swf_mid_offset = .04
# Swing-foot placement gains: sagittal (sag) / coronal (cor), velocity / position terms.
K_swp_vel_sag = .05; K_swp_vel_sag_faster = .0;
K_swp_pos_sag = 1.7; K_swp_pos_sag_faster = .1;
K_swp_vel_cor = .25; K_swp_pos_cor = .3
# K_stb_vel = .02
# K_stb_pos = .15

filename = 'wd2_WalkSameSame01_REPEATED.bvh'
motion_ori = yf.readBvhFile(dir+filename)
frameTime = 1/motion_ori.fps  # seconds per frame; assumes fps is a float (Python 2 division) — TODO confirm
if 'REPEATED' in filename:
    REPEATED = True
    CAMERA_TRACKING = True
else:
    REPEATED = False
#===============================================================================
# options
#===============================================================================
# Per-stage toggles for the controller pipeline in simulateCallback.
SEGMENT_EDITING = True
STANCE_FOOT_STABILIZE = True
MATCH_STANCE_LEG = True
SWING_FOOT_PLACEMENT = True
SWING_FOOT_HEIGHT = True
if '_FOOT' in filename:
    SWING_FOOT_ORIENTATION = True
else:
    SWING_FOOT_ORIENTATION = False
STANCE_FOOT_PUSH = True
STANCE_FOOT_BALANCING = True

# Blend-weight functions of normalized segment time t in [0,1].
stitch_func = lambda x : 1. - yfg.hermite2nd(x)
stf_stabilize_func = yfg.concatenate([yfg.hermite2nd, yfg.one], [c_landing_duration])
match_stl_func = yfg.hermite2nd
# match_stl_func_y = yfg.hermite2nd
swf_placement_func = yfg.hermite2nd
swf_height_func = yfg.hermite2nd
swf_height_sine_func = yfg.sine
# stf_balancing_func = yfg.concatenate([yfg.hermite2nd, yfg.one], [c_landing_duration])
stf_balancing_func = yfg.hermite2nd
#===============================================================================
# initialize character
#===============================================================================
# mcfgfile = open(dir + 'mcfg', 'r')
mcfgfile = open('mcfg_simbicon', 'r')
mcfg = cPickle.load(mcfgfile)  # pickled model configuration
mcfgfile.close()
wcfg = ypc.WorldConfig()
wcfg.planeHeight = 0.
wcfg.useDefaultContactModel = False
wcfg.lockingVel = c_locking_vel
stepsPerFrame = 30  # physics substeps per motion frame
wcfg.timeStep = (frameTime)/stepsPerFrame
vpWorld = cvw.VpWorld(wcfg)
# motionModel follows the reference motion; controlModel is the simulated character.
motionModel = cvm.VpMotionModel(vpWorld, motion_ori[0], mcfg)
controlModel = cvm.VpControlModel(vpWorld, motion_ori[0], mcfg)
vpWorld.initialize()
print controlModel
motionModel.recordVelByFiniteDiff()
controlModel.initializeHybridDynamics()
#===============================================================================
# load segment info
#===============================================================================
skeleton = motion_ori[0].skeleton
segname = os.path.splitext(filename)[0]+'.seg'
segfile = open(dir+segname, 'r')
seginfo = cPickle.load(segfile)  # per-segment gait metadata (state, interval, stance/swing joints, ...)
segfile.close()
intervals = [info['interval'] for info in seginfo]
states = [info['state'] for info in seginfo]
temp_motion = copy.deepcopy(motion_ori)
segments = yma.splitMotionIntoSegments(temp_motion, intervals)
print len(intervals), 'segments'
for i in range(len(intervals)):
    print '%dth'%i, yba.GaitState.text[states[i]], intervals[i], ',',
print

# Working motion streams; each controller stage appends its edited posture
# per frame (see simulateCallback). Streams starting with segments[0] are
# pre-seeded with the first segment.
motion_seg_orig = ym.JointMotion()
motion_seg_orig += segments[0]
motion_seg = ym.JointMotion()
motion_seg += segments[0]
motion_stitch = ym.JointMotion()
motion_stitch += segments[0]
motion_stf_stabilize = ym.JointMotion()
motion_match_stl = ym.JointMotion()
motion_swf_placement = ym.JointMotion()
motion_swf_height = ym.JointMotion()
motion_swf_orientation = ym.JointMotion()
motion_stf_balancing = ym.JointMotion()
motion_stf_push = ym.JointMotion()
motion_control = ym.JointMotion()
# Debug / paper-notation streams.
motion_debug1 = ym.JointMotion()
motion_debug2 = ym.JointMotion()
motion_debug3 = ym.JointMotion()
P = ym.JointMotion()
P_hat = ym.JointMotion()
M_tc = ym.JointMotion()
M_hat_tc_1 = ym.JointMotion()
#===============================================================================
# loop variable
#===============================================================================
# One-element lists act as mutable cells shared with simulateCallback.
seg_index = [0]
acc_offset = [0]
extended = [False]
prev_R_swp = [None]
stl_y_limit_num = [0]
stl_xz_limit_num = [0]
avg_dCM = [mm.O_Vec3()]
# avg_stf_v = [mm.O_Vec3()]
# avg_stf_av = [mm.O_Vec3()]
# stf_push_func = [yfg.zero]
step_length_cur = [0.]
step_length_tar = [0.]
step_axis = [mm.O_Vec3()]

#===============================================================================
# information
#===============================================================================
bodyIDsToCheck = range(vpWorld.getBodyNum())
mus = [mu]*len(bodyIDsToCheck)  # one friction coefficient per body
bodyMasses = controlModel.getBodyMasses()
totalMass = controlModel.getTotalMass()
# Cached body IDs / joint indices used throughout the controller.
lID = controlModel.name2id('LeftFoot'); rID = controlModel.name2id('RightFoot')
lUpLeg = skeleton.getJointIndex('LeftUpLeg');rUpLeg = skeleton.getJointIndex('RightUpLeg')
lKnee = skeleton.getJointIndex('LeftLeg'); rKnee = skeleton.getJointIndex('RightLeg')
lFoot = skeleton.getJointIndex('LeftFoot'); rFoot = skeleton.getJointIndex('RightFoot')
spine = skeleton.getJointIndex('Spine')
uppers = [skeleton.getJointIndex(name) for name in ['Hips', 'Spine', 'Spine1', 'LeftArm', 'LeftForeArm', 'RightArm', 'RightForeArm']]
upperMass = sum([bodyMasses[i] for i in uppers])
lLegs = [skeleton.getJointIndex(name) for name in ['LeftUpLeg', 'LeftLeg', 'LeftFoot']]
rLegs = [skeleton.getJointIndex(name) for name in ['RightUpLeg', 'RightLeg', 'RightFoot']]
allJoints = set(range(skeleton.getJointNum()))
halfFootHeight = controlModel.getBodyShape(lFoot)[1] / 2.  # assumes shape[1] is foot height — TODO confirm
# All external pushes target the spine body.
for fi in forceInfos:
    fi.targetBody = spine

#===========================================================================
# data collection
#===========================================================================
rhip_torques = []
rknee_torques = []
rankle_torques = []
#===============================================================================
# rendering
#===============================================================================
# One-element lists: render payloads overwritten in-place by simulateCallback.
rd_CM = [None]; rd_CP = [None]; rd_CMP = [None]
rd_forces = [None]; rd_force_points = [None]
rd_torques = []; rd_joint_positions = []
rd_point1 = [None]
rd_point2 = [None]
rd_vec1 = [None]; rd_vecori1 = [None]
rd_vec2 = [None]; rd_vecori2 = [None]
rd_frame1 = [None]
rd_frame2 = [None]
if MULTI_VIEWER:
    # Side-by-side viewers: reference motion (1) vs simulated character (2).
    viewer = ymv.MultiViewer(800, 655, True, wheelWork=True)
#    viewer = ymv.MultiViewer(1600, 1255)
    viewer.setRenderers1([cvr.VpModelRenderer(motionModel, MOTION_COLOR, yr.POLYGON_FILL)])
    viewer.setRenderers2([cvr.VpModelRenderer(controlModel, CHARACTER_COLOR, yr.POLYGON_FILL),
                          yr.ForcesRenderer(rd_forces, rd_force_points, (255,0,0), ratio=.01, lineWidth=.04, fromPoint=False)])
#    viewer.glWindow2.groundOffset[0] -= 10
    viewer.glWindow2.groundSize = 100
else:
    viewer = ysv.SimpleViewer()
#    viewer.record(False)
    # Commented renderers below visualize intermediate controller stages when enabled.
#    viewer.doc.addRenderer('motionModel', cvr.VpModelRenderer(motionModel, (0,150,255), yr.POLYGON_LINE))
    viewer.doc.addRenderer('controlModel', cvr.VpModelRenderer(controlModel, (200,200,200), yr.POLYGON_LINE))
#    viewer.doc.addObject('motion_ori', motion_ori)
#    viewer.doc.addRenderer('motion_ori', yr.JointMotionRenderer(motion_ori, (0,100,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_seg_orig', yr.JointMotionRenderer(motion_seg_orig, (0,100,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_seg', yr.JointMotionRenderer(motion_seg, (0,150,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_stitch', yr.JointMotionRenderer(motion_stitch, (0,255,200), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_stf_stabilize', yr.JointMotionRenderer(motion_stf_stabilize, (255,0,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_match_stl', yr.JointMotionRenderer(motion_match_stl, (255,200,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_swf_placement', yr.JointMotionRenderer(motion_swf_placement, (255,100,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_swf_height', yr.JointMotionRenderer(motion_swf_height, (50,255,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_swf_orientation', yr.JointMotionRenderer(motion_swf_orientation, (255,100,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_stf_push', yr.JointMotionRenderer(motion_stf_push, (50,255,200), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_stf_balancing', yr.JointMotionRenderer(motion_stf_balancing, (255,100,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_control', yr.JointMotionRenderer(motion_control, (255,0,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_debug1', yr.JointMotionRenderer(motion_debug1, (0,255,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_debug2', yr.JointMotionRenderer(motion_debug2, (255,0,255), yr.LINK_BONE))
#    viewer.doc.addRenderer('motion_debug3', yr.JointMotionRenderer(motion_debug3, (255,255,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('M_tc', yr.JointMotionRenderer(M_tc, (255,255,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('P_hat', yr.JointMotionRenderer(P_hat, (255,255,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('P', yr.JointMotionRenderer(P, (255,255,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('M_hat_tc_1', yr.JointMotionRenderer(M_hat_tc_1, (255,255,0), yr.LINK_BONE))
#    viewer.doc.addRenderer('rd_CM', yr.PointsRenderer(rd_CM, (255,255,0)))
#    viewer.doc.addRenderer('rd_CP', yr.PointsRenderer(rd_CP, (255,0,0)))
#    viewer.doc.addRenderer('rd_CMP', yr.PointsRenderer(rd_CMP, (0,255,0)))
    viewer.doc.addRenderer('forces', yr.ForcesRenderer(rd_forces, rd_force_points, (255,0,0), ratio=.01, lineWidth=.04, fromPoint=False))
#    viewer.doc.addRenderer('torques', yr.VectorsRenderer(rd_torques, rd_joint_positions, (255,0,0)))
#    viewer.doc.addRenderer('rd_point1', yr.PointsRenderer(rd_point1, (0,255,0)))
#    viewer.doc.addRenderer('rd_point2', yr.PointsRenderer(rd_point2, (255,0,0)))
#    viewer.doc.addRenderer('rd_vec1', yr.VectorsRenderer(rd_vec1, rd_vecori1, (255,0,0)))
#    viewer.doc.addRenderer('rd_vec2', yr.VectorsRenderer(rd_vec2, rd_vecori2, (0,255,0)))
#    viewer.doc.addRenderer('rd_frame1', yr.FramesRenderer(rd_frame1, (0,200,200)))
#    viewer.doc.addRenderer('rd_frame2', yr.FramesRenderer(rd_frame2, (200,200,0)))
# viewer.setMaxFrame(len(motion_ori)-1)
if not REPEATED:
    viewer.setMaxFrame(len(motion_ori)-1)
else:
    viewer.setMaxFrame(1440)
if CAMERA_TRACKING:
    # Per-frame camera targets, filled lazily in postFrameCallback_Always.
    if MULTI_VIEWER:
        cameraTargets1 = [None] * (viewer.getMaxFrame()+1)
        cameraTargets2 = [None] * (viewer.getMaxFrame()+1)
    else:
        cameraTargets = [None] * (viewer.getMaxFrame()+1)
if TORQUE_PLOT:
    rhip_torques = [0.]*viewer.getMaxFrame()
    rknee_torques = [0.]*viewer.getMaxFrame()
    rankle_torques = [0.]*viewer.getMaxFrame()
pt = [0.]  # wall-clock timestamp cell used by postFrameCallback_Always
def postFrameCallback_Always(frame):
if frame==1: pt[0] = time.time()
if frame==31: print 'elapsed time for 30 frames:', time.time()-pt[0]
if CAMERA_TRACKING:
if MULTI_VIEWER:
if cameraTargets1[frame]==None:
cameraTargets1[frame] = motionModel.getBodyPositionGlobal(0)
# cameraTargets1[frame] = motion_ori[frame].getJointPositionGlobal(0)
viewer.setCameraTarget1(cameraTargets1[frame])
if cameraTargets2[frame]==None:
cameraTargets2[frame] = controlModel.getJointPositionGlobal(0)
viewer.setCameraTarget2(cameraTargets2[frame])
else:
if cameraTargets[frame]==None:
cameraTargets[frame] = controlModel.getJointPositionGlobal(0)
viewer.setCameraTarget(cameraTargets[frame])
if plot!=None:
plot.updateVline(frame)
viewer.setPostFrameCallback_Always(postFrameCallback_Always)

# Optional interactive diagnostics plot; enable by uncommenting the next line.
plot = None
# plot = ymp.InteractivePlot()
if plot!=None:
    plot.setXlimit(0, len(motion_ori))
    plot.setYlimit(0., 1.)
    plot.addDataSet('zero')
    plot.addDataSet('diff')
    plot.addDataSet('debug1')
    plot.addDataSet('debug2')
def viewer_onClose(data):
    """Close hook: shut the diagnostics plot (if any), then the viewer itself."""
    # PEP 8: compare to None with `is not`, not `!=`.
    if plot is not None:
        plot.close()
    viewer.onClose(data)
# Register the close handler with the viewer.
viewer.callback(viewer_onClose)
def simulateCallback(frame):
# seginfo
segIndex = seg_index[0]
curState = seginfo[segIndex]['state']
curInterval = yma.offsetInterval(acc_offset[0], seginfo[segIndex]['interval'])
stanceLegs = seginfo[segIndex]['stanceHips']
swingLegs = seginfo[segIndex]['swingHips']
stanceFoots = seginfo[segIndex]['stanceFoots']
swingFoots = seginfo[segIndex]['swingFoots']
swingKnees = seginfo[segIndex]['swingKnees']
groundHeight = seginfo[segIndex]['ground_height']
# maxStfPushFrame = seginfo[segIndex]['max_stf_push_frame']
prev_frame = frame-1 if frame>0 else 0
# prev_frame = frame
# information
# dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(frame), bodyMasses, upperMass, uppers)
# CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(frame), bodyMasses, upperMass, uppers)
## dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(frame), bodyMasses, totalMass)
## CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(frame), bodyMasses, totalMass)
# stf_tar = motion_seg.getJointPositionGlobal(stanceFoots[0], frame)
# CMr_tar = CM_tar - stf_tar
dCM_tar = motion_seg.getJointVelocityGlobal(0, prev_frame)
CM_tar = motion_seg.getJointPositionGlobal(0, prev_frame)
# dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(prev_frame), bodyMasses, upperMass, uppers)
# CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(prev_frame), bodyMasses, upperMass, uppers)
# dCM_tar = yrp.getCM(motion_seg.getJointVelocitiesGlobal(prev_frame), bodyMasses, totalMass)
# CM_tar = yrp.getCM(motion_seg.getJointPositionsGlobal(prev_frame), bodyMasses, totalMass)
stf_tar = motion_seg.getJointPositionGlobal(stanceFoots[0], prev_frame)
CMr_tar = CM_tar - stf_tar
dCM = avg_dCM[0]
CM = controlModel.getJointPositionGlobal(0)
# CM = yrp.getCM(controlModel.getJointPositionsGlobal(), bodyMasses, upperMass, uppers)
# CM = yrp.getCM(controlModel.getJointPositionsGlobal(), bodyMasses, totalMass)
CMreal = yrp.getCM(controlModel.getJointPositionsGlobal(), bodyMasses, totalMass)
stf = controlModel.getJointPositionGlobal(stanceFoots[0])
CMr = CM - stf
diff_dCM = mm.projectionOnPlane(dCM-dCM_tar, (1,0,0), (0,0,1))
diff_dCM_axis = np.cross((0,1,0), diff_dCM)
rd_vec1[0] = diff_dCM; rd_vecori1[0] = CM_tar
diff_CMr = mm.projectionOnPlane(CMr-CMr_tar, (1,0,0), (0,0,1))
# rd_vec1[0] = diff_CMr; rd_vecori1[0] = stf_tar
diff_CMr_axis = np.cross((0,1,0), diff_CMr)
direction = mm.normalize2(mm.projectionOnPlane(dCM_tar, (1,0,0), (0,0,1)))
# direction = mm.normalize2(mm.projectionOnPlane(dCM, (1,0,0), (0,0,1)))
directionAxis = np.cross((0,1,0), direction)
diff_dCM_sag, diff_dCM_cor = mm.projectionOnVector2(diff_dCM, direction)
# rd_vec1[0] = diff_dCM_sag; rd_vecori1[0] = CM_tar
diff_dCM_sag_axis = np.cross((0,1,0), diff_dCM_sag)
diff_dCM_cor_axis = np.cross((0,1,0), diff_dCM_cor)
diff_CMr_sag, diff_CMr_cor = mm.projectionOnVector2(diff_CMr, direction)
diff_CMr_sag_axis = np.cross((0,1,0), diff_CMr_sag)
diff_CMr_cor_axis = np.cross((0,1,0), diff_CMr_cor)
t = (frame-curInterval[0])/float(curInterval[1]-curInterval[0])
t_raw = t
if t>1.: t=1.
p_root = motion_stitch[frame].getJointPositionGlobal(0)
R_root = motion_stitch[frame].getJointOrientationGlobal(0)
motion_seg_orig.goToFrame(frame)
motion_seg.goToFrame(frame)
motion_stitch.goToFrame(frame)
motion_debug1.append(motion_stitch[frame].copy())
motion_debug1.goToFrame(frame)
motion_debug2.append(motion_stitch[frame].copy())
motion_debug2.goToFrame(frame)
motion_debug3.append(motion_stitch[frame].copy())
motion_debug3.goToFrame(frame)
# paper implementation
M_tc.append(motion_stitch[prev_frame])
M_tc.goToFrame(frame)
P_hat.append(M_tc[frame].copy())
P_hat.goToFrame(frame)
p_temp = ym.JointPosture(skeleton)
p_temp.rootPos = controlModel.getJointPositionGlobal(0)
p_temp.setJointOrientationsLocal(controlModel.getJointOrientationsLocal())
P.append(p_temp)
P.goToFrame(frame)
# stance foot stabilize
motion_stf_stabilize.append(motion_stitch[frame].copy())
motion_stf_stabilize.goToFrame(frame)
if STANCE_FOOT_STABILIZE:
for stanceFoot in stanceFoots:
R_target_foot = motion_seg[frame].getJointOrientationGlobal(stanceFoot)
R_current_foot = motion_stf_stabilize[frame].getJointOrientationGlobal(stanceFoot)
motion_stf_stabilize[frame].setJointOrientationGlobal(stanceFoot, cm.slerp(R_current_foot, R_target_foot , stf_stabilize_func(t)))
# R_target_foot = motion_seg[frame].getJointOrientationLocal(stanceFoot)
# R_current_foot = motion_stf_stabilize[frame].getJointOrientationLocal(stanceFoot)
# motion_stf_stabilize[frame].setJointOrientationLocal(stanceFoot, cm.slerp(R_current_foot, R_target_foot , stf_stabilize_func(t)))
# match stance leg
motion_match_stl.append(motion_stf_stabilize[frame].copy())
motion_match_stl.goToFrame(frame)
if MATCH_STANCE_LEG:
if curState!=yba.GaitState.STOP:
for i in range(len(stanceLegs)):
stanceLeg = stanceLegs[i]
stanceFoot = stanceFoots[i]
# # motion stance leg -> character stance leg as time goes
R_motion = motion_match_stl[frame].getJointOrientationGlobal(stanceLeg)
R_character = controlModel.getJointOrientationGlobal(stanceLeg)
motion_match_stl[frame].setJointOrientationGlobal(stanceLeg, cm.slerp(R_motion, R_character, match_stl_func(t)))
# t_y = match_stl_func_y(t)
# t_xz = match_stl_func(t)
#
# R_motion = motion_match_stl[frame].getJointOrientationGlobal(stanceLeg)
# R_character = controlModel.getJointOrientationGlobal(stanceLeg)
# R = np.dot(R_character, R_motion.T)
# R_y, R_xz = mm.projectRotation((0,1,0), R)
# motion_match_stl[frame].mulJointOrientationGlobal(stanceLeg, mm.scaleSO3(R_xz, t_xz))
# motion_match_stl[frame].mulJointOrientationGlobal(stanceLeg, mm.scaleSO3(R_y, t_y))
# swing foot placement
motion_swf_placement.append(motion_match_stl[frame].copy())
motion_swf_placement.goToFrame(frame)
if SWING_FOOT_PLACEMENT:
t_swing_foot_placement = swf_placement_func(t);
if extended[0]:
R_swp_sag = prev_R_swp[0][0]
R_swp_cor = prev_R_swp[0][1]
else:
R_swp_sag = mm.I_SO3(); R_swp_cor = mm.I_SO3()
R_swp_cor = np.dot(R_swp_cor, mm.exp(diff_dCM_cor_axis * K_swp_vel_cor * -t_swing_foot_placement))
if np.dot(direction, diff_CMr_sag) < 0:
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_dCM_sag_axis * K_swp_vel_sag * -t_swing_foot_placement))
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_CMr_sag_axis * K_swp_pos_sag * -t_swing_foot_placement))
else:
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_dCM_sag_axis * K_swp_vel_sag_faster * -t_swing_foot_placement))
R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_CMr_sag_axis * K_swp_pos_sag_faster * -t_swing_foot_placement))
R_swp_cor = np.dot(R_swp_cor, mm.exp(diff_CMr_cor_axis * K_swp_pos_cor * -t_swing_foot_placement))
for i in range(len(swingLegs)):
swingLeg = swingLegs[i]
swingFoot = swingFoots[i]
# save swing foot global orientation
# R_swf = motion_swf_placement[frame].getJointOrientationGlobal(swingFoot)
# rotate swing leg
motion_swf_placement[frame].mulJointOrientationGlobal(swingLeg, R_swp_sag)
motion_swf_placement[frame].mulJointOrientationGlobal(swingLeg, R_swp_cor)
# restore swing foot global orientation
# motion_swf_placement[frame].setJointOrientationGlobal(swingFoot, R_swf)
prev_R_swp[0] = (R_swp_sag, R_swp_cor)
# swing foot height
motion_swf_height.append(motion_swf_placement[frame].copy())
motion_swf_height.goToFrame(frame)
if SWING_FOOT_HEIGHT:
for swingFoot in swingFoots:
stanceFoot = stanceFoots[0]
# save foot global orientation
R_foot = motion_swf_height[frame].getJointOrientationGlobal(swingFoot)
R_stance_foot = motion_swf_height[frame].getJointOrientationGlobal(stanceFoot)
if OLD_SWING_HEIGHT:
height_tar = motion_swf_height[frame].getJointPositionGlobal(swingFoot)[1] - motion_swf_height[frame].getJointPositionGlobal(stanceFoot)[1]
else:
height_tar = motion_swf_height[prev_frame].getJointPositionGlobal(swingFoot)[1] - groundHeight
d_height_tar = motion_swf_height.getJointVelocityGlobal(swingFoot, prev_frame)[1]
# height_tar += c_swf_mid_offset * swf_height_sine_func(t)
# motion_debug1[frame] = motion_swf_height[frame].copy()
# rotate
motion_swf_height[frame].rotateByTarget(controlModel.getJointOrientationGlobal(0))
# motion_debug2[frame] = motion_swf_height[frame].copy()
# motion_debug2[frame].translateByTarget(controlModel.getJointPositionGlobal(0))
if OLD_SWING_HEIGHT:
height_cur = motion_swf_height[frame].getJointPositionGlobal(swingFoot)[1] - motion_swf_height[frame].getJointPositionGlobal(stanceFoot)[1]
else:
height_cur = controlModel.getJointPositionGlobal(swingFoot)[1] - halfFootHeight - c_swf_offset
d_height_cur = controlModel.getJointVelocityGlobal(swingFoot)[1]
if OLD_SWING_HEIGHT:
offset_height = (height_tar - height_cur) * swf_height_func(t) * c5
else:
offset_height = ((height_tar - height_cur) * c5
+ (d_height_tar - d_height_cur) * c6) * swf_height_func(t)
offset_sine = c_swf_mid_offset * swf_height_sine_func(t)
# offset_sine = 0.
offset = 0.
offset += offset_height
offset += offset_sine
if offset > 0.:
newPosition = motion_swf_height[frame].getJointPositionGlobal(swingFoot)
newPosition[1] += offset
aik.ik_analytic(motion_swf_height[frame], swingFoot, newPosition)
else:
if HIGHER_OFFSET:
newPosition = motion_swf_height[frame].getJointPositionGlobal(stanceFoot)
newPosition[1] -= offset
aik.ik_analytic(motion_swf_height[frame], stanceFoot, newPosition)
# return
# motion_debug3[frame] = motion_swf_height[frame].copy()
# motion_debug3[frame].translateByTarget(controlModel.getJointPositionGlobal(0))
motion_swf_height[frame].rotateByTarget(R_root)
# restore foot global orientation
motion_swf_height[frame].setJointOrientationGlobal(swingFoot, R_foot)
motion_swf_height[frame].setJointOrientationGlobal(stanceFoot, R_stance_foot)
if plot!=None:
plot.addDataPoint('debug1', frame, offset_height)
plot.addDataPoint('debug2', frame, height_tar - height_cur)
# plot.addDataPoint('diff', frame, diff)
# swing foot orientation
motion_swf_orientation.append(motion_swf_height[frame].copy())
motion_swf_orientation.goToFrame(frame)
if SWING_FOOT_ORIENTATION:
swf_orientation_func = yfg.concatenate([yfg.zero, yfg.hermite2nd, yfg.one], [.25, .75])
for swingFoot in swingFoots:
R_target_foot = motion_seg[curInterval[1]].getJointOrientationGlobal(swingFoot)
R_current_foot = motion_swf_orientation[frame].getJointOrientationGlobal(swingFoot)
motion_swf_orientation[frame].setJointOrientationGlobal(swingFoot, cm.slerp(R_current_foot, R_target_foot, swf_orientation_func(t)))
# swf_stabilize_func = yfg.concatenate([yfg.hermite2nd, yfg.one], [c_taking_duration])
# push orientation
# for swingFoot in swingFoots:
# R_target_foot = motion_seg[frame].getJointOrientationGlobal(swingFoot)
# R_current_foot = motion_swf_orientation[frame].getJointOrientationGlobal(swingFoot)
# motion_swf_orientation[frame].setJointOrientationGlobal(swingFoot, cm.slerp(R_current_foot, R_target_foot , swf_stabilize_func(t)))
# stance foot push
motion_stf_push.append(motion_swf_orientation[frame].copy())
motion_stf_push.goToFrame(frame)
if STANCE_FOOT_PUSH:
for swingFoot in swingFoots:
# max_t = (maxStfPushFrame)/float(curInterval[1]-curInterval[0])
# stf_push_func = yfg.concatenate([yfg.sine, yfg.zero], [max_t*2])
stf_push_func = yfg.concatenate([yfg.sine, yfg.zero], [c_taking_duration*2])
R_swp_sag = mm.I_SO3()
# R_swp_sag = np.dot(R_swp_sag, mm.exp(diff_dCM_sag_axis * K_stp_vel * -stf_push_func(t)))
# if step_length_cur[0] < step_length_tar[0]:
# ratio = step_length_cur[0] / step_length_tar[0]
# R_max = maxmaxStfPushFrame
# R_zero =
R_swp_sag = np.dot(R_swp_sag, mm.exp((step_length_tar[0] - step_length_cur[0])*step_axis[0] * K_stp_pos * -stf_push_func(t)))
motion_stf_push[frame].mulJointOrientationGlobal(swingFoot, R_swp_sag)
# stance foot balancing
motion_stf_balancing.append(motion_stf_push[frame].copy())
motion_stf_balancing.goToFrame(frame)
if STANCE_FOOT_BALANCING:
R_stb = mm.exp(diff_dCM_axis * K_stb_vel * stf_balancing_func(t))
R_stb = np.dot(R_stb, mm.exp(diff_CMr_axis * K_stb_pos * stf_balancing_func(t)))
for stanceFoot in stanceFoots:
if frame < 5: continue
motion_stf_balancing[frame].mulJointOrientationGlobal(stanceFoot, R_stb)
# control trajectory
motion_control.append(motion_stf_balancing[frame].copy())
motion_control.goToFrame(frame)
#=======================================================================
# tracking with inverse dynamics
#=======================================================================
th_r = motion_control.getDOFPositions(frame)
th = controlModel.getDOFPositions()
dth_r = motion_control.getDOFVelocities(frame)
dth = controlModel.getDOFVelocities()
ddth_r = motion_control.getDOFAccelerations(frame)
ddth_des = yct.getDesiredDOFAccelerations(th_r, th, dth_r, dth, ddth_r, Kt, Dt)
#=======================================================================
# simulation
#=======================================================================
CP = mm.v3(0.,0.,0.)
F = mm.v3(0.,0.,0.)
avg_dCM[0] = mm.v3(0.,0.,0.)
# external force rendering info
del rd_forces[:]; del rd_force_points[:]
for fi in forceInfos:
if fi.startFrame <= frame and frame < fi.startFrame + fi.duration*(1/frameTime):
rd_forces.append(fi.force)
rd_force_points.append(controlModel.getBodyPositionGlobal(fi.targetBody) + -mm.normalize2(fi.force)*.2)
for i in range(stepsPerFrame):
bodyIDs, contactPositions, contactPositionLocals, contactForces = vpWorld.calcPenaltyForce(bodyIDsToCheck, mus, Ks, Ds)
vpWorld.applyPenaltyForce(bodyIDs, contactPositionLocals, contactForces)
# apply external force
for fi in forceInfos:
if fi.startFrame <= frame and frame < fi.startFrame + fi.duration*(1/frameTime):
controlModel.applyBodyForceGlobal(fi.targetBody, fi.force)
controlModel.setDOFAccelerations(ddth_des)
controlModel.solveHybridDynamics()
# # apply external force
# for fi in forceInfos:
# if fi.startFrame <= frame and frame < fi.startFrame + fi.duration*(1/frameTime):
# controlModel.applyBodyForceGlobal(fi.targetBody, fi.force)
vpWorld.step()
# yvu.align2D(controlModel)
if len(contactForces) > 0:
CP += yrp.getCP(contactPositions, contactForces)
F += sum(contactForces)
avg_dCM[0] += controlModel.getJointVelocityGlobal(0)
# avg_dCM[0] += yrp.getCM(controlModel.getJointVelocitiesGlobal(), bodyMasses, upperMass, uppers)
# avg_dCM[0] += yrp.getCM(controlModel.getJointVelocitiesGlobal(), bodyMasses, totalMass)
# if len(stanceFoots)>0:
# avg_stf_v[0] += controlModel.getJointVelocityGlobal(stanceFoots[0])
# avg_stf_av[0] += controlModel.getJointAngVelocityGlobal(stanceFoots[0])
CP /= stepsPerFrame
F /= stepsPerFrame
avg_dCM[0] /= stepsPerFrame
# if len(stanceFoots)>0:
# avg_stf_v[0] /= stepsPerFrame
# avg_stf_av[0] /= stepsPerFrame
# rd_vec1[0] = avg_stf_av[0]; rd_vec1[0][0] = 0.; rd_vec1[0][2] = 0.
# rd_vecori1[0]= controlModel.getJointPositionGlobal(stanceFoots[0])
#=======================================================================
# segment editing
#=======================================================================
lastFrame = False
if SEGMENT_EDITING:
if curState==yba.GaitState.STOP:
if frame == len(motion_seg)-1:
lastFrame = True
elif (curState==yba.GaitState.LSWING or curState==yba.GaitState.RSWING) and t>c_min_contact_time:
swingID = lID if curState==yba.GaitState.LSWING else rID
contact = False
if swingID in bodyIDs:
minContactVel = 1000.
for i in range(len(bodyIDs)):
if bodyIDs[i]==swingID:
vel = controlModel.getBodyVelocityGlobal(swingID, contactPositionLocals[i])
vel[1] = 0
contactVel = mm.length(vel)
if contactVel < minContactVel: minContactVel = contactVel
if minContactVel < c_min_contact_vel: contact = True
extended[0] = False
if contact:
# print frame, 'foot touch'
lastFrame = True
acc_offset[0] += frame - curInterval[1]
elif frame == len(motion_seg)-1:
print frame, 'extend frame', frame+1
preserveJoints = []
# preserveJoints = [lFoot, rFoot]
# preserveJoints = [lFoot, rFoot, lKnee, rKnee]
# preserveJoints = [lFoot, rFoot, lKnee, rKnee, lUpLeg, rUpLeg]
stanceKnees = [rKnee] if curState==yba.GaitState.LSWING else [lKnee]
preserveJoints = [stanceFoots[0], stanceKnees[0], stanceLegs[0]]
diff = 3
motion_seg_orig.extend([motion_seg_orig[-1]])
motion_seg.extend(ymt.extendByIntegration_root(motion_seg, 1, diff))
motion_stitch.extend(ymt.extendByIntegration_constant(motion_stitch, 1, preserveJoints, diff))
# # extend for swing foot ground speed matching & swing foot height lower
## extendedPostures = ymt.extendByIntegration(motion_stitch, 1, preserveJoints, diff)
## extendedPostures = [motion_stitch[-1]]
##
# extendFrameNum = frame - curInterval[1] + 1
# k = 1.-extendFrameNum/5.
# if k<0.: k=0.
# extendedPostures = ymt.extendByIntegrationAttenuation(motion_stitch, 1, preserveJoints, diff, k)
#
## if len(swingFoots)>0 and np.inner(dCM_tar, dCM)>0.:
## print frame, 'speed matching'
## R_swf = motion_stitch[-1].getJointOrientationGlobal(swingFoots[0])
##
## p_swf = motion_stitch[-1].getJointPositionGlobal(swingFoots[0])
## v_swf = motion_stitch.getJointVelocityGlobal(swingFoots[0], frame-diff, frame)
## a_swf = motion_stitch.getJointAccelerationGlobal(swingFoots[0], frame-diff, frame)
## p_swf += v_swf * (frameTime) + a_swf * (frameTime)*(frameTime)
## aik.ik_analytic(extendedPostures[0], swingFoots[0], p_swf)
##
## extendedPostures[0].setJointOrientationGlobal(swingFoots[0], R_swf)
#
# motion_stitch.extend(extendedPostures)
extended[0] = True
else:
if frame == len(motion_seg)-1: lastFrame = True
if lastFrame:
if segIndex < len(segments)-1:
print '%d (%d): end of %dth seg (%s, %s)'%(frame, frame-curInterval[1],segIndex, yba.GaitState.text[curState], curInterval)
if plot!=None: plot.addDataPoint('diff', frame, (frame-curInterval[1])*.01)
if len(stanceFoots)>0 and len(swingFoots)>0:
# step_cur = controlModel.getJointPositionGlobal(swingFoots[0]) - controlModel.getJointPositionGlobal(stanceFoots[0])
# step_tar = motion_seg[curInterval[1]].getJointPositionGlobal(swingFoots[0]) - motion_seg[curInterval[1]].getJointPositionGlobal(stanceFoots[0])
step_cur = controlModel.getJointPositionGlobal(0) - controlModel.getJointPositionGlobal(stanceFoots[0])
step_tar = motion_seg[curInterval[1]].getJointPositionGlobal(0) - motion_seg[curInterval[1]].getJointPositionGlobal(stanceFoots[0])
step_cur = mm.projectionOnPlane(step_cur, (1,0,0), (0,0,1))
step_tar = mm.projectionOnPlane(step_tar, (1,0,0), (0,0,1))
step_cur_sag, step_cur_cor = mm.projectionOnVector2(step_cur, direction)
step_tar_sag, step_tar_cor = mm.projectionOnVector2(step_tar, direction)
step_length_tar[0] = mm.length(step_tar_sag)
if np.inner(step_tar_sag, step_cur_sag) > 0:
step_length_cur[0] = mm.length(step_cur_sag)
else:
step_length_cur[0] = -mm.length(step_cur_sag)
step_axis[0] = directionAxis
# rd_vec1[0] = step_tar_sag
# rd_vecori1[0] = motion_seg[curInterval[1]].getJointPositionGlobal(stanceFoots[0])
# rd_vec2[0] = step_cur_sag
# rd_vecori2[0] = controlModel.getJointPositionGlobal(stanceFoots[0])
seg_index[0] += 1
curSeg = segments[seg_index[0]]
stl_y_limit_num[0] = 0
stl_xz_limit_num[0] = 0
del motion_seg_orig[frame+1:]
motion_seg_orig.extend(ymb.getAttachedNextMotion(curSeg, motion_seg_orig[-1], False, False))
del motion_seg[frame+1:]
del motion_stitch[frame+1:]
transitionLength = len(curSeg)-1
# motion_seg.extend(ymb.getAttachedNextMotion(curSeg, motion_seg[-1], False, False))
# motion_stitch.extend(ymb.getStitchedNextMotion(curSeg, motion_control[-1], transitionLength, stitch_func, True, False))
d = motion_seg[-1] - curSeg[0]
d.rootPos[1] = 0.
motion_seg.extend(ymb.getAttachedNextMotion(curSeg, d, True, False))
d = motion_control[-1] - curSeg[0]
d.rootPos[1] = 0.
motion_stitch.extend(ymb.getStitchedNextMotion(curSeg, d, transitionLength, stitch_func, True, False))
# motion_seg.extend(ymb.getAttachedNextMotion(curSeg, motion_seg[-1], False, True))
# motion_stitch.extend(ymb.getStitchedNextMotion(curSeg, motion_control[-1], transitionLength, stitch_func, True, True))
else:
motion_seg_orig.append(motion_seg_orig[-1])
motion_seg.append(motion_seg[-1])
motion_stitch.append(motion_control[-1])
# rendering
motionModel.update(motion_ori[frame])
# motionModel.update(motion_seg[frame])
rd_CP[0] = CP
rd_CMP[0] = (CMreal[0] - (F[0]/F[1])*CMreal[1], 0, CMreal[2] - (F[2]/F[1])*CMreal[1])
if plot!=None:
plot.addDataPoint('zero', frame, 0)
plot.updatePoints()
viewer.setSimulateCallback(simulateCallback)
if MULTI_VIEWER:
viewer.startTimer(frameTime / 1.4)
else:
viewer.startTimer(frameTime * .1)
viewer.show()
Fl.run()
pass
push_simbicon_mass()
| [
"[email protected]"
] | |
2aa099e77ec976eea8a2ce7424afac7d5124999a | b0c2a8c77fc0e025690e59f990950b6eb347c4c3 | /corpus_builder/spiders/newspaper/janakantha.py | f0a39957ada5c7a106bab473fa6104a3258d95fb | [
"MIT"
] | permissive | ibraheem-moosa/corpus-builder | 1b31cbc501026436e5ebde2e363379b6fc094dd0 | 5f09835f9aa62abb5f891c4d3896206eedd9fe12 | refs/heads/master | 2020-08-06T21:39:31.700339 | 2018-08-24T14:00:18 | 2018-08-24T14:00:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,923 | py | # -*- coding: utf-8 -*-
import datetime
import urlparse
import scrapy
from corpus_builder.templates.spider import CommonSpider
class JanakanthaSpider(CommonSpider):
    """Crawl date-wise article archives of The Daily Janakantha."""

    name = "janakantha"
    allowed_domains = ["dailyjanakantha.com"]
    base_url = 'https://www.dailyjanakantha.com'
    start_request_url = base_url
    content_body = {
        'css': 'p.artDetails *::text'
    }
    allowed_configurations = [
        ['start_date'],
        ['start_date', 'end_date'],
        ['category', 'start_date'],
        ['category', 'start_date', 'end_date'],
    ]

    def request_index(self, response):
        """Yield one archive-page request per (category, date) pair."""
        # Category slugs are the last path component of each top-menu link.
        slugs = []
        for href in response.css('nav.menu a::attr("href")').extract():
            slug = urlparse.urlparse(href.strip()).path.split('/')[-1]
            if slug and slug != "#":
                slugs.append(slug)

        if self.category is not None:
            if self.category not in slugs:
                raise ValueError('invalid category slug. available slugs: %s' % ", ".join(slugs))
            slugs = [self.category]

        one_day = datetime.timedelta(days=1)
        current_date = self.start_date
        while current_date <= self.end_date:
            for slug in slugs:
                # e.g. https://www.dailyjanakantha.com/frontpage/date/2016-06-01
                archive_url = '{0}/{1}/date/{2}'.format(
                    self.base_url, slug, current_date.strftime('%Y-%m-%d'))
                yield scrapy.Request(archive_url, callback=self.extract_news_category)
            current_date += one_day

    def extract_news_category(self, response):
        """Yield a content request for every unique article link on an archive page."""
        for link in set(response.xpath('//div[@class="content"]//a').extract()):
            if not link[:4] == 'http':
                link = self.base_url + link
            yield scrapy.Request(link, callback=self.parse_content)
| [
"[email protected]"
] | |
f8f2a2c16488ab7b2db5c75b3e3384fe28779156 | c5458f2d53d02cb2967434122183ed064e1929f9 | /sdks/python/test/test_generate_commitments_request.py | 8c5a96d5277ec4d96a679c0c58dcd09553377df4 | [] | no_license | ross-weir/ergo-node-api-sdks | fd7a32f79784dbd336ef6ddb9702b9dd9a964e75 | 9935ef703b14760854b24045c1307602b282c4fb | refs/heads/main | 2023-08-24T05:12:30.761145 | 2021-11-08T10:28:10 | 2021-11-08T10:28:10 | 425,785,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,207 | py | """
Ergo Node API
API docs for Ergo Node. Models are shared between all Ergo products # noqa: E501
The version of the OpenAPI document: 4.0.15
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import openapi_client
from openapi_client.model.generate_commitments_request_secrets import GenerateCommitmentsRequestSecrets
from openapi_client.model.unsigned_ergo_transaction import UnsignedErgoTransaction
globals()['GenerateCommitmentsRequestSecrets'] = GenerateCommitmentsRequestSecrets
globals()['UnsignedErgoTransaction'] = UnsignedErgoTransaction
from openapi_client.model.generate_commitments_request import GenerateCommitmentsRequest
class TestGenerateCommitmentsRequest(unittest.TestCase):
    """Unit-test scaffolding for the ``GenerateCommitmentsRequest`` model."""

    def setUp(self):
        """No fixtures are needed for this model."""

    def tearDown(self):
        """Nothing to clean up."""

    def testGenerateCommitmentsRequest(self):
        """Placeholder test for GenerateCommitmentsRequest.

        FIXME: construct object with mandatory attributes with example values
        ``model = GenerateCommitmentsRequest()``  # noqa: E501
        """
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
57c861cd16af96f077cd25db431a46f4feb6d0b2 | c30d4f174a28aac495463f44b496811ee0c21265 | /python/helpers/python-skeletons/multiprocessing/__init__.py | de9d1ddfa3ca0c043f71519ec442d5c291506ae8 | [
"Apache-2.0"
] | permissive | sarvex/intellij-community | cbbf08642231783c5b46ef2d55a29441341a03b3 | 8b8c21f445550bd72662e159ae715e9d944ba140 | refs/heads/master | 2023-05-14T14:32:51.014859 | 2023-05-01T06:59:21 | 2023-05-01T06:59:21 | 32,571,446 | 0 | 0 | Apache-2.0 | 2023-05-01T06:59:22 | 2015-03-20T08:16:17 | Java | UTF-8 | Python | false | false | 4,217 | py | """Skeleton for 'multiprocessing' stdlib module."""
from multiprocessing.pool import Pool
class Process(object):
    """Skeleton of ``multiprocessing.Process`` used for static type inference.

    Method bodies are intentionally empty; only the signatures and the types
    of the attributes assigned in ``__init__`` matter to the IDE.
    """
    def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
        # NOTE(review): the mutable default ``kwargs={}`` mirrors the real
        # stdlib signature; harmless here because the stub never mutates it.
        self.name = ''
        self.daemon = False
        self.authkey = None
        self.exitcode = None
        self.ident = 0
        self.pid = 0
        self.sentinel = None
    def run(self):
        """Method representing the process's activity."""
        pass
    def start(self):
        """Start the process's activity."""
        pass
    def terminate(self):
        """Terminate the process."""
        pass
    def join(self, timeout=None):
        """Block until the process terminates or *timeout* elapses."""
        pass
    def is_alive(self):
        """Return whether the process is alive (stub: always False)."""
        return False
class ProcessError(Exception):
    """Base class of all ``multiprocessing`` exceptions."""
    pass
class BufferTooShort(ProcessError):
    """Raised when a supplied buffer is too small for the received message."""
    pass
class AuthenticationError(ProcessError):
    """Raised when there is an authentication error between endpoints."""
    pass
class TimeoutError(ProcessError):
    """Raised by methods with a timeout when the timeout expires."""
    pass
class Connection(object):
    """Skeleton of ``multiprocessing.connection.Connection``.

    Stub for static type inference: bodies are empty and return values only
    convey the expected types.
    """
    def send(self, obj):
        """Send a picklable object to the other end of the connection."""
        pass
    def recv(self):
        """Return an object sent from the other end."""
        pass
    def fileno(self):
        """Return the integer file descriptor used by the connection."""
        return 0
    def close(self):
        """Close the connection."""
        pass
    def poll(self, timeout=None):
        """Return whether there is any data available to be read."""
        pass
    def send_bytes(self, buffer, offset=-1, size=-1):
        """Send byte data from a bytes-like object as a complete message."""
        pass
    def recv_bytes(self, maxlength=-1):
        """Return a complete message of byte data."""
        pass
    def recv_bytes_into(self, buffer, offset=-1):
        """Read a complete message into *buffer* and return the byte count."""
        pass
    def __enter__(self):
        # The real Connection is its own context manager and yields itself,
        # so type inference on ``with conn as c:`` must resolve ``c`` to a
        # Connection rather than None.
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Exiting the context closes the connection, as in the stdlib.
        self.close()
def Pipe(duplex=True):
return Connection(), Connection()
class Queue(object):
    """Skeleton of ``multiprocessing.Queue``.

    Stub for static type inference: bodies are empty and return values only
    convey the expected types.
    """
    def __init__(self, maxsize=-1):
        self._maxsize = maxsize
    def qsize(self):
        """Return the approximate size of the queue (stub: always 0)."""
        return 0
    def empty(self):
        """Return whether the queue is empty (stub: always False)."""
        return False
    def full(self):
        """Return whether the queue is full (stub: always False)."""
        return False
    def put(self, obj, block=True, timeout=None):
        """Put *obj* into the queue, optionally blocking with a timeout."""
        pass
    def put_nowait(self, obj):
        """Equivalent to ``put(obj, False)``."""
        pass
    def get(self, block=True, timeout=None):
        """Remove and return an item, optionally blocking with a timeout."""
        pass
    def get_nowait(self):
        """Equivalent to ``get(False)``."""
        pass
    def close(self):
        """Indicate that no more data will be put on this queue."""
        pass
    def join_thread(self):
        """Join the queue's background feeder thread."""
        pass
    def cancel_join_thread(self):
        """Prevent ``join_thread`` from blocking."""
        pass
class SimpleQueue(object):
def empty(self):
return False
def get(self):
pass
def put(self, item):
pass
class JoinableQueue(multiprocessing.Queue):
def task_done(self):
pass
def join(self):
pass
def active_childern():
    """Return a list of live children of the current process.

    NOTE(review): the name is a misspelling of the real stdlib function
    ``multiprocessing.active_children``; it is kept so existing references
    keep resolving, and the correctly spelled alias is defined below.

    :rtype: list[multiprocessing.Process]
    """
    return []


# Correctly spelled name matching the real multiprocessing API.
active_children = active_childern
def cpu_count():
return 0
def current_process():
"""
:rtype: multiprocessing.Process
"""
return Process()
def freeze_support():
pass
def get_all_start_methods():
return []
def get_context(method=None):
pass
def get_start_method(allow_none=False):
pass
def set_executable(path):
pass
def set_start_method(method):
pass
class Barrier(object):
def __init__(self, parties, action=None, timeout=None):
self.parties = parties
self.n_waiting = 0
self.broken = False
def wait(self, timeout=None):
pass
def reset(self):
pass
def abort(self):
pass
class Semaphore(object):
def __init__(self, value=1):
pass
def acquire(self, blocking=True, timeout=None):
pass
def release(self):
pass
class BoundedSemaphore(multiprocessing.Semaphore):
pass
class Condition(object):
def __init__(self, lock=None):
pass
def acquire(self, *args):
pass
def release(self):
pass
def wait(self, timeout=None):
pass
def wait_for(self, predicate, timeout=None):
pass
def notify(self, n=1):
pass
def notify_all(self):
pass
class Event(object):
def is_set(self):
return False
def set(self):
pass
def clear(self):
pass
def wait(self, timeout=None):
pass
class Lock(object):
    """Skeleton of ``multiprocessing.Lock`` (non-recursive lock).

    Declares the context-manager protocol so ``with lock:`` type-checks,
    consistent with the ``RLock`` skeleton defined in this module.
    """
    def acquire(self, blocking=True, timeout=-1):
        """Acquire the lock, optionally blocking with a timeout."""
        pass
    def release(self):
        """Release the lock."""
        pass
    def __enter__(self):
        pass
    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
class RLock(object):
    """Skeleton of ``multiprocessing.RLock`` (recursive lock).

    Supports the context-manager protocol so it can be used in a
    ``with`` statement.
    """
    def acquire(self, blocking=True, timeout=-1):
        """Acquire the lock, recursively if already held by this owner."""
        pass
    def release(self):
        """Release one level of the lock."""
        pass
    def __enter__(self):
        pass
    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
def Value(typecode_or_type, *args, **kwargs):
pass
def Array(typecode_or_type, size_or_initializer, lock=True):
pass
def Manager():
return multiprocessing.SyncManager()
| [
"[email protected]"
] | |
6b354ee59c681faf08710f4c4a73bf94b911ddca | 33af6185b48bd76f97f0a74390a3a812ee216c78 | /angr/angr/procedures/glibc/__libc_start_main.py | 12aa852769b769b404e992c1b45228fc1eb2aa92 | [
"BSD-2-Clause"
] | permissive | Ruide/angr-dev | dab0cabd907fce47ac698f890c3f3a8b80ab7e2a | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | refs/heads/master | 2022-11-10T11:27:13.355024 | 2017-10-07T14:29:09 | 2017-10-07T14:29:09 | 104,417,044 | 0 | 1 | BSD-2-Clause | 2022-10-16T04:48:10 | 2017-09-22T01:35:12 | C | UTF-8 | Python | false | false | 8,177 | py |
import logging
import pyvex
import angr
l = logging.getLogger("angr.procedures.glibc.__libc_start_main")
######################################
# __libc_start_main
######################################
class __libc_start_main(angr.SimProcedure):
    """SimProcedure summarizing glibc's ``__libc_start_main``.

    Sets up the libc ctype locale tables, calls ``init(argc, argv, envp)``,
    then ``main(argc, argv, envp)``, and finally exits with status 0.
    ``static_exits`` recovers the ``init``/``main``/``fini`` pointers without
    symbolic execution so CFG recovery can follow them.
    """
    #pylint:disable=arguments-differ,unused-argument,attribute-defined-outside-init

    ADDS_EXITS = True
    NO_RET = True
    IS_FUNCTION = True

    local_vars = ('main', 'argc', 'argv', 'init', 'fini')

    def _initialize_loc_table(self, values, entry_bytes):
        """Allocate and fill one 384-entry ctype table; return its pointer word.

        The glibc ctype tables are indexed from -128 to 255.  The returned
        ``table_ptr`` is the address of a word that points at entry 0 of the
        table, with the 128 negative-index entries stored below it.

        :param values:      the table entries in index order -128 .. 255.
        :param entry_bytes: width of one entry in bytes (2 for __ctype_b_loc,
                            4 for the tolower/toupper tables).
        :return:            the address of the pointer word.
        """
        malloc = angr.SIM_PROCEDURES['libc']['malloc']
        table = self.inline_call(malloc, 384 * entry_bytes).ret_expr
        table_ptr = self.inline_call(malloc, self.state.arch.bits / 8).ret_expr

        for pos, c in enumerate(values):
            self.state.memory.store(table + (pos * entry_bytes),
                                    self.state.se.BVV(c, entry_bytes * 8),
                                    endness=self.state.arch.memory_endness,
                                    inspect=False,
                                    disable_actions=True,
                                    )
        # Skip past the negative-index entries so the stored pointer addresses
        # entry 0, as the glibc *_loc() accessors expect.
        table += 128 * entry_bytes
        self.state.memory.store(table_ptr,
                                table,
                                size=self.state.arch.bits / 8,
                                endness=self.state.arch.memory_endness,
                                inspect=False,
                                disable_actions=True,
                                )
        return table_ptr

    def _initialize_b_loc_table(self):
        """Initialize the classification table; see __ctype_b_loc.c in glibc."""
        self.state.libc.ctype_b_loc_table_ptr = \
            self._initialize_loc_table(self.state.libc.LOCALE_ARRAY, 2)

    def _initialize_tolower_loc_table(self):
        """Initialize the tolower table; see __ctype_tolower_loc.c in glibc."""
        self.state.libc.ctype_tolower_loc_table_ptr = \
            self._initialize_loc_table(self.state.libc.TOLOWER_LOC_ARRAY, 4)

    def _initialize_toupper_loc_table(self):
        """Initialize the toupper table; see __ctype_toupper_loc.c in glibc."""
        self.state.libc.ctype_toupper_loc_table_ptr = \
            self._initialize_loc_table(self.state.libc.TOUPPER_LOC_ARRAY, 4)

    def _initialize_ctype_table(self):
        """Build all three ctype lookup tables used by the <ctype.h> macros."""
        self._initialize_b_loc_table()
        self._initialize_tolower_loc_table()
        self._initialize_toupper_loc_table()

    @property
    def envp(self):
        """Address of the environment block: just past argv's NULL terminator."""
        return self.argv + (self.argc+1)*self.state.arch.bytes

    def run(self, main, argc, argv, init, fini):
        """Entry point: set up libc state and call ``init``."""
        # TODO: handle symbolic and static modes
        # TODO: add argument types
        self._initialize_ctype_table()
        self.main, self.argc, self.argv, self.init, self.fini = self._extract_args(self.state, main, argc, argv, init,
                                                                                   fini)
        # TODO: __cxa_atexit calls for various at-exit needs
        self.call(self.init, (self.argc, self.argv, self.envp), 'after_init')

    def after_init(self, main, argc, argv, init, fini, exit_addr=0):
        """Continuation after ``init`` returns: align the stack, call ``main``."""
        if isinstance(self.state.arch, ArchAMD64):
            # (rsp+8) must be aligned to 16 as required by System V ABI
            # ref: http://www.x86-64.org/documentation/abi.pdf , page 16
            self.state.regs.rsp = (self.state.regs.rsp & 0xfffffffffffffff0) - 8
        self.call(self.main, (self.argc, self.argv, self.envp), 'after_main')

    def after_main(self, main, argc, argv, init, fini, exit_addr=0):
        """Continuation after ``main`` returns: terminate the program."""
        self.exit(0)

    def static_exits(self, blocks):
        """Statically recover the call targets passed to __libc_start_main.

        Executes *blocks* on a blank fastpath state and reads the calling
        convention's argument slots to find the init/main/fini pointers.
        """
        # Execute those blocks with a blank state, and then dump the arguments
        blank_state = angr.SimState(project=self.project, mode="fastpath")
        # set up the stack pointer
        blank_state.regs.sp = 0x7fffffff

        # Execute each block
        state = blank_state
        for b in blocks:
            irsb = angr.SimEngineVEX().process(state, b,
                force_addr=next(iter(stmt for stmt in b.statements if isinstance(stmt, pyvex.IRStmt.IMark))).addr)
            if irsb.successors:
                state = irsb.successors[0]
            else:
                break

        cc = angr.DEFAULT_CC[self.arch.name](self.arch)
        args = [ cc.arg(state, _) for _ in xrange(5) ]
        main, _, _, init, fini = self._extract_args(blank_state, *args)

        all_exits = [
            (init, 'Ijk_Call'),
            (main, 'Ijk_Call'),
            (fini, 'Ijk_Call'),
        ]

        return all_exits

    @staticmethod
    def _extract_args(state, main, argc, argv, init, fini):
        """
        Extract arguments, correcting for architecture-specific quirks.

        :param angr.sim_state.SimState state: The program state.
        :param main: An argument to __libc_start_main.
        :param argc: An argument to __libc_start_main.
        :param argv: An argument to __libc_start_main.
        :param init: An argument to __libc_start_main.
        :param fini: An argument to __libc_start_main.
        :return: A tuple of five elements: (main, argc, argv, init, fini)
        :rtype: tuple
        """
        main_ = main
        argc_ = argc
        argv_ = argv
        init_ = init
        fini_ = fini
        if state.arch.name == "PPC32":
            # for some dumb reason, PPC passes arguments to libc_start_main in some completely absurd way
            argv_ = argc_
            argc_ = main_
            main_ = state.mem[state.regs.r8 + 4:].int.resolved
            init_ = state.mem[state.regs.r8 + 8:].int.resolved
            fini_ = state.mem[state.regs.r8 + 12:].int.resolved
        elif state.arch.name == "PPC64":
            main_ = state.mem[state.regs.r8 + 8:].long.resolved
            init_ = state.mem[state.regs.r8 + 16:].long.resolved
            fini_ = state.mem[state.regs.r8 + 24:].long.resolved
        return main_, argc_, argv_, init_, fini_
from archinfo import ArchAMD64
| [
"[email protected]"
] | |
d1ef46174618edcfd908c875a157a06da832d91a | 602ea0c05970cbd766df068b003671c561f59661 | /tools/perf/benchmarks/jetstream2.py | 19f31f16c1bc952e688b1bb19284defef99e3e9d | [
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.0-or-later",
"MIT",
"BSD-3-Clause",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.0-only",
"APSL-2.0",
"LicenseRef-scancode-unknown",
"Zlib"
] | permissive | VitalyKononenko/chromium | 088de78a639375b073cabb7665afc638334e8672 | b8ad2cadb6a163269cd7851bc7962744743785bd | refs/heads/master | 2023-03-01T10:15:00.815394 | 2019-08-15T19:51:40 | 2019-08-15T19:51:40 | 202,603,102 | 1 | 0 | BSD-3-Clause | 2019-08-15T19:54:34 | 2019-08-15T19:54:33 | null | UTF-8 | Python | false | false | 1,655 | py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Apple's JetStream 2 benchmark.
JetStream 2 combines together a variety of JavaScript and Web Assembly
benchmarks, covering a variety of advanced workloads and programming
techniques, and reports a single score that balances them using a geometric
mean.
Each benchmark measures a distinct workload, and no single optimization
technique is sufficient to speed up all benchmarks. Some benchmarks
demonstrate tradeoffs, and aggressive or specialized optimizations for one
benchmark might make another benchmark slower. JetStream 2 rewards browsers
that start up quickly, execute code quickly, and continue running smoothly.
Each benchmark in JetStream 2 computes its own individual score. JetStream 2
weighs each benchmark equally, taking the geometric mean over each individual
benchmark's score to compute the overall JetStream 2 score.
"""
from telemetry import benchmark
import page_sets
from benchmarks import press
@benchmark.Info(emails=['[email protected]', '[email protected]'],
                component='Blink>JavaScript',
                documentation_url='https://browserbench.org/JetStream/in-depth.html')
class Jetstream2(press._PressBenchmark): # pylint: disable=protected-access
  """JetStream2, a combination of JavaScript and Web Assembly benchmarks.

  Run all the Jetstream 2 benchmarks by default.
  """
  @classmethod
  def Name(cls):
    # NOTE(review): the UNSCHEDULED_ prefix presumably keeps this benchmark
    # off the scheduled bot rotation -- confirm before renaming.
    return 'UNSCHEDULED_jetstream2'

  def CreateStorySet(self, options):
    # Returns the story set containing every JetStream 2 story.
    return page_sets.Jetstream2StorySet()
| [
"[email protected]"
] | |
d9f3996fc6b6e11676bb6d73c8c96a5562d5fcec | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/gui/Scaleform/daapi/view/lobby/tank_setup/ammunition_setup_vehicle.py | 572f09c20630d76920f59564d58da4e89187b639 | [] | no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 972 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/tank_setup/ammunition_setup_vehicle.py
from CurrentVehicle import g_currentVehicle
from helpers import dependency
from skeletons.gui.shared import IItemsCache
class _TankSetupVehicle(object):
    """Holds the vehicle displayed in the ammunition/tank-setup screen.

    When no vehicle has been set explicitly, accessors fall back to the
    vehicle currently selected in the hangar (``g_currentVehicle``).
    """
    __slots__ = ('__vehicle',)
    _itemsCache = dependency.descriptor(IItemsCache)

    def __init__(self):
        super(_TankSetupVehicle, self).__init__()
        self.__vehicle = None

    def setVehicle(self, value):
        self.__vehicle = value

    @property
    def item(self):
        vehicle = self.__vehicle
        return vehicle if vehicle else g_currentVehicle.item

    @property
    def defaultItem(self):
        if g_currentVehicle.isPresent():
            return g_currentVehicle.item
        return None

    def isPresent(self):
        return self.__vehicle is not None

    def dispose(self):
        self.__vehicle = None
| [
"[email protected]"
] | |
b001592fbd0025106b5c0de3a8e0852a6fb0006e | 226b1c73a706f4734834196d18305d4d2c873589 | /synlib/descriptions/EDFFXL.py | 206bab5d4710ad637b02fca8e34e937d415a73fb | [] | no_license | ocakgun/vlsistuff | 43b4b07ae186b8d2360d11c57cd10b861e96bcbe | 776c07f5d0c40fe7d410b5c85e7381017d4dab64 | refs/heads/master | 2022-06-13T14:40:22.641310 | 2020-05-08T11:09:00 | 2020-05-08T11:09:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | Desc = cellDescClass("EDFFXL")
Desc.properties["cell_leakage_power"] = "1762.140420"
Desc.properties["cell_footprint"] = "edff"
Desc.properties["area"] = "76.507200"
Desc.pinOrder = ['CK', 'D', 'E', 'IQ', 'IQN', 'Q', 'QN', 'next']
Desc.add_arc("CK","D","setup_rising")
Desc.add_arc("CK","D","hold_rising")
Desc.add_arc("CK","E","setup_rising")
Desc.add_arc("CK","E","hold_rising")
Desc.add_arc("CK","Q","rising_edge")
Desc.add_arc("CK","QN","rising_edge")
Desc.add_param("area",76.507200);
Desc.set_pin_job("CK","clock")
Desc.add_pin("CK","input")
Desc.add_pin("E","input")
Desc.add_pin("D","input")
Desc.add_pin("IQ","output")
Desc.add_pin_func("IQ","unknown")
Desc.add_pin("next","output")
Desc.add_pin_func("next","unknown")
Desc.add_pin("Q","output")
Desc.add_pin_func("Q","unknown")
Desc.add_pin("IQN","output")
Desc.add_pin_func("IQN","unknown")
Desc.add_pin("QN","output")
Desc.add_pin_func("QN","unknown")
Desc.set_job("flipflop")
CellLib["EDFFXL"]=Desc
| [
"[email protected]"
] | |
021e4fcee7a62e92f84e0a057de120f6f6d67961 | 6c8f3ab5f952d986a17edda582c5a039bf65c632 | /django/consolidate_project/consolidate_project/settings.py | 08a82252a1c6016afd3b14e4b91661d7bd5f4c59 | [] | no_license | phillipn/coding_bootcamp_projects | 3d3bd697728dd4502267e0cd2be7a090952029a8 | 278f96df9d256364583654a00fe585d474ea86a1 | refs/heads/master | 2021-01-17T17:30:14.607944 | 2017-03-19T18:12:32 | 2017-03-19T18:12:32 | 82,971,619 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,218 | py | """
Django settings for consolidate_project project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'tqgt7)2b(iphav%!(5-e1(6kk%x=*o^#&i_aa_ab55)t0xgj5_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.login',
'apps.registration',
'apps.turtles',
'apps.classes',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'consolidate_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'consolidate_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
4e3e8013222191f5557eeefbb7ca5e65131aeece | aebc347ff9a8ad739111f13aa8d4cf9d48a1e4bd | /data/170818/170818_074301_spectral_scan/0019_electron_loading_trap_studies_E5071C.py | b1d29626c5ebe7e0216866f17a237346e2a494bb | [] | no_license | geyang/170422_EonHe_M018V6 | f01a60d3b8a911ba815a0fcc0bf1b6e2aa8f5f17 | ce189e22f99942e46fce84a0dca714888e44bc69 | refs/heads/master | 2021-06-22T21:01:23.257239 | 2017-08-25T09:48:17 | 2017-08-25T09:48:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,239 | py | from data_cache import dataCacheProxy
from time import sleep, time, strftime
from setup_instruments import fridge, seekat, yoko1, nwa, filament
from resonance_fitting import fit_res_gerwin
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
import os
from shutil import copyfile
import quicktimetrace_4D as fastsweep
this_script = r"0019_electron_loading_trap_studies_E5071C.py"
expt = 'spectral_scan'
#res = seekat
t0 = time()
if __name__ == "__main__":
today = strftime("%y%m%d")
now = strftime("%H%M%S")
expt_path = os.path.join(r'S:\_Data\170422 - EonHe M018V6 with L3 etch\data', today, "%s_electron_loading_2D_resg_trap" % now)
print "Saving data in %s" % expt_path
if not os.path.isdir(expt_path):
os.makedirs(expt_path)
sleep(1)
copyfile(os.path.join(r"S:\_Data\170422 - EonHe M018V6 with L3 etch\experiment", this_script),
os.path.join(expt_path, this_script))
dataCache = dataCacheProxy(file_path=os.path.join(expt_path, os.path.split(expt_path)[1] + ".h5"))
prefix = "electron_loading"
fridgeParams = {'wait_for_temp': 0.080,
'min_temp_wait_time': 60}
filamentParams = {"amplitude": 4.2,
"offset": -0.5,
"frequency": 113e3,
"duration": 40e-3}
pulseParams = {"delay": .00,
"pulses": 200}
def set_voltages(res, trap, res_guard, trap_guard, pinch=None, verbose=True):
    """Program the seekat DC bias channels and log the result.

    Channel map: 1 = resonator, 2 = trap, 3 = resonator guard,
    4 = trap guard, 5 = pinch.  A channel passed as None is left
    unchanged, but the logged row always records all five settings as
    read back from the seekat.  Each call appends
    [timestamp, ch1 .. ch5] to the 'voltage_log' dataset.
    """
    if res is not None:
        seekat.set_voltage(1, res, verbose=verbose)
    if trap is not None:
        seekat.set_voltage(2, trap, verbose=verbose)
    if res_guard is not None:
        seekat.set_voltage(3, res_guard, verbose=verbose)
    if trap_guard is not None:
        seekat.set_voltage(4, trap_guard, verbose=verbose)
    if pinch is not None:
        seekat.set_voltage(5, pinch, verbose=verbose)
    dataCache.post("voltage_log", np.array([time(),
                                            seekat.get_voltage(1), seekat.get_voltage(2),
                                            seekat.get_voltage(3), seekat.get_voltage(4),
                                            seekat.get_voltage(5)]))
def get_voltages():
    """Return the current (res, trap, res_guard, trap_guard, pinch) biases
    as read back from seekat channels 1-5."""
    return seekat.get_voltage(1), seekat.get_voltage(2), seekat.get_voltage(3), \
           seekat.get_voltage(4), seekat.get_voltage(5)
filament.setup_driver(**filamentParams)
filament.set_timeout(10000)
print filament.get_id()
def unload():
    """Eject electrons by cycling every electrode through negative bias.

    Steps resonator, trap and both guards together through a
    -1 -> -4 -> -1 V staircase, repeated five times, pausing 0.5 s at
    each setting.
    """
    print "********************"
    print "UNLOADING ELECTRONS!"
    print "********************"
    for k in range(5):
        print "\tStep %d"%(k+1)
        for volts in [-1, -2, -3, -4, -3, -2, -1]:
            set_voltages(volts, volts, volts, volts, verbose=False)
            sleep(0.5)
def unload_trap(start=-3.0, stop=-5.0):
    """Eject electrons from the trap only, then restore the original biases.

    The resonator, guard and pinch settings are read first and held fixed
    while the trap electrode is stepped start -> stop -> start (1 V steps),
    repeated five times with 0.5 s per setting.  The initial trap voltage
    is restored at the end.
    """
    print "********************"
    print "UNLOADING TRAP ONLY!"
    print "********************"
    res_init, trap_init, res_guard_init, trap_guard_init, pinch = get_voltages()
    # Down-and-back staircase of trap voltages.
    vs = list(np.arange(start, stop, -1)) +\
         list(np.arange(stop, start, +1))
    for k in range(5):
        print "\tStep %d"%(k+1)
        for volts in vs:
            set_voltages(res_init, volts, res_guard_init, trap_guard_init, verbose=False)
            sleep(0.5)
    set_voltages(res_init, trap_init, res_guard_init, trap_guard_init)
def take_trace_and_save(averages, do_fit=False):
    """Record one network-analyzer trace together with the fridge state.

    Posts temperature, the five electrode voltages, the trace
    (fpts/mags/phases) and the elapsed time since t0 to the data cache.
    If *do_fit* is set, attempts a Lorentzian fit around the peak and
    posts the fitted center frequency as 'f0'.  Returns the mixing-chamber
    temperature read at the start of the call.
    """
    temperature = fridge.get_mc_temperature()
    dataCache.post('temperature', temperature)

    Vres, Vtrap, Vrg, Vtg, Vpinch = get_voltages()
    dataCache.post('Vres', Vres)
    dataCache.post('Vtrap', Vtrap)
    dataCache.post('Vpinch', Vpinch)
    dataCache.post('Vrg', Vrg)
    dataCache.post('Vtg', Vtg)

    if averages > 1:
        fpts, mags, phases = nwa.take_one_averaged_trace()
    else:
        fpts, mags, phases = nwa.take_one()

    if do_fit:
        fitspan = 2E6;
        center_freq = fpts[np.argmax(mags)]
        try:
            # NOTE(review): neither ``dsfit`` nor ``dataanalysis`` appears
            # among this script's visible imports; if they are indeed
            # undefined, this raises NameError and the bare except below
            # silently reports "FIT FAILED!" on every call -- confirm.
            fitres = dsfit.fitlor(fpts, dataanalysis.dBm_to_W(mags),
                                  domain=[center_freq - fitspan / 2., center_freq + fitspan / 2.])
            dataCache.post('f0', fitres[2])
        except:
            print "FIT FAILED!"

        if np.max(mags) < -55:
            print "WARNING: PEAK OUT OF RANGE!"
        #else:
        #nwa.set_center_frequency(fitres[2])

    dataCache.post('fpts', fpts)
    dataCache.post('mags', mags)
    dataCache.post('phases', phases)
    dataCache.post('time', time() - t0)
    return temperature
def unload_with_filament():
    """Fire the filament once with resonator and trap at -3 V (guards at 0 V).

    Per the inline comment, this initial firing is intended to reset the
    electron population before a controlled load.  Waits for the mixing
    chamber to cool below 550 mK before firing, then settles for 60 s.
    NOTE(review): no-op unless the module-level flag ``load_electrons``
    (defined later in the script) is truthy -- confirm intended scoping.
    """
    # First loading to get rid of most electrons!
    if load_electrons:
        set_voltages(-3.0, -3.0, 0.0, 0.0)
        sleep(2.0)
        temperature = fridge.get_mc_temperature()
        print "Waiting for consistent electron loading temperature of < 550 mK...."
        while temperature > 0.550:
            temperature = fridge.get_mc_temperature()
            sleep(2)
            print '.',
        filament.fire_filament(100, 0.01)
        print "Fired filament!"
        sleep(60.0)
def load_resonator_not_trap():
    """Load electrons onto the resonator while keeping the trap repulsive.

    Biases the resonator to +2 V and the trap to -3 V, waits for the MC
    temperature to drop below 550 mK, fires the filament, then blocks
    until the fridge settles back below 550 mK.
    """
    print "\n"
    print "********************"
    print "LOADING ELECTRONS..."
    print "********************"
    set_voltages(2.0, -3.0, 0.0, 0.0)
    sleep(2.0)
    temperature = fridge.get_mc_temperature()
    print "Waiting for consistent electron loading temperature of < 550 mK...."
    while temperature > 0.550:
        temperature = fridge.get_mc_temperature()
        sleep(2)
        print '.',
    filament.fire_filament(100, 0.01)
    print "Fired filament!"
    sleep(60.0)
    # Wait for the post-firing temperature transient to die out.
    not_settled = True
    stable_temp = 0.550
    # print "Waiting for temperature to stabilize to %.0f mK..." % (stable_temp * 1E3)
    while not_settled:
        temperature = fridge.get_mc_temperature()
        if temperature <= stable_temp:
            not_settled = False
# ---- Network analyzer and bias setup --------------------------------------
nwa.set_measure('S21')
# Ground every electrode (incl. pinch), then shake off leftover electrons.
set_voltages(0.00, 0.00, 0.00, 0.00, pinch=0.00)
unload()
load_electrons = True  # master switch: actually fire the filament below
power = -40  # NWA drive power (dBm), stored in nwa_config below
averages = 25  # >1 selects the averaged-trace acquisition path everywhere
sweep_points = 801
nwa.set_trigger_source('BUS')
nwa.set_format('SLOG')
nwa_config = {'start' : 6.385E9,
              'stop': 6.407E9,
              'sweep_points': sweep_points,
              'power': power,
              'averages': averages,
              'ifbw': nwa.get_ifbw()}
nwa.configure(**nwa_config)
nwa.set_electrical_delay(68E-9)
nwa.set_phase_offset(180.0)
# Persist the NWA settings alongside the measurement data.
dataCache.set_dict('nwa_config', nwa_config)
nwa.auto_scale()
# ---- Sweep definition: resonator fixed at 0.6 V, trap ramped up then down --
Vresguards = list(np.arange(0.00, 0.15+0.025, +0.025))
Vtraps = list(np.arange(0.00, 0.25, 0.005)) \
         + list(np.arange(0.25, 0.00, -0.005))
Vress = 0.6 * np.ones(len(Vtraps))
# Preview figure: programmed voltages plus one trace taken before loading.
fig = plt.figure(figsize=(8.,12.))
plt.subplot(311)
plt.plot(Vress, 'o', color="#23aaff", markeredgecolor="none", label="Resonator")
plt.plot(Vtraps, 'o', color="#f4b642", markeredgecolor="none", label='Trap')
plt.ylabel("Resonator voltage (V)")
plt.xlim(0, np.max([len(Vress), len(Vtraps)]))
plt.legend(loc=0, prop={'size' : 8})
if averages > 1:
    fpts, mags, phases = nwa.take_one_averaged_trace()
else:
    fpts, mags, phases = nwa.take_one()
plt.subplot(312)
# Annotate the trace with the bias point it was taken at.
current_vres, current_vtrap, current_vrg, current_vtg, pinch = get_voltages()
plt.text(np.min(fpts) + 0.10*(np.max(fpts)-np.min(fpts)),
         np.min(mags) + 0.85*(np.max(mags) - np.min(mags)),
         "res, trap, rg, tg = (%.2fV, %.2fV, %.2fV, %.2fV)" % (current_vres, current_vtrap, current_vrg, current_vtg))
plt.plot(fpts, mags)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Magnitude (dB)')
plt.xlim(np.min(fpts), np.max(fpts))
plt.subplot(313)
plt.plot(fpts, phases)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Phase (deg)')
plt.xlim(np.min(fpts), np.max(fpts))
fig.savefig(os.path.join(expt_path, "pre_electron_loading.png"), dpi=200)
plt.show()
# Return the NWA to free-running display mode while loading happens.
nwa.set_format('MLOG')
nwa.auto_scale()
nwa.set_trigger_source('INT')
#unload_with_filament()
nwa.set_trigger_source('BUS')
nwa.set_format('SLOG')
nwa.set_average_state(True)
if load_electrons:
    abs_deltaf = 1e9
    Q = 0
    # Set both the Q and deltaf threshold to something low if you want it to continue after the first load
    # Repeat unload/load cycles until the fitted resonance satisfies both
    # the Q and the frequency-shift criteria below.
    while not (Q > 9000 and abs_deltaf > 6.5E6):
        unload_with_filament()
        load_resonator_not_trap()
        set_voltages(0.6, -2.0, None, None)
        sleep(1.0)
        #set_voltages(Vress[0], Vtraps[0], None)
        if averages > 1:
            fpts, mags, phases = nwa.take_one_averaged_trace()
        else:
            fpts, mags, phases = nwa.take_one()
        f0, Q = fit_res_gerwin(fpts, mags, span=3E6)
        # Shift relative to the reference frequency 6.40511 GHz.
        abs_deltaf = np.abs(f0-6.40511e9)
        print "Fit result after loading: delta f = %.2f MHz and Q = %.0f" % (abs_deltaf/1E6, Q)
    sleep(120)
# NOTE(review): f0 appears to be assigned only inside the loop above; if
# load_electrons is False the next line would raise NameError -- confirm.
nwa.set_center_frequency(f0+0.15E6)
nwa.set_span(3E6)
print "Set center frequency to %.6f GHz (shift = %.2f MHz)"%(f0/1E9, (f0-6.40511e9)/1E6)
dataCache.post('data_shape', [len(Vresguards), len(Vtraps)])
set_voltages(None, None, 0.00, 0.00)
print "Starting sweep..."
set_voltages(Vress[0], Vtraps[0], Vresguards[0], None, -1.00)
# Outer loop: resonator-guard voltage; inner loop: trap voltage up & down.
for Vrg in Vresguards:
    set_voltages(None, None, Vrg, None)
    for Vtrap in tqdm(Vtraps):
        set_voltages(None, Vtrap, None, None)
        take_trace_and_save(averages, do_fit=True)
# for Vrg in Vresguards:
#     set_voltages(None, None, Vrg, None)
#     for Vtrap in tqdm(Vtraps):
#         set_voltages(None, Vtrap, None, None)
#         take_trace_and_save(averages, do_fit=True)
# Leave the NWA free-running for interactive use.
nwa.set_format('MLOG')
nwa.auto_scale()
nwa.set_trigger_source('INT')
| [
"[email protected]"
] | |
580f96d5338bff027bec064c4f87a82504567a6d | 9cd9e89359f0da1750a19f0609619e3bbe4c536e | /tests/fork_test.py | 61c0e16d67a73a9a7ee9b0fcc4c582058fe208d0 | [
"BSD-3-Clause"
] | permissive | ph448/mitogen | e961c95578c852908e33861da7226919547070f0 | c24d29d3676aa122e25716450246306aaf4a797b | refs/heads/master | 2020-03-19T01:48:11.362786 | 2018-05-28T05:00:57 | 2018-05-28T05:00:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,219 | py |
import ctypes
import os
import random
import ssl
import struct
import sys
import mitogen
import unittest2
import testlib
import plain_old_module
# True when this interpreter is a 64-bit build (pointer size == 8 bytes).
IS_64BIT = struct.calcsize('P') == 8
# (sys.platform, IS_64BIT) -> filesystem path of the OpenSSL shared library.
# 'linux2' is the Python 2 spelling of the Linux platform string.
PLATFORM_TO_PATH = {
    ('darwin', False): '/usr/lib/libssl.dylib',
    ('darwin', True): '/usr/lib/libssl.dylib',
    ('linux2', False): '/usr/lib/libssl.so',
    ('linux2', True): '/usr/lib/x86_64-linux-gnu/libssl.so',
}
# Raw libssl handle; declare RAND_pseudo_bytes(char *buf, int num) -> int so
# ctypes marshals the call correctly.
c_ssl = ctypes.CDLL(PLATFORM_TO_PATH[sys.platform, IS_64BIT])
c_ssl.RAND_pseudo_bytes.argtypes = [ctypes.c_char_p, ctypes.c_int]
c_ssl.RAND_pseudo_bytes.restype = ctypes.c_int
def ping():
    """Minimal remote-callable used to prove a context is alive."""
    sentinel = 123
    return sentinel
def random_random():
    """Draw one float from this process's `random` module PRNG state."""
    sample = random.random()
    return sample
def RAND_pseudo_bytes(n=32):
    """Return `n` pseudo-random bytes produced directly by libssl's PRNG."""
    out = ctypes.create_string_buffer(n)
    rc = c_ssl.RAND_pseudo_bytes(out, n)
    assert rc == 1
    return out[:]
def exercise_importer(n):
    """
    Ensure the forked child has a sensible importer.

    Removes the on-disk test data directory from sys.path first, so the
    `simple_pkg` import below cannot be satisfied from the local
    filesystem and must go through the parent's importer instead.
    Returns simple_pkg.a.subtract_one_add_two(n).
    """
    sys.path.remove(testlib.DATA_DIR)
    import simple_pkg.a
    return simple_pkg.a.subtract_one_add_two(n)
class ForkTest(testlib.RouterMixin, unittest2.TestCase):
    """Exercise children created with Router.fork() from the master."""

    def test_okay(self):
        child = self.router.fork()
        # The fork must be a distinct process whose parent is us.
        self.assertNotEqual(child.call(os.getpid), os.getpid())
        self.assertEqual(child.call(os.getppid), os.getpid())

    def test_random_module_diverges(self):
        # The child's `random` state must be re-seeded after fork.
        child = self.router.fork()
        self.assertNotEqual(child.call(random_random), random_random())

    def test_ssl_module_diverges(self):
        # Ensure generator state is initialized before forking.
        RAND_pseudo_bytes()
        child = self.router.fork()
        self.assertNotEqual(child.call(RAND_pseudo_bytes),
                            RAND_pseudo_bytes())

    def test_importer(self):
        child = self.router.fork()
        self.assertEqual(2, child.call(exercise_importer, 1))

    def test_on_start(self):
        recv = mitogen.core.Receiver(self.router)

        def on_start(econtext):
            # Runs inside the child; message back through the parent.
            mitogen.core.Sender(econtext.parent, recv.handle).send(123)

        child = self.router.fork(on_start=on_start)
        self.assertEquals(123, recv.get().unpickle())
class DoubleChildTest(testlib.RouterMixin, unittest2.TestCase):
    """Tests for forking a grandchild via an existing forked child."""

    def test_okay(self):
        # When forking from the master process, Mitogen had nothing to do with
        # setting up stdio -- that was inherited wherever the Master is running
        # (supervisor, iTerm, etc). When forking from a Mitogen child context
        # however, Mitogen owns all of fd 0, 1, and 2, and during the fork
        # procedure, it deletes all of these descriptors. That leaves the
        # process in a weird state that must be handled by some combination of
        # fork.py and ExternalContext.main().
        # Below we simply test whether ExternalContext.main() managed to boot
        # successfully. In future, we need lots more tests.
        c1 = self.router.fork()
        c2 = self.router.fork(via=c1)
        self.assertEquals(123, c2.call(ping))

    def test_importer(self):
        # The grandchild must also end up with a working importer.
        c1 = self.router.fork(name='c1')
        c2 = self.router.fork(name='c2', via=c1)
        self.assertEqual(2, c2.call(exercise_importer, 1))
# Allow running this module directly: discover and run the tests above.
if __name__ == '__main__':
    unittest2.main()
| [
"[email protected]"
] | |
426610fdd1399339d2e3a2442398b51ab5209027 | c5146f60c3a865050433229ba15c5339c59a9b68 | /zhaquirks/plaid/__init__.py | 203efe3bfb31a25162bfb3a165e8d609307592a0 | [
"Apache-2.0"
] | permissive | Shulyaka/zha-device-handlers | 331505618a63691a86b83977b43508b0e3142af2 | 84d02be7abde55a6cee80fa155f0cbbc20347c40 | refs/heads/dev | 2023-02-22T10:17:09.000060 | 2023-02-04T15:57:17 | 2023-02-04T15:57:17 | 194,286,710 | 1 | 0 | Apache-2.0 | 2023-02-04T22:59:20 | 2019-06-28T14:25:54 | Python | UTF-8 | Python | false | false | 72 | py | """Quirks for Plaid Systems devices."""
# NOTE(review): presumably the manufacturer string reported by Plaid Systems
# Zigbee devices, matched by quirk definitions -- confirm against the quirks.
PLAID_SYSTEMS = "PLAID SYSTEMS"
| [
"[email protected]"
] | |
3726da4b6e8c640f2c1e4980ff8758f66e31bb14 | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/era5_scripts/02_preprocessing/lag82/504-tideGauge.py | f6cdd6a41e7cde5295cbc1bf322d1a52927b4360 | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,984 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:12:23 2020
****************************************************
Load predictors & predictands + predictor importance
****************************************************
@author: Michael Tadesse
"""
#import packages
import os
import pandas as pd
import datetime as dt #used for timedelta
from datetime import datetime
# Input: combined ERA5 predictor CSVs, one file per tide gauge.
dir_in = '/lustre/fs0/home/mtadesse/ereaFiveCombine'
# Output: the same CSVs widened with time-lagged predictor columns.
dir_out = '/lustre/fs0/home/mtadesse/eraFiveLag'
def lag(start_idx=504, stop_idx=505, lag_hrs=(0, 6, 12, 18, 24, 30)):
    """Create time-lagged predictor CSVs for a slice of tide gauges.

    For each tide-gauge CSV in ``dir_in`` (selected by position in the
    directory listing), builds a daily date axis spanning the record and,
    for every lag in ``lag_hrs``, merges the predictors onto the
    back-shifted dates.  The widened table is written under the same file
    name in ``dir_out``.

    Parameters
    ----------
    start_idx, stop_idx : int
        Half-open range of indices into the ``os.listdir`` file list.
        The defaults reproduce the previously hard-coded station 504.
    lag_hrs : sequence of int
        Lags in hours to generate; column ``lagH`` holds dates shifted
        back by ``H`` hours.
    """
    os.chdir(dir_in)
    tg_list_name = os.listdir()
    for t in range(start_idx, stop_idx):
        tg_name = tg_list_name[t]
        print(tg_name, '\n')

        # Read this gauge's predictor table and sort it chronologically.
        os.chdir(dir_in)
        pred = pd.read_csv(tg_name)
        pred.sort_values(by='date', inplace=True)
        pred.reset_index(drop=True, inplace=True)

        # Daily date range covering the record (date part only).
        start_time = pred['date'][0].split(' ')[0]
        end_time = pred['date'].iloc[-1].split(' ')[0]
        print(start_time, ' - ', end_time, '\n')
        date_range = pd.date_range(start_time, end_time, freq='D')

        # String and timestamp views of the daily axis.
        time_converted_str = pd.DataFrame(
            [str(d) for d in date_range], columns=['date'])
        time_converted_stamp = pd.DataFrame(date_range, columns=['timestamp'])

        # One column of lagged date strings per requested lag.
        time_lagged = pd.DataFrame()
        for lag_hr in lag_hrs:
            lag_name = 'lag' + str(lag_hr)
            delta = dt.timedelta(hours=lag_hr)
            lag_new = pd.DataFrame(
                [str(ts - delta) for ts in time_converted_stamp['timestamp']],
                columns=[lag_name])
            time_lagged = pd.concat([time_lagged, lag_new], axis=1)
        # All lagged axes next to the unshifted one (column 0).
        time_all = pd.concat([time_converted_str, time_lagged], axis=1)

        # Merge the predictors once per lagged axis and stack side by side.
        pred_lagged = pd.DataFrame()
        for ii in range(1, time_all.shape[1]):  # skip col 0 (unshifted date)
            print(time_all.columns[ii])
            lag_ts = pd.DataFrame(time_all.iloc[:, ii])
            lag_ts.columns = ['date']
            pred_new = pd.merge(pred, lag_ts, on=['date'], how='right')
            # NOTE(review): assumes the CSV carried a stray index column.
            pred_new.drop('Unnamed: 0', axis=1, inplace=True)
            # NaN rows can end up at the bottom; re-sort chronologically.
            pred_new.sort_values(by='date', inplace=True)
            pred_new.reset_index(drop=True, inplace=True)
            if ii == 1:
                pred_lagged = pred_new
            else:
                pred_lagged = pd.concat(
                    [pred_lagged, pred_new.iloc[:, 1:]], axis=1)

        # Save beside the other lagged gauges, then return to the input dir.
        os.chdir(dir_out)
        pred_lagged.to_csv(tg_name)
        os.chdir(dir_in)
#run script
# NOTE(review): runs at import time; consider guarding with
# `if __name__ == '__main__':` so the module can be imported safely.
lag()
| [
"[email protected]"
] | |
d0f805cd5b4c54300491e93aef4f4b816517393e | ea872f0a2bcc4270b7089120e3eb2f8dd32a165e | /Baxter/build/planning_baxter/catkin_generated/pkg.develspace.context.pc.py | e788f2c9fa75bd9400e0e1903a35e10d75c2678c | [] | no_license | ZhenYaGuo/Warehouse-Robotic-System | 2def137478911f499c45276aa3103a0b68ebb8d7 | 47b78d111b387102e29d2596bd5dc7c704f74f8f | refs/heads/master | 2021-08-24T04:12:43.379580 | 2017-12-08T01:48:09 | 2017-12-08T01:48:09 | 113,405,332 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE(review): generated by catkin's pkg.context.pc.in template at CMake
# configure time -- manual edits here are overwritten on the next build.
CATKIN_PACKAGE_PREFIX = ""
# The `"".split(';') if "" != "" else []` pattern is the template's way of
# emitting an empty list when CMake substituted an empty string.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "planning_baxter"
PROJECT_SPACE_DIR = "/home/cc/ee106a/fa17/class/ee106a-aax/ros_workspaces/project/devel"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
] | |
541499ca55be3f2e1263c133d3499ab8864a3f6e | 0649c9b00358dd7b3ca418f7ce15ff88507ed6b3 | /storage_data.py | bbaf647ccba897a5ed812da12c2ddb2007fc1b50 | [] | no_license | Aaron9477/sky_lake | 9c9b7904dbd3e36a366508aa0d3beccedd7355d9 | baaba25bc72c81cf0868136a623036529eb9a840 | refs/heads/master | 2021-08-08T00:02:03.903907 | 2017-11-09T06:54:31 | 2017-11-09T06:54:31 | 109,121,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | import src.utils as u
if __name__ == '__main__':
for m in u.get_malls():
u.get_data(m) | [
"[email protected]"
] | |
f5b45500bb75688f6f3ca574206f37660a15e559 | e9ef558d04f39f0e82ad63e955dd8772e63c99c3 | /chat_project/chat_project/settings.py | a728c23923310ab703ec61e0f1b5ef83ec5c8de4 | [
"MIT"
] | permissive | nahidsaikat/Chat | 5634ff91eef394ec2b6288d1adff17f0eb867b15 | 7d314195b03d355844767f7c89cca34f0fad95c9 | refs/heads/master | 2022-12-14T03:44:01.105994 | 2021-06-30T15:19:21 | 2021-06-30T15:19:21 | 177,983,242 | 0 | 0 | MIT | 2022-12-08T07:44:46 | 2019-03-27T11:49:48 | Python | UTF-8 | Python | false | false | 3,434 | py | """
Django settings for chat_project project.
Generated by 'django-admin startproject' using Django 2.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ti1$n@9k5ub@9r76iw*f(&m*8#wm#-oiiid2jzi)_94bjq_1y&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'chat_app'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'chat_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'chat_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
}
| [
"[email protected]"
] | |
d05b8fe31cb2b3669e6ffacc405b55cbda7ff8b4 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/tunnel/lsite.py | cf53b4bf12e69f5b767bc243bd15658320be6f5d | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 5,189 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class LSite(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.tunnel.LSite")
meta.moClassName = "tunnelLSite"
meta.rnFormat = "lsite-%(id)s"
meta.category = MoCategory.REGULAR
meta.label = "Tunnel info for the local site in a multisite topology"
meta.writeAccessMask = 0x400000000001
meta.readAccessMask = 0x400000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.parentClasses.add("cobra.model.l3.Inst")
meta.superClasses.add("cobra.model.pol.Instr")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.rnPrefixes = [
('lsite-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5581, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "etep", "etep", 33221, PropCategory.REGULAR)
prop.label = "ETEP IP"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("etep", prop)
prop = PropMeta("str", "id", "id", 33222, PropCategory.REGULAR)
prop.label = "Site ID"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("id", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "id"))
def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
namingVals = [id]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
1f62074c0c85f84ac88700f413546240cba19622 | ec78979fd8479e884ab93d723360744db5152134 | /wechat_stat.py | e05254f8304d487894b38f59d8004251e12e30bd | [] | no_license | xushubo/learn-python | 49c5f4fab1ac0e06c91eaa6bd54159fd661de0b9 | 8cb6f0cc23d37011442a56f1c5a11f99b1179ce6 | refs/heads/master | 2021-01-19T17:00:05.247958 | 2017-09-03T03:22:28 | 2017-09-03T03:22:28 | 101,032,298 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,139 | py | import itchat
from echarts import Echart, Legend, Pie
itchat.login() # log in to WeChat (scan the QR code itchat displays)
friends = itchat.get_friends(update=True)[0:] # fetch the full friends list
male = female = other = 0 # counters: male, female, unspecified
# Walk the friends list; entry 0 is the logged-in account itself, so start
# from index 1.  In WeChat's data, Sex == 1 means male and 2 means female.
for i in friends[1:]:
    sex = i['Sex']
    if sex == 1:
        male +=1
    elif sex == 2:
        female += 1
    else:
        other += 1
total = len(friends[1:])
# Report strings below are intentionally left in Chinese (user-facing).
print('wechat好友总数:%d' % total)
print('男性好友: %.2f%%' % (float(male)/total*100))
print('女性好友: %.2f%%' % (float(female)/total*100))
print('其他: %.2f%%' % (float(other)/total*100))
# Disabled: pie-chart rendering of the same breakdown via `echarts`.
'''
chart = Echart('%s的微信好友性别比例' % (friends[0]['NickName']), 'from WeChat')
chart.use(Pie('WeChat', [{'value': male, 'name': '男性 %.2f%%' % (float(male) / total * 100)}, {'value': female, 'name': '女性 %.2f%%' % (float(female) / total * 100)}, {'value': other, 'name': '其他 %.2f%%' % (float(other) / total * 100)}], radius=["50%", "70%"]))
chart.use(Legend(['male', 'female', 'other']))
del chart.json['xAxis']
del chart.json['yAxis']
chart.plot()
'''
"[email protected]"
] | |
c3a0d221d0881ea417f3e5b03fd1a8fe558c52c1 | 632d58b9f7ae470d9ec2b0e88af0aa8054dfa40e | /src/ryzom_django/management/commands/ryzom_bundle.py | 48c255b344ea621534b03d56660dbf76563dd28f | [] | no_license | yourlabs/ryzom | 8d06bf829ee9d31d33fa9353fdf187241c82b6ef | 425859e2de30c3b939756a23a064fb1affe04b02 | refs/heads/master | 2023-05-13T10:27:09.766272 | 2023-05-02T14:49:25 | 2023-05-02T14:49:25 | 192,992,635 | 5 | 1 | null | 2022-10-11T20:19:52 | 2019-06-20T22:03:37 | Python | UTF-8 | Python | false | false | 873 | py | import os
from django.core.management.base import BaseCommand, CommandError
from ryzom_django import bundle
class Command(BaseCommand):
    """Write the ryzom JS & CSS bundles into ryzom_django's static dir."""

    help = 'Write JS & CSS bundles to ryzom_django/static/bundle.*'

    def handle(self, *args, **options):
        # Resolve <ryzom_django package>/static relative to this file.
        static_path = os.path.abspath(
            os.path.join(
                os.path.dirname(__file__),
                '..',
                '..',
                'static',
            )
        )
        # exist_ok avoids the check-then-create race the original had with
        # a separate os.path.exists() test.
        os.makedirs(static_path, exist_ok=True)

        # Plain write mode: nothing is read back, so 'w+' was unnecessary.
        with open(f'{static_path}/bundle.js', 'w') as f:
            f.write(bundle.js())

        with open(f'{static_path}/bundle.css', 'w') as f:
            f.write(bundle.css())

        self.stdout.write(self.style.SUCCESS(f'Successfully wrote {static_path}/bundle.*'))
        self.stdout.write('Do not forget to collectstatic!')
"[email protected]"
] | |
a291c7bfaadb64ce0f0f8fe7ff044a54344a7ba5 | 77c8d29caad199fb239133e6267d1b75bd2dfe48 | /packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi | 78b29c5aa7e794980ce9212e92a26b6fbb0c2072 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | simpoir/pyright | 9c80e596f99dfd1341a55373a96d8795cb72fb56 | 320a0a2fd31e4ffc69d4bd96d7202bbe8d8cb410 | refs/heads/master | 2023-04-18T06:42:16.194352 | 2021-04-29T15:20:19 | 2021-04-29T15:20:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54,998 | pyi | import sys
import types
from _typeshed import (
AnyPath,
OpenBinaryMode,
OpenBinaryModeReading,
OpenBinaryModeUpdating,
OpenBinaryModeWriting,
OpenTextMode,
ReadableBuffer,
SupportsDivMod,
SupportsKeysAndGetItem,
SupportsLessThan,
SupportsLessThanT,
SupportsRDivMod,
SupportsWrite,
)
from ast import AST, mod
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
from types import CodeType, TracebackType
from typing import (
IO,
AbstractSet,
Any,
BinaryIO,
ByteString,
Callable,
Container,
Dict,
FrozenSet,
Generic,
ItemsView,
Iterable,
Iterator,
KeysView,
List,
Mapping,
MutableMapping,
MutableSequence,
MutableSet,
NoReturn,
Optional,
Protocol,
Reversible,
Sequence,
Set,
Sized,
SupportsAbs,
SupportsBytes,
SupportsComplex,
SupportsFloat,
SupportsInt,
SupportsRound,
Tuple,
Type,
TypeVar,
Union,
ValuesView,
overload,
runtime_checkable,
)
from typing_extensions import Literal
if sys.version_info >= (3, 9):
from types import GenericAlias
# Structural protocol for objects usable as integer indices via the
# __index__ slot (PEP 357).
class _SupportsIndex(Protocol):
    def __index__(self) -> int: ...
# Structural protocol for math.trunc()-style truncation to int.
class _SupportsTrunc(Protocol):
    def __trunc__(self) -> int: ...
# Type variables shared by the generic signatures throughout this stub.
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)
_KT = TypeVar("_KT")  # mapping key type
_VT = TypeVar("_VT")  # mapping value type
_S = TypeVar("_S")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_T4 = TypeVar("_T4")
_T5 = TypeVar("_T5")
_TT = TypeVar("_TT", bound="type")
_TBE = TypeVar("_TBE", bound="BaseException")
# Stub for the root of Python's class hierarchy.
class object:
    __doc__: Optional[str]
    __dict__: Dict[str, Any]
    __slots__: Union[str, Iterable[str]]
    __module__: str
    __annotations__: Dict[str, Any]
    @property
    def __class__(self: _T) -> Type[_T]: ...
    # Ignore errors about type mismatch between property getter and setter
    @__class__.setter
    def __class__(self, __type: Type[object]) -> None: ... # type: ignore # noqa: F811
    def __init__(self) -> None: ...
    def __new__(cls) -> Any: ...
    def __setattr__(self, name: str, value: Any) -> None: ...
    def __eq__(self, o: object) -> bool: ...
    def __ne__(self, o: object) -> bool: ...
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    def __hash__(self) -> int: ...
    def __format__(self, format_spec: str) -> str: ...
    def __getattribute__(self, name: str) -> Any: ...
    def __delattr__(self, name: str) -> None: ...
    def __sizeof__(self) -> int: ...
    def __reduce__(self) -> Union[str, Tuple[Any, ...]]: ...
    def __reduce_ex__(self, protocol: int) -> Union[str, Tuple[Any, ...]]: ...
    def __dir__(self) -> Iterable[str]: ...
    def __init_subclass__(cls) -> None: ...
class staticmethod(object): # Special, only valid as a decorator.
    __func__: Callable[..., Any]
    __isabstractmethod__: bool
    def __init__(self, f: Callable[..., Any]) -> None: ...
    def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
    # Descriptor protocol: returns the wrapped function unbound.
    def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ...
class classmethod(object): # Special, only valid as a decorator.
    __func__: Callable[..., Any]
    __isabstractmethod__: bool
    def __init__(self, f: Callable[..., Any]) -> None: ...
    def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ...
    # Descriptor protocol: returns the function bound to the class.
    def __get__(self, obj: _T, type: Optional[Type[_T]] = ...) -> Callable[..., Any]: ...
# Stub for the metaclass of all classes.
class type(object):
    __base__: type
    __bases__: Tuple[type, ...]
    __basicsize__: int
    __dict__: Dict[str, Any]
    __dictoffset__: int
    __flags__: int
    __itemsize__: int
    __module__: str
    __mro__: Tuple[type, ...]
    __name__: str
    __qualname__: str
    __text_signature__: Optional[str]
    __weakrefoffset__: int
    # One-argument form: type(obj); three-argument form: dynamic class creation.
    @overload
    def __init__(self, o: object) -> None: ...
    @overload
    def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any], **kwds: Any) -> None: ...
    @overload
    def __new__(cls, o: object) -> type: ...
    @overload
    def __new__(cls, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any], **kwds: Any) -> type: ...
    def __call__(self, *args: Any, **kwds: Any) -> Any: ...
    def __subclasses__(self: _TT) -> List[_TT]: ...
    # Note: the documentation doesn't specify what the return type is, the standard
    # implementation seems to be returning a list.
    def mro(self) -> List[type]: ...
    def __instancecheck__(self, instance: Any) -> bool: ...
    def __subclasscheck__(self, subclass: type) -> bool: ...
    @classmethod
    def __prepare__(metacls, __name: str, __bases: Tuple[type, ...], **kwds: Any) -> Mapping[str, Any]: ...
    if sys.version_info >= (3, 10):
        # PEP 604: `int | str` creates a types.Union at runtime.
        def __or__(self, t: Any) -> types.Union: ...
class super(object):
    # Explicit two-argument, one-argument, and zero-argument (implicit
    # enclosing class/instance) forms.
    @overload
    def __init__(self, t: Any, obj: Any) -> None: ...
    @overload
    def __init__(self, t: Any) -> None: ...
    @overload
    def __init__(self) -> None: ...
# Stub for the built-in arbitrary-precision integer type.
class int:
    @overload
    def __new__(cls: Type[_T], x: Union[str, bytes, SupportsInt, _SupportsIndex, _SupportsTrunc] = ...) -> _T: ...
    @overload
    def __new__(cls: Type[_T], x: Union[str, bytes, bytearray], base: int) -> _T: ...
    if sys.version_info >= (3, 8):
        def as_integer_ratio(self) -> Tuple[int, Literal[1]]: ...
    # numbers.Integral-style accessors.
    @property
    def real(self) -> int: ...
    @property
    def imag(self) -> int: ...
    @property
    def numerator(self) -> int: ...
    @property
    def denominator(self) -> int: ...
    def conjugate(self) -> int: ...
    def bit_length(self) -> int: ...
    def to_bytes(self, length: int, byteorder: str, *, signed: bool = ...) -> bytes: ...
    @classmethod
    def from_bytes(
        cls, bytes: Union[Iterable[int], SupportsBytes], byteorder: str, *, signed: bool = ...
    ) -> int: ... # TODO buffer object argument
    # Arithmetic and reflected arithmetic operators.
    def __add__(self, x: int) -> int: ...
    def __sub__(self, x: int) -> int: ...
    def __mul__(self, x: int) -> int: ...
    def __floordiv__(self, x: int) -> int: ...
    def __truediv__(self, x: int) -> float: ...
    def __mod__(self, x: int) -> int: ...
    def __divmod__(self, x: int) -> Tuple[int, int]: ...
    def __radd__(self, x: int) -> int: ...
    def __rsub__(self, x: int) -> int: ...
    def __rmul__(self, x: int) -> int: ...
    def __rfloordiv__(self, x: int) -> int: ...
    def __rtruediv__(self, x: int) -> float: ...
    def __rmod__(self, x: int) -> int: ...
    def __rdivmod__(self, x: int) -> Tuple[int, int]: ...
    @overload
    def __pow__(self, __x: Literal[2], __modulo: Optional[int] = ...) -> int: ...
    @overload
    def __pow__(self, __x: int, __modulo: Optional[int] = ...) -> Any: ...  # Return type can be int or float, depending on x.
    def __rpow__(self, x: int, mod: Optional[int] = ...) -> Any: ...
    # Bitwise operators.
    def __and__(self, n: int) -> int: ...
    def __or__(self, n: int) -> int: ...
    def __xor__(self, n: int) -> int: ...
    def __lshift__(self, n: int) -> int: ...
    def __rshift__(self, n: int) -> int: ...
    def __rand__(self, n: int) -> int: ...
    def __ror__(self, n: int) -> int: ...
    def __rxor__(self, n: int) -> int: ...
    def __rlshift__(self, n: int) -> int: ...
    def __rrshift__(self, n: int) -> int: ...
    def __neg__(self) -> int: ...
    def __pos__(self) -> int: ...
    def __invert__(self) -> int: ...
    # Rounding protocol (round/math.trunc/ceil/floor).
    def __trunc__(self) -> int: ...
    def __ceil__(self) -> int: ...
    def __floor__(self) -> int: ...
    def __round__(self, ndigits: Optional[int] = ...) -> int: ...
    def __getnewargs__(self) -> Tuple[int]: ...
    # Comparisons and conversions.
    def __eq__(self, x: object) -> bool: ...
    def __ne__(self, x: object) -> bool: ...
    def __lt__(self, x: int) -> bool: ...
    def __le__(self, x: int) -> bool: ...
    def __gt__(self, x: int) -> bool: ...
    def __ge__(self, x: int) -> bool: ...
    def __str__(self) -> str: ...
    def __float__(self) -> float: ...
    def __int__(self) -> int: ...
    def __abs__(self) -> int: ...
    def __hash__(self) -> int: ...
    def __bool__(self) -> bool: ...
    def __index__(self) -> int: ...
# Stub declarations for the builtin ``float`` type: signatures only, all bodies are ``...``.
class float:
    def __new__(cls: Type[_T], x: Union[SupportsFloat, _SupportsIndex, str, bytes, bytearray] = ...) -> _T: ...
    def as_integer_ratio(self) -> Tuple[int, int]: ...
    def hex(self) -> str: ...
    def is_integer(self) -> bool: ...
    @classmethod
    def fromhex(cls, __s: str) -> float: ...
    @property
    def real(self) -> float: ...
    @property
    def imag(self) -> float: ...
    def conjugate(self) -> float: ...
    def __add__(self, x: float) -> float: ...
    def __sub__(self, x: float) -> float: ...
    def __mul__(self, x: float) -> float: ...
    def __floordiv__(self, x: float) -> float: ...
    def __truediv__(self, x: float) -> float: ...
    def __mod__(self, x: float) -> float: ...
    def __divmod__(self, x: float) -> Tuple[float, float]: ...
    def __pow__(
        self, x: float, mod: None = ...
    ) -> float: ...  # In Python 3, returns complex if self is negative and x is not whole
    def __radd__(self, x: float) -> float: ...
    def __rsub__(self, x: float) -> float: ...
    def __rmul__(self, x: float) -> float: ...
    def __rfloordiv__(self, x: float) -> float: ...
    def __rtruediv__(self, x: float) -> float: ...
    def __rmod__(self, x: float) -> float: ...
    def __rdivmod__(self, x: float) -> Tuple[float, float]: ...
    def __rpow__(self, x: float, mod: None = ...) -> float: ...
    def __getnewargs__(self) -> Tuple[float]: ...
    def __trunc__(self) -> int: ...
    # __ceil__/__floor__ are only declared for 3.9+ (math.ceil/floor handle earlier versions).
    if sys.version_info >= (3, 9):
        def __ceil__(self) -> int: ...
        def __floor__(self) -> int: ...
    # round(x) -> int but round(x, n) -> float, hence the two overloads.
    @overload
    def __round__(self, ndigits: None = ...) -> int: ...
    @overload
    def __round__(self, ndigits: int) -> float: ...
    def __eq__(self, x: object) -> bool: ...
    def __ne__(self, x: object) -> bool: ...
    def __lt__(self, x: float) -> bool: ...
    def __le__(self, x: float) -> bool: ...
    def __gt__(self, x: float) -> bool: ...
    def __ge__(self, x: float) -> bool: ...
    def __neg__(self) -> float: ...
    def __pos__(self) -> float: ...
    def __str__(self) -> str: ...
    def __int__(self) -> int: ...
    def __float__(self) -> float: ...
    def __abs__(self) -> float: ...
    def __hash__(self) -> int: ...
    def __bool__(self) -> bool: ...
# Stub declarations for the builtin ``complex`` type: signatures only, all bodies are ``...``.
class complex:
    # complex() accepts either two numeric parts or a single string/complex-like value.
    @overload
    def __new__(cls: Type[_T], real: float = ..., imag: float = ...) -> _T: ...
    @overload
    def __new__(cls: Type[_T], real: Union[str, SupportsComplex, _SupportsIndex]) -> _T: ...
    @property
    def real(self) -> float: ...
    @property
    def imag(self) -> float: ...
    def conjugate(self) -> complex: ...
    def __add__(self, x: complex) -> complex: ...
    def __sub__(self, x: complex) -> complex: ...
    def __mul__(self, x: complex) -> complex: ...
    def __pow__(self, x: complex, mod: None = ...) -> complex: ...
    def __truediv__(self, x: complex) -> complex: ...
    def __radd__(self, x: complex) -> complex: ...
    def __rsub__(self, x: complex) -> complex: ...
    def __rmul__(self, x: complex) -> complex: ...
    def __rpow__(self, x: complex, mod: None = ...) -> complex: ...
    def __rtruediv__(self, x: complex) -> complex: ...
    def __eq__(self, x: object) -> bool: ...
    def __ne__(self, x: object) -> bool: ...
    def __neg__(self) -> complex: ...
    def __pos__(self) -> complex: ...
    def __str__(self) -> str: ...
    def __abs__(self) -> float: ...  # abs() of a complex is its magnitude, a float
    def __hash__(self) -> int: ...
    def __bool__(self) -> bool: ...
# Structural protocol for the argument of str.format_map: anything indexable by str keys.
class _FormatMapMapping(Protocol):
    def __getitem__(self, __key: str) -> Any: ...
# Stub declarations for the builtin ``str`` type: signatures only, all bodies are ``...``.
# Parameters named ``__x`` are positional-only by the PEP 484 double-underscore convention.
class str(Sequence[str]):
    @overload
    def __new__(cls: Type[_T], o: object = ...) -> _T: ...
    @overload
    def __new__(cls: Type[_T], o: bytes, encoding: str = ..., errors: str = ...) -> _T: ...
    def capitalize(self) -> str: ...
    def casefold(self) -> str: ...
    def center(self, __width: int, __fillchar: str = ...) -> str: ...
    def count(self, x: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ...
    def endswith(
        self, __suffix: Union[str, Tuple[str, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
    ) -> bool: ...
    def expandtabs(self, tabsize: int = ...) -> str: ...
    def find(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def format(self, *args: object, **kwargs: object) -> str: ...
    def format_map(self, map: _FormatMapMapping) -> str: ...
    def index(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def isalnum(self) -> bool: ...
    def isalpha(self) -> bool: ...
    if sys.version_info >= (3, 7):  # isascii was added in Python 3.7
        def isascii(self) -> bool: ...
    def isdecimal(self) -> bool: ...
    def isdigit(self) -> bool: ...
    def isidentifier(self) -> bool: ...
    def islower(self) -> bool: ...
    def isnumeric(self) -> bool: ...
    def isprintable(self) -> bool: ...
    def isspace(self) -> bool: ...
    def istitle(self) -> bool: ...
    def isupper(self) -> bool: ...
    def join(self, __iterable: Iterable[str]) -> str: ...
    def ljust(self, __width: int, __fillchar: str = ...) -> str: ...
    def lower(self) -> str: ...
    def lstrip(self, __chars: Optional[str] = ...) -> str: ...
    def partition(self, __sep: str) -> Tuple[str, str, str]: ...
    def replace(self, __old: str, __new: str, __count: int = ...) -> str: ...
    if sys.version_info >= (3, 9):  # removeprefix/removesuffix were added in Python 3.9
        def removeprefix(self, __prefix: str) -> str: ...
        def removesuffix(self, __suffix: str) -> str: ...
    def rfind(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def rindex(self, __sub: str, __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def rjust(self, __width: int, __fillchar: str = ...) -> str: ...
    def rpartition(self, __sep: str) -> Tuple[str, str, str]: ...
    def rsplit(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
    def rstrip(self, __chars: Optional[str] = ...) -> str: ...
    def split(self, sep: Optional[str] = ..., maxsplit: int = ...) -> List[str]: ...
    def splitlines(self, keepends: bool = ...) -> List[str]: ...
    def startswith(
        self, __prefix: Union[str, Tuple[str, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
    ) -> bool: ...
    def strip(self, __chars: Optional[str] = ...) -> str: ...
    def swapcase(self) -> str: ...
    def title(self) -> str: ...
    def translate(self, __table: Union[Mapping[int, Union[int, str, None]], Sequence[Union[int, str, None]]]) -> str: ...
    def upper(self) -> str: ...
    def zfill(self, __width: int) -> str: ...
    # maketrans: one-argument dict form vs. two/three-argument string form.
    @staticmethod
    @overload
    def maketrans(__x: Union[Dict[int, _T], Dict[str, _T], Dict[Union[str, int], _T]]) -> Dict[int, _T]: ...
    @staticmethod
    @overload
    def maketrans(__x: str, __y: str, __z: Optional[str] = ...) -> Dict[int, Union[int, None]]: ...
    def __add__(self, s: str) -> str: ...
    # Incompatible with Sequence.__contains__
    def __contains__(self, o: str) -> bool: ...  # type: ignore
    def __eq__(self, x: object) -> bool: ...
    def __ge__(self, x: str) -> bool: ...
    def __getitem__(self, i: Union[int, slice]) -> str: ...
    def __gt__(self, x: str) -> bool: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[str]: ...
    def __le__(self, x: str) -> bool: ...
    def __len__(self) -> int: ...
    def __lt__(self, x: str) -> bool: ...
    def __mod__(self, x: Any) -> str: ...
    def __mul__(self, n: int) -> str: ...
    def __ne__(self, x: object) -> bool: ...
    def __repr__(self) -> str: ...
    def __rmul__(self, n: int) -> str: ...
    def __str__(self) -> str: ...
    def __getnewargs__(self) -> Tuple[str]: ...
# Stub declarations for the builtin ``bytes`` type: signatures only, all bodies are ``...``.
class bytes(ByteString):
    # Constructor forms: iterable of ints, encoded str, zero-filled length, empty, or buffer-like.
    @overload
    def __new__(cls: Type[_T], ints: Iterable[int]) -> _T: ...
    @overload
    def __new__(cls: Type[_T], string: str, encoding: str, errors: str = ...) -> _T: ...
    @overload
    def __new__(cls: Type[_T], length: int) -> _T: ...
    @overload
    def __new__(cls: Type[_T]) -> _T: ...
    @overload
    def __new__(cls: Type[_T], o: SupportsBytes) -> _T: ...
    def capitalize(self) -> bytes: ...
    def center(self, __width: int, __fillchar: bytes = ...) -> bytes: ...
    def count(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def decode(self, encoding: str = ..., errors: str = ...) -> str: ...
    def endswith(
        self, __suffix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
    ) -> bool: ...
    def expandtabs(self, tabsize: int = ...) -> bytes: ...
    def find(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    if sys.version_info >= (3, 8):  # hex() gained sep/bytes_per_sep in Python 3.8
        def hex(self, sep: Union[str, bytes] = ..., bytes_per_sep: int = ...) -> str: ...
    else:
        def hex(self) -> str: ...
    def index(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def isalnum(self) -> bool: ...
    def isalpha(self) -> bool: ...
    if sys.version_info >= (3, 7):
        def isascii(self) -> bool: ...
    def isdigit(self) -> bool: ...
    def islower(self) -> bool: ...
    def isspace(self) -> bool: ...
    def istitle(self) -> bool: ...
    def isupper(self) -> bool: ...
    def join(self, __iterable_of_bytes: Iterable[Union[ByteString, memoryview]]) -> bytes: ...
    def ljust(self, __width: int, __fillchar: bytes = ...) -> bytes: ...
    def lower(self) -> bytes: ...
    def lstrip(self, __bytes: Optional[bytes] = ...) -> bytes: ...
    def partition(self, __sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
    def replace(self, __old: bytes, __new: bytes, __count: int = ...) -> bytes: ...
    if sys.version_info >= (3, 9):
        def removeprefix(self, __prefix: bytes) -> bytes: ...
        def removesuffix(self, __suffix: bytes) -> bytes: ...
    def rfind(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def rindex(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def rjust(self, __width: int, __fillchar: bytes = ...) -> bytes: ...
    def rpartition(self, __sep: bytes) -> Tuple[bytes, bytes, bytes]: ...
    def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ...
    def rstrip(self, __bytes: Optional[bytes] = ...) -> bytes: ...
    def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytes]: ...
    def splitlines(self, keepends: bool = ...) -> List[bytes]: ...
    def startswith(
        self, __prefix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
    ) -> bool: ...
    def strip(self, __bytes: Optional[bytes] = ...) -> bytes: ...
    def swapcase(self) -> bytes: ...
    def title(self) -> bytes: ...
    def translate(self, __table: Optional[bytes], delete: bytes = ...) -> bytes: ...
    def upper(self) -> bytes: ...
    def zfill(self, __width: int) -> bytes: ...
    @classmethod
    def fromhex(cls, __s: str) -> bytes: ...
    @classmethod
    def maketrans(cls, frm: bytes, to: bytes) -> bytes: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[int]: ...
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    def __int__(self) -> int: ...
    def __float__(self) -> float: ...
    def __hash__(self) -> int: ...
    # Indexing with an int yields the byte value; slicing yields bytes.
    @overload
    def __getitem__(self, i: int) -> int: ...
    @overload
    def __getitem__(self, s: slice) -> bytes: ...
    def __add__(self, s: bytes) -> bytes: ...
    def __mul__(self, n: int) -> bytes: ...
    def __rmul__(self, n: int) -> bytes: ...
    def __mod__(self, value: Any) -> bytes: ...
    # Incompatible with Sequence.__contains__
    def __contains__(self, o: Union[int, bytes]) -> bool: ...  # type: ignore
    def __eq__(self, x: object) -> bool: ...
    def __ne__(self, x: object) -> bool: ...
    def __lt__(self, x: bytes) -> bool: ...
    def __le__(self, x: bytes) -> bool: ...
    def __gt__(self, x: bytes) -> bool: ...
    def __ge__(self, x: bytes) -> bool: ...
    def __getnewargs__(self) -> Tuple[bytes]: ...
# Stub declarations for the builtin ``bytearray`` type (mutable counterpart of bytes):
# signatures only, all bodies are ``...``.
class bytearray(MutableSequence[int], ByteString):
    @overload
    def __init__(self) -> None: ...
    @overload
    def __init__(self, ints: Iterable[int]) -> None: ...
    @overload
    def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ...
    @overload
    def __init__(self, length: int) -> None: ...
    def capitalize(self) -> bytearray: ...
    def center(self, __width: int, __fillchar: bytes = ...) -> bytearray: ...
    def count(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def copy(self) -> bytearray: ...
    def decode(self, encoding: str = ..., errors: str = ...) -> str: ...
    def endswith(
        self, __suffix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
    ) -> bool: ...
    def expandtabs(self, tabsize: int = ...) -> bytearray: ...
    def find(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    if sys.version_info >= (3, 8):  # hex() gained sep/bytes_per_sep in Python 3.8
        def hex(self, sep: Union[str, bytes] = ..., bytes_per_sep: int = ...) -> str: ...
    else:
        def hex(self) -> str: ...
    def index(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def insert(self, __index: int, __item: int) -> None: ...
    def isalnum(self) -> bool: ...
    def isalpha(self) -> bool: ...
    if sys.version_info >= (3, 7):
        def isascii(self) -> bool: ...
    def isdigit(self) -> bool: ...
    def islower(self) -> bool: ...
    def isspace(self) -> bool: ...
    def istitle(self) -> bool: ...
    def isupper(self) -> bool: ...
    def join(self, __iterable_of_bytes: Iterable[Union[ByteString, memoryview]]) -> bytearray: ...
    def ljust(self, __width: int, __fillchar: bytes = ...) -> bytearray: ...
    def lower(self) -> bytearray: ...
    def lstrip(self, __bytes: Optional[bytes] = ...) -> bytearray: ...
    def partition(self, __sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
    if sys.version_info >= (3, 9):
        def removeprefix(self, __prefix: bytes) -> bytearray: ...
        def removesuffix(self, __suffix: bytes) -> bytearray: ...
    def replace(self, __old: bytes, __new: bytes, __count: int = ...) -> bytearray: ...
    def rfind(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def rindex(self, __sub: Union[bytes, int], __start: Optional[int] = ..., __end: Optional[int] = ...) -> int: ...
    def rjust(self, __width: int, __fillchar: bytes = ...) -> bytearray: ...
    def rpartition(self, __sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ...
    def rsplit(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ...
    def rstrip(self, __bytes: Optional[bytes] = ...) -> bytearray: ...
    def split(self, sep: Optional[bytes] = ..., maxsplit: int = ...) -> List[bytearray]: ...
    def splitlines(self, keepends: bool = ...) -> List[bytearray]: ...
    def startswith(
        self, __prefix: Union[bytes, Tuple[bytes, ...]], __start: Optional[int] = ..., __end: Optional[int] = ...
    ) -> bool: ...
    def strip(self, __bytes: Optional[bytes] = ...) -> bytearray: ...
    def swapcase(self) -> bytearray: ...
    def title(self) -> bytearray: ...
    def translate(self, __table: Optional[bytes], delete: bytes = ...) -> bytearray: ...
    def upper(self) -> bytearray: ...
    def zfill(self, __width: int) -> bytearray: ...
    @classmethod
    def fromhex(cls, __string: str) -> bytearray: ...
    @classmethod
    def maketrans(cls, __frm: bytes, __to: bytes) -> bytes: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[int]: ...
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    def __int__(self) -> int: ...
    def __float__(self) -> float: ...
    # Mutable, therefore unhashable.
    __hash__: None  # type: ignore
    @overload
    def __getitem__(self, i: int) -> int: ...
    @overload
    def __getitem__(self, s: slice) -> bytearray: ...
    @overload
    def __setitem__(self, i: int, x: int) -> None: ...
    @overload
    def __setitem__(self, s: slice, x: Union[Iterable[int], bytes]) -> None: ...
    def __delitem__(self, i: Union[int, slice]) -> None: ...
    def __add__(self, s: bytes) -> bytearray: ...
    def __iadd__(self, s: Iterable[int]) -> bytearray: ...
    def __mul__(self, n: int) -> bytearray: ...
    def __rmul__(self, n: int) -> bytearray: ...
    def __imul__(self, n: int) -> bytearray: ...
    def __mod__(self, value: Any) -> bytes: ...
    # Incompatible with Sequence.__contains__
    def __contains__(self, o: Union[int, bytes]) -> bool: ...  # type: ignore
    def __eq__(self, x: object) -> bool: ...
    def __ne__(self, x: object) -> bool: ...
    def __lt__(self, x: bytes) -> bool: ...
    def __le__(self, x: bytes) -> bool: ...
    def __gt__(self, x: bytes) -> bool: ...
    def __ge__(self, x: bytes) -> bool: ...
# Stub declarations for the builtin ``memoryview`` type: signatures only, all bodies are ``...``.
class memoryview(Sized, Container[int]):
    format: str
    itemsize: int
    shape: Optional[Tuple[int, ...]]
    strides: Optional[Tuple[int, ...]]
    suboffsets: Optional[Tuple[int, ...]]
    readonly: bool
    ndim: int
    obj: Union[bytes, bytearray]
    c_contiguous: bool
    f_contiguous: bool
    contiguous: bool
    nbytes: int
    def __init__(self, obj: ReadableBuffer) -> None: ...
    # Usable as a context manager; __exit__ releases the underlying buffer.
    def __enter__(self) -> memoryview: ...
    def __exit__(
        self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
    ) -> None: ...
    def cast(self, format: str, shape: Union[List[int], Tuple[int]] = ...) -> memoryview: ...
    @overload
    def __getitem__(self, i: int) -> int: ...
    @overload
    def __getitem__(self, s: slice) -> memoryview: ...
    def __contains__(self, x: object) -> bool: ...
    def __iter__(self) -> Iterator[int]: ...
    def __len__(self) -> int: ...
    @overload
    def __setitem__(self, s: slice, o: bytes) -> None: ...
    @overload
    def __setitem__(self, i: int, o: int) -> None: ...
    if sys.version_info >= (3, 8):  # tobytes() gained the order parameter in 3.8
        def tobytes(self, order: Optional[Literal["C", "F", "A"]] = ...) -> bytes: ...
    else:
        def tobytes(self) -> bytes: ...
    def tolist(self) -> List[int]: ...
    if sys.version_info >= (3, 8):
        def toreadonly(self) -> memoryview: ...
    def release(self) -> None: ...
    if sys.version_info >= (3, 8):
        def hex(self, sep: Union[str, bytes] = ..., bytes_per_sep: int = ...) -> str: ...
    else:
        def hex(self) -> str: ...
# Stub declarations for the builtin ``bool`` type. Bitwise ops on two bools stay bool,
# while mixing with int widens to int — hence the paired overloads for each operator.
class bool(int):
    def __new__(cls: Type[_T], __o: object = ...) -> _T: ...
    @overload
    def __and__(self, x: bool) -> bool: ...
    @overload
    def __and__(self, x: int) -> int: ...
    @overload
    def __or__(self, x: bool) -> bool: ...
    @overload
    def __or__(self, x: int) -> int: ...
    @overload
    def __xor__(self, x: bool) -> bool: ...
    @overload
    def __xor__(self, x: int) -> int: ...
    @overload
    def __rand__(self, x: bool) -> bool: ...
    @overload
    def __rand__(self, x: int) -> int: ...
    @overload
    def __ror__(self, x: bool) -> bool: ...
    @overload
    def __ror__(self, x: int) -> int: ...
    @overload
    def __rxor__(self, x: bool) -> bool: ...
    @overload
    def __rxor__(self, x: int) -> int: ...
    def __getnewargs__(self) -> Tuple[int]: ...
# Stub declarations for the builtin ``slice`` type: signatures only, all bodies are ``...``.
class slice(object):
    start: Any
    step: Any
    stop: Any
    @overload
    def __init__(self, stop: Any) -> None: ...
    @overload
    def __init__(self, start: Any, stop: Any, step: Any = ...) -> None: ...
    # Slices are unhashable.
    __hash__: None  # type: ignore
    def indices(self, len: int) -> Tuple[int, int, int]: ...
# Stub declarations for the builtin ``tuple`` type (covariant in its element type).
class tuple(Sequence[_T_co], Generic[_T_co]):
    def __new__(cls: Type[_T], iterable: Iterable[_T_co] = ...) -> _T: ...
    def __len__(self) -> int: ...
    def __contains__(self, x: object) -> bool: ...
    @overload
    def __getitem__(self, x: int) -> _T_co: ...
    @overload
    def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ...
    def __iter__(self) -> Iterator[_T_co]: ...
    def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ...
    def __le__(self, x: Tuple[_T_co, ...]) -> bool: ...
    def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ...
    def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ...
    @overload
    def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ...
    @overload
    def __add__(self, x: Tuple[Any, ...]) -> Tuple[Any, ...]: ...
    def __mul__(self, n: int) -> Tuple[_T_co, ...]: ...
    def __rmul__(self, n: int) -> Tuple[_T_co, ...]: ...
    def count(self, __value: Any) -> int: ...
    def index(self, __value: Any, __start: int = ..., __stop: int = ...) -> int: ...
    if sys.version_info >= (3, 9):  # PEP 585 runtime subscription, e.g. tuple[int, str]
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
# Attribute surface of plain Python functions, declared here for the type checker.
class function:
    # TODO not defined in builtins!
    __name__: str
    __module__: str
    __code__: CodeType
    __qualname__: str
    __annotations__: Dict[str, Any]
# Stub declarations for the builtin ``list`` type: signatures only, all bodies are ``...``.
class list(MutableSequence[_T], Generic[_T]):
    @overload
    def __init__(self) -> None: ...
    @overload
    def __init__(self, iterable: Iterable[_T]) -> None: ...
    def clear(self) -> None: ...
    def copy(self) -> List[_T]: ...
    def append(self, __object: _T) -> None: ...
    def extend(self, __iterable: Iterable[_T]) -> None: ...
    def pop(self, __index: int = ...) -> _T: ...
    def index(self, __value: _T, __start: int = ..., __stop: int = ...) -> int: ...
    def count(self, __value: _T) -> int: ...
    def insert(self, __index: int, __object: _T) -> None: ...
    def remove(self, __value: _T) -> None: ...
    def reverse(self) -> None: ...
    # Keyless sort requires the elements themselves to support "<"; with a key, any element type works.
    @overload
    def sort(self: List[SupportsLessThanT], *, key: None = ..., reverse: bool = ...) -> None: ...
    @overload
    def sort(self, *, key: Callable[[_T], SupportsLessThan], reverse: bool = ...) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __str__(self) -> str: ...
    # Mutable, therefore unhashable.
    __hash__: None  # type: ignore
    @overload
    def __getitem__(self, i: _SupportsIndex) -> _T: ...
    @overload
    def __getitem__(self, s: slice) -> List[_T]: ...
    @overload
    def __setitem__(self, i: _SupportsIndex, o: _T) -> None: ...
    @overload
    def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
    def __delitem__(self, i: Union[_SupportsIndex, slice]) -> None: ...
    def __add__(self, x: List[_T]) -> List[_T]: ...
    def __iadd__(self: _S, x: Iterable[_T]) -> _S: ...
    def __mul__(self, n: int) -> List[_T]: ...
    def __rmul__(self, n: int) -> List[_T]: ...
    def __imul__(self: _S, n: int) -> _S: ...
    def __contains__(self, o: object) -> bool: ...
    def __reversed__(self) -> Iterator[_T]: ...
    def __gt__(self, x: List[_T]) -> bool: ...
    def __ge__(self, x: List[_T]) -> bool: ...
    def __lt__(self, x: List[_T]) -> bool: ...
    def __le__(self, x: List[_T]) -> bool: ...
    if sys.version_info >= (3, 9):  # PEP 585 runtime subscription, e.g. list[int]
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
# Stub declarations for the builtin ``dict`` type: signatures only, all bodies are ``...``.
class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
    # Constructor forms: empty, kwargs-only (str keys), mapping + kwargs, iterable of pairs + kwargs.
    @overload
    def __init__(self: Dict[_KT, _VT]) -> None: ...
    @overload
    def __init__(self: Dict[str, _VT], **kwargs: _VT) -> None: ...
    @overload
    def __init__(self, map: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
    def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ...
    def clear(self) -> None: ...
    def copy(self) -> Dict[_KT, _VT]: ...
    def popitem(self) -> Tuple[_KT, _VT]: ...
    def setdefault(self, __key: _KT, __default: _VT = ...) -> _VT: ...
    @overload
    def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
    @overload
    def update(self, **kwargs: _VT) -> None: ...
    def keys(self) -> KeysView[_KT]: ...
    def values(self) -> ValuesView[_VT]: ...
    def items(self) -> ItemsView[_KT, _VT]: ...
    @classmethod
    @overload
    def fromkeys(cls, __iterable: Iterable[_T]) -> Dict[_T, Any]: ...
    @classmethod
    @overload
    def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> Dict[_T, _S]: ...
    def __len__(self) -> int: ...
    def __getitem__(self, k: _KT) -> _VT: ...
    def __setitem__(self, k: _KT, v: _VT) -> None: ...
    def __delitem__(self, v: _KT) -> None: ...
    def __iter__(self) -> Iterator[_KT]: ...
    if sys.version_info >= (3, 8):  # dicts became reversible in Python 3.8
        def __reversed__(self) -> Iterator[_KT]: ...
    def __str__(self) -> str: ...
    # Mutable, therefore unhashable.
    __hash__: None  # type: ignore
    if sys.version_info >= (3, 9):  # PEP 585 subscription and PEP 584 dict-union operators
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
        def __or__(self, __value: Mapping[_KT, _VT]) -> Dict[_KT, _VT]: ...
        def __ior__(self, __value: Mapping[_KT, _VT]) -> Dict[_KT, _VT]: ...
# Stub declarations for the builtin ``set`` type: signatures only, all bodies are ``...``.
class set(MutableSet[_T], Generic[_T]):
    def __init__(self, iterable: Iterable[_T] = ...) -> None: ...
    def add(self, element: _T) -> None: ...
    def clear(self) -> None: ...
    def copy(self) -> Set[_T]: ...
    def difference(self, *s: Iterable[Any]) -> Set[_T]: ...
    def difference_update(self, *s: Iterable[Any]) -> None: ...
    def discard(self, element: _T) -> None: ...
    def intersection(self, *s: Iterable[Any]) -> Set[_T]: ...
    def intersection_update(self, *s: Iterable[Any]) -> None: ...
    def isdisjoint(self, s: Iterable[Any]) -> bool: ...
    def issubset(self, s: Iterable[Any]) -> bool: ...
    def issuperset(self, s: Iterable[Any]) -> bool: ...
    def pop(self) -> _T: ...
    def remove(self, element: _T) -> None: ...
    def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ...
    def symmetric_difference_update(self, s: Iterable[_T]) -> None: ...
    def union(self, *s: Iterable[_T]) -> Set[_T]: ...
    def update(self, *s: Iterable[_T]) -> None: ...
    def __len__(self) -> int: ...
    def __contains__(self, o: object) -> bool: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __str__(self) -> str: ...
    def __and__(self, s: AbstractSet[object]) -> Set[_T]: ...
    def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ...
    def __or__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
    def __ior__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
    def __sub__(self, s: AbstractSet[Optional[_T]]) -> Set[_T]: ...
    def __isub__(self, s: AbstractSet[Optional[_T]]) -> Set[_T]: ...
    def __xor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
    def __ixor__(self, s: AbstractSet[_S]) -> Set[Union[_T, _S]]: ...
    # <=, <, >=, > encode (proper-)subset/superset comparisons.
    def __le__(self, s: AbstractSet[object]) -> bool: ...
    def __lt__(self, s: AbstractSet[object]) -> bool: ...
    def __ge__(self, s: AbstractSet[object]) -> bool: ...
    def __gt__(self, s: AbstractSet[object]) -> bool: ...
    # Mutable, therefore unhashable.
    __hash__: None  # type: ignore
    if sys.version_info >= (3, 9):  # PEP 585 runtime subscription, e.g. set[int]
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
# Stub declarations for the builtin ``frozenset`` type (immutable, hashable set; covariant).
class frozenset(AbstractSet[_T_co], Generic[_T_co]):
    def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
    def copy(self) -> FrozenSet[_T_co]: ...
    def difference(self, *s: Iterable[object]) -> FrozenSet[_T_co]: ...
    def intersection(self, *s: Iterable[object]) -> FrozenSet[_T_co]: ...
    def isdisjoint(self, s: Iterable[_T_co]) -> bool: ...
    def issubset(self, s: Iterable[object]) -> bool: ...
    def issuperset(self, s: Iterable[object]) -> bool: ...
    def symmetric_difference(self, s: Iterable[_T_co]) -> FrozenSet[_T_co]: ...
    def union(self, *s: Iterable[_T_co]) -> FrozenSet[_T_co]: ...
    def __len__(self) -> int: ...
    def __contains__(self, o: object) -> bool: ...
    def __iter__(self) -> Iterator[_T_co]: ...
    def __str__(self) -> str: ...
    def __and__(self, s: AbstractSet[_T_co]) -> FrozenSet[_T_co]: ...
    def __or__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T_co, _S]]: ...
    def __sub__(self, s: AbstractSet[_T_co]) -> FrozenSet[_T_co]: ...
    def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[Union[_T_co, _S]]: ...
    # <=, <, >=, > encode (proper-)subset/superset comparisons.
    def __le__(self, s: AbstractSet[object]) -> bool: ...
    def __lt__(self, s: AbstractSet[object]) -> bool: ...
    def __ge__(self, s: AbstractSet[object]) -> bool: ...
    def __gt__(self, s: AbstractSet[object]) -> bool: ...
    if sys.version_info >= (3, 9):  # PEP 585 runtime subscription
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
# Stub declarations for the builtin ``enumerate`` iterator of (index, item) pairs.
class enumerate(Iterator[Tuple[int, _T]], Generic[_T]):
    def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ...
    def __iter__(self) -> Iterator[Tuple[int, _T]]: ...
    def __next__(self) -> Tuple[int, _T]: ...
    if sys.version_info >= (3, 9):  # PEP 585 runtime subscription
        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
# Stub declarations for the builtin ``range`` type: signatures only, all bodies are ``...``.
class range(Sequence[int]):
    start: int
    stop: int
    step: int
    @overload
    def __init__(self, stop: _SupportsIndex) -> None: ...
    @overload
    def __init__(self, start: _SupportsIndex, stop: _SupportsIndex, step: _SupportsIndex = ...) -> None: ...
    def count(self, value: int) -> int: ...
    # NOTE(review): the signature here omits the start/stop arguments that
    # Sequence.index declares, hence the ignore.
    def index(self, value: int) -> int: ...  # type: ignore
    def __len__(self) -> int: ...
    def __contains__(self, o: object) -> bool: ...
    def __iter__(self) -> Iterator[int]: ...
    @overload
    def __getitem__(self, i: _SupportsIndex) -> int: ...
    @overload
    def __getitem__(self, s: slice) -> range: ...
    def __repr__(self) -> str: ...
    def __reversed__(self) -> Iterator[int]: ...
# Stub declarations for the builtin ``property`` descriptor: signatures only.
class property(object):
    def __init__(
        self,
        fget: Optional[Callable[[Any], Any]] = ...,
        fset: Optional[Callable[[Any, Any], None]] = ...,
        fdel: Optional[Callable[[Any], None]] = ...,
        doc: Optional[str] = ...,
    ) -> None: ...
    # getter/setter/deleter return a new property with the given accessor replaced.
    def getter(self, fget: Callable[[Any], Any]) -> property: ...
    def setter(self, fset: Callable[[Any, Any], None]) -> property: ...
    def deleter(self, fdel: Callable[[Any], None]) -> property: ...
    # Descriptor protocol.
    def __get__(self, obj: Any, type: Optional[type] = ...) -> Any: ...
    def __set__(self, obj: Any, value: Any) -> None: ...
    def __delete__(self, obj: Any) -> None: ...
    def fget(self) -> Any: ...
    def fset(self, value: Any) -> None: ...
    def fdel(self) -> None: ...
# Singleton type of the ``NotImplemented`` sentinel, declared below.
class _NotImplementedType(Any):  # type: ignore
    # A little weird, but typing the __call__ as NotImplemented makes the error message
    # for NotImplemented() much better
    __call__: NotImplemented  # type: ignore

NotImplemented: _NotImplementedType
# Builtin-function stubs (abs .. chr): signatures only, all bodies are ``...``.
def abs(__x: SupportsAbs[_T]) -> _T: ...
def all(__iterable: Iterable[object]) -> bool: ...
def any(__iterable: Iterable[object]) -> bool: ...
def ascii(__obj: object) -> str: ...
def bin(__number: Union[int, _SupportsIndex]) -> str: ...
if sys.version_info >= (3, 7):  # breakpoint() was added by PEP 553 in Python 3.7
    def breakpoint(*args: Any, **kws: Any) -> None: ...
def callable(__obj: object) -> bool: ...
def chr(__i: int) -> str: ...
# This class is to be exported as PathLike from os,
# but we define it here as _PathLike to avoid import cycle issues.
# See https://github.com/python/typeshed/pull/991#issuecomment-288160993
_AnyStr_co = TypeVar("_AnyStr_co", str, bytes, covariant=True)
# Structural protocol for objects usable as filesystem paths (str- or bytes-valued).
@runtime_checkable
class _PathLike(Protocol[_AnyStr_co]):
    def __fspath__(self) -> _AnyStr_co: ...
# compile(): Python 3.8 added the keyword-only, underscore-prefixed
# _feature_version parameter; the pre-3.8 signature lacks it.
if sys.version_info >= (3, 8):
    def compile(
        source: Union[str, bytes, mod, AST],
        filename: Union[str, bytes, _PathLike[Any]],
        mode: str,
        flags: int = ...,
        dont_inherit: int = ...,
        optimize: int = ...,
        *,
        _feature_version: int = ...,
    ) -> Any: ...

else:
    def compile(
        source: Union[str, bytes, mod, AST],
        filename: Union[str, bytes, _PathLike[Any]],
        mode: str,
        flags: int = ...,
        dont_inherit: int = ...,
        optimize: int = ...,
    ) -> Any: ...
# Builtin-function stubs (copyright .. divmod): signatures only.
def copyright() -> None: ...
def credits() -> None: ...
def delattr(__obj: Any, __name: str) -> None: ...
def dir(__o: object = ...) -> List[str]: ...
# divmod dispatches to __divmod__ on the left operand or __rdivmod__ on the right.
@overload
def divmod(__x: SupportsDivMod[_T_contra, _T_co], __y: _T_contra) -> _T_co: ...
@overload
def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ...
# Builtin-function stubs (eval, exec, exit): signatures only.
def eval(
    __source: Union[str, bytes, CodeType], __globals: Optional[Dict[str, Any]] = ..., __locals: Optional[Mapping[str, Any]] = ...
) -> Any: ...
def exec(
    __source: Union[str, bytes, CodeType],
    __globals: Optional[Dict[str, Any]] = ...,
    __locals: Optional[Mapping[str, Any]] = ...,
) -> Any: ...
def exit(code: object = ...) -> NoReturn: ...
# filter(None, it) drops falsy (incl. None) elements, narrowing Optional[_T] to _T;
# with a predicate the element type is preserved.
@overload
def filter(__function: None, __iterable: Iterable[Optional[_T]]) -> Iterator[_T]: ...
@overload
def filter(__function: Callable[[_T], Any], __iterable: Iterable[_T]) -> Iterator[_T]: ...
def format(__value: object, __format_spec: str = ...) -> str: ...  # TODO unicode
def getattr(__o: Any, name: str, __default: Any = ...) -> Any: ...
def globals() -> Dict[str, Any]: ...
def hasattr(__obj: Any, __name: str) -> bool: ...
def hash(__obj: object) -> int: ...
def help(*args: Any, **kwds: Any) -> None: ...
def hex(__number: Union[int, _SupportsIndex]) -> str: ...
def id(__obj: object) -> int: ...
def input(__prompt: Any = ...) -> str: ...
# iter(): one-argument iterable form, plus the two-argument callable/sentinel form.
@overload
def iter(__iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
def iter(__function: Callable[[], Optional[_T]], __sentinel: None) -> Iterator[_T]: ...
@overload
def iter(__function: Callable[[], _T], __sentinel: Any) -> Iterator[_T]: ...
def isinstance(__obj: object, __class_or_tuple: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ...
def issubclass(__cls: type, __class_or_tuple: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]) -> bool: ...
def len(__obj: Sized) -> int: ...
def license() -> None: ...
def locals() -> Dict[str, Any]: ...
# map(): one fully typed overload per arity up to five iterables, then an
# untyped catch-all for six or more.
@overload
def map(__func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Iterator[_S]: ...
@overload
def map(__func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[_S]: ...
@overload
def map(
    __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]
) -> Iterator[_S]: ...
@overload
def map(
    __func: Callable[[_T1, _T2, _T3, _T4], _S],
    __iter1: Iterable[_T1],
    __iter2: Iterable[_T2],
    __iter3: Iterable[_T3],
    __iter4: Iterable[_T4],
) -> Iterator[_S]: ...
@overload
def map(
    __func: Callable[[_T1, _T2, _T3, _T4, _T5], _S],
    __iter1: Iterable[_T1],
    __iter2: Iterable[_T2],
    __iter3: Iterable[_T3],
    __iter4: Iterable[_T4],
    __iter5: Iterable[_T5],
) -> Iterator[_S]: ...
@overload
def map(
    __func: Callable[..., _S],
    __iter1: Iterable[Any],
    __iter2: Iterable[Any],
    __iter3: Iterable[Any],
    __iter4: Iterable[Any],
    __iter5: Iterable[Any],
    __iter6: Iterable[Any],
    *iterables: Iterable[Any],
) -> Iterator[_S]: ...
@overload
def max(
__arg1: SupportsLessThanT, __arg2: SupportsLessThanT, *_args: SupportsLessThanT, key: None = ...
) -> SupportsLessThanT: ...
@overload
def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def max(__iterable: Iterable[SupportsLessThanT], *, key: None = ...) -> SupportsLessThanT: ...
@overload
def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def max(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., default: _T) -> Union[SupportsLessThanT, _T]: ...
@overload
def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsLessThanT], default: _T2) -> Union[_T1, _T2]: ...
@overload
def min(
__arg1: SupportsLessThanT, __arg2: SupportsLessThanT, *_args: SupportsLessThanT, key: None = ...
) -> SupportsLessThanT: ...
@overload
def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def min(__iterable: Iterable[SupportsLessThanT], *, key: None = ...) -> SupportsLessThanT: ...
@overload
def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThanT]) -> _T: ...
@overload
def min(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., default: _T) -> Union[SupportsLessThanT, _T]: ...
@overload
def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsLessThanT], default: _T2) -> Union[_T1, _T2]: ...
@overload
def next(__i: Iterator[_T]) -> _T: ...
@overload
def next(__i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ...
def oct(__number: Union[int, _SupportsIndex]) -> str: ...
_OpenFile = Union[AnyPath, int]
_Opener = Callable[[str, int], int]
# Text mode: always returns a TextIOWrapper
@overload
def open(
file: _OpenFile,
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> TextIOWrapper: ...
# Unbuffered binary mode: returns a FileIO
@overload
def open(
file: _OpenFile,
mode: OpenBinaryMode,
buffering: Literal[0],
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> FileIO: ...
# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter
@overload
def open(
file: _OpenFile,
mode: OpenBinaryModeUpdating,
buffering: Literal[-1, 1] = ...,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BufferedRandom: ...
@overload
def open(
file: _OpenFile,
mode: OpenBinaryModeWriting,
buffering: Literal[-1, 1] = ...,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BufferedWriter: ...
@overload
def open(
file: _OpenFile,
mode: OpenBinaryModeReading,
buffering: Literal[-1, 1] = ...,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BufferedReader: ...
# Buffering cannot be determined: fall back to BinaryIO
@overload
def open(
file: _OpenFile,
mode: OpenBinaryMode,
buffering: int,
encoding: None = ...,
errors: None = ...,
newline: None = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> BinaryIO: ...
# Fallback if mode is not specified
@overload
def open(
file: _OpenFile,
mode: str,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
closefd: bool = ...,
opener: Optional[_Opener] = ...,
) -> IO[Any]: ...
def ord(__c: Union[str, bytes]) -> int: ...
def print(
*values: object,
sep: Optional[str] = ...,
end: Optional[str] = ...,
file: Optional[SupportsWrite[str]] = ...,
flush: bool = ...,
) -> None: ...
_E = TypeVar("_E", contravariant=True)
_M = TypeVar("_M", contravariant=True)
class _SupportsPow2(Protocol[_E, _T_co]):
def __pow__(self, __other: _E) -> _T_co: ...
class _SupportsPow3(Protocol[_E, _M, _T_co]):
def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ...
if sys.version_info >= (3, 8):
@overload
def pow(base: int, exp: int, mod: None = ...) -> Any: ... # returns int or float depending on whether exp is non-negative
@overload
def pow(base: int, exp: int, mod: int) -> int: ...
@overload
def pow(base: float, exp: float, mod: None = ...) -> float: ...
@overload
def pow(base: _SupportsPow2[_E, _T_co], exp: _E) -> _T_co: ...
@overload
def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ...
else:
@overload
def pow(
__base: int, __exp: int, __mod: None = ...
) -> Any: ... # returns int or float depending on whether exp is non-negative
@overload
def pow(__base: int, __exp: int, __mod: int) -> int: ...
@overload
def pow(__base: float, __exp: float, __mod: None = ...) -> float: ...
@overload
def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E) -> _T_co: ...
@overload
def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M) -> _T_co: ...
def quit(code: object = ...) -> NoReturn: ...
@overload
def reversed(__sequence: Sequence[_T]) -> Iterator[_T]: ...
@overload
def reversed(__sequence: Reversible[_T]) -> Iterator[_T]: ...
def repr(__obj: object) -> str: ...
@overload
def round(number: SupportsRound[Any]) -> int: ...
@overload
def round(number: SupportsRound[Any], ndigits: None) -> int: ...
@overload
def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
def setattr(__obj: Any, __name: str, __value: Any) -> None: ...
@overload
def sorted(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., reverse: bool = ...) -> List[SupportsLessThanT]: ...
@overload
def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThan], reverse: bool = ...) -> List[_T]: ...
if sys.version_info >= (3, 8):
@overload
def sum(__iterable: Iterable[_T]) -> Union[_T, int]: ...
@overload
def sum(__iterable: Iterable[_T], start: _S) -> Union[_T, _S]: ...
else:
@overload
def sum(__iterable: Iterable[_T]) -> Union[_T, int]: ...
@overload
def sum(__iterable: Iterable[_T], __start: _S) -> Union[_T, _S]: ...
def vars(__object: Any = ...) -> Dict[str, Any]: ...
@overload
def zip(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
@overload
def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Iterator[Tuple[_T1, _T2]]: ...
@overload
def zip(__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> Iterator[Tuple[_T1, _T2, _T3]]: ...
@overload
def zip(
__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4]
) -> Iterator[Tuple[_T1, _T2, _T3, _T4]]: ...
@overload
def zip(
__iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5]
) -> Iterator[Tuple[_T1, _T2, _T3, _T4, _T5]]: ...
@overload
def zip(
__iter1: Iterable[Any],
__iter2: Iterable[Any],
__iter3: Iterable[Any],
__iter4: Iterable[Any],
__iter5: Iterable[Any],
__iter6: Iterable[Any],
*iterables: Iterable[Any],
) -> Iterator[Tuple[Any, ...]]: ...
def __import__(
name: str,
globals: Optional[Mapping[str, Any]] = ...,
locals: Optional[Mapping[str, Any]] = ...,
fromlist: Sequence[str] = ...,
level: int = ...,
) -> Any: ...
# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
# not exposed anywhere under that name, we make it private here.
class ellipsis: ...
Ellipsis: ellipsis
class BaseException(object):
args: Tuple[Any, ...]
__cause__: Optional[BaseException]
__context__: Optional[BaseException]
__suppress_context__: bool
__traceback__: Optional[TracebackType]
def __init__(self, *args: object) -> None: ...
def __str__(self) -> str: ...
def __repr__(self) -> str: ...
def with_traceback(self: _TBE, tb: Optional[TracebackType]) -> _TBE: ...
class GeneratorExit(BaseException): ...
class KeyboardInterrupt(BaseException): ...
class SystemExit(BaseException):
code: int
class Exception(BaseException): ...
class StopIteration(Exception):
value: Any
_StandardError = Exception
class OSError(Exception):
errno: int
strerror: str
# filename, filename2 are actually Union[str, bytes, None]
filename: Any
filename2: Any
if sys.platform == "win32":
winerror: int
EnvironmentError = OSError
IOError = OSError
if sys.platform == "win32":
WindowsError = OSError
class ArithmeticError(_StandardError): ...
class AssertionError(_StandardError): ...
class AttributeError(_StandardError): ...
class BufferError(_StandardError): ...
class EOFError(_StandardError): ...
class ImportError(_StandardError):
def __init__(self, *args: object, name: Optional[str] = ..., path: Optional[str] = ...) -> None: ...
name: Optional[str]
path: Optional[str]
msg: str # undocumented
class LookupError(_StandardError): ...
class MemoryError(_StandardError): ...
class NameError(_StandardError): ...
class ReferenceError(_StandardError): ...
class RuntimeError(_StandardError): ...
class StopAsyncIteration(Exception):
value: Any
class SyntaxError(_StandardError):
msg: str
lineno: Optional[int]
offset: Optional[int]
text: Optional[str]
filename: Optional[str]
class SystemError(_StandardError): ...
class TypeError(_StandardError): ...
class ValueError(_StandardError): ...
class FloatingPointError(ArithmeticError): ...
class OverflowError(ArithmeticError): ...
class ZeroDivisionError(ArithmeticError): ...
class ModuleNotFoundError(ImportError): ...
class IndexError(LookupError): ...
class KeyError(LookupError): ...
class UnboundLocalError(NameError): ...
class BlockingIOError(OSError):
characters_written: int
class ChildProcessError(OSError): ...
class ConnectionError(OSError): ...
class BrokenPipeError(ConnectionError): ...
class ConnectionAbortedError(ConnectionError): ...
class ConnectionRefusedError(ConnectionError): ...
class ConnectionResetError(ConnectionError): ...
class FileExistsError(OSError): ...
class FileNotFoundError(OSError): ...
class InterruptedError(OSError): ...
class IsADirectoryError(OSError): ...
class NotADirectoryError(OSError): ...
class PermissionError(OSError): ...
class ProcessLookupError(OSError): ...
class TimeoutError(OSError): ...
class NotImplementedError(RuntimeError): ...
class RecursionError(RuntimeError): ...
class IndentationError(SyntaxError): ...
class TabError(IndentationError): ...
class UnicodeError(ValueError): ...
class UnicodeDecodeError(UnicodeError):
encoding: str
object: bytes
start: int
end: int
reason: str
def __init__(self, __encoding: str, __object: bytes, __start: int, __end: int, __reason: str) -> None: ...
class UnicodeEncodeError(UnicodeError):
encoding: str
object: str
start: int
end: int
reason: str
def __init__(self, __encoding: str, __object: str, __start: int, __end: int, __reason: str) -> None: ...
class UnicodeTranslateError(UnicodeError): ...
class Warning(Exception): ...
class UserWarning(Warning): ...
class DeprecationWarning(Warning): ...
class SyntaxWarning(Warning): ...
class RuntimeWarning(Warning): ...
class FutureWarning(Warning): ...
class PendingDeprecationWarning(Warning): ...
class ImportWarning(Warning): ...
class UnicodeWarning(Warning): ...
class BytesWarning(Warning): ...
class ResourceWarning(Warning): ...
| [
"[email protected]"
] | |
b9dac58212d011f1c76f030f0631c55f20b3f02f | 77ab593ed55a6d46b1778f6d41bc70ced3f8cd46 | /face_ID_net/face_1024s/face_1024_vals.py | 2b929cc2b67254c5a37f697a6093fc0d6f3d68f1 | [] | no_license | wosxcc/bot | e93b92fbca79a915feb186160f3f72c99218ffcb | c097f5455bc6264c9f778fb72900475963836153 | refs/heads/master | 2021-06-12T12:43:47.314071 | 2018-12-14T08:51:43 | 2018-12-14T08:51:43 | 128,619,488 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,983 | py | import os
import cv2 as cv
import numpy as np
import random
import tensorflow as tf
from face_ID_net.face_1024s.ID_pb_net1024s import face_net
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
IMG_H=64
IMG_W =64
N_CLASSES =1024
learning_rate =0.001
def face_val(image_arr,run_train):
print('搞毛线啊')
log_dir = './face72/face_big1024/'
with tf.Graph().as_default():
graph = face_net(1, IMG_H,IMG_W, N_CLASSES,learning_rate,2,run_train)
saver = tf.train.Saver()
with tf.Session() as sess:
ckpt = tf.train.get_checkpoint_state(log_dir)
if ckpt and ckpt.model_checkpoint_path:
global_step = ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
saver.restore(sess, ckpt.model_checkpoint_path)
else:
print('没有保存的模型')
if run_train ==True:
pos_d,neg_d = sess.run([graph['d_pos'],graph['d_neg']],feed_dict={graph['x']: np.reshape(image_arr, (3, 64, 64, 3))})
return pos_d, neg_d
elif run_train ==False:
print('下面出错了',len(image_arr),image_arr[0].shape)
anchor_data = sess.run(graph['anchor_out'],feed_dict={graph['x']: np.reshape(image_arr, ( 1, 64, 64, 3))})
print('上面出错了')
return anchor_data
pacth = 'E:/faceID'
for i in range(10):
file = random.sample(os.listdir(pacth),1)[0]
while(1):
negative_file= random.sample(os.listdir(pacth),1)[0]
if negative_file!=file:
break
print(file,negative_file)
anchor_img = random.sample(os.listdir(pacth+'/'+file),1)[0]
while(1):
positive_img = random.sample(os.listdir(pacth+'/'+file),1)[0]
if anchor_img!=positive_img:
break
negative_img = random.sample(os.listdir(pacth+'/'+negative_file),1)[0]
img_anchor=cv.imread(pacth+'/'+file+'/'+anchor_img)
img_positive=cv.imread(pacth+'/'+file+'/'+positive_img)
img_negative=cv.imread(pacth+'/'+negative_file+'/'+negative_img)
sh_anchor=cv.resize(img_anchor,(240,240),interpolation=cv.INTER_CUBIC)
sh_positive=cv.resize(img_positive,(240,240),interpolation=cv.INTER_CUBIC)
sh_negative=cv.resize(img_negative,(240,240),interpolation=cv.INTER_CUBIC)
image_data=[]
image_data.append(cv.resize(img_anchor,(64,64),interpolation=cv.INTER_CUBIC))
image_data.append(cv.resize(img_negative,(64,64),interpolation=cv.INTER_CUBIC))
image_data.append(cv.resize(img_positive,(64,64),interpolation=cv.INTER_CUBIC))
image_data =np.array(image_data,dtype='float32')
image_data =(image_data-128.0)/256.0
anchor_score = face_val(image_data[0],False)
print(anchor_score)
pos_d,neg_d =face_val(image_data,True)
print(pos_d,neg_d)
cv.imshow('anchor', sh_anchor)
cv.imshow('positive', sh_positive)
cv.imshow('negative', sh_negative)
cv.waitKey()
cv.destroyAllWindows()
| [
"[email protected]"
] | |
77e79f9ef67d7b2a99e8a1c2d037a274848b9c17 | ea3272d707f3a6e5d097301d300a0ea97ddd82b5 | /psm/oop1/oop1_2/info_hiding_property.py | b12f77b06d357a78fd4c81646ba553fa9c6dce8c | [] | no_license | gambler1541/BootCamp | d05850f256ed7a8baa02545551176959a66a9bb3 | b025dd07a8fedd58366f96c9b516f134a95138f1 | refs/heads/master | 2020-04-07T07:21:51.363439 | 2019-06-08T11:10:27 | 2019-06-08T11:10:27 | 158,173,790 | 1 | 0 | null | 2018-11-19T06:38:36 | 2018-11-19T06:38:36 | null | UTF-8 | Python | false | false | 830 | py | class Account:
def __init__(self, name, money):
self.user = name
# 인스턴스 멤버 선언이 아니라 setter 메서드를 호출
self.balance = money
@property
def balance(self):
return self._balance
@balance.setter
def balance(self, money):
if money < 0:
return
# 실제 인스턴스 멤버 선언이 일어나는 부분
self._balance = money
if __name__ == '__main__':
my_acnt = Account('greg', 5000)
# setter 함수를 통해 변경을 시도하므로 _balance 메버의 값은 음수로 변경되지 않음
# 음수로 변경되지 않았으므로 실행 결과는 5000이 나옴
my_acnt.balance =- 3000
# getter 함수인 balance() 메서드를 호출해 _balance apaqjdp wjqrms,
print(my_acnt.balance)
| [
"[email protected]"
] | |
b4b2aa8f7d0110d5a1ee9b8e0de04c1e02146c12 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_zoologists.py | d5eaad0aa4529df66ccc13452502429859ae6960 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
#calss header
class _ZOOLOGISTS():
def __init__(self,):
self.name = "ZOOLOGISTS"
self.definitions = zoologist
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['zoologist']
| [
"[email protected]"
] | |
29459d2f2495bd6eabb00953ccd6e2064a3749f5 | d82a8844c7d46c752e567cca41a8ae1c15c975f7 | /API/urls.py | aaae4d1d1c0b11959a544fed6876085e896c1700 | [] | no_license | golammahmud/job_evaluations_project | f1be9f8f8b27c0f9db6539294ccff25254ff08f3 | fe362f2d6bc57e1d550c39263312ef046eb7754c | refs/heads/master | 2023-08-04T10:20:59.442703 | 2021-09-27T02:31:03 | 2021-09-27T02:31:03 | 410,347,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | from django.contrib import admin
from django.urls import path,include
from rest_framework import routers
from .views import UserInputView,UserBasedInputView
from rest_framework_simplejwt.views import TokenObtainPairView,TokenRefreshView
router=routers.DefaultRouter()
router.register('all-userinputs',UserInputView)
router.register('user-based-inputs',UserBasedInputView)
urlpatterns = [
path('', include(router.urls)),
path('api-auth/',include('rest_framework.urls')),
path('get_token/', TokenObtainPairView.as_view(), name='token_obtain_pair'), #get token
path('token_refresh/', TokenRefreshView.as_view(), name='token_refresh'),# get refresh token
] | [
"[email protected]"
] | |
f7863d8927d006aaf6bb1f2450af7fe6550ab070 | e34d4bf879910b8f41068c1efb90915897e53d53 | /system_design_ladder/GeoHashII.py | b15bec1dd5ca21a631b684b5a96092a0772cec5f | [] | no_license | ZhouningMan/LeetCodePython | 6cfc30f0b76f6162502410fef5639fde4801bd74 | cad9585c440efb329c9321648f94c58ded198438 | refs/heads/master | 2020-12-10T03:53:48.824344 | 2020-01-13T02:29:02 | 2020-01-13T02:29:02 | 233,494,907 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,179 | py | class GeoHash:
BASE32 = "0123456789bcdefghjkmnpqrstuvwxyz"
"""
@param: geohash: geohash a base32 string
@return: latitude and longitude a location coordinate pair
"""
def decode(self, geohash):
binary = self._to_bin(geohash)
lon_bin = [binary[i] for i in range(0, len(binary), 2)]
lat_bin = [binary[i] for i in range(1, len(binary), 2)]
longitude = self._bin_to_val(-180, 180, lon_bin)
latitude = self._bin_to_val(-90, 90, lat_bin)
return latitude, longitude
def _to_bin(self, geohash):
binary = ''
for c in geohash:
idx = GeoHash.BASE32.index(c)
b = ''
for i in range(5):
b = str(idx % 2) + b
idx = idx // 2
binary += b
return binary
def _bin_to_val(self, low, high, binary):
for b in binary:
mid = (high + low) / 2
if b == '1': # our value is higher
low = mid
else: # our value is lower
high = mid
return (low + high) / 2
if __name__ == '__main__':
geoHash = GeoHash()
geoHash.decode("wx4g0s")
| [
"[email protected]"
] | |
f6b693f1370e3d80c736a6b08d507d671d4a8bc5 | 008c065391d766fec2f2af252dd8a5e9bf5cb815 | /Even Matrix.py | 7e545a6a78adeb1c5ec75a406ef4644cbe57e481 | [] | no_license | 22Rahul22/Codechef | b261ab43ff5ff64648a75ad1195e33cac2cfec52 | 1f645c779a250a71d75598e1eabad7e52dd6b031 | refs/heads/master | 2022-11-29T21:51:09.578798 | 2020-08-19T06:20:23 | 2020-08-19T06:20:23 | 288,650,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 890 | py | t = int(input())
for _ in range(t):
n = int(input())
arr = [[0 for i in range(n)] for j in range(n)]
sr = 0
er = n
sc = 0
ec = n
z = 0
num = 1
if n % 2 == 0:
x = n // 2
else:
x = 1 + (n // 2)
while z != x:
j = sc
while j < ec:
arr[sr][j] = num
num += 1
j += 1
sr += 1
i = sr
while i < er:
arr[i][ec - 1] = num
num += 1
i += 1
ec -= 1
j = ec - 1
while j >= sc:
arr[er - 1][j] = num
num += 1
j -= 1
er -= 1
i = er - 1
while i >= sr:
arr[i][sc] = num
num += 1
i -= 1
sc += 1
z += 1
for i in range(n):
for j in range(n):
print(arr[i][j], end=" ")
print() | [
"[email protected]"
] | |
30afeecf7a442f626392bcc9b54728254bb8a8be | 60d5ea4f007d49768d250ef394003f554003e4d0 | /python/Linked List/142.Linked List Cycle II.py | dec51f534aabccb931d8e8932d39d11aac643c6f | [] | no_license | EvanJamesMG/Leetcode | dd7771beb119ea1250dbb3b147a09053298cd63b | fa638c7fda3802e9f4e0751a2c4c084edf09a441 | refs/heads/master | 2021-01-10T17:11:10.896393 | 2017-12-01T16:04:44 | 2017-12-01T16:04:44 | 46,968,756 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,911 | py | # coding=utf-8
'''
Given a linked list, return the node where the cycle begins. If there is no cycle, return null.
Note: Do not modify the linked list.
'''
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
'''
使用快慢指针。若链表存在环,两指针必在环中相遇,此时将慢指针移回头结点,
两指针以相同的速度移动,在环开始的节点处再次相遇。
图中(http://www.cnblogs.com/zuoyuan/p/3701877.html),head到环路起点的距离为K,起点到fast和slow的相遇点的距离为M,环路周长为L。假设,在fast和slow相遇时,fast走过了Lfast,slow走过了Lslow。根据题意:
Lslow=K+M;Lfast=K+M+n*L(n为正整数);Lfast=2*Lslow
可以推出:Lslow=n*L;K=n*L-M
则当slow重新回到head,而fast还在相遇点,slow和fast都向前走,且每次走一个节点。
则slow从head走到起点走了K,而fast从相遇点出发也走了K,而fast向前走了距离K后到了哪里呢?由于K=(n-1)*L+(L-M),所以fast转了n-1圈,再走L-M,也到了起点。这样起点就找到了。
'''
class Solution(object):
def detectCycle(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
if head == None or head.next == None:
return None
slow = fast = head
while fast and fast.next:
slow = slow.next
fast = fast.next.next
if fast == slow:
break
if slow == fast:
slow = head
while slow != fast:
slow = slow.next
fast = fast.next
return slow
return None
# if __name__ == "__main__":
#
# result = Solution().numTrees(3)
# print result
| [
"[email protected]"
] | |
6f54f5939a8fda03c24dfa9d9dbe33c08f498424 | 096ccaca86872b03a137edf58221413073d770cb | /helpers/24_web_apis_sources.py | 0a219f85661a944bd17fb1db67075e5cf05ea372 | [] | no_license | DH-heima/webscrapping | f142962b50deed2628052dd7a48098a4afbcbada | 1dc8f81f45db0d4366391c3052c5ab36f4d4bc5d | refs/heads/master | 2022-02-02T23:26:22.520064 | 2019-06-13T13:38:10 | 2019-06-13T13:38:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,587 | py | import os
# from ers import shops, COLLECTION_DATE, web_apis_traffic_sources_csv, web_apis_traffic_sources_aggregates_csv
import os.path as op
import numpy as np
import pandas as pd
BASE_DIR = "/code/mhers"
WAVE_NUMBER = 8
shops = pd.read_excel(op.join(BASE_DIR, "ressources/ERS-referential-shops.xlsx"), index_col=None)
COLLECTION_DATE = "2018-06-10"
web_apis_traffic_sources_csv = os.path.join(BASE_DIR,'data/w_{}/final_csvs'.format(WAVE_NUMBER), 'shopgrid_details - web_apis_traffic_sources_w{}.csv'.format(WAVE_NUMBER))
web_apis_traffic_sources_aggregates_csv = os.path.join(BASE_DIR,'data/w_{}/final_csvs'.format(WAVE_NUMBER), 'shopgrid_summary - web_apis_traffic_sources_w{}.csv'.format(WAVE_NUMBER))
# #####################################################################################################################
# web_apis_demographics_csv
# #####################################################################################################################
# This generates the dummy data and shouldn't be in production
mask = pd.DataFrame({'to_delete': [1]})
df = pd.DataFrame()
for c, row in shops.iterrows():
tmp = pd.DataFrame(mask.copy())
for k in ['shop_id', 'continent', 'country', 'region', 'segment']:
tmp[k] = row[k]
df = df.append(tmp)
df.drop(columns=['to_delete'], inplace=True)
# TODO : delete the random data creation and fetch the data in the proper dataset
df['direct'] = np.random.random(size=(df.shape[0], 1)) * 0.3
df['email'] = np.random.random(size=(df.shape[0], 1)) * 0.2
df['referrals'] = np.random.random(size=(df.shape[0], 1)) * 0.2
df['social'] = np.random.random(size=(df.shape[0], 1)) * 0.1
df['paid_search'] = np.random.random(size=(df.shape[0], 1)) * 0.1
df['display_ads'] = np.random.random(size=(df.shape[0], 1)) * 0.1
df['organic_search'] = 1 - df['direct'] - df['email'] - df['referrals'] - df['social'] - df['paid_search'] - df['display_ads']
# Todo : Time Span is the time over which the aggregates are calculated
df['time_span'] = "Apr. 2016 - Aug. 2018"
# Collection date
print('WARNING : PLEASE ENSURE THE COLLECTION_DATE is accurate :', COLLECTION_DATE)
df['collection_date'] = COLLECTION_DATE
final_cols = ['collection_date', 'time_span', 'continent', 'country', 'region', 'segment', 'shop_id', 'direct', 'email',
'referrals', 'social', 'paid_search', 'display_ads', 'organic_search']
df = df[final_cols]
df.to_csv(web_apis_traffic_sources_csv, sep=';', index=False, encoding='utf-8')
print("File web_apis_traffic_sources_csv stored at : ", web_apis_traffic_sources_csv)
# #####################################################################################################################
# web_apis_demographics_aggregates_csv
# #####################################################################################################################
df['region'].fillna("", inplace=True)
# Aggregating
res = []
agregation_levels_list = [
['continent', 'country', 'region', 'segment'],
['continent', 'country', 'segment'],
['continent', 'segment'],
['segment'],
['continent', 'country', 'region'],
['continent', 'country'],
['continent'],
['collection_date']
]
# All agregations
for agg_level in agregation_levels_list:
dfG2 = df.groupby(agg_level, as_index=False)
dfG2 = dfG2.agg({
'direct': {'direct': 'mean'},
'email': {'email': 'mean'},
'referrals': {'referrals': 'mean'},
'social': {'social': 'mean'},
'paid_search': {'paid_search': 'mean'},
'display_ads': {'display_ads': 'mean'},
'organic_search': {'organic_search': 'mean'},
}).reset_index()
dfG2.columns = dfG2.columns.droplevel(1)
dfG2 = pd.DataFrame(dfG2)
print(agg_level, 'adding', dfG2.shape)
res.append(dfG2)
# Aggregate on all-levels
all_dfs = pd.concat(res, axis=0, ignore_index=True)
# Collection date
print('WARNING : PLEASE ENSURE THE COLLECTION_DATE is accurate :', COLLECTION_DATE)
all_dfs['collection_date'] = COLLECTION_DATE
# Todo : Time Span is the time over which the aggregates are calculated
all_dfs['time_span'] = "Apr. 2016 - Aug. 2018"
final_cols = ['collection_date', 'time_span', 'continent', 'country', 'region', 'segment', 'direct', 'display_ads',
'email', 'organic_search', 'paid_search', 'referrals', 'social']
all_dfs = all_dfs[final_cols]
all_dfs.to_csv(web_apis_traffic_sources_aggregates_csv, sep=';', index=None, encoding='utf-8')
print("File web_apis_traffic_sources_aggregates_csv stored at : ", web_apis_traffic_sources_aggregates_csv, " -")
| [
"[email protected]"
] | |
baba79af33bbf688b0cc90d14d78060c6b946973 | 3a771b72dae1aae406b94726bcbcf73915577b18 | /q56.py | e7701a5af9e0d8749ed043cc4977a73042423870 | [] | no_license | SHANK885/Python-Basic-Programs | 4fcb29280412baa63ffd33efba56d9f59770c9dc | 157f0f871b31c4523b6873ce5dfe0d6e26a6dc61 | refs/heads/master | 2021-07-18T18:24:10.455282 | 2018-11-19T07:02:27 | 2018-11-19T07:02:27 | 138,009,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | unicodeString =u"Heello world"
print(unicodeString) | [
"[email protected]"
] | |
41255f5976ab13155649263d540c618488794b94 | 08a329d07172a384be41eb58a0586032b18787d2 | /if1.py | c5867a5c4dee5850f5bb2049812193dbe20c31e6 | [] | no_license | atsuhisa-i/Python_study1 | 9bc39d058fe8bdd00adb35324758ad8fa08f4ca1 | 439a654f09e81208658355d99c8ce1c3cd4bcc4e | refs/heads/main | 2023-04-06T12:44:12.099067 | 2021-04-14T13:24:56 | 2021-04-14T13:24:56 | 348,309,405 | 0 | 0 | null | 2021-04-14T13:24:57 | 2021-03-16T10:45:48 | Python | UTF-8 | Python | false | false | 69 | py | number = '123456'
if number == '123456':
print('1st Prize:Money') | [
"[email protected]"
] | |
f7e2098e769e91b1838c62aee43d87264b9aa9cb | 052d6ac57f2026aba22249368149b18027c78342 | /frontstage_api/resources/register/confirm_organisation_survey.py | 6331b7150306a3ab3887cebaf9c1d5eb733780ca | [
"MIT"
] | permissive | ONSdigital/ras-frontstage-api | c34b41185cc825b49262c1879ad559778a54dbfc | 7bb32a85868e2a241b8a0331b884155a36450669 | refs/heads/master | 2018-07-15T00:35:22.130352 | 2018-06-01T14:09:13 | 2018-06-01T14:09:13 | 105,001,932 | 2 | 1 | MIT | 2018-06-01T14:09:14 | 2017-09-27T09:54:26 | Python | UTF-8 | Python | false | false | 1,917 | py | import logging
from flask import jsonify, make_response, request
from flask_restplus import Resource, fields
from structlog import wrap_logger
from frontstage_api import auth, register_api
from frontstage_api.controllers import case_controller, collection_exercise_controller, iac_controller, party_controller, survey_controller
logger = wrap_logger(logging.getLogger(__name__))
enrolment_details = register_api.model('EnrolmentDetails', {
'enrolment_code': fields.String(required=True),
})
@register_api.route('/confirm-organisation-survey')
class ConfirmOrganisationSurvey(Resource):
@staticmethod
@auth.login_required
@register_api.expect(enrolment_details, validate=True)
def post():
logger.info('Attempting to retrieve organisation and survey data')
enrolment_code = request.get_json().get('enrolment_code')
# Verify enrolment code is active
iac = iac_controller.get_iac_from_enrolment(enrolment_code)
if not iac['active']:
return make_response(jsonify(iac), 401)
# Get organisation name
case = case_controller.get_case_by_enrolment_code(enrolment_code)
business_party_id = case['caseGroup']['partyId']
organisation_name = party_controller.get_party_by_business_id(business_party_id).get('name')
# Get survey name
collection_exercise_id = case['caseGroup']['collectionExerciseId']
collection_exercise = collection_exercise_controller.get_collection_exercise(collection_exercise_id)
survey_id = collection_exercise['surveyId']
survey_name = survey_controller.get_survey(survey_id).get('longName')
response_json = {
"organisation_name": organisation_name,
"survey_name": survey_name
}
logger.info('Successfully retrieved organisation and survey data')
return make_response(jsonify(response_json), 200)
| [
"[email protected]"
] | |
6e55abddbe446bbbe2e2f07ae0edd692a27197ed | b3ac12dfbb8fa74500b406a0907337011d4aac72 | /goldcoin/full_node/weight_proof.py | c12b097a836dbee13ac9816cccf3f9361015586b | [
"Apache-2.0"
] | permissive | chia-os/goldcoin-blockchain | ab62add5396b7734c11d3c37c41776994489d5e7 | 5c294688dbbe995ae1d4422803f6fcf3e1cc6077 | refs/heads/main | 2023-08-11T23:58:53.617051 | 2021-09-12T15:33:26 | 2021-09-12T15:33:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67,454 | py | import asyncio
import dataclasses
import logging
import math
import random
from concurrent.futures.process import ProcessPoolExecutor
from typing import Dict, List, Optional, Tuple
from goldcoin.consensus.block_header_validation import validate_finished_header_block
from goldcoin.consensus.block_record import BlockRecord
from goldcoin.consensus.blockchain_interface import BlockchainInterface
from goldcoin.consensus.constants import ConsensusConstants
from goldcoin.consensus.deficit import calculate_deficit
from goldcoin.consensus.full_block_to_block_record import header_block_to_sub_block_record
from goldcoin.consensus.pot_iterations import (
calculate_ip_iters,
calculate_iterations_quality,
calculate_sp_iters,
is_overflow_block,
)
from goldcoin.consensus.vdf_info_computation import get_signage_point_vdf_info
from goldcoin.types.blockchain_format.classgroup import ClassgroupElement
from goldcoin.types.blockchain_format.sized_bytes import bytes32
from goldcoin.types.blockchain_format.slots import ChallengeChainSubSlot, RewardChainSubSlot
from goldcoin.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from goldcoin.types.blockchain_format.vdf import VDFInfo
from goldcoin.types.end_of_slot_bundle import EndOfSubSlotBundle
from goldcoin.types.header_block import HeaderBlock
from goldcoin.types.weight_proof import (
SubEpochChallengeSegment,
SubEpochData,
SubSlotData,
WeightProof,
SubEpochSegments,
RecentChainData,
)
from goldcoin.util.block_cache import BlockCache
from goldcoin.util.hash import std_hash
from goldcoin.util.ints import uint8, uint32, uint64, uint128
from goldcoin.util.streamable import dataclass_from_dict, recurse_jsonify
log = logging.getLogger(__name__)
class WeightProofHandler:
    """Creates and validates weight proofs against a local blockchain.

    A weight proof lets a syncing peer verify the cumulative chain weight of a
    claimed peak without downloading every block: it consists of per-sub-epoch
    summary data, a randomly sampled set of sub-epoch challenge segments, and
    the recent chain (roughly the last two sub epochs of header blocks).
    """

    # Security parameter controlling how many sample queries are drawn
    # (see _get_weights_for_sampling).
    LAMBDA_L = 100
    # Probability bound used when computing the number of samples.
    C = 0.5
    # Hard cap on the number of sampled sub epochs per proof.
    MAX_SAMPLES = 20
    def __init__(
        self,
        constants: ConsensusConstants,
        blockchain: BlockchainInterface,
    ):
        # Tip hash the cached proof was built for (cache key).
        self.tip: Optional[bytes32] = None
        # Cached weight proof for self.tip.
        self.proof: Optional[WeightProof] = None
        self.constants = constants
        self.blockchain = blockchain
        # Serializes proof creation; only one proof is built at a time.
        self.lock = asyncio.Lock()
    async def get_proof_of_weight(self, tip: bytes32) -> Optional[WeightProof]:
        """Return a weight proof for ``tip``, reusing the cached proof when possible.

        Returns None when the tip is unknown or the chain is still shorter than
        WEIGHT_PROOF_RECENT_BLOCKS (a proof cannot be formed yet).
        """
        tip_rec = self.blockchain.try_block_record(tip)
        if tip_rec is None:
            log.error("unknown tip")
            return None
        if tip_rec.height < self.constants.WEIGHT_PROOF_RECENT_BLOCKS:
            log.debug("chain to short for weight proof")
            return None
        async with self.lock:
            # Cache hit: the stored proof already ends at the requested tip.
            if self.proof is not None:
                if self.proof.recent_chain_data[-1].header_hash == tip:
                    return self.proof
            wp = await self._create_proof_of_weight(tip)
            if wp is None:
                return None
            self.proof = wp
            self.tip = tip
            return wp
    def get_sub_epoch_data(self, tip_height: uint32, summary_heights: List[uint32]) -> List[SubEpochData]:
        """Collect compact SubEpochData for every summary at or below ``tip_height``."""
        sub_epoch_data: List[SubEpochData] = []
        for sub_epoch_n, ses_height in enumerate(summary_heights):
            if ses_height > tip_height:
                break
            ses = self.blockchain.get_ses(ses_height)
            log.debug(f"handle sub epoch summary {sub_epoch_n} at height: {ses_height} ses {ses}")
            sub_epoch_data.append(_create_sub_epoch_data(ses))
        return sub_epoch_data
    async def _create_proof_of_weight(self, tip: bytes32) -> Optional[WeightProof]:
        """
        Creates a weight proof object: sub-epoch data for all sub epochs, the
        recent chain, and challenge segments for a randomly sampled subset of
        sub epochs (seeded from the second-to-last summary hash).
        """
        assert self.blockchain is not None
        sub_epoch_segments: List[SubEpochChallengeSegment] = []
        tip_rec = self.blockchain.try_block_record(tip)
        if tip_rec is None:
            log.error("failed not tip in cache")
            return None
        log.info(f"create weight proof peak {tip} {tip_rec.height}")
        recent_chain = await self._get_recent_chain(tip_rec.height)
        if recent_chain is None:
            return None
        summary_heights = self.blockchain.get_ses_heights()
        prev_ses_block = await self.blockchain.get_block_record_from_db(self.blockchain.height_to_hash(uint32(0)))
        if prev_ses_block is None:
            return None
        sub_epoch_data = self.get_sub_epoch_data(tip_rec.height, summary_heights)
        # use second to last ses as seed
        seed = self.get_seed_for_proof(summary_heights, tip_rec.height)
        rng = random.Random(seed)
        weight_to_check = _get_weights_for_sampling(rng, tip_rec.weight, recent_chain)
        sample_n = 0
        ses_blocks = await self.blockchain.get_block_records_at(summary_heights)
        if ses_blocks is None:
            return None
        for sub_epoch_n, ses_height in enumerate(summary_heights):
            if ses_height > tip_rec.height:
                break
            # if we have enough sub_epoch samples, dont sample
            if sample_n >= self.MAX_SAMPLES:
                log.debug("reached sampled sub epoch cap")
                break
            # sample sub epoch
            # next sub block
            ses_block = ses_blocks[sub_epoch_n]
            if ses_block is None or ses_block.sub_epoch_summary_included is None:
                log.error("error while building proof")
                return None
            # Include this sub epoch's segments only if one of the sampled
            # weight values falls inside its weight range.
            if _sample_sub_epoch(prev_ses_block.weight, ses_block.weight, weight_to_check): # type: ignore
                sample_n += 1
                segments = await self.blockchain.get_sub_epoch_challenge_segments(ses_block.header_hash)
                if segments is None:
                    # Not cached yet: build the segments and persist for reuse.
                    segments = await self.__create_sub_epoch_segments(ses_block, prev_ses_block, uint32(sub_epoch_n))
                    if segments is None:
                        log.error(
                            f"failed while building segments for sub epoch {sub_epoch_n}, ses height {ses_height} "
                        )
                        return None
                    await self.blockchain.persist_sub_epoch_challenge_segments(ses_block.header_hash, segments)
                log.debug(f"sub epoch {sub_epoch_n} has {len(segments)} segments")
                sub_epoch_segments.extend(segments)
            prev_ses_block = ses_block
        log.debug(f"sub_epochs: {len(sub_epoch_data)}")
        return WeightProof(sub_epoch_data, sub_epoch_segments, recent_chain)
    def get_seed_for_proof(self, summary_heights: List[uint32], tip_height) -> bytes32:
        """Return the hash of the second-to-last sub epoch summary at or below tip_height."""
        count = 0
        ses = None
        for sub_epoch_n, ses_height in enumerate(reversed(summary_heights)):
            if ses_height <= tip_height:
                count += 1
            if count == 2:
                ses = self.blockchain.get_ses(ses_height)
                break
        assert ses is not None
        seed = ses.get_hash()
        return seed
    async def _get_recent_chain(self, tip_height: uint32) -> Optional[List[HeaderBlock]]:
        """Return the header blocks of roughly the last two sub epochs, oldest first."""
        recent_chain: List[HeaderBlock] = []
        ses_heights = self.blockchain.get_ses_heights()
        min_height = 0
        count_ses = 0
        # Find the height just below the second summary back from the tip;
        # that bounds how far back we need to fetch.
        for ses_height in reversed(ses_heights):
            if ses_height <= tip_height:
                count_ses += 1
            if count_ses == 2:
                min_height = ses_height - 1
                break
        log.debug(f"start {min_height} end {tip_height}")
        headers = await self.blockchain.get_header_blocks_in_range(min_height, tip_height, tx_filter=False)
        blocks = await self.blockchain.get_block_records_in_range(min_height, tip_height)
        ses_count = 0
        curr_height = tip_height
        blocks_n = 0
        # Walk backwards from the tip until two summary blocks were passed.
        while ses_count < 2:
            if curr_height == 0:
                break
            # add to needed reward chain recent blocks
            header_block = headers[self.blockchain.height_to_hash(curr_height)]
            block_rec = blocks[header_block.header_hash]
            # NOTE(review): dict indexing above raises KeyError rather than
            # yielding None, so this check looks ineffective — confirm.
            if header_block is None:
                log.error("creating recent chain failed")
                return None
            recent_chain.insert(0, header_block)
            if block_rec.sub_epoch_summary_included:
                ses_count += 1
            curr_height = uint32(curr_height - 1) # type: ignore
            blocks_n += 1
        header_block = headers[self.blockchain.height_to_hash(curr_height)]
        recent_chain.insert(0, header_block)
        log.info(
            f"recent chain, "
            f"start: {recent_chain[0].reward_chain_block.height} "
            f"end: {recent_chain[-1].reward_chain_block.height} "
        )
        return recent_chain
    async def create_prev_sub_epoch_segments(self):
        """Build and persist challenge segments for the previous (second-to-last) sub epoch."""
        log.debug("create prev sub_epoch_segments")
        heights = self.blockchain.get_ses_heights()
        if len(heights) < 3:
            return None
        count = len(heights) - 2
        ses_sub_block = self.blockchain.height_to_block_record(heights[-2])
        prev_ses_sub_block = self.blockchain.height_to_block_record(heights[-3])
        assert prev_ses_sub_block.sub_epoch_summary_included is not None
        segments = await self.__create_sub_epoch_segments(ses_sub_block, prev_ses_sub_block, uint32(count))
        assert segments is not None
        await self.blockchain.persist_sub_epoch_challenge_segments(ses_sub_block.header_hash, segments)
        log.debug("sub_epoch_segments done")
        return None
    async def create_sub_epoch_segments(self):
        """Ensure challenge segments exist in the DB for every completed sub epoch."""
        log.debug("check segments in db")
        # NOTE(review): the string below is a leftover no-op statement, not a
        # docstring (it follows executable code).
        """
        Creates a weight proof object
        """
        assert self.blockchain is not None
        peak_height = self.blockchain.get_peak_height()
        if peak_height is None:
            log.error("no peak yet")
            return None
        summary_heights = self.blockchain.get_ses_heights()
        prev_ses_block = await self.blockchain.get_block_record_from_db(self.blockchain.height_to_hash(uint32(0)))
        if prev_ses_block is None:
            return None
        ses_blocks = await self.blockchain.get_block_records_at(summary_heights)
        if ses_blocks is None:
            return None
        for sub_epoch_n, ses_height in enumerate(summary_heights):
            log.debug(f"check db for sub epoch {sub_epoch_n}")
            if ses_height > peak_height:
                break
            ses_block = ses_blocks[sub_epoch_n]
            if ses_block is None or ses_block.sub_epoch_summary_included is None:
                log.error("error while building proof")
                return None
            await self.__create_persist_segment(prev_ses_block, ses_block, ses_height, sub_epoch_n)
            prev_ses_block = ses_block
            # Yield to the event loop between sub epochs to avoid starving it.
            await asyncio.sleep(2)
        log.debug("done checking segments")
        return None
    async def __create_persist_segment(self, prev_ses_block, ses_block, ses_height, sub_epoch_n):
        """Create segments for one sub epoch (if not already stored) and persist them."""
        segments = await self.blockchain.get_sub_epoch_challenge_segments(ses_block.header_hash)
        if segments is None:
            segments = await self.__create_sub_epoch_segments(ses_block, prev_ses_block, uint32(sub_epoch_n))
            if segments is None:
                log.error(f"failed while building segments for sub epoch {sub_epoch_n}, ses height {ses_height} ")
                return None
            await self.blockchain.persist_sub_epoch_challenge_segments(ses_block.header_hash, segments)
    async def __create_sub_epoch_segments(
        self, ses_block: BlockRecord, se_start: BlockRecord, sub_epoch_n: uint32
    ) -> Optional[List[SubEpochChallengeSegment]]:
        """Build one SubEpochChallengeSegment per challenge block in the sub epoch
        spanning from ``se_start`` up to ``ses_block``."""
        segments: List[SubEpochChallengeSegment] = []
        start_height = await self.get_prev_two_slots_height(se_start)
        blocks = await self.blockchain.get_block_records_in_range(
            start_height, ses_block.height + self.constants.MAX_SUB_SLOT_BLOCKS
        )
        header_blocks = await self.blockchain.get_header_blocks_in_range(
            start_height, ses_block.height + self.constants.MAX_SUB_SLOT_BLOCKS, tx_filter=False
        )
        curr: Optional[HeaderBlock] = header_blocks[se_start.header_hash]
        height = se_start.height
        assert curr is not None
        first = True
        idx = 0
        while curr.height < ses_block.height:
            if blocks[curr.header_hash].is_challenge_block(self.constants):
                log.debug(f"challenge segment {idx}, starts at {curr.height} ")
                seg, height = await self._create_challenge_segment(curr, sub_epoch_n, header_blocks, blocks, first)
                if seg is None:
                    log.error(f"failed creating segment {curr.header_hash} ")
                    return None
                segments.append(seg)
                idx += 1
                first = False
            else:
                height = height + uint32(1) # type: ignore
            curr = header_blocks[self.blockchain.height_to_hash(height)]
            if curr is None:
                return None
        log.debug(f"next sub epoch starts at {height}")
        return segments
    async def get_prev_two_slots_height(self, se_start: BlockRecord) -> uint32:
        """Return the height of the block two sub-slot boundaries before ``se_start``."""
        # find prev 2 slots height
        slot = 0
        batch_size = 50
        curr_rec = se_start
        blocks = await self.blockchain.get_block_records_in_range(curr_rec.height - batch_size, curr_rec.height)
        end = curr_rec.height
        while slot < 2 and curr_rec.height > 0:
            if curr_rec.first_in_sub_slot:
                slot += 1
            # Refill the window when the current batch is exhausted.
            if end - curr_rec.height == batch_size - 1:
                blocks = await self.blockchain.get_block_records_in_range(curr_rec.height - batch_size, curr_rec.height)
                end = curr_rec.height
            curr_rec = blocks[self.blockchain.height_to_hash(uint32(curr_rec.height - 1))]
        return curr_rec.height
    async def _create_challenge_segment(
        self,
        header_block: HeaderBlock,
        sub_epoch_n: uint32,
        header_blocks: Dict[bytes32, HeaderBlock],
        blocks: Dict[bytes32, BlockRecord],
        first_segment_in_sub_epoch: bool,
    ) -> Tuple[Optional[SubEpochChallengeSegment], uint32]:
        """Assemble the segment around one challenge block: the sub-slot VDFs
        before it, the challenge block's own VDFs, and the slot-end VDFs after it.

        Returns (segment, height-after-segment); (None, 0) on failure.
        """
        assert self.blockchain is not None
        sub_slots: List[SubSlotData] = []
        log.debug(f"create challenge segment block {header_block.header_hash} block height {header_block.height} ")
        # VDFs from sub slots before challenge block
        first_sub_slots, first_rc_end_of_slot_vdf = await self.__first_sub_slot_vdfs(
            header_block, header_blocks, blocks, first_segment_in_sub_epoch
        )
        if first_sub_slots is None:
            log.error("failed building first sub slots")
            return None, uint32(0)
        sub_slots.extend(first_sub_slots)
        ssd = await _challenge_block_vdfs(
            self.constants,
            header_block,
            blocks[header_block.header_hash],
            blocks,
        )
        sub_slots.append(ssd)
        # # VDFs from slot after challenge block to end of slot
        log.debug(f"create slot end vdf for block {header_block.header_hash} height {header_block.height} ")
        challenge_slot_end_sub_slots, end_height = await self.__slot_end_vdf(
            uint32(header_block.height + 1), header_blocks, blocks
        )
        if challenge_slot_end_sub_slots is None:
            log.error("failed building slot end ")
            return None, uint32(0)
        sub_slots.extend(challenge_slot_end_sub_slots)
        # Only the first segment of a sub epoch carries the previous reward
        # chain end-of-slot VDF (used to reconstruct the ses rc hash).
        if first_segment_in_sub_epoch and sub_epoch_n != 0:
            return (
                SubEpochChallengeSegment(sub_epoch_n, sub_slots, first_rc_end_of_slot_vdf),
                end_height,
            )
        return SubEpochChallengeSegment(sub_epoch_n, sub_slots, None), end_height
    # returns a challenge chain vdf from slot start to signage point
    async def __first_sub_slot_vdfs(
        self,
        header_block: HeaderBlock,
        header_blocks: Dict[bytes32, HeaderBlock],
        blocks: Dict[bytes32, BlockRecord],
        first_in_sub_epoch: bool,
    ) -> Tuple[Optional[List[SubSlotData]], Optional[VDFInfo]]:
        """Collect SubSlotData from the start of the segment's sub slot(s) up to
        (excluding) the challenge block, plus the previous rc end-of-slot VDF
        when this is the first segment of the sub epoch."""
        # combine cc vdfs of all reward blocks from the start of the sub slot to end
        header_block_sub_rec = blocks[header_block.header_hash]
        # find slot start
        curr_sub_rec = header_block_sub_rec
        first_rc_end_of_slot_vdf = None
        if first_in_sub_epoch and curr_sub_rec.height > 0:
            # Rewind to the block that included the sub epoch summary.
            while not curr_sub_rec.sub_epoch_summary_included:
                curr_sub_rec = blocks[curr_sub_rec.prev_hash]
            first_rc_end_of_slot_vdf = self.first_rc_end_of_slot_vdf(header_block, blocks, header_blocks)
        else:
            if header_block_sub_rec.overflow and header_block_sub_rec.first_in_sub_slot:
                # Overflow blocks belong to the previous slot: rewind two slots.
                sub_slots_num = 2
                while sub_slots_num > 0 and curr_sub_rec.height > 0:
                    if curr_sub_rec.first_in_sub_slot:
                        assert curr_sub_rec.finished_challenge_slot_hashes is not None
                        sub_slots_num -= len(curr_sub_rec.finished_challenge_slot_hashes)
                    curr_sub_rec = blocks[curr_sub_rec.prev_hash]
            else:
                while not curr_sub_rec.first_in_sub_slot and curr_sub_rec.height > 0:
                    curr_sub_rec = blocks[curr_sub_rec.prev_hash]
        curr = header_blocks[curr_sub_rec.header_hash]
        sub_slots_data: List[SubSlotData] = []
        tmp_sub_slots_data: List[SubSlotData] = []
        while curr.height < header_block.height:
            if curr is None:
                log.error("failed fetching block")
                return None, None
            if curr.first_in_sub_slot:
                # if not blue boxed
                if not blue_boxed_end_of_slot(curr.finished_sub_slots[0]):
                    # Keep per-block VDFs only for slots whose end-of-slot proof
                    # is not compressed (blue boxed).
                    sub_slots_data.extend(tmp_sub_slots_data)
                for idx, sub_slot in enumerate(curr.finished_sub_slots):
                    curr_icc_info = None
                    if sub_slot.infused_challenge_chain is not None:
                        curr_icc_info = sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf
                    sub_slots_data.append(handle_finished_slots(sub_slot, curr_icc_info))
                tmp_sub_slots_data = []
            ssd = SubSlotData(
                None,
                None,
                None,
                None,
                None,
                curr.reward_chain_block.signage_point_index,
                None,
                None,
                None,
                None,
                curr.reward_chain_block.challenge_chain_ip_vdf,
                curr.reward_chain_block.infused_challenge_chain_ip_vdf,
                curr.total_iters,
            )
            tmp_sub_slots_data.append(ssd)
            curr = header_blocks[self.blockchain.height_to_hash(uint32(curr.height + 1))]
        if len(tmp_sub_slots_data) > 0:
            sub_slots_data.extend(tmp_sub_slots_data)
        for idx, sub_slot in enumerate(header_block.finished_sub_slots):
            curr_icc_info = None
            if sub_slot.infused_challenge_chain is not None:
                curr_icc_info = sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf
            sub_slots_data.append(handle_finished_slots(sub_slot, curr_icc_info))
        return sub_slots_data, first_rc_end_of_slot_vdf
    def first_rc_end_of_slot_vdf(
        self,
        header_block,
        blocks: Dict[bytes32, BlockRecord],
        header_blocks: Dict[bytes32, HeaderBlock],
    ) -> Optional[VDFInfo]:
        """Return the reward chain end-of-slot VDF of the block that included
        the sub epoch summary preceding ``header_block``."""
        curr = blocks[header_block.header_hash]
        while curr.height > 0 and not curr.sub_epoch_summary_included:
            curr = blocks[curr.prev_hash]
        return header_blocks[curr.header_hash].finished_sub_slots[-1].reward_chain.end_of_slot_vdf
    async def __slot_end_vdf(
        self, start_height: uint32, header_blocks: Dict[bytes32, HeaderBlock], blocks: Dict[bytes32, BlockRecord]
    ) -> Tuple[Optional[List[SubSlotData]], uint32]:
        """Collect SubSlotData from ``start_height`` until the next challenge
        block; returns (data, height of that challenge block)."""
        # gets all vdfs first sub slot after challenge block to last sub slot
        log.debug(f"slot end vdf start height {start_height}")
        curr = header_blocks[self.blockchain.height_to_hash(start_height)]
        curr_header_hash = curr.header_hash
        sub_slots_data: List[SubSlotData] = []
        tmp_sub_slots_data: List[SubSlotData] = []
        while not blocks[curr_header_hash].is_challenge_block(self.constants):
            if curr.first_in_sub_slot:
                sub_slots_data.extend(tmp_sub_slots_data)
                curr_prev_header_hash = curr.prev_header_hash
                # add collected vdfs
                for idx, sub_slot in enumerate(curr.finished_sub_slots):
                    prev_rec = blocks[curr_prev_header_hash]
                    eos_vdf_iters = prev_rec.sub_slot_iters
                    if idx == 0:
                        # First finished slot starts at the previous block's
                        # infusion point, not at the slot boundary.
                        eos_vdf_iters = uint64(prev_rec.sub_slot_iters - prev_rec.ip_iters(self.constants))
                    sub_slots_data.append(handle_end_of_slot(sub_slot, eos_vdf_iters))
                tmp_sub_slots_data = []
            tmp_sub_slots_data.append(self.handle_block_vdfs(curr, blocks))
            curr = header_blocks[self.blockchain.height_to_hash(uint32(curr.height + 1))]
            curr_header_hash = curr.header_hash
        if len(tmp_sub_slots_data) > 0:
            sub_slots_data.extend(tmp_sub_slots_data)
        log.debug(f"slot end vdf end height {curr.height} slots {len(sub_slots_data)} ")
        return sub_slots_data, curr.height
    def handle_block_vdfs(self, curr: HeaderBlock, blocks: Dict[bytes32, BlockRecord]):
        """Build the SubSlotData carrying one block's infusion/signage point VDFs."""
        cc_sp_proof = None
        icc_ip_proof = None
        cc_sp_info = None
        icc_ip_info = None
        block_record = blocks[curr.header_hash]
        if curr.infused_challenge_chain_ip_proof is not None:
            assert curr.reward_chain_block.infused_challenge_chain_ip_vdf
            icc_ip_proof = curr.infused_challenge_chain_ip_proof
            icc_ip_info = curr.reward_chain_block.infused_challenge_chain_ip_vdf
        if curr.challenge_chain_sp_proof is not None:
            assert curr.reward_chain_block.challenge_chain_sp_vdf
            cc_sp_vdf_info = curr.reward_chain_block.challenge_chain_sp_vdf
            if not curr.challenge_chain_sp_proof.normalized_to_identity:
                # Non-compressed proof: recompute the sp VDF iteration count so
                # the info matches the proof's actual input segment.
                (_, _, _, _, cc_vdf_iters, _,) = get_signage_point_vdf_info(
                    self.constants,
                    curr.finished_sub_slots,
                    block_record.overflow,
                    None if curr.height == 0 else blocks[curr.prev_header_hash],
                    BlockCache(blocks),
                    block_record.sp_total_iters(self.constants),
                    block_record.sp_iters(self.constants),
                )
                cc_sp_vdf_info = VDFInfo(
                    curr.reward_chain_block.challenge_chain_sp_vdf.challenge,
                    cc_vdf_iters,
                    curr.reward_chain_block.challenge_chain_sp_vdf.output,
                )
            cc_sp_proof = curr.challenge_chain_sp_proof
            cc_sp_info = cc_sp_vdf_info
        return SubSlotData(
            None,
            cc_sp_proof,
            curr.challenge_chain_ip_proof,
            icc_ip_proof,
            cc_sp_info,
            curr.reward_chain_block.signage_point_index,
            None,
            None,
            None,
            None,
            curr.reward_chain_block.challenge_chain_ip_vdf,
            icc_ip_info,
            curr.total_iters,
        )
    def validate_weight_proof_single_proc(self, weight_proof: WeightProof) -> Tuple[bool, uint32]:
        """Validate a weight proof synchronously in this process.

        Returns (valid, fork point height).
        """
        assert self.blockchain is not None
        assert len(weight_proof.sub_epochs) > 0
        # NOTE(review): unreachable when asserts are enabled — the assert above
        # already rules this case out.
        if len(weight_proof.sub_epochs) == 0:
            return False, uint32(0)
        peak_height = weight_proof.recent_chain_data[-1].reward_chain_block.height
        log.info(f"validate weight proof peak height {peak_height}")
        summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self.constants, weight_proof)
        if summaries is None:
            log.warning("weight proof failed sub epoch data validation")
            return False, uint32(0)
        constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
            self.constants, summaries, weight_proof
        )
        log.info("validate sub epoch challenge segments")
        # Same seeding rule as proof creation: second-to-last summary hash.
        seed = summaries[-2].get_hash()
        rng = random.Random(seed)
        if not validate_sub_epoch_sampling(rng, sub_epoch_weight_list, weight_proof):
            log.error("failed weight proof sub epoch sample validation")
            return False, uint32(0)
        if not _validate_sub_epoch_segments(constants, rng, wp_segment_bytes, summary_bytes):
            return False, uint32(0)
        log.info("validate weight proof recent blocks")
        if not _validate_recent_blocks(constants, wp_recent_chain_bytes, summary_bytes):
            return False, uint32(0)
        return True, self.get_fork_point(summaries)
    def get_fork_point_no_validations(self, weight_proof: WeightProof) -> Tuple[bool, uint32]:
        """Compute the fork point after validating only the sub epoch summaries."""
        log.debug("get fork point skip validations")
        assert self.blockchain is not None
        assert len(weight_proof.sub_epochs) > 0
        if len(weight_proof.sub_epochs) == 0:
            return False, uint32(0)
        summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self.constants, weight_proof)
        if summaries is None:
            log.warning("weight proof failed to validate sub epoch summaries")
            return False, uint32(0)
        return True, self.get_fork_point(summaries)
    async def validate_weight_proof(self, weight_proof: WeightProof) -> Tuple[bool, uint32, List[SubEpochSummary]]:
        """Validate a weight proof, offloading the heavy segment/recent-chain
        checks to a worker process.

        Returns (valid, fork point height, validated summaries).
        """
        assert self.blockchain is not None
        assert len(weight_proof.sub_epochs) > 0
        if len(weight_proof.sub_epochs) == 0:
            return False, uint32(0), []
        peak_height = weight_proof.recent_chain_data[-1].reward_chain_block.height
        log.info(f"validate weight proof peak height {peak_height}")
        summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self.constants, weight_proof)
        if summaries is None:
            log.error("weight proof failed sub epoch data validation")
            return False, uint32(0), []
        seed = summaries[-2].get_hash()
        rng = random.Random(seed)
        if not validate_sub_epoch_sampling(rng, sub_epoch_weight_list, weight_proof):
            log.error("failed weight proof sub epoch sample validation")
            return False, uint32(0), []
        executor = ProcessPoolExecutor(1)
        constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
            self.constants, summaries, weight_proof
        )
        segment_validation_task = asyncio.get_running_loop().run_in_executor(
            executor, _validate_sub_epoch_segments, constants, rng, wp_segment_bytes, summary_bytes
        )
        recent_blocks_validation_task = asyncio.get_running_loop().run_in_executor(
            executor, _validate_recent_blocks, constants, wp_recent_chain_bytes, summary_bytes
        )
        valid_segment_task = segment_validation_task
        valid_recent_blocks_task = recent_blocks_validation_task
        valid_recent_blocks = await valid_recent_blocks_task
        if not valid_recent_blocks:
            log.error("failed validating weight proof recent blocks")
            return False, uint32(0), []
        valid_segments = await valid_segment_task
        if not valid_segments:
            log.error("failed validating weight proof sub epoch segments")
            return False, uint32(0), []
        return True, self.get_fork_point(summaries), summaries
    def get_fork_point(self, received_summaries: List[SubEpochSummary]) -> uint32:
        """Return the height at which the local chain and the received
        summaries last agree (conservatively backed off by two sub epochs)."""
        # iterate through sub epoch summaries to find fork point
        fork_point_index = 0
        ses_heights = self.blockchain.get_ses_heights()
        for idx, summary_height in enumerate(ses_heights):
            log.debug(f"check summary {idx} height {summary_height}")
            local_ses = self.blockchain.get_ses(summary_height)
            if idx == len(received_summaries) - 1:
                # end of wp summaries, local chain is longer or equal to wp chain
                break
            if local_ses is None or local_ses.get_hash() != received_summaries[idx].get_hash():
                break
            fork_point_index = idx
        if fork_point_index > 2:
            # Two summeries can have different blocks and still be identical
            # This gets resolved after one full sub epoch
            height = ses_heights[fork_point_index - 2]
        else:
            height = uint32(0)
        return height
def _get_weights_for_sampling(
    rng: random.Random, total_weight: uint128, recent_chain: List[HeaderBlock]
) -> Optional[List[uint128]]:
    """Return a sorted list of chain-weight values at which sub epochs should be
    sampled, or None when no sampling is needed."""
    weight_to_check = []
    # Weight covered by the recent chain (the "L" portion of the proof).
    last_l_weight = recent_chain[-1].reward_chain_block.weight - recent_chain[0].reward_chain_block.weight
    delta = last_l_weight / total_weight
    # NOTE(review): math.log(a, b) computes log base-b of a; the argument order
    # in the two calls below follows the upstream implementation — confirm
    # against the weight-proof specification before changing.
    prob_of_adv_succeeding = 1 - math.log(WeightProofHandler.C, delta)
    if prob_of_adv_succeeding <= 0:
        return None
    queries = -WeightProofHandler.LAMBDA_L * math.log(2, prob_of_adv_succeeding)
    for i in range(int(queries) + 1):
        # Draw a weight skewed toward the heavier (more recent) part of the chain.
        u = rng.random()
        q = 1 - delta ** u
        # todo check division and type conversions
        weight = q * float(total_weight)
        weight_to_check.append(uint128(int(weight)))
    weight_to_check.sort()
    return weight_to_check
def _sample_sub_epoch(
    start_of_epoch_weight: uint128,
    end_of_epoch_weight: uint128,
    weight_to_check: List[uint128],
) -> bool:
    """Return True when a sampled weight falls strictly inside this sub epoch's
    weight range.

    ``weight_to_check`` must be sorted in ascending order; None means every
    sub epoch is sampled.
    """
    if weight_to_check is None:
        return True
    # Sorted input: if even the largest sample is below the range, or the
    # smallest is above it, no sample can fall inside.
    if weight_to_check[-1] < start_of_epoch_weight:
        return False
    if weight_to_check[0] > end_of_epoch_weight:
        return False
    for candidate in weight_to_check:
        if candidate > end_of_epoch_weight:
            # Sorted input: every remaining sample is out of range too.
            return False
        if start_of_epoch_weight < candidate < end_of_epoch_weight:
            log.debug(f"start weight: {start_of_epoch_weight}")
            log.debug(f"weight to check {candidate}")
            log.debug(f"end weight: {end_of_epoch_weight}")
            return True
    return False
# wp creation methods
def _create_sub_epoch_data(
    sub_epoch_summary: SubEpochSummary,
) -> SubEpochData:
    """Project a SubEpochSummary onto the compact SubEpochData carried in a
    weight proof."""
    return SubEpochData(
        sub_epoch_summary.reward_chain_hash,
        # overflow blocks carried over from the previous sub epoch
        sub_epoch_summary.num_blocks_overflow,
        # new sub-slot iterations / difficulty, set only at an epoch boundary
        sub_epoch_summary.new_sub_slot_iters,
        sub_epoch_summary.new_difficulty,
    )
async def _challenge_block_vdfs(
    constants: ConsensusConstants,
    header_block: HeaderBlock,
    block_rec: BlockRecord,
    sub_blocks: Dict[bytes32, BlockRecord],
):
    """Build the SubSlotData for a challenge block itself: its proof of space,
    signage point and infusion point proofs/VDF infos."""
    # Recompute the signage point VDF iteration count from chain context so the
    # VDF info matches the proof's actual input segment.
    (_, _, _, _, cc_vdf_iters, _,) = get_signage_point_vdf_info(
        constants,
        header_block.finished_sub_slots,
        block_rec.overflow,
        None if header_block.height == 0 else sub_blocks[header_block.prev_header_hash],
        BlockCache(sub_blocks),
        block_rec.sp_total_iters(constants),
        block_rec.sp_iters(constants),
    )
    cc_sp_info = None
    if header_block.reward_chain_block.challenge_chain_sp_vdf:
        cc_sp_info = header_block.reward_chain_block.challenge_chain_sp_vdf
        assert header_block.challenge_chain_sp_proof
        # Only non-compressed (not normalized-to-identity) proofs need the
        # recomputed iteration count.
        if not header_block.challenge_chain_sp_proof.normalized_to_identity:
            cc_sp_info = VDFInfo(
                header_block.reward_chain_block.challenge_chain_sp_vdf.challenge,
                cc_vdf_iters,
                header_block.reward_chain_block.challenge_chain_sp_vdf.output,
            )
    ssd = SubSlotData(
        header_block.reward_chain_block.proof_of_space,
        header_block.challenge_chain_sp_proof,
        header_block.challenge_chain_ip_proof,
        None,
        cc_sp_info,
        header_block.reward_chain_block.signage_point_index,
        None,
        None,
        None,
        None,
        header_block.reward_chain_block.challenge_chain_ip_vdf,
        header_block.reward_chain_block.infused_challenge_chain_ip_vdf,
        block_rec.total_iters,
    )
    return ssd
def handle_finished_slots(end_of_slot: EndOfSubSlotBundle, icc_end_of_slot_info):
    """Build the SubSlotData for a finished (blockless) sub slot, carrying only
    its end-of-slot proofs and VDF infos."""
    proofs = end_of_slot.proofs
    # The original `None if x is None else x` expressions are identity
    # operations, so the proofs are passed straight through here.
    return SubSlotData(
        None,
        None,
        None,
        None,
        None,
        None,
        proofs.challenge_chain_slot_proof,
        proofs.infused_challenge_chain_slot_proof,
        end_of_slot.challenge_chain.challenge_chain_end_of_slot_vdf,
        icc_end_of_slot_info,
        None,
        None,
        None,
    )
def handle_end_of_slot(
    sub_slot: EndOfSubSlotBundle,
    eos_vdf_iters: uint64,
):
    """Build the SubSlotData carrying the end-of-slot VDFs of ``sub_slot``.

    For proofs that are not normalized to identity the VDF info is rebuilt with
    ``eos_vdf_iters`` (the iteration count of this slot's VDF segment).
    """
    assert sub_slot.infused_challenge_chain
    assert sub_slot.proofs.infused_challenge_chain_slot_proof
    icc_vdf = sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf
    if sub_slot.proofs.infused_challenge_chain_slot_proof.normalized_to_identity:
        icc_info = icc_vdf
    else:
        icc_info = VDFInfo(icc_vdf.challenge, eos_vdf_iters, icc_vdf.output)
    cc_vdf = sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf
    if sub_slot.proofs.challenge_chain_slot_proof.normalized_to_identity:
        cc_info = cc_vdf
    else:
        cc_info = VDFInfo(cc_vdf.challenge, eos_vdf_iters, cc_vdf.output)
    assert sub_slot.proofs.infused_challenge_chain_slot_proof is not None
    return SubSlotData(
        None,
        None,
        None,
        None,
        None,
        None,
        sub_slot.proofs.challenge_chain_slot_proof,
        sub_slot.proofs.infused_challenge_chain_slot_proof,
        cc_info,
        icc_info,
        None,
        None,
        None,
    )
def compress_segments(full_segment_index, segments: List[SubEpochChallengeSegment]) -> List[SubEpochChallengeSegment]:
    """Return the segments with all but one compressed.

    The first segment is always kept whole; among the rest, the one at
    ``full_segment_index`` stays whole and every other one is compressed.
    NOTE(review): the index counts from ``segments[1:]``, so the comparison may
    be off by one relative to absolute segment indexing — confirm with callers.
    """
    compressed = [segments[0]]
    for offset, segment in enumerate(segments[1:]):
        compressed.append(segment if offset == full_segment_index else compress_segment(segment))
    return compressed
def compress_segment(segment: SubEpochChallengeSegment) -> SubEpochChallengeSegment:
    """Return a compressed copy of ``segment`` that keeps only the sub slots up
    to and including the challenge sub slot.

    Bug fix: the original built the compressed segment (``comp_seg``) but then
    returned the untouched input ``segment``, so compression never took effect.
    """
    # find challenge slot
    comp_seg = SubEpochChallengeSegment(segment.sub_epoch_n, [], segment.rc_slot_end_info)
    for slot in segment.sub_slots:
        comp_seg.sub_slots.append(slot)
        if slot.is_challenge():
            break
    return comp_seg
# wp validation methods
def _validate_sub_epoch_summaries(
    constants: ConsensusConstants,
    weight_proof: WeightProof,
) -> Tuple[Optional[List[SubEpochSummary]], Optional[List[uint128]]]:
    """Rebuild the sub epoch summary chain from the proof's SubEpochData and
    check its total weight and final hash against the recent chain.

    Returns (summaries, per-sub-epoch start weights), or (None, None) on failure.
    """
    # The last summary hash actually included in the recent chain anchors the
    # reconstructed summary chain.
    last_ses_hash, last_ses_sub_height = _get_last_ses_hash(constants, weight_proof.recent_chain_data)
    if last_ses_hash is None:
        log.warning("could not find last ses block")
        return None, None
    summaries, total, sub_epoch_weight_list = _map_sub_epoch_summaries(
        constants.SUB_EPOCH_BLOCKS,
        constants.GENESIS_CHALLENGE,
        weight_proof.sub_epochs,
        constants.DIFFICULTY_STARTING,
    )
    log.info(f"validating {len(summaries)} sub epochs")
    # validate weight
    if not _validate_summaries_weight(constants, total, summaries, weight_proof):
        log.error("failed validating weight")
        return None, None
    last_ses = summaries[-1]
    log.debug(f"last ses sub height {last_ses_sub_height}")
    # validate last ses_hash
    if last_ses.get_hash() != last_ses_hash:
        log.error(f"failed to validate ses hashes block height {last_ses_sub_height}")
        return None, None
    return summaries, sub_epoch_weight_list
def _map_sub_epoch_summaries(
    sub_blocks_for_se: uint32,
    ses_hash: bytes32,
    sub_epoch_data: List[SubEpochData],
    curr_difficulty: uint64,
) -> Tuple[List[SubEpochSummary], uint128, List[uint128]]:
    """Reconstruct the chained SubEpochSummary objects from compact proof data.

    Each summary hashes the previous one, so the chain is anchored at
    ``ses_hash`` (the genesis challenge).  Returns (summaries, total weight,
    start weight of each sub epoch).
    """
    total_weight: uint128 = uint128(0)
    summaries: List[SubEpochSummary] = []
    sub_epoch_weight_list: List[uint128] = []
    for idx, data in enumerate(sub_epoch_data):
        ses = SubEpochSummary(
            ses_hash,
            data.reward_chain_hash,
            data.num_blocks_overflow,
            data.new_difficulty,
            data.new_sub_slot_iters,
        )
        if idx < len(sub_epoch_data) - 1:
            # Each sub epoch's effective block count is SUB_EPOCH_BLOCKS plus
            # next epoch's overflow blocks, minus this epoch's own overflow.
            delta = 0
            if idx > 0:
                delta = sub_epoch_data[idx].num_blocks_overflow
            log.debug(f"sub epoch {idx} start weight is {total_weight+curr_difficulty} ")
            sub_epoch_weight_list.append(uint128(total_weight + curr_difficulty))
            total_weight = total_weight + uint128( # type: ignore
                curr_difficulty * (sub_blocks_for_se + sub_epoch_data[idx + 1].num_blocks_overflow - delta)
            )
        # if new epoch update diff and iters
        if data.new_difficulty is not None:
            curr_difficulty = data.new_difficulty
        # add to dict
        summaries.append(ses)
        # Next summary chains off the hash of this one.
        ses_hash = std_hash(ses)
    # add last sub epoch weight
    sub_epoch_weight_list.append(uint128(total_weight + curr_difficulty))
    return summaries, total_weight, sub_epoch_weight_list
def _validate_summaries_weight(constants: ConsensusConstants, sub_epoch_data_weight, summaries, weight_proof) -> bool:
    """Check that the weight implied by the summaries equals the weight recorded
    at the final block of the last summarized sub epoch in the recent chain."""
    overflow_count = summaries[-1].num_blocks_overflow
    # Height of the last block belonging to the final summarized sub epoch.
    ses_end_height = (len(summaries) - 1) * constants.SUB_EPOCH_BLOCKS + overflow_count - 1
    matched = None
    for block in weight_proof.recent_chain_data:
        if block.reward_chain_block.height == ses_end_height:
            matched = block
    if matched is None:
        return False
    return matched.reward_chain_block.weight == sub_epoch_data_weight
def _validate_sub_epoch_segments(
    constants_dict: Dict,
    rng: random.Random,
    weight_proof_bytes: bytes,
    summaries_bytes: List[bytes],
):
    """Validate all sub epoch challenge segments in a weight proof.

    Runs in a worker process, hence the byte-serialized arguments.  For each
    sub epoch it recreates the reward chain sub slot hash and validates every
    segment, with one rng-chosen segment per sub epoch fully sampled.
    """
    constants, summaries = bytes_to_vars(constants_dict, summaries_bytes)
    sub_epoch_segments: SubEpochSegments = SubEpochSegments.from_bytes(weight_proof_bytes)
    rc_sub_slot_hash = constants.GENESIS_CHALLENGE
    total_blocks, total_ip_iters = 0, 0
    total_slot_iters, total_slots = 0, 0
    total_ip_iters = 0
    prev_ses: Optional[SubEpochSummary] = None
    segments_by_sub_epoch = map_segments_by_sub_epoch(sub_epoch_segments.challenge_segments)
    curr_ssi = constants.SUB_SLOT_ITERS_STARTING
    for sub_epoch_n, segments in segments_by_sub_epoch.items():
        prev_ssi = curr_ssi
        curr_difficulty, curr_ssi = _get_curr_diff_ssi(constants, sub_epoch_n, summaries)
        log.debug(f"validate sub epoch {sub_epoch_n}")
        # recreate RewardChainSubSlot for next ses rc_hash
        # The sampled segment index must match the prover's rng draw (same seed).
        sampled_seg_index = rng.choice(range(len(segments)))
        if sub_epoch_n > 0:
            rc_sub_slot = __get_rc_sub_slot(constants, segments[0], summaries, curr_ssi)
            prev_ses = summaries[sub_epoch_n - 1]
            rc_sub_slot_hash = rc_sub_slot.get_hash()
        if not summaries[sub_epoch_n].reward_chain_hash == rc_sub_slot_hash:
            log.error(f"failed reward_chain_hash validation sub_epoch {sub_epoch_n}")
            return False
        for idx, segment in enumerate(segments):
            valid_segment, ip_iters, slot_iters, slots = _validate_segment(
                constants, segment, curr_ssi, prev_ssi, curr_difficulty, prev_ses, idx == 0, sampled_seg_index == idx
            )
            if not valid_segment:
                log.error(f"failed to validate sub_epoch {segment.sub_epoch_n} segment {idx} slots")
                return False
            # prev_ses is only needed for the first segment of a sub epoch.
            prev_ses = None
            total_blocks += 1
            total_slot_iters += slot_iters
            total_slots += slots
            total_ip_iters += ip_iters
    return True
def _validate_segment(
    constants: ConsensusConstants,
    segment: SubEpochChallengeSegment,
    curr_ssi: uint64,
    prev_ssi: uint64,
    curr_difficulty: uint64,
    ses: Optional[SubEpochSummary],
    first_segment_in_se: bool,
    sampled: bool,
) -> Tuple[bool, int, int, int]:
    """Validate one challenge segment.

    Returns ``(valid, ip_iters, slot_iters, slots)``; the last three are
    totals accumulated over the segment's sub slots. When *sampled* is False
    only the iteration bookkeeping runs; when True, the challenge block's
    proof of space and all VDFs from the challenge block onward are verified.
    """
    ip_iters, slot_iters, slots = 0, 0, 0
    after_challenge = False
    for idx, sub_slot_data in enumerate(segment.sub_slots):
        if sampled and sub_slot_data.is_challenge():
            after_challenge = True
            # Proof of space check; None signals verification failure.
            required_iters = __validate_pospace(constants, segment, idx, curr_difficulty, ses, first_segment_in_se)
            if required_iters is None:
                return False, uint64(0), uint64(0), uint64(0)
            assert sub_slot_data.signage_point_index is not None
            ip_iters = ip_iters + calculate_ip_iters(  # type: ignore
                constants, curr_ssi, sub_slot_data.signage_point_index, required_iters
            )
            if not _validate_challenge_block_vdfs(constants, idx, segment.sub_slots, curr_ssi):
                log.error(f"failed to validate challenge slot {idx} vdfs")
                return False, uint64(0), uint64(0), uint64(0)
        elif sampled and after_challenge:
            # Entries after the challenge block only need their VDF chain checked.
            if not _validate_sub_slot_data(constants, idx, segment.sub_slots, curr_ssi):
                log.error(f"failed to validate sub slot data {idx} vdfs")
                return False, uint64(0), uint64(0), uint64(0)
        slot_iters = slot_iters + curr_ssi  # type: ignore
        slots = slots + uint64(1)  # type: ignore
    return True, ip_iters, slot_iters, slots
def _validate_challenge_block_vdfs(
    constants: ConsensusConstants,
    sub_slot_idx: int,
    sub_slots: List[SubSlotData],
    ssi: uint64,
) -> bool:
    """Validate the signage point and infusion point VDFs of a challenge block.

    Returns True when every present proof verifies against its recomputed
    input element.
    """
    sub_slot_data = sub_slots[sub_slot_idx]
    if sub_slot_data.cc_signage_point is not None and sub_slot_data.cc_sp_vdf_info:
        assert sub_slot_data.signage_point_index
        sp_input = ClassgroupElement.get_default_element()
        # A non-compacted SP proof chains from an earlier entry's output.
        if not sub_slot_data.cc_signage_point.normalized_to_identity and sub_slot_idx >= 1:
            is_overflow = is_overflow_block(constants, sub_slot_data.signage_point_index)
            prev_ssd = sub_slots[sub_slot_idx - 1]
            sp_input = sub_slot_data_vdf_input(
                constants, sub_slot_data, sub_slot_idx, sub_slots, is_overflow, prev_ssd.is_end_of_slot(), ssi
            )
        if not sub_slot_data.cc_signage_point.is_valid(constants, sp_input, sub_slot_data.cc_sp_vdf_info):
            log.error(f"failed to validate challenge chain signage point 2 {sub_slot_data.cc_sp_vdf_info}")
            return False
    assert sub_slot_data.cc_infusion_point
    assert sub_slot_data.cc_ip_vdf_info
    ip_input = ClassgroupElement.get_default_element()
    cc_ip_vdf_info = sub_slot_data.cc_ip_vdf_info
    if not sub_slot_data.cc_infusion_point.normalized_to_identity and sub_slot_idx >= 1:
        prev_ssd = sub_slots[sub_slot_idx - 1]
        if prev_ssd.cc_slot_end is None:
            # Previous entry is a block in the same slot: the IP VDF only
            # covers the iterations since that block.
            assert prev_ssd.cc_ip_vdf_info
            assert prev_ssd.total_iters
            assert sub_slot_data.total_iters
            ip_input = prev_ssd.cc_ip_vdf_info.output
            ip_vdf_iters = uint64(sub_slot_data.total_iters - prev_ssd.total_iters)
            cc_ip_vdf_info = VDFInfo(
                sub_slot_data.cc_ip_vdf_info.challenge, ip_vdf_iters, sub_slot_data.cc_ip_vdf_info.output
            )
    if not sub_slot_data.cc_infusion_point.is_valid(constants, ip_input, cc_ip_vdf_info):
        log.error(f"failed to validate challenge chain infusion point {sub_slot_data.cc_ip_vdf_info}")
        return False
    return True
def _validate_sub_slot_data(
    constants: ConsensusConstants,
    sub_slot_idx: int,
    sub_slots: List[SubSlotData],
    ssi: uint64,
) -> bool:
    """Validate the VDF proofs of one (non-challenge) sub slot entry.

    Handles both end-of-slot entries (challenge / infused challenge slot end
    proofs) and block entries (infusion point, signage point, intermediate
    VDFs). Returns True when every present proof verifies.

    Fix: the local previously named ``input`` shadowed the builtin; it is
    renamed ``vdf_input`` throughout. Behavior is unchanged.
    """
    sub_slot_data = sub_slots[sub_slot_idx]
    assert sub_slot_idx > 0
    prev_ssd = sub_slots[sub_slot_idx - 1]
    if sub_slot_data.is_end_of_slot():
        if sub_slot_data.icc_slot_end is not None:
            vdf_input = ClassgroupElement.get_default_element()
            # Non-compacted ICC slot end chains from the previous ICC output.
            if not sub_slot_data.icc_slot_end.normalized_to_identity and prev_ssd.icc_ip_vdf_info is not None:
                assert prev_ssd.icc_ip_vdf_info
                vdf_input = prev_ssd.icc_ip_vdf_info.output
            assert sub_slot_data.icc_slot_end_info
            if not sub_slot_data.icc_slot_end.is_valid(constants, vdf_input, sub_slot_data.icc_slot_end_info, None):
                log.error(f"failed icc slot end validation {sub_slot_data.icc_slot_end_info} ")
                return False
        assert sub_slot_data.cc_slot_end_info
        assert sub_slot_data.cc_slot_end
        vdf_input = ClassgroupElement.get_default_element()
        if (not prev_ssd.is_end_of_slot()) and (not sub_slot_data.cc_slot_end.normalized_to_identity):
            assert prev_ssd.cc_ip_vdf_info
            vdf_input = prev_ssd.cc_ip_vdf_info.output
        if not sub_slot_data.cc_slot_end.is_valid(constants, vdf_input, sub_slot_data.cc_slot_end_info):
            log.error(f"failed cc slot end validation {sub_slot_data.cc_slot_end_info}")
            return False
    else:
        # find end of slot
        idx = sub_slot_idx
        while idx < len(sub_slots) - 1:
            curr_slot = sub_slots[idx]
            if curr_slot.is_end_of_slot():
                # dont validate intermediate vdfs if slot is blue boxed
                assert curr_slot.cc_slot_end
                if curr_slot.cc_slot_end.normalized_to_identity is True:
                    log.debug(f"skip intermediate vdfs slot {sub_slot_idx}")
                    return True
                else:
                    break
            idx += 1
        if sub_slot_data.icc_infusion_point is not None and sub_slot_data.icc_ip_vdf_info is not None:
            vdf_input = ClassgroupElement.get_default_element()
            if not prev_ssd.is_challenge() and prev_ssd.icc_ip_vdf_info is not None:
                vdf_input = prev_ssd.icc_ip_vdf_info.output
            if not sub_slot_data.icc_infusion_point.is_valid(constants, vdf_input, sub_slot_data.icc_ip_vdf_info, None):
                log.error(f"failed icc infusion point vdf validation {sub_slot_data.icc_slot_end_info} ")
                return False
        assert sub_slot_data.signage_point_index is not None
        if sub_slot_data.cc_signage_point:
            assert sub_slot_data.cc_sp_vdf_info
            vdf_input = ClassgroupElement.get_default_element()
            if not sub_slot_data.cc_signage_point.normalized_to_identity:
                is_overflow = is_overflow_block(constants, sub_slot_data.signage_point_index)
                vdf_input = sub_slot_data_vdf_input(
                    constants, sub_slot_data, sub_slot_idx, sub_slots, is_overflow, prev_ssd.is_end_of_slot(), ssi
                )
            if not sub_slot_data.cc_signage_point.is_valid(constants, vdf_input, sub_slot_data.cc_sp_vdf_info):
                log.error(f"failed cc signage point vdf validation {sub_slot_data.cc_sp_vdf_info}")
                return False
        vdf_input = ClassgroupElement.get_default_element()
        assert sub_slot_data.cc_ip_vdf_info
        assert sub_slot_data.cc_infusion_point
        cc_ip_vdf_info = sub_slot_data.cc_ip_vdf_info
        if not sub_slot_data.cc_infusion_point.normalized_to_identity and prev_ssd.cc_slot_end is None:
            assert prev_ssd.cc_ip_vdf_info
            vdf_input = prev_ssd.cc_ip_vdf_info.output
            assert sub_slot_data.total_iters
            assert prev_ssd.total_iters
            # The IP VDF only runs for the iterations since the previous block.
            ip_vdf_iters = uint64(sub_slot_data.total_iters - prev_ssd.total_iters)
            cc_ip_vdf_info = VDFInfo(
                sub_slot_data.cc_ip_vdf_info.challenge, ip_vdf_iters, sub_slot_data.cc_ip_vdf_info.output
            )
        if not sub_slot_data.cc_infusion_point.is_valid(constants, vdf_input, cc_ip_vdf_info):
            log.error(f"failed cc infusion point vdf validation {sub_slot_data.cc_slot_end_info}")
            return False
    return True
def sub_slot_data_vdf_input(
    constants: ConsensusConstants,
    sub_slot_data: SubSlotData,
    sub_slot_idx: int,
    sub_slots: List[SubSlotData],
    is_overflow: bool,
    new_sub_slot: bool,
    ssi: uint64,
) -> ClassgroupElement:
    """Return the input classgroup element for a signage point VDF.

    Walks backwards from *sub_slot_idx* to find the last infused block whose
    total_iters is below the block's SP total iters; its IP output is the VDF
    input. When no such block exists (or the search crosses a slot boundary
    in a way that resets the chain) the default element is returned.

    Fix: the case ``not is_overflow and new_sub_slot`` previously fell off
    the end and implicitly returned None, contradicting the declared return
    type; it now returns the default element explicitly.
    """
    cc_input = ClassgroupElement.get_default_element()
    sp_total_iters = get_sp_total_iters(constants, is_overflow, ssi, sub_slot_data)
    ssd: Optional[SubSlotData] = None
    if is_overflow and new_sub_slot:
        # Overflow block at the start of a new slot: its SP lives two slots back.
        if sub_slot_idx >= 2:
            if sub_slots[sub_slot_idx - 2].cc_slot_end_info is None:
                for ssd_idx in reversed(range(0, sub_slot_idx - 1)):
                    ssd = sub_slots[ssd_idx]
                    if ssd.cc_slot_end_info is not None:
                        ssd = sub_slots[ssd_idx + 1]
                        break
                    if not (ssd.total_iters > sp_total_iters):
                        break
                if ssd and ssd.cc_ip_vdf_info is not None:
                    if ssd.total_iters < sp_total_iters:
                        cc_input = ssd.cc_ip_vdf_info.output
        return cc_input
    elif not is_overflow and not new_sub_slot:
        # Same slot: chain from the last block infused before the SP.
        for ssd_idx in reversed(range(0, sub_slot_idx)):
            ssd = sub_slots[ssd_idx]
            if ssd.cc_slot_end_info is not None:
                ssd = sub_slots[ssd_idx + 1]
                break
            if not (ssd.total_iters > sp_total_iters):
                break
        assert ssd is not None
        if ssd.cc_ip_vdf_info is not None:
            if ssd.total_iters < sp_total_iters:
                cc_input = ssd.cc_ip_vdf_info.output
        return cc_input
    elif not new_sub_slot and is_overflow:
        # Overflow block mid-slot: may look back across one slot boundary,
        # but two boundaries mean the chain restarted from the default element.
        slots_seen = 0
        for ssd_idx in reversed(range(0, sub_slot_idx)):
            ssd = sub_slots[ssd_idx]
            if ssd.cc_slot_end_info is not None:
                slots_seen += 1
                if slots_seen == 2:
                    return ClassgroupElement.get_default_element()
            if ssd.cc_slot_end_info is None and not (ssd.total_iters > sp_total_iters):
                break
        assert ssd is not None
        if ssd.cc_ip_vdf_info is not None:
            if ssd.total_iters < sp_total_iters:
                cc_input = ssd.cc_ip_vdf_info.output
        return cc_input
    # Non-overflow block at the start of a new sub slot: SP VDF starts from
    # the default element (previously an implicit None fall-through).
    return cc_input
def _validate_recent_blocks(constants_dict: Dict, recent_chain_bytes: bytes, summaries_bytes: List[bytes]) -> bool:
    """Validate the recent-chain portion of a weight proof.

    Replays difficulty / sub-slot-iters adjustments from the summaries, then
    walks the recent chain: once enough context has accumulated, the last
    ~100 blocks get full header validation while earlier blocks only get a
    proof-of-space check. Returns True when every block passes.
    """
    constants, summaries = bytes_to_vars(constants_dict, summaries_bytes)
    recent_chain: RecentChainData = RecentChainData.from_bytes(recent_chain_bytes)
    sub_blocks = BlockCache({})
    first_ses_idx = _get_ses_idx(recent_chain.recent_chain_data)
    ses_idx = len(summaries) - len(first_ses_idx)
    ssi: uint64 = constants.SUB_SLOT_ITERS_STARTING
    diff: Optional[uint64] = constants.DIFFICULTY_STARTING
    last_blocks_to_validate = 100  # todo remove cap after benchmarks
    # Bring ssi/diff up to date for the start of the recent chain.
    for summary in summaries[:ses_idx]:
        if summary.new_sub_slot_iters is not None:
            ssi = summary.new_sub_slot_iters
        if summary.new_difficulty is not None:
            diff = summary.new_difficulty
    ses_blocks, sub_slots, transaction_blocks = 0, 0, 0
    challenge, prev_challenge = None, None
    tip_height = recent_chain.recent_chain_data[-1].height
    prev_block_record = None
    deficit = uint8(0)
    for idx, block in enumerate(recent_chain.recent_chain_data):
        required_iters = uint64(0)
        overflow = False
        ses = False
        height = block.height
        for sub_slot in block.finished_sub_slots:
            prev_challenge = challenge
            challenge = sub_slot.challenge_chain.get_hash()
            deficit = sub_slot.reward_chain.deficit
            if sub_slot.challenge_chain.subepoch_summary_hash is not None:
                ses = True
                assert summaries[ses_idx].get_hash() == sub_slot.challenge_chain.subepoch_summary_hash
                ses_idx += 1
            if sub_slot.challenge_chain.new_sub_slot_iters is not None:
                ssi = sub_slot.challenge_chain.new_sub_slot_iters
            if sub_slot.challenge_chain.new_difficulty is not None:
                diff = sub_slot.challenge_chain.new_difficulty
        # Validation needs two challenges in hand; until then only bookkeeping runs.
        if (challenge is not None) and (prev_challenge is not None):
            overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index)
            deficit = get_deficit(constants, deficit, prev_block_record, overflow, len(block.finished_sub_slots))
            log.debug(f"wp, validate block {block.height}")
            if sub_slots > 2 and transaction_blocks > 11 and (tip_height - block.height < last_blocks_to_validate):
                required_iters, error = validate_finished_header_block(
                    constants, sub_blocks, block, False, diff, ssi, ses_blocks > 2
                )
                if error is not None:
                    log.error(f"block {block.header_hash} failed validation {error}")
                    return False
            else:
                required_iters = _validate_pospace_recent_chain(
                    constants, block, challenge, diff, overflow, prev_challenge
                )
                if required_iters is None:
                    return False
        curr_block_ses = None if not ses else summaries[ses_idx - 1]
        block_record = header_block_to_sub_block_record(
            constants, required_iters, block, ssi, overflow, deficit, height, curr_block_ses
        )
        log.debug(f"add block {block_record.height} to tmp sub blocks")
        sub_blocks.add_block_record(block_record)
        if block.first_in_sub_slot:
            sub_slots += 1
        if block.is_transaction_block:
            transaction_blocks += 1
        if ses:
            ses_blocks += 1
        prev_block_record = block_record
    return True
def _validate_pospace_recent_chain(
    constants: ConsensusConstants,
    block: HeaderBlock,
    challenge: bytes32,
    diff: uint64,
    overflow: bool,
    prev_challenge: bytes32,
):
    """Verify a recent-chain block's proof of space.

    Returns the required iterations derived from the proof quality, or None
    when the proof does not verify.
    """
    if block.reward_chain_block.challenge_chain_sp_vdf is None:
        # Edge case of first sp (start of slot), where sp_iters == 0
        cc_sp_hash: bytes32 = challenge
    else:
        cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
    assert cc_sp_hash is not None
    # Overflow blocks answer the previous slot's challenge.
    q_str = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
        constants,
        challenge if not overflow else prev_challenge,
        cc_sp_hash,
    )
    if q_str is None:
        log.error(f"could not verify proof of space block {block.height} {overflow}")
        return None
    required_iters = calculate_iterations_quality(
        constants.DIFFICULTY_CONSTANT_FACTOR,
        q_str,
        block.reward_chain_block.proof_of_space.size,
        diff,
        cc_sp_hash,
    )
    return required_iters
def __validate_pospace(
    constants: ConsensusConstants,
    segment: SubEpochChallengeSegment,
    idx: int,
    curr_diff: uint64,
    ses: Optional[SubEpochSummary],
    first_in_sub_epoch: bool,
) -> Optional[uint64]:
    """Verify the proof of space of the challenge block at ``segment.sub_slots[idx]``.

    Returns the required iterations computed from the proof quality, or None
    when verification fails.
    """
    if first_in_sub_epoch and segment.sub_epoch_n == 0 and idx == 0:
        # Genesis: there is no previous sub slot to hash.
        cc_sub_slot_hash = constants.GENESIS_CHALLENGE
    else:
        cc_sub_slot_hash = __get_cc_sub_slot(segment.sub_slots, idx, ses).get_hash()
    sub_slot_data: SubSlotData = segment.sub_slots[idx]
    if sub_slot_data.signage_point_index and is_overflow_block(constants, sub_slot_data.signage_point_index):
        # Overflow blocks respond to the previous slot's challenge.
        curr_slot = segment.sub_slots[idx - 1]
        assert curr_slot.cc_slot_end_info
        challenge = curr_slot.cc_slot_end_info.challenge
    else:
        challenge = cc_sub_slot_hash
    if sub_slot_data.cc_sp_vdf_info is None:
        # SP at the very start of the slot: use the slot hash itself.
        cc_sp_hash = cc_sub_slot_hash
    else:
        cc_sp_hash = sub_slot_data.cc_sp_vdf_info.output.get_hash()
    # validate proof of space
    assert sub_slot_data.proof_of_space is not None
    q_str = sub_slot_data.proof_of_space.verify_and_get_quality_string(
        constants,
        challenge,
        cc_sp_hash,
    )
    if q_str is None:
        log.error("could not verify proof of space")
        return None
    return calculate_iterations_quality(
        constants.DIFFICULTY_CONSTANT_FACTOR,
        q_str,
        sub_slot_data.proof_of_space.size,
        curr_diff,
        cc_sp_hash,
    )
def __get_rc_sub_slot(
    constants: ConsensusConstants,
    segment: SubEpochChallengeSegment,
    summaries: List[SubEpochSummary],
    curr_ssi: uint64,
) -> RewardChainSubSlot:
    """Recreate the RewardChainSubSlot that closed the previous sub epoch.

    Uses the first segment of a sub epoch plus the previous sub epoch's
    summary to rebuild the challenge chain sub slot and wrap it in a reward
    chain sub slot whose hash can be checked against the summary.
    """
    ses = summaries[uint32(segment.sub_epoch_n - 1)]
    # find first challenge in sub epoch
    first_idx = None
    first = None
    for idx, curr in enumerate(segment.sub_slots):
        if curr.cc_slot_end is None:
            first_idx = idx
            first = curr
            break
    # NOTE(review): `assert first_idx` is falsy when first_idx == 0; presumably
    # the first segment always starts with at least one slot end — confirm.
    assert first_idx
    idx = first_idx
    slots = segment.sub_slots
    # number of slots to look for
    slots_n = 1
    assert first
    assert first.signage_point_index is not None
    if is_overflow_block(constants, first.signage_point_index):
        # Overflow challenge block: its slot end lies one slot further back.
        if idx >= 2 and slots[idx - 2].cc_slot_end is None:
            slots_n = 2
    new_diff = None if ses is None else ses.new_difficulty
    new_ssi = None if ses is None else ses.new_sub_slot_iters
    ses_hash = None if ses is None else ses.get_hash()
    overflow = is_overflow_block(constants, first.signage_point_index)
    if overflow:
        # Two consecutive slot ends before an overflow block: the summary
        # belongs to an earlier slot, so this sub slot carries none of it.
        if idx >= 2 and slots[idx - 2].cc_slot_end is not None and slots[idx - 1].cc_slot_end is not None:
            ses_hash = None
            new_ssi = None
            new_diff = None
    sub_slot = slots[idx]
    # Walk backwards to the slot end that closes the previous sub epoch.
    while True:
        if sub_slot.cc_slot_end:
            slots_n -= 1
            if slots_n == 0:
                break
        idx -= 1
        sub_slot = slots[idx]
    icc_sub_slot_hash: Optional[bytes32] = None
    assert sub_slot is not None
    assert sub_slot.cc_slot_end_info is not None
    assert segment.rc_slot_end_info is not None
    if idx != 0:
        # Non-first slot ends are stored compacted; rebuild the full-slot VDFInfo.
        cc_vdf_info = VDFInfo(sub_slot.cc_slot_end_info.challenge, curr_ssi, sub_slot.cc_slot_end_info.output)
        if sub_slot.icc_slot_end_info is not None:
            icc_slot_end_info = VDFInfo(
                sub_slot.icc_slot_end_info.challenge, curr_ssi, sub_slot.icc_slot_end_info.output
            )
            icc_sub_slot_hash = icc_slot_end_info.get_hash()
    else:
        cc_vdf_info = sub_slot.cc_slot_end_info
        if sub_slot.icc_slot_end_info is not None:
            icc_sub_slot_hash = sub_slot.icc_slot_end_info.get_hash()
    cc_sub_slot = ChallengeChainSubSlot(
        cc_vdf_info,
        icc_sub_slot_hash,
        ses_hash,
        new_ssi,
        new_diff,
    )
    rc_sub_slot = RewardChainSubSlot(
        segment.rc_slot_end_info,
        cc_sub_slot.get_hash(),
        icc_sub_slot_hash,
        constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK,
    )
    return rc_sub_slot
def __get_cc_sub_slot(sub_slots: List[SubSlotData], idx, ses: Optional[SubEpochSummary]) -> ChallengeChainSubSlot:
    """Rebuild the ChallengeChainSubSlot that precedes index *idx*.

    Scans backwards from *idx* for the nearest entry carrying a challenge
    chain slot end VDF and reconstitutes the sub slot from it, attaching the
    summary fields when *ses* is provided.
    """
    prev_slot: Optional[SubSlotData] = None
    for candidate_idx in reversed(range(0, idx)):
        prev_slot = sub_slots[candidate_idx]
        if prev_slot.cc_slot_end_info is not None:
            break
    assert prev_slot is not None
    assert prev_slot.cc_slot_end_info is not None
    icc_info = prev_slot.icc_slot_end_info
    icc_hash: Optional[bytes32] = icc_info.get_hash() if icc_info is not None else None
    if ses is not None:
        ses_hash, new_ssi, new_diff = ses.get_hash(), ses.new_sub_slot_iters, ses.new_difficulty
    else:
        ses_hash, new_ssi, new_diff = None, None, None
    return ChallengeChainSubSlot(prev_slot.cc_slot_end_info, icc_hash, ses_hash, new_ssi, new_diff)
def _get_curr_diff_ssi(constants: ConsensusConstants, idx, summaries):
    """Return the (difficulty, sub slot iters) in effect for sub epoch *idx*.

    Walks the summaries before *idx* backwards to the most recent one that
    changed the sub slot iters; that summary also carries the matching
    difficulty. Falls back to the chain-start constants.
    """
    for summary in reversed(summaries[:idx]):
        if summary.new_sub_slot_iters is not None:
            return summary.new_difficulty, summary.new_sub_slot_iters
    return constants.DIFFICULTY_STARTING, constants.SUB_SLOT_ITERS_STARTING
def vars_to_bytes(constants, summaries, weight_proof):
    """Serialize constants, summaries and a weight proof for worker processes.

    Returns ``(constants_dict, summary_bytes, segment_bytes, recent_chain_bytes)``,
    the inverse of :func:`bytes_to_vars` for the first two components.
    """
    serialized_constants = recurse_jsonify(dataclasses.asdict(constants))
    recent_chain_bytes = bytes(RecentChainData(weight_proof.recent_chain_data))
    segment_bytes = bytes(SubEpochSegments(weight_proof.sub_epoch_segments))
    summary_bytes = [bytes(summary) for summary in summaries]
    return serialized_constants, summary_bytes, segment_bytes, recent_chain_bytes
def bytes_to_vars(constants_dict, summaries_bytes):
    """Deserialize constants and sub epoch summaries produced by vars_to_bytes."""
    constants: ConsensusConstants = dataclass_from_dict(ConsensusConstants, constants_dict)
    summaries = [SubEpochSummary.from_bytes(raw) for raw in summaries_bytes]
    return constants, summaries
def _get_last_ses_hash(
    constants: ConsensusConstants, recent_reward_chain: List[HeaderBlock]
) -> Tuple[Optional[bytes32], uint32]:
    """Return the hash and height of the last sub epoch summary in the chain.

    Scans backwards for the most recent sub-epoch boundary block, then scans
    forward from it for the first block whose finished sub slots carry a
    summary hash. Returns ``(None, 0)`` when no summary is found.
    """
    for idx, block in enumerate(reversed(recent_reward_chain)):
        if (block.reward_chain_block.height % constants.SUB_EPOCH_BLOCKS) == 0:
            # Reuse idx as a forward index from here on (intentional rebind).
            idx = len(recent_reward_chain) - 1 - idx  # reverse
            # find first block after sub slot end
            while idx < len(recent_reward_chain):
                curr = recent_reward_chain[idx]
                if len(curr.finished_sub_slots) > 0:
                    for slot in curr.finished_sub_slots:
                        if slot.challenge_chain.subepoch_summary_hash is not None:
                            return (
                                slot.challenge_chain.subepoch_summary_hash,
                                curr.reward_chain_block.height,
                            )
                idx += 1
    return None, uint32(0)
def _get_ses_idx(recent_reward_chain: List[HeaderBlock]) -> List[int]:
    """Return the indices of blocks whose finished sub slots carry a sub epoch summary.

    A block index is emitted once per summary-bearing slot, mirroring the
    original append-per-slot behavior.
    """
    return [
        idx
        for idx, block in enumerate(recent_reward_chain)
        for slot in block.finished_sub_slots
        if slot.challenge_chain.subepoch_summary_hash is not None
    ]
def get_deficit(
    constants: ConsensusConstants,
    curr_deficit: uint8,
    prev_block: BlockRecord,
    overflow: bool,
    num_finished_sub_slots: int,
) -> uint8:
    """Compute the challenge block deficit after this block.

    With a previous block available the full consensus calculation is used;
    otherwise the current deficit is decremented unless it is already 0 or an
    overflow block sits at the maximum deficit.
    """
    if prev_block is not None:
        return calculate_deficit(constants, uint32(prev_block.height + 1), prev_block, overflow, num_finished_sub_slots)
    keep_deficit = curr_deficit < 1 or (overflow and curr_deficit == constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK)
    return curr_deficit if keep_deficit else curr_deficit - 1
def get_sp_total_iters(constants: ConsensusConstants, is_overflow: bool, ssi: uint64, sub_slot_data: SubSlotData):
    """Return the total iterations at this entry's signage point.

    Derived from the entry's total_iters minus its infusion-point iterations
    (the slot start), plus the SP offset; overflow blocks take their SP from
    the previous slot, one full sub slot earlier.
    """
    assert sub_slot_data.cc_ip_vdf_info is not None
    assert sub_slot_data.total_iters is not None
    assert sub_slot_data.signage_point_index is not None
    ip_iters: uint64 = sub_slot_data.cc_ip_vdf_info.number_of_iterations
    sp_iters: uint64 = calculate_sp_iters(constants, ssi, sub_slot_data.signage_point_index)
    slot_start_total_iters = uint128(sub_slot_data.total_iters - ip_iters)
    if is_overflow:
        slot_start_total_iters = uint128(slot_start_total_iters - ssi)
    return slot_start_total_iters + sp_iters
def blue_boxed_end_of_slot(sub_slot: EndOfSubSlotBundle):
    """True iff this end-of-slot bundle was fully compacted ("blue boxed").

    Requires the challenge chain slot proof to be normalized to identity and,
    when an infused challenge chain proof exists, that one as well.
    """
    proofs = sub_slot.proofs
    if not proofs.challenge_chain_slot_proof.normalized_to_identity:
        return False
    icc_proof = proofs.infused_challenge_chain_slot_proof
    if icc_proof is None:
        return True
    if icc_proof.normalized_to_identity:
        return True
    return False
def validate_sub_epoch_sampling(rng, sub_epoch_weight_list, weight_proof):
    """Check the proof carries segments for every sub epoch the RNG demands.

    Re-derives the sampled weights from *rng* and the tip, maps them to
    sub epoch indices via the cumulative weight list, then crosses off each
    expected sub epoch that the proof actually provides segments for.
    Returns True when nothing expected is missing.
    """
    tip_weight = weight_proof.recent_chain_data[-1].weight
    weights_to_check = _get_weights_for_sampling(rng, tip_weight, weight_proof.recent_chain_data)
    expected: dict[int, bool] = {}
    for idx in range(1, len(sub_epoch_weight_list)):
        prev_weight, this_weight = sub_epoch_weight_list[idx - 1], sub_epoch_weight_list[idx]
        if _sample_sub_epoch(prev_weight, this_weight, weights_to_check):
            expected[idx - 1] = True
            if len(expected) == WeightProofHandler.MAX_SAMPLES:
                break
    last_seen_sub_epoch = -1
    for segment in weight_proof.sub_epoch_segments:
        if last_seen_sub_epoch < segment.sub_epoch_n:
            expected.pop(segment.sub_epoch_n, None)
        last_seen_sub_epoch = segment.sub_epoch_n
    return len(expected) == 0
def map_segments_by_sub_epoch(sub_epoch_segments) -> Dict[int, List[SubEpochChallengeSegment]]:
    """Group an ordered segment list into a dict keyed by sub epoch number.

    A new key is opened each time ``sub_epoch_n`` increases; segments are
    appended to the most recently opened key, preserving input order.
    """
    grouped: Dict[int, List[SubEpochChallengeSegment]] = {}
    last_sub_epoch = -1
    for segment in sub_epoch_segments:
        if last_sub_epoch < segment.sub_epoch_n:
            last_sub_epoch = segment.sub_epoch_n
            grouped[last_sub_epoch] = []
        grouped[last_sub_epoch].append(segment)
    return grouped
def validate_total_iters(
    segment: SubEpochChallengeSegment,
    sub_slot_data_idx,
    expected_sub_slot_iters: uint64,
    finished_sub_slots_since_prev: int,
    prev_b: SubSlotData,
    prev_sub_slot_data_iters,
    genesis,
) -> bool:
    """Check an entry's claimed total_iters against an independent recomputation.

    Recomputes total iterations from the previous block (or from slot counts
    at genesis) and compares with the value stored in the sub slot data.
    """
    sub_slot_data = segment.sub_slots[sub_slot_data_idx]
    if genesis:
        # From chain start: only whole empty slots precede this block.
        total_iters: uint128 = uint128(expected_sub_slot_iters * finished_sub_slots_since_prev)
    elif segment.sub_slots[sub_slot_data_idx - 1].is_end_of_slot():
        assert prev_b.total_iters
        assert prev_b.cc_ip_vdf_info
        total_iters = prev_b.total_iters
        # Add the rest of the slot of prev_b
        total_iters = uint128(total_iters + prev_sub_slot_data_iters - prev_b.cc_ip_vdf_info.number_of_iterations)
        # Add other empty slots
        total_iters = uint128(total_iters + (expected_sub_slot_iters * (finished_sub_slots_since_prev - 1)))
    else:
        # Slot iters is guaranteed to be the same for header_block and prev_b
        # This takes the beginning of the slot, and adds ip_iters
        assert prev_b.cc_ip_vdf_info
        assert prev_b.total_iters
        total_iters = uint128(prev_b.total_iters - prev_b.cc_ip_vdf_info.number_of_iterations)
        total_iters = uint128(total_iters + sub_slot_data.cc_ip_vdf_info.number_of_iterations)
    return total_iters == sub_slot_data.total_iters
| [
"[email protected]"
] | |
a6feea4e0041758fbcfcdf08169d6272e1d4ea41 | bdba52c756cc09f192b720ea318510c265665dcd | /swagger_client/api/character_api.py | c1e1cd12abcec0f72554fd46436981b2dad6fbd7 | [
"MIT"
] | permissive | rseichter/bootini-star | 6b38195890f383615cc2b422c365ac28c5b87292 | a80258f01a05e4df38748b8cb47dfadabd42c20d | refs/heads/master | 2020-03-14T03:17:11.385048 | 2018-06-28T17:23:23 | 2018-06-28T17:23:23 | 131,416,504 | 0 | 0 | MIT | 2018-05-01T14:26:04 | 2018-04-28T14:28:46 | Python | UTF-8 | Python | false | false | 94,170 | py | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online # noqa: E501
OpenAPI spec version: 0.8.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class CharacterApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_characters_character_id(self, character_id, **kwargs): # noqa: E501
"""Get character's public information # noqa: E501
Public information about a character --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: GetCharactersCharacterIdOk
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_with_http_info(character_id, **kwargs) # noqa: E501
return data
    def get_characters_character_id_with_http_info(self, character_id, **kwargs):  # noqa: E501
        """Get character's public information  # noqa: E501

        Public information about a character --- This route is cached for up to 3600 seconds # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_characters_character_id_with_http_info(character_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int character_id: An EVE character ID (required)
        :param str datasource: The server name you would like data from
        :param str user_agent: Client identifier, takes precedence over headers
        :param str x_user_agent: Client identifier, takes precedence over User-Agent
        :return: GetCharactersCharacterIdOk
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of accepted keyword arguments; anything else is a TypeError.
        all_params = ['character_id', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_characters_character_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'character_id' is set
        if ('character_id' not in params or
                params['character_id'] is None):
            raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id`")  # noqa: E501
        if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
            raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id`, must be a value greater than or equal to `1`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'character_id' in params:
            path_params['character_id'] = params['character_id']  # noqa: E501
        query_params = []
        if 'datasource' in params:
            query_params.append(('datasource', params['datasource']))  # noqa: E501
        if 'user_agent' in params:
            query_params.append(('user_agent', params['user_agent']))  # noqa: E501
        header_params = {}
        if 'x_user_agent' in params:
            header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        # NOTE(review): 'async=' as a keyword argument is a SyntaxError on
        # Python >= 3.7 (reserved keyword); this generated client presumably
        # targets Python <= 3.6 — confirm, or regenerate with 'async_req'.
        return self.api_client.call_api(
            '/v4/characters/{character_id}/', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='GetCharactersCharacterIdOk',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_characters_character_id_agents_research(self, character_id, **kwargs): # noqa: E501
"""Get agents research # noqa: E501
Return a list of agents research information for a character. The formula for finding the current research points with an agent is: currentPoints = remainderPoints + pointsPerDay * days(currentTime - researchStartDate) --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_agents_research(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdAgentsResearch200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_agents_research_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_agents_research_with_http_info(character_id, **kwargs) # noqa: E501
return data
    def get_characters_character_id_agents_research_with_http_info(self, character_id, **kwargs):  # noqa: E501
        """Get agents research  # noqa: E501

        Return a list of agents research information for a character. The formula for finding the current research points with an agent is: currentPoints = remainderPoints + pointsPerDay * days(currentTime - researchStartDate) --- This route is cached for up to 3600 seconds # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_characters_character_id_agents_research_with_http_info(character_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int character_id: An EVE character ID (required)
        :param str datasource: The server name you would like data from
        :param str token: Access token to use if unable to set a header
        :param str user_agent: Client identifier, takes precedence over headers
        :param str x_user_agent: Client identifier, takes precedence over User-Agent
        :return: list[GetCharactersCharacterIdAgentsResearch200Ok]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of accepted keyword arguments; anything else is a TypeError.
        all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_characters_character_id_agents_research" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'character_id' is set
        if ('character_id' not in params or
                params['character_id'] is None):
            raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_agents_research`")  # noqa: E501
        if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
            raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_agents_research`, must be a value greater than or equal to `1`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'character_id' in params:
            path_params['character_id'] = params['character_id']  # noqa: E501
        query_params = []
        if 'datasource' in params:
            query_params.append(('datasource', params['datasource']))  # noqa: E501
        if 'token' in params:
            query_params.append(('token', params['token']))  # noqa: E501
        if 'user_agent' in params:
            query_params.append(('user_agent', params['user_agent']))  # noqa: E501
        header_params = {}
        if 'x_user_agent' in params:
            header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['evesso']  # noqa: E501
        # NOTE(review): 'async=' as a keyword argument is a SyntaxError on
        # Python >= 3.7 (reserved keyword); this generated client presumably
        # targets Python <= 3.6 — confirm, or regenerate with 'async_req'.
        return self.api_client.call_api(
            '/v1/characters/{character_id}/agents_research/', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[GetCharactersCharacterIdAgentsResearch200Ok]',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_characters_character_id_blueprints(self, character_id, **kwargs): # noqa: E501
"""Get blueprints # noqa: E501
Return a list of blueprints the character owns --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_blueprints(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param int page: Which page of results to return
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdBlueprints200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_blueprints_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_blueprints_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_blueprints_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get blueprints # noqa: E501
Return a list of blueprints the character owns --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_blueprints_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param int page: Which page of results to return
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdBlueprints200Ok]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'page', 'token', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_blueprints" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_blueprints`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_blueprints`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['evesso'] # noqa: E501
return self.api_client.call_api(
'/v2/characters/{character_id}/blueprints/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetCharactersCharacterIdBlueprints200Ok]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_chat_channels(self, character_id, **kwargs): # noqa: E501
"""Get chat channels # noqa: E501
Return chat channels that a character is the owner or operator of --- This route is cached for up to 300 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_chat_channels(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdChatChannels200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_chat_channels_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_chat_channels_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_chat_channels_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get chat channels # noqa: E501
Return chat channels that a character is the owner or operator of --- This route is cached for up to 300 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_chat_channels_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdChatChannels200Ok]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_chat_channels" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_chat_channels`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_chat_channels`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['evesso'] # noqa: E501
return self.api_client.call_api(
'/v1/characters/{character_id}/chat_channels/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetCharactersCharacterIdChatChannels200Ok]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_corporationhistory(self, character_id, **kwargs): # noqa: E501
"""Get corporation history # noqa: E501
Get a list of all the corporations a character has been a member of --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_corporationhistory(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdCorporationhistory200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_corporationhistory_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_corporationhistory_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_corporationhistory_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get corporation history # noqa: E501
Get a list of all the corporations a character has been a member of --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_corporationhistory_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdCorporationhistory200Ok]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_corporationhistory" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_corporationhistory`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_corporationhistory`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1/characters/{character_id}/corporationhistory/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetCharactersCharacterIdCorporationhistory200Ok]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_fatigue(self, character_id, **kwargs): # noqa: E501
"""Get jump fatigue # noqa: E501
Return a character's jump activation and fatigue information --- This route is cached for up to 300 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_fatigue(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: GetCharactersCharacterIdFatigueOk
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_fatigue_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_fatigue_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_fatigue_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get jump fatigue # noqa: E501
Return a character's jump activation and fatigue information --- This route is cached for up to 300 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_fatigue_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: GetCharactersCharacterIdFatigueOk
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_fatigue" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_fatigue`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_fatigue`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['evesso'] # noqa: E501
return self.api_client.call_api(
'/v1/characters/{character_id}/fatigue/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetCharactersCharacterIdFatigueOk', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_medals(self, character_id, **kwargs): # noqa: E501
"""Get medals # noqa: E501
Return a list of medals the character has --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_medals(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdMedals200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_medals_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_medals_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_medals_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get medals # noqa: E501
Return a list of medals the character has --- This route is cached for up to 3600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_medals_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdMedals200Ok]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_medals" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_medals`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_medals`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['evesso'] # noqa: E501
return self.api_client.call_api(
'/v1/characters/{character_id}/medals/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetCharactersCharacterIdMedals200Ok]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_notifications(self, character_id, **kwargs): # noqa: E501
"""Get character notifications # noqa: E501
Return character notifications --- This route is cached for up to 600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_notifications(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdNotifications200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_notifications_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_notifications_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_notifications_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get character notifications # noqa: E501
Return character notifications --- This route is cached for up to 600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_notifications_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdNotifications200Ok]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_notifications" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_notifications`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_notifications`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['evesso'] # noqa: E501
return self.api_client.call_api(
'/v2/characters/{character_id}/notifications/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetCharactersCharacterIdNotifications200Ok]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_notifications_contacts(self, character_id, **kwargs): # noqa: E501
"""Get new contact notifications # noqa: E501
Return notifications about having been added to someone's contact list --- This route is cached for up to 600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_notifications_contacts(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdNotificationsContacts200Ok]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_characters_character_id_notifications_contacts_with_http_info(character_id, **kwargs) # noqa: E501
else:
(data) = self.get_characters_character_id_notifications_contacts_with_http_info(character_id, **kwargs) # noqa: E501
return data
def get_characters_character_id_notifications_contacts_with_http_info(self, character_id, **kwargs): # noqa: E501
"""Get new contact notifications # noqa: E501
Return notifications about having been added to someone's contact list --- This route is cached for up to 600 seconds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_characters_character_id_notifications_contacts_with_http_info(character_id, async=True)
>>> result = thread.get()
:param async bool
:param int character_id: An EVE character ID (required)
:param str datasource: The server name you would like data from
:param str token: Access token to use if unable to set a header
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: list[GetCharactersCharacterIdNotificationsContacts200Ok]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_characters_character_id_notifications_contacts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'character_id' is set
if ('character_id' not in params or
params['character_id'] is None):
raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_notifications_contacts`") # noqa: E501
if 'character_id' in params and params['character_id'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_notifications_contacts`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'character_id' in params:
path_params['character_id'] = params['character_id'] # noqa: E501
query_params = []
if 'datasource' in params:
query_params.append(('datasource', params['datasource'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'user_agent' in params:
query_params.append(('user_agent', params['user_agent'])) # noqa: E501
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['evesso'] # noqa: E501
return self.api_client.call_api(
'/v1/characters/{character_id}/notifications/contacts/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetCharactersCharacterIdNotificationsContacts200Ok]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_characters_character_id_portrait(self, character_id, **kwargs):  # noqa: E501
    """Get character portraits  # noqa: E501

    Get portrait urls for a character --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_portrait(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetCharactersCharacterIdPortraitOk
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers only want the deserialized payload,
    # not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether running synchronously (returns the data) or with async=True
    # (returns the request thread), the *_with_http_info variant already
    # yields exactly the right object, so one delegating call suffices.
    return self.get_characters_character_id_portrait_with_http_info(character_id, **kwargs)  # noqa: E501
def get_characters_character_id_portrait_with_http_info(self, character_id, **kwargs):  # noqa: E501
    """Get character portraits  # noqa: E501

    Get portrait urls for a character --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_portrait_with_http_info(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetCharactersCharacterIdPortraitOk
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the
    # required/range constraints, assemble the request pieces, then
    # delegate to ApiClient.call_api.
    all_params = ['character_id', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose -- it captures
    # {self, character_id, kwargs, all_params}; binding any extra local
    # above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_characters_character_id_portrait" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_id' is set
    if ('character_id' not in params or
            params['character_id'] is None):
        raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_portrait`")  # noqa: E501

    if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_portrait`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'character_id' in params:
        path_params['character_id'] = params['character_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- public endpoint, no SSO scope required.
    auth_settings = []  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v2/characters/{character_id}/portrait/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GetCharactersCharacterIdPortraitOk',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_characters_character_id_roles(self, character_id, **kwargs):  # noqa: E501
    """Get character corporation roles  # noqa: E501

    Returns a character's corporation roles --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_roles(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetCharactersCharacterIdRolesOk
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.get_characters_character_id_roles_with_http_info(character_id, **kwargs)  # noqa: E501
def get_characters_character_id_roles_with_http_info(self, character_id, **kwargs):  # noqa: E501
    """Get character corporation roles  # noqa: E501

    Returns a character's corporation roles --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_roles_with_http_info(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: GetCharactersCharacterIdRolesOk
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the
    # required/range constraints, assemble the request pieces, then
    # delegate to ApiClient.call_api.
    all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose; binding any
    # extra local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_characters_character_id_roles" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_id' is set
    if ('character_id' not in params or
            params['character_id'] is None):
        raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_roles`")  # noqa: E501

    if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_roles`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'character_id' in params:
        path_params['character_id'] = params['character_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- requires an EVE SSO token.
    auth_settings = ['evesso']  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v2/characters/{character_id}/roles/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GetCharactersCharacterIdRolesOk',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_characters_character_id_standings(self, character_id, **kwargs):  # noqa: E501
    """Get standings  # noqa: E501

    Return character standings from agents, NPC corporations, and factions --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_standings(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersCharacterIdStandings200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.get_characters_character_id_standings_with_http_info(character_id, **kwargs)  # noqa: E501
def get_characters_character_id_standings_with_http_info(self, character_id, **kwargs):  # noqa: E501
    """Get standings  # noqa: E501

    Return character standings from agents, NPC corporations, and factions --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_standings_with_http_info(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersCharacterIdStandings200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the
    # required/range constraints, assemble the request pieces, then
    # delegate to ApiClient.call_api.
    all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose; binding any
    # extra local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_characters_character_id_standings" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_id' is set
    if ('character_id' not in params or
            params['character_id'] is None):
        raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_standings`")  # noqa: E501

    if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_standings`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'character_id' in params:
        path_params['character_id'] = params['character_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- requires an EVE SSO token.
    auth_settings = ['evesso']  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v1/characters/{character_id}/standings/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[GetCharactersCharacterIdStandings200Ok]',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_characters_character_id_stats(self, character_id, **kwargs):  # noqa: E501
    """Yearly aggregate stats  # noqa: E501

    Returns aggregate yearly stats for a character --- This route is cached for up to 86400 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_stats(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersCharacterIdStats200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.get_characters_character_id_stats_with_http_info(character_id, **kwargs)  # noqa: E501
def get_characters_character_id_stats_with_http_info(self, character_id, **kwargs):  # noqa: E501
    """Yearly aggregate stats  # noqa: E501

    Returns aggregate yearly stats for a character --- This route is cached for up to 86400 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_stats_with_http_info(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersCharacterIdStats200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the
    # required/range constraints, assemble the request pieces, then
    # delegate to ApiClient.call_api.
    all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose; binding any
    # extra local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_characters_character_id_stats" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_id' is set
    if ('character_id' not in params or
            params['character_id'] is None):
        raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_stats`")  # noqa: E501

    if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_stats`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'character_id' in params:
        path_params['character_id'] = params['character_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- requires an EVE SSO token.
    auth_settings = ['evesso']  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v2/characters/{character_id}/stats/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[GetCharactersCharacterIdStats200Ok]',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_characters_character_id_titles(self, character_id, **kwargs):  # noqa: E501
    """Get character corporation titles  # noqa: E501

    Returns a character's titles --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_titles(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersCharacterIdTitles200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.get_characters_character_id_titles_with_http_info(character_id, **kwargs)  # noqa: E501
def get_characters_character_id_titles_with_http_info(self, character_id, **kwargs):  # noqa: E501
    """Get character corporation titles  # noqa: E501

    Returns a character's titles --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_character_id_titles_with_http_info(character_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersCharacterIdTitles200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the
    # required/range constraints, assemble the request pieces, then
    # delegate to ApiClient.call_api.
    all_params = ['character_id', 'datasource', 'token', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose; binding any
    # extra local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_characters_character_id_titles" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_id' is set
    if ('character_id' not in params or
            params['character_id'] is None):
        raise ValueError("Missing the required parameter `character_id` when calling `get_characters_character_id_titles`")  # noqa: E501

    if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `character_id` when calling `get_characters_character_id_titles`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'character_id' in params:
        path_params['character_id'] = params['character_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- requires an EVE SSO token.
    auth_settings = ['evesso']  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v1/characters/{character_id}/titles/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[GetCharactersCharacterIdTitles200Ok]',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_characters_names(self, character_ids, **kwargs):  # noqa: E501
    """Get character names  # noqa: E501

    Resolve a set of character IDs to character names --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_names(character_ids, async=True)
    >>> result = thread.get()

    :param async bool
    :param list[int] character_ids: A comma separated list of character IDs (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersNames200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.get_characters_names_with_http_info(character_ids, **kwargs)  # noqa: E501
def get_characters_names_with_http_info(self, character_ids, **kwargs):  # noqa: E501
    """Get character names  # noqa: E501

    Resolve a set of character IDs to character names --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_characters_names_with_http_info(character_ids, async=True)
    >>> result = thread.get()

    :param async bool
    :param list[int] character_ids: A comma separated list of character IDs (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[GetCharactersNames200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the
    # required/length constraints, assemble the request pieces, then
    # delegate to ApiClient.call_api.
    all_params = ['character_ids', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose; binding any
    # extra local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_characters_names" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_ids' is set
    if ('character_ids' not in params or
            params['character_ids'] is None):
        raise ValueError("Missing the required parameter `character_ids` when calling `get_characters_names`")  # noqa: E501

    # The endpoint accepts between 1 and 1000 IDs per request.
    if ('character_ids' in params and
            len(params['character_ids']) > 1000):
        raise ValueError("Invalid value for parameter `character_ids` when calling `get_characters_names`, number of items must be less than or equal to `1000`")  # noqa: E501
    if ('character_ids' in params and
            len(params['character_ids']) < 1):
        raise ValueError("Invalid value for parameter `character_ids` when calling `get_characters_names`, number of items must be greater than or equal to `1`")  # noqa: E501
    collection_formats = {}

    path_params = {}

    query_params = []
    if 'character_ids' in params:
        query_params.append(('character_ids', params['character_ids']))  # noqa: E501
        # The list of IDs is serialized as a single comma-separated value.
        collection_formats['character_ids'] = 'csv'  # noqa: E501
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- public endpoint, no SSO scope required.
    auth_settings = []  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v1/characters/names/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[GetCharactersNames200Ok]',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_characters_affiliation(self, characters, **kwargs):  # noqa: E501
    """Character affiliation  # noqa: E501

    Bulk lookup of character IDs to corporation, alliance and faction --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.post_characters_affiliation(characters, async=True)
    >>> result = thread.get()

    :param async bool
    :param list[int] characters: The character IDs to fetch affiliations for. All characters must exist, or none will be returned. (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[PostCharactersAffiliation200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.post_characters_affiliation_with_http_info(characters, **kwargs)  # noqa: E501
def post_characters_affiliation_with_http_info(self, characters, **kwargs):  # noqa: E501
    """Character affiliation  # noqa: E501

    Bulk lookup of character IDs to corporation, alliance and faction --- This route is cached for up to 3600 seconds  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.post_characters_affiliation_with_http_info(characters, async=True)
    >>> result = thread.get()

    :param async bool
    :param list[int] characters: The character IDs to fetch affiliations for. All characters must exist, or none will be returned. (required)
    :param str datasource: The server name you would like data from
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: list[PostCharactersAffiliation200Ok]
             If the method is called asynchronously,
             returns the request thread.
    """
    # swagger-codegen boilerplate: whitelist kwargs, validate the required
    # parameter, assemble the request pieces, then delegate to
    # ApiClient.call_api.
    all_params = ['characters', 'datasource', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is snapshotted exactly here on purpose; binding any
    # extra local above this line would leak it into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_characters_affiliation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'characters' is set
    if ('characters' not in params or
            params['characters'] is None):
        raise ValueError("Missing the required parameter `characters` when calling `post_characters_affiliation`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # POST request: the ID list is sent as the JSON request body.
    body_params = None
    if 'characters' in params:
        body_params = params['characters']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting -- public endpoint, no SSO scope required.
    auth_settings = []  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below only parses on Python <= 3.6; newer
    # swagger generators renamed this flag to `async_req`.
    return self.api_client.call_api(
        '/v1/characters/affiliation/', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[PostCharactersAffiliation200Ok]',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_characters_character_id_cspa(self, character_id, characters, **kwargs):  # noqa: E501
    """Calculate a CSPA charge cost  # noqa: E501

    Takes a source character ID in the url and a set of target character ID's in the body, returns a CSPA charge cost  ---   # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.post_characters_character_id_cspa(character_id, characters, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param list[int] characters: The target characters to calculate the charge for (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: float
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP metadata and return only the
    # deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread in
    # async mode and the data otherwise, so one delegating call suffices.
    return self.post_characters_character_id_cspa_with_http_info(character_id, characters, **kwargs)  # noqa: E501
def post_characters_character_id_cspa_with_http_info(self, character_id, characters, **kwargs):  # noqa: E501
    """Calculate a CSPA charge cost  # noqa: E501

    Takes a source character ID in the url and a set of target character ID's
    in the body, returns a CSPA charge cost  ---  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.post_characters_character_id_cspa_with_http_info(character_id, characters, async=True)
    >>> result = thread.get()

    :param async bool
    :param int character_id: An EVE character ID (required)
    :param list[int] characters: The target characters to calculate the charge for (required)
    :param str datasource: The server name you would like data from
    :param str token: Access token to use if unable to set a header
    :param str user_agent: Client identifier, takes precedence over headers
    :param str x_user_agent: Client identifier, takes precedence over User-Agent
    :return: float
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted kwargs: endpoint parameters plus the generic
    # api_client control flags shared by every generated method.
    all_params = ['character_id', 'characters', 'datasource', 'token', 'user_agent', 'x_user_agent']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments; validated kwargs are then
    # flattened into the same dict so everything is looked up uniformly.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_characters_character_id_cspa" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'character_id' is set
    if ('character_id' not in params or
            params['character_id'] is None):
        raise ValueError("Missing the required parameter `character_id` when calling `post_characters_character_id_cspa`")  # noqa: E501
    # verify the required parameter 'characters' is set
    if ('characters' not in params or
            params['characters'] is None):
        raise ValueError("Missing the required parameter `characters` when calling `post_characters_character_id_cspa`")  # noqa: E501

    # Range check mirrors the OpenAPI schema (character IDs start at 1).
    if 'character_id' in params and params['character_id'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `character_id` when calling `post_characters_character_id_cspa`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}

    # character_id is interpolated into the URL path.
    path_params = {}
    if 'character_id' in params:
        path_params['character_id'] = params['character_id']  # noqa: E501

    query_params = []
    if 'datasource' in params:
        query_params.append(('datasource', params['datasource']))  # noqa: E501
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    if 'user_agent' in params:
        query_params.append(('user_agent', params['user_agent']))  # noqa: E501

    header_params = {}
    if 'x_user_agent' in params:
        header_params['X-User-Agent'] = params['x_user_agent']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The target character list travels as the JSON request body.
    body_params = None
    if 'characters' in params:
        body_params = params['characters']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: this endpoint requires an EVE SSO token.
    auth_settings = ['evesso']  # noqa: E501

    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `async=` keyword argument below is a SyntaxError on modern interpreters
    # (upstream generators renamed it to `async_req`).  Confirm the targeted
    # Python version before modifying this generated module.
    return self.api_client.call_api(
        '/v4/characters/{character_id}/cspa/', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='float',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| [
"[email protected]"
] | |
6dd47cf9abf6588f76b33b1300c80b06fe34f86b | 304e75224229786ba64c6ef2124007c305019b23 | /src/easy/test_build_array_from_permutation.py | 8fd8efbd03f279c3c5d2f1ed987d934e5687eadc | [] | no_license | Takuma-Ikeda/other-LeetCode | 9179a8100e07d56138fd3f3f626951195e285da2 | 499616d07011bee730b9967e9861e341e62d606d | refs/heads/master | 2023-04-14T06:09:35.341039 | 2023-04-10T02:29:18 | 2023-04-10T02:29:18 | 226,260,312 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | import unittest
from answer.build_array_from_permutation import Solution
class TestSolution(unittest.TestCase):
def setUp(self):
self.nums = [
[0, 2, 1, 5, 3, 4],
[5, 0, 1, 2, 3, 4],
]
self.answers = [
[0, 1, 2, 4, 5, 3],
[4, 5, 0, 1, 2, 3],
]
def solution(self, i):
s = Solution()
result = s.buildArray(self.nums[i])
self.assertEqual(self.answers[i], result)
def test_solution0(self):
self.solution(0)
def test_solution1(self):
self.solution(1)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
d8cb4d738e3fca2d4ddb17040fa4fe5a789e0334 | 63e2bed7329c79bf67279f9071194c9cba88a82c | /SevOneApi/python-client/test/test_flow_falcon_visualization.py | 51a57732e471078c158cccc29b73d4aae5586ecf | [] | no_license | jsthomason/LearningPython | 12422b969dbef89578ed326852dd65f65ab77496 | 2f71223250b6a198f2736bcb1b8681c51aa12c03 | refs/heads/master | 2021-01-21T01:05:46.208994 | 2019-06-27T13:40:37 | 2019-06-27T13:40:37 | 63,447,703 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 977 | py | # coding: utf-8
"""
SevOne API Documentation
Supported endpoints by the new RESTful API # noqa: E501
OpenAPI spec version: 2.1.18, Hash: db562e6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.flow_falcon_visualization import FlowFalconVisualization # noqa: E501
from swagger_client.rest import ApiException
class TestFlowFalconVisualization(unittest.TestCase):
    """FlowFalconVisualization unit test stubs (swagger-codegen generated)."""

    def setUp(self):
        # No fixtures needed yet; kept for symmetry with other generated suites.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testFlowFalconVisualization(self):
        """Test FlowFalconVisualization"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.flow_falcon_visualization.FlowFalconVisualization()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
652e8c01463ca031788666db93024bbc761ec75d | 14856ffe01c711af7a41af0b1abf0378ba4ffde6 | /Python/Fundamentals/Fun_with_Functions.py | 4db600213841d74d4382c1514cc6f369abdc29a8 | [] | no_license | sharonanchel/coding-dojo | 9a8db24eec17b0ae0c220592e6864510297371c3 | d6c4a7efd0804353b27a49e16255984c4f4b7f2a | refs/heads/master | 2021-05-05T18:17:48.101853 | 2017-06-23T23:53:51 | 2017-06-23T23:53:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | # Odd/Even
def odd_even():
for i in range(1,2001):
if i%2 == 0:
type = 'even'
else:
type = 'odd'
print 'Number is',i, 'This is an',type,'number.'
odd_even()
# Multiply
def multiply(arr, num):
    # Scale every element of *arr* in place by *num*; the (mutated) input
    # list is also returned so calls can be chained.
    for index, value in enumerate(arr):
        arr[index] = value * num
    return arr
print multiply([2,4,10,16],5)
# Hacker Challenge
def layered_multiples(arr):
    # For each count n in *arr*, emit a sub-list containing n ones.
    # Idiom fix: list repetition ([1] * n) replaces the original inner
    # append-one-at-a-time loop; an empty input yields an empty result and
    # a count of 0 yields an empty sub-list, as before.
    result = []
    for count in arr:
        result.append([1] * count)
    return result
print layered_multiples(multiply([2,4,5],3))
| [
"[email protected]"
] | |
b0a496a2adad7d4299f3c94ceb3f5651a373a629 | ee8c4c954b7c1711899b6d2527bdb12b5c79c9be | /assessment2/amazon/run/core/controllers/manage.py | 436b9b54ecb5b87023cfad764e11bb94a803445a | [] | no_license | sqlconsult/byte | 02ac9899aebea4475614969b594bfe2992ffe29a | 548f6cb5038e927b54adca29caf02c981fdcecfc | refs/heads/master | 2021-01-25T14:45:42.120220 | 2018-08-11T23:45:31 | 2018-08-11T23:45:31 | 117,135,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | #!/usr/bin/env python3
from flask import Blueprint, Flask, render_template, request, url_for
# Blueprint grouping the /manage routes; the application factory registers it
# on the main Flask app.
controller = Blueprint('manage', __name__, url_prefix='/manage')

# TODO: scaffold route left disabled until the lookup logic is implemented.
# @controller.route('/<string:title>', methods=['GET'])
# def lookup(title):
#     if title == 'Republic':  # TODO 2
#         return render_template('republic.html')  # TODO 2
#     else:
#         pass
| [
"[email protected]"
] | |
79c1cfdd225efbe367313be720d75fd7941a44b2 | 4eebce0d0c1132aed8227325bd58faf61a4010c7 | /CONTEXT_178/d2.py | 7a83e4ac92f0948ab14911f4a674624665be9101 | [] | no_license | xu1718191411/AT_CODE_BEGINNER_SELECTION | 05836cfcc63dab2a0a82166c8f4c43c82b72686b | e4e412733d1a632ce6c33c739064fe036367899e | refs/heads/master | 2021-07-17T00:59:46.315645 | 2020-09-22T06:14:27 | 2020-09-22T06:14:27 | 214,153,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | def calculate(n):
    # DP table sized for the contest's maximum input (indices 0..2000);
    # entries below 3 stay 0.
    arr = [0 for i in range(2000 + 1)]
    # Base cases: n = 3, 4 and 5 each count exactly one way.
    arr[3] = 1
    arr[4] = 1
    arr[5] = 1
    for i in range(6, n + 1):
        # Recurrence arr[i] = arr[i-2] + arr[i-3] + arr[i-4], reduced modulo
        # 1_000_000_007 (presumably the modulus required by the problem).
        arr[i] = (arr[i - 2] + arr[i - 3] + arr[i - 4]) % (1000000000 + 7)
    # Prints (rather than returns) the answer for the requested n.
    # NOTE(review): n > 2000 would raise IndexError -- confirm the constraints.
    print(arr[n])
calculate(int(input())) | [
"[email protected]"
] | |
5038904bb1fd03747bf1c26a2daa2a87b5a5bcd8 | fee88a67d4706bddb8999ce2701315c5f62f6e78 | /onmt/modules/extensions/mlp/mlp_gelu.py | a1a1d998fa59cc0685f65e54bba7b2fe97e1aee0 | [
"MIT"
] | permissive | Dan-hbd/NMTGMinor | 5cade7d3b6de83cc45a618ab59420274bcd86f15 | 84e59ac8391ee78852d7c71afc60c3c8b8e3d44d | refs/heads/master | 2023-05-30T16:22:58.148920 | 2021-06-15T14:28:48 | 2021-06-15T14:28:48 | 372,408,488 | 0 | 0 | NOASSERTION | 2021-05-31T06:44:22 | 2021-05-31T06:44:22 | null | UTF-8 | Python | false | false | 15,164 | py | from copy import copy, deepcopy
import math
import torch
from torch import nn
import torch.nn.functional as F
import unittest
from time import time
import numpy as np
import random
import silu_cuda
try:
import apex.amp as amp
from apex.amp import half_function
except (ModuleNotFoundError, ImportError) as e:
amp = None
from ..optimized.compat import half_function
try:
from torch.cuda.amp import custom_fwd, custom_bwd
except (ModuleNotFoundError, ImportError) as e:
from ..optimized.compat import custom_fwd, custom_bwd
try:
import fused_mlp_relu
except (ModuleNotFoundError, ImportError) as e:
fused_mlp_relu = None
try:
import fused_mlp_agelu
except (ModuleNotFoundError, ImportError) as e:
fused_mlp_agelu = None
try:
import fused_mlp_gelu
except (ModuleNotFoundError, ImportError) as e:
fused_mlp_gelu = None
#
# class MlpReluFunction(torch.autograd.Function):
# @staticmethod
# @custom_fwd(cast_inputs=torch.float16)
# def forward(ctx, activation, *args):
# output = fused_mlp.forward(args)
# ctx.save_for_backward(*args)
# ctx.outputs = output
# return output[0]
#
# @staticmethod
# @custom_bwd
# def backward(ctx, grad_o):
# grads = fused_mlp.backward(grad_o, ctx.outputs, ctx.saved_tensors)
# del ctx.outputs
# return (None, *grads)
#
#
class MlpReluFunction(torch.autograd.Function):
    # Autograd bridge to the fused_mlp_relu CUDA extension: one fused kernel
    # for the whole (linear -> ReLU -> dropout) stack.
    @staticmethod
    @custom_fwd(cast_inputs=torch.float16)
    def forward(ctx, p, *args):
        # p: dropout probability; args: input followed by layer weights and
        # biases (exact layout defined by the extension -- see fused_mlp_relu).
        output = fused_mlp_relu.forward(p, args)
        ctx.save_for_backward(*args)
        # Keep every forward output (intermediates included) for backward.
        ctx.outputs = output
        # Last forward output is the dropout mask generated by the kernel.
        dropout_mask = output[-1]
        ctx.p = p
        return output[0], dropout_mask

    @staticmethod
    @custom_bwd
    def backward(ctx, *grad_o):
        p = ctx.p
        # Only the gradient of the primary output is consumed; the returned
        # dropout mask gets no gradient.
        grads = fused_mlp_relu.backward(p, grad_o[0], ctx.outputs, ctx.saved_tensors)
        del ctx.outputs
        # No gradient for the dropout-probability argument.
        return (None, *grads)
class MlpSiluFunction(torch.autograd.Function):
    # Autograd bridge for a fused MLP-with-SiLU kernel.
    # NOTE(review): ``fused_mlp_silu`` is never imported in this module (only
    # fused_mlp_relu / fused_mlp_agelu / fused_mlp_gelu are guarded above), so
    # applying this Function raises NameError -- confirm whether the import
    # was dropped by mistake or this class is dead code.
    @staticmethod
    @custom_fwd(cast_inputs=torch.float16)
    def forward(ctx, p, *args):
        # p: dropout probability; args: input followed by weights and biases.
        outputs = fused_mlp_silu.forward(p, args)
        ctx.save_for_backward(*args)
        ctx.outputs = outputs
        # Last forward output is the dropout mask generated by the kernel.
        dropout_mask = outputs[-1]
        ctx.p = p
        return outputs[0], dropout_mask

    @staticmethod
    @custom_bwd
    def backward(ctx, *grad_o):
        p = ctx.p
        # Only the primary output's gradient is consumed; the mask output and
        # the dropout probability get no gradient.
        grads = fused_mlp_silu.backward(p, grad_o[0], ctx.outputs, ctx.saved_tensors)
        del ctx.outputs
        return (None, *grads)
class MlpAGeLUFunction(torch.autograd.Function):
    """Autograd Function dispatching to the fused_mlp_agelu CUDA extension."""

    @staticmethod
    @custom_fwd(cast_inputs=torch.float16)
    def forward(ctx, p, *args):
        results = fused_mlp_agelu.forward(p, args)
        ctx.p = p
        ctx.outputs = results
        ctx.save_for_backward(*args)
        # results[-1] is the dropout mask produced by the kernel.
        return results[0], results[-1]

    @staticmethod
    @custom_bwd
    def backward(ctx, *grad_o):
        grads = fused_mlp_agelu.backward(ctx.p, grad_o[0], ctx.outputs, ctx.saved_tensors)
        del ctx.outputs
        # The dropout-probability argument receives no gradient.
        return (None, *grads)
class MlpGeLUFunction(torch.autograd.Function):
    """Autograd Function dispatching to the fused_mlp_gelu CUDA extension."""

    @staticmethod
    @custom_fwd(cast_inputs=torch.float16)
    def forward(ctx, p, *args):
        results = fused_mlp_gelu.forward(p, args)
        ctx.p = p
        ctx.outputs = results
        ctx.save_for_backward(*args)
        # results[-1] is the dropout mask produced by the kernel.
        return results[0], results[-1]

    @staticmethod
    @custom_bwd
    def backward(ctx, *grad_o):
        grads = fused_mlp_gelu.backward(ctx.p, grad_o[0], ctx.outputs, ctx.saved_tensors)
        del ctx.outputs
        # The dropout-probability argument receives no gradient.
        return (None, *grads)
# Public entry points: expose the fused kernels (wrapped for fp16 autocast via
# half_function) only when the matching extension imported successfully above;
# callers must handle the None fallback.
if fused_mlp_agelu:
    mlp_agelu_function = half_function(MlpAGeLUFunction.apply)
else:
    mlp_agelu_function = None
if fused_mlp_gelu:
    mlp_gelu_function = half_function(MlpGeLUFunction.apply)
else:
    mlp_gelu_function = None
class SwishFunction(torch.autograd.Function):
    # SiLU/Swish activation backed by the silu_cuda extension.
    # NOTE(review): unlike the fused_mlp_* modules, ``silu_cuda`` is imported
    # unconditionally at the top of this file, so the whole module fails to
    # import when that extension is absent -- confirm this is intended.
    @staticmethod
    @custom_fwd
    def forward(ctx, inp):
        ctx.save_for_backward(inp)
        return silu_cuda.forward(inp)

    @staticmethod
    @custom_bwd
    def backward(ctx, grad_out):
        inp, = ctx.saved_tensors
        # If the input does not require grad, skip the extension call entirely.
        if not ctx.needs_input_grad[0]: return (None,)
        return silu_cuda.backward(inp, grad_out)


def fast_silu(input):
    # Functional convenience wrapper around SwishFunction.
    return SwishFunction.apply(input)


class FastSiLU(torch.nn.Module):
    # Module wrapper so the fused SiLU can be used inside nn.Sequential etc.
    def forward(self, input):
        return fast_silu(input)
class AGELUFunction(torch.autograd.Function):
    """Tanh-approximated GELU with a hand-written analytic backward pass."""

    @staticmethod
    @custom_fwd
    def forward(ctx, x):
        ctx.save_for_backward(x)
        sqrt_2_over_pi = 0.7978845608
        cubic_coeff = 0.044715
        inner = sqrt_2_over_pi * (x + cubic_coeff * torch.pow(x, 3))
        return 0.5 * x * (1.0 + torch.tanh(inner))

    @staticmethod
    @custom_bwd
    def backward(ctx, grad_out):
        x, = ctx.saved_tensors
        sqrt_2_over_pi = 0.7978845608
        cubic_coeff = 0.044715
        dcoeff = 0.1070322243  # == 3 * cubic_coeff * sqrt_2_over_pi
        t = torch.tanh(sqrt_2_over_pi * (x + cubic_coeff * torch.pow(x, 3)))
        local_grad = (0.5 * x * (1.0 - torch.pow(t, 2))
                      * (sqrt_2_over_pi + dcoeff * torch.pow(x, 2))
                      + 0.5 * (1.0 + t))
        return grad_out * local_grad
class AGELU(torch.nn.Module):
    # nn.Module wrapper around AGELUFunction so the approximate GELU can be
    # dropped into module containers.
    def forward(self, input):
        return AGELUFunction.apply(input)
def agelu(x):
    """Tanh-approximated GELU: 0.5*x*(1 + tanh(sqrt(2/pi)*(x + 0.044715*x^3)))."""
    SQRT_M2_PI = 0.7978845608  # sqrt(2/pi)
    COEFF = 0.044715
    # The unused BACKCOEFF constant (it belongs to the backward pass) was removed.
    return 0.5 * x * (1.0 + torch.tanh(SQRT_M2_PI * (x + COEFF * torch.pow(x, 3))))
def agelu_backward(x, dy):
    """Analytic gradient of :func:`agelu`; returns dy * d(agelu)/dx."""
    sqrt_2_over_pi = 0.7978845608
    cubic_coeff = 0.044715
    dcoeff = 0.1070322243  # == 3 * cubic_coeff * sqrt_2_over_pi
    t = torch.tanh(sqrt_2_over_pi * (x + cubic_coeff * torch.pow(x, 3)))
    local_grad = (0.5 * x * (1.0 - torch.pow(t, 2))
                  * (sqrt_2_over_pi + dcoeff * torch.pow(x, 2))
                  + 0.5 * (1.0 + t))
    return dy * local_grad
if __name__ == '__main__':
class MLP(torch.nn.Module):
    """MLP stack executed by the fused C++/CUDA extension.

    Builds ``len(mlp_sizes) - 1`` linear layers; e.g. ``[1024, 1024, 1024]``
    creates two 1024x1024 layers.  ``forward`` dispatches to the fused
    ``mlp_gelu_function`` kernel; ``forward_ref`` is a pure-PyTorch reference
    implementation used to validate it.

    Args:
        mlp_sizes (list of int): layer widths, input width first.
        activation (str): one of ``'relu'``, ``'sigmoid'`` or ``'gelu'``.
        dropout (float): dropout probability applied after each hidden layer.
    """

    def __init__(self, mlp_sizes, activation='gelu', dropout=0.25):
        super(MLP, self).__init__()
        self.num_layers = len(mlp_sizes) - 1
        self.mlp_sizes = copy(mlp_sizes)
        self.dropout = dropout
        # Bug fix: the original compared with `activation is 'relu'` etc.;
        # identity comparison against string literals only works by accident
        # of CPython interning, so use equality.
        if activation == 'relu':
            self.activation = 1
        elif activation == 'sigmoid':
            self.activation = 2
        elif activation == 'gelu':
            self.activation = 3
        else:
            raise TypeError("activation must be 'relu', 'sigmoid' or 'gelu'.")
        self.weights = []
        self.biases = []
        for i in range(self.num_layers):
            # Registered both positionally (weights/biases lists for the fused
            # kernel) and as named attributes (weight_i/bias_i) so nn.Module
            # tracks them as parameters.
            w = torch.nn.Parameter(torch.empty(mlp_sizes[i + 1], mlp_sizes[i]))
            self.weights.append(w)
            name = 'weight_{}'.format(i)
            setattr(self, name, w)
            b = torch.nn.Parameter(torch.empty(mlp_sizes[i + 1]))
            self.biases.append(b)
            name = 'bias_{}'.format(i)
            setattr(self, name, b)
        self.reset_parameters()

    def reset_parameters(self):
        # Xavier-style normal init for weights, 1/sqrt(fan) for biases.
        for weight in self.weights:
            dimsum = weight.size(0) + weight.size(1)
            std = math.sqrt(2. / float(dimsum))
            nn.init.normal_(weight, 0., std)
        for bias in self.biases:
            std = math.sqrt(1. / float(bias.size(0)))
            nn.init.normal_(bias, 0., std)

    def forward(self, input, mask=None, ref=False):
        # ref=True replays the computation in plain PyTorch (requires the
        # dropout `mask` from a previous fused run); otherwise dispatch to
        # the fused kernel.
        if ref:
            return self.forward_ref(input, mask)
        return mlp_gelu_function(self.dropout, input, *self.weights, *self.biases)

    def forward_ref(self, input, mask):
        # Reference path: linear -> GELU -> inverted dropout per hidden layer,
        # consuming the flat dropout `mask` emitted by the fused kernel.
        i = 0
        output = input
        for l in range(self.num_layers):
            output = F.linear(output, self.weights[l], self.biases[l])
            dropout_mask = mask[i:i + output.numel()]
            pinv = 1 / (1 - self.dropout)
            if l < self.num_layers - 1:
                output = F.gelu(output) * dropout_mask.view(output.size(0), -1) * pinv
                i += output.numel()
        return output

    def extra_repr(self):
        # TODO add dropout probability
        return F"MLP sizes: {self.mlp_sizes}, activation={self.activation}"
batch_size = 24568
mlp_sizes = [1024, 4096, 1024]
# mlp_sizes = [4, 7, 4]
num_iters = 10
class TestMLP(unittest.TestCase):
def test_creation(self):
MLP(mlp_sizes)
def test_numeric(self):
mlp = MLP(mlp_sizes, activation='relu').cuda()
print(mlp)
ref_mlp = deepcopy(mlp)
for _ in range(1):
bsz = random.randint(2850, batch_size // 8) * 8
test_input = torch.empty(bsz, mlp_sizes[0], device="cuda").uniform_(-1., 1.).requires_grad_()
ref_input = test_input.clone().detach().requires_grad_()
mlp_out, dropout_mask = mlp(test_input)
ref_out = ref_mlp.forward(ref_input, dropout_mask, ref=True)
print(dropout_mask.sum() / dropout_mask.numel())
np.testing.assert_allclose(
mlp_out.detach().cpu().numpy(),
ref_out.detach().cpu().numpy(),
atol=1e-5, rtol=1e-4)
# Use mean value as scalar loss. Multiply 10 to make it big enough not zero out
mlp_out.mean().mul(10.).backward()
ref_out.mean().mul(10.).backward()
np.testing.assert_allclose(
test_input.grad.detach().cpu().numpy(),
ref_input.grad.detach().cpu().numpy(),
atol=1e-7, rtol=1e-5)
np.testing.assert_allclose(
mlp.biases[0].grad.detach().cpu().numpy(),
ref_mlp.biases[0].grad.detach().cpu().numpy(),
atol=1e-7, rtol=1e-5)
def test_with_bias(self):
for use_activation in ['relu']:
mlp = MLP(mlp_sizes, activation=use_activation).cuda()
ref_mlp = deepcopy(mlp)
test_input = torch.empty(batch_size, mlp_sizes[0], device="cuda").uniform_(-1., 1.).requires_grad_()
ref_input = test_input.clone().detach().requires_grad_()
mlp_out, dropout_mask = mlp(test_input)
ref_out = ref_mlp(ref_input, dropout_mask, ref=True)
np.testing.assert_allclose(
mlp_out.detach().cpu().numpy(),
ref_out.detach().cpu().numpy(),
atol=1e-7, rtol=1e-5)
# Use mean value as scalar loss. Multiply 10 to make it big enough not zero out
mlp_out.mean().mul(10.).backward()
ref_out.mean().mul(10.).backward()
np.testing.assert_allclose(
test_input.grad.detach().cpu().numpy(),
ref_input.grad.detach().cpu().numpy(),
atol=1e-5, rtol=1e-4)
for l in range(mlp.num_layers):
np.testing.assert_allclose(
mlp.weights[l].grad.detach().cpu().numpy(),
ref_mlp.weights[l].grad.detach().cpu().numpy(),
atol=1e-7, rtol=1e-5)
np.testing.assert_allclose(
mlp.biases[l].grad.detach().cpu().numpy(),
ref_mlp.biases[l].grad.detach().cpu().numpy(),
atol=1e-7, rtol=1e-5)
def test_no_grad(self):
mlp = MLP(mlp_sizes).cuda()
ref_mlp = deepcopy(mlp)
test_input = torch.empty(batch_size, mlp_sizes[0], device="cuda").uniform_(-1., 1.)
ref_input = test_input.clone().detach()
mlp_out, dropout_mask = mlp(test_input)
ref_out = ref_mlp(ref_input, dropout_mask, ref=True)
np.testing.assert_allclose(
mlp_out.detach().cpu().numpy(),
ref_out.detach().cpu().numpy(),
atol=1e-7, rtol=1e-5)
def test_performance_half(self):
mlp = MLP(mlp_sizes).cuda().half()
mlp_layers = []
for i in range(mlp.num_layers):
linear = nn.Linear(mlp_sizes[i], mlp_sizes[i + 1])
mlp.weights[i].data.copy_(linear.weight)
mlp.biases[i].data.copy_(linear.bias)
mlp_layers.append(linear)
if i < mlp.num_layers - 1:
# mlp_layers.append(nn.ReLU(inplace=True))
mlp_layers.append(torch.nn.GELU())
mlp_layers.append(nn.Dropout(0.25))
ref_mlp = nn.Sequential(*mlp_layers).cuda().half()
test_input = torch.empty(
batch_size, mlp_sizes[0], device="cuda", dtype=torch.half).fill_(10.).requires_grad_()
ref_input = torch.empty(
batch_size, mlp_sizes[0], device="cuda", dtype=torch.half).fill_(10.).requires_grad_()
# Warm up GPU
for _ in range(100):
ref_out = ref_mlp(ref_input)
ref_loss = ref_out.mean()
ref_mlp.zero_grad()
ref_loss.backward()
mlp_out, _ = mlp(test_input)
test_loss = mlp_out.mean()
mlp.zero_grad()
test_loss.backward()
torch.cuda.profiler.start()
torch.cuda.synchronize()
start_time = time()
for _ in range(num_iters):
ref_out = ref_mlp(ref_input)
ref_loss = ref_out.mean()
ref_mlp.zero_grad()
ref_loss.backward()
torch.cuda.synchronize()
stop_time = time()
print(F"\nPytorch MLP time {(stop_time - start_time) * 1000. / num_iters:.4f} ms")
torch.cuda.synchronize()
start_time = time()
for _ in range(num_iters):
mlp_out, _ = mlp(test_input)
test_loss = mlp_out.mean()
mlp.zero_grad()
test_loss.backward()
torch.cuda.synchronize()
stop_time = time()
print(F"C++ MLP time {(stop_time - start_time) * 1000. / num_iters:.4f} ms")
torch.cuda.profiler.stop()
unittest.main()
# test = TestMLP()
# test.test_creation()
# test.test_performance_half()
# test.test_with_bias()
| [
"[email protected]"
] | |
4199440910460a422c013a0c40e9ecddfe383267 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnbelt.py | a561c79d95be99afc054e24528da2a296c42f2e6 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 1,067 | py | ii = [('LyelCPG2.py', 1), ('MarrFDI.py', 1), ('CoolWHM2.py', 2), ('KembFFF.py', 1), ('RogePAV.py', 4), ('RennJIT.py', 1), ('LeakWTI2.py', 1), ('LeakWTI3.py', 1), ('PettTHE.py', 1), ('TennAP.py', 1), ('PeckJNG.py', 1), ('BailJD2.py', 1), ('FitzRNS3.py', 2), ('WilkJMC2.py', 3), ('CarlTFR.py', 4), ('LyttELD.py', 1), ('BailJD1.py', 1), ('RoscTTI2.py', 1), ('CrokTPS.py', 1), ('ClarGE.py', 2), ('BuckWGM.py', 2), ('LyelCPG.py', 3), ('GilmCRS.py', 1), ('WestJIT2.py', 1), ('DibdTRL2.py', 1), ('CrocDNL.py', 3), ('FerrSDO2.py', 1), ('LeakWTI.py', 1), ('BachARE.py', 1), ('MereHHB3.py', 1), ('HowiWRL2.py', 3), ('BailJD3.py', 1), ('WilkJMC.py', 5), ('MartHRW.py', 4), ('MackCNH.py', 1), ('FitzRNS4.py', 5), ('CoolWHM3.py', 1), ('DequTKM.py', 5), ('FitzRNS.py', 3), ('BowrJMM.py', 1), ('LyttELD3.py', 1), ('RoscTTI.py', 2), ('LewiMJW.py', 1), ('JacoWHI2.py', 1), ('SomeMMH.py', 2), ('BrewDTO.py', 2), ('RogeSIP.py', 6), ('MartHRW2.py', 3), ('MartHSI.py', 3), ('DwigTHH.py', 1), ('BowrJMM2.py', 1), ('BowrJMM3.py', 2), ('BeckWRE.py', 1), ('KeigTSS.py', 1), ('HowiWRL.py', 1)] | [
"[email protected]"
] | |
1d04d3d4f51fb6e30bcbf047d655a4f3121f14ce | 73dc6b3fdb07592f10b8e02b7ca053deb61a2dc9 | /msof_api/comments/admin.py | 4e3be735e10ca856e607a6d04ccf86bad757bf99 | [] | no_license | likelion-kookmin/msof_api | 4143c09f93b68d219aa20de3bd57ec544c2bdf32 | f9fec7d31ebdb465a8935711da715db6d87c0fce | refs/heads/develop | 2023-06-28T15:35:45.240871 | 2021-07-31T16:38:35 | 2021-07-31T16:38:35 | 347,298,658 | 3 | 1 | null | 2021-07-31T16:38:36 | 2021-03-13T07:02:56 | Python | UTF-8 | Python | false | false | 784 | py | """# comments admin
- CommentAdmin
"""
from django.contrib import admin
from .models import Comment
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
"""## CommentAdmin
- admin에서 관리할 Comment 모델 설정
"""
list_display = [
'author',
'question',
'parent',
'selected',
'content',
'status',
'liked_count',
'disliked_count',
]
list_editable = [
'status',
]
list_filter = [
'author',
'question',
'parent',
'selected',
'status',
]
search_fields = [
'content',
'author__name',
'question__title',
'question__content',
]
ordering = [
'-updated_at',
]
| [
"[email protected]"
] | |
b089edef3519feb7f892bdd66d7ebb57fe321c27 | d214b72b3ae340d288c683afe356de6846a9b09d | /动态规划/最大矩形_85.py | d5fa9f35ee7dab90956eab9b4c2c0e9f34d1993c | [] | no_license | Xiaoctw/LeetCode1_python | 540af6402e82b3221dad8648bbdcce44954a9832 | b2228230c90d7c91b0a40399fa631520c290b61d | refs/heads/master | 2021-08-29T15:02:37.786181 | 2021-08-22T11:12:07 | 2021-08-22T11:12:07 | 168,444,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,071 | py | from typing import *
class Solution:
def maximalRectangle(self, matrix: List[List[str]]) -> int:
m, n = len(matrix), len(matrix[0])
num_up = [[0] * n for _ in range(m)]
for i in range(n):
if matrix[0][i] == '1':
num_up[0][i] = 1
for i in range(1, m):
for j in range(n):
if matrix[i][j] == '1':
num_up[i][j] = num_up[i - 1][j] + 1
ans = 0
for i in range(m):
pre_zero = -1
min_up = float('inf')
for j in range(n):
if matrix[i][j] == '0':
pre_zero = j
min_up=float('inf')
else:
min_up = min(min_up, num_up[i][j])
ans = max(ans, min_up * (j - pre_zero))
return ans
if __name__ == '__main__':
matrix = [["1", "0", "1", "0", "0"], ["1", "0", "1", "1", "1"], ["1", "1", "1", "1", "1"],
["1", "0", "0", "1", "0"]]
sol=Solution()
print(sol.maximalRectangle(matrix))
| [
"[email protected]"
] | |
19907e7cb61cd025d174242e51357e774a777801 | d257ddf7e6959d0989d76080a8a048e82393657f | /002_TemplateMatching/002_template_match_implemented.py | 112464bcd0690858ab97442b59d77b3d552eca7f | [
"MIT"
] | permissive | remichartier/027_selfDrivingCarND_ObjectDetectionExercises | d210f37b7baf306dd034c09f62e125b263f8270d | ccd853c975d35df5f31e1a445a1a8757b8bd13f5 | refs/heads/main | 2023-04-17T08:09:55.465143 | 2021-05-03T07:11:16 | 2021-05-03T07:11:16 | 362,013,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,661 | py | import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
image = mpimg.imread('bbox-example-image.jpg')
#image = mpimg.imread('temp-matching-example-2.jpg')
templist = ['cutout1.jpg', 'cutout2.jpg', 'cutout3.jpg',
'cutout4.jpg', 'cutout5.jpg', 'cutout6.jpg']
# Here is your draw_boxes function from the previous exercise
def draw_boxes(img, bboxes, color=(0, 0, 255), thick=6):
# Make a copy of the image
imcopy = np.copy(img)
# Iterate through the bounding boxes
for bbox in bboxes:
# Draw a rectangle given bbox coordinates
cv2.rectangle(imcopy, bbox[0], bbox[1], color, thick)
# Return the image copy with boxes drawn
return imcopy
# All the 6 methods for comparison in a list
# methods = ['cv2.TM_CCOEFF', 'cv2.TM_CCOEFF_NORMED', 'cv2.TM_CCORR',
# 'cv2.TM_CCORR_NORMED', 'cv2.TM_SQDIFF', 'cv2.TM_SQDIFF_NORMED']
# Define a function that takes an image and a list of templates as inputs
# then searches the image and returns the a list of bounding boxes
# for matched templates
def find_matches(img, template_list):
# Make a copy of the image to draw on
imcopy = np.copy(img)
# Define an empty list to take bbox coords
bbox_list = []
# Iterate through template list
for temp in template_list:
# Read in templates one by one
templ = mpimg.imread(temp)
print(templ.shape[::-1])
l, w, h = templ.shape[::-1]
# Use cv2.matchTemplate() to search the image
# using whichever of the OpenCV search methods you prefer
#meth = 'cv2.TM_SQDIFF' # --> Not working
meth = 'cv2.TM_CCOEFF' # --> Working
#meth = 'cv2.TM_CCOEFF_NORMED' # --> Working
#meth = 'cv2.TM_CCORR' # --> Not working
#meth = 'cv2.TM_CCORR_NORMED' # --> Working
#meth = 'cv2.TM_SQDIFF' # --> Not working
#meth = 'cv2.TM_SQDIFF_NORMED' # --> Not working
method = eval(meth)
res = cv2.matchTemplate(img,templ,method)
# Use cv2.minMaxLoc() to extract the location of the best match
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
# Determine bounding box corners for the match
if meth in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
top_left = min_loc
else:
top_left = max_loc
bottom_right = (top_left[0] + w, top_left[1] + h)
bbox_list.append((top_left,bottom_right))
# Return the list of bounding boxes
return bbox_list
bboxes = find_matches(image, templist)
result = draw_boxes(image, bboxes)
plt.imshow(result) | [
"[email protected]"
] | |
76958178b7438bb05a58d4bf3edd04bf9ee28403 | cc212540f928a95fa56f4679e3eb58e2ad329ca5 | /annpy/training/trainer.py | c93d497850a77427e0a1ba0888254a24da4a10e7 | [
"LicenseRef-scancode-mit-taylor-variant",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | nirvguy/annpy | ec05e07316bddd4bc5fbbd3d9e73ec94dc52a4b9 | ea5f92048173d0ebd1ad134cf626fa623569905e | refs/heads/master | 2018-06-03T06:11:21.911758 | 2018-05-30T16:16:46 | 2018-05-30T16:16:48 | 118,555,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,346 | py | # See LICENSE file for copyright and license details.
import torch
class Trainer(object):
    """Drives a learning rule over batches, notifying attached hooks.

    Hooks receive, in order: 'pre_training', then per epoch 'pre_epoch' /
    'post_epoch', and finally 'post_training'.
    """

    def __init__(self, learning_rule):
        # learning_rule must expose step(batch); called once per batch.
        self._learning_rule = learning_rule
        self._epoch = 0
        self._hooks = []
        self._remaining_epochs = 0

    @property
    def epoch(self):
        """Total number of epochs completed so far."""
        return self._epoch

    @staticmethod
    def check_batch(batch):
        """Validate a single batch; raises on non-tensor or <2-D input."""
        # Bug fix: raise the specific builtin exceptions (both subclasses of
        # Exception, so existing `except Exception` callers keep working).
        if not isinstance(batch, torch.Tensor):
            raise TypeError("batches must be torch.Tensor instances")
        if len(batch.shape) <= 1:
            raise ValueError("Batch shape must have at least dimension two")

    def _notify(self, msg):
        # Broadcast a lifecycle event to every attached hook.
        for hook in self._hooks:
            hook.notify(msg)

    def train(self, batchs, epochs=1):
        """Run *epochs* passes over *batchs*; no-op for an empty batch list."""
        if len(batchs) == 0:
            return
        # Validate everything up front so no partial epoch runs on bad input.
        for batch in batchs:
            self.check_batch(batch)
        self._remaining_epochs = epochs
        self._notify('pre_training')
        for _ in range(epochs):
            self._notify('pre_epoch')
            for batch in batchs:
                self._learning_rule.step(batch)
            self._epoch += 1
            self._remaining_epochs -= 1
            self._notify('post_epoch')
        self._notify('post_training')

    def remaining_epochs(self):
        """Epochs still to run in the current train() call (0 when idle)."""
        return self._remaining_epochs

    def attach(self, hook):
        """Register a hook object exposing notify(msg)."""
        self._hooks.append(hook)
| [
"[email protected]"
] | |
1f57a94143af972a289bfc920a65f67f1bd6adf6 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_17171.py | 41ccf0e53304977e412d274d7efa76fe1482d35e | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35 | py | # Integer to Boolean
return a != b
| [
"[email protected]"
] | |
f841e9e9170838ca8c2972ca807eedb0e4ecd954 | e905abd9bb7bd7017657d0a0c4d724d16e37044c | /.history/article/settings_20210208181317.py | 5959719e37fa4bb9dcbc2f1420a4a206f030284f | [] | no_license | tabdelbari/articles | a8b921841f84fb473f5ed1cdcda743863e6bc246 | f0e1dfdc9e818e43095933139b6379a232647898 | refs/heads/main | 2023-03-05T10:21:35.565767 | 2021-02-10T13:35:14 | 2021-02-10T13:35:14 | 325,654,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,437 | py | # -*- coding: utf-8 -*-
# Scrapy settings for article project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     https://docs.scrapy.org/en/latest/topics/settings.html
#     https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# MongoDB connection used by article.pipelines.MongoPipeline (see ITEM_PIPELINES below).
MONGO_URI = 'mongodb://localhost:27017/'
MONGO_DATABASE = 'articles'
BOT_NAME = 'article'
SPIDER_MODULES = ['article.spiders']
NEWSPIDER_MODULE = 'article.spiders'
# scrapy-splash wiring: Splash rendering service endpoint plus the dupe filter
# and HTTP cache storage classes that understand Splash requests.
SPLASH_URL = 'http://localhost:8050'
DUPEFILTER_CLASS = 'scrapy_splash.SplashAwareDupeFilter'
HTTPCACHE_STORAGE = 'scrapy_splash.SplashAwareFSCacheStorage'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'article (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
DOWNLOAD_DELAY = 10
# The download delay setting will honor only one of:
# NOTE: with CONCURRENT_REQUESTS_PER_IP non-zero, Scrapy uses the per-IP limit
# and CONCURRENT_REQUESTS_PER_DOMAIN is ignored.
CONCURRENT_REQUESTS_PER_DOMAIN = 1
CONCURRENT_REQUESTS_PER_IP = 1
# Disable cookies (enabled by default)
COOKIES_ENABLED = True
COOKIES_DEBUG = True
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
DEFAULT_REQUEST_HEADERS = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'en',
}
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
SPIDER_MIDDLEWARES = {
    'scrapy_splash.SplashDeduplicateArgsMiddleware': 100,
}
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
    'scrapy_splash.SplashCookiesMiddleware': 723,
    'scrapy_splash.SplashMiddleware': 725,
    'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810,
}
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    'article.pipelines.MongoPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"[email protected]"
] | |
30bcc49e425481fed6a6df0a85ca78f5255b3b93 | 200abee8ebb5fa255e594c8d901c8c68eb9c1a9c | /venv/03_old/hello_world.py | 3ef7463bc3ca43192af6add6ec132d91cd3a73f7 | [] | no_license | Vestenar/PythonProjects | f083cbc07df57ea7a560c6b18efed2bb0dc42efb | f8fdf9faff013165f8d835b0ccb807f8bef6dac4 | refs/heads/master | 2021-07-20T14:14:15.739074 | 2019-03-12T18:05:38 | 2019-03-12T18:05:38 | 163,770,129 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,760 | py | '''def sum(param1, param2):
return param1 + param2 # return result to the function caller
param1 = int(input())
param2 = int(input())
c = sum(param1, param2)
print(c)
nlen=0
def checkPalindrome(inputString):
nlen = len(inputString)
if nlen == 1:
t = True
else:
for i in range(nlen//2):
if inputString[i] == inputString[-i-1]:
t = True
else:
t = False
break
return t
inputString = "sasass"
print(checkPalindrome(inputString))'''
'''def adjacentElementsProduct(inputArray):
test = inputArray[0]*inputArray[1]
for i in range((len(inputArray)-2)):
nmax = inputArray[i+1]*inputArray[i+2]
if test < nmax:
test = nmax
return test
inputArray = [6, 2, 3, 8]
max = 0
max = adjacentElementsProduct(inputArray)
print(max)'''
'''sequence = [1, 3, 2, 1]
count = 0
t = True
t1 = True
t2 = True
narray = list(sequence)
for b in range(2):
for i in range(len(narray)-1):
if narray[i] < narray[i-1]:
narray[i-1:i] = []
count += 1
if count < 2:
t1 = False
count = 0
narray2 = list(sequence)
narray = list(sequence)
for b in range(2):
for i in range(len(narray)-1):
if narray[i] < narray[i-1]:
narray[i:i+1] = []
count += 1
if count < 2:
t1 = False
t = t1 or t2
print(narray)
print(narray2)
print(t1, t2, t)'''
'''t = True
count = 0
for i in range(len(sequence)):
if count > 2:
data = False
break
if i+1 < len(sequence) and sequence[i] >= sequence[i+1]:
count += 1
if i+2 < len(sequence) and sequence[i] >= sequence[i+2]:
count += 1
print(t)
'''
'''matrix = [[1,1,1],
[2,2,2],
[3,3,3]]
price = 0
for i in range(len(matrix)):
for j in range(len(matrix[i])):
if i != 0 and matrix[i-1][j] == 0:
matrix[i][j] = 0
for row in matrix:
for elem in row:
price += elem
print(price)'''
'''inputArray = ["aba", "aa", "ad", "vcd", "aba"]
lenw = 0
out = []
for i in range(len(inputArray)):
if lenw < len(inputArray[i]):
lenw = len(inputArray[i])
for i in range(len(inputArray)):
if len(inputArray[i]) == max(len(s) for s in inputArray):
out.append(inputArray[i])
print(out)'''
'''s1 = "aabzca"
s2 = "adcaaz"
n = 0
for i in s1:
if i in s2:
n +=1
s2 = s2.replace(i, "0", 1)
print(n)'''
'''n = str(int(123610))
mid = len(n)//2
n1 = n[:mid]
n2 = n[mid:]
sum1 = 0
for i in range(len(n1)):
sum1 +=int(n1[i])
for i in range(len(n2)):
sum1 -=int(n2[i])
if sum1 == 0:
out = "Счастливый"
else:
out = "Обычный"
print(out)'''
'''s = 'aaaabbcccaabb'
t = s[0]
count = 0
out = ''
for i in s:
if i == t:
count += 1
else:
out = out + t+str(count)
t = i
count = 1
out = out + t + str(count)
print(t, out)'''
'''a = [23, 54, -1, 43, 1, -1, -1, 77, -1, -1, -1, 3]
print([1, 3, -1, 23, 43, -1, -1, 54, -1, -1, -1, 77])
m = max(a)
for i in range(1, len(a)):
if a[-i] != -1:
a[-i], a[a.index(m)] = a[a.index(m)], a[-i]
m = max(a[:-i])
print(a)
'''
'''s = "The ((quick (brown) (fox) jumps over the lazy) dog)"
count = s.count('(')
op = []
cl = []
id = 0
for ch in s:
if ch == '(':
op.append(id)
id += 1
op = op[::-1]
id = 0
'ускорить поиск скобок путем определения начала поиска'
for i in range(count):
for ch in s:
if ch == ')' and id > op[i] and id not in cl:
cl.append(id)
break
id += 1
id = 0
for i in range(count):
sh = s[op[i]+1:cl[i]]
s = s.replace(sh, sh[::-1])
s = s.replace("(", "")
s = s.replace(")", "")
print(s)'''
'''s = "The ((quick (brown) (fox) jumps over the lazy) dog)"
while ')' in s:
j = s.index(')')
i = s.rindex('(', 0, j)
s = s[:i] + s[j-1:i:-1] + s[j+1:]
print(s)'''
'''a = [50]
b = [0,0]
for i in range(len(a)):
b[i%2] += a[i]
print(b)'''
'''
a = ["*****",
"*abc*",
"*ded*",
"*****"]
picture = ["abc", "ded"]
picture.insert(0,"*" * len(picture[0]))
picture.append("*" * len(picture[0]))
for i in range(len(picture)):
test = picture[i]
test = "*" + test + "*"
picture[i] = test
print(picture)'''
'''def areSimilar(a, b):
idx = []
if len(a) != len(b):
return False
for i in range(len(a)):
if a[i] != b[i]:
idx.append(i)
if len(idx) == 0:
return True
if len(idx) != 2:
return False
if a[idx[0]] == b[idx[1]] and a[idx[1]] == b[idx[0]]:
return True
else:
return False
'заносим в массив idx только те символы, которые не совпадают в исходных массивах, если таких символов только две пары, то проверяем взаимозаменяемость пар'
a = [1, 2, 2]
b = [2, 1, 1]
print(areSimilar(a, b))'''
'''def arrayChange(inputArray):
n = 0
for i in range(1, len(inputArray)):
if inputArray[i] <= inputArray[i-1]:
n += inputArray[i - 1] - inputArray[i] + 1
inputArray[i] += inputArray[i-1] - inputArray[i] +1
return n
inputArray = [2, 3, 3, 5, 5, 5, 4, 12, 12, 10, 15]
print(arrayChange(inputArray))
'''
'''a = [int(i) for i in input().split()]
b = []
ans = ''
for i in range(len(a)):
if a.count(a[i]) > 1 and (a[i] not in b):
b.append(a[i])
for i in b:
ans += str(i) + ' '
print(ans)
'''
'''
проверка строки на возможность получить палиндром перестановкой символов.
считаем только символы, количество которых нечетное и заносим в массив
def palindromeRearranging(inputString):
a = []
for i in range(len(inputString)):
if inputString.count(inputString[i]) % 2 != 0:
if inputString[i] != inputString[i-1]:
a.append(inputString.count(inputString[i]))
return len(a) <= 1
task = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaccc'
print(palindromeRearranging(task))
'''
'''САПЕР codesignal
def minesweeper(matrix):
row, col = len(matrix), len(matrix[0])
ans = [[0 for c in range(col)] for r in range(row)]
for i in range(row):
for j in range(col):
if matrix[i][j]:
ans[i][j] = -1
for di in range(-1, 2):
for dj in range(-1, 2):
ai = i + di
aj = j + dj
if 0 <= ai < row and 0 <= aj < col and matrix[ai][aj]:
ans[i][j] += 1
return ans
task = [[True,False,False],[False,True,False],[False,False,False]]
print(minesweeper(task))
'''
'''
def avoidObstacles(inputArray):
jump = 1
a = 0
while a < max(inputArray)//jump:
jump += 1
for i in range(1, max(inputArray)//jump+1):
if jump*i not in inputArray:
a += 1
else:
a = 0
break
return jump
task = [5, 3, 6, 7, 9]
print(avoidObstacles(task))
'''
''' # эффект блюр для "фотографии"
def boxBlur(image):
row, col = len(image), len(image[0]) # row rows, col columns
ans = []
for i in range(1, row-1):
ans.append([])
for j in range(1, col-1):
flsum = 0
for k in range(-1, 2):
for l in range(-1, 2):
flsum += image[i+k][j+l]
ans[i-1].append(int(flsum/9))
return ans
task = [[7, 4, 0, 1], [5, 6, 2, 2], [6, 10, 7, 8], [1, 4, 2, 0]]
print(boxBlur(task))
'''
'''codesignal является ли имя переменой корректным
def variableName(name):
if not name[0].isalpha() and name[0] != '_':
return False
else:
for i in range(1, len(name)):
if not name[i].isalnum() and name[i] != '_':
return False
return True
name = 'var1_'
print(variableName(name))
'''
'''codesignal
def absoluteValuesSumMinimization(a):
# x = a[0]
list = {}
for i in range(len(a)):
sabs = 0
for j in range(len(a)):
sabs += abs(a[j] - a[-(i+1)])
list[sabs] = a[-(i+1)]
print(list)
return list[min(list)]
test = [1, 1, 3, 4]
print(absoluteValuesSumMinimization(test))
'''
''' задача на брутфорс всех перестановок
def stringsRearrangement(inputArray):
import itertools
perm = list(itertools.permutations(inputArray, len(inputArray))) #полный список всех перестановок
for k in perm: #проверяем каждый вариант перестановки
for i in range(1, len(k)):
a = k[i]
b = k[i-1]
count = 0
for index in range(len(a)):
if a[index] != b[index]:
count += 1
if count != 1:
break
if count == 1:
return True
return False'''
'''#codesignal
#Given array of integers, find the maximal possible sum of some of its k consecutive elements.
def arrayMaxConsecutiveSum(a, k):
c = m = sum(a[:k]) #посчитали исходную сумму
for i in range(len(a) - k):
c = c + a[i + k] - a[i] #уменьшили сумму на предыдущий элемент и увеличили на следующий
m = max(c, m) #проверили максимум и сохранили в m
return m
test = [1, 3, 2, 4]
k = 3
print(arrayMaxConsecutiveSum(test, k))''' | [
"[email protected]"
] | |
c42d909697d0db5a72ae51a3c5d635841a1787f8 | a8fca7b6bc1f0eeaba12b682a81d880dc71cc929 | /FlaskEndpoint/tests/system/test_home.py | 38225c4925d80136cac8cbc7e3a04b5a0ac7ca4e | [] | no_license | sineczek/Automated-Software-Testing-with-Python | cb74d8714ad5b2ec9a6ffc013a400f0181f8095b | 2e7c4ff4bb5acfd53afb43a4bfa7191eb58a899c | refs/heads/main | 2023-04-14T08:15:53.917614 | 2021-04-24T17:18:23 | 2021-04-24T17:18:23 | 345,342,351 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | from tests.system.base_test import BaseTest
import json
class TestHome(BaseTest):
    """System tests for the application's root endpoint."""

    def test_home(self):
        """GET / responds with HTTP 200 and the greeting payload."""
        with self.app() as client:
            response = client.get('/')
            self.assertEqual(response.status_code, 200)
            # get_data() returns the raw response body; json.loads turns it
            # into a dict we can compare against the expected payload.
            body = json.loads(response.get_data())
            self.assertEqual(body, {'message': 'Hello, world!'})
| [
"[email protected]"
] | |
d8a49d368a82b0008bacdd568c57aa745bde3133 | d86ed2c37a55b4a3118131a04f9a68dbd3b51a7f | /sherpatest/lev3fft-bar.py | 384437c626b0fbb39addb9d1c5274f6e57b5fd62 | [] | no_license | hamogu/sherpa-test-data | f745cc907c2535a721d46472b33f7281bd6e6711 | 77d9fc563875c59a4acff2960d46180ee7a8ec14 | refs/heads/master | 2023-06-18T22:30:44.947033 | 2020-08-03T12:07:13 | 2020-08-03T12:07:13 | 275,202,255 | 0 | 0 | null | 2020-06-26T16:38:19 | 2020-06-26T16:38:19 | null | UTF-8 | Python | false | false | 1,244 | py | #!/usr/bin/env python
from sherpa.astro.ui import *
# (data set id, image file, PSF file, source region) for each source.
# The two sources were previously configured by two copy-pasted blocks of
# identical statements; drive the shared setup from this table instead.
sources = [
    (1,
     "acisf07999_000N001_r0035_regevt3_srcimg.fits",
     "acisf07999_000N001_r0035b_psf3.fits",
     "ellipse(3145.8947368421,4520.7894736842,37.0615234375,15.3881587982,92.2273254395)"),
    (2,
     "acisf08478_000N001_r0043_regevt3_srcimg.fits",
     "acisf08478_000N001_r0043b_psf3.fits",
     "ellipse(3144.5238095238,4518.8095238095,25.2978591919,19.1118583679,42.9872131348)"),
]
for srcid, image_file, psf_file, reg_file in sources:
    load_data(srcid, image_file)
    load_psf("psf%i" % srcid, psf_file)
    set_psf(srcid, "psf%i" % srcid)
    # Apply the source region filter in physical (pixel) coordinates.
    set_coord(srcid, "physical")
    notice2d_id(srcid, reg_file)
    # Switch to WCS for fitting
    set_coord(srcid, "wcs")
    # Use Nelder-Mead, C-statistic as fit method, statistic
    set_method("neldermead")
    set_stat("cstat")
    set_source(srcid, 'gauss2d.src + const2d.bkg')
    # NOTE(review): `src` is injected into this namespace by set_source's
    # model string ('gauss2d.src') — same reliance as the original script.
    guess(srcid, src)
# Simultaneous fit over all configured data sets.
fit()
| [
"[email protected]"
] | |
77c55d04b0a750c8b0c0dc571cf5927a6d78e179 | 356f3f1b7caf0ccb20cc830d40821dfb2cbda046 | /sfit/sfit/doctype/items/items.py | c1943c13dec9e21c63e99267eb3e87e7de102726 | [
"MIT"
] | permissive | vignesharumainayagam/sfit | f4b75b9a8b2de08d0eaa4eadbcd3d5e432ffba56 | a96afbf35b0e1635e44cb5f83d7f86c83abedb8f | refs/heads/master | 2021-09-05T18:22:43.494208 | 2018-01-30T07:23:02 | 2018-01-30T07:23:02 | 104,332,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Valiant Systems and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class Items(Document):
	"""Frappe DocType controller for Items; all behavior inherited from Document."""
	pass
| [
"[email protected]"
] | |
8f885274db507628a34e8f8f094526a25c935972 | cc9d1aeb8aefe3d4f86c94b4279a64e70bf5fd80 | /setup.py | be0365371238e8e2c7a86eb0bd4aa3c81f749446 | [
"MIT"
] | permissive | sdelquin/sendgrify | a520a2da7d6c6d7c4707c325f6d67523e53803eb | fe8ee1d0efd0c8d8034d1c57cfc07672f77d7e8e | refs/heads/main | 2023-06-11T15:49:27.284693 | 2023-05-28T12:54:34 | 2023-05-28T12:54:34 | 342,843,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | # read the contents of your README file
from pathlib import Path
from setuptools import setup
# Use the package README as the PyPI long description.
this_directory = Path(__file__).parent
long_description = (this_directory / 'README.md').read_text()
# Runtime dependencies; sendgrid is pinned to the v5 client API.
REQUIREMENTS = (
    'sendgrid==5.3.0',
    'markdown',
)
setup(
    name='sendgrify',
    version='2.0.3',
    url='https://github.com/sdelquin/sendgrify.git',
    author='Sergio Delgado Quintero',
    author_email='[email protected]',
    description='SendGrid for Humans',
    license='MIT',
    packages=['sendgrify'],
    install_requires=REQUIREMENTS,
    long_description=long_description,
    long_description_content_type='text/markdown',
)
| [
"[email protected]"
] | |
0886616bd81e0a2e31e16fed2ae9620947223dac | ae326c4e6a2b2d5b67fa8d175249ef90f6a3021a | /leo/external/rope/ropetest/refactor/extracttest.py | 167f7984d254f4be25e2554d9f39807e0827d542 | [
"GPL-2.0-only",
"GPL-1.0-or-later",
"MIT"
] | permissive | frakel/leo-editor | f95e6c77d60485d80fddfbeaf35db961cf691177 | b574118ee3b7ffe8344fa0d00dac603096117ac7 | refs/heads/master | 2020-03-28T10:40:24.621077 | 2018-10-23T14:39:31 | 2018-10-23T14:39:31 | 148,132,817 | 0 | 0 | MIT | 2018-09-10T09:40:18 | 2018-09-10T09:40:18 | null | UTF-8 | Python | false | false | 42,781 | py | try:
import unittest2 as unittest
except ImportError:
import unittest
import rope.base.codeanalyze
import rope.base.exceptions
from rope.refactor import extract
from ropetest import testutils
class ExtractMethodTest(unittest.TestCase):
    def setUp(self):
        """Create a fresh sample rope project for each test."""
        super(ExtractMethodTest, self).setUp()
        self.project = testutils.sample_project()
        self.pycore = self.project.pycore
    def tearDown(self):
        """Remove the sample project created in setUp."""
        testutils.remove_project(self.project)
        super(ExtractMethodTest, self).tearDown()
def do_extract_method(self, source_code, start, end, extracted, **kwds):
testmod = testutils.create_module(self.project, 'testmod')
testmod.write(source_code)
extractor = extract.ExtractMethod(
self.project, testmod, start, end)
self.project.do(extractor.get_changes(extracted, **kwds))
return testmod.read()
def do_extract_variable(self, source_code, start, end, extracted, **kwds):
testmod = testutils.create_module(self.project, 'testmod')
testmod.write(source_code)
extractor = extract.ExtractVariable(self.project, testmod, start, end)
self.project.do(extractor.get_changes(extracted, **kwds))
return testmod.read()
def _convert_line_range_to_offset(self, code, start, end):
lines = rope.base.codeanalyze.SourceLinesAdapter(code)
return lines.get_line_start(start), lines.get_line_end(end)
def test_simple_extract_function(self):
code = "def a_func():\n print('one')\n print('two')\n"
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'extracted')
expected = "def a_func():\n extracted()\n print('two')\n\n" \
"def extracted():\n print('one')\n"
self.assertEquals(expected, refactored)
def test_extract_function_at_the_end_of_file(self):
code = "def a_func():\n print('one')"
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'extracted')
expected = "def a_func():\n extracted()\n" \
"def extracted():\n print('one')\n"
self.assertEquals(expected, refactored)
def test_extract_function_after_scope(self):
code = "def a_func():\n print('one')\n print('two')" \
"\n\nprint('hey')\n"
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'extracted')
expected = "def a_func():\n extracted()\n print('two')\n\n" \
"def extracted():\n print('one')\n\nprint('hey')\n"
self.assertEquals(expected, refactored)
def test_simple_extract_function_with_parameter(self):
code = "def a_func():\n a_var = 10\n print(a_var)\n"
start, end = self._convert_line_range_to_offset(code, 3, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = "def a_func():\n a_var = 10\n new_func(a_var)\n\n" \
"def new_func(a_var):\n print(a_var)\n"
self.assertEquals(expected, refactored)
def test_not_unread_variables_as_parameter(self):
code = "def a_func():\n a_var = 10\n print('hey')\n"
start, end = self._convert_line_range_to_offset(code, 3, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = "def a_func():\n a_var = 10\n new_func()\n\n" \
"def new_func():\n print('hey')\n"
self.assertEquals(expected, refactored)
def test_simple_extract_function_with_two_parameter(self):
code = 'def a_func():\n a_var = 10\n another_var = 20\n' \
' third_var = a_var + another_var\n'
start, end = self._convert_line_range_to_offset(code, 4, 4)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n a_var = 10\n another_var = 20\n' \
' new_func(a_var, another_var)\n\n' \
'def new_func(a_var, another_var):\n' \
' third_var = a_var + another_var\n'
self.assertEquals(expected, refactored)
def test_simple_extract_function_with_return_value(self):
code = 'def a_func():\n a_var = 10\n print(a_var)\n'
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n a_var = new_func()' \
'\n print(a_var)\n\n' \
'def new_func():\n a_var = 10\n return a_var\n'
self.assertEquals(expected, refactored)
def test_extract_function_with_multiple_return_values(self):
code = 'def a_func():\n a_var = 10\n another_var = 20\n' \
' third_var = a_var + another_var\n'
start, end = self._convert_line_range_to_offset(code, 2, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n a_var, another_var = new_func()\n' \
' third_var = a_var + another_var\n\n' \
'def new_func():\n a_var = 10\n another_var = 20\n' \
' return a_var, another_var\n'
self.assertEquals(expected, refactored)
def test_simple_extract_method(self):
code = 'class AClass(object):\n\n' \
' def a_func(self):\n print(1)\n print(2)\n'
start, end = self._convert_line_range_to_offset(code, 4, 4)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'class AClass(object):\n\n' \
' def a_func(self):\n' \
' self.new_func()\n' \
' print(2)\n\n' \
' def new_func(self):\n print(1)\n'
self.assertEquals(expected, refactored)
def test_extract_method_with_args_and_returns(self):
code = 'class AClass(object):\n' \
' def a_func(self):\n' \
' a_var = 10\n' \
' another_var = a_var * 3\n' \
' third_var = a_var + another_var\n'
start, end = self._convert_line_range_to_offset(code, 4, 4)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'class AClass(object):\n' \
' def a_func(self):\n' \
' a_var = 10\n' \
' another_var = self.new_func(a_var)\n' \
' third_var = a_var + another_var\n\n' \
' def new_func(self, a_var):\n' \
' another_var = a_var * 3\n' \
' return another_var\n'
self.assertEquals(expected, refactored)
def test_extract_method_with_self_as_argument(self):
code = 'class AClass(object):\n' \
' def a_func(self):\n' \
' print(self)\n'
start, end = self._convert_line_range_to_offset(code, 3, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'class AClass(object):\n' \
' def a_func(self):\n' \
' self.new_func()\n\n' \
' def new_func(self):\n' \
' print(self)\n'
self.assertEquals(expected, refactored)
def test_extract_method_with_no_self_as_argument(self):
code = 'class AClass(object):\n' \
' def a_func():\n' \
' print(1)\n'
start, end = self._convert_line_range_to_offset(code, 3, 3)
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_with_multiple_methods(self):
code = 'class AClass(object):\n' \
' def a_func(self):\n' \
' print(self)\n\n' \
' def another_func(self):\n' \
' pass\n'
start, end = self._convert_line_range_to_offset(code, 3, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'class AClass(object):\n' \
' def a_func(self):\n' \
' self.new_func()\n\n' \
' def new_func(self):\n' \
' print(self)\n\n' \
' def another_func(self):\n' \
' pass\n'
self.assertEquals(expected, refactored)
def test_extract_function_with_function_returns(self):
code = 'def a_func():\n def inner_func():\n pass\n' \
' inner_func()\n'
start, end = self._convert_line_range_to_offset(code, 2, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n' \
' inner_func = new_func()\n inner_func()\n\n' \
'def new_func():\n' \
' def inner_func():\n pass\n' \
' return inner_func\n'
self.assertEquals(expected, refactored)
def test_simple_extract_global_function(self):
code = "print('one')\nprint('two')\nprint('three')\n"
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = "print('one')\n\ndef new_func():\n print('two')\n" \
"\nnew_func()\nprint('three')\n"
self.assertEquals(expected, refactored)
def test_extract_global_function_inside_ifs(self):
code = 'if True:\n a = 10\n'
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = '\ndef new_func():\n a = 10\n\nif True:\n' \
' new_func()\n'
self.assertEquals(expected, refactored)
def test_extract_function_while_inner_function_reads(self):
code = 'def a_func():\n a_var = 10\n' \
' def inner_func():\n print(a_var)\n' \
' return inner_func\n'
start, end = self._convert_line_range_to_offset(code, 3, 4)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n a_var = 10\n' \
' inner_func = new_func(a_var)' \
'\n return inner_func\n\n' \
'def new_func(a_var):\n' \
' def inner_func():\n print(a_var)\n' \
' return inner_func\n'
self.assertEquals(expected, refactored)
def test_extract_method_bad_range(self):
code = "def a_func():\n pass\na_var = 10\n"
start, end = self._convert_line_range_to_offset(code, 2, 3)
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_bad_range2(self):
code = "class AClass(object):\n pass\n"
start, end = self._convert_line_range_to_offset(code, 1, 1)
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_containing_return(self):
code = 'def a_func(arg):\n if arg:\n return arg * 2' \
'\n return 1'
start, end = self._convert_line_range_to_offset(code, 2, 4)
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_containing_yield(self):
code = "def a_func(arg):\n yield arg * 2\n"
start, end = self._convert_line_range_to_offset(code, 2, 2)
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_containing_uncomplete_lines(self):
code = 'a_var = 20\nanother_var = 30\n'
start = code.index('20')
end = code.index('30') + 2
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_containing_uncomplete_lines2(self):
code = 'a_var = 20\nanother_var = 30\n'
start = code.index('20')
end = code.index('another') + 5
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_function_and_argument_as_paramenter(self):
code = 'def a_func(arg):\n print(arg)\n'
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func(arg):\n new_func(arg)\n\n' \
'def new_func(arg):\n print(arg)\n'
self.assertEquals(expected, refactored)
def test_extract_function_and_end_as_the_start_of_a_line(self):
code = 'print("hey")\nif True:\n pass\n'
start = 0
end = code.index('\n') + 1
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = '\ndef new_func():\n print("hey")\n\n' \
'new_func()\nif True:\n pass\n'
self.assertEquals(expected, refactored)
def test_extract_function_and_indented_blocks(self):
code = 'def a_func(arg):\n if True:\n' \
' if True:\n print(arg)\n'
start, end = self._convert_line_range_to_offset(code, 3, 4)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func(arg):\n ' \
'if True:\n new_func(arg)\n\n' \
'def new_func(arg):\n if True:\n print(arg)\n'
self.assertEquals(expected, refactored)
def test_extract_method_and_multi_line_headers(self):
code = 'def a_func(\n arg):\n print(arg)\n'
start, end = self._convert_line_range_to_offset(code, 3, 3)
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func(\n arg):\n new_func(arg)\n\n' \
'def new_func(arg):\n print(arg)\n'
self.assertEquals(expected, refactored)
def test_single_line_extract_function(self):
code = 'a_var = 10 + 20\n'
start = code.index('10')
end = code.index('20') + 2
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = "\ndef new_func():\n " \
"return 10 + 20\n\na_var = new_func()\n"
self.assertEquals(expected, refactored)
def test_single_line_extract_function2(self):
code = 'def a_func():\n a = 10\n b = a * 20\n'
start = code.rindex('a')
end = code.index('20') + 2
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n a = 10\n b = new_func(a)\n' \
'\ndef new_func(a):\n return a * 20\n'
self.assertEquals(expected, refactored)
def test_single_line_extract_method_and_logical_lines(self):
code = 'a_var = 10 +\\\n 20\n'
start = code.index('10')
end = code.index('20') + 2
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = '\ndef new_func():\n ' \
'return 10 + 20\n\na_var = new_func()\n'
self.assertEquals(expected, refactored)
def test_single_line_extract_method_and_logical_lines2(self):
code = 'a_var = (10,\\\n 20)\n'
start = code.index('10') - 1
end = code.index('20') + 3
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = '\ndef new_func():\n' \
' return (10, 20)\n\na_var = new_func()\n'
self.assertEquals(expected, refactored)
def test_single_line_extract_method(self):
code = "class AClass(object):\n\n" \
" def a_func(self):\n a = 10\n b = a * a\n"
start = code.rindex('=') + 2
end = code.rindex('a') + 1
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'class AClass(object):\n\n' \
' def a_func(self):\n' \
' a = 10\n b = self.new_func(a)\n\n' \
' def new_func(self, a):\n return a * a\n'
self.assertEquals(expected, refactored)
def test_single_line_extract_function_if_condition(self):
code = 'if True:\n pass\n'
start = code.index('True')
end = code.index('True') + 4
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = "\ndef new_func():\n return True\n\nif new_func():" \
"\n pass\n"
self.assertEquals(expected, refactored)
def test_unneeded_params(self):
code = 'class A(object):\n ' \
'def a_func(self):\n a_var = 10\n a_var += 2\n'
start = code.rindex('2')
end = code.rindex('2') + 1
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'class A(object):\n' \
' def a_func(self):\n a_var = 10\n' \
' a_var += self.new_func()\n\n' \
' def new_func(self):\n return 2\n'
self.assertEquals(expected, refactored)
def test_breaks_and_continues_inside_loops(self):
code = 'def a_func():\n for i in range(10):\n continue\n'
start = code.index('for')
end = len(code) - 1
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n new_func()\n\n' \
'def new_func():\n' \
' for i in range(10):\n continue\n'
self.assertEquals(expected, refactored)
def test_breaks_and_continues_outside_loops(self):
code = 'def a_func():\n' \
' for i in range(10):\n a = i\n continue\n'
start = code.index('a = i')
end = len(code) - 1
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_variable_writes_followed_by_variable_reads_after_extraction(self):
code = 'def a_func():\n a = 1\n a = 2\n b = a\n'
start = code.index('a = 1')
end = code.index('a = 2') - 1
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n new_func()\n a = 2\n b = a\n\n' \
'def new_func():\n a = 1\n'
self.assertEquals(expected, refactored)
def test_var_writes_followed_by_var_reads_inside_extraction(self):
code = 'def a_func():\n a = 1\n a = 2\n b = a\n'
start = code.index('a = 2')
end = len(code) - 1
refactored = self.do_extract_method(code, start, end, 'new_func')
expected = 'def a_func():\n a = 1\n new_func()\n\n' \
'def new_func():\n a = 2\n b = a\n'
self.assertEquals(expected, refactored)
def test_extract_variable(self):
code = 'a_var = 10 + 20\n'
start = code.index('10')
end = code.index('20') + 2
refactored = self.do_extract_variable(code, start, end, 'new_var')
expected = 'new_var = 10 + 20\na_var = new_var\n'
self.assertEquals(expected, refactored)
def test_extract_variable_multiple_lines(self):
code = 'a = 1\nb = 2\n'
start = code.index('1')
end = code.index('1') + 1
refactored = self.do_extract_variable(code, start, end, 'c')
expected = 'c = 1\na = c\nb = 2\n'
self.assertEquals(expected, refactored)
def test_extract_variable_in_the_middle_of_statements(self):
code = 'a = 1 + 2\n'
start = code.index('1')
end = code.index('1') + 1
refactored = self.do_extract_variable(code, start, end, 'c')
expected = 'c = 1\na = c + 2\n'
self.assertEquals(expected, refactored)
def test_extract_variable_for_a_tuple(self):
code = 'a = 1, 2\n'
start = code.index('1')
end = code.index('2') + 1
refactored = self.do_extract_variable(code, start, end, 'c')
expected = 'c = 1, 2\na = c\n'
self.assertEquals(expected, refactored)
def test_extract_variable_for_a_string(self):
code = 'def a_func():\n a = "hey!"\n'
start = code.index('"')
end = code.rindex('"') + 1
refactored = self.do_extract_variable(code, start, end, 'c')
expected = 'def a_func():\n c = "hey!"\n a = c\n'
self.assertEquals(expected, refactored)
def test_extract_variable_inside_ifs(self):
code = 'if True:\n a = 1 + 2\n'
start = code.index('1')
end = code.rindex('2') + 1
refactored = self.do_extract_variable(code, start, end, 'b')
expected = 'if True:\n b = 1 + 2\n a = b\n'
self.assertEquals(expected, refactored)
def test_extract_variable_inside_ifs_and_logical_lines(self):
code = 'if True:\n a = (3 + \n(1 + 2))\n'
start = code.index('1')
end = code.index('2') + 1
refactored = self.do_extract_variable(code, start, end, 'b')
expected = 'if True:\n b = 1 + 2\n a = (3 + \n(b))\n'
self.assertEquals(expected, refactored)
# TODO: Handle when extracting a subexpression
def xxx_test_extract_variable_for_a_subexpression(self):
code = 'a = 3 + 1 + 2\n'
start = code.index('1')
end = code.index('2') + 1
refactored = self.do_extract_variable(code, start, end, 'b')
expected = 'b = 1 + 2\na = 3 + b\n'
self.assertEquals(expected, refactored)
def test_extract_variable_starting_from_the_start_of_the_line(self):
code = 'a_dict = {1: 1}\na_dict.values().count(1)\n'
start = code.rindex('a_dict')
end = code.index('count') - 1
refactored = self.do_extract_variable(code, start, end, 'values')
expected = 'a_dict = {1: 1}\n' \
'values = a_dict.values()\nvalues.count(1)\n'
self.assertEquals(expected, refactored)
def test_extract_variable_on_the_last_line_of_a_function(self):
code = 'def f():\n a_var = {}\n a_var.keys()\n'
start = code.rindex('a_var')
end = code.index('.keys')
refactored = self.do_extract_variable(code, start, end, 'new_var')
expected = 'def f():\n a_var = {}\n ' \
'new_var = a_var\n new_var.keys()\n'
self.assertEquals(expected, refactored)
def test_extract_variable_on_the_indented_function_statement(self):
code = 'def f():\n if True:\n a_var = 1 + 2\n'
start = code.index('1')
end = code.index('2') + 1
refactored = self.do_extract_variable(code, start, end, 'new_var')
expected = 'def f():\n if True:\n' \
' new_var = 1 + 2\n a_var = new_var\n'
self.assertEquals(expected, refactored)
def test_extract_method_on_the_last_line_of_a_function(self):
code = 'def f():\n a_var = {}\n a_var.keys()\n'
start = code.rindex('a_var')
end = code.index('.keys')
refactored = self.do_extract_method(code, start, end, 'new_f')
expected = 'def f():\n a_var = {}\n new_f(a_var).keys()\n\n' \
'def new_f(a_var):\n return a_var\n'
self.assertEquals(expected, refactored)
def test_raising_exception_when_on_incomplete_variables(self):
        """A region that starts inside a token must be rejected."""
        code = 'a_var = 10 + 20\n'
        # Start one character inside the literal `10`, so the selection
        # does not cover a complete expression.
        start, end = code.index('10') + 1, code.index('20') + 2
        with self.assertRaises(rope.base.exceptions.RefactoringError):
            self.do_extract_method(code, start, end, 'new_func')
def test_raising_exception_when_on_incomplete_variables_on_end(self):
code = 'a_var = 10 + 20\n'
start = code.index('10')
end = code.index('20') + 1
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_raising_exception_on_bad_parens(self):
code = 'a_var = (10 + 20) + 30\n'
start = code.index('20')
end = code.index('30') + 2
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_raising_exception_on_bad_operators(self):
code = 'a_var = 10 + 20 + 30\n'
start = code.index('10')
end = code.rindex('+') + 1
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
# FIXME: Extract method should be more intelligent about bad ranges
def xxx_test_raising_exception_on_function_parens(self):
code = 'a = range(10)'
start = code.index('(')
end = code.rindex(')') + 1
with self.assertRaises(rope.base.exceptions.RefactoringError):
self.do_extract_method(code, start, end, 'new_func')
def test_extract_method_and_extra_blank_lines(self):
code = '\nprint(1)\n'
refactored = self.do_extract_method(code, 0, len(code), 'new_f')
expected = '\n\ndef new_f():\n print(1)\n\nnew_f()\n'
self.assertEquals(expected, refactored)
def test_variable_writes_in_the_same_line_as_variable_read(self):
code = 'a = 1\na = 1 + a\n'
start = code.index('\n') + 1
end = len(code)
refactored = self.do_extract_method(code, start, end, 'new_f',
global_=True)
expected = 'a = 1\n\ndef new_f(a):\n a = 1 + a\n\nnew_f(a)\n'
self.assertEquals(expected, refactored)
def test_variable_writes_in_the_same_line_as_variable_read2(self):
code = 'a = 1\na += 1\n'
start = code.index('\n') + 1
end = len(code)
refactored = self.do_extract_method(code, start, end, 'new_f',
global_=True)
expected = 'a = 1\n\ndef new_f():\n a += 1\n\nnew_f()\n'
self.assertEquals(expected, refactored)
def test_variable_and_similar_expressions(self):
code = 'a = 1\nb = 1\n'
start = code.index('1')
end = start + 1
refactored = self.do_extract_variable(code, start, end,
'one', similar=True)
expected = 'one = 1\na = one\nb = one\n'
self.assertEquals(expected, refactored)
def test_definition_should_appear_before_the_first_use(self):
code = 'a = 1\nb = 1\n'
start = code.rindex('1')
end = start + 1
refactored = self.do_extract_variable(code, start, end,
'one', similar=True)
expected = 'one = 1\na = one\nb = one\n'
self.assertEquals(expected, refactored)
def test_extract_method_and_similar_expressions(self):
code = 'a = 1\nb = 1\n'
start = code.index('1')
end = start + 1
refactored = self.do_extract_method(code, start, end,
'one', similar=True)
expected = '\ndef one():\n return 1\n\na = one()\nb = one()\n'
self.assertEquals(expected, refactored)
def test_simple_extract_method_and_similar_statements(self):
code = 'class AClass(object):\n\n' \
' def func1(self):\n a = 1 + 2\n b = a\n' \
' def func2(self):\n a = 1 + 2\n b = a\n'
start, end = self._convert_line_range_to_offset(code, 4, 4)
refactored = self.do_extract_method(code, start, end,
'new_func', similar=True)
expected = 'class AClass(object):\n\n' \
' def func1(self):\n' \
' a = self.new_func()\n b = a\n\n' \
' def new_func(self):\n' \
' a = 1 + 2\n return a\n' \
' def func2(self):\n' \
' a = self.new_func()\n b = a\n'
self.assertEquals(expected, refactored)
def test_extract_method_and_similar_statements2(self):
code = 'class AClass(object):\n\n' \
' def func1(self, p1):\n a = p1 + 2\n' \
' def func2(self, p2):\n a = p2 + 2\n'
start = code.rindex('p1')
end = code.index('2\n') + 1
refactored = self.do_extract_method(code, start, end,
'new_func', similar=True)
expected = 'class AClass(object):\n\n' \
' def func1(self, p1):\n ' \
'a = self.new_func(p1)\n\n' \
' def new_func(self, p1):\n return p1 + 2\n' \
' def func2(self, p2):\n a = self.new_func(p2)\n'
self.assertEquals(expected, refactored)
def test_extract_method_and_similar_sttemnts_return_is_different(self):
code = 'class AClass(object):\n\n' \
' def func1(self, p1):\n a = p1 + 2\n' \
' def func2(self, p2):\n self.attr = p2 + 2\n'
start = code.rindex('p1')
end = code.index('2\n') + 1
refactored = self.do_extract_method(code, start, end,
'new_func', similar=True)
expected = 'class AClass(object):\n\n' \
' def func1(self, p1):' \
'\n a = self.new_func(p1)\n\n' \
' def new_func(self, p1):\n return p1 + 2\n' \
' def func2(self, p2):\n' \
' self.attr = self.new_func(p2)\n'
self.assertEquals(expected, refactored)
def test_definition_should_appear_where_it_is_visible(self):
code = 'if True:\n a = 1\nelse:\n b = 1\n'
start = code.rindex('1')
end = start + 1
refactored = self.do_extract_variable(code, start, end,
'one', similar=True)
expected = 'one = 1\nif True:\n a = one\nelse:\n b = one\n'
self.assertEquals(expected, refactored)
def test_extract_variable_and_similar_statements_in_classes(self):
code = 'class AClass(object):\n\n' \
' def func1(self):\n a = 1\n' \
' def func2(self):\n b = 1\n'
start = code.index(' 1') + 1
refactored = self.do_extract_variable(code, start, start + 1,
'one', similar=True)
expected = 'class AClass(object):\n\n' \
' def func1(self):\n one = 1\n a = one\n' \
' def func2(self):\n b = 1\n'
self.assertEquals(expected, refactored)
def test_extract_method_in_staticmethods(self):
code = 'class AClass(object):\n\n' \
' @staticmethod\n def func2():\n b = 1\n'
start = code.index(' 1') + 1
refactored = self.do_extract_method(code, start, start + 1,
'one', similar=True)
expected = 'class AClass(object):\n\n' \
' @staticmethod\n def func2():\n' \
' b = AClass.one()\n\n' \
' @staticmethod\n def one():\n' \
' return 1\n'
self.assertEquals(expected, refactored)
def test_extract_normal_method_with_staticmethods(self):
code = 'class AClass(object):\n\n' \
' @staticmethod\n def func1():\n b = 1\n' \
' def func2(self):\n b = 1\n'
start = code.rindex(' 1') + 1
refactored = self.do_extract_method(code, start, start + 1,
'one', similar=True)
expected = 'class AClass(object):\n\n' \
' @staticmethod\n def func1():\n b = 1\n' \
' def func2(self):\n b = self.one()\n\n' \
' def one(self):\n return 1\n'
self.assertEquals(expected, refactored)
def test_extract_variable_with_no_new_lines_at_the_end(self):
code = 'a_var = 10'
start = code.index('10')
end = start + 2
refactored = self.do_extract_variable(code, start, end, 'new_var')
expected = 'new_var = 10\na_var = new_var'
self.assertEquals(expected, refactored)
def test_extract_method_containing_return_in_functions(self):
code = 'def f(arg):\n return arg\nprint(f(1))\n'
start, end = self._convert_line_range_to_offset(code, 1, 3)
refactored = self.do_extract_method(code, start, end, 'a_func')
expected = '\ndef a_func():\n def f(arg):\n return arg\n' \
' print(f(1))\n\na_func()\n'
self.assertEquals(expected, refactored)
def test_extract_method_and_varying_first_parameter(self):
code = 'class C(object):\n' \
' def f1(self):\n print(str(self))\n' \
' def f2(self):\n print(str(1))\n'
start = code.index('print(') + 6
end = code.index('))\n') + 1
refactored = self.do_extract_method(code, start, end,
'to_str', similar=True)
expected = 'class C(object):\n' \
' def f1(self):\n print(self.to_str())\n\n' \
' def to_str(self):\n return str(self)\n' \
' def f2(self):\n print(str(1))\n'
self.assertEquals(expected, refactored)
def test_extract_method_when_an_attribute_exists_in_function_scope(self):
code = 'class A(object):\n def func(self):\n pass\n' \
'a = A()\n' \
'def f():\n' \
' func = a.func()\n' \
' print func\n'
start, end = self._convert_line_range_to_offset(code, 6, 6)
refactored = self.do_extract_method(code, start, end, 'g')
refactored = refactored[refactored.index('A()') + 4:]
expected = 'def f():\n func = g()\n print func\n\n' \
'def g():\n func = a.func()\n return func\n'
self.assertEquals(expected, refactored)
def test_global_option_for_extract_method(self):
code = 'def a_func():\n print(1)\n'
start, end = self._convert_line_range_to_offset(code, 2, 2)
refactored = self.do_extract_method(code, start, end,
'extracted', global_=True)
expected = 'def a_func():\n extracted()\n\n' \
'def extracted():\n print(1)\n'
self.assertEquals(expected, refactored)
def test_global_extract_method(self):
code = 'class AClass(object):\n\n' \
' def a_func(self):\n print(1)\n'
start, end = self._convert_line_range_to_offset(code, 4, 4)
refactored = self.do_extract_method(code, start, end,
'new_func', global_=True)
expected = 'class AClass(object):\n\n' \
' def a_func(self):\n new_func()\n\n' \
'def new_func():\n print(1)\n'
self.assertEquals(expected, refactored)
def test_extract_method_with_multiple_methods(self): # noqa
code = 'class AClass(object):\n' \
' def a_func(self):\n' \
' print(1)\n\n' \
' def another_func(self):\n' \
' pass\n'
start, end = self._convert_line_range_to_offset(code, 3, 3)
refactored = self.do_extract_method(code, start, end,
'new_func', global_=True)
expected = 'class AClass(object):\n' \
' def a_func(self):\n' \
' new_func()\n\n' \
' def another_func(self):\n' \
' pass\n\n' \
'def new_func():\n' \
' print(1)\n'
self.assertEquals(expected, refactored)
def test_where_to_seach_when_extracting_global_names(self):
code = 'def a():\n return 1\ndef b():\n return 1\nb = 1\n'
start = code.index('1')
end = start + 1
refactored = self.do_extract_variable(code, start, end, 'one',
similar=True, global_=True)
expected = 'def a():\n return one\none = 1\n' \
'def b():\n return one\nb = one\n'
self.assertEquals(expected, refactored)
def test_extracting_pieces_with_distinct_temp_names(self):
code = 'a = 1\nprint a\nb = 1\nprint b\n'
start = code.index('a')
end = code.index('\nb')
refactored = self.do_extract_method(code, start, end, 'f',
similar=True, global_=True)
expected = '\ndef f():\n a = 1\n print a\n\nf()\nf()\n'
self.assertEquals(expected, refactored)
def test_extract_methods_in_glob_funcs_should_be_glob(self):
code = 'def f():\n a = 1\ndef g():\n b = 1\n'
start = code.rindex('1')
refactored = self.do_extract_method(code, start, start + 1, 'one',
similar=True, global_=False)
expected = 'def f():\n a = one()\ndef g():\n b = one()\n\n' \
'def one():\n return 1\n'
self.assertEquals(expected, refactored)
def test_extract_methods_in_glob_funcs_should_be_glob_2(self):
code = 'if 1:\n var = 2\n'
start = code.rindex('2')
refactored = self.do_extract_method(code, start, start + 1, 'two',
similar=True, global_=False)
expected = '\ndef two():\n return 2\n\nif 1:\n var = two()\n'
self.assertEquals(expected, refactored)
def test_extract_method_and_try_blocks(self):
code = 'def f():\n try:\n pass\n' \
' except Exception:\n pass\n'
start, end = self._convert_line_range_to_offset(code, 2, 5)
refactored = self.do_extract_method(code, start, end, 'g')
expected = 'def f():\n g()\n\ndef g():\n try:\n pass\n' \
' except Exception:\n pass\n'
self.assertEquals(expected, refactored)
def test_extract_and_not_passing_global_functions(self):
code = 'def next(p):\n return p + 1\nvar = next(1)\n'
start = code.rindex('next')
refactored = self.do_extract_method(code, start, len(code) - 1, 'two')
expected = 'def next(p):\n return p + 1\n' \
'\ndef two():\n return next(1)\n\nvar = two()\n'
self.assertEquals(expected, refactored)
def test_extracting_with_only_one_return(self):
code = 'def f():\n var = 1\n return var\n'
start, end = self._convert_line_range_to_offset(code, 2, 3)
refactored = self.do_extract_method(code, start, end, 'g')
expected = 'def f():\n return g()\n\n' \
'def g():\n var = 1\n return var\n'
self.assertEquals(expected, refactored)
def test_extracting_variable_and_implicit_continuations(self):
code = 's = ("1"\n "2")\n'
start = code.index('"')
end = code.rindex('"') + 1
refactored = self.do_extract_variable(code, start, end, 's2')
expected = 's2 = "1" "2"\ns = (s2)\n'
self.assertEquals(expected, refactored)
def test_extracting_method_and_implicit_continuations(self):
code = 's = ("1"\n "2")\n'
start = code.index('"')
end = code.rindex('"') + 1
refactored = self.do_extract_method(code, start, end, 'f')
expected = '\ndef f():\n return "1" "2"\n\ns = (f())\n'
self.assertEquals(expected, refactored)
def test_passing_conditional_updated_vars_in_extracted(self):
code = 'def f(a):\n' \
' if 0:\n' \
' a = 1\n' \
' print(a)\n'
start, end = self._convert_line_range_to_offset(code, 2, 4)
refactored = self.do_extract_method(code, start, end, 'g')
expected = 'def f(a):\n' \
' g(a)\n\n' \
'def g(a):\n' \
' if 0:\n' \
' a = 1\n' \
' print(a)\n'
self.assertEquals(expected, refactored)
def test_returning_conditional_updated_vars_in_extracted(self):
code = 'def f(a):\n' \
' if 0:\n' \
' a = 1\n' \
' print(a)\n'
start, end = self._convert_line_range_to_offset(code, 2, 3)
refactored = self.do_extract_method(code, start, end, 'g')
expected = 'def f(a):\n' \
' a = g(a)\n' \
' print(a)\n\n' \
'def g(a):\n' \
' if 0:\n' \
' a = 1\n' \
' return a\n'
self.assertEquals(expected, refactored)
def test_extract_method_with_variables_possibly_written_to(self):
code = "def a_func(b):\n" \
" if b > 0:\n" \
" a = 2\n" \
" print a\n"
start, end = self._convert_line_range_to_offset(code, 2, 3)
refactored = self.do_extract_method(code, start, end, 'extracted')
expected = "def a_func(b):\n" \
" a = extracted(b)\n" \
" print a\n\n" \
"def extracted(b):\n" \
" if b > 0:\n" \
" a = 2\n" \
" return a\n"
self.assertEquals(expected, refactored)
if __name__ == '__main__':
    # Run the whole test module when executed directly.
    unittest.main()
| [
"[email protected]"
] | |
39d7269798832e93cc7391c6516b8df87b50ca36 | 59c0669a38c4178f2f5cf8f9dca7553849c286a2 | /MyPro/pythonScript/QRCodeDetect/Invoice/hough_tansform_bad.py | 437f292bb460649c54b3fb981f99722309b81288 | [] | no_license | AUGUSTRUSH8/ImageProcess | f33ceaabaac67436df47fd1e1f115a8f44a6f556 | 46fc85b61dab52c3876dfacb4dfd22c962dc13bf | refs/heads/master | 2023-04-27T21:39:36.044320 | 2022-07-04T14:59:35 | 2022-07-04T14:59:35 | 174,789,186 | 31 | 17 | null | 2022-07-06T20:07:14 | 2019-03-10T07:01:13 | Java | UTF-8 | Python | false | false | 4,007 | py | # -*- coding: utf-8 -*-
import cv2
import numpy as np
from matplotlib import pyplot as plt
import math
def rotate_about_center2(src, radian, scale=1.):
    """Rotate image ``src`` about its center by ``radian`` (radians).

    The output canvas is enlarged so no corner of the rotated image is
    clipped; ``scale`` optionally resizes the result.  Returns the new
    image (Lanczos interpolation).
    """
    # Input angle is in radians (translated from the original note).
    w = src.shape[1]
    h = src.shape[0]
    angle = radian * 180 / np.pi
    # now calculate new image width and height
    nw = (abs(np.sin(radian)*h) + abs(np.cos(radian)*w))*scale
    nh = (abs(np.cos(radian)*h) + abs(np.sin(radian)*w))*scale
    # ask OpenCV for the rotation matrix
    rot_mat = cv2.getRotationMatrix2D((nw*0.5, nh*0.5), angle, scale)
    # calculate the move from the old center to the new center combined
    # with the rotation
    rot_move = np.dot(rot_mat, np.array([(nw-w)*0.5, (nh-h)*0.5,0]))
    # the move only affects the translation, so update the translation
    # part of the transform
    rot_mat[0,2] += rot_move[0]
    rot_mat[1,2] += rot_move[1]
    return cv2.warpAffine(src, rot_mat, (int(math.ceil(nw)), int(math.ceil(nh))), flags=cv2.INTER_LANCZOS4)
def get_group(arr):
    """Bin angles (radians) into four 45-degree groups, dropping empty bins.

    Bins are [0, pi/4), [pi/4, pi/2), [pi/2, 3*pi/4) and [3*pi/4, inf);
    the non-empty groups are returned in ascending angle order.
    """
    bounds = (np.pi / 4, np.pi / 2, np.pi * 3 / 4)  # 45, 90, 135 degrees
    groups = [[], [], [], []]
    for radian in arr:
        # Number of bounds the angle meets or exceeds == its bin index.
        idx = sum(radian >= b for b in bounds)
        groups[idx].append(radian)
    # Keep only the bins that actually received angles (the original
    # removed empty lists in a while-loop; a comprehension is equivalent).
    return [g for g in groups if g]
def get_min_var_avg(arr):
    """Return the mean angle of the tightest (lowest-variance) group.

    Groups come from :func:`get_group`; the cluster whose angles agree the
    most is taken as the dominant line direction.

    Fix: removed the leftover debug calls -- ``print`` and in particular
    ``cv2.waitKey(0)``, which blocked this pure computation waiting for a
    key press.
    """
    group_arr = get_group(arr)
    # Variance of every non-empty group (the original special-cased
    # len <= 1, but the comprehension covers that case identically).
    var_arr = [np.var(g) for g in group_arr]
    # Index of the group with the smallest spread.
    min_i = int(np.argmin(var_arr))
    return np.mean(group_arr[min_i])
def get_rotate_radian(radian, reverse=False):
    """Map a Hough-line angle to the rotation needed to deskew the image.

    Angles in [0, pi/4) rotate as-is; [pi/4, 3*pi/4) are shifted by -pi/2;
    anything above by -pi.  ``reverse`` adds pi/2 (used by the caller for
    portrait images where height > width).

    Fix: removed the leftover debug ``print`` of the result.
    """
    quarter = np.pi / 4
    half = np.pi / 2
    if radian < quarter:
        result = radian
    elif radian < 3 * quarter:
        # The original had two branches (< pi/2 and < 3*pi/4) with the
        # same body; they are merged here.
        result = radian - half
    else:
        result = radian - np.pi
    if reverse:
        result += half
    return result
def rotate():
    """Load test3.jpg, estimate its skew from Hough lines, show the result.

    Pipeline: grayscale read -> Gaussian blur -> Canny edges -> Hough
    transform -> average angle of the tightest cluster -> rotate.
    """
    image = cv2.imread("test3.jpg", 0)
    print(image.shape)
    # Gaussian blur to suppress noise before edge detection.
    blur = cv2.GaussianBlur(image,(7,7),0)# kernel tuned by hand (empirical)
    cv2.imshow('image',blur)
    cv2.waitKey(0)
    # Canny edge detection.
    canny = cv2.Canny(blur, 20, 150, 3)
    cv2.imshow("canny",canny)
    lines = cv2.HoughLines(canny, 1, np.pi/180, 200)# threshold tuned by hand (empirical)
    # Average the detected line angles (tightest cluster wins).
    l = len(lines[0])
    print(l)
    theta_arr = [lines[0][i][1] for i in range(l)]
    print(theta_arr)
    cv2.waitKey(0)
    rotate_theta = get_min_var_avg(theta_arr)
    print(rotate_theta)
    #print lines
    '''for line in lines[0]:
        rho = line[0]
        theta = line[1]
        a = np.cos(theta)
        b = np.sin(theta)
        x0 = a*rho
        y0 = b*rho
        cv2.line(image, (int(x0 - 1000*b), int(y0 + 1000*a)), (int(x0 + 1000*b), int(y0 - 1000*a)), (0,255,0), 2)
        #cv2.imshow('image',image)
        #cv2.waitKey(0)'''
    img2 = rotate_about_center2(image, get_rotate_radian(rotate_theta, image.shape[0] > image.shape[1])) # height > width
    plt.imshow(img2)
    plt.show()
if __name__ == '__main__':
    # Run the deskew pipeline when executed as a script.
    rotate()
"l"
] | l |
4080d41a60b85ff5500efacfc8fa63c51b33899f | 2d1ffb862ec65116f88b0986e4f36d36110cbfe5 | /app/views.py | ced21fb3eae0537fbf78312e2c9f3eb801e59a90 | [] | no_license | stkc282/wedding | c38afc7861119b8cf4490fa35007841d58e161c7 | 1799b72820787a59d0d5b7edf7748b1ab7af9a98 | refs/heads/master | 2021-06-18T04:15:20.293547 | 2019-08-19T10:17:13 | 2019-08-19T10:17:13 | 202,826,952 | 0 | 0 | null | 2021-06-10T21:52:12 | 2019-08-17T02:48:38 | JavaScript | UTF-8 | Python | false | false | 3,288 | py | # from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse_lazy
from django.views.generic import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django_filters.views import FilterView
from .filters import ItemFilter
from .forms import ItemForm
from .models import Item
from django.shortcuts import render
# # Create your views here.
# # 検索一覧画面
class ItemFilterView(FilterView):
    """Search/list view for Item.

    The filtering configuration below (ordering, django-filter class,
    pagination, session-persisted query) is currently commented out;
    only the model binding is active.  Comments translated from Japanese.
    """
    model = Item
    #
    # # default ordering: newest first
    # queryset = Item.objects.all().order_by('-created_at')
    #
    # # django-filter settings
    # filterset_class = ItemFilter
    # strict = False
    #
    # # number of items per page
    # paginate_by = 10
    #
    # # persist the search conditions in the session
    # def get(self, request, **kwargs):
    #     if request.GET:
    #         request.session['query'] = request.GET
    #     else:
    #         request.GET = request.GET.copy()
    #         if 'query' in request.session.keys():
    #             for key in request.session['query'].keys():
    #                 request.GET[key] = request.session['query'][key]
    #
    #     return super().get(request, **kwargs)
# # 検索一覧画面
# class ItemFilterView(LoginRequiredMixin, FilterView):
# model = Item
#
# # デフォルトの並び順を新しい順とする
# queryset = Item.objects.all().order_by('-created_at')
#
# # django-filter用設定
# filterset_class = ItemFilter
# strict = False
#
# # 1ページあたりの表示件数
# paginate_by = 10
#
# # 検索条件をセッションに保存する
# def get(self, request, **kwargs):
# if request.GET:
# request.session['query'] = request.GET
# else:
# request.GET = request.GET.copy()
# if 'query' in request.session.keys():
# for key in request.session['query'].keys():
# request.GET[key] = request.session['query'][key]
#
# return super().get(request, **kwargs)
#
# 詳細画面
class ItemDetailView(DetailView):
    """Read-only detail page for a single Item.

    Fix: removed the stray space in the base-class list (PEP 8).
    """
    model = Item
# # 詳細画面
# class ItemDetailView(LoginRequiredMixin, DetailView):
# model = Item
# 登録画面
class ItemCreateView(CreateView):
    """Create an Item via ItemForm; redirect to the thank-you page."""
    model = Item
    form_class = ItemForm
    success_url = reverse_lazy('thanks')
# 更新画面
class ItemUpdateView(UpdateView):
    """Edit an existing Item via ItemForm; redirect back to the index."""
    model = Item
    form_class = ItemForm
    success_url = reverse_lazy('index')
# 削除画面
class ItemDeleteView(DeleteView):
    """Confirm-and-delete view for an Item; redirect back to the index."""
    model = Item
    success_url = reverse_lazy('index')
def invitation(request):
    """Render the static invitation page."""
    # post = get_object_or_404(Post, pk=pk )
    return render(request, 'app/invitation.html', {})
def thanks(request):
    """Render the static thank-you page shown after a successful RSVP."""
    return render(request, 'app/thanks.html', {})
def access(request):
    """Render the static access/directions page."""
    return render(request, 'app/access.html', {})
# def create(request):
# if request.method == 'POST':
# form_class = ItemForm(request.POST)
# if form_class.is_valid():
# model = form_class.save(commit=False)
# model.save()
# return redirect('index', pk=form_class.pk)
# else:
# form_class = ItemForm
# return render(request, 'app/thanks.html', {'form': form_class}) | [
"[email protected]"
] | |
e396119de92c2a9d0442f560d6abcdd894436e17 | 484f111548e9d7192a5748eb202c08802484d747 | /fw/flash.py | 8361fc57a27f60367e21952493f6068dcb8a037a | [
"Apache-2.0"
] | permissive | cmcmurrough/moteus | dafb2e5224409aaf1d57b66f58965d298845678d | 6780967ec40ad7f1ab76cdbd7021f2d07b739efe | refs/heads/main | 2023-07-11T10:29:58.645291 | 2021-08-13T13:38:32 | 2021-08-13T13:38:32 | 396,627,837 | 2 | 0 | Apache-2.0 | 2021-08-16T05:07:08 | 2021-08-16T05:07:07 | null | UTF-8 | Python | false | false | 2,193 | py | #!/usr/bin/python3
# Copyright 2021 Josh Pieper, [email protected].
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import platform
import subprocess
import sys
import tempfile
# On ARM hosts use the native toolchain; elsewhere use the bare-metal
# cross-compiler prefix.
BINPREFIX = '' if platform.machine().startswith('arm') else 'arm-none-eabi-'
OBJCOPY = BINPREFIX + 'objcopy'
# openocd invocation for an ST-Link probe attached to an STM32G4 target.
OPENOCD = 'openocd -f interface/stlink.cfg -f target/stm32g4x.cfg '
def main():
    """Extract raw images from the ELF files and flash them via openocd.

    Usage: flash.py [moteus.elf] [can_bootloader.elf]; both default to the
    bazel output locations.
    """
    # Scratch directory for the raw binaries produced by objcopy.
    tmpdir = tempfile.TemporaryDirectory()
    moteus_elffile = (
        sys.argv[1]
        if len(sys.argv) > 1 else
        'bazel-out/stm32g4-opt/bin/fw/moteus.elf')
    bootloader_elffile = (
        sys.argv[2]
        if len(sys.argv) > 2 else
        'bazel-out/stm32g4-opt/bin/fw/can_bootloader.elf')
    # Interrupt vector table -> flash base 0x08000000.
    subprocess.check_call(
        f'{OBJCOPY} -Obinary ' +
        f'-j .isr_vector ' +
        f'{moteus_elffile} {tmpdir.name}/out.08000000.bin',
        shell=True)
    # CAN bootloader sections -> 0x0800c000.
    subprocess.check_call(
        f'{OBJCOPY} -Obinary ' +
        f'-j .text -j .ARM.extab -j .ARM.exidx -j .data -j .bss ' +
        f'{bootloader_elffile} {tmpdir.name}/out.0800c000.bin',
        shell=True)
    # Main application sections -> 0x08010000.
    subprocess.check_call(
        f'{OBJCOPY} -Obinary ' +
        f'-j .text -j .ARM.extab -j .ARM.exidx -j .data -j .ccmram -j .bss ' +
        f'{moteus_elffile} {tmpdir.name}/out.08010000.bin',
        shell=True)
    # Program all three images, verify each, then reset into the app.
    subprocess.check_call(
        f'{OPENOCD} -c "init" ' +
        f'-c "reset_config none separate; ' +
        f'    program {tmpdir.name}/out.08000000.bin verify 0x8000000; ' +
        f'    program {tmpdir.name}/out.0800c000.bin verify 0x800c000; ' +
        f'    program {tmpdir.name}/out.08010000.bin verify ' +
        f'    reset exit 0x08010000"',
        shell=True)
if __name__ == '__main__':
    # Entry point: flash bootloader + firmware over the ST-Link probe.
    main()
| [
"[email protected]"
] | |
bb6e52fee441903389167e2b4292125b69cdb8b8 | ce3964c7195de67e07818b08a43286f7ec9fec3e | /dl_poly/get_pvt.py | 6fd5f7613ff6286470a47abe111c368b60d57ff7 | [] | no_license | zhuligs/physics | 82b601c856f12817c0cfedb17394b7b6ce6b843c | 7cbac1be7904612fd65b66b34edef453aac77973 | refs/heads/master | 2021-05-28T07:39:19.822692 | 2013-06-05T04:53:08 | 2013-06-05T04:53:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,852 | py | #!/usr/bin/env python
# Try retrieving P,V,T, etc. from the STATIS file, may be easier than from OUTPUT...
import os, sys, commands
def readlines(FILE, n):
    '''Advance FILE by consuming and discarding n lines.'''
    for _ in range(n):
        FILE.readline()
# NOTE: this script is Python 2 (print statement, integer division).
try:
    s = open('STATIS','r')
    header1 = s.readline()
    header2 = s.readline()
    c = open('CONTROL','r')
    lines = c.readlines()
    # Pull the `steps` and `stats` settings out of CONTROL; their ratio
    # gives the number of statistics blocks in STATIS.
    for line in lines:
        if len(line.split()) == 2:
            var, value = line.split()
            if var == 'steps':
                steps = int(value)
            elif var == 'stats':
                stats = int(value)
    c.close()
except:
    print 'Could not open STATIS and CONTROL files successfully--stopping'
    sys.exit(0)
# Total energy is row 1 value 1
# Temp is row 1, value 2
# Pres is row 6, value 2
# Vol is row 4, value 4
nblocks = int(steps)/int(stats)
out = open('pvt.dat','w')
out.write('# --Data extracted from STATIS file--\n')
out.write('#tstep\tpres (GPa)\tvol (ang^3)\ttemp (K)\tetot (eV)\t\tpot (eV)\n')
for i in range(nblocks):
    # Each block starts with: timestep, time, number-of-elements.
    tstep, t, elements = s.readline().split()
    row1 = s.readline().split()
    Etot = str( float(row1[0]) * 1.036426865E-4 ) # convert unit to eV
    T = row1[1]
    s.readline()
    s.readline()
    V = s.readline().split()[3]
    s.readline()
    P = str( float(s.readline().split()[1]) * 0.016605402 ) # convert atm unit to GPa
    # Every line has 5 values, each line read is 5 elements gone
    leftover = int(elements) - 5*6
    if leftover % 5 == 0:
        extra_lines = leftover/5
    else:
        extra_lines = leftover/5 + 1
    readlines(s,extra_lines)
    # Calculate Etot - 3*k_b*T
    k_b = 8.617343E-5 # Boltzmann's const in eV/K
    pot = str( float(Etot) - 3*k_b*float(T) )
    out.write(tstep+'\t'+P+' \t'+V+'\t'+T+'\t'+Etot+'\t'+pot+'\n')
s.close()
out.close()
| [
"[email protected]"
] | |
fbb7c0b773c663b598397c813719054f055a6897 | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /lib/python2.7/site-packages/openpyxl/worksheet/pivot.py | b1905be6298ea1c57f774cae821fbc482b8bf25b | [
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 4,984 | py | from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
from openpyxl.descriptors import (
Bool,
Integer,
String,
Set,
NoneSet,
)
from openpyxl.descriptors.serialisable import Serialisable
class PivotSelection(Serialisable):
    """Pivot-table selection state on a worksheet (OOXML CT_PivotSelection).

    Class-level descriptors validate values on assignment; all constructor
    arguments default to None so absent XML attributes stay unset.
    """
    pane = Set(values=("bottomRight", "topRight", "bottomLeft", "topLeft"))
    showHeader = Bool()
    label = Bool()
    data = Bool()
    extendable = Bool()
    count = Integer()
    axis = String(allow_none=True)
    dimension = Integer()
    start = Integer()
    min = Integer()
    max = Integer()
    activeRow = Integer()
    activeCol = Integer()
    previousRow = Integer()
    previousCol = Integer()
    click = Integer()
    def __init__(self,
                 pane=None,
                 showHeader=None,
                 label=None,
                 data=None,
                 extendable=None,
                 count=None,
                 axis=None,
                 dimension=None,
                 start=None,
                 min=None,
                 max=None,
                 activeRow=None,
                 activeCol=None,
                 previousRow=None,
                 previousCol=None,
                 click=None):
        self.pane = pane
        self.showHeader = showHeader
        self.label = label
        self.data = data
        self.extendable = extendable
        self.count = count
        self.axis = axis
        self.dimension = dimension
        self.start = start
        self.min = min
        self.max = max
        self.activeRow = activeRow
        self.activeCol = activeCol
        self.previousRow = previousRow
        self.previousCol = previousCol
        self.click = click
class PivotArea(Serialisable):
    """Rule describing a region of a pivot table (OOXML CT_PivotArea).

    Descriptors validate assignment; every constructor argument defaults
    to None so absent XML attributes stay unset.
    """
    field = Integer(allow_none=True)
    type = NoneSet(values=("normal", "data", "all", "origin", "button", "topEnd"))
    dataOnly = Bool()
    labelOnly = Bool()
    grandRow = Bool()
    grandCol = Bool()
    cacheIndex = Bool()
    outline = Bool()
    offset = String()
    collapsedLevelsAreSubtotals = Bool()
    axis = String(allow_none=True)
    fieldPosition = Integer(allow_none=True)
    def __init__(self,
                 field=None,
                 type=None,
                 dataOnly=None,
                 labelOnly=None,
                 grandRow=None,
                 grandCol=None,
                 cacheIndex=None,
                 outline=None,
                 offset=None,
                 collapsedLevelsAreSubtotals=None,
                 axis=None,
                 fieldPosition=None):
        self.field = field
        self.type = type
        self.dataOnly = dataOnly
        self.labelOnly = labelOnly
        self.grandRow = grandRow
        self.grandCol = grandCol
        self.cacheIndex = cacheIndex
        self.outline = outline
        self.offset = offset
        self.collapsedLevelsAreSubtotals = collapsedLevelsAreSubtotals
        self.axis = axis
        self.fieldPosition = fieldPosition
class PivotAreaReferences(Serialisable):
    """Collection wrapper carrying the number of pivot-area references."""
    count = Integer()
    def __init__(self, count=None):
        # Bug fix: the original read `count = count`, binding the argument
        # to a local and never populating the descriptor attribute.
        self.count = count
class PivotAreaReference(Serialisable):
    """Reference to a pivot field plus its subtotal flags (CT_PivotAreaReference).

    Descriptors validate assignment; every constructor argument defaults
    to None so absent XML attributes stay unset.
    """
    field = Integer(allow_none=True)
    count = Integer()
    selected = Bool()
    byPosition = Bool()
    relative = Bool()
    # One boolean per subtotal aggregation kind.
    defaultSubtotal = Bool()
    sumSubtotal = Bool()
    countASubtotal = Bool()
    avgSubtotal = Bool()
    maxSubtotal = Bool()
    minSubtotal = Bool()
    productSubtotal = Bool()
    countSubtotal = Bool()
    stdDevSubtotal = Bool()
    stdDevPSubtotal = Bool()
    varSubtotal = Bool()
    varPSubtotal = Bool()
    def __init__(self,
                 field=None,
                 count=None,
                 selected=None,
                 byPosition=None,
                 relative=None,
                 defaultSubtotal=None,
                 sumSubtotal=None,
                 countASubtotal=None,
                 avgSubtotal=None,
                 maxSubtotal=None,
                 minSubtotal=None,
                 productSubtotal=None,
                 countSubtotal=None,
                 stdDevSubtotal=None,
                 stdDevPSubtotal=None,
                 varSubtotal=None,
                 varPSubtotal=None):
        self.field = field
        self.count = count
        self.selected = selected
        self.byPosition = byPosition
        self.relative = relative
        self.defaultSubtotal = defaultSubtotal
        self.sumSubtotal = sumSubtotal
        self.countASubtotal = countASubtotal
        self.avgSubtotal = avgSubtotal
        self.maxSubtotal = maxSubtotal
        self.minSubtotal = minSubtotal
        self.productSubtotal = productSubtotal
        self.countSubtotal = countSubtotal
        self.stdDevSubtotal = stdDevSubtotal
        self.stdDevPSubtotal = stdDevPSubtotal
        self.varSubtotal = varSubtotal
        self.varPSubtotal = varPSubtotal
class Index(Serialisable):
    """Simple wrapper around a single integer index value."""
    v = Integer()
    def __init__(self, v=None):
        self.v = v
| [
"[email protected]"
] | |
40b5a7f814ed68cbc12969cb867747a1687e0e1b | ac1e60fd4bb3b7cc04e413ae394836abad8947b1 | /email_verification_api/wsgi.py | e60483842d64ef833b28dfd12be0cfe5d6bf9eba | [] | no_license | Taycode/email-verification-api | 9c48642f34671232c388a7c763541f02ff9ae614 | f3abe35a010d5b2d3d2c269fa728eb40f26630a0 | refs/heads/master | 2020-08-04T11:00:29.103892 | 2019-10-01T14:49:14 | 2019-10-01T14:49:14 | 212,114,710 | 0 | 0 | null | 2019-10-01T14:16:59 | 2019-10-01T14:16:58 | null | UTF-8 | Python | false | false | 421 | py | """
WSGI config for email_verification_api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application

# Point Django at the project settings before building the application;
# setdefault lets an externally-set DJANGO_SETTINGS_MODULE win.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'email_verification_api.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| [
"[email protected]"
] | |
490df8c8807c725fdf915ccba2ff1496bd0ac937 | 60cb975f3e0251c73c457271bce8a7b2036e422b | /studysrc/mytest/websppider/transtest.py | 23c308fa3b1c83bba1c6cd379e0c29e746a2f19d | [] | no_license | 49257620/reboot | 0a2341f23bc1a6f3ae47b59f772919228c623544 | 86b348228d1a25d78c45b0e9022d7c773544373b | refs/heads/master | 2018-11-17T19:19:58.969710 | 2018-09-25T03:15:57 | 2018-09-25T03:15:57 | 125,727,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 976 | py | # encoding: utf-8
# Author: LW
# encoding: utf-8
# Author: LW
"""Submit a translation request to the Youdao web translate endpoint.

The endpoint expects a salted MD5 signature mimicking the browser client:
    salt = millisecond timestamp + random digit
    sign = md5(client + query + salt + secret)
"""
import urllib.request
import urllib.parse
import time
import random
import hashlib

content = 'what fuck'
url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'

u = 'fanyideskweb'
d = content
# Millisecond timestamp plus a random digit, as the web client computes it.
f = str(int(time.time() * 1000) + random.randint(1, 10))
c = 'ebSeFb%=XZ%T[KZ)c(sy!'
sign = hashlib.md5((u + d + f + c).encode('utf-8')).hexdigest()
print(f)
print(sign)

# BUG FIX: the original wrote "data['i']: content" etc. — those are
# annotation statements (no-ops), so the POST body only ever contained
# 'salt' and 'sign'. They must be assignments.
data = {}
data['i'] = content
data['from'] = 'AUTO'
data['to'] = 'AUTO'
data['smartresult'] = 'dict'
data['client'] = 'fanyideskweb'
data['salt'] = f
data['sign'] = sign
data['doctype'] = 'json'
data['version'] = '2.1'
data['keyfrom'] = 'fanyi.web'
data['action'] = 'FY_BY_CLICKBUTTION'
data['typoResult'] = 'false'

payload = urllib.parse.urlencode(data).encode('utf-8')
response = urllib.request.urlopen(url, payload)
html = response.read().decode('utf-8')
print(html)
| [
"[email protected]"
] | |
08273d87152e339e41af2407ff4bbad8cc28e79c | f2b91692a434ee79ff5d68ed3111d60d90315f00 | /src/command_modules/azure-cli-servicebus/azure/cli/command_modules/servicebus/_validators.py | 6a4509e9f662b17fe8494f89fce3441aa9719205 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | cal5barton/azure-cli | f883bc7d481b163d4c4af1fa154a990182e5de80 | 6ebc6f810f32b8fce30a360633a70fcfdea15e7b | refs/heads/dev | 2023-05-24T18:12:36.151238 | 2018-07-12T16:16:29 | 2018-07-12T16:16:29 | 140,749,210 | 0 | 0 | MIT | 2023-05-15T18:58:31 | 2018-07-12T18:13:18 | Python | UTF-8 | Python | false | false | 4,322 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=unused-variable
import re
from datetime import timedelta
from isodate import parse_duration
from knack.util import CLIError
# PARAMETER VALIDATORS
# ISO 8601 duration, e.g. "PT5M" — used to validate Service Bus time-span
# arguments below.
iso8601pattern = re.compile("^P(?!$)(\\d+Y)?(\\d+M)?(\\d+W)?(\\d+D)?(T(?=\\d)(\\d+H)?(\\d+M)?(\\d+.)?(\\d+S)?)?$")
# Plain "d:m:s" style timespan, e.g. "0:5:0".
timedeltapattern = re.compile("^\\d+:\\d+:\\d+$")
def _validate_lock_duration(namespace):
    """Validate --lock-duration: ISO 8601 (e.g. PT5M) or d:m:s (e.g. 0:5:0).

    Raises CLIError when the format is unrecognised or the duration
    exceeds the allowed maximum.
    """
    if namespace.lock_duration:
        if iso8601pattern.match(namespace.lock_duration):
            # NOTE(review): the check allows up to 6 minutes although the
            # message says the maximum is 5 minutes — confirm intended bound.
            if parse_duration(namespace.lock_duration) > timedelta(days=0, minutes=6, seconds=0):
                raise CLIError(
                    '--lock-duration Value Error : {0} value, The maximum value for LockDuration is 5 minutes; the default value is 1 minute.'.format(
                        namespace.lock_duration))
        elif timedeltapattern.match(namespace.lock_duration):
            # d:m:s form; same (loose) bound as above. 'miniute' is a
            # pre-existing local-name typo, left untouched here.
            day, miniute, seconds = namespace.lock_duration.split(":")
            if int(day) > 0 or int(miniute) > 6:
                raise CLIError(
                    '--lock-duration Value Error : {0} value, The maximum value for LockDuration is 5 minutes; the default value is 1 minute.'.format(
                        namespace.lock_duration))
        else:
            raise CLIError('--lock-duration Value Error : {0} value is not in ISO 8601 timespan / duration format. e.g.'
                           ' PT10M for duration of 10 min or 00:10:00 for duration of 10 min'.format(namespace.lock_duration))
def _validate_default_message_time_to_live(namespace):
    """Validate --default-message-time-to-live: ISO 8601 or d:m:s format."""
    value = namespace.default_message_time_to_live
    if not value:
        return
    if iso8601pattern.match(value) or timedeltapattern.match(value):
        return
    raise CLIError('--default-message-time-to-live Value Error : {0} value is not in ISO 8601 timespan / duration format. e.g. PT10M for duration of 10 min or 00:10:00 for duration of 10 min'.format(value))
def _validate_duplicate_detection_history_time_window(namespace):
    """Validate --duplicate-detection-history-time-window: ISO 8601 or d:m:s."""
    value = namespace.duplicate_detection_history_time_window
    if not value:
        return
    if iso8601pattern.match(value) or timedeltapattern.match(value):
        return
    raise CLIError('--duplicate-detection-history-time-window Value Error : {0} value is not in ISO 8601 timespan / duration format. e.g. PT10M for duration of 10 min or 00:10:00 for duration of 10 min'.format(value))
def _validate_auto_delete_on_idle(namespace):
    """Validate --auto-delete-on-idle: ISO 8601 or d:m:s format."""
    value = namespace.auto_delete_on_idle
    if not value:
        return
    if iso8601pattern.match(value) or timedeltapattern.match(value):
        return
    raise CLIError('--auto-delete-on-idle Value Error : {0} value is not in ISO 8601 timespan / duration format. e.g. PT10M for duration of 10 min or 00:10:00 for duration of 10 min'.format(value))
def validate_partner_namespace(cmd, namespace):
    """Expand --partner-namespace from a bare name into a full ARM resource ID.

    If the caller already supplied a valid resource ID the value is left
    untouched; otherwise an ID is synthesised from the current subscription
    and the command's resource group.
    """
    from azure.cli.core.commands.client_factory import get_subscription_id
    from msrestazure.tools import is_valid_resource_id, resource_id
    if namespace.partner_namespace:
        if not is_valid_resource_id(namespace.partner_namespace):
            namespace.partner_namespace = resource_id(
                subscription=get_subscription_id(cmd.cli_ctx),
                resource_group=namespace.resource_group_name,
                namespace='Microsoft.ServiceBus',
                type='namespaces',
                name=namespace.partner_namespace)
def validate_premiumsku_capacity(namespace):
    """Reject --capacity for any SKU other than Premium."""
    sku = namespace.sku
    if sku and sku != 'Premium' and namespace.capacity:
        raise CLIError('--capacity - This property is only applicable to namespaces of Premium SKU')
| [
"[email protected]"
] | |
9ec5875503577bf114e6521a6174ca229c968b95 | c1e0874f55d05ee990ed2d637c2910701b32d246 | /soft_uni_OOP/Defining Classes/lab/scope_mess_3.py | 03d81f4774c92bdc435a7583da245e72d79f8461 | [] | no_license | borislavstoychev/Soft_Uni | 5d047bef402c50215e0abc825476326889ffd0be | ccc0b2fb18f8ad6809b475eb20e82a9e4eb4b0b0 | refs/heads/master | 2023-05-11T12:27:08.672058 | 2021-05-28T18:00:10 | 2021-05-28T18:00:10 | 277,556,731 | 3 | 2 | null | 2021-02-11T19:57:37 | 2020-07-06T13:58:23 | Python | UTF-8 | Python | false | false | 311 | py | x = "global"
def outer():
x = "local"
def inner():
nonlocal x
x = "nonlocal"
print("inner:", x)
def change_global():
global x
x = "global: changed!"
print("outer:", x)
inner()
print("outer:", x)
change_global()
print(x)
outer()
print(x) | [
"[email protected]"
] | |
4d641b7b452b7e43378724205d8c5690b44cd11a | 5b9c50baaa3182868c9f4a744a7361abe422a510 | /tests/test_base.py | f7f5133f7951074f1287e3257df0b73b129805e8 | [
"MIT"
] | permissive | jasontangxf/geometer | 3307889c087a1f498d58b5ae6bbf1b037119ca46 | 931df0aff6c680ad13a6c5989f2a89c276370c5e | refs/heads/master | 2023-01-06T17:39:41.837342 | 2020-11-07T15:42:10 | 2020-11-07T15:42:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,251 | py | import numpy as np
from geometer.base import TensorDiagram, Tensor, TensorCollection, LeviCivitaTensor, KroneckerDelta
class TestTensor:
    """Unit tests for geometer.base.Tensor."""

    def test_arithmetic(self):
        # Element-wise vector/vector and vector/scalar arithmetic.
        a = Tensor(2, 3)
        b = Tensor(5, 4)
        # vector operations
        assert a + b == Tensor(7, 7)
        assert a - b == Tensor(-3, -1)
        assert -a == Tensor(-2, -3)
        # scalar operations broadcast over all components
        assert a + 6 == Tensor(8, 9)
        assert a - 6 == Tensor(-4, -3)
        assert a * 6 == Tensor(12, 18)
        assert a / 6 == Tensor(1/3, 0.5)

    def test_transpose(self):
        # Transposing swaps the array axes and the covariant index set.
        a = Tensor([[1, 2],
                    [3, 4]], covariant=[0])
        assert a.transpose() == Tensor([[1, 3], [2, 4]])
        assert a.T._covariant_indices == {1}
        assert a.T.T == a

    def test_getitem(self):
        a = Tensor([[1, 2],
                    [3, 4]], covariant=[0])
        assert a[0, 1] == 2
        # None inserts a new axis; the result's tensor_shape reflects it.
        assert a[None, 1] == [[3, 4]]
        assert a[None, 1].tensor_shape == (0, 1)
        # Slicing preserves (co)variance bookkeeping.
        assert a[::-1, 0] == [3, 1]
        assert a[::-1, 0].tensor_shape == (1, 0)

    def test_dtype(self):
        # The dtype keyword is forwarded to the underlying numpy array.
        a = Tensor(2, 3, dtype=np.float32)
        assert a.dtype == np.float32
        a = Tensor(2, 3, dtype=np.complex64)
        assert a.dtype == np.complex64
class TestTensorCollection:
    """Unit tests for geometer.base.TensorCollection construction/indexing."""

    def test_init(self):
        # empty list
        a = TensorCollection([])
        assert len(a) == 0
        # numpy array
        a = TensorCollection(np.ones((1, 2, 3)))
        assert len(a) == 1
        assert a.size == 2
        # nested list of numbers
        a = TensorCollection([[1, 2], [3, 4]])
        assert len(a) == 2
        assert a.size == 2
        # nested tuple of numbers
        a = TensorCollection(((1, 2), (3, 4)))
        assert len(a) == 2
        assert a.size == 2
        # nested list of Tensor objects
        a = TensorCollection([[Tensor(1, 2, 3), Tensor(3, 4, 5)]])
        assert a.shape == (1, 2, 3)
        assert len(a) == 1
        assert a.size == 2

        # object exposing the numpy __array__ protocol
        class A:
            def __array__(self):
                return np.array([Tensor(1, 2), Tensor(3, 4)])

        a = TensorCollection(A())
        assert len(a) == 2
        assert a.size == 2

    def test_flat(self):
        # .flat iterates the contained rank-2 tensors in order.
        a = [Tensor([[1, 2], [3, 4]]), Tensor([[5, 6], [7, 8]])]
        b = TensorCollection([a], tensor_rank=2)
        assert list(b.flat) == a

    def test_getitem(self):
        a = Tensor([[1, 2],
                    [3, 4]])
        b = Tensor([[5, 6],
                    [7, 8]])
        c = TensorCollection([a, b])
        # integer index returns the member tensor; slices return collections
        assert c[0] == a
        assert c[1] == b
        assert list(c) == [a, b]
        assert c[:, 1] == TensorCollection([Tensor([3, 4]), Tensor([7, 8])])
        assert c[:, 0, 0] == [1, 5]
class TestTensorDiagram:
    """Unit tests for tensor-diagram contraction and special tensors."""

    def test_add_edge(self):
        # Contracting a vector with a (mostly zero) matrix, then closing
        # the remaining free indices with a second copy of the vector.
        a = Tensor([1, 0, 0, 0])
        b = Tensor([[42, 0, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 0]], covariant=False)
        diagram = TensorDiagram((a, b))
        assert diagram.calculate() == Tensor([42, 0, 0, 0])
        diagram.add_edge(a.copy(), b)
        assert diagram.calculate() == 42

    def test_tensor_product(self):
        e1 = Tensor(1, 0)
        e2 = Tensor(0, 1)
        a = Tensor([0, 1],
                   [1, 0], covariant=[0])
        b = Tensor([1, 0],
                   [0, 1], covariant=[0])
        m = a.tensor_product(b)
        e = e1.tensor_product(e2)
        # (a ⊗ b)(e1 ⊗ e2) == (a e1) ⊗ (b e2)
        assert TensorDiagram((e, m), (e, m)).calculate() == (a * e1).tensor_product(b * e2)
        # A diagram with only nodes (no edges) is a plain tensor product.
        d = TensorDiagram()
        d.add_node(a)
        d.add_node(b)
        assert d.calculate() == a.tensor_product(b)

    def test_epsilon_delta_rule(self):
        # Verify eps_ijk eps^lmk = delta_i^l delta_j^m - delta_i^m delta_j^l.
        e1 = LeviCivitaTensor(3, True)
        e2 = LeviCivitaTensor(3, False)
        d = KroneckerDelta(3)
        d2 = d.tensor_product(d)
        d1 = d2.transpose((0, 1))
        diagram = TensorDiagram((e1, e2.transpose()))
        assert diagram.calculate() == d1 - d2

    def test_kronecker_delta(self):
        # Generalised (antisymmetrised) Kronecker delta of order 3 in dim 4.
        d = KroneckerDelta(4, 3)
        assert d.array.shape == (4,)*6
        assert d.array[0, 1, 2, 0, 1, 2] == 1
        assert d.array[0, 2, 1, 0, 1, 2] == -1
| [
"[email protected]"
] | |
4c10f5dbe66a1ecd6b2cb0e0d1cb6a3481ac2ca0 | 1b94c7cfd66804fe8d40b5def35e4b9b18d69ba2 | /old_py2/controllers/apiai_controller.py | dfff3930d0c210a7d0d4eb8c2af95d15d9d7e374 | [
"MIT"
] | permissive | the-blue-alliance/the-blue-alliance | 3dc210a9611ce9b240907ffd420f78040318dcdc | 6d42f3cdb2f785d192f2871419e58aaae3445029 | refs/heads/py3 | 2023-08-22T21:02:36.398100 | 2023-08-22T19:14:01 | 2023-08-22T19:14:01 | 888,427 | 344 | 263 | MIT | 2023-09-14T18:35:20 | 2010-09-04T20:34:11 | HTML | UTF-8 | Python | false | false | 635 | py | import json
from base_controller import LoggedInHandler
from helpers.apiai_helper import APIAIHelper
from models.sitevar import Sitevar
class APIAIHandler(LoggedInHandler):
    """Webhook endpoint for Dialogflow (api.ai) fulfilment requests."""

    def __init__(self, *args, **kw):
        super(APIAIHandler, self).__init__(*args, **kw)

    def post(self):
        # Shared-secret auth: the header must match the sitevar key.
        # NOTE(review): '!=' is not a constant-time comparison; a silent
        # return (no status code) is also pre-existing behavior.
        if self.request.headers.get('X-TBA-APIAI-Auth') != Sitevar.get_by_id('apiai.secrets').contents['key']:
            return
        request = json.loads(self.request.body)
        self.response.headers['content-type'] = 'application/json; charset="utf-8"'
        self.response.out.write(json.dumps(APIAIHelper.process_request(request)))
| [
"[email protected]"
] | |
e03dd7cf9f30096a3fcd724160094c5729decd0e | 7949f96ee7feeaa163608dbd256b0b76d1b89258 | /toontown/coghq/DistributedBanquetTable.py | a40dee6c0601918967b38ce75552be557387627e | [] | no_license | xxdecryptionxx/ToontownOnline | 414619744b4c40588f9a86c8e01cb951ffe53e2d | e6c20e6ce56f2320217f2ddde8f632a63848bd6b | refs/heads/master | 2021-01-11T03:08:59.934044 | 2018-07-27T01:26:21 | 2018-07-27T01:26:21 | 71,086,644 | 8 | 10 | null | 2018-06-01T00:13:34 | 2016-10-17T00:39:41 | Python | UTF-8 | Python | false | false | 50,791 | py | # File: t (Python 2.4)
import math
import random
from pandac.PandaModules import NodePath, Point3, VBase4, TextNode, Vec3, deg2Rad, CollisionSegment, CollisionHandlerQueue, CollisionNode, BitMask32, SmoothMover
from direct.fsm import FSM
from direct.distributed import DistributedObject
from direct.distributed.ClockDelta import globalClockDelta
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import Sequence, ProjectileInterval, Parallel, LerpHprInterval, ActorInterval, Func, Wait, SoundInterval, LerpPosHprInterval, LerpScaleInterval
from direct.gui.DirectGui import DGG, DirectButton, DirectLabel, DirectWaitBar
from direct.task import Task
from toontown.suit import Suit
from toontown.suit import SuitDNA
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from toontown.coghq import BanquetTableBase
from toontown.coghq import DinerStatusIndicator
from toontown.battle import MovieUtil
class DistributedBanquetTable(DistributedObject.DistributedObject, FSM.FSM, BanquetTableBase.BanquetTableBase):
    """Client-side banquet table for the CEO battle (decompiled source).

    Combines a distributed object (network sync), a client FSM mirroring the
    AI-driven table state, and shared table constants from BanquetTableBase.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBanquetTable')
    # Heading (degrees) applied per chair so each diner faces the table.
    rotationsPerSeatIndex = [
        90,
        90,
        0,
        0,
        -90,
        -90,
        180,
        180]
    # Limits/speeds for aiming the seltzer pitcher in phase four.
    pitcherMinH = -360
    pitcherMaxH = 360
    rotateSpeed = 30
    waterPowerSpeed = base.config.GetDouble('water-power-speed', 15)
    waterPowerExponent = base.config.GetDouble('water-power-exponent', 0.75)
    useNewAnimations = True
    # Alternate button-mash control scheme (disabled by default).
    TugOfWarControls = False
    OnlyUpArrow = True
    if OnlyUpArrow:
        BASELINE_KEY_RATE = 3
    else:
        BASELINE_KEY_RATE = 6
    UPDATE_KEY_PRESS_RATE_TASK = 'BanquetTableUpdateKeyPressRateTask'
    # Power-bar color thresholds (fractions of full power).
    YELLOW_POWER_THRESHOLD = 0.75
    RED_POWER_THRESHOLD = 0.96999999999999997
def __init__(self, cr):
    """Initialise per-table client state; no scene assets are loaded yet."""
    DistributedObject.DistributedObject.__init__(self, cr)
    FSM.FSM.__init__(self, 'DistributedBanquetTable')
    self.boss = None
    self.index = -1
    # Per-seat data, keyed by chair index.
    self.diners = { }
    self.dinerStatus = { }
    self.serviceLocs = { }
    self.chairLocators = { }
    self.sitLocators = { }
    self.activeIntervals = { }
    self.dinerStatusIndicators = { }
    self.preparedForPhaseFour = False
    # avId of the toon controlling the pitcher (0 = nobody).
    self.avId = 0
    self.toon = None
    # Smooths remote players' pitcher rotation between network updates.
    self.pitcherSmoother = SmoothMover()
    self.pitcherSmoother.setSmoothMode(SmoothMover.SMOn)
    self.smoothStarted = 0
    # Decompiler artifact: this is the name-mangled self.__broadcastPeriod.
    self._DistributedBanquetTable__broadcastPeriod = 0.20000000000000001
    self.changeSeq = 0
    self.lastChangeSeq = 0
    self.pitcherAdviceLabel = None
    self.fireLength = 250
    self.fireTrack = None
    self.hitObject = None
    self.setupPowerBar()
    self.aimStart = None
    # Offset of the controlling toon relative to the pitcher node.
    self.toonPitcherPosition = Point3(0, -2, 0)
    self.allowLocalRequestControl = True
    self.fadeTrack = None
    self.grabTrack = None
    self.gotHitByBoss = False
    # Key-press bookkeeping for the (optional) tug-of-war controls.
    self.keyTTL = []
    self.keyRate = 0
    self.buttons = [
        0,
        1]
    self.lastPowerFired = 0
    self.moveSound = None
    self.releaseTrack = None
def disable(self):
    """Stop all tasks and tracks when the object leaves interest scope."""
    DistributedObject.DistributedObject.disable(self)
    taskMgr.remove(self.triggerName)
    taskMgr.remove(self.smoothName)
    taskMgr.remove(self.watchControlsName)
    taskMgr.remove(self.pitcherAdviceName)
    taskMgr.remove(self.posHprBroadcastName)
    taskMgr.remove(self.waterPowerTaskName)
    if self.releaseTrack:
        self.releaseTrack.finish()
        self.releaseTrack = None
    if self.fireTrack:
        self.fireTrack.finish()
        self.fireTrack = None
    self.cleanupIntervals()
def delete(self):
    """Tear down all per-table actors, GUI and sounds."""
    DistributedObject.DistributedObject.delete(self)
    self.boss = None
    self.ignoreAll()
    for indicator in self.dinerStatusIndicators.values():
        indicator.delete()
    self.dinerStatusIndicators = { }
    for diner in self.diners.values():
        diner.delete()
    self.diners = { }
    self.powerBar.destroy()
    self.powerBar = None
    self.pitcherMoveSfx.stop()
def announceGenerate(self):
    """Load assets and precompute the per-object unique task names."""
    DistributedObject.DistributedObject.announceGenerate(self)
    self.loadAssets()
    self.smoothName = self.uniqueName('pitcherSmooth')
    self.pitcherAdviceName = self.uniqueName('pitcherAdvice')
    self.posHprBroadcastName = self.uniqueName('pitcherBroadcast')
    self.waterPowerTaskName = self.uniqueName('updateWaterPower')
    self.triggerName = self.uniqueName('trigger')
    self.watchControlsName = self.uniqueName('watchControls')
def setBossCogId(self, bossCogId):
    """Distributed field: bind this table to its boss and register with it."""
    self.bossCogId = bossCogId
    self.boss = base.cr.doId2do[bossCogId]
    self.boss.setTable(self, self.index)
def setIndex(self, index):
    # Distributed field: which table slot (0-based) this object occupies.
    self.index = index
def setState(self, state, avId, extraInfo):
    """Distributed field: mirror the AI's one-letter state into the FSM.

    extraInfo flags whether the controlling toon was hit by the boss,
    which changes the release animation.
    """
    self.gotHitByBoss = extraInfo
    if state == 'C':
        self.demand('Controlled', avId)
    elif state == 'L':
        self.demand('Flat', avId)
    else:
        # States that carry no avatar argument.
        simpleStates = {'F': 'Off',
                        'N': 'On',
                        'I': 'Inactive',
                        'R': 'Free'}
        if state in simpleStates:
            self.demand(simpleStates[state])
        else:
            self.notify.error('Invalid state from AI: %s' % state)
def setNumDiners(self, numDiners):
    # Distributed field: how many diner suits sit at this table.
    self.numDiners = numDiners
def setDinerInfo(self, hungryDurations, eatingDurations, dinerLevels):
    """Distributed field: per-seat (hungryDuration, eatingDuration, level)."""
    self.dinerInfo = { }
    for seat in xrange(len(hungryDurations)):
        self.dinerInfo[seat] = (hungryDurations[seat],
                                eatingDurations[seat],
                                dinerLevels[seat])
def loadAssets(self):
    """Load the table model, parent it into the boss scene, and load SFX."""
    self.tableGroup = loader.loadModel('phase_12/models/bossbotHQ/BanquetTableChairs')
    tableLocator = self.boss.geom.find('**/TableLocator_%d' % (self.index + 1))
    if tableLocator.isEmpty():
        # Fallback placement when the scene has no locator for this index.
        self.tableGroup.reparentTo(render)
        self.tableGroup.setPos(0, 75, 0)
    else:
        self.tableGroup.reparentTo(tableLocator)
    self.tableGeom = self.tableGroup.find('**/Geometry')
    self.setupDiners()
    self.setupChairCols()
    self.squirtSfx = loader.loadSfx('phase_4/audio/sfx/AA_squirt_seltzer_miss.mp3')
    self.hitBossSfx = loader.loadSfx('phase_5/audio/sfx/SA_watercooler_spray_only.mp3')
    self.hitBossSoundInterval = SoundInterval(self.hitBossSfx, node = self.boss, volume = 1.0)
    self.serveFoodSfx = loader.loadSfx('phase_4/audio/sfx/MG_sfx_travel_game_bell_for_trolley.mp3')
    self.pitcherMoveSfx = base.loadSfx('phase_4/audio/sfx/MG_cannon_adjust.mp3')
def setupDiners(self):
    """Create one diner suit per seat; everyone starts out hungry."""
    for seat in xrange(self.numDiners):
        self.diners[seat] = self.createDiner(seat)
        self.dinerStatus[seat] = self.HUNGRY
def createDiner(self, i):
    """Build the suit actor for chair i, seat it, and attach its indicator.

    Returns the Suit actor parented under the chair's sit locator.
    """
    diner = Suit.Suit()
    diner.dna = SuitDNA.SuitDNA()
    level = self.dinerInfo[i][2]
    # dinerInfo levels are offset by 4 relative to SuitDNA levels.
    level -= 4
    diner.dna.newSuitRandom(level = level, dept = 'c')
    diner.setDNA(diner.dna)
    if self.useNewAnimations:
        # Stagger the loop start frame so diners don't animate in unison.
        diner.loop('sit', fromFrame = i)
    else:
        diner.pose('landing', 0)
    locator = self.tableGroup.find('**/chair_%d' % (i + 1))
    locatorScale = locator.getNetTransform().getScale()[0]
    # Intermediate node to rotate the suit to face the table.
    correctHeadingNp = locator.attachNewNode('correctHeading')
    self.chairLocators[i] = correctHeadingNp
    heading = self.rotationsPerSeatIndex[i]
    correctHeadingNp.setH(heading)
    sitLocator = correctHeadingNp.attachNewNode('sitLocator')
    base.sitLocator = sitLocator
    pos = correctHeadingNp.getPos(render)
    # Seat offset differs for short ('c') suit bodies.
    if SuitDNA.getSuitBodyType(diner.dna.name) == 'c':
        sitLocator.setPos(0.5, 3.6499999999999999, -3.75)
    else:
        sitLocator.setZ(-2.3999999999999999)
        sitLocator.setY(2.5)
        sitLocator.setX(0.5)
    self.sitLocators[i] = sitLocator
    # Counteract the chair's scale so the suit renders at unit scale.
    diner.setScale(1.0 / locatorScale)
    diner.reparentTo(sitLocator)
    # Where served food lands in front of this diner.
    newLoc = NodePath('serviceLoc-%d-%d' % (self.index, i))
    newLoc.reparentTo(correctHeadingNp)
    newLoc.setPos(0, 3.0, 1)
    self.serviceLocs[i] = newLoc
    base.serviceLoc = newLoc
    # Status icon floats above the head but is parented to the suit.
    head = diner.find('**/joint_head')
    newIndicator = DinerStatusIndicator.DinerStatusIndicator(parent = head, pos = Point3(0, 0, 3.5), scale = 5.0)
    newIndicator.wrtReparentTo(diner)
    self.dinerStatusIndicators[i] = newIndicator
    return diner
def setupChairCols(self):
    """Tag and name each chair's collision solid and listen for contact."""
    for i in xrange(self.numDiners):
        chairCol = self.tableGroup.find('**/collision_chair_%d' % (i + 1))
        colName = 'ChairCol-%d-%d' % (self.index, i)
        # The tag lets touchedChair recover the seat index from the event.
        chairCol.setTag('chairIndex', str(i))
        chairCol.setName(colName)
        chairCol.setCollideMask(ToontownGlobals.WallBitmask)
        self.accept('enter' + colName, self.touchedChair)
def touchedChair(self, colEntry):
    """Collision handler: report contact with a hungry/angry diner's chair."""
    chairIndex = int(colEntry.getIntoNodePath().getTag('chairIndex'))
    status = self.dinerStatus.get(chairIndex)
    if status == self.HUNGRY or status == self.ANGRY:
        self.boss.localToonTouchedChair(self.index, chairIndex)
def serveFood(self, food, chairIndex):
    """Animate a food model flying onto the service spot for a seat.

    food may be an existing NodePath (carried by a toon) or empty, in
    which case a default can-of-food model is spawned in place.
    """
    self.removeFoodModel(chairIndex)
    serviceLoc = self.serviceLocs.get(chairIndex)
    if not food or food.isEmpty():
        foodModel = loader.loadModel('phase_12/models/bossbotHQ/canoffood')
        foodModel.setScale(ToontownGlobals.BossbotFoodModelScale)
        foodModel.reparentTo(serviceLoc)
    else:
        food.wrtReparentTo(serviceLoc)
        tray = food.find('**/tray')
        if not tray.isEmpty():
            tray.hide()
        ivalDuration = 1.5
        # Bell sound + projectile arc + a full spin on the way in.
        foodMoveIval = Parallel(SoundInterval(self.serveFoodSfx, node = food), ProjectileInterval(food, duration = ivalDuration, startPos = food.getPos(serviceLoc), endPos = serviceLoc.getPos(serviceLoc)), LerpHprInterval(food, ivalDuration, Point3(0, -360, 0)))
        intervalName = 'serveFood-%d-%d' % (self.index, chairIndex)
        foodMoveIval.start()
        self.activeIntervals[intervalName] = foodMoveIval
def setDinerStatus(self, chairIndex, status):
    """Distributed field: update one diner's status and play the transition.

    Only acts when the status actually changes for a known seat.
    """
    if chairIndex not in self.dinerStatus:
        return
    previousStatus = self.dinerStatus[chairIndex]
    self.dinerStatus[chairIndex] = status
    if previousStatus == status:
        return
    transitions = {
        self.EATING: self.changeDinerToEating,
        self.HUNGRY: self.changeDinerToHungry,
        self.ANGRY: self.changeDinerToAngry,
        self.DEAD: self.changeDinerToDead,
        self.HIDDEN: self.changeDinerToHidden,
    }
    handler = transitions.get(status)
    if handler:
        handler(chairIndex)
def removeFoodModel(self, chairIndex):
    """Remove any food model(s) parented to this seat's service locator."""
    serviceLoc = self.serviceLocs.get(chairIndex)
    if not serviceLoc:
        return
    # Always detach child 0 — the child list reindexes after each removal.
    while serviceLoc.getNumChildren() > 0:
        serviceLoc.getChild(0).removeNode()
def changeDinerToEating(self, chairIndex):
    """Play the full eat cycle for one diner: wait, pick up food, loop, put down.

    The eating duration comes from dinerInfo; the loop segment absorbs
    whatever time is left after the fixed-length in/out animations.
    """
    indicator = self.dinerStatusIndicators.get(chairIndex)
    eatingDuration = self.dinerInfo[chairIndex][1]
    if indicator:
        indicator.request('Eating', eatingDuration)
    diner = self.diners[chairIndex]
    intervalName = 'eating-%d-%d' % (self.index, chairIndex)
    eatInTime = 32.0 / 24.0
    eatOutTime = 21.0 / 24.0
    eatLoopTime = 19 / 24.0
    rightHand = diner.getRightHand()
    waitTime = 5
    loopDuration = eatingDuration - eatInTime - eatOutTime - waitTime
    serviceLoc = self.serviceLocs[chairIndex]

    def foodAttach(self = self, diner = diner):
        # Move the served food from the table into the diner's hand,
        # compensating position/scale per suit body type.
        foodModel = self.serviceLocs[chairIndex].getChild(0)
        (foodModel.reparentTo(diner.getRightHand()),)
        (foodModel.setHpr(Point3(0, -94, 0)),)
        (foodModel.setPos(Point3(-0.14999999999999999, -0.69999999999999996, -0.40000000000000002)),)
        scaleAdj = 1
        if SuitDNA.getSuitBodyType(diner.dna.name) == 'c':
            scaleAdj = 0.59999999999999998
            (foodModel.setPos(Point3(0.10000000000000001, -0.25, -0.31)),)
        else:
            scaleAdj = 0.80000000000000004
            (foodModel.setPos(Point3(-0.25, -0.84999999999999998, -0.34000000000000002)),)
        oldScale = foodModel.getScale()
        newScale = oldScale * scaleAdj
        foodModel.setScale(newScale)

    def foodDetach(self = self, diner = diner):
        # Put the food back on the table and undo the hand-scale adjustment.
        foodModel = diner.getRightHand().getChild(0)
        (foodModel.reparentTo(serviceLoc),)
        (foodModel.setPosHpr(0, 0, 0, 0, 0, 0),)
        scaleAdj = 1
        if SuitDNA.getSuitBodyType(diner.dna.name) == 'c':
            scaleAdj = 0.59999999999999998
        else:
            # BUG FIX: was 'scakeAdj = 0.8...' (typo), so the scale applied
            # in foodAttach was never undone for non-'c' suit bodies.
            scaleAdj = 0.80000000000000004
        oldScale = foodModel.getScale()
        newScale = oldScale / scaleAdj
        foodModel.setScale(newScale)

    eatIval = Sequence(ActorInterval(diner, 'sit', duration = waitTime), ActorInterval(diner, 'sit-eat-in', startFrame = 0, endFrame = 6), Func(foodAttach), ActorInterval(diner, 'sit-eat-in', startFrame = 6, endFrame = 32), ActorInterval(diner, 'sit-eat-loop', duration = loopDuration, loop = 1), ActorInterval(diner, 'sit-eat-out', startFrame = 0, endFrame = 12), Func(foodDetach), ActorInterval(diner, 'sit-eat-out', startFrame = 12, endFrame = 21))
    eatIval.start()
    self.activeIntervals[intervalName] = eatIval
def changeDinerToHungry(self, chairIndex):
    """Return a diner to the hungry state, ending any eating interval."""
    intervalName = 'eating-%d-%d' % (self.index, chairIndex)
    if intervalName in self.activeIntervals:
        self.activeIntervals[intervalName].finish()
    self.removeFoodModel(chairIndex)
    indicator = self.dinerStatusIndicators.get(chairIndex)
    if indicator:
        indicator.request('Hungry', self.dinerInfo[chairIndex][0])
    # Single RNG draw; element order mirrors the original 0->right, 1->left.
    anim = random.choice(['sit-hungry-right', 'sit-hungry-left'])
    self.diners[chairIndex].loop(anim)
def changeDinerToAngry(self, chairIndex):
    """Switch a diner to the angry pose and indicator."""
    self.removeFoodModel(chairIndex)
    statusIcon = self.dinerStatusIndicators.get(chairIndex)
    if statusIcon:
        statusIcon.request('Angry')
    self.diners[chairIndex].loop('sit-angry')
def changeDinerToDead(self, chairIndex):
    """Play the suit-defeated sequence for one diner (explode animation).

    Swaps the seated suit for its 'lose' actor, plays the fall-apart
    animation plus sound track, then stashes the original suit.
    """

    def removeDeathSuit(suit, deathSuit):
        # Detach the temporary lose actor once the animation finishes.
        if not deathSuit.isEmpty():
            deathSuit.detachNode()
            suit.cleanupLoseActor()

    self.removeFoodModel(chairIndex)
    indicator = self.dinerStatusIndicators.get(chairIndex)
    if indicator:
        indicator.request('Dead')
    diner = self.diners[chairIndex]
    deathSuit = diner
    locator = self.tableGroup.find('**/chair_%d' % (chairIndex + 1))
    deathSuit = diner.getLoseActor()
    ival = Sequence(Func(self.notify.debug, 'before actorinterval sit-lose'), ActorInterval(diner, 'sit-lose'), Func(self.notify.debug, 'before deathSuit.setHpr'), Func(deathSuit.setHpr, diner.getHpr()), Func(self.notify.debug, 'before diner.hide'), Func(diner.hide), Func(self.notify.debug, 'before deathSuit.reparentTo'), Func(deathSuit.reparentTo, self.chairLocators[chairIndex]), Func(self.notify.debug, 'befor ActorInterval lose'), ActorInterval(deathSuit, 'lose', duration = MovieUtil.SUIT_LOSE_DURATION), Func(self.notify.debug, 'before remove deathsuit'), Func(removeDeathSuit, diner, deathSuit, name = 'remove-death-suit-%d-%d' % (chairIndex, self.index)), Func(self.notify.debug, 'diner.stash'), Func(diner.stash))
    spinningSound = base.loadSfx('phase_3.5/audio/sfx/Cog_Death.mp3')
    deathSound = base.loadSfx('phase_3.5/audio/sfx/ENC_cogfall_apart.mp3')
    deathSoundTrack = Sequence(Wait(0.80000000000000004), SoundInterval(spinningSound, duration = 1.2, startTime = 1.5, volume = 0.20000000000000001, node = deathSuit), SoundInterval(spinningSound, duration = 3.0, startTime = 0.59999999999999998, volume = 0.80000000000000004, node = deathSuit), SoundInterval(deathSound, volume = 0.32000000000000001, node = deathSuit))
    intervalName = 'dinerDie-%d-%d' % (self.index, chairIndex)
    deathIval = Parallel(ival, deathSoundTrack)
    deathIval.start()
    self.activeIntervals[intervalName] = deathIval
def changeDinerToHidden(self, chairIndex):
    """Hide a diner entirely (seat not used this round)."""
    self.removeFoodModel(chairIndex)
    statusIcon = self.dinerStatusIndicators.get(chairIndex)
    if statusIcon:
        statusIcon.request('Inactive')
    self.diners[chairIndex].hide()
def setAllDinersToSitNeutral(self):
    """Loop the neutral sit anim on every visible diner, staggering frames."""
    frame = 0
    for diner in self.diners.values():
        if diner.isHidden():
            continue
        diner.loop('sit', fromFrame = frame)
        frame += 1
def cleanupIntervals(self):
    """Finish every tracked interval and forget them all."""
    for ival in self.activeIntervals.values():
        ival.finish()
    self.activeIntervals = { }
def clearInterval(self, name, finish = 1):
    """Stop (finish or pause) the named interval and drop it from tracking.

    The membership re-check before the delete is deliberate: finishing an
    interval can run callbacks that already removed the entry.
    """
    # Modern idiom: 'in' instead of the removed dict.has_key().
    if name in self.activeIntervals:
        ival = self.activeIntervals[name]
        if finish:
            ival.finish()
        else:
            ival.pause()
        if name in self.activeIntervals:
            del self.activeIntervals[name]
    else:
        self.notify.debug('interval: %s already cleared' % name)
def finishInterval(self, name):
    """Finish the named interval if present; unlike clearInterval it is
    left in activeIntervals."""
    # Modern idiom: 'in' instead of the removed dict.has_key().
    if name in self.activeIntervals:
        self.activeIntervals[name].finish()
def getNotDeadInfo(self):
    """Return (tableIndex, chairIndex, 12) for every diner still alive."""
    return [(self.index, i, 12)
            for i in xrange(self.numDiners)
            if self.dinerStatus[i] != self.DEAD]
def enterOn(self):
    # FSM state 'On': no client-side work needed.
    pass
def exitOn(self):
    # FSM state 'On' exit: nothing to tear down.
    pass
def enterInactive(self):
    """FSM state 'Inactive': clear food and dim every seat's indicator."""
    for seat in xrange(self.numDiners):
        self.removeFoodModel(seat)
        statusIcon = self.dinerStatusIndicators.get(seat)
        if statusIcon:
            statusIcon.request('Inactive')
def exitInactive(self):
    # FSM state 'Inactive' exit: nothing to tear down.
    pass
def enterFree(self):
    """FSM state 'Free': the pitcher is unclaimed and may be grabbed.

    If the local toon just released this table, it is rendered
    transparent and briefly untouchable to avoid an immediate re-grab.
    """
    self.resetPowerBar()
    if self.fadeTrack:
        self.fadeTrack.finish()
        self.fadeTrack = None
    self.prepareForPhaseFour()
    if self.avId == localAvatar.doId:
        self.tableGroup.setAlphaScale(0.29999999999999999)
        self.tableGroup.setTransparency(1)
        # Re-enable grabbing for the local toon after a cooldown.
        taskMgr.doMethodLater(5, self._DistributedBanquetTable__allowDetect, self.triggerName)
        self.fadeTrack = Sequence(Func(self.tableGroup.setTransparency, 1), self.tableGroup.colorScaleInterval(0.20000000000000001, VBase4(1, 1, 1, 0.29999999999999999)))
        self.fadeTrack.start()
        self.allowLocalRequestControl = False
    else:
        self.allowLocalRequestControl = True
    self.avId = 0
def exitFree(self):
    # FSM state 'Free' exit: nothing to tear down.
    pass
def touchedTable(self, colEntry):
    """Collision handler: ask the AI for pitcher control when allowed."""
    tableIndex = int(colEntry.getIntoNodePath().getTag('tableIndex'))
    if self.state != 'Free' or self.avId != 0 or not self.allowLocalRequestControl:
        return
    self.d_requestControl()
def prepareForPhaseFour(self):
    """One-time conversion of the table for the seltzer-pitcher phase.

    Hides chairs and their collision, wires the table's own collision to
    touchedTable, and replaces a lamp with the seltzer bottle model.
    Idempotent via the preparedForPhaseFour flag.
    """
    if not self.preparedForPhaseFour:
        for i in xrange(8):
            chair = self.tableGroup.find('**/chair_%d' % (i + 1))
            if not chair.isEmpty():
                chair.hide()
            colChairs = self.tableGroup.findAllMatches('**/ChairCol*')
            for i in xrange(colChairs.getNumPaths()):
                col = colChairs.getPath(i)
                col.stash()
            colChairs = self.tableGroup.findAllMatches('**/collision_chair*')
            for i in xrange(colChairs.getNumPaths()):
                col = colChairs.getPath(i)
                col.stash()
        tableCol = self.tableGroup.find('**/collision_table')
        colName = 'TableCol-%d' % self.index
        tableCol.setTag('tableIndex', str(self.index))
        tableCol.setName(colName)
        tableCol.setCollideMask(ToontownGlobals.WallBitmask | ToontownGlobals.BanquetTableBitmask)
        self.accept('enter' + colName, self.touchedTable)
        self.preparedForPhaseFour = True
        self.waterPitcherModel = loader.loadModel('phase_12/models/bossbotHQ/tt_m_ara_bhq_seltzerBottle')
        lampNode = self.tableGroup.find('**/lamp_med_5')
        pos = lampNode.getPos(self.tableGroup)
        lampNode.hide()
        # Bottle goes at the dedicated locator, not the lamp position.
        bottleLocator = self.tableGroup.find('**/bottle_locator')
        pos = bottleLocator.getPos(self.tableGroup)
        self.waterPitcherNode = self.tableGroup.attachNewNode('pitcherNode')
        self.waterPitcherNode.setPos(pos)
        self.waterPitcherModel.reparentTo(self.waterPitcherNode)
        self.waterPitcherModel.ls()
        self.nozzle = self.waterPitcherModel.find('**/nozzle_tip')
        self.handLocator = self.waterPitcherModel.find('**/hand_locator')
        self.handPos = self.handLocator.getPos()
def d_requestControl(self):
    # Ask the AI for control of this table's pitcher.
    self.sendUpdate('requestControl')
def d_requestFree(self, gotHitByBoss):
    # Release the pitcher; gotHitByBoss selects the knocked-off animation.
    self.sendUpdate('requestFree', [
        gotHitByBoss])
def enterControlled(self, avId):
    """FSM state 'Controlled': a toon has grabbed this table's pitcher.

    For the local toon: builds the grab interval, attaches the camera,
    enables the aiming controls and starts broadcasting pos/hpr. For
    remote toons: starts smoothing their broadcast pitcher motion.
    """
    self.prepareForPhaseFour()
    self.avId = avId
    toon = base.cr.doId2do.get(avId)
    if not toon:
        return None
    self.toon = toon
    self.grabTrack = self.makeToonGrabInterval(toon)
    self.notify.debug('grabTrack=%s' % self.grabTrack)
    self.pitcherCamPos = Point3(0, -50, 40)
    self.pitcherCamHpr = Point3(0, -21, 0)
    if avId == localAvatar.doId:
        self.boss.toMovieMode()
        self._DistributedBanquetTable__enableControlInterface()
        self.startPosHprBroadcast()
        self.grabTrack = Sequence(self.grabTrack, Func(camera.wrtReparentTo, localAvatar), LerpPosHprInterval(camera, 1, self.pitcherCamPos, self.pitcherCamHpr), Func(self.boss.toCraneMode))
        if self.TugOfWarControls:
            self._DistributedBanquetTable__spawnUpdateKeyPressRateTask()
        # Fired when the boss knocks the local toon off the pitcher.
        self.accept('exitCrane', self.gotBossZapped)
    else:
        self.startSmooth()
    toon.stopSmooth()
    self.grabTrack.start()
def exitControlled(self):
    """Leave 'Controlled': detach the toon, restore camera and controls.

    Skips the release animation when transitioning into 'Flat' (the table
    was squashed); otherwise plays the jump-off (or knocked-off) track.
    """
    self.ignore('exitCrane')
    if self.grabTrack:
        self.grabTrack.finish()
        self.grabTrack = None
    nextState = self.getCurrentOrNextState()
    self.notify.debug('nextState=%s' % nextState)
    if nextState == 'Flat':
        place = base.cr.playGame.getPlace()
        self.notify.debug('%s' % place.fsm)
        if self.avId == localAvatar.doId:
            self._DistributedBanquetTable__disableControlInterface()
        elif self.toon and not self.toon.isDisabled():
            self.toon.loop('neutral')
            self.toon.startSmooth()
    else:
        self.releaseTrack = self.makeToonReleaseInterval(self.toon)
        self.stopPosHprBroadcast()
        self.stopSmooth()
        if self.avId == localAvatar.doId:
            localAvatar.wrtReparentTo(render)
            self._DistributedBanquetTable__disableControlInterface()
            # Restore the default avatar camera before the release anim.
            camera.reparentTo(base.localAvatar)
            camera.setPos(base.localAvatar.cameraPositions[0][0])
            camera.setHpr(0, 0, 0)
            self.goToFinalBattle()
            self.safeBossToFinalBattleMode()
        else:
            toon = base.cr.doId2do.get(self.avId)
            if toon:
                toon.wrtReparentTo(render)
        self.releaseTrack.start()
def safeBossToFinalBattleMode(self):
    """Switch the boss into final-battle mode, if the boss still exists."""
    if self.boss:
        self.boss.toFinalBattleMode()

def goToFinalBattle(self):
    """Move the local player's place FSM from 'crane' back to 'finalBattle'."""
    if not self.cr:
        return
    place = self.cr.playGame.getPlace()
    if place and hasattr(place, 'fsm'):
        if place.fsm.getCurrentState().getName() == 'crane':
            place.setState('finalBattle')
def makeToonGrabInterval(self, toon):
    """Build the interval that animates `toon` jumping up to grab the pitcher."""
    toon.pose('leverNeutral', 0)
    toon.update()
    rightHandPos = toon.rightHand.getPos(toon)
    # Offset the toon so its right hand lands on the pitcher handle.
    self.toonPitcherPosition = Point3(self.handPos[0] - rightHandPos[0],
                                      self.handPos[1] - rightHandPos[1],
                                      0)
    destZScale = rightHandPos[2] / self.handPos[2]
    jumpIval = Parallel(
        ActorInterval(toon, 'jump'),
        Sequence(
            Wait(0.42999999999999999),
            Parallel(
                ProjectileInterval(toon, duration = 0.90000000000000002,
                                   startPos = toon.getPos(self.waterPitcherNode),
                                   endPos = self.toonPitcherPosition),
                LerpHprInterval(toon, 0.90000000000000002, Point3(0, 0, 0)),
                LerpScaleInterval(self.waterPitcherModel, 0.90000000000000002,
                                  Point3(1, 1, destZScale)))))
    grabIval = Sequence(
        Func(toon.wrtReparentTo, self.waterPitcherNode),
        Func(toon.loop, 'neutral'),
        jumpIval,
        Func(toon.setPos, self.toonPitcherPosition),
        Func(toon.loop, 'leverNeutral'))
    return grabIval

def makeToonReleaseInterval(self, toon):
    """Build the interval that animates `toon` leaving the pitcher.

    A normal exit is a jump off the pitcher; if the boss zapped us the toon
    slips backwards instead.
    """
    # Use throwaway nodes to compute world-space start/end of the jump.
    temp1 = self.waterPitcherNode.attachNewNode('temp1')
    temp1.setPos(self.toonPitcherPosition)
    temp2 = self.waterPitcherNode.attachNewNode('temp2')
    temp2.setPos(0, -10, -self.waterPitcherNode.getZ())
    startPos = temp1.getPos(render)
    endPos = temp2.getPos(render)
    temp1.removeNode()
    temp2.removeNode()

    def getSlideToPos(toon = toon):
        return render.getRelativePoint(toon, Point3(0, -10, 0))

    if self.gotHitByBoss:
        self.notify.debug('creating zap interval instead')
        grabIval = Sequence(
            Func(toon.loop, 'neutral'),
            Func(toon.wrtReparentTo, render),
            Parallel(
                ActorInterval(toon, 'slip-backward'),
                toon.posInterval(0.5, getSlideToPos, fluid = 1)))
    else:
        grabIval = Sequence(
            Func(toon.loop, 'neutral'),
            Func(toon.wrtReparentTo, render),
            Parallel(
                ActorInterval(toon, 'jump'),
                Sequence(
                    Wait(0.42999999999999999),
                    ProjectileInterval(toon, duration = 0.90000000000000002,
                                       startPos = startPos,
                                       endPos = endPos))))
    return grabIval
def b_clearSmoothing(self):
    """Clear pitcher-heading smoothing locally and on all other clients."""
    self.d_clearSmoothing()
    self.clearSmoothing()

def d_clearSmoothing(self):
    self.sendUpdate('clearSmoothing', [0])

def clearSmoothing(self, bogus = None):
    self.pitcherSmoother.clearPositions(1)

def doSmoothTask(self, task):
    # Per-frame task: apply the smoothed heading to the pitcher node.
    self.pitcherSmoother.computeAndApplySmoothHpr(self.waterPitcherNode)
    return Task.cont

def startSmooth(self):
    """Begin per-frame smoothing of broadcast pitcher headings (idempotent)."""
    if self.smoothStarted:
        return
    taskMgr.remove(self.smoothName)
    self.reloadPosition()
    taskMgr.add(self.doSmoothTask, self.smoothName)
    self.smoothStarted = 1

def stopSmooth(self):
    """Stop smoothing and snap to the last reported heading (idempotent)."""
    if not self.smoothStarted:
        return
    taskMgr.remove(self.smoothName)
    self.forceToTruePosition()
    self.smoothStarted = 0
def _DistributedBanquetTable__enableControlInterface(self):
    """Hook up the leave button, key bindings and watch tasks for the pitcher."""
    gui = loader.loadModel('phase_3.5/models/gui/avatar_panel_gui')
    self.closeButton = DirectButton(
        image = (gui.find('**/CloseBtn_UP'), gui.find('**/CloseBtn_DN'),
                 gui.find('**/CloseBtn_Rllvr'), gui.find('**/CloseBtn_UP')),
        relief = None,
        scale = 2,
        text = TTLocalizer.BossbotPitcherLeave,
        text_scale = 0.040000000000000001,
        text_pos = (0, -0.070000000000000007),
        text_fg = VBase4(1, 1, 1, 1),
        pos = (1.05, 0, -0.81999999999999995),
        command = self._DistributedBanquetTable__exitPitcher)
    self.accept('escape', self._DistributedBanquetTable__exitPitcher)
    self.accept('control', self._DistributedBanquetTable__controlPressed)
    self.accept('control-up', self._DistributedBanquetTable__controlReleased)
    self.accept('InputState-forward', self._DistributedBanquetTable__upArrow)
    self.accept('InputState-reverse', self._DistributedBanquetTable__downArrow)
    self.accept('InputState-turnLeft', self._DistributedBanquetTable__leftArrow)
    self.accept('InputState-turnRight', self._DistributedBanquetTable__rightArrow)
    self.accept('arrow_up', self._DistributedBanquetTable__upArrowKeyPressed)
    self.accept('arrow_down', self._DistributedBanquetTable__downArrowKeyPressed)
    taskMgr.add(self._DistributedBanquetTable__watchControls, self.watchControlsName)
    # Show a usage hint if the player hasn't done anything after 5 seconds.
    taskMgr.doMethodLater(5, self._DistributedBanquetTable__displayPitcherAdvice,
                          self.pitcherAdviceName)
    self.arrowVert = 0
    self.arrowHorz = 0
    self.powerBar.show()

def _DistributedBanquetTable__disableControlInterface(self):
    """Tear down everything __enableControlInterface set up."""
    if self.closeButton:
        self.closeButton.destroy()
        self.closeButton = None
    self._DistributedBanquetTable__cleanupPitcherAdvice()
    for event in ('escape', 'control', 'control-up',
                  'InputState-forward', 'InputState-reverse',
                  'InputState-turnLeft', 'InputState-turnRight',
                  'arrow_up', 'arrow_down'):
        self.ignore(event)
    self.arrowVert = 0
    self.arrowHorz = 0
    taskMgr.remove(self.watchControlsName)
    taskMgr.remove(self.waterPowerTaskName)
    self.resetPowerBar()
    self.aimStart = None
    self.powerBar.hide()
    if self.TugOfWarControls:
        self._DistributedBanquetTable__killUpdateKeyPressRateTask()
        self.keyTTL = []
    self._DistributedBanquetTable__setMoveSound(None)
def _DistributedBanquetTable__displayPitcherAdvice(self, task):
    """Delayed task: show the on-screen usage hint (only created once)."""
    if self.pitcherAdviceLabel == None:
        self.pitcherAdviceLabel = DirectLabel(
            text = TTLocalizer.BossbotPitcherAdvice,
            text_fg = VBase4(1, 1, 1, 1),
            text_align = TextNode.ACenter,
            relief = None,
            pos = (0, 0, 0.68999999999999995),
            scale = 0.10000000000000001)

def _DistributedBanquetTable__cleanupPitcherAdvice(self):
    """Remove the usage hint and cancel its pending task."""
    if self.pitcherAdviceLabel:
        self.pitcherAdviceLabel.destroy()
        self.pitcherAdviceLabel = None
    taskMgr.remove(self.pitcherAdviceName)

def showExiting(self):
    """Replace the leave button with a 'leaving' label while the AI responds."""
    if self.closeButton:
        self.closeButton.destroy()
        self.closeButton = DirectLabel(
            relief = None,
            text = TTLocalizer.BossbotPitcherLeaving,
            pos = (1.05, 0, -0.88),
            text_pos = (0, 0),
            text_scale = 0.059999999999999998,
            text_fg = VBase4(1, 1, 1, 1))
    self._DistributedBanquetTable__cleanupPitcherAdvice()

def _DistributedBanquetTable__exitPitcher(self):
    self.showExiting()
    self.d_requestFree(False)

def _DistributedBanquetTable__controlPressed(self):
    """Handle the fire key going down.

    Tug-of-war mode fires immediately using whatever power was mashed up;
    otherwise pressing starts the hold-to-charge sequence.
    """
    self._DistributedBanquetTable__cleanupPitcherAdvice()
    if self.TugOfWarControls:
        if self.power:
            self.aimStart = 1
            self._DistributedBanquetTable__endFireWater()
    elif self.state == 'Controlled':
        self._DistributedBanquetTable__beginFireWater()
def _DistributedBanquetTable__controlReleased(self):
    """Handle the fire key being released.

    In tug-of-war mode the shot is triggered by key mashing (see
    __controlPressed), so releasing the key does nothing.  Otherwise,
    releasing the key fires the charged water shot.

    Fix: the decompiled original contained a stray constant statement
    (`1`) and a second bare `if` — artifacts of a broken if/elif
    reconstruction.  The elif restores the intended control flow; it is
    behaviorally equivalent because __endFireWater bails out when no aim
    is in progress (aimStart is None).
    """
    if self.TugOfWarControls:
        pass
    elif self.state == 'Controlled':
        self._DistributedBanquetTable__endFireWater()
def _DistributedBanquetTable__upArrow(self, pressed):
    # Vertical input: +1 while held, back to 0 on release.
    self._DistributedBanquetTable__incrementChangeSeq()
    self._DistributedBanquetTable__cleanupPitcherAdvice()
    if pressed:
        self.arrowVert = 1
    elif self.arrowVert > 0:
        self.arrowVert = 0

def _DistributedBanquetTable__downArrow(self, pressed):
    self._DistributedBanquetTable__incrementChangeSeq()
    self._DistributedBanquetTable__cleanupPitcherAdvice()
    if pressed:
        self.arrowVert = -1
    elif self.arrowVert < 0:
        self.arrowVert = 0

def _DistributedBanquetTable__rightArrow(self, pressed):
    # Horizontal input: +1 while held, back to 0 on release.
    self._DistributedBanquetTable__incrementChangeSeq()
    self._DistributedBanquetTable__cleanupPitcherAdvice()
    if pressed:
        self.arrowHorz = 1
    elif self.arrowHorz > 0:
        self.arrowHorz = 0

def _DistributedBanquetTable__leftArrow(self, pressed):
    self._DistributedBanquetTable__incrementChangeSeq()
    self._DistributedBanquetTable__cleanupPitcherAdvice()
    if pressed:
        self.arrowHorz = -1
    elif self.arrowHorz < 0:
        self.arrowHorz = 0

def _DistributedBanquetTable__incrementChangeSeq(self):
    # 8-bit wrap-around sequence number attached to heading updates.
    self.changeSeq = self.changeSeq + 1 & 255

def stopPosHprBroadcast(self):
    taskMgr.remove(self.posHprBroadcastName)

def startPosHprBroadcast(self):
    """Begin periodically broadcasting the pitcher heading to other clients."""
    self.b_clearSmoothing()
    self.d_sendPitcherPos()
    taskMgr.remove(self.posHprBroadcastName)
    taskMgr.doMethodLater(self._DistributedBanquetTable__broadcastPeriod,
                          self._DistributedBanquetTable__posHprBroadcast,
                          self.posHprBroadcastName)

def _DistributedBanquetTable__posHprBroadcast(self, task):
    # Send the current heading, then reschedule ourselves.
    self.d_sendPitcherPos()
    taskMgr.doMethodLater(self._DistributedBanquetTable__broadcastPeriod,
                          self._DistributedBanquetTable__posHprBroadcast,
                          self.posHprBroadcastName)
    return Task.done
def d_sendPitcherPos(self):
    """Broadcast the pitcher heading with a network timestamp."""
    timestamp = globalClockDelta.getFrameNetworkTime()
    self.sendUpdate('setPitcherPos',
                    [self.changeSeq, self.waterPitcherNode.getH(), timestamp])

def setPitcherPos(self, changeSeq, h, timestamp):
    """Receive a remote pitcher heading; feed the smoother if it is running."""
    self.changeSeq = changeSeq
    if not self.smoothStarted:
        self.waterPitcherNode.setH(h)
    else:
        now = globalClock.getFrameTime()
        local = globalClockDelta.networkToLocalTime(timestamp, now)
        self.pitcherSmoother.setH(h)
        self.pitcherSmoother.setTimestamp(local)
        self.pitcherSmoother.markPosition()

def _DistributedBanquetTable__watchControls(self, task):
    # Per-frame task: turn held arrow keys into pitcher rotation.
    if self.arrowHorz:
        self._DistributedBanquetTable__movePitcher(self.arrowHorz)
    else:
        self._DistributedBanquetTable__setMoveSound(None)
    return Task.cont

def _DistributedBanquetTable__movePitcher(self, xd):
    """Rotate the pitcher by xd * rotateSpeed degrees/second (xd is -1/0/+1)."""
    dt = globalClock.getDt()
    h = self.waterPitcherNode.getH() - xd * self.rotateSpeed * dt
    h %= 360
    self.notify.debug('rotSpeed=%.2f curH=%.2f xd =%.2f, dt = %.2f, h=%.2f'
                      % (self.rotateSpeed, self.waterPitcherNode.getH(), xd, dt, h))
    self.waterPitcherNode.setH(h)
    if xd:
        self._DistributedBanquetTable__setMoveSound(self.pitcherMoveSfx)
def reloadPosition(self):
    """Reset the smoother to the pitcher's current heading."""
    self.pitcherSmoother.clearPositions(0)
    self.pitcherSmoother.setHpr(self.waterPitcherNode.getHpr())
    self.pitcherSmoother.setPhonyTimestamp()

def forceToTruePosition(self):
    """Snap the pitcher to the last reported heading and drop the history."""
    if self.pitcherSmoother.getLatestPosition():
        self.pitcherSmoother.applySmoothHpr(self.waterPitcherNode)
    self.pitcherSmoother.clearPositions(1)
def getSprayTrack(self, color, origin, target, dScaleUp, dHold, dScaleDown, horizScale = 1.0, vertScale = 1.0, parent = render):
# Build and return a Sequence interval that shows a water spray stretching
# from `origin` to `target` (either may be a point or a zero-arg callable
# evaluated at play time), holds it, shrinks it back and cleans up.
# dScaleUp/dHold/dScaleDown are the durations of the three phases.
track = Sequence()
# Length of the unscaled spray prop model; used to derive the Y scale.
SPRAY_LEN = 1.5
sprayProp = MovieUtil.globalPropPool.getProp('spray')
sprayScale = hidden.attachNewNode('spray-parent')
sprayRot = hidden.attachNewNode('spray-rotate')
spray = sprayRot
spray.setColor(color)
if color[3] < 1.0:
spray.setTransparency(1)
# Phase 0: parent the prop under rotate/scale nodes and aim it at the target.
def showSpray(sprayScale, sprayRot, sprayProp, origin, target, parent):
if callable(origin):
origin = origin()
if callable(target):
target = target()
sprayRot.reparentTo(parent)
sprayRot.clearMat()
sprayScale.reparentTo(sprayRot)
sprayScale.clearMat()
sprayProp.reparentTo(sprayScale)
sprayProp.clearMat()
sprayRot.setPos(origin)
sprayRot.lookAt(Point3(target))
track.append(Func(showSpray, sprayScale, sprayRot, sprayProp, origin, target, parent))
# Target scale stretches the prop to span origin->target, with independent
# horizontal/vertical thickness factors.
def calcTargetScale(target = target, origin = origin, horizScale = horizScale, vertScale = vertScale):
if callable(target):
target = target()
if callable(origin):
origin = origin()
distance = Vec3(target - origin).length()
yScale = distance / SPRAY_LEN
targetScale = Point3(yScale * horizScale, yScale, yScale * vertScale)
return targetScale
# Phase 1: grow the spray, then report the hit, then hold it.
track.append(LerpScaleInterval(sprayScale, dScaleUp, calcTargetScale, startScale = Point3(0.01, 0.01, 0.01)))
track.append(Func(self.checkHitObject))
track.append(Wait(dHold))
# Phase 2: re-anchor the prop at the target so the shrink recedes toward it.
def prepareToShrinkSpray(spray, sprayProp, origin, target):
if callable(target):
target = target()
if callable(origin):
origin = origin()
sprayProp.setPos(Point3(0.0, -SPRAY_LEN, 0.0))
spray.setPos(target)
track.append(Func(prepareToShrinkSpray, spray, sprayProp, origin, target))
track.append(LerpScaleInterval(sprayScale, dScaleDown, Point3(0.01, 0.01, 0.01)))
# Phase 3: return the prop to the pool and remove the helper nodes.
def hideSpray(spray, sprayScale, sprayRot, sprayProp, propPool):
sprayProp.detachNode()
MovieUtil.removeProp(sprayProp)
sprayRot.removeNode()
sprayScale.removeNode()
track.append(Func(hideSpray, spray, sprayScale, sprayRot, sprayProp, MovieUtil.globalPropPool))
return track
def checkHitObject(self):
    """After a spray lands, credit a hit on the boss if we sprayed him.

    Only the client whose toon fired (the local avatar) reports the hit,
    so damage is not double-counted.

    Fix: the decompiled original duplicated the identical damage
    computation in both arms of an `if self.TugOfWarControls` test; the
    arms were byte-for-byte the same, so the test is dropped and the
    computation kept once.  Behavior is unchanged.
    """
    if not self.hitObject:
        return None
    if self.avId != base.localAvatar.doId:
        return None
    tag = self.hitObject.getNetTag('pieCode')
    pieCode = int(tag)
    if pieCode == ToontownGlobals.PieCodeBossCog:
        self.hitBossSoundInterval.start()
        self.sendUpdate('waterHitBoss', [self.index])
        # Damage scales with how much power the shot was fired at.
        if self.lastPowerFired < self.YELLOW_POWER_THRESHOLD:
            damage = 1
        elif self.lastPowerFired < self.RED_POWER_THRESHOLD:
            damage = 2
        else:
            damage = 3
        self.boss.d_hitBoss(damage)
def waterHitBoss(self, tableIndex):
    # Broadcast receiver: play the hit sound for the table that scored.
    if self.index == tableIndex:
        self.hitBossSoundInterval.start()

def setupPowerBar(self):
    """Create the (initially hidden and empty) shot-power meter."""
    self.powerBar = DirectWaitBar(
        pos = (0.0, 0, -0.93999999999999995),
        relief = DGG.SUNKEN,
        frameSize = (-2.0, 2.0, -0.20000000000000001, 0.20000000000000001),
        borderWidth = (0.02, 0.02),
        scale = 0.25,
        range = 1,
        sortOrder = 50,
        frameColor = (0.5, 0.5, 0.5, 0.5),
        barColor = (0.75, 0.75, 1.0, 0.80000000000000004),
        text = '',
        text_scale = 0.26000000000000001,
        text_fg = (1, 1, 1, 1),
        text_align = TextNode.ACenter,
        text_pos = (0, -0.050000000000000003))
    self.power = 0
    self.powerBar['value'] = self.power
    self.powerBar.hide()

def resetPowerBar(self):
    """Zero the power meter and clear the tug-of-war key-press history."""
    self.power = 0
    self.powerBar['value'] = self.power
    self.powerBar['text'] = ''
    self.keyTTL = []
def _DistributedBanquetTable__beginFireWater(self):
    """Start charging a shot (hold-to-charge mode, local avatar only)."""
    if self.fireTrack and self.fireTrack.isPlaying():
        return None
    if self.aimStart != None:
        return None
    if not self.state == 'Controlled':
        return None
    if not self.avId == localAvatar.doId:
        return None
    self.aimStart = globalClock.getFrameTime()
    messenger.send('wakeup')
    taskMgr.add(self._DistributedBanquetTable__updateWaterPower, self.waterPowerTaskName)

def _DistributedBanquetTable__endFireWater(self):
    """Release the charged shot: compute its landing point and spray."""
    if self.aimStart == None:
        return None
    if not self.state == 'Controlled':
        return None
    if not self.avId == localAvatar.doId:
        return None
    taskMgr.remove(self.waterPowerTaskName)
    messenger.send('wakeup')
    self.aimStart = None
    origin = self.nozzle.getPos(render)
    target = self.boss.getPos(render)
    # Aim along the pitcher's heading; range scales with the charged power.
    angle = deg2Rad(self.waterPitcherNode.getH() + 90)
    fireVector = Point3(math.cos(angle), math.sin(angle), 0)
    if self.power < 0.001:
        self.power = 0.001
    self.lastPowerFired = self.power
    fireVector *= self.fireLength * self.power
    target = origin + fireVector
    # Cast a collision segment from the nozzle to the nominal target to
    # find what the stream actually hits first.
    segment = CollisionSegment(origin[0], origin[1], origin[2],
                               target[0], target[1], target[2])
    fromObject = render.attachNewNode(CollisionNode('pitcherColNode'))
    fromObject.node().addSolid(segment)
    fromObject.node().setFromCollideMask(ToontownGlobals.PieBitmask |
                                         ToontownGlobals.CameraBitmask |
                                         ToontownGlobals.FloorBitmask)
    fromObject.node().setIntoCollideMask(BitMask32.allOff())
    queue = CollisionHandlerQueue()
    base.cTrav.addCollider(fromObject, queue)
    base.cTrav.traverse(render)
    queue.sortEntries()
    self.hitObject = None
    if queue.getNumEntries():
        entry = queue.getEntry(0)
        target = entry.getSurfacePoint(render)
        self.hitObject = entry.getIntoNodePath()
    base.cTrav.removeCollider(fromObject)
    fromObject.removeNode()
    self.d_firingWater(origin, target)
    self.fireWater(origin, target)
    self.resetPowerBar()
def _DistributedBanquetTable__updateWaterPower(self, task):
    """Per-frame task: grow and recolor the power bar while the key is held."""
    if not self.powerBar:
        print('### no power bar!!!')
        return task.done
    self.power = self._DistributedBanquetTable__getWaterPower(globalClock.getFrameTime())
    self.powerBar['value'] = self.power
    # Blue -> yellow -> red as the charge crosses the damage thresholds.
    if self.power < self.YELLOW_POWER_THRESHOLD:
        self.powerBar['barColor'] = VBase4(0.75, 0.75, 1.0, 0.80000000000000004)
    elif self.power < self.RED_POWER_THRESHOLD:
        self.powerBar['barColor'] = VBase4(1.0, 1.0, 0.0, 0.80000000000000004)
    else:
        self.powerBar['barColor'] = VBase4(1.0, 0.0, 0.0, 0.80000000000000004)
    return task.cont

def _DistributedBanquetTable__getWaterPower(self, time):
    """Map elapsed hold time to a power value in [0, 1].

    Power follows 1 - (1 - t)**waterPowerExponent over one charge period
    of waterPowerSpeed seconds; t wraps modulo 1, so holding past a full
    charge starts the ramp over.
    """
    elapsed = max(time - self.aimStart, 0.0)
    t = elapsed / self.waterPowerSpeed
    if t > 1:
        t %= 1
    power = min(1.0, 1 - math.pow(1 - t, self.waterPowerExponent))
    return power
def d_firingWater(self, origin, target):
    """Broadcast the shot's endpoints so other clients can replay it."""
    self.sendUpdate('firingWater',
                    [origin[0], origin[1], origin[2],
                     target[0], target[1], target[2]])

def firingWater(self, startX, startY, startZ, endX, endY, endZ):
    # Broadcast receiver: replay another client's shot.
    self.fireWater(Point3(startX, startY, startZ), Point3(endX, endY, endZ))

def fireWater(self, origin, target):
    """Play the spray visual plus the squirt sound from origin to target."""
    color = VBase4(0.75, 0.75, 1, 0.80000000000000004)
    sprayTrack = self.getSprayTrack(color, origin, target,
                                    0.10000000000000001,
                                    0.29999999999999999,
                                    0.10000000000000001,
                                    0.10000000000000001,
                                    0.10000000000000001)
    # Cap the sound at the visual's duration.
    duration = min(self.squirtSfx.length(), sprayTrack.getDuration())
    soundTrack = SoundInterval(self.squirtSfx, node = self.waterPitcherModel,
                               duration = duration)
    self.fireTrack = Parallel(sprayTrack, soundTrack)
    self.fireTrack.start()

def getPos(self, wrt = render):
    return self.tableGroup.getPos(wrt)

def getLocator(self):
    return self.tableGroup
def enterFlat(self, avId):
    """The boss squashed this table flat; squish any toon riding it."""
    self.prepareForPhaseFour()
    self.resetPowerBar()
    self.notify.debug('enterFlat %d' % self.index)
    if self.avId:
        # Drop the riding toon back onto the floor.
        toon = base.cr.doId2do.get(self.avId)
        if toon:
            toon.wrtReparentTo(render)
            toon.setZ(0)
    self.tableGroup.setScale(1, 1, 0.01)
    if self.avId and self.avId == localAvatar.doId:
        localAvatar.b_squish(ToontownGlobals.BossCogDamageLevels[ToontownGlobals.BossCogMoveAttack])
def exitFlat(self):
# Restore the table from its squashed state and return the riding toon
# to a neutral pose.
# NOTE(review): indentation was lost in this dump — it is unclear whether
# setAnimState/loop below belong inside the `toon == localAvatar` branch
# or run for every toon; confirm against the upstream source.
self.tableGroup.setScale(1.0)
if self.avId:
toon = base.cr.doId2do.get(self.avId)
if toon:
if toon == localAvatar:
self.boss.toCraneMode()
toon.b_setAnimState('neutral')
toon.setAnimState('neutral')
toon.loop('leverNeutral')
def _DistributedBanquetTable__allowDetect(self, task):
    """Fade the table back in and re-enable local control requests."""
    if self.fadeTrack:
        self.fadeTrack.finish()
    self.fadeTrack = Sequence(
        self.tableGroup.colorScaleInterval(0.20000000000000001, VBase4(1, 1, 1, 1)),
        Func(self.tableGroup.clearColorScale),
        Func(self.tableGroup.clearTransparency))
    self.fadeTrack.start()
    self.allowLocalRequestControl = True

def gotBossZapped(self):
    """The boss knocked us off: show the leaving UI and request release."""
    self.showExiting()
    self.d_requestFree(True)

def _DistributedBanquetTable__upArrowKeyPressed(self):
    if self.TugOfWarControls:
        self._DistributedBanquetTable__pressHandler(0)

def _DistributedBanquetTable__downArrowKeyPressed(self):
    if self.TugOfWarControls:
        self._DistributedBanquetTable__pressHandler(1)

def _DistributedBanquetTable__pressHandler(self, index):
    # Count only presses of the currently-expected key; when alternating
    # keys are required, the expected key flips after each valid press.
    if index == self.buttons[0]:
        self.keyTTL.insert(0, 1.0)
        if not self.OnlyUpArrow:
            self.buttons.reverse()

def _DistributedBanquetTable__spawnUpdateKeyPressRateTask(self):
    taskMgr.remove(self.taskName(self.UPDATE_KEY_PRESS_RATE_TASK))
    taskMgr.doMethodLater(0.10000000000000001,
                          self._DistributedBanquetTable__updateKeyPressRateTask,
                          self.taskName(self.UPDATE_KEY_PRESS_RATE_TASK))

def _DistributedBanquetTable__killUpdateKeyPressRateTask(self):
    taskMgr.remove(self.taskName(self.UPDATE_KEY_PRESS_RATE_TASK))
def _DistributedBanquetTable__updateKeyPressRateTask(self, task):
    """Recurring 0.1 s task (tug-of-war mode): turn key-mash rate into power.

    Each press recorded in keyTTL lives for 1 second; the press rate minus
    BASELINE_KEY_RATE drives the power bar up or down, with diminishing
    returns near full charge.

    Fixes over the decompiled original: the state guard used string
    membership (`self.state not in 'Controlled'` — a substring test that
    would misbehave for any state name that is a substring of
    'Controlled') instead of equality, and an unreachable `continue`
    after the `break` plus a pointless `del self.keyTTL` before rebinding
    were dropped.  Behavior is otherwise unchanged.
    """
    if self.state != 'Controlled':
        return Task.done
    # Age every recorded key press by one tick...
    for i in range(len(self.keyTTL)):
        self.keyTTL[i] -= 0.10000000000000001
    # ...and truncate at the first expired entry (newest are at the front).
    for i in range(len(self.keyTTL)):
        if self.keyTTL[i] <= 0:
            self.keyTTL = self.keyTTL[0:i]
            break
    self.keyRate = len(self.keyTTL)
    keyRateDiff = self.keyRate - self.BASELINE_KEY_RATE
    diffPower = keyRateDiff / 300.0
    if self.power < 1 and diffPower > 0:
        # Diminishing returns as the bar approaches full.
        diffPower = diffPower * math.pow(1 - self.power, 1.25)
    newPower = self.power + diffPower
    if newPower > 1:
        newPower = 1
    elif newPower < 0:
        newPower = 0
    self.notify.debug('diffPower=%.2f keyRate = %d, newPower=%.2f' % (diffPower, self.keyRate, newPower))
    self.power = newPower
    self.powerBar['value'] = newPower
    if self.power < self.YELLOW_POWER_THRESHOLD:
        self.powerBar['barColor'] = VBase4(0.75, 0.75, 1.0, 0.80000000000000004)
    elif self.power < self.RED_POWER_THRESHOLD:
        self.powerBar['barColor'] = VBase4(1.0, 1.0, 0.0, 0.80000000000000004)
    else:
        self.powerBar['barColor'] = VBase4(1.0, 0.0, 0.0, 0.80000000000000004)
    self._DistributedBanquetTable__spawnUpdateKeyPressRateTask()
    return Task.done
def _DistributedBanquetTable__setMoveSound(self, sfx):
    """Switch the looping pitcher-move sound; pass None to stop it."""
    if sfx == self.moveSound:
        return
    if self.moveSound:
        self.moveSound.stop()
    self.moveSound = sfx
    if self.moveSound:
        base.playSfx(self.moveSound, looping = 1, volume = 0.5)
| [
"[email protected]"
] | |
d716a64d25d8ed53904876bd54c1a98a7b88deb5 | 9dab41a71bf19a9ad17ee3e9f77c0f58aebd1d6d | /python/uline/uline/uline/handlers/app/distributor/balance/distributorBalanceList.py | 4116d637e99da40fb08daa5c8fdc82a1bdbb023b | [] | no_license | apollowesley/Demo | f0ef8ec6c4ceb0aec76771da8dd9a62fb579eac8 | 471c4af95d3a7222d6933afc571a8e52e8fe4aee | refs/heads/master | 2021-02-15T04:01:51.590697 | 2018-01-29T01:44:29 | 2018-01-29T01:44:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,367 | py | # -*- coding: utf-8 -*-
from __future__ import division
import tornado.web
import tornado.gen
from uline.handlers.baseHandlers import DtAdminHandler
from .form import DistributorBalanceList
from uline.public.constants import TO_PAY, PAY_CHANNEL
from datetime import timedelta, datetime
from uline.public.permit import check_permission
class DistributorBalanceListHandler(DtAdminHandler):
@tornado.web.authenticated
@check_permission
def prepare(self):
form = DistributorBalanceList(self)
if not form.validate():
self.redirect('/dist/balance/dt/list')
return
self.dt_daily_balance_no = form.ddb_no.data
@tornado.web.asynchronous
@tornado.gen.coroutine
def get(self):
data = self.do_execute()
self.render('distributor/balance/distributorBalanceList.html', data=data)
def do_execute(self):
dt_id = self.current_user
query = """select
to_char(ddbi.pay_start_time, 'YYYY-MM-DD HH24:MI:SS'),
to_char(ddbi.need_pay_time,'YYYY-MM-DD'),
ddbi.rcvAcctName,
ddbi.channel,
ddbi.rcvacctno,
ddbi.rcvBankName,
ddbi.tranAmt,
ddbi.pay_status,
ddbi.failure_details
from dt_daily_balance_info as ddbi
inner join dt_balance db on db.dt_id = ddbi.dt_id
where ddbi.dt_id=%(dt_id)s
and ddbi.dt_daily_balance_no=%(dt_daily_balance_no)s;"""
ret = self.db.selectSQL(query, {'dt_daily_balance_no': self.dt_daily_balance_no, 'dt_id': dt_id})
fields = ['create_at', 'need_pay_time', 'rcvAcctName', 'channel', 'balance_account', 'rcvBankName',
'tranAmt', 'pay_status', 'failure_details']
dt_info = dict(zip(fields, ret))
dt_info['tranAmt'], dt_info['pay_status'], dt_info['channel'] = dt_info['tranAmt'] / 100, \
TO_PAY[str(dt_info['pay_status'])], \
PAY_CHANNEL[str(dt_info['channel'])],
dt_info['need_pay_time'] = datetime.strptime(dt_info['need_pay_time'], '%Y-%m-%d') - timedelta(days=1)
dt_info['need_pay_time'] = datetime.strftime(dt_info['need_pay_time'], '%Y-%m-%d')
# todo 缺少划付状态详情数据表
return dt_info
| [
"[email protected]"
] | |
8b37209b33d201b789d2658845aa87843ef7a8e0 | db144fdc9a1948cce066bed20912c32e1a18a8aa | /accounts/views.py | 49c0aa25bf7c13a2faa3ed61bf4acc3c6a75f458 | [] | no_license | masato932/django-blog3 | cd01101cbffdbaa33d2cb9bf696e5a5cdf8cd6fa | 769068ba356cf8e0cc0bbde76e82e116e58b8bab | refs/heads/main | 2023-05-13T20:14:43.706480 | 2021-06-05T14:03:13 | 2021-06-05T14:03:13 | 365,480,981 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | from django.shortcuts import render, redirect
from allauth.account import views
class LoginView(views.LoginView):
    """Allauth login view with a project-specific template."""
    template_name = 'accounts/login.html'


class LogoutView(views.LogoutView):
    """Allauth logout view; POST logs the user out and returns to the root."""
    template_name = 'accounts/logout.html'

    def post(self, *args, **kwargs):
        if self.request.user.is_authenticated:
            self.logout()
        return redirect('/')


class SignupView(views.SignupView):
    """Allauth signup view with a project-specific template."""
    template_name = 'accounts/signup.html'
# Create your views here.
| [
"[email protected]"
] | |
b62893ee1712e3ddf4365071e6596e2d820ac5dc | cf57cd3355471f035ca429302742b4eb4baf1214 | /Comparações/SHI-TOMASI/SHI-TOMASI_sift.py | 7a91ba9cb2b4ae56f47b6d8069c64cbee54c797b | [] | no_license | RobotColony-UEFS/feature-match | c56d78230d86948e5612a9645c71a0647eb94604 | ac421989aa1ee3893243122a0cf041b30e038a28 | refs/heads/master | 2022-11-27T15:31:20.570505 | 2020-08-04T19:24:17 | 2020-08-04T19:24:17 | 285,063,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,365 | py | #coding: utf-8
import cv2
import numpy as np
import mysql.connector
import math
# Connection to the local MySQL database that stores the matching results.
# NOTE(review): credentials are hard-coded here; consider moving them to
# configuration/environment variables.
mydb = mysql.connector.connect(
host="localhost",
user="descritores",
passwd="12345678",
database="referencias"
)
def desvio(vetResult):
    """Return (mean, population standard deviation) of a numeric sequence."""
    n = len(vetResult)
    media = float(sum(vetResult)) / n
    variancia = sum((valor - media) ** 2 for valor in vetResult) / n
    return (media, math.sqrt(variancia))
# Match the reference image against 13 test images using Shi-Tomasi corners
# described with SIFT, record per-image match/correct counts in MySQL, and
# finally store the aggregate means/deviations.
vet_matches = []
vet_corretos = []
# Load the reference image (grayscale) and shrink it to 40%.
img11 = cv2.imread("../../imgReferencia/img00.jpg", 0)
altura = img11.shape[0]
largura = img11.shape[1]
img1 = cv2.resize(img11, (int(largura*0.4), int(altura*0.4)))
# Shi-Tomasi corners of the reference, converted to OpenCV keypoints.
corners11 = cv2.goodFeaturesToTrack(img1, 100, 0.01, 10)
corners1 = np.int0(corners11)
kp1 = cv2.KeyPoint_convert(corners1)
sift = cv2.xfeatures2d.SIFT_create()
bf = cv2.BFMatcher(cv2.NORM_L2, crossCheck=True)
kp1, des1 = sift.compute(img1, kp1)
quantidadeImagens = 1
while(quantidadeImagens<=13):
acertos = 0
# Load and shrink the current test image, then extract its features.
img22 = cv2.imread("../../imgTeste/img"+str(quantidadeImagens)+".jpg", 0)
altura2 = img22.shape[0]
largura2 = img22.shape[1]
img2 = cv2.resize(img22, (int(largura2*0.4), int(altura2*0.4)))
corners22 = cv2.goodFeaturesToTrack(img2, 100, 0.01, 10)
corners2 = np.int0(corners22)
kp2 = cv2.KeyPoint_convert(corners2)
kp2, des2 = sift.compute(img2, kp2)
# Brute-force match and keep the 150 closest matches.
mat = bf.match(des1,des2)
mat = sorted(mat, key = lambda x:x.distance)
matches = mat[0:150]
# Ground-truth bounding box: first four lines are x, next four are y.
# NOTE(review): coordinates appear to be in the resized image's scale —
# confirm against how the .txt files were produced.
with open("../../imgTeste/img"+str(quantidadeImagens)+".txt",'r') as f:
texto=f.readlines()
posicao_x= np.float_(texto[0:4])
posicao_y = np.float_(texto[4:8])
min_x = float(min(posicao_x))
max_x = float(max(posicao_x))
min_y = float(min(posicao_y))
max_y = float(max(posicao_y))
if len(matches)>10:
# Estimate the homography and draw the projected reference outline.
src_pts = np.float32([ kp1[m.queryIdx].pt for m in matches ]).reshape(-1,1,2)
dst_pts = np.float32([ kp2[m.trainIdx].pt for m in matches ]).reshape(-1,1,2)
M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
h,w = img1.shape
pts = np.float32([ [0,0],[0,h-1],[w-1,h-1],[w-1,0] ]).reshape(-1,1,2)
dst = cv2.perspectiveTransform(pts,M)
img2 = cv2.polylines(img2,[np.int32(dst)],True,255,3, cv2.LINE_AA)
# Count matches whose destination falls inside the ground-truth box.
for pos in dst_pts:
if((pos[0][0]>(min_x) and pos[0][0]<(max_x)) and (pos[0][1]>(min_y) and pos[0][1]<(max_y))):
acertos+=1
# Save the annotated side-by-side visualization.
img3 = cv2.drawMatches(img1,kp1,img2,kp2,matches[:],None,flags=2)
cv2.imwrite("../resultados/shiTomasi-sift/img"+str(quantidadeImagens)+".jpg", img3)
vet_matches.append(len(matches))
vet_corretos.append(acertos)
# Persist this image's result.
mycursor = mydb.cursor()
sql = "INSERT INTO shiTomasi_sift(Nome, Matches, Correto, ImgReferente) VALUES (%s, %s, %s, %s)"
valor = ("ShiTomasi-Sift"+str(quantidadeImagens), len(matches), acertos, "img"+str(quantidadeImagens)+".jpg")
mycursor.execute(sql, valor)
mydb.commit()
print(len(matches), acertos)
quantidadeImagens+=1
# Aggregate statistics over all 13 images.
media_matches, desvio_matches = desvio(vet_matches)
media_corretos, desvio_corretos = desvio(vet_corretos)
porcentagem = (media_corretos/media_matches)*100
sql2 = "INSERT INTO medias_desvios(Nome, MediaMatches, DesvioMatches, MediaCorretos, DesvioCorretos, Porcentagem) VALUES (%s, %s, %s, %s, %s, %s)"
valor2 = ("shiTomasi_sift", media_matches, desvio_matches, media_corretos, desvio_corretos, porcentagem)
mycursor.execute(sql2, valor2)
mydb.commit()
"[email protected]"
] | |
9c85a3150d50dce18e37c4fd3faae85c74370fc8 | 32b628faa8b8ca8d11d8837cc495c0013f58b71a | /scripts/matrix2matrix.py | db91bd6468fb706939d9b97cc2c5810de2e084d0 | [
"BSD-2-Clause"
] | permissive | jaquol/cgat | 40b81617625ae9f0ba352caf38c2afd6a13c58f6 | d26fab0dff2192d4accc128d2895e668254d7b65 | refs/heads/master | 2021-01-12T22:33:46.186451 | 2016-01-15T16:56:43 | 2016-01-15T16:56:43 | 49,868,597 | 1 | 0 | null | 2016-01-18T10:10:24 | 2016-01-18T10:10:24 | null | UTF-8 | Python | false | false | 17,852 | py | '''
matrix2matrix.py - operate on matrices
======================================
:Author: Andreas Heger
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------

This script applies one or more transformation methods (see ``--method``)
to an input matrix. The following matrix formats are supported:

* full: full matrix with row and column headers (unless --no-headers is given.)
* sparse: sparse matrix
* phylip: phylip formatted matrix, but using tabs as separators and long names.
Methods:
sort-rows
sort rows by order in --rows-tsv-file
sort-columns
sort columns by order in --columns-tsv-file
mask-rows
set rows matching ids in --rows-tsv-file to --value
mask-columns
set columns matching ids in --columns-tsv-file to --value
mask-rows-and-columns
set rows and columns matching ids in --columns-tsv-file to --value (and)
Usage
-----
Example::
python matrix2matrix.py --help
Type::
python matrix2matrix.py --help
for command line help.
Command line options
--------------------
'''
import sys
import math
import StringIO
import numpy
import CGAT.Experiment as E
import CGAT.IOTools as IOTools
import CGAT.CorrespondenceAnalysis as CorrespondenceAnalysis
import CGAT.MatlabTools as MatlabTools
import scipy
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.

    Reads one or more matrices from stdin (multiple matrices are separated
    by lines starting with ``>``), applies the requested chain of
    transformations (``--method`` may be given several times; each method
    sees the output of the previous one) and writes the result to stdout.
    """

    if argv is None:
        argv = sys.argv

    parser = E.OptionParser(
        version="%prog version: $Id: matrix2matrix.py 2782 2009-09-10 11:40:29Z andreas $")

    parser.add_option("-m", "--method", dest="methods", type="choice", action="append",
                      choices=("normalize-by-min-diagonal", "normalize-by-column",
                               "log", "ln", "negzero2value",
                               "set-diagonal",
                               "subtract-matrix", "mix-matrix", "normalize-by-matrix",
                               "normalize-by-column-max", "normalize-by-row-max",
                               "normalize-by-column-min", "normalize-by-row-min",
                               "normalize-by-column-median", "normalize-by-row-median",
                               "normalize-by-column-mean", "normalize-by-row-mean",
                               "normalize-by-column-total", "normalize-by-row-total",
                               "correspondence-analysis",
                               "normalize-by-value",
                               "add-value",
                               "sort-rows", "sort-columns",
                               "transpose",
                               "upper-bound", "lower-bound",
                               "subtract-first-col", "multiply-by-value", "divide-by-value",
                               "mask-rows", "mask-columns", "mask-rows-and-columns",
                               "symmetrize-mean", "symmetrize-max", "symmetrize-min",
                               ),
                      help="""method to use [default=%default]""")

    parser.add_option("-s", "--scale", dest="scale", type="float",
                      help="factor to scale matrix by [default=%default].")

    parser.add_option("-f", "--format", dest="format", type="string",
                      help="output number format [default=%default].")

    parser.add_option("--rows-tsv-file", dest="filename_rows", type="string",
                      help="filename with rows to mask [default=%default].")

    parser.add_option("--columns-tsv-file", dest="filename_columns", type="string",
                      help="filename with columns to mask [default=%default].")

    parser.add_option("-p", "--parameters", dest="parameters", type="string",
                      help="Parameters for various functions.")

    parser.add_option("-t", "--header-names", dest="headers", action="store_true",
                      help="matrix has row/column headers.")

    parser.add_option("--no-headers", dest="headers", action="store_false",
                      help="matrix has no row/column headers.")

    parser.add_option("-a", "--value", dest="value", type="float",
                      help="value to use for various algorithms.")

    parser.add_option("-i", "--input-format", dest="input_format", type="choice",
                      choices=("full", "sparse", "phylip"),
                      help="""input format for matrix.""")

    parser.add_option("-o", "--output-format", dest="output_format", type="choice",
                      choices=("full", "sparse", "phylip"),
                      help="""output format for matrix.""")

    parser.add_option("--missing-value", dest="missing", type="float",
                      help="value to use for missing values. If not set, missing values will cause the script to fail [default=%default].")

    parser.set_defaults(
        methods=[],
        scale=1.0,
        headers=True,
        format="%6.4f",
        output_format="full",
        input_format="full",
        value=0.0,
        parameters="",
        write_separators=True,
        filename_rows=None,
        filename_columns=None,
        missing=None,
    )

    (options, args) = E.Start(parser)

    # parameters are consumed in order by methods that need them
    options.parameters = options.parameters.split(",")

    lines = filter(lambda x: x[0] != "#", sys.stdin.readlines())

    if len(lines) == 0:
        raise IOError("no input")

    # multiple matrices are delimited by lines starting with ">"
    chunks = filter(lambda x: lines[x][0] == ">", range(len(lines)))

    if not chunks:
        options.write_separators = False
        chunks = [-1]

    chunks.append(len(lines))

    if options.filename_rows:
        row_names, n = IOTools.ReadList(open(options.filename_rows, "r"))
    if options.filename_columns:
        column_names, n = IOTools.ReadList(open(options.filename_columns, "r"))

    for chunk in range(len(chunks) - 1):

        try:
            raw_matrix, row_headers, col_headers = MatlabTools.readMatrix(
                StringIO.StringIO("".join(lines[chunks[chunk] + 1:chunks[chunk + 1]])),
                format=options.input_format,
                headers=options.headers,
                missing=options.missing)
        # BUGFIX note: modernized from the py2-only ``except ValueError, msg``
        except ValueError as msg:
            E.warn("matrix could not be read: %s" % msg)
            continue

        nrows, ncols = raw_matrix.shape

        E.debug("read matrix: %i x %i, %i row titles, %i colum titles" %
                (nrows, ncols, len(row_headers), len(col_headers)))

        parameter = 0

        for method in options.methods:

            # work on a fresh copy; raw_matrix is re-synchronized at the end
            # of each iteration so successive methods chain
            matrix = numpy.reshape(numpy.array(raw_matrix), raw_matrix.shape)

            if method in ("normalize-by-matrix", "subtract-matrix", "mix-matrix", "add-matrix"):

                other_matrix, other_row_headers, other_col_headers = MatlabTools.ReadMatrix(
                    open(options.parameters[parameter], "r"),
                    headers=options.headers)

                other_nrows, other_ncols = other_matrix.shape

                if options.loglevel >= 2:
                    options.stdlog.write("# read second matrix from %s: %i x %i, %i row titles, %i colum titles.\n" %
                                         (options.parameters[parameter],
                                          other_nrows, other_ncols, len(other_row_headers), len(other_col_headers)))

                parameter += 1

            elif method == "normalize-by-min-diagonal":
                for x in range(nrows):
                    for y in range(ncols):
                        m = min(raw_matrix[x, x], raw_matrix[y, y])
                        if m > 0:
                            matrix[x, y] = raw_matrix[x, y] / m

            elif method == "normalize-by-column":
                if nrows != ncols:
                    # BUGFIX: was ``raise "only supported for symmeric
                    # matrices."`` -- raising a string has been illegal since
                    # Python 2.6 and produced a bare TypeError that lost the
                    # message.
                    raise ValueError("only supported for symmetric matrices.")
                for x in range(nrows):
                    for y in range(ncols):
                        if raw_matrix[y, y] > 0:
                            matrix[x, y] = raw_matrix[x, y] / raw_matrix[y, y]

            elif method == "normalize-by-value":
                matrix = raw_matrix / float(options.parameters[parameter])
                parameter += 1

            elif method == "normalize-by-row":
                # NOTE(review): "normalize-by-row" is not listed in the
                # --method choices, so this branch is currently unreachable
                # from the command line.
                if nrows != ncols:
                    # BUGFIX: same string-raise defect as above.
                    raise ValueError("only supported for symmetric matrices.")
                for x in range(nrows):
                    for y in range(ncols):
                        if raw_matrix[y, y] > 0:
                            matrix[x, y] = raw_matrix[x, y] / raw_matrix[x, x]

            elif method == "subtract-first-col":
                for x in range(nrows):
                    for y in range(ncols):
                        matrix[x, y] -= raw_matrix[x, 0]

            elif method.startswith("normalize-by-column"):
                if method.endswith("max"):
                    f = max
                elif method.endswith("min"):
                    f = min
                elif method.endswith("median"):
                    f = scipy.median
                elif method.endswith("mean"):
                    f = scipy.mean
                elif method.endswith("total"):
                    f = sum
                for y in range(ncols):
                    m = f(matrix[:, y])
                    if m != 0:
                        for x in range(nrows):
                            matrix[x, y] = matrix[x, y] / m

            elif method.startswith("normalize-by-row"):
                if method.endswith("max"):
                    f = max
                elif method.endswith("min"):
                    f = min
                elif method.endswith("median"):
                    f = scipy.median
                elif method.endswith("mean"):
                    f = scipy.mean
                elif method.endswith("total"):
                    f = sum
                for x in range(nrows):
                    m = f(matrix[x, :])
                    if m != 0:
                        for y in range(ncols):
                            matrix[x, y] = raw_matrix[x, y] / m

            elif method == "negzero2value":
                # set zero/negative values to a value
                for x in range(nrows):
                    for y in range(ncols):
                        if matrix[x, y] <= 0:
                            matrix[x, y] = options.value

            elif method == "minmax":
                # put min into the upper and max into the lower triangle
                for x in range(nrows):
                    for y in range(ncols):
                        matrix[x, y], matrix[y, x] = \
                            min(matrix[x, y], matrix[y, x]), \
                            max(matrix[x, y], matrix[y, x])

            elif method == "log":
                # apply log to all (positive) values.
                for x in range(nrows):
                    for y in range(ncols):
                        if matrix[x, y] > 0:
                            matrix[x, y] = math.log10(matrix[x, y])

            elif method == "ln":
                for x in range(nrows):
                    for y in range(ncols):
                        if matrix[x, y] > 0:
                            matrix[x, y] = math.log(matrix[x, y])

            elif method == "transpose":
                matrix = numpy.transpose(matrix)
                row_headers, col_headers = col_headers, row_headers
                nrows, ncols = ncols, nrows

            elif method == "mul":
                matrix = numpy.dot(matrix, numpy.transpose(matrix))
                col_headers = row_headers

            elif method == "multiply-by-value":
                matrix *= options.value

            elif method == "divide-by-value":
                matrix /= options.value

            elif method == "add-value":
                matrix += options.value

            elif method == "angle":
                # write angles between col vectors
                v1 = numpy.sqrt(numpy.sum(numpy.power(matrix, 2), 0))
                matrix = numpy.dot(numpy.transpose(matrix), matrix)
                row_headers = col_headers
                nrows = ncols
                for x in range(nrows):
                    for y in range(ncols):
                        matrix[x, y] /= v1[x] * v1[y]

            elif method == "euclid":
                # convert to euclidean distance matrix between columns
                # NOTE(review): numpy.float is deprecated in modern numpy;
                # kept for the numpy version this py2-era script targets.
                matrix = numpy.zeros((ncols, ncols), numpy.float)
                for c1 in range(0, ncols - 1):
                    for c2 in range(c1 + 1, ncols):
                        for r in range(0, nrows):
                            d = raw_matrix[r][c1] - raw_matrix[r][c2]
                            matrix[c1, c2] += (d * d)
                        matrix[c2, c1] = matrix[c1, c2]
                matrix = numpy.sqrt(matrix)
                row_headers = col_headers
                nrows = ncols

            elif method.startswith("symmetrize"):
                f = method.split("-")[1]
                if f == "max":
                    f = max
                elif f == "min":
                    f = min
                elif f == "mean":
                    f = lambda x, y: float(x + y) / 2
                if nrows != ncols:
                    raise ValueError(
                        "symmetrize only available for symmetric matrices")
                if row_headers != col_headers:
                    raise ValueError(
                        "symmetrize not available for permuted matrices")
                for x in range(nrows):
                    for y in range(ncols):
                        matrix[x, y] = matrix[y, x] = f(
                            matrix[x, y], matrix[y, x])

            elif method == "sub":
                matrix = options.value - matrix

            elif method in ("lower-bound", "upper-bound"):

                boundary = float(options.parameters[parameter])
                new_value = float(options.parameters[parameter + 1])
                parameter += 2
                if method == "upper-bound":
                    for x in range(nrows):
                        for y in range(ncols):
                            if matrix[x, y] > boundary:
                                matrix[x, y] = new_value
                else:
                    for x in range(nrows):
                        for y in range(ncols):
                            if matrix[x, y] < boundary:
                                matrix[x, y] = new_value

            elif method == "subtract-matrix":
                matrix = matrix - other_matrix

            elif method == "add-matrix":
                matrix = matrix + other_matrix

            elif method == "normalize-by-matrix":

                # set 0s to 1 in the other matrix to avoid division by zero
                for x in range(nrows):
                    for y in range(ncols):
                        if other_matrix[x, y] == 0:
                            other_matrix[x, y] = 1.0

                matrix = matrix / other_matrix

            elif method == "mix-matrix":
                # take the upper triangle from the second matrix
                for x in range(len(other_row_headers) - 1):
                    for y in range(x + 1, len(other_col_headers)):
                        matrix[x, y] = other_matrix[x, y]

            elif method == "set-diagonal":
                value = float(options.parameters[parameter])
                for x in range(min(nrows, ncols)):
                    matrix[x, x] = value
                parameter += 1

            # NOTE: a duplicate, unreachable ``elif method == "transpose"``
            # branch (shadowed by the one above) was removed here.

            elif method == "correspondence-analysis":
                row_indices, col_indices = CorrespondenceAnalysis.GetIndices(
                    raw_matrix)
                map_row_new2old = numpy.argsort(row_indices)
                map_col_new2old = numpy.argsort(col_indices)

                matrix, row_headers, col_headers = CorrespondenceAnalysis.GetPermutatedMatrix(raw_matrix,
                                                                                              map_row_new2old,
                                                                                              map_col_new2old,
                                                                                              row_headers=row_headers,
                                                                                              col_headers=col_headers)

            elif method == "mask-rows":
                r = set(row_names)
                for x in range(len(row_headers)):
                    if row_headers[x] in r:
                        matrix[x, :] = options.value

            elif method == "mask-columns":
                r = set(column_names)
                for x in range(len(col_headers)):
                    if col_headers[x] in r:
                        matrix[:, x] = options.value

            elif method == "mask-rows-and-columns":
                r = set(row_names)
                c = set(column_names)
                for x in range(len(row_headers)):
                    for y in range(len(col_headers)):
                        if row_headers[x] in r and col_headers[y] in c:
                            matrix[x, y] = options.value

            # feed this method's output into the next method
            raw_matrix = numpy.reshape(numpy.array(matrix), matrix.shape)

        else:
            # for simple re-formatting jobs (no methods given, ``matrix``
            # would otherwise be unbound)
            matrix = raw_matrix

        if options.write_separators:
            options.stdout.write(lines[chunks[chunk]])

        MatlabTools.writeMatrix(sys.stdout, matrix,
                                value_format=options.format,
                                format=options.output_format,
                                row_headers=row_headers,
                                col_headers=col_headers)

    E.Stop()


if __name__ == "__main__":
    sys.exit(main(sys.argv))
| [
"[email protected]"
] | |
24d38b1c79dc504b389b64276c398a8a39f2423d | d7016f69993570a1c55974582cda899ff70907ec | /sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2022_02_01_preview/aio/operations/_operations.py | 2a6e7c95997bb3ead85375c355f3241e726885e6 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 4,963 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._operations import build_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
    """Operations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.containerregistry.v2022_02_01_preview.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: auto-generated by AutoRest; edits here are lost on regeneration.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.OperationListResult"]:
        """Lists all of the available Azure Container Registry REST API operations.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OperationListResult or the result of cls(response)
        :rtype:
        ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerregistry.v2022_02_01_preview.models.OperationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2022-02-01-preview")  # type: str
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationListResult"]
        # map HTTP status codes to exception types raised by map_error below
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Builds the HTTP request for the first page (template URL) or for a
        # continuation page (next_link returned by the service).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_request(
                    api_version=api_version,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    api_version=api_version,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Deserializes one page into (next_link, items) for AsyncItemPaged.
        async def extract_data(pipeline_response):
            deserialized = self._deserialize("OperationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Sends the request for one page and validates the response status.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
                request,
                stream=False,
                **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': "/providers/Microsoft.ContainerRegistry/operations"}  # type: ignore
| [
"[email protected]"
] | |
3ec19460ce437a87592c19e706dffcfc90cf10ba | 70b1c91530b28d86e0fe8f46187b33322a6b9228 | /backend/manage.py | 00289a0b69ac55cbdc207727fa8d516788743952 | [] | no_license | crowdbotics-apps/satsuscreen-dev-1528 | 34fd3d6a3c4a717dcc657eb8ef055e736f33b70d | 9b910435fc4ef034efe521985446055c688d52d7 | refs/heads/master | 2022-04-04T20:58:24.447428 | 2020-01-21T17:16:51 | 2020-01-21T17:16:51 | 235,398,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 640 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's command-line utility (``manage.py``)."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE',
                          'satsuscreen_dev_1528.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Django itself is missing (or not on PYTHONPATH) -- re-raise with a
        # friendlier hint, chaining the original error for debugging.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
6430ad985b5c08e8f0e7f98428386d3713eb65b2 | c45c9e74ffafcceebf395cc1c5f5d31659988c19 | /answer_search.py | ff4bf3d7d7e148d57a000bb5cd58779991814eb8 | [] | no_license | tedrepo/QABasedOnMedicalKnowledgeGraph | f68ca297254218c72ef18a26c98f1910610f7154 | f690b80e2a7fb85455b45d3829b6998be9ebc739 | refs/heads/master | 2020-03-30T23:14:39.416415 | 2018-10-05T04:12:19 | 2018-10-05T04:12:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | #!/usr/bin/env python3
# coding: utf-8
# File: answer_search.py
# Author: lhy<[email protected],https://huangyong.github.io>
# Date: 18-10-5
from py2neo import Graph,Node
class AnswerSearcher:
    def __init__(self):
        """Connect to the local Neo4j instance used as the knowledge graph.

        NOTE(review): credentials are hard-coded here; consider moving them
        to configuration.
        """
        self.g = Graph(
            host="127.0.0.1",
            http_port=7474,
            user="lhy",
            password="lhy123")
    '''执行cypher查询,并返回相应结果'''
    # (Translation of the note above: execute the Cypher queries and return
    # the corresponding results.)
    def search_main(self, sqls):
        """Run each Cypher query in ``sqls`` and print every returned record.

        :param sqls: iterable of Cypher query strings.
        :returns: None -- results are only printed, not collected.
        """
        for sql in sqls:
            ress = self.g.run(sql).data()
            for res in ress:
                print(res)
        return
if __name__ == '__main__':
    # BUGFIX: was ``AnswerSearch()`` -- a NameError; the class defined above
    # is ``AnswerSearcher``.
    searcher = AnswerSearcher()
"[email protected]"
] | |
b47c9a85013089dec45758e6489eb731972070ee | 4ece3041f2ed0cd312dc70fd3c7c240924dbb6ae | /pyathena/__init__.py | 8335fb21281d596d87e5bc8a90d091895483fde9 | [
"MIT"
] | permissive | ivssh/PyAthena | 175c5dfff0289a7ceccfe9a47ac490985535f669 | 156c51f19b46ea2f89612b3383937d78942bc990 | refs/heads/master | 2020-03-27T13:07:58.417397 | 2018-07-21T13:08:41 | 2018-07-21T13:08:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,351 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
from pyathena.error import * # noqa
__version__ = '1.3.0'
# Globals https://www.python.org/dev/peps/pep-0249/#globals
# apilevel: supported DB-API level; threadsafety=3 means threads may share
# the module, connections and cursors; paramstyle='pyformat' means queries
# use %(name)s / %s placeholders.
apilevel = '2.0'
threadsafety = 3
paramstyle = 'pyformat'
class DBAPITypeObject:
    """PEP 249 type object: compares equal to any of its member type names.

    https://www.python.org/dev/peps/pep-0249/#type-objects-and-constructors
    """
    def __init__(self, *values):
        self.values = values
    def __cmp__(self, other):
        # Python 2 three-way comparison fallback; not invoked on Python 3.
        if other in self.values:
            return 0
        return 1 if other < self.values else -1
    def __eq__(self, other):
        # equal to any of the wrapped type names
        return other in self.values
# https://docs.aws.amazon.com/athena/latest/ug/data-types.html
# PEP 249 type objects grouping Athena's column type names into the DB-API
# categories; e.g. ``description`` type codes can be compared with STRING.
STRING = DBAPITypeObject('char', 'varchar', 'map', 'array', 'row')
BINARY = DBAPITypeObject('varbinary')
BOOLEAN = DBAPITypeObject('boolean')
NUMBER = DBAPITypeObject('tinyint', 'smallint', 'bigint', 'integer',
                         'real', 'double', 'float', 'decimal')
DATE = DBAPITypeObject('date')
DATETIME = DBAPITypeObject('timestamp')
# PEP 249 date/time constructors, aliased to the stdlib datetime types.
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def connect(*args, **kwargs):
    """PEP 249 module-level ``connect``: open a new Athena connection.

    All arguments are forwarded unchanged to
    ``pyathena.connection.Connection``.
    """
    # Imported lazily -- presumably to avoid a circular import between the
    # package __init__ and pyathena.connection; confirm before hoisting.
    from pyathena.connection import Connection
    return Connection(*args, **kwargs)
| [
"[email protected]"
] | |
2a012620dfe09c0f6c1c04320e49696991285bed | 8e6203db7383475f1c24a590f0456330b969bb4b | /optbinning/binning/distributed/plots.py | dba20f0cab79a00b42588937c020ed96d925680e | [
"Apache-2.0"
] | permissive | guillermo-navas-palencia/optbinning | 6fdfc764a214052b4d7d8e0b59114f0a63e6d5a8 | 73aee82008ebe88b732430e7c5764da57fb4d3ae | refs/heads/master | 2023-08-28T13:33:43.536143 | 2023-08-22T19:20:18 | 2023-08-22T19:20:18 | 231,076,826 | 377 | 91 | Apache-2.0 | 2023-09-05T20:14:14 | 2019-12-31T11:17:44 | Python | UTF-8 | Python | false | false | 1,370 | py | """
Binning sketch plots.
"""
# Guillermo Navas-Palencia <[email protected]>
# Copyright (C) 2020
import matplotlib.pyplot as plt
import numpy as np
def plot_progress_divergence(df, divergence):
    """Plot the running divergence of a binning sketch versus the number of
    processed records, with its moving mean and an approximate 95%
    confidence band.

    :param df: progress log with columns ``n_add``, ``n_records`` and
        ``divergence`` (one row per ``add`` call).
    :param divergence: name of the divergence measure (y-axis label only).
    """
    n = len(df)
    n_add = df.n_add
    n_records = df.n_records
    div = df.divergence
    # window == len(df) with min_periods=1, i.e. expanding statistics
    mv_div_mean = div.rolling(n, min_periods=1).mean()
    mv_div_std = div.rolling(n, min_periods=1).std()
    # standard error of the mean.  NOTE(review): the first rolling std value
    # is NaN (std of a single sample), so the band's first point is
    # undefined -- confirm this is intended.
    mv_div_std /= np.sqrt(np.arange(1, n+1))
    # 1.959963984540054 is the two-sided z-value of a 95% normal CI;
    # the lower band is clipped at 0 (divergence is non-negative)
    div_low = np.maximum(0, div - mv_div_std * 1.959963984540054)
    div_high = div + mv_div_std * 1.959963984540054
    div_label = "divergence ({:.5f})".format(div.values[-1])
    mv_div_label = "moving mean ({:.5f})".format(mv_div_mean.values[-1])
    mv_std_label = "standard error ({:.5f})".format(mv_div_std.values[-1])
    plt.plot(n_records, div, label=div_label)
    plt.plot(n_records, mv_div_mean, linestyle="-.", color="green",
             label=mv_div_label)
    plt.fill_between(n_records, div_low, div_high, alpha=0.2, color="green",
                     label=mv_std_label)
    plt.title("Progress after {:} add and {} processed records".
              format(int(n_add.values[-1]), int(n_records.values[-1])),
              fontsize=14)
    plt.xlabel("Processed records", fontsize=12)
    plt.ylabel("Divergence: {}".format(divergence), fontsize=12)
    plt.legend(fontsize=12)
    plt.show()
| [
"[email protected]"
] | |
142b4edaf5e0cb5022cd5869f8cbdf4542e77689 | a4df0ee67d0d56fc8595877470318aed20dd4511 | /vplexapi-6.2.0.3/vplexapi/models/health_state.py | 819d13f492a6fb68862c506a14264a4633267ac3 | [
"Apache-2.0"
] | permissive | QD888/python-vplex | b5a7de6766840a205583165c88480d446778e529 | e2c49faee3bfed343881c22e6595096c7f8d923d | refs/heads/main | 2022-12-26T17:11:43.625308 | 2020-10-07T09:40:04 | 2020-10-07T09:40:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,553 | py | # coding: utf-8
"""
VPlex REST API
A defnition for the next-gen VPlex API # noqa: E501
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class HealthState(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    allowed enum values
    """
    # String constants enumerating the VPlex health states this model allows.
    UNKNOWN = "unknown"
    OK = "ok"
    DEGRADED = "degraded"
    MINOR_FAILURE = "minor-failure"
    MAJOR_FAILURE = "major-failure"
    CRITICAL_FAILURE = "critical_failure"
    NON_RECOVERABLE_ERROR = "non-recoverable-error"
    ISOLATED = "isolated"
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Both maps are empty: this model is a pure enum and carries no
    # serializable attributes.
    swagger_types = {
    }
    attribute_map = {
    }
    def __init__(self):  # noqa: E501
        """HealthState - a model defined in Swagger"""  # noqa: E501
        self.discriminator = None
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models (anything with to_dict) in
        # attribute values, including inside lists and dicts.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # NOTE: defining __eq__ without __hash__ makes instances unhashable
        # on Python 3 -- acceptable for this generated model.
        if not isinstance(other, HealthState):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
56b442f8b7bfc47ef533d1a9d1c90373518ecca3 | df7736726d5b041e46b490e409a1d4481ef8c7f1 | /tools/rosmaster/src/rosmaster/threadpool.py | 1261e2f5e4aa3947450c12ff477e0830735e537e | [] | no_license | strawlab/ros_comm | 62f5d2bc68d6cbe85c071eabb7487164d6c328be | 6f7ea2feeb3c890699518cb6eb3d33faa15c5306 | refs/heads/master | 2020-05-18T02:26:43.463444 | 2012-08-05T07:10:58 | 2012-08-05T07:10:58 | 5,301,610 | 13 | 31 | null | 2019-09-24T22:49:12 | 2012-08-05T07:10:44 | Python | UTF-8 | Python | false | false | 8,088 | py | # Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Revision $Id: threadpool.py 8327 2010-02-17 01:23:15Z kwc $
"""
Internal threadpool library for zenmaster.
Adapted from U{http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/203871}
Added a 'marker' to tasks so that multiple tasks with the same
marker are not executed. As we are using the thread pool for i/o
tasks, the marker is set to the i/o name. This prevents a slow i/o
for gobbling up all of our threads
"""
import threading, logging, traceback
from time import sleep
class MarkedThreadPool:
    """Flexible thread pool class. Creates a pool of threads, then
    accepts tasks that will be dispatched to the next available
    thread."""
    def __init__(self, numThreads):
        """Initialize the thread pool with numThreads workers."""
        # __resizeLock guards the worker-thread list; __taskLock guards the
        # task queue and the set of in-flight markers.
        self.__threads = []
        self.__resizeLock = threading.Condition(threading.Lock())
        self.__taskLock = threading.Condition(threading.Lock())
        self.__tasks = []
        # markers of tasks currently executing; at most one task per marker
        # runs at any time (markers are typically i/o names)
        self.__markers = set()
        self.__isJoining = False
        self.set_thread_count(numThreads)
    def set_thread_count(self, newNumThreads):
        """ External method to set the current pool size. Acquires
        the resizing lock, then calls the internal version to do real
        work."""
        # Can't change the thread count if we're shutting down the pool!
        if self.__isJoining:
            return False
        self.__resizeLock.acquire()
        try:
            self.__set_thread_count_nolock(newNumThreads)
        finally:
            self.__resizeLock.release()
        return True
    def __set_thread_count_nolock(self, newNumThreads):
        """Set the current pool size, spawning or terminating threads
        if necessary. Internal use only; assumes the resizing lock is
        held."""
        # If we need to grow the pool, do so
        while newNumThreads > len(self.__threads):
            newThread = ThreadPoolThread(self)
            self.__threads.append(newThread)
            newThread.start()
        # If we need to shrink the pool, do so
        # (threads are asked to exit cooperatively; they finish their
        # current task first)
        while newNumThreads < len(self.__threads):
            self.__threads[0].go_away()
            del self.__threads[0]
    def get_thread_count(self):
        """@return: number of threads in the pool."""
        self.__resizeLock.acquire()
        try:
            return len(self.__threads)
        finally:
            self.__resizeLock.release()
    def queue_task(self, marker, task, args=None, taskCallback=None):
        """Insert a task into the queue. task must be callable;
        args and taskCallback can be None."""
        # NOTE(review): __isJoining is read without holding a lock here --
        # presumably benign under CPython; confirm if porting.
        if self.__isJoining == True:
            return False
        if not callable(task):
            return False
        self.__taskLock.acquire()
        try:
            self.__tasks.append((marker, task, args, taskCallback))
            return True
        finally:
            self.__taskLock.release()
    def remove_marker(self, marker):
        """Remove the marker from the currently executing tasks. Only one
        task with the given marker can be executed at a given time"""
        if marker is None:
            return
        self.__taskLock.acquire()
        try:
            self.__markers.remove(marker)
        finally:
            self.__taskLock.release()
    def get_next_task(self):
        """ Retrieve the next task from the task queue. For use
        only by ThreadPoolThread objects contained in the pool."""
        self.__taskLock.acquire()
        try:
            retval = None
            for marker, task, args, callback in self.__tasks:
                # unmarked or not currently executing
                if marker is None or marker not in self.__markers:
                    retval = (marker, task, args, callback)
                    break
            if retval:
                # add the marker so we don't do any similar tasks
                self.__tasks.remove(retval)
                if marker is not None:
                    self.__markers.add(marker)
                return retval
            else:
                # nothing runnable right now (queue empty or all markers busy)
                return (None, None, None, None)
        finally:
            self.__taskLock.release()
    def join_all(self, wait_for_tasks = True, wait_for_threads = True):
        """ Clear the task queue and terminate all pooled threads,
        optionally allowing the tasks and threads to finish."""
        # Mark the pool as joining to prevent any more task queueing
        self.__isJoining = True
        # Wait for tasks to finish
        if wait_for_tasks:
            while self.__tasks != []:
                sleep(.1)
        # Tell all the threads to quit
        self.__resizeLock.acquire()
        try:
            self.__set_thread_count_nolock(0)
            self.__isJoining = True
            # Wait until all threads have exited
            # NOTE(review): __set_thread_count_nolock(0) empties
            # self.__threads, so this loop iterates over an empty list and
            # the joins never happen -- looks like a latent bug; confirm.
            if wait_for_threads:
                for t in self.__threads:
                    t.join()
                    # 'del t' only unbinds the loop variable; it does not
                    # remove anything from the pool
                    del t
            # Reset the pool for potential reuse
            self.__isJoining = False
        finally:
            self.__resizeLock.release()
class ThreadPoolThread(threading.Thread):
    """
    Pooled thread class.

    Repeatedly pulls tasks from its owning pool and executes them until
    asked to exit via go_away().
    """

    # seconds to sleep when the pool has no runnable task
    threadSleepTime = 0.1

    def __init__(self, pool):
        """Initialize the thread and remember the pool."""
        threading.Thread.__init__(self)
        # daemon thread: don't block program exit
        # (Thread.setDaemon is deprecated; the attribute form is equivalent)
        self.daemon = True
        self.__pool = pool
        self.__isDying = False

    def run(self):
        """
        Until told to quit, retrieve the next task and execute
        it, calling the callback if any.
        """
        while not self.__isDying:
            marker, cmd, args, callback = self.__pool.get_next_task()
            # If there's nothing to do, just sleep a bit
            if cmd is None:
                sleep(ThreadPoolThread.threadSleepTime)
            else:
                try:
                    try:
                        # BUGFIX: queue_task documents that ``args`` may be
                        # None, but ``cmd(*None)`` raises TypeError; treat
                        # None as "call with no arguments".
                        if args is None:
                            result = cmd()
                        else:
                            result = cmd(*args)
                    finally:
                        # always release the marker, even if the task raised,
                        # so other tasks with the same marker can run
                        self.__pool.remove_marker(marker)
                    if callback is not None:
                        callback(result)
                # (py2-only ``except Exception, e`` replaced with the
                # py2.6+/py3-compatible form; a failed task must never kill
                # a pool worker -- log and keep serving)
                except Exception:
                    logging.getLogger('rosmaster.threadpool').error(traceback.format_exc())

    def go_away(self):
        """ Exit the run loop next time through."""
        self.__isDying = True
| [
"[email protected]"
] | |
05f98c995114c13d415121f855678ae770c9123b | d93fe0484fc3b32c8fd9b33cc66cfd636a148ec4 | /AtCoder/ABC-D/107probD.py | 261c5013ca5189665dd06803268802f1623a399f | [] | no_license | wattaihei/ProgrammingContest | 0d34f42f60fa6693e04c933c978527ffaddceda7 | c26de8d42790651aaee56df0956e0b206d1cceb4 | refs/heads/master | 2023-04-22T19:43:43.394907 | 2021-05-02T13:05:21 | 2021-05-02T13:05:21 | 264,400,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,046 | py | # 足す時は0~iまで一律に足し、返すのはi番目の値
class imosBIT():
    """Fenwick (binary indexed) tree over indices 1..N.

    ``add(i, x)`` performs a point update at index ``i``; ``tree[i]``
    returns the prefix sum of all updates at indices <= ``i``.
    """
    def __init__(self, N):
        self.N = N
        self.bit = [0] * (N + 1)

    def __str__(self):
        # render the prefix sums at every index, list-style
        values = [str(self[idx]) for idx in range(1, self.N + 1)]
        return "[" + ", ".join(values) + "]"

    def __getitem__(self, i):
        # descend through parents: prefix sum over 1..i
        total = 0
        while i > 0:
            total += self.bit[i]
            i -= i & -i
        return total

    def add(self, i, x):
        # climb through the tree updating every range covering index i
        while i <= self.N:
            self.bit[i] += x
            i += i & -i
import sys
input = sys.stdin.readline
# AtCoder ABC107 D: find the median of the medians of all contiguous
# subarrays of A, by binary searching on the answer value.
N = int(input())
A = list(map(int, input().split()))
# invariant: answer >= l is always feasible, answer >= r never is
l = 0
r = max(A) + 1
while r-l > 1:
    x = (l+r)//2
    # P = prefix sums of +1/-1 indicators of (a >= x)
    P = [0]
    for a in A:
        p = +1 if a >= x else -1
        P.append(P[-1]+p)
    # count pairs i < j with P[i] <= P[j], i.e. subarrays whose median >= x,
    # using a Fenwick tree over the shifted (always positive) prefix values
    score = 0
    bit = imosBIT(2*N+5)
    for p in P:
        p += N+1
        score += bit[p]
        bit.add(p, 1)
    # the answer is the largest x such that at least half of all
    # N*(N+1)/2 subarrays have median >= x
    if score >= (N*(N+1)//2+1)//2:
        l = x
    else:
        r = x
print(l) | [
"[email protected]"
] | |
9afa4e20081e1cfa380b6474b33c811305e13c9a | 29623d43b2ab99e55a5d102e8d718015053073a3 | /aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/QueryCensorJobListRequest.py | 8bb5ce2996660555cc680f7e5f65657a90cfa511 | [
"Apache-2.0"
] | permissive | zxsted/aliyun-openapi-python-sdk | ed41e1d93c63557ecfbcffb6c84f87d4ed2a7f59 | a539d6e268fc07f314c5114c21ced4c8ead51dee | refs/heads/master | 2021-05-13T19:58:10.000697 | 2018-01-09T11:16:55 | 2018-01-09T11:16:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,894 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class QueryCensorJobListRequest(RpcRequest):
    """RPC request for the MTS ``QueryCensorJobList`` API (version 2014-06-18).

    Exposes plain get_/set_ accessors for each supported query parameter.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Mts', '2014-06-18', 'QueryCensorJobList')

    def _param(self, key):
        """Read one query parameter, or None when it has not been set."""
        return self.get_query_params().get(key)

    def get_ResourceOwnerId(self):
        return self._param('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_ResourceOwnerAccount(self):
        return self._param('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_JobIds(self):
        return self._param('JobIds')

    def set_JobIds(self, JobIds):
        self.add_query_param('JobIds', JobIds)

    def get_OwnerAccount(self):
        return self._param('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):
        return self._param('OwnerId')

    def set_OwnerId(self, OwnerId):
        self.add_query_param('OwnerId', OwnerId)
"[email protected]"
] | |
16526d6d991321e879d46e8d8cd308ef7e4677b9 | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Difference/trend_MovingMedian/cycle_5/ar_/test_artificial_1024_Difference_MovingMedian_5__20.py | f424dd4077963cad7c75f615bce42289c823621a | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 274 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
# Smoke-test: generate and process one synthetic daily series (N=1024,
# MovingMedian trend, cycle length 5, Difference transform, no noise,
# 20 exogenous variables, no autoregressive terms).
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 5, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 0);
"[email protected]"
] | |
2d9579c4f46e697e49c1ff2919d1f2e549706639 | 51305c54f8a316b6878a4462e1ba58a55c8e320f | /manager/thumbgen.py | 49950650252902242789d77d34a36ed34f1f3ad4 | [] | no_license | coinmenace/gck | 5b15b460335c0b52925f1875ccb4fecd416008e7 | fade84780cda218291cb2066808310c4871a06c8 | refs/heads/master | 2020-03-27T06:58:54.878353 | 2018-10-22T12:32:20 | 2018-10-22T12:32:20 | 146,153,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,078 | py | from PIL import Image, ImageFile
import glob, os
from threading import *
ImageFile.LOAD_TRUNCATED_IMAGES = True
class Thumbgen:
    """Generate a fixed ladder of square thumbnails (32x32 .. 2048x2048)
    for one source image, written by generateImages() under
    website/static/thumbs/<identifier>/."""
    def __init__(self,file,fullname,identifier):
        """file: bare file name (used for output naming); fullname: path to
        open; identifier: output sub-directory name."""
        sizes = [(32, 32),(64, 64),(128, 128),(256, 256),(512, 512),(1024, 1024),(2048, 2048)]
        self.generateThumb(identifier,file,fullname,sizes)
    def generateThumb(self,identifier,file,fullname,sizes):
        """Produce one thumbnail per size.

        NOTE(review): each thread is join()ed immediately after start(), so
        the sizes are actually processed one at a time; the Thread adds
        overhead without providing any concurrency.
        """
        for size in sizes:
            t=Thread(target=generateImages,args=(identifier,file,fullname,size,))
            t.start()
            t.join()
def generateImages(identifier, file, fullname, size):
    """Write one PNG thumbnail of `fullname`, bounded by `size`.

    The output goes to website/static/thumbs/<identifier>/ and is named
    <basename>_<w>_<h>.png.  `size` is a (width, height) bound; PIL's
    thumbnail() shrinks in place preserving aspect ratio.
    """
    im = Image.open(fullname)
    im.thumbnail(size)
    out_dir = "website/static/thumbs/" + identifier + "/"
    # FIX: the old exists()+mkdir() pair was a TOCTOU race when several
    # threads create the same directory; attempt the mkdir and tolerate
    # "already exists" (any real failure surfaces on im.save below).
    try:
        os.mkdir(out_dir)
    except OSError:
        pass
    file = out_dir + file.split(".")[0] + "_" + str(size[0]) + "_" + str(size[1])
    im.save(file + ".png", format="PNG", quality=95, optimize=True, progressive=True)
if __name__ == "__main__":
    # FIX: Thumbgen.__init__ takes (file, fullname, identifier); the previous
    # single-argument call raised TypeError before doing any work.  Here the
    # bare name doubles as the path and "sample" is the output sub-directory.
    filename = "sample.png"
    t = Thumbgen(filename, filename, "sample")
| [
"[email protected]"
] | |
82a203f3a27ae3767dc8c58441b3f4644e5a1399 | a2e607593dcbe5feaeedd9e9bd4caeaf06e46733 | /tests/ui/menus/test_opmenu.py | 464f5422d23c0778525972d3ce32d53d5aa537af | [] | no_license | all-in-one-of/Houdini-Toolbox | dd05b2c869e663b185c1997d326bfe7548fbf55f | c10663c46c0f1249a9b3c6b32d4384a4399849ed | refs/heads/master | 2020-06-13T01:10:11.832715 | 2019-08-30T07:24:47 | 2019-08-30T07:24:47 | 194,484,242 | 0 | 0 | null | 2019-06-30T06:42:17 | 2019-06-30T06:42:17 | null | UTF-8 | Python | false | false | 1,738 | py | """Tests for ht.ui.menus.opmenu module."""
# =============================================================================
# IMPORTS
# =============================================================================
# Python Imports
from mock import MagicMock, patch
import unittest
# Houdini Toolbox Imports
import ht.ui.menus.opmenu
# Houdini Imports
import hou
reload(ht.ui.menus.opmenu)
# =============================================================================
# CLASSES
# =============================================================================
class Test_create_absolute_reference_copy(unittest.TestCase):
    """Test ht.ui.menus.opmenu.create_absolute_reference_copy."""

    def test(self):
        """Test creating an absolute reference copy."""
        mock_node = MagicMock(spec=hou.Node)

        scriptargs = {
            "node": mock_node
        }

        # Stub out hou.ui for the duration of the test (removed again below).
        mock_ui = MagicMock()
        hou.ui = mock_ui

        ht.ui.menus.opmenu.create_absolute_reference_copy(scriptargs)

        # The node must be copied inside its own parent, with channels
        # referencing the originals via absolute (non-relative) paths.
        mock_node.parent.return_value.copyItems.assert_called_with([mock_node], channel_reference_originals=True, relative_references=False)

        del hou.ui
class Test_save_item_to_file(unittest.TestCase):
    """Test ht.ui.menus.opmenu.save_item_to_file."""

    # Patch the helper so the test performs no real file I/O.
    @patch("ht.ui.menus.opmenu.copy_item")
    def test(self, mock_copy):
        """Test saving an item to a file."""
        mock_node = MagicMock(spec=hou.Node)

        scriptargs = {
            "node": mock_node
        }

        ht.ui.menus.opmenu.save_item_to_file(scriptargs)

        # Saving simply delegates to copy_item() with the menu's node.
        mock_copy.assert_called_with(mock_node)
# =============================================================================
if __name__ == '__main__':
    # Allow running this test module directly, outside a test runner.
    unittest.main()
| [
"[email protected]"
] | |
08de1f917dace86cb7124d6906b69f3af922f184 | 7b55cfc4ffa7678e4c7b8f2312831ebbd549e54f | /proj1/tests/other-tests/strictfp_tests/error/test_if_continue.py | 99887a16a7eb4db8dd94fbe0fc70562f84d77ea9 | [] | no_license | czchen1/cs164-projects | 0d330efef85421e611a436b165428ba0ddfb3512 | a04cafbcaafd32e518227dacf89a6d7837bf9f57 | refs/heads/master | 2020-03-27T04:03:31.727524 | 2018-08-23T21:43:46 | 2018-08-23T21:43:46 | 145,909,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19 | py | if 1:
continue
| [
"[email protected]"
] | |
cfbc0b358cbc8a73771ab602b38fe9a5b825e242 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/488/usersdata/341/112971/submittedfiles/AvF_Parte3.py | 25d6392521b197f54357fe6d625293d8a2655e93 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | # -*- coding: utf-8 -*-
# Read how many numbers the user will type, then collect them into list a.
n = int(input('Digite a quantidade de números: '))
a = []
for i in range(n):
    # FIX: the original line had an unbalanced ')' (SyntaxError) and appended
    # the prompt string itself instead of reading a value; read and convert
    # the user's number (int assumed -- use float if decimals are expected).
    a.append(int(input('Digite os respectivos números: ')))
"[email protected]"
] | |
# Top-level training-loop settings.
MAIN_PARAMS = {
    "EPISODES": 20000,
    "MEAN_EPISODE": 50,
    "MAX_TIME": 200,
    "RENDER": True,
    "MAX_MEAN_REWARD": 200,  # minimum reward before saving model
}
# Policy-gradient agent hyperparameters; list-valued entries are per tank.
AGENT_PARAMS = {
    "N_TANKS": 1,
    "SS_POSITION": 0.5,
    "VALVE_START_POSITION": 0.2,
    "ACTION_DELAY": [5],
    "INIT_ACTION": 0.3,
    "VALVEPOS_UNCERTAINTY": 0,
    "EPSILON_DECAY": [1],
    "LEARNING_RATE": [0.0005],
    "HIDDEN_LAYER_SIZE": [[5, 5]],
    "BATCH_SIZE": 5,
    "MEMORY_LENGTH": 10000,
    "OBSERVATIONS": 4,  # level, gradient, is_above 0.5, prevous valve position
    "GAMMA": 0.9,
    "EPSILON": [0],
    "EPSILON_MIN": [0],
    "BASE_LINE_LENGTH": 1,
    "Z_VARIANCE": [0.05],
    "SAVE_MODEL": [True],
    "LOAD_MODEL": [False],
    "TRAIN_MODEL": [True],
    "LOAD_MODEL_NAME": [""],
    "LOAD_MODEL_PATH": "Policy_Gradient/Tank_1/",
    "SAVE_MODEL_PATH": "Policy_Gradient/Tank_1/",
}
# Model parameters Tank 1
# Physical tank geometry; levels are fractions of the tank height.
TANK1_PARAMS = {
    "height": 10,
    "init_level": 0.5,
    "width": 10,
    "pipe_radius": 0.5,
    "max_level": 0.75,
    "min_level": 0.25,
}
# Inflow-disturbance model for tank 1.
TANK1_DIST = {
    "add": True,
    "pre_def_dist": False,
    "nom_flow": 1,  # 2.7503
    "var_flow": 0.1,
    "max_flow": 2,
    "min_flow": 0.7,
    "add_step": False,
    "step_time": int(MAIN_PARAMS["MAX_TIME"] / 2),
    "step_flow": 2,
    "max_time": MAIN_PARAMS["MAX_TIME"],
}
# Per-tank collections consumed by the environment.
TANK_PARAMS = [TANK1_PARAMS]
TANK_DIST = [TANK1_DIST]
| [
"[email protected]"
] | |
c3a893c3d848b53fed2af2a0af5ef2a746813b2d | 352f7d1258e51d3b7e8cfcbb4b527c3e27a68fe5 | /tests/test_img_server.py | b8eca0fb172da1de0c121455a4bcb1751b25020c | [] | no_license | lidingke/fiberGeometry | 67b53535ca1060af1ab29de915f1190258d7986e | 1455fd815884a735d5b9e87aff07244ca9a95a23 | refs/heads/master | 2020-05-21T16:45:06.374649 | 2018-02-25T06:30:15 | 2018-02-25T06:30:15 | 62,809,512 | 1 | 1 | null | 2017-08-29T03:21:54 | 2016-07-07T13:37:45 | C | UTF-8 | Python | false | false | 2,947 | py | # coding:utf-8
from setting.config import SIMULATOR_IMG_SERVER_COFIG
from SDK.simulator.client import Client
from SDK.simulator.server import ImgServer, SeverMain, SharpSever
from threading import Thread
import multiprocessing
from tornado.ioloop import IOLoop
from functools import partial
from util.getimg import getImage
from tornado.iostream import StreamClosedError
import time
import logging
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)
def test_sharpserver():
    """Smoke-test: a SharpSever can be constructed and queried for all data."""
    server = SharpSever()
    server.getAll()
def test_imgserver():
    u"""Exercise the camera-simulator / image-server end to end.

    Starts the server in a thread, checks a served frame has the expected
    size, then uses get_change() to switch the image source and verifies
    whether the served bytes match the reference image.
    :return:
    """
    host, port, method, path = SIMULATOR_IMG_SERVER_COFIG
    port = 9885
    # port = 9801
    Thread(target = SeverMain, args=(host, port, method, path)).start()
    # multiprocessing.Process(target=servermain).start()
    # time.sleep(1)
    # Reference image that the server is later told to serve verbatim.
    img = getImage('IMG/midoc.BMP')
    imgstr = img.tobytes()
    # Default source: same byte length as the reference, different content.
    result = IOLoop.current().run_sync(Client(port=port).get_img_once)
    assert len(result) == len(imgstr)
    assert imgstr != result
    print len(result)
    # Switch the server to the fixed reference image: bytes must now match.
    para = ('getImage', 'IMG/midoc.BMP')
    IOLoop.current().run_sync(partial(Client(port=port).get_change,para))
    result = IOLoop.current().run_sync(Client(port=port).get_img_once)
    assert len(result) == len(imgstr)
    assert imgstr == result
    # Switch to a random image from a directory: same size, different bytes.
    para = ('randomImg', 'IMG/G652/pk/')
    IOLoop.current().run_sync(partial(Client(port=port).get_change, para))
    result = IOLoop.current().run_sync(Client(port=port).get_img_once)
    assert len(result) == len(imgstr)
    assert imgstr != result
    IOLoop.current().run_sync(Client(port=port).close_server)
def test_getimg_multi_connect():
    u"""Stress-test fetching frames over many short-lived connections.

    Opens 100 consecutive client connections and checks every returned
    frame has the expected byte length; a closed stream ends the test early.
    :return:
    """
    host, port, method, path = SIMULATOR_IMG_SERVER_COFIG
    port = 9883
    # port = 9801
    img = getImage('IMG/midoc.BMP')
    imgstr = img.tobytes()
    # port = 9801
    Thread(target = SeverMain, args=(host, port, method, path)).start()
    # multiprocessing.Process(target=SeverMain, args=(port,)).start()
    print 'start multi connect'
    for x in range(0,100):
        try:
            # time.sleep(0.5)
            # A fresh Client per iteration exercises connection setup/teardown.
            result = IOLoop.current().run_sync(Client(port=port).get_img_once)
            assert len(result) == len(imgstr)
        except StreamClosedError:
            logger.warning("Lost host at client %s")
            return
        except Exception as e:
            print 'range time', x
            raise e
        if x%50 == 0:
            print 'create times',x, time.time()
    IOLoop.current().run_sync(Client(port=port).close_server)
# def test_imgserver():
# Thread(target = SeverMain).start()
# multiprocessing.Process(target=servermain).start()
# time.sleep(1)
if __name__ == "__main__":
    # Manual run: point an already-running server on port 9880 at a new
    # random-image source directory.
    port = 9880
    para = ('randomImg', 'IMG/emptytuple/eptlight2')
    IOLoop.current().run_sync(partial(Client(port=port).get_change, para))
"[email protected]"
] | |
ccffdde7de02461543a3f4f909b19626b7520c9f | f516b7561b93f640bcb376766a7ecc3440dcbb99 | /leetcode/easy/add-binary.py | a7a66ad52358184d587c15dba4b509ef2bcc902c | [
"Apache-2.0"
class Solution(object):
    def addBinary(self, a, b):
        """Add two binary strings and return their sum as a binary string.

        :type a: str
        :type b: str
        :rtype: str
        """
        # Make `a` the longer operand, then walk both from the least
        # significant digit, propagating a carry.
        if len(b) > len(a):
            a, b = b, a
        a = a[::-1]
        b = b[::-1]
        digits = []
        carry = 0
        for i in range(len(a)):
            # FIX: the original used '/' for the carry, which yields a float
            # on Python 3 and corrupts the output ("1.0" digits); divmod
            # keeps everything in integers on both Python 2 and 3.
            total = int(a[i]) + carry
            if i < len(b):
                total += int(b[i])
            carry, digit = divmod(total, 2)
            digits.append(str(digit))
        if carry:
            digits.append(str(carry))
        # join() avoids the quadratic cost of repeated string concatenation.
        return ''.join(reversed(digits))
# Quick manual check: 1010 (10) + 1011 (11) should print 10101 (21).
result = Solution().addBinary('1010', '1011')
print(result)
| [
"[email protected]"
] | |
3bc801af96cf998efd961d2ff892da8cd5f95e93 | 3de11c5630cad4ca816ad17dd2f6c743b8799108 | /djangorestframework/tutorial/tutorial/settings.py | 57a3ef605fb5ea039f858ff6b08cc8fa7ff71296 | [] | no_license | greenfrog82/TIL_Python | a6f03b0ae6f2260310faa5ef59d4bd01dcf6a1ed | 015116c5ff4a14f531e3693f9cfd3a921a674b81 | refs/heads/master | 2022-12-09T22:34:49.485937 | 2021-05-11T10:59:41 | 2021-05-11T10:59:41 | 154,969,150 | 0 | 1 | null | 2022-12-08T01:20:11 | 2018-10-27T13:44:56 | Python | UTF-8 | Python | false | false | 4,216 | py | """
Django settings for tutorial project.
Generated by 'django-admin startproject' using Django 2.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import datetime
from django.core.management.utils import get_random_secret_key
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; load it from an
# environment variable (e.g. os.environ["DJANGO_SECRET_KEY"]) before deploying.
SECRET_KEY = '%pt7&cwica7@md!culsrv)0u)v$p*)ivk2-w5&lgv^5&2q5h7%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    # 'allauth',
    # 'allauth.account',
    # NOTE(review): rest_auth.registration normally depends on django-allauth;
    # with 'allauth'/'allauth.account' commented out above, the registration
    # endpoints may fail -- confirm before relying on them.
    'rest_auth.registration',
    'snippets.apps.SnippetsConfig',
    # 'users.apps.UsersConfig',
    # 'comment.apps.CommnetConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tutorial.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        # 'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
        'rest_framework.authentication.TokenAuthentication',
    ),
}
# JWT_AUTH = {
#     'JWT_EXPIRATION_DELTA': datetime.timedelta(minutes=15),
# }
CUSTOM_USER_CONFIG = {
    'PAGINATION_INFO': {
        'PAGE_SIZE': 5,
        'MAX_PAGE_SIZE': 10000
    }
}
# For django-rest-auth
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
SITE_ID = 1
ACCOUNT_EMAIL_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_VERIFICATION = 'optional'
# For Hash ID
# NOTE(review): generating the salt at import time gives every process restart
# a different value, invalidating all previously issued hash ids; use a fixed
# secret from the environment instead.
HASHID_FIELD_SALT = get_random_secret_key()
"[email protected]"
] | |
# For each of a test cases: read two integers and emit "(gcd lcm)" pairs,
# space-separated, on one line.
a = int(input())
string = ''
for i in range(a):
    temp1, temp2 = num1, num2 = [int(ele) for ele in input().split()]
    # Subtraction-based Euclid: the loop ends with num1 == num2 == gcd.
    while num1 != num2:
        if num1 > num2:
            num1 = num1 - num2
        else:
            num2 = num2 - num1
    # FIX: use integer division; '/' yields a float on Python 3 and can lose
    # precision for large products even though the LCM is exact.
    lcm = temp1 * temp2 // num1
    string += '(' + str(num1) + ' ' + str(lcm) + ')'
    string += ' '
print(string)
"[email protected]"
] | |
b85d7944f883d5fc1dae7e069f5d5cb234104815 | 0df124c41cbaa94750df79fc70bf911d298610a7 | /train_kFold.py | af272569fc2d9f5c6934814ab1624fffa7f18f92 | [] | no_license | bcaitech1/p2-klue-HYLee1008 | 7093a9245fe3ad9bf29251a4c12f12a801b9f4f5 | c22d1a1ba8e3aa89198d786845a0ad6efc69e27c | refs/heads/main | 2023-04-10T11:18:11.500052 | 2021-04-22T11:23:23 | 2021-04-22T11:23:23 | 360,466,733 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,318 | py | import argparse
import pickle as pickle
import os
import pandas as pd
import numpy as np
import torch
import random
import transformers
import glob
import time
import json
import wandb
from sklearn.metrics import accuracy_score
from transformers import AutoTokenizer, BertForSequenceClassification, Trainer, TrainingArguments, BertConfig, BertTokenizerFast, BertModel, XLMRobertaTokenizer
from pathlib import Path
from sklearn.model_selection import KFold
from load_data import *
from model import BERTClassifier, XLMRoBERTAClassifier, BERTLarge, KoElectraClassifier, mbart
from loss import LabelSmoothingLoss
from torch.utils.tensorboard import SummaryWriter
def increment_path(path, exist_ok=False):
    """Automatically increment path, i.e. runs/exp --> runs/exp2, runs/exp3 etc.

    Args:
        path (str or pathlib.Path): f"{model_dir}/{args.name}".
        exist_ok (bool): reuse the existing path as-is instead of incrementing.

    Returns:
        str: `path` unchanged when it is free (or exist_ok is True), otherwise
        the next unused numbered variant (numbering starts at 2).
    """
    # FIX: `re` was used below but never imported at module level (NameError
    # as soon as the path already existed); keep the fix self-contained.
    import re

    path = Path(path)
    if exist_ok or not path.exists():
        return str(path)
    # Find existing "<path><number>" siblings and pick the next number.
    dirs = glob.glob(f"{path}*")
    # re.escape guards against regex metacharacters in the directory name.
    matches = [re.search(rf"{re.escape(path.stem)}(\d+)", d) for d in dirs]
    numbers = [int(m.groups()[0]) for m in matches if m]
    n = max(numbers) + 1 if numbers else 2
    return f"{path}{n}"
# seed 고정
def seed_everything(seed):
    """Seed every RNG in use (random, numpy, torch CPU/CUDA) and force
    deterministic cuDNN behaviour so training runs are reproducible."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)  # covers the multi-GPU case
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
def train(args):
    """Fine-tune the relation-extraction model with 5-fold cross validation.

    Each fold trains for `args.epochs` epochs; the weights with the best
    validation accuracy are kept as `<fold>_best_<epoch>.pth` and the final
    weights as `<fold>_last_<epoch>.pth` under an auto-numbered run directory.
    """
    # HuggingFace tokenizers warn when used with forked DataLoader workers.
    os.environ["TOKENIZERS_PARALLELISM"] = "false"
    # model save path
    save_dir = increment_path(os.path.join(args.model_dir, args.bert_model))
    os.makedirs(save_dir, exist_ok=True)
    # save args on .json file so the run can be reproduced later
    with open(os.path.join(save_dir, 'config.json'), 'w', encoding='utf-8') as f:
        json.dump(vars(args), f, ensure_ascii=False, indent=4)
    # set random seed
    seed_everything(args.seed)
    # load tokenizer
    tokenizer = AutoTokenizer.from_pretrained(args.bert_model)
    # load dataset
    train_dataset = load_data("/opt/ml/input/data/train/train.tsv")
    train_label = train_dataset['label'].values
    # tokenizing dataset
    tokenized_train = tokenized_dataset(train_dataset, tokenizer)
    # make dataset for pytorch.
    RE_train_dataset = RE_Dataset(tokenized_train, train_label)
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    kfold = KFold(n_splits=5)
    for fold, (train_index, valid_index) in enumerate(kfold.split(train_dataset), 1):
        train_sub = torch.utils.data.Subset(RE_train_dataset, train_index)
        valid_sub = torch.utils.data.Subset(RE_train_dataset, valid_index)
        train_loader = torch.utils.data.DataLoader(
            train_sub,
            batch_size=args.batch_size,
            shuffle=True,
            num_workers=args.num_workers
        )
        valid_loader = torch.utils.data.DataLoader(
            valid_sub,
            batch_size=args.batch_size,
            shuffle=False,
            num_workers=args.num_workers
        )
        # load model
        # FIX: an XLMRoBERTAClassifier used to be constructed here and
        # immediately overwritten by the mbart model, wasting time and GPU
        # memory every fold; only the model actually trained is built now.
        # (Note: the default --bert_model is 'xlm-roberta-large', which may
        # not be a valid mbart checkpoint -- confirm the intended backbone.)
        model = mbart(args.bert_model).to(device)
        # load optimizer & criterion (no weight decay on biases / LayerNorm)
        no_decay = ['bias', 'LayerNorm.weight']
        optimizer_grouped_parameters = [
            {'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
            {'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
        ]
        optimizer = torch.optim.AdamW(optimizer_grouped_parameters, lr=args.learning_rate)
        criterion = LabelSmoothingLoss(smoothing=args.smoothing)
        best_acc, last_epoch = 0, 0
        for epoch in range(1, args.epochs + 1):
            model.train()
            loss_value = 0
            start_time = time.time()
            for batch_id, item in enumerate(train_loader):
                input_ids = item['input_ids'].to(device)
                # token_type_ids = item['token_type_ids'].to(device)
                attention_mask = item['attention_mask'].to(device)
                labels = item['labels'].to(device)
                optimizer.zero_grad()
                output = model(input_ids, attention_mask)
                loss = criterion(output, labels)
                loss_value += loss.item()
                loss.backward()
                optimizer.step()
                # scheduler.step()
            train_loss = loss_value / (batch_id + 1)
            # evaluate model on dev set
            with torch.no_grad():
                model.eval()
                acc_vals = 0
                for batch_id, item in enumerate(valid_loader):
                    input_ids = item['input_ids'].to(device)
                    # token_type_ids = item['token_type_ids'].to(device)
                    attention_mask = item['attention_mask'].to(device)
                    labels = item['labels'].to(device)
                    output = model(input_ids, attention_mask)
                    pred = torch.argmax(output, dim=-1)
                    acc_item = (labels == pred).sum().item()
                    acc_vals += acc_item
                val_acc = acc_vals / len(valid_sub)
            time_taken = time.time() - start_time
            # metric = {'val_acc': val_acc}
            # wandb.log(metric)
            print("fold: {} epoch: {}, loss: {}, val_acc: {}, time taken: {}".format(fold, epoch, train_loss, val_acc, time_taken))
            # keep only the single best checkpoint per fold
            if best_acc < val_acc:
                print(f'best model! saved at fold {fold} epoch {epoch}')
                if os.path.isfile(f"{save_dir}/{fold}_best_{last_epoch}.pth"):
                    os.remove(f"{save_dir}/{fold}_best_{last_epoch}.pth")
                torch.save(model.state_dict(), f"{save_dir}/{fold}_best_{epoch}.pth")
                best_acc = val_acc
                last_epoch = epoch
        # save model (weights after the final epoch of this fold)
        torch.save(model.state_dict(), f"{save_dir}/{fold}_last_{epoch}.pth")
def main(args):
    """Entry point: run k-fold training with the parsed CLI arguments."""
    train(args)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Data and model checkpoints directories
    parser.add_argument('--seed', type=int, default=1024, help='random seed (default: 1024)')
    parser.add_argument('--epochs', type=int, default=10, help='number of epochs for train (deafult: 10)')
    parser.add_argument('--batch_size', type=int, default=16, help='input batch size for training (deafult: 16)')
    parser.add_argument('--num_workers', type=int, default=4, help='number of workers for dataloader (default: 4)')
    parser.add_argument('--smoothing', type=float, default=0.2, help='label smoothing facotr for label smoothing loss (default: 0.2)')
    parser.add_argument('--learning_rate', type=float, default=1e-5, help='learning rate for training (default: 1e-5)')
    parser.add_argument('--weight_decay', type=float, default=0.01, help='weight decay (default: 0.01)')
    parser.add_argument('--model_dir', type=str, default='./results/kfold', help='directory where model would be saved (default: ./results)')
    # Alternative backbone checkpoints that have been tried:
    # xlm-roberta-large
    # joeddav/xlm-roberta-large-xnli
    # monologg/koelectra-base-v3-discriminator
    # facebook/mbart-large-cc25
    parser.add_argument('--bert_model', type=str, default='xlm-roberta-large', help='backbone bert model for training (default: xlm-roberta-large)')
    args = parser.parse_args()
    main(args)
| [
"[email protected]"
] | |
class StackEmptyError(IndexError):
    """Attempt to pop an empty stack.

    Subclasses IndexError so code that catches the exception list.pop()
    would raise still works.
    """
class Stack(object):
"""LIFO stack.
Implemented using a Python list; since stacks just need
to pop and push, a list is a good implementation, as
these are O(1) for native Python lists. However, in cases
where performance really matters, it might be best to
use a Python list directly, as it avoids the overhead
of a custom class.
Or, for even better performance (& typically smaller
memory footprint), you can use the `collections.deque`
object, which can act like a stack.
(We could also write our own LinkedList class for a
stack, where we push things onto the head and pop things
off the head (effectively reversing it), but that would be less
efficient than using a built-in Python list or a
`collections.deque` object)
"""
def __init__(self):
self._list = []
def __repr__(self):
if not self._list:
return "<Stack (empty)>"
else:
return "<Stack tail=%s length=%d>" % (
self._list[-1], len(self._list))
def push(self, item):
"""Add item to end of stack."""
self._list.append(item)
def pop(self):
"""Remove item from end of stack and return it."""
if not self._list:
raise StackEmptyError()
return self._list.pop()
def __iter__(self):
"""Allow iteration over list.
__iter__ is a special method that, when defined,
allows you to loop over a list, so you can say things
like "for item in my_stack", and it will pop
successive items off.
"""
while True:
try:
yield self.pop()
except StackEmptyError:
raise StopIteration
def length(self):
"""Return length of stack::
>>> s = Stack()
>>> s.length()
0
>>> s.push("dog")
>>> s.push("cat")
>>> s.push("fish")
>>> s.length()
3
"""
return len(self._list)
def empty(self):
"""Empty stack::
>>> s = Stack()
>>> s.push("dog")
>>> s.push("cat")
>>> s.push("fish")
>>> s.length()
3
>>> s.empty()
>>> s.length()
0
"""
self._list = []
def is_empty(self):
"""Is stack empty?
>>> s = Stack()
>>> s.is_empty()
True
>>> s.push("dog")
>>> s.push("cat")
>>> s.push("fish")
>>> s.is_empty()
False
"""
return not bool(self._list)
if __name__ == "__main__":
    import doctest
    # NOTE: the bare `print` statements below are Python 2 syntax; run this
    # self-test under a Python 2 interpreter.
    print
    result = doctest.testmod()
    if not result.failed:
        print "ALL TESTS PASSED. GOOD WORK!"
    print
| [
"[email protected]"
] | |
0a81afd1bccfde119b3571c2a5ba4395ebb7b44f | e5cd01fd620e8e746a20b883de7ac32bec4feb5c | /Ejercicios python/PE4/PE4E3.py | 53f599b8b515986576a6731ce8932f4c3575fac2 | [] | no_license | eazapata/python | 0f6a422032d8fb70d26f1055dc97eed83fcdc572 | 559aa4151093a120527c459a406abd8f2ff6a7d8 | refs/heads/master | 2020-08-27T09:19:29.395109 | 2019-11-23T20:11:14 | 2019-11-23T20:11:14 | 217,314,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | #PE4E3 Eduardo Antonio Zapata Valero
#Pida al usuario si quiere calcular el área de un triángulo o un cuadrado,
#y pida los datos según que caso y muestre el resultado.
fig=(input("Quieres calcular el área de un triángulo (t) o de un cuadrado (c) "))
if (fig=="t"):
print ("Ha elegido triángulo, introduce base y altura del triángulo\n")
b=float(input())
h=float(input())
print("El área del triángulo es ",(b*h)/2)
elif(fig=="c"):
l=float(input("Has elegido cuadrado, introduce el valor del lado\n"))
print("El área del cudrado es ",(l*l))
else:
print("No se reconoce la figura que de la que quieres sacar el área")
| [
"[email protected]"
] | |
88f3a978e1ccdf33914b845f1988779d03433a82 | 3a2af7b4b801d9ba8d78713dcd1ed57ee35c0992 | /zerver/migrations/0051_realmalias_add_allow_subdomains.py | dec9cce79560fb47f11fae6a6962e964cc2a4a00 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | timabbott/zulip | 2b69bd3bb63539adbfc4c732a3ff9d52657f40ac | 42f239915526180a1a0cd6c3761c0efcd13ffe6f | refs/heads/master | 2023-08-30T21:45:39.197724 | 2020-02-13T23:09:22 | 2020-06-25T21:46:33 | 43,171,533 | 6 | 9 | Apache-2.0 | 2020-02-24T20:12:52 | 2015-09-25T19:34:16 | Python | UTF-8 | Python | false | false | 541 | py | # Generated by Django 1.10.5 on 2017-01-25 20:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add RealmAlias.allow_subdomains and make (realm, domain) unique together."""
    dependencies = [
        ('zerver', '0050_userprofile_avatar_version'),
    ]
    operations = [
        # New flag: whether this alias also covers subdomains of `domain`.
        migrations.AddField(
            model_name='realmalias',
            name='allow_subdomains',
            field=models.BooleanField(default=False),
        ),
        # Domains are now constrained to be unique per (realm, domain) pair.
        migrations.AlterUniqueTogether(
            name='realmalias',
            unique_together={('realm', 'domain')},
        ),
    ]
| [
"[email protected]"
] | |
1176757494ee948beb10dc386770bfbd2a823956 | a29310948867f5f07109fcd225a84282ad7eea16 | /design_models/template_method.py | c4f800913310ae0c850b9c6b745efc7ed06b179d | [] | no_license | likeweilikewei/Python-study-demo | 09b266c0756b6e340e8b8e3153a7e497be8ee1a9 | 7dd4bc851273a5815d8980f9857828abfa5364a7 | refs/heads/master | 2020-06-26T21:17:27.095532 | 2019-07-31T02:17:43 | 2019-07-31T02:17:43 | 199,760,324 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,374 | py | #!/usr/bin/python
# coding:utf8
'''
Template Method
Template-method pattern:
    When to use: the same overall procedure repeats, but the concrete
    details differ between scenarios.
    Structure: the shared logic is pulled up into a skeleton (classically a
    parent class; here, skeleton functions) while concrete steps are
    supplied separately -- an abstraction over the procedure's logic.
'''
# Sample data shared by the getters below, plus an output separator.
ingredients = "spam eggs apple"
line = '-' * 10
# Skeletons
def iter_elements(getter, action):
    """Template skeleton: apply *action* to each of the getter's items,
    printing a separator line after every one."""
    for item in getter():
        action(item)
        print(line)
def rev_elements(getter, action):
    """Template skeleton: apply *action* to the getter's items in reverse
    order, printing a separator line after every one."""
    for item in getter()[::-1]:
        action(item)
        print(line)
# Getters
def get_list():
    """Return the ingredient words as a list of strings."""
    return ingredients.split()
def get_lists():
    """Return each ingredient word exploded into a list of characters."""
    return [list(word) for word in ingredients.split()]
# Actions
def print_item(item):
    """Action: print one item as-is."""
    print(item)
def reverse_item(item):
    """Action: print one item reversed."""
    print(item[::-1])
# Makes templates
def make_template(skeleton, getter, action):
    """Bind *getter* and *action* into *skeleton*; return the 0-arg template."""
    def template():
        # Defer to the skeleton with the pre-bound concrete steps.
        skeleton(getter, action)
    return template
# Create our template functions
# Build one template per (skeleton, getter, action) combination:
# 2 getters x 2 actions x 2 skeletons = 8 template functions.
templates = [make_template(s, g, a)
             for g in (get_list, get_lists)
             for a in (print_item, reverse_item)
             for s in (iter_elements, rev_elements)]
# Execute them
for template in templates:
    template()
| [
"1293120583@qq,com"
] | 1293120583@qq,com |
056c9e4811f80752b17207d170437781ff891727 | ad553dd718a8df51dabc9ba636040da740db57cf | /.history/app_20181213160433.py | 0cdeeb5a5dad82ce5b51349eae825d57abecf7ae | [] | no_license | NergisAktug/E-Commerce-PythonWithFlask-Sqlite3 | 8e67f12c28b11a7a30d13788f8dc991f80ac7696 | 69ff4433aa7ae52ef854d5e25472dbd67fd59106 | refs/heads/main | 2023-01-01T14:03:40.897592 | 2020-10-19T20:36:19 | 2020-10-19T20:36:19 | 300,379,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,372 | py | import datetime
from flask import Flask,flash, request, render_template_string, render_template
from flask import Flask, url_for, render_template, request, redirect, session, escape, render_template_string
from flask_babelex import Babel
from flask_sqlalchemy import SQLAlchemy
from flask_user import current_user, login_required, roles_required
from sqlalchemy.sql import table, column, select
from sqlalchemy import MetaData, create_engine
from flask_user import login_required, roles_required, UserManager, UserMixin
from flask_login import login_user
class ConfigClass(object):
    """Flask / Flask-User / Flask-Mail configuration.

    SECURITY NOTE(review): the secret key and a real-looking mail password
    are hard-coded here; they should come from environment variables or an
    untracked config file before any deployment.
    """
    SECRET_KEY = 'This is an INSECURE secret!! DO NOT use this in production!!'
    # SQLite database used by SQLAlchemy.
    SQLALCHEMY_DATABASE_URI = 'sqlite:///eticaret.sqlite'
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # Outgoing mail settings: Gmail over SSL (port 465, TLS off).
    MAIL_SERVER = 'smtp.gmail.com'
    MAIL_PORT = 465
    MAIL_USE_SSL = True
    MAIL_USE_TLS = False
    MAIL_USERNAME = '[email protected]'
    MAIL_PASSWORD = '05383896877'
    MAIL_DEFAULT_SENDER = '"MyApp" <[email protected]>'
    # Flask-User: log in with email only (no separate username).
    USER_ENABLE_EMAIL = True
    USER_ENABLE_USERNAME = False
    USER_EMAIL_SENDER_EMAIL = "[email protected]"
def create_app():
    """Application factory: build the Flask app, the ORM models and all routes.

    Returns:
        The configured Flask application.
    """
    app = Flask(__name__)
    app.config.from_object(__name__ + '.ConfigClass')
    db = SQLAlchemy(app)

    # --------------------------------------------------------------- models
    class Kullanici(db.Model):
        # Login account; rolId references the account's role in the rol table.
        __tablename__ = 'Kullanici'
        id = db.Column(db.Integer, primary_key=True)
        email = db.Column(db.String(80), unique=True)
        # NOTE(review): passwords are stored in plain text -- hash them.
        sifre = db.Column(db.String(80))
        rolId = db.Column(db.Integer, db.ForeignKey('rol.rolId', ondelete='CASCADE'))

        def __init__(self, email, sifre, rolId):
            self.email = email
            self.sifre = sifre
            self.rolId = rolId

    class Roller(db.Model):
        # Role lookup table (admin / regular user, presumably).
        __tablename__ = 'rol'
        rolId = db.Column(db.Integer, primary_key=True)
        rolisim = db.Column(db.String(80))

    class urunler(db.Model):
        # Product: name, image path, price and a brand reference.
        __tablename__ = 'urunler'
        urun_id = db.Column(db.Integer, primary_key=True)
        urunismi = db.Column(db.String(80))
        urunresmi = db.Column(db.String(80))
        urunFiyati = db.Column(db.Integer)
        markaId = db.Column(db.Integer(), db.ForeignKey('markalar.markaId', ondelete='CASCADE'))

        def __init__(self, urunismi, urunresmi, urunFiyati, markaId):
            self.urunismi = urunismi
            self.urunresmi = urunresmi
            self.urunFiyati = urunFiyati
            self.markaId = markaId

    class markalar(db.Model):
        # Brand: name and model.
        __tablename__ = 'markalar'
        markaId = db.Column(db.Integer, primary_key=True)
        markaadi = db.Column(db.String(80))
        marka_modeli = db.Column(db.String(80))

        def __init__(self, markaadi, marka_modeli):
            self.markaadi = markaadi
            self.marka_modeli = marka_modeli

    class musteri(db.Model):
        # Customer profile, linked to a Kullanici login account.
        __tablename__ = 'musteri'
        musteriId = db.Column(db.Integer, primary_key=True)
        musteriadi = db.Column(db.String(80))
        musterisoyadi = db.Column(db.String(80))
        mail = db.Column(db.String(80), unique=True)
        telefon = db.Column(db.Integer)
        sifre = db.Column(db.String(80))
        il = db.Column(db.String(80))
        ilce = db.Column(db.String(80))
        kullaniciId = db.Column(db.Integer(), db.ForeignKey('Kullanici.id', ondelete='CASCADE'))

        def __init__(self, musteriadi, musterisoyadi, mail, telefon, sifre, il, ilce, kullaniciId):
            self.musteriadi = musteriadi
            self.musterisoyadi = musterisoyadi
            self.mail = mail
            self.telefon = telefon
            self.sifre = sifre
            self.il = il
            self.ilce = ilce
            self.kullaniciId = kullaniciId

    class siparis(db.Model):
        # Order: customer + product plus order number, date and payment id.
        __tablename__ = 'siparis'
        siparisId = db.Column(db.Integer, primary_key=True)
        musteriId = db.Column(db.Integer(), db.ForeignKey('musteri.musteriId', ondelete='CASCADE'))
        urunId = db.Column(db.Integer(), db.ForeignKey('urunler.urun_id', ondelete='CASCADE'))
        siparisno = db.Column(db.Integer)
        siparisTarihi = db.Column(db.Integer)
        odemeId = db.Column(db.Integer())

        def __init__(self, musteriId, urunId, siparisno, siparisTarihi, odemeId):
            self.musteriId = musteriId
            self.urunId = urunId
            self.siparisno = siparisno
            self.siparisTarihi = siparisTarihi
            self.odemeId = odemeId

    db.create_all()

    # BUG FIX: the original code did ``urunler = urunler.query.all()`` here,
    # rebinding the *model class* name to a plain list -- every later
    # ``urunler.query`` call (anasayfa, urunGoster, sil, guncelle) would then
    # fail with AttributeError.  Routes now query the table when they need it.

    # --------------------------------------------------------------- routes
    @app.route('/')
    def anasayfa():
        # Home page with the full product list.
        tumVeri = urunler.query.all()
        return render_template('index.html', tumVeri=tumVeri)

    @app.route('/kayit', methods=['GET', 'POST'])
    def kayit():
        # Sign-up: create a Kullanici row from the posted form.
        if request.method == 'POST':
            mail = request.form['email']
            parola = request.form['sifre']
            # BUG FIX: the original line ended in a dangling ``,ro)`` -- a
            # SyntaxError -- and omitted the required rolId argument.  Role
            # id 2 is assumed to be the regular-user role; TODO confirm
            # against the contents of the ``rol`` table.
            yeniKullanici = Kullanici(email=mail, sifre=parola, rolId=2)
            db.session.add(yeniKullanici)
            db.session.commit()
            mesaj = "Kayıt Başarıyla Sağlanmıştır."
            return render_template("index.html", mesaj=mesaj)
        return render_template('kayit.html')

    @app.route('/admin')
    def admin():
        # Admin landing page (no access control here -- see giris()).
        return render_template("admin.html")

    @app.route('/uye', methods=['GET', 'POST'])
    def uye():
        # Member login page.
        return render_template("uyeGirisi.html")

    @app.route('/giris', methods=['GET', 'POST'])
    def giris():
        # Login: look the account up by email + password.
        if request.method == 'GET':
            return render_template('uyeGiris.html')
        email = request.form['email']
        sifre = request.form['sifre']
        data = Kullanici.query.filter_by(email=email, sifre=sifre).first()
        if data is not None:
            tumUrunler = urunler.query.all()
            # BUG FIX: Kullanici has no ``yetki`` column -- the original
            # filtered on it and would raise at runtime.  Role id 1 is
            # treated as admin; TODO confirm against the ``rol`` table.
            if data.rolId == 1:
                session['admin_giris'] = True
                return render_template('admin.html', yetki=1, giris=session['admin_giris'], urunler=tumUrunler)
            session['uye_giris'] = True
            return render_template('index.html', yetki=0, giris=session['uye_giris'], urunler=tumUrunler)
        return render_template('uyeGiris.html')

    @app.route('/cikis')
    def cikis():
        # Logout: clear both session flags.
        session.pop('admin_giris', None)
        session.pop('uye_giris', None)
        return render_template("index.html")

    @app.route('/urunEkle')
    def urunGoster():
        # Admin product list / add form.
        tumVeri = urunler.query.all()
        return render_template("urunEkle.html", tumVeri=tumVeri)

    @app.route('/urunEklemeYap', methods=['POST'])
    def urunEklemeYap():
        # Create a product from the posted form, then return to the list.
        yeniUrun = urunler(
            urunismi=request.form['urunismi'],
            urunresmi=request.form['urunresmi'],
            urunFiyati=request.form['fiyati'],
            markaId=request.form['markaId'],
        )
        db.session.add(yeniUrun)
        db.session.commit()
        return redirect(url_for("urunGoster"))

    @app.route("/sil/<string:id>")
    def sil(id):
        # Delete the product with the given id.
        urun = urunler.query.filter_by(urun_id=id).first()
        db.session.delete(urun)
        db.session.commit()
        return redirect(url_for('urunGoster'))

    @app.route('/guncelle/<string:id>', methods=['POST', 'GET'])
    def guncelle(id):
        # Update a product in place from the posted form.
        try:
            urun = urunler.query.filter_by(urun_id=id).first()
            urun.urunismi = request.form.get("urunİsmi")
            urun.urunresmi = request.form.get("urunresmi")
            urun.urunFiyati = request.form.get("urunFiyati")
            urun.markaId = request.form.get("markaId")
            db.session.commit()
        except Exception as e:
            # Best-effort update: log and fall through to the redirect
            # (preserves the original behavior).
            print("güncelleme yapılamadı")
            print(e)
        return redirect(url_for('urunGoster'))

    @app.route('/sepet')
    def sepet():
        # Shopping-cart page.
        return render_template("sepet.html")

    @app.route('/Markalar')
    def Markalar():
        # Brand listing page.
        tumMarka = markalar.query.all()
        return render_template("marka.html", tumMarka=tumMarka)

    return app
if __name__ == '__main__':
    # Build the app via the factory and serve it locally with debug enabled.
    app=create_app()
    app.run(host='127.0.0.1', port=5000, debug=True)
"[email protected]"
] | |
3d58de779e6e9ce278cac6d0c11ec7646a8fb43e | 8f3336bbf7cd12485a4c52daa831b5d39749cf9b | /Python/maximum-average-subarray-i.py | a92f4789fc9c877e00d034b9d34aa0c4a577f269 | [] | no_license | black-shadows/LeetCode-Topicwise-Solutions | 9487de1f9a1da79558287b2bc2c6b28d3d27db07 | b1692583f7b710943ffb19b392b8bf64845b5d7a | refs/heads/master | 2022-05-30T22:16:38.536678 | 2022-05-18T09:18:32 | 2022-05-18T09:18:32 | 188,701,704 | 240 | 110 | null | 2020-05-08T13:04:36 | 2019-05-26T15:41:03 | C++ | UTF-8 | Python | false | false | 398 | py | # Time: O(n)
# Space: O(1)
class Solution(object):
    """LeetCode 643: Maximum Average Subarray I."""

    def findMaxAverage(self, nums, k):
        """Return the maximum average of any contiguous subarray of length k.

        Sliding window: keep the running sum of the current k-window and
        track the best window sum seen.  O(n) time, O(1) extra space.

        :type nums: List[int]
        :type k: int
        :rtype: float
        """
        # Sum of the first window doubles as the best seen so far.
        result = total = sum(nums[:k])
        # Slide the window right: add the entering element, drop the
        # leaving one.  FIX: the original used ``xrange``, which is
        # Python 2 only (NameError on Python 3); ``range`` works on both.
        for i in range(k, len(nums)):
            total += nums[i] - nums[i - k]
            result = max(result, total)
        return float(result) / k
| [
"[email protected]"
] | |
a81da2eb335b9334d5ffe13dc3ee8929dd6a7c6e | 31b83dbd1098fbba49a1d559f9ecac4d3b118fc8 | /pyEX/premium/wallstreethorizon/__init__.py | 5cfb183f73a4383b500379e1896460420d585f9a | [
"Apache-2.0"
] | permissive | briangu/pyEX | 438f777bdf7661f47fe7b63a0a848d6e90f8e9df | 2eacc322932f4b686817b3d162cb1e4f399fd696 | refs/heads/main | 2023-07-15T16:15:17.277704 | 2021-08-17T16:36:36 | 2021-08-17T16:36:36 | 331,754,038 | 0 | 0 | Apache-2.0 | 2021-01-21T21:01:48 | 2021-01-21T21:01:48 | null | UTF-8 | Python | false | false | 23,317 | py | # *****************************************************************************
#
# Copyright (c) 2020, the pyEX authors.
#
# This file is part of the jupyterlab_templates library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
from functools import wraps
from ...common import _interval
from ...stocks import timeSeries, timeSeriesDF
@_interval(hours=4)  # presumably caches/throttles on a 4-hour interval -- see _interval
def _base(id, symbol="", **kwargs):
    """Internal: dispatch a Wall Street Horizon time-series request.

    Sets the time-series ``id`` and uses *symbol* (falling back to any
    existing ``key`` kwarg) as the lookup key, then delegates to
    ``timeSeries``.
    """
    kwargs["id"] = id
    kwargs["key"] = symbol or kwargs.pop("key", "")
    return timeSeries(**kwargs)
@_interval(hours=4)  # presumably caches/throttles on a 4-hour interval -- see _interval
def _baseDF(id, symbol="", **kwargs):
    """Internal: DataFrame variant of ``_base``.

    Same id/key dispatch, but delegates to ``timeSeriesDF``.
    """
    kwargs["id"] = id
    kwargs["key"] = symbol or kwargs.pop("key", "")
    return timeSeriesDF(**kwargs)
@wraps(timeSeries)
def analystDays(symbol="", **kwargs):
    """Analyst Days: a meeting where company executives provide information
    about the company's performance and its future prospects.

    https://iexcloud.io/docs/api/#analyst-days

    Args:
        symbol (str): symbol to use
    """
    series_id = "PREMIUM_WALLSTREETHORIZON_ANALYST_DAY"
    return _base(id=series_id, symbol=symbol, **kwargs)
@wraps(timeSeries)
def analystDaysDF(symbol="", **kwargs):
    """Analyst Days (DataFrame variant): a meeting where company executives
    provide information about the company's performance and its future
    prospects.

    https://iexcloud.io/docs/api/#analyst-days

    Args:
        symbol (str): symbol to use
    """
    series_id = "PREMIUM_WALLSTREETHORIZON_ANALYST_DAY"
    return _baseDF(id=series_id, symbol=symbol, **kwargs)
@wraps(timeSeries)
def boardOfDirectorsMeeting(symbol="", **kwargs):
"""This is an end-point for getting information about a formal meeting of a company’s board of directors to establish corporate management related policies and to make decisions on major company issues.
https://iexcloud.io/docs/api/#analyst-days
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_BOARD_OF_DIRECTORS_MEETING",
symbol=symbol,
**kwargs
)
@wraps(timeSeries)
def boardOfDirectorsMeetingDF(symbol="", **kwargs):
"""This is a meeting where company executives provide information about the company’s performance and its future prospects.
https://iexcloud.io/docs/api/#board-of-directors-meeting
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_BOARD_OF_DIRECTORS_MEETING",
symbol=symbol,
**kwargs
)
@wraps(timeSeries)
def businessUpdates(symbol="", **kwargs):
"""This is a meeting orconference call in which company information is reviewed by one or more company executives.
https://iexcloud.io/docs/api/#business-updates
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_BUSINESS_UPDATE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def businessUpdatesDF(symbol="", **kwargs):
"""This is a meeting orconference call in which company information is reviewed by one or more company executives.
https://iexcloud.io/docs/api/#business-updates
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_BUSINESS_UPDATE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def buybacks(symbol="", **kwargs):
"""The repurchase of outstanding shares by a company to reduce the number of shares on the market.
https://iexcloud.io/docs/api/#buybacks
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_BUYBACK", symbol=symbol, **kwargs)
@wraps(timeSeries)
def buybacksDF(symbol="", **kwargs):
"""The repurchase of outstanding shares by a company to reduce the number of shares on the market.
https://iexcloud.io/docs/api/#buybacks
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_BUYBACK", symbol=symbol, **kwargs)
@wraps(timeSeries)
def capitalMarketsDay(symbol="", **kwargs):
"""This is a meeting where company executives provide information about the company’s performance and its future prospects.
https://iexcloud.io/docs/api/#capital-markets-day
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_CAPITAL_MARKETS_DAY", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def capitalMarketsDayDF(symbol="", **kwargs):
"""This is a meeting where company executives provide information about the company’s performance and its future prospects.
https://iexcloud.io/docs/api/#capital-markets-day
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_CAPITAL_MARKETS_DAY", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def companyTravel(symbol="", **kwargs):
"""This is a roadshow or bus tour event in which one or more company executives speaks to interested investors and analysts.
https://iexcloud.io/docs/api/#company-travel
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_COMPANY_TRAVEL", symbol=symbol, **kwargs)
@wraps(timeSeries)
def companyTravelDF(symbol="", **kwargs):
"""This is a roadshow or bus tour event in which one or more company executives speaks to interested investors and analysts.
https://iexcloud.io/docs/api/#company-travel
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_COMPANY_TRAVEL", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def filingDueDates(symbol="", **kwargs):
"""This is an estimated date, based on historical trends for this company in which a company must file the appropriate Form for the quarter/year or file for an extension.
https://iexcloud.io/docs/api/#filing-due-dates
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_FILING_DUE_DATE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def filingDueDatesDF(symbol="", **kwargs):
"""This is an estimated date, based on historical trends for this company in which a company must file the appropriate Form for the quarter/year or file for an extension.
https://iexcloud.io/docs/api/#filing-due-dates
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_FILING_DUE_DATE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def fiscalQuarterEnd(symbol="", **kwargs):
"""This is a forecasted quarterly ending announcement date for a company. This may or may not correspond to a calendar quarter.
https://iexcloud.io/docs/api/#fiscal-quarter-end
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_FISCAL_QUARTER_END_DATE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def fiscalQuarterEndDF(symbol="", **kwargs):
"""This is a forecasted quarterly ending announcement date for a company. This may or may not correspond to a calendar quarter.
https://iexcloud.io/docs/api/#fiscal-quarter-end
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_FISCAL_QUARTER_END_DATE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def forum(symbol="", **kwargs):
"""This is a meeting where ideas and views of a business nature can be exchanged.
https://iexcloud.io/docs/api/#forum
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_FORUM", symbol=symbol, **kwargs)
@wraps(timeSeries)
def forumDF(symbol="", **kwargs):
"""This is a meeting where ideas and views of a business nature can be exchanged.
https://iexcloud.io/docs/api/#forum
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_FORUM", symbol=symbol, **kwargs)
@wraps(timeSeries)
def generalConference(symbol="", **kwargs):
"""This is a formal meeting in which representatives of many companies gather to discuss ideas or issues related to a particular topic or business, usually held for several days. This item indicates at least one representative from the company will be presenting at the conference on the specified date and time. Note: Conference details include full Conference dates.
https://iexcloud.io/docs/api/#general-conference
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_GENERAL_CONFERENCE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def generalConferenceDF(symbol="", **kwargs):
"""This is a formal meeting in which representatives of many companies gather to discuss ideas or issues related to a particular topic or business, usually held for several days. This item indicates at least one representative from the company will be presenting at the conference on the specified date and time. Note: Conference details include full Conference dates.
https://iexcloud.io/docs/api/#general-conference
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_GENERAL_CONFERENCE", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def fdaAdvisoryCommitteeMeetings(symbol="", **kwargs):
"""The FDA uses 50 committees and panels to obtain independent expert advice on scientific, technical, and policy matters
https://iexcloud.io/docs/api/#fda-advisory-committee-meetings
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_STOCK_SPECIFIC_FDA_ADVISORY_COMMITTEE_MEETING",
symbol=symbol,
**kwargs
)
@wraps(timeSeries)
def fdaAdvisoryCommitteeMeetingsDF(symbol="", **kwargs):
"""The FDA uses 50 committees and panels to obtain independent expert advice on scientific, technical, and policy matters
https://iexcloud.io/docs/api/#fda-advisory-committee-meetings
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_STOCK_SPECIFIC_FDA_ADVISORY_COMMITTEE_MEETING",
symbol=symbol,
**kwargs
)
@wraps(timeSeries)
def holidaysWSH(symbol="", **kwargs):
"""This returns a list of market holidays.
https://iexcloud.io/docs/api/#holidays
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_HOLIDAYS", symbol=symbol, **kwargs)
@wraps(timeSeries)
def holidaysWSHDF(symbol="", **kwargs):
"""This returns a list of market holidays.
https://iexcloud.io/docs/api/#holidays
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_HOLIDAYS", symbol=symbol, **kwargs)
@wraps(timeSeries)
def indexChanges(symbol="", **kwargs):
"""This shows additions and removals from various indexes for particular stocks.
https://iexcloud.io/docs/api/#index-changes
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_INDEX_CHANGE", symbol=symbol, **kwargs)
@wraps(timeSeries)
def indexChangesDF(symbol="", **kwargs):
"""This shows additions and removals from various indexes for particular stocks.
https://iexcloud.io/docs/api/#index-changes
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_INDEX_CHANGE", symbol=symbol, **kwargs)
@wraps(timeSeries)
def iposWSH(symbol="", **kwargs):
"""Get a list of upcoming IPOs.
https://iexcloud.io/docs/api/#ipos
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_INITIAL_PUBLIC_OFFERING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def iposWSHDF(symbol="", **kwargs):
"""Get a list of upcoming IPOs.
https://iexcloud.io/docs/api/#ipos
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_INITIAL_PUBLIC_OFFERING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def legalActions(symbol="", **kwargs):
"""These are legal actions where an individual represents a group in a court claim. The judgment from the suit is for all the members of the group or class.
https://iexcloud.io/docs/api/#legal-actions
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_LEGAL_ACTIONS", symbol=symbol, **kwargs)
@wraps(timeSeries)
def legalActionsDF(symbol="", **kwargs):
"""These are legal actions where an individual represents a group in a court claim. The judgment from the suit is for all the members of the group or class.
https://iexcloud.io/docs/api/#legal-actions
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_LEGAL_ACTIONS", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def mergersAndAcquisitions(symbol="", **kwargs):
"""These are a type of corporate action in which two companies combine to form a single company, or one company is taken over by another.
https://iexcloud.io/docs/api/#mergers-acquisitions
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_MERGER_ACQUISITIONS", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def mergersAndAcquisitionsDF(symbol="", **kwargs):
"""These are a type of corporate action in which two companies combine to form a single company, or one company is taken over by another.
https://iexcloud.io/docs/api/#mergers-acquisitions
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_MERGER_ACQUISITIONS", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def productEvents(symbol="", **kwargs):
"""Represents movie and video releases. This is the date on which a movie distributor plans to release a movie to theaters
https://iexcloud.io/docs/api/#product-events
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_PRODUCT_EVENTS", symbol=symbol, **kwargs)
@wraps(timeSeries)
def productEventsDF(symbol="", **kwargs):
"""Represents movie and video releases. This is the date on which a movie distributor plans to release a movie to theaters
https://iexcloud.io/docs/api/#product-events
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_PRODUCT_EVENTS", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def researchAndDevelopmentDays(symbol="", **kwargs):
"""This is a day in which investors and analysts can meet with a company’s R&D representatives to learn more about new or improved products and services.
https://iexcloud.io/docs/api/#research-and-development-days
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_RD_DAY", symbol=symbol, **kwargs)
@wraps(timeSeries)
def researchAndDevelopmentDaysDF(symbol="", **kwargs):
"""This is a day in which investors and analysts can meet with a company’s R&D representatives to learn more about new or improved products and services.
https://iexcloud.io/docs/api/#research-and-development-days
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_RD_DAY", symbol=symbol, **kwargs)
@wraps(timeSeries)
def sameStoreSales(symbol="", **kwargs):
"""Same-store sales, also referred to as comparable-store sales, SSS or identical-store sales, is a financial metric that companies in the retail industry use to evaluate the total dollar amount of sales in the company’s stores that have been operating for a year or more.
https://iexcloud.io/docs/api/#same-store-sales
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_SAME_STORE_SALES", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def sameStoreSalesDF(symbol="", **kwargs):
"""Same-store sales, also referred to as comparable-store sales, SSS or identical-store sales, is a financial metric that companies in the retail industry use to evaluate the total dollar amount of sales in the company’s stores that have been operating for a year or more.
https://iexcloud.io/docs/api/#same-store-sales
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_SAME_STORE_SALES", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def secondaryOfferings(symbol="", **kwargs):
"""Secondary Offerings are the issuance of new stock for public sale from a company that has already made its initial public offering (IPO).
Usually, these kinds of public offerings are made by companies wishing to refinance, or raise capital for growth.
Money raised from these kinds of secondary offerings goes to the company, through the investment bank that underwrites the offering.
Investment banks are issued an allotment, and possibly an overallotment which they may choose to exercise if there is a strong possibility of making money on the spread between the allotment price and the selling price of the securities. Short Selling is prohibited during the period of the secondary offering.
https://iexcloud.io/docs/api/#secondary-offerings
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_SECONDARY_OFFERING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def secondaryOfferingsDF(symbol="", **kwargs):
"""Secondary Offerings are the issuance of new stock for public sale from a company that has already made its initial public offering (IPO).
Usually, these kinds of public offerings are made by companies wishing to refinance, or raise capital for growth.
Money raised from these kinds of secondary offerings goes to the company, through the investment bank that underwrites the offering.
Investment banks are issued an allotment, and possibly an overallotment which they may choose to exercise if there is a strong possibility of making money on the spread between the allotment price and the selling price of the securities. Short Selling is prohibited during the period of the secondary offering.
https://iexcloud.io/docs/api/#secondary-offerings
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_SECONDARY_OFFERING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def seminars(symbol="", **kwargs):
"""This is an educational event that features one or more subject matter experts delivering information via lecture and discussion.
https://iexcloud.io/docs/api/#seminars
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_SEMINAR", symbol=symbol, **kwargs)
@wraps(timeSeries)
def seminarsDF(symbol="", **kwargs):
"""This is an educational event that features one or more subject matter experts delivering information via lecture and discussion.
https://iexcloud.io/docs/api/#seminars
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_SEMINAR", symbol=symbol, **kwargs)
@wraps(timeSeries)
def shareholderMeetings(symbol="", **kwargs):
"""This is a meeting, held at least annually, to elect members to the board of directors and hear reports on the business’ financial situation as well as new policy initiatives from the corporation’s management.
https://iexcloud.io/docs/api/#shareholder-meetings
Args:
symbol (str): symbol to use
"""
return _base(
id="PREMIUM_WALLSTREETHORIZON_SHAREHOLDER_MEETING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def shareholderMeetingsDF(symbol="", **kwargs):
"""This is a meeting, held at least annually, to elect members to the board of directors and hear reports on the business’ financial situation as well as new policy initiatives from the corporation’s management.
https://iexcloud.io/docs/api/#shareholder-meetings
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_SHAREHOLDER_MEETING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def summitMeetings(symbol="", **kwargs):
"""This is a gathering of people who are interested in the same business subject or topic.
https://iexcloud.io/docs/api/#summit-meetings
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_SUMMIT_MEETING", symbol=symbol, **kwargs)
@wraps(timeSeries)
def summitMeetingsDF(symbol="", **kwargs):
"""This is a gathering of people who are interested in the same business subject or topic.
https://iexcloud.io/docs/api/#summit-meetings
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_SUMMIT_MEETING", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def tradeShows(symbol="", **kwargs):
"""This is a large gathering in which different companies in a particular field or industry show their products to possible customers.
https://iexcloud.io/docs/api/#trade-shows
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_TRADE_SHOW", symbol=symbol, **kwargs)
@wraps(timeSeries)
def tradeShowsDF(symbol="", **kwargs):
"""This is a large gathering in which different companies in a particular field or industry show their products to possible customers.
https://iexcloud.io/docs/api/#trade-shows
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_TRADE_SHOW", symbol=symbol, **kwargs)
@wraps(timeSeries)
def witchingHours(symbol="", **kwargs):
"""This is when option contracts and futures contracts expire on the exact same day.
https://iexcloud.io/docs/api/#witching-hours
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_WITCHING_HOURS", symbol=symbol, **kwargs)
@wraps(timeSeries)
def witchingHoursDF(symbol="", **kwargs):
"""This is when option contracts and futures contracts expire on the exact same day.
https://iexcloud.io/docs/api/#witching-hours
Args:
symbol (str): symbol to use
"""
return _baseDF(
id="PREMIUM_WALLSTREETHORIZON_WITCHING_HOURS", symbol=symbol, **kwargs
)
@wraps(timeSeries)
def workshops(symbol="", **kwargs):
"""This is a meeting or series of meetings at which a group of people engage in discussion and activity on a particular subject, product or service to gain hands-on experience.
https://iexcloud.io/docs/api/#workshops
Args:
symbol (str): symbol to use
"""
return _base(id="PREMIUM_WALLSTREETHORIZON_WORKSHOP", symbol=symbol, **kwargs)
@wraps(timeSeries)
def workshopsDF(symbol="", **kwargs):
"""This is a meeting or series of meetings at which a group of people engage in discussion and activity on a particular subject, product or service to gain hands-on experience.
https://iexcloud.io/docs/api/#workshops
Args:
symbol (str): symbol to use
"""
return _baseDF(id="PREMIUM_WALLSTREETHORIZON_WORKSHOP", symbol=symbol, **kwargs)
| [
"[email protected]"
] | |
308bff52ce577ba49c9ba46d0fd7277f04669f7f | 0e94b21a64e01b992cdc0fff274af8d77b2ae430 | /spider/004_kekeenglish_daysentence.py | 35df50ab90f89a35b0fc40370d4b4fef02e20b22 | [] | no_license | yangnaGitHub/LearningProcess | 1aed2da306fd98f027dcca61309082f42b860975 | 250a8b791f7deda1e716f361a2f847f4d12846d3 | refs/heads/master | 2020-04-15T16:49:38.053846 | 2019-09-05T05:52:04 | 2019-09-05T05:52:04 | 164,852,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,383 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 11 13:18:16 2018
@author: Administrator
"""
import re
from lxml import etree
import urllib.request
import xlwt
# Fetch the index page of the target category on kekenet.com.
# NOTE(review): no HTTP/network error handling anywhere -- any failure aborts.
response=urllib.request.urlopen('http://www.kekenet.com/kouyu/primary/chuji/')
html = response.read().decode("utf-8")
tr = etree.HTML(html)
# Leftover XPath experiments from an earlier scraping target, kept inert:
#//div[@class="tb-btn-wait"]
#//ul[contains(@class,"J_TSaleProp")]
#//div[contains(@class,"tb-btn-buy")]/a[@id="J_LinkBuy"]
#contents = tr.xpath('//ul[@id="menu-list"]/li')
# Pager links carry page numbers; the largest one is the total page count.
contents = tr.xpath('//div[@class="page th"]/a')
total_pages = 0
for content in contents:
    total_pages = max(total_pages, int(content.text))
book = xlwt.Workbook()
sheet = book.add_sheet('translation')
row = 0
contentTexts = {}   # english part -> chinese part (in-memory copy of the sheet)
errorRecords = {}   # "<page><index><row>" -> full text-node list, for debugging
# Walk listing pages from last to first; the page fetched above serves as
# page == total_pages, the rest use the List_%d.shtml URL pattern.
for page in range(total_pages, 0, -1):
    if total_pages != page:
        response=urllib.request.urlopen('http://www.kekenet.com/kouyu/primary/chuji/List_%d.shtml' % page)
        html = response.read().decode("utf-8")
        tr = etree.HTML(html)
    allTests = tr.xpath("//text()")  # all text nodes on the page
    contents = tr.xpath('//ul[@id="menu-list"]/li/h2/a')
    prepareTexts = []
    for content in contents:
        prepareTexts.append(content.text)
    for index, allTest in enumerate(allTests):
        if allTest in prepareTexts:
            # Heuristic tied to the site layout: the sentence usually sits 3
            # text nodes after the article title; if that node does not start
            # with a latin letter, fall back to 2 nodes after.  TODO confirm
            # the layout still matches.
            needText = allTests[index + 3].replace('\n', '').replace(',', ',').replace('。', '.')
            if re.findall('^[a-zA-Z]', needText):
                pass
            else:
                needText = allTests[index + 2].replace('\n', '').replace(',', ',').replace('。', '.')
            try:
                # Split at the first CJK character: english part | chinese part.
                slicePos = needText.find(re.findall('[\u2E80-\u9FFF]+', needText)[0])
                contentTexts[needText[:slicePos].replace('\n', '')] = needText[slicePos:].replace('\n', '').replace(',', ',').replace('。', '.')
                firstStr = needText[:slicePos].replace('\n', '')
                secondStr = needText[slicePos:].replace('\n', '').replace(',', ',').replace('。', '.')
            except IndexError:
                # No CJK character found: log it, keep the raw nodes for
                # inspection, and write an empty row to keep numbering aligned.
                print('find error (%d %d %d: %s)' % (page, index, row+1, allTest))
                errorRecords[str(page) + str(index) + str(row+1)] = allTests
                firstStr = ''
                secondStr = ''
            sheet.write(row, 0, firstStr)
            sheet.write(row, 1, secondStr)
            row += 1
# NOTE(review): xlwt writes the legacy .xls format; saving under an .xlsx
# extension yields a file Excel may reject -- consider 'translation.xls'.
book.save('translation.xlsx')
| [
"[email protected]"
] | |
3dbf3e87b4b004b83e913dd989ed2ab900c5eb16 | b9e9c89567894fd7e5ddfd27fe9068a074a92df7 | /pyramid_signup/tests/test_init.py | d1de398dc5429102f7465cb8ee45667f5212c697 | [] | no_license | AnneGilles/pyramid_signup | 8aeea113176dd64a326caa5f7704026e0538c94a | 0622d951e686f0926291d98559a6b4afa2c81241 | refs/heads/master | 2021-01-18T17:48:48.260300 | 2011-12-09T04:56:44 | 2011-12-09T04:56:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | from pyramid import testing
from pyramid_signup.models import User
from pyramid_signup.tests import UnitTestBase
from mock import patch
from mock import Mock
class TestInitCase(UnitTestBase):
def test_root_factory(self):
from pyramid_signup import RootFactory
from pyramid.security import Everyone
from pyramid.security import Authenticated
from pyramid.security import Allow
from pyramid.security import ALL_PERMISSIONS
root_factory = RootFactory(testing.DummyRequest())
assert len(root_factory.__acl__) == 2
for ace in root_factory.__acl__:
assert ace[0] == Allow
if ace[1] == 'group:admin':
assert ace[2] == ALL_PERMISSIONS
elif ace[1] == Authenticated:
assert ace[2] == 'view'
def test_request_factory(self):
from pyramid_signup import SignUpRequestFactory
user1 = User(username='sontek', first_name='john')
self.session.add(user1)
self.session.flush()
with patch('pyramid_signup.unauthenticated_userid') as unauth:
unauth.return_value = 1
request = SignUpRequestFactory({})
request.registry = Mock()
getUtility = Mock()
getUtility.return_value = self.session
request.registry.getUtility = getUtility
user = request.user
assert user == user1
| [
"[email protected]"
] | |
36d1cdb0cf14edfe05793a672c0556d8c5875baa | d1e4f29e583ee964d63bc48554eaa73d67d58eb2 | /zerver/migrations/0222_userprofile_fluid_layout_width.py | 3b5c232bee7088bb888cc76437ff9bc3df92ee7b | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | hygolei/zulip | 299f636f9238f50b0d2746f1c371748f182f1f4e | 39fe66ab0824bc439929debeb9883c3046c6ed70 | refs/heads/master | 2023-07-11T22:50:27.434398 | 2021-08-09T10:07:35 | 2021-08-09T10:07:35 | 375,401,165 | 1 | 1 | Apache-2.0 | 2021-08-09T10:07:36 | 2021-06-09T15:20:09 | Python | UTF-8 | Python | false | false | 428 | py | # Generated by Django 1.11.20 on 2019-04-15 17:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0221_subscription_notifications_data_migration"),
]
operations = [
migrations.AddField(
model_name="userprofile",
name="fluid_layout_width",
field=models.BooleanField(default=False),
),
]
| [
"[email protected]"
] | |
590d2207a922188f883dab5476511635e22f0ab1 | 408f8c561a695ac20b792ba0c4a230c154dad347 | /scripts/slurm.py | 1526201ab8cdf66bbed803e8fe3ad1e4f9c182d0 | [] | no_license | andnp/acceleration-v2 | a407888c74a247e6d441259d50d77cf6194f728b | 52b8a42c3e315ddbb4549a3a941afda81e92be9b | refs/heads/master | 2022-11-26T05:42:17.680125 | 2020-08-02T23:25:01 | 2020-08-02T23:25:01 | 204,991,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,639 | py | import time
import sys
import os
sys.path.append(os.getcwd())
from src.utils.model import loadExperiment
from PyExpUtils.runner import SlurmArgs
from PyExpUtils.results.paths import listResultsPaths
from PyExpUtils.utils.generator import group
from PyExpUtils.runner.Slurm import schedule, slurmOptionsFromFile
# Command-line layout: argv[1]=entry script, argv[2]=slurm definition file,
# argv[3]=base path, argv[4]=runs, argv[5:]=experiment description files.
if len(sys.argv) < 4:
    print('Please run again using')
    print('python -m scripts.scriptName [src/entry.py] [path/to/slurm-def] [base_path] [runs] [paths/to/descriptions]...')
    exit(0)

args = SlurmArgs.SlurmArgsModel({
    'experiment_paths': sys.argv[5:],
    'base_path': sys.argv[3],
    # NOTE(review): argv[4] ("runs") is folded into the executable string
    # below, while 'runs' is hard-coded to 1 here -- confirm this is intended.
    'runs': 1,
    'slurm_path': sys.argv[2],
    'executable': "python " + sys.argv[1] + " " + sys.argv[4],
})
def generateMissing(paths):
    """Yield the index of every result path that has no errors_summary.npy yet.

    Indices are yielded lazily, in the order the paths arrive.
    """
    for idx, result_path in enumerate(paths):
        summary = result_path + '/errors_summary.npy'
        if not os.path.exists(summary):
            yield idx
def printProgress(size, it):
    """Pass items of `it` through unchanged while printing an "i/size" counter
    on a single carriage-returned line.

    Bug fix: the newline check used `i - 1 == size`, which is never true for a
    non-negative index, so the finishing newline was never printed. It should
    fire on the last item, i.e. when `i + 1 == size`.
    """
    for i, item in enumerate(it):
        print(f'{i + 1}/{size}', end='\r')
        if i + 1 == size:
            print()  # terminate the progress line after the final item
        yield item
# Schedule every experiment description, submitting only the runs whose
# results summary is missing on disk.
for path in args.experiment_paths:
    print(path)
    exp = loadExperiment(path)
    slurm = slurmOptionsFromFile(args.slurm_path)

    total = exp.permutations() * args.runs
    resultPaths = listResultsPaths(exp, args.runs)
    resultPaths = printProgress(total, resultPaths)
    missing = generateMissing(resultPaths)

    groupSize = slurm.tasks * slurm.tasksPerNode
    for g in group(missing, groupSize):
        indices = list(g)
        print("scheduling:", path, indices)
        # never request more parallel tasks than there are jobs left
        slurm.tasks = min([slurm.tasks, len(indices)])
        schedule(slurm, args.executable + ' ' + path, indices)
        time.sleep(2)  # brief pause between submissions
| [
"[email protected]"
] | |
14147605374f069cff8d2de50567bb9cf4e835a8 | 7949f96ee7feeaa163608dbd256b0b76d1b89258 | /toontown/building/DistributedDoor.py | c8656242ed57b765f03716bc377f99ceb20175d6 | [] | no_license | xxdecryptionxx/ToontownOnline | 414619744b4c40588f9a86c8e01cb951ffe53e2d | e6c20e6ce56f2320217f2ddde8f632a63848bd6b | refs/heads/master | 2021-01-11T03:08:59.934044 | 2018-07-27T01:26:21 | 2018-07-27T01:26:21 | 71,086,644 | 8 | 10 | null | 2018-06-01T00:13:34 | 2016-10-17T00:39:41 | Python | UTF-8 | Python | false | false | 28,605 | py | # File: t (Python 2.4)
from toontown.toonbase.ToonBaseGlobal import *
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import *
from toontown.toonbase import ToontownGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM, State
from direct.distributed import DistributedObject
from toontown.hood import ZoneUtil
from toontown.suit import Suit
from toontown.distributed import DelayDelete
import FADoorCodes
from direct.task.Task import Task
import DoorTypes
from toontown.toontowngui import TTDialog
from toontown.toonbase import TTLocalizer
from toontown.toontowngui import TeaserPanel
from toontown.distributed.DelayDeletable import DelayDeletable
if __debug__:
import pdb
class DistributedDoor(DistributedObject.DistributedObject, DelayDeletable):
def __init__(self, cr):
DistributedObject.DistributedObject.__init__(self, cr)
self.openSfx = base.loadSfx('phase_3.5/audio/sfx/Door_Open_1.mp3')
self.closeSfx = base.loadSfx('phase_3.5/audio/sfx/Door_Close_1.mp3')
self.nametag = None
self.fsm = ClassicFSM.ClassicFSM('DistributedDoor_right', [
State.State('off', self.enterOff, self.exitOff, [
'closing',
'closed',
'opening',
'open']),
State.State('closing', self.enterClosing, self.exitClosing, [
'closed',
'opening']),
State.State('closed', self.enterClosed, self.exitClosed, [
'opening']),
State.State('opening', self.enterOpening, self.exitOpening, [
'open']),
State.State('open', self.enterOpen, self.exitOpen, [
'closing',
'open'])], 'off', 'off')
self.fsm.enterInitialState()
self.exitDoorFSM = ClassicFSM.ClassicFSM('DistributedDoor_left', [
State.State('off', self.exitDoorEnterOff, self.exitDoorExitOff, [
'closing',
'closed',
'opening',
'open']),
State.State('closing', self.exitDoorEnterClosing, self.exitDoorExitClosing, [
'closed',
'opening']),
State.State('closed', self.exitDoorEnterClosed, self.exitDoorExitClosed, [
'opening']),
State.State('opening', self.exitDoorEnterOpening, self.exitDoorExitOpening, [
'open']),
State.State('open', self.exitDoorEnterOpen, self.exitDoorExitOpen, [
'closing',
'open'])], 'off', 'off')
self.exitDoorFSM.enterInitialState()
self.specialDoorTypes = {
DoorTypes.EXT_HQ: 0,
DoorTypes.EXT_COGHQ: 0,
DoorTypes.INT_COGHQ: 0,
DoorTypes.EXT_KS: 0,
DoorTypes.INT_KS: 0 }
self.doorX = 1.5
def generate(self):
DistributedObject.DistributedObject.generate(self)
self.avatarTracks = []
self.avatarExitTracks = []
self.avatarIDList = []
self.avatarExitIDList = []
self.doorTrack = None
self.doorExitTrack = None
def disable(self):
self.clearNametag()
taskMgr.remove(self.checkIsDoorHitTaskName())
self.ignore(self.getEnterTriggerEvent())
self.ignore(self.getExitTriggerEvent())
self.ignore('clearOutToonInterior')
self.fsm.request('off')
self.exitDoorFSM.request('off')
if self.__dict__.has_key('building'):
del self.building
self.finishAllTracks()
self.avatarIDList = []
self.avatarExitIDList = []
if hasattr(self, 'tempDoorNodePath'):
self.tempDoorNodePath.removeNode()
del self.tempDoorNodePath
DistributedObject.DistributedObject.disable(self)
def delete(self):
del self.fsm
del self.exitDoorFSM
del self.openSfx
del self.closeSfx
DistributedObject.DistributedObject.delete(self)
def wantsNametag(self):
return not ZoneUtil.isInterior(self.zoneId)
def setupNametag(self):
if not self.wantsNametag():
return None
if self.nametag == None:
self.nametag = NametagGroup()
self.nametag.setFont(ToontownGlobals.getBuildingNametagFont())
if TTLocalizer.BuildingNametagShadow:
self.nametag.setShadow(*TTLocalizer.BuildingNametagShadow)
self.nametag.setContents(Nametag.CName)
self.nametag.setColorCode(NametagGroup.CCToonBuilding)
self.nametag.setActive(0)
self.nametag.setAvatar(self.getDoorNodePath())
self.nametag.setObjectCode(self.block)
name = self.cr.playGame.dnaStore.getTitleFromBlockNumber(self.block)
self.nametag.setName(name)
self.nametag.manage(base.marginManager)
def clearNametag(self):
if self.nametag != None:
self.nametag.unmanage(base.marginManager)
self.nametag.setAvatar(NodePath())
self.nametag = None
def getTriggerName(self):
if self.doorType == DoorTypes.INT_HQ or self.specialDoorTypes.has_key(self.doorType):
return 'door_trigger_' + str(self.block) + '_' + str(self.doorIndex)
else:
return 'door_trigger_' + str(self.block)
def getTriggerName_wip(self):
name = 'door_trigger_%d' % (self.doId,)
return name
def getEnterTriggerEvent(self):
return 'enter' + self.getTriggerName()
def getExitTriggerEvent(self):
return 'exit' + self.getTriggerName()
def hideDoorParts(self):
if self.specialDoorTypes.has_key(self.doorType):
self.hideIfHasFlat(self.findDoorNode('rightDoor'))
self.hideIfHasFlat(self.findDoorNode('leftDoor'))
self.findDoorNode('doorFrameHoleRight').hide()
self.findDoorNode('doorFrameHoleLeft').hide()
else:
return None
def setTriggerName(self):
if self.specialDoorTypes.has_key(self.doorType):
building = self.getBuilding()
doorTrigger = building.find('**/door_' + str(self.doorIndex) + '/**/door_trigger*')
doorTrigger.node().setName(self.getTriggerName())
else:
return None
def setTriggerName_wip(self):
building = self.getBuilding()
doorTrigger = building.find('**/door_%d/**/door_trigger_%d' % (self.doorIndex, self.block))
if doorTrigger.isEmpty():
doorTrigger = building.find('**/door_trigger_%d' % (self.block,))
if doorTrigger.isEmpty():
doorTrigger = building.find('**/door_%d/**/door_trigger_*' % (self.doorIndex,))
if doorTrigger.isEmpty():
doorTrigger = building.find('**/door_trigger_*')
doorTrigger.node().setName(self.getTriggerName())
def setZoneIdAndBlock(self, zoneId, block):
self.zoneId = zoneId
self.block = block
def setDoorType(self, doorType):
self.notify.debug('Door type = ' + str(doorType) + ' on door #' + str(self.doId))
self.doorType = doorType
def setDoorIndex(self, doorIndex):
self.doorIndex = doorIndex
def setSwing(self, flags):
self.leftSwing = flags & 1 != 0
self.rightSwing = flags & 2 != 0
def setOtherZoneIdAndDoId(self, zoneId, distributedObjectID):
self.otherZoneId = zoneId
self.otherDoId = distributedObjectID
def setState(self, state, timestamp):
self.fsm.request(state, [
globalClockDelta.localElapsedTime(timestamp)])
def setExitDoorState(self, state, timestamp):
self.exitDoorFSM.request(state, [
globalClockDelta.localElapsedTime(timestamp)])
def announceGenerate(self):
DistributedObject.DistributedObject.announceGenerate(self)
self.doPostAnnounceGenerate()
def doPostAnnounceGenerate(self):
if self.doorType == DoorTypes.INT_STANDARD:
self.bHasFlat = True
else:
self.bHasFlat = not self.findDoorNode('door*flat', True).isEmpty()
self.hideDoorParts()
self.setTriggerName()
self.accept(self.getEnterTriggerEvent(), self.doorTrigger)
self.acceptOnce('clearOutToonInterior', self.doorTrigger)
self.setupNametag()
def getBuilding(self):
if not self.__dict__.has_key('building'):
if self.doorType == DoorTypes.INT_STANDARD:
door = render.find('**/leftDoor;+s')
self.building = door.getParent()
elif self.doorType == DoorTypes.INT_HQ:
door = render.find('**/door_0')
self.building = door.getParent()
elif self.doorType == DoorTypes.INT_KS:
self.building = render.find('**/KartShop_Interior*')
elif self.doorType == DoorTypes.EXT_STANDARD and self.doorType == DoorTypes.EXT_HQ or self.doorType == DoorTypes.EXT_KS:
self.building = self.cr.playGame.hood.loader.geom.find('**/??' + str(self.block) + ':*_landmark_*_DNARoot;+s')
if self.building.isEmpty():
self.building = self.cr.playGame.hood.loader.geom.find('**/??' + str(self.block) + ':animated_building_*_DNARoot;+s')
elif self.doorType == DoorTypes.EXT_COGHQ or self.doorType == DoorTypes.INT_COGHQ:
self.building = self.cr.playGame.hood.loader.geom
else:
self.notify.error('No such door type as ' + str(self.doorType))
return self.building
def getBuilding_wip(self):
if not self.__dict__.has_key('building'):
if self.__dict__.has_key('block'):
self.building = self.cr.playGame.hood.loader.geom.find('**/??' + str(self.block) + ':*_landmark_*_DNARoot;+s')
else:
self.building = self.cr.playGame.hood.loader.geom
print '---------------- door is interior -------'
return self.building
def readyToExit(self):
base.transitions.fadeScreen(1.0)
self.sendUpdate('requestExit')
def avatarEnterDoorTrack(self, avatar, duration):
trackName = 'avatarEnterDoor-%d-%d' % (self.doId, avatar.doId)
track = Parallel(name = trackName)
otherNP = self.getDoorNodePath()
if hasattr(avatar, 'stopSmooth'):
avatar.stopSmooth()
if avatar.doId == base.localAvatar.doId:
track.append(LerpPosHprInterval(nodePath = camera, other = avatar, duration = duration, pos = Point3(0, -8, avatar.getHeight()), hpr = VBase3(0, 0, 0), blendType = 'easeInOut'))
finalPos = avatar.getParent().getRelativePoint(otherNP, Point3(self.doorX, 2, ToontownGlobals.FloorOffset))
moveHere = Sequence(self.getAnimStateInterval(avatar, 'walk'), LerpPosInterval(nodePath = avatar, duration = duration, pos = finalPos, blendType = 'easeIn'))
track.append(moveHere)
if avatar.doId == base.localAvatar.doId:
track.append(Sequence(Wait(duration * 0.5), Func(base.transitions.irisOut, duration * 0.5), Wait(duration * 0.5), Func(avatar.b_setParent, ToontownGlobals.SPHidden)))
track.delayDelete = DelayDelete.DelayDelete(avatar, 'avatarEnterDoorTrack')
return track
def avatarEnqueueTrack(self, avatar, duration):
if hasattr(avatar, 'stopSmooth'):
avatar.stopSmooth()
back = -5.0 - 2.0 * len(self.avatarIDList)
if back < -9.0:
back = -9.0
offset = Point3(self.doorX, back, ToontownGlobals.FloorOffset)
otherNP = self.getDoorNodePath()
walkLike = ActorInterval(avatar, 'walk', startTime = 1, duration = duration, endTime = 0.0001)
standHere = Sequence(LerpPosHprInterval(nodePath = avatar, other = otherNP, duration = duration, pos = offset, hpr = VBase3(0, 0, 0), blendType = 'easeInOut'), self.getAnimStateInterval(avatar, 'neutral'))
trackName = 'avatarEnqueueDoor-%d-%d' % (self.doId, avatar.doId)
track = Parallel(walkLike, standHere, name = trackName)
track.delayDelete = DelayDelete.DelayDelete(avatar, 'avatarEnqueueTrack')
return track
def getAnimStateInterval(self, avatar, animName):
isSuit = isinstance(avatar, Suit.Suit)
if isSuit:
return Func(avatar.loop, animName, 0)
else:
return Func(avatar.setAnimState, animName)
def isDoorHit(self):
    """Decide whether the local avatar actually 'hit' the door trigger.

    Re-expresses the stored trigger surface normal in the avatar's coordinate
    space, divides out the trigger's net Y scale, and treats a component below
    -0.5 as a hit (presumably: the avatar is pressing against the trigger's
    front face -- confirm against the collision setup).
    """
    relVec = base.localAvatar.getRelativeVector(self.currentDoorNp, self.currentDoorVec)
    netScale = self.currentDoorNp.getNetTransform().getScale()
    return relVec.getY() / netScale[1] < -0.5
def enterDoor(self):
if self.allowedToEnter():
messenger.send('DistributedDoor_doorTrigger')
self.sendUpdate('requestEnter')
else:
place = base.cr.playGame.getPlace()
if place:
place.fsm.request('stopped')
self.dialog = TeaserPanel.TeaserPanel(pageName = 'otherHoods', doneFunc = self.handleOkTeaser)
def handleOkTeaser(self):
self.accept(self.getEnterTriggerEvent(), self.doorTrigger)
self.dialog.destroy()
del self.dialog
place = base.cr.playGame.getPlace()
if place:
place.fsm.request('walk')
def allowedToEnter(self, zoneId = None):
    """Return whether the local player's account access permits entry.

    Checks access for the given zoneId when supplied, otherwise the general
    access check. Returns False when the access manager is not available.
    """
    if not (hasattr(base, 'ttAccess') and base.ttAccess):
        return False
    if zoneId:
        return base.ttAccess.canAccess(zoneId)
    return base.ttAccess.canAccess()
def checkIsDoorHitTaskName(self):
return 'checkIsDoorHit' + self.getTriggerName()
def checkIsDoorHitTask(self, task):
if self.isDoorHit():
self.ignore(self.checkIsDoorHitTaskName())
self.ignore(self.getExitTriggerEvent())
self.enterDoor()
return Task.done
return Task.cont
def cancelCheckIsDoorHitTask(self, args):
taskMgr.remove(self.checkIsDoorHitTaskName())
del self.currentDoorNp
del self.currentDoorVec
self.ignore(self.getExitTriggerEvent())
self.accept(self.getEnterTriggerEvent(), self.doorTrigger)
def doorTrigger(self, args = None):
self.ignore(self.getEnterTriggerEvent())
if args == None:
self.enterDoor()
else:
self.currentDoorNp = NodePath(args.getIntoNodePath())
self.currentDoorVec = Vec3(args.getSurfaceNormal(self.currentDoorNp))
if self.isDoorHit():
self.enterDoor()
else:
self.accept(self.getExitTriggerEvent(), self.cancelCheckIsDoorHitTask)
taskMgr.add(self.checkIsDoorHitTask, self.checkIsDoorHitTaskName())
def avatarEnter(self, avatarID):
avatar = self.cr.doId2do.get(avatarID, None)
if avatar:
avatar.setAnimState('neutral')
track = self.avatarEnqueueTrack(avatar, 0.5)
track.start()
self.avatarTracks.append(track)
self.avatarIDList.append(avatarID)
def rejectEnter(self, reason):
message = FADoorCodes.reasonDict[reason]
if message:
self._DistributedDoor__faRejectEnter(message)
else:
self._DistributedDoor__basicRejectEnter()
def _DistributedDoor__basicRejectEnter(self):
self.accept(self.getEnterTriggerEvent(), self.doorTrigger)
if self.cr.playGame.getPlace():
self.cr.playGame.getPlace().setState('walk')
def _DistributedDoor__faRejectEnter(self, message):
self.rejectDialog = TTDialog.TTGlobalDialog(message = message, doneEvent = 'doorRejectAck', style = TTDialog.Acknowledge)
self.rejectDialog.show()
self.rejectDialog.delayDelete = DelayDelete.DelayDelete(self, '__faRejectEnter')
event = 'clientCleanup'
self.acceptOnce(event, self._DistributedDoor__handleClientCleanup)
base.cr.playGame.getPlace().setState('stopped')
self.acceptOnce('doorRejectAck', self._DistributedDoor__handleRejectAck)
self.acceptOnce('stoppedAsleep', self._DistributedDoor__handleFallAsleepDoor)
def _DistributedDoor__handleClientCleanup(self):
if hasattr(self, 'rejectDialog') and self.rejectDialog:
self.rejectDialog.doneStatus = 'ok'
self._DistributedDoor__handleRejectAck()
def _DistributedDoor__handleFallAsleepDoor(self):
self.rejectDialog.doneStatus = 'ok'
self._DistributedDoor__handleRejectAck()
def _DistributedDoor__handleRejectAck(self):
self.ignore('doorRejectAck')
self.ignore('stoppedAsleep')
self.ignore('clientCleanup')
doneStatus = self.rejectDialog.doneStatus
if doneStatus != 'ok':
self.notify.error('Unrecognized doneStatus: ' + str(doneStatus))
self._DistributedDoor__basicRejectEnter()
self.rejectDialog.delayDelete.destroy()
self.rejectDialog.cleanup()
del self.rejectDialog
def getDoorNodePath(self):
if self.doorType == DoorTypes.INT_STANDARD:
otherNP = render.find('**/door_origin')
elif self.doorType == DoorTypes.EXT_STANDARD:
if hasattr(self, 'tempDoorNodePath'):
return self.tempDoorNodePath
else:
posHpr = self.cr.playGame.dnaStore.getDoorPosHprFromBlockNumber(self.block)
otherNP = NodePath('doorOrigin')
otherNP.setPos(posHpr.getPos())
otherNP.setHpr(posHpr.getHpr())
self.tempDoorNodePath = otherNP
elif self.specialDoorTypes.has_key(self.doorType):
building = self.getBuilding()
otherNP = building.find('**/door_origin_' + str(self.doorIndex))
elif self.doorType == DoorTypes.INT_HQ:
otherNP = render.find('**/door_origin_' + str(self.doorIndex))
else:
self.notify.error('No such door type as ' + str(self.doorType))
return otherNP
def avatarExitTrack(self, avatar, duration):
if hasattr(avatar, 'stopSmooth'):
avatar.stopSmooth()
otherNP = self.getDoorNodePath()
trackName = 'avatarExitDoor-%d-%d' % (self.doId, avatar.doId)
track = Sequence(name = trackName)
track.append(self.getAnimStateInterval(avatar, 'walk'))
track.append(PosHprInterval(avatar, Point3(-(self.doorX), 0, ToontownGlobals.FloorOffset), VBase3(179, 0, 0), other = otherNP))
track.append(Func(avatar.setParent, ToontownGlobals.SPRender))
if avatar.doId == base.localAvatar.doId:
track.append(PosHprInterval(camera, VBase3(-(self.doorX), 5, avatar.getHeight()), VBase3(180, 0, 0), other = otherNP))
if avatar.doId == base.localAvatar.doId:
finalPos = render.getRelativePoint(otherNP, Point3(-(self.doorX), -6, ToontownGlobals.FloorOffset))
else:
finalPos = render.getRelativePoint(otherNP, Point3(-(self.doorX), -3, ToontownGlobals.FloorOffset))
track.append(LerpPosInterval(nodePath = avatar, duration = duration, pos = finalPos, blendType = 'easeInOut'))
if avatar.doId == base.localAvatar.doId:
track.append(Func(self.exitCompleted))
track.append(Func(base.transitions.irisIn))
if hasattr(avatar, 'startSmooth'):
track.append(Func(avatar.startSmooth))
track.delayDelete = DelayDelete.DelayDelete(avatar, 'DistributedDoor.avatarExitTrack')
return track
def exitCompleted(self):
    """Hand control back to the player after the walk-out interval finishes."""
    base.localAvatar.setAnimState('neutral')
    currentPlace = self.cr.playGame.getPlace()
    if currentPlace:
        currentPlace.setState('walk')
    # also re-parent the toon to the render graph on the distributed side
    base.localAvatar.d_setParent(ToontownGlobals.SPRender)
def avatarExit(self, avatarID):
if avatarID in self.avatarIDList:
self.avatarIDList.remove(avatarID)
if avatarID == base.localAvatar.doId:
self.exitCompleted()
else:
self.avatarExitIDList.append(avatarID)
def finishDoorTrack(self):
    """Run the entry-door interval to completion, if any, and drop it."""
    if self.doorTrack:
        self.doorTrack.finish()
        self.doorTrack = None

def finishDoorExitTrack(self):
    """Run the exit-door interval to completion, if any, and drop it."""
    if self.doorExitTrack:
        self.doorExitTrack.finish()
        self.doorExitTrack = None

def finishAllTracks(self):
    """Finish every outstanding door/avatar interval and release DelayDeletes."""
    self.finishDoorTrack()
    self.finishDoorExitTrack()
    for track in self.avatarTracks + self.avatarExitTracks:
        track.finish()
        DelayDelete.cleanupDelayDeletes(track)
    self.avatarTracks = []
    self.avatarExitTracks = []
def enterOff(self):
pass
def exitOff(self):
pass
def getRequestStatus(self):
    """Assemble the 'doorIn' teleport-request dict for the door's far zone."""
    zoneId = self.otherZoneId
    return {
        'loader': ZoneUtil.getBranchLoaderName(zoneId),
        'where': ZoneUtil.getToonWhereName(zoneId),
        'how': 'doorIn',
        'hoodId': ZoneUtil.getHoodId(zoneId),
        'zoneId': zoneId,
        'shardId': None,
        'avId': -1,
        'allowRedirect': 0,
        'doorDoId': self.otherDoId,
    }
def enterClosing(self, ts):
doorFrameHoleRight = self.findDoorNode('doorFrameHoleRight')
if doorFrameHoleRight.isEmpty():
self.notify.warning('enterClosing(): did not find doorFrameHoleRight')
return None
rightDoor = self.findDoorNode('rightDoor')
if rightDoor.isEmpty():
self.notify.warning('enterClosing(): did not find rightDoor')
return None
otherNP = self.getDoorNodePath()
trackName = 'doorClose-%d' % self.doId
if self.rightSwing:
h = 100
else:
h = -100
self.finishDoorTrack()
self.doorTrack = Sequence(LerpHprInterval(nodePath = rightDoor, duration = 1.0, hpr = VBase3(0, 0, 0), startHpr = VBase3(h, 0, 0), other = otherNP, blendType = 'easeInOut'), Func(doorFrameHoleRight.hide), Func(self.hideIfHasFlat, rightDoor), SoundInterval(self.closeSfx, node = rightDoor), name = trackName)
self.doorTrack.start(ts)
if hasattr(self, 'done'):
request = self.getRequestStatus()
messenger.send('doorDoneEvent', [
request])
def exitClosing(self):
pass
def enterClosed(self, ts):
pass
def exitClosed(self):
pass
def enterOpening(self, ts):
doorFrameHoleRight = self.findDoorNode('doorFrameHoleRight')
if doorFrameHoleRight.isEmpty():
self.notify.warning('enterOpening(): did not find doorFrameHoleRight')
return None
rightDoor = self.findDoorNode('rightDoor')
if rightDoor.isEmpty():
self.notify.warning('enterOpening(): did not find rightDoor')
return None
otherNP = self.getDoorNodePath()
trackName = 'doorOpen-%d' % self.doId
if self.rightSwing:
h = 100
else:
h = -100
self.finishDoorTrack()
self.doorTrack = Parallel(SoundInterval(self.openSfx, node = rightDoor), Sequence(HprInterval(rightDoor, VBase3(0, 0, 0), other = otherNP), Wait(0.40000000000000002), Func(rightDoor.show), Func(doorFrameHoleRight.show), LerpHprInterval(nodePath = rightDoor, duration = 0.59999999999999998, hpr = VBase3(h, 0, 0), startHpr = VBase3(0, 0, 0), other = otherNP, blendType = 'easeInOut')), name = trackName)
self.doorTrack.start(ts)
def exitOpening(self):
pass
def enterOpen(self, ts):
for avatarID in self.avatarIDList:
avatar = self.cr.doId2do.get(avatarID)
if avatar:
track = self.avatarEnterDoorTrack(avatar, 1.0)
track.start(ts)
self.avatarTracks.append(track)
if avatarID == base.localAvatar.doId:
self.done = 1
continue
self.avatarIDList = []
def exitOpen(self):
for track in self.avatarTracks:
track.finish()
DelayDelete.cleanupDelayDeletes(track)
self.avatarTracks = []
def exitDoorEnterOff(self):
pass
def exitDoorExitOff(self):
pass
def exitDoorEnterClosing(self, ts):
doorFrameHoleLeft = self.findDoorNode('doorFrameHoleLeft')
if doorFrameHoleLeft.isEmpty():
self.notify.warning('enterOpening(): did not find flatDoors')
return None
if self.leftSwing:
h = -100
else:
h = 100
leftDoor = self.findDoorNode('leftDoor')
if not leftDoor.isEmpty():
otherNP = self.getDoorNodePath()
trackName = 'doorExitTrack-%d' % self.doId
self.finishDoorExitTrack()
self.doorExitTrack = Sequence(LerpHprInterval(nodePath = leftDoor, duration = 1.0, hpr = VBase3(0, 0, 0), startHpr = VBase3(h, 0, 0), other = otherNP, blendType = 'easeInOut'), Func(doorFrameHoleLeft.hide), Func(self.hideIfHasFlat, leftDoor), SoundInterval(self.closeSfx, node = leftDoor), name = trackName)
self.doorExitTrack.start(ts)
def exitDoorExitClosing(self):
pass
def exitDoorEnterClosed(self, ts):
pass
def exitDoorExitClosed(self):
pass
def exitDoorEnterOpening(self, ts):
doorFrameHoleLeft = self.findDoorNode('doorFrameHoleLeft')
if doorFrameHoleLeft.isEmpty():
self.notify.warning('enterOpening(): did not find flatDoors')
return None
leftDoor = self.findDoorNode('leftDoor')
if self.leftSwing:
h = -100
else:
h = 100
if not leftDoor.isEmpty():
otherNP = self.getDoorNodePath()
trackName = 'doorDoorExitTrack-%d' % self.doId
self.finishDoorExitTrack()
self.doorExitTrack = Parallel(SoundInterval(self.openSfx, node = leftDoor), Sequence(Func(leftDoor.show), Func(doorFrameHoleLeft.show), LerpHprInterval(nodePath = leftDoor, duration = 0.59999999999999998, hpr = VBase3(h, 0, 0), startHpr = VBase3(0, 0, 0), other = otherNP, blendType = 'easeInOut')), name = trackName)
self.doorExitTrack.start(ts)
else:
self.notify.warning('exitDoorEnterOpening(): did not find leftDoor')
def exitDoorExitOpening(self):
pass
def exitDoorEnterOpen(self, ts):
for avatarID in self.avatarExitIDList:
avatar = self.cr.doId2do.get(avatarID)
if avatar:
track = self.avatarExitTrack(avatar, 0.20000000000000001)
track.start()
self.avatarExitTracks.append(track)
continue
self.avatarExitIDList = []
def exitDoorExitOpen(self):
for track in self.avatarExitTracks:
track.finish()
DelayDelete.cleanupDelayDeletes(track)
self.avatarExitTracks = []
def findDoorNode(self, string, allowEmpty = False):
    """Search this door's building for a node whose name starts with `string`.

    Looks under the door_<index> subtree first, then anywhere in the building.
    Returns None when the building itself is missing (remark 896029), or an
    empty NodePath when nothing matches.

    Fix: the original ended with `if allowEmpty: return foundNode` followed by
    `return foundNode` -- both branches returned the same value, so the
    conditional was dead code and is collapsed to a single return. The
    allowEmpty parameter is kept for interface compatibility with callers.
    """
    building = self.getBuilding()
    if not building:
        self.notify.warning('getBuilding() returned None, avoiding crash, remark 896029')
        return None
    foundNode = building.find('**/door_' + str(self.doorIndex) + '/**/' + string + '*;+s+i')
    if foundNode.isEmpty():
        foundNode = building.find('**/' + string + '*;+s+i')
    return foundNode
def hideIfHasFlat(self, node):
    """Hide `node` only when this door's model includes a flat door piece."""
    if not self.bHasFlat:
        return
    node.hide()
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.