Dataset schema (column: type, with observed min/max where given; ⌀ marks nullable columns):

blob_id: string (length 40)
directory_id: string (length 40)
path: string (length 3 to 616)
content_id: string (length 40)
detected_licenses: list (0 to 112 items)
license_type: string (2 classes)
repo_name: string (length 5 to 115)
snapshot_id: string (length 40)
revision_id: string (length 40)
branch_name: string (777 classes)
visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
github_id: int64 (4.92k to 681M, ⌀)
star_events_count: int64 (0 to 209k)
fork_events_count: int64 (0 to 110k)
gha_license_id: string (22 classes)
gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, ⌀)
gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, ⌀)
gha_language: string (149 classes)
src_encoding: string (26 classes)
language: string (1 value)
is_vendor: bool (2 classes)
is_generated: bool (2 classes)
length_bytes: int64 (3 to 10.2M)
extension: string (188 classes)
content: string (length 3 to 10.2M)
authors: list (1 item)
author_id: string (length 1 to 132)
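The data rows below follow this schema: each row's metadata fields appear on a single pipe-separated line, followed by the file content verbatim. As a minimal reading sketch (assuming the dump comes from a Hugging Face dataset; "org/dataset-name" is a placeholder, not taken from this page):

from datasets import load_dataset

# Hypothetical dataset id; substitute the real one.
ds = load_dataset("org/dataset-name", split="train", streaming=True)
for row in ds:
    # Each row exposes the columns listed above plus the raw source text.
    print(row["repo_name"], row["path"], row["length_bytes"])
    print(row["content"][:200])  # first 200 characters of the file
    break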
4db586bf6bffedab7a1c883b8f6c0e720434fe87 | 10d87423fe57af7258d7c327db06034a80700571 | /gym_minigrid/envs/fourroom.py | ed6047002a07f36ceead3de4c7fbb76645a275c5 | ["BSD-3-Clause"] | permissive | johannah/gym-minigrid | dee47bc1cd5587d069f2094173053ee3f1e42f5e | 127eef302c4e52f0b976c10304a2b33390fbca78 | refs/heads/master | 2021-08-30T10:27:25.191472 | 2017-12-17T14:08:59 | 2017-12-17T14:08:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py |

from gym_minigrid.minigrid import *
from gym_minigrid.register import register
| ["[email protected]"] | |
7e7d1b9026dfc15931ee9281fa1c3cbbd6ee0303 | c818eafff8fb9cfb052e9c016aa7de67de246f21 | /sales/migrations/0027_remove_return_receipt.py | b27fbc4dfb3e044ba81c8479d9cf55a5e81ca6c45 | [] | no_license | mugagambi/mgh-server | a4275b07243f476db9d63e568c8b9331190b75f0 | da966882bd695df606622ab816cd93fab1d53773 | refs/heads/master | 2021-10-22T05:52:15.354561 | 2019-03-08T11:50:19 | 2019-03-08T11:50:19 | 120,087,420 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py |

# Generated by Django 2.0.3 on 2018-04-17 15:46
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('sales', '0026_return_approved_by'),
]
operations = [
migrations.RemoveField(
model_name='return',
name='receipt',
),
]
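
# Usage note (illustrative, not part of the original migration): once this file
# exists, it is applied with Django's standard command, e.g.
#
#     python manage.py migrate sales
#
# which removes the column backing the 'receipt' field from the 'return' model's table.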
| ["[email protected]"] | |
0d5db6af7c7852a4890054a62024e6713e579c74 | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/Trigger/TriggerCommon/TriggerMenu/python/l1menu/Menu_Physics_HI_v5.py | 15f0ee3b1a83a6c0196d246fe4a72355984a0428 | [] | no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,900 | py |

# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
def print_available():
from TriggerMenu.l1.Lvl1Flags import Lvl1Flags
defineMenu()
available = []
for i in range(512):
if i==463: continue #reserved for L1_RD2_BGRP14, L1_RD3_BGRP15 now assigned to 510 for partition 3 ATR-17737
if i>=509 and i<=511: continue #reserved for CALREQ
if not i in Lvl1Flags.CtpIdMap().values(): available.append(str(i))
available.sort()
print "There are %d available CTP IDs:"%len(available),",".join(available)
print "IDs >= 472 go in partition 2, IDs >= 492 go in partition 3"
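# Illustrative output sketch (hypothetical counts, not from a real run): calling
# print_available() after defineMenu() prints something like:
#   There are 120 available CTP IDs: 6,7,9,...
#   IDs >= 472 go in partition 2, IDs >= 492 go in partition 3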
def defineMenu():
"""
Defines the following LvlFlags:
thresholds .... list of all threshold names in the menu (includes forced thresholds)
items .... list of all L1 item names in the menu
CtpIdMap .... map of item names to CTP IDs
"""
from TriggerMenu.l1.Lvl1Flags import Lvl1Flags
Lvl1Flags.CTPVersion = 4 # new CTP
#TODO should be different?
Lvl1Flags.BunchGroupPartitioning = [1, 13, 14] # partition 1: 1-12, partition 2: 13, partition 3: 14-15 (note that BGRP0 is used by all items)
Lvl1Flags.BunchGroupNames = ['BCRVeto', 'Filled', 'Calib', 'Empty', 'UnpairedBeam1', 'UnpairedBeam2', 'FirstEmpty', 'InTrain']
Lvl1Flags.BunchGroupNames += ['NotUsed'] * len(Lvl1Flags.BunchGroupNames())
Lvl1Flags.MenuPartitioning = [0, 472, 492] # partition 1: ctpid 0-471, partition 2: ctpid 472-491, partition 3: ctpid 492-511
Lvl1Flags.RemapThresholdsAsListed = True
Lvl1Flags.thresholds = [
# Note that the forced thresholds (FTHR) are not used in the menu but are needed for input monitoring
# They can always be replaced with something that is needed for the menu
#-------------------------
# SLOT 7 / CON 0,1 (EM1,2)
#-------------------------
# 16 x EM
'EM7', 'EM8', 'EM8I', 'EM10', 'EM10VH', 'EM12', 'EM13VH', 'EM14', 'EM15', 'EM15HI', 'EM16', 'EM18', 'EM20', 'EM20VH', 'EM20VHI', 'EM22',
# 1 x ZB
'ZB_J75', # TODO double check if 'ZB_EM15' should be used for Run-2 (need to be changed also in ItemDef).
#--------------------------
# SLOT 7 / CON 2,3 (TAU1,2)
#--------------------------
#TODO TAU trigger behaving like EM items, lowere thresholds
# 16 x TAU
'HA8', 'HA12', 'HA12IL', 'HA12IM', 'HA12IT', 'HA15', 'HA20', 'HA20IL', 'HA20IM', 'HA20IT', 'HA25', 'HA25IT', 'HA30', 'HA40', 'HA60', 'HA100',
#----------------------
# SLOT 8 / CON 0 (JET1)
#----------------------
# 10 x 3-bit JET (can have multiplicity 4 or more)
#TODO shall we start at higher pT to reduce overflows?
# 8 x JETs and central jets
'J12', 'J12.0ETA23', 'J15', 'J15.0ETA25','J20', 'J25','J25.0ETA23','J30',
# 2 x VBF
'J20.0ETA49', 'J30.0ETA49',
#----------------------
# SLOT 8 / CON 1 (JET2)
#----------------------
# 15 x 2-bit JET (can have maximum multiplicity of 3) (SLOT 8, CON 1)
# 3 x Central Jet
'JJ15.23ETA49','J20.28ETA31','J40.0ETA25',
# 6 Jets
'J40', 'J50', 'J75', 'J85', 'J100', 'J175',
# 6 x FJ
'J10.31ETA49', 'J15.31ETA49', 'J20.31ETA49', 'J30.31ETA49', 'J75.31ETA49', 'J100.31ETA49',
#---------------------
# SLOT 8 / CON 2 (EN1)
#---------------------
# 24 x 1-bit thresholds
# 8 x TE
'TE5', 'TE20', 'TE50', 'TE100', 'TE200', 'TE10000', 'TE12000', 'TE14000',
# 8 x XE
'XE35', 'XE40', 'XE45', 'XE50', 'XE55', 'XE60', 'XE70', 'XE80',
'XE35.0ETA24', 'XE40.0ETA24', 'XE45.0ETA24', 'XE50.0ETA24', 'XE55.0ETA24', 'XE60.0ETA24', 'XE70.0ETA24', 'XE80.0ETA24',
# 8 x XS
'XS20', 'XS30', 'XS40', 'XS45', 'XS50', 'XS55', 'XS60', 'XS65',
#---------------------
# SLOT 8 / CON 3 (EN2)
#---------------------
# 16 x 1-bit thresholds
# 8 x weighted sum ET
#'RXE35', 'RXE40', 'RXE45', 'RXE50', 'RXE55', 'RXE60', 'RXE70', 'RXE80', # FTHR
# 8 x restricted eta range in |eta|<4.9
'TE500.0ETA49', 'TE1500.0ETA49', 'TE3000.0ETA49', 'TE3500.0ETA49',
'TE5000.0ETA49', 'TE6500.0ETA49', 'TE8000.0ETA49', 'TE9000.0ETA49',
#------------------------
# SLOT 9 / CON 0 (MUCTPi)
#------------------------
# 6 x MU (TODO can we still use MU0?)
'MU4', 'MU6', 'MU10', 'MU11', 'MU15', 'MU20',
# MU10 is needed for monitoring
#------------------------
# SLOT 9 / CON 1 (CTPCal)
#------------------------
# 3 x 1-bit BCM
'BCM_AtoC', 'BCM_CtoA', 'BCM_Wide',
# 1 x 3-bit BCM
'BCM_Comb',
# 8 x DBM
#'DBM0', 'DBM1', 'DBM2', 'DBM3', 'DBM4', 'DBM5', 'DBM6', 'DBM7', # FTHR
# 2 x BPTX
'BPTX0','BPTX1',
# 6 x LUCID
'LUCID_A', 'LUCID_C',
#'LUCID_Coinc_AC', # FTHR
#'LUCID_COMM',
#'LUCID_05', 'LUCID_06', # FHTR
# 3 x ZDC
'ZDC_A', 'ZDC_C',
'ZDC_AND', # FTHR
# 3 x CALREQ
'CAL0','CAL1','CAL2',
#-----------------------------
# SLOT 9 / CON 2,3 (NIM1,NIM2)
#-----------------------------
# 2 x MBTS
'MBTS_A', 'MBTS_C',
#TODO/NOTE: redefined according to pp_v7 menu
# 32 x MBTSSI (all FTHR)
# NOTE: THESE ARE OUT OF ORDER FOR A REASON! Do not change!
# The order defines the mapping - see ATR-17870.
'MBTS_A0', 'MBTS_A1', 'MBTS_A2', 'MBTS_A3', 'MBTS_A4', 'MBTS_A5', 'MBTS_A6', 'MBTS_A7',
'MBTS_A8', 'MBTS_A10', 'MBTS_A12', 'MBTS_A14', 'MBTS_A9', 'MBTS_A11','MBTS_A13', 'MBTS_A15',
'MBTS_C0', 'MBTS_C1', 'MBTS_C2', 'MBTS_C3', 'MBTS_C4', 'MBTS_C5', 'MBTS_C6', 'MBTS_C7',
'MBTS_C8', 'MBTS_C10', 'MBTS_C12', 'MBTS_C14', 'MBTS_C11', 'MBTS_C13','MBTS_C9','MBTS_C15',
# L1A for CTP monitoring itself
'NIML1A',
# LHCF
'NIMLHCF',
# TGC
'NIMTGC',
# RPC
'NIMRPC',
# TRT
'NIMTRT',
# AFP
'AFP_NSC', 'AFP_NSA',
'AFP_FSA_SIT', 'AFP_FSA_TOF', 'AFP_FSA_LOG',
'AFP_FSC_SIT', 'AFP_FSC_LOG', 'AFP_FSC_TOF',
#-------------------------------------------------------------------
#L1 TOPO inputs TODO - need to modify?
#-------------------------------------------------------------------
#HT
'HT190-AJ15all.ETA21',
'HT190-J15s5.ETA21',
'HT150-AJ20all.ETA31',
'HT150-J20s5.ETA31',
#ZH
'10MINDPHI-AJ20s2-XE50', '10MINDPHI-J20s2-XE50', '10MINDPHI-J20s2-XE30', '10MINDPHI-J20ab-XE50', '10MINDPHI-CJ20ab-XE50',
#Jpsi T&P
'1INVM5-EMs1-EMs6', '1INVM5-EM7s1-EMs6', '1INVM5-EM12s1-EMs6',
#W T&P
'05MINDPHI-AJj10s6-XE0',
'10MINDPHI-AJj10s6-XE0',
'15MINDPHI-AJj10s6-XE0',
'05MINDPHI-EM12s6-XE0',
'15MINDPHI-EM12s6-XE0',
'05MINDPHI-EM15s6-XE0',
'15MINDPHI-EM15s6-XE0',
'05RATIO-XE0-HT0-AJj15all.ETA49',
'90RATIO2-XE0-HT0-AJj15all.ETA49',
'250RATIO2-XE0-HT0-AJj15all.ETA49',
'HT20-AJj15all.ETA49',
'NOT-02MATCH-EM10s1-AJj15all.ETA49',
'25MT-EM12s6-XE0',
'35MT-EM12s6-XE0',
'35MT-EM15s6-XE0',
#'10MINDPHI-AJj15s2-XE0',
#'20MINDPHI-AJjs6-XE0',
#'20MINDPHI-AJj15s2-XE0',
#'10MINDPHI-EM6s1-XE0',
#'20MINDPHI-EM9s6-XE0',
#'20MINDPHI-EM6s1-XE0',
#'05RATIO-XE0-HT0-AJj15all.ETA49',
#'08RATIO-XE0-HT0-AJj0all.ETA49',
#'40RATIO2-XE0-HT0-AJj15all.ETA49',
#'90RATIO2-XE0-HT0-AJj0all.ETA49',
#'HT20-AJj0all.ETA49',
#'NOT-02MATCH-EM9s1-AJj15all.ETA49',
#'05RATIO-XE0-SUM0-EM9s1-HT0-AJj15all.ETA49',
#'20MT-EM6s6-XE0',
#'30MT-EM6s6-XE0',
#'40MT-EM6s6-XE0',
# central muon
'MULT-CMU4ab',
'MULT-CMU6ab',
#B-jet
'0DR04-MU4ab-CJ15ab', '0DR04-MU4ab-CJ20ab', '0DR04-MU4ab-CJ30ab', '0DR04-MU6ab-CJ20ab', '0DR04-MU6ab-CJ25ab',
#B-physics
'0DR03-EM7ab-CJ15ab',
'2DR15-2MU6ab',
# L1 thresholds for L1Topo streamers
#SX '2INVM999-CMU4ab-MU4ab',
#SX '2INVM999-2CMU4ab',
#SX '2INVM999-MU6ab-MU4ab',
#SX '2INVM999-ONEBARREL-MU6ab-MU4ab',
#SX '2INVM999-CMU6ab-CMU4ab',
#SX '4INVM8-CMU4ab-MU4ab',
#SX '4INVM8-2CMU4ab',
#SX '4INVM8-MU6ab-MU4ab',
#SX '4INVM8-ONEBARREL-MU6ab-MU4ab',
#SX '4INVM8-CMU6ab-CMU4ab',
'2DR99-2MU4ab',
'5DETA99-5DPHI99-MU6ab-MU4ab',
'5DETA99-5DPHI99-2MU6ab',
'0DR10-MU10ab-MU6ab',
'0DR15-2MU6ab',
# '0DETA04-0DPHI03-EM8abi-MU10ab',
'0DETA04-EM8abi-MU10ab',
'0DPHI03-EM8abi-MU10ab',
# '0DETA04-0DPHI03-EM15abi-MUab',
'0DETA04-EM15abi-MUab',
'0DPHI03-EM15abi-MUab',
'10MINDPHI-AJ20s2-XE50',
'10MINDPHI-J20s2-XE50',
'10MINDPHI-J20ab-XE50',
'10MINDPHI-CJ20ab-XE50',
'900INVM9999-AJ30s6-AJ20s6',
'800INVM9999-AJ30s6-AJ20s6',
'700INVM9999-AJ30s6-AJ20s6',
'500INVM9999-AJ30s6-AJ20s6',
'400INVM9999-AJ30s6-AJ20s6',
#'350INVM9999-AJ30s6-AJ20s6',
'300INVM9999-AJ30s6-AJ20s6',
'200INVM9999-AJ30s6-AJ20s6',
'100INVM9999-AJ30s6-AJ20s6',
'600INVM9999-J30s6-AJ20s6',
'500INVM9999-J30s6-AJ20s6',
'400INVM9999-J30s6-AJ20s6',
'200INVM9999-J30s6-AJ20s6',
'63DETA127-FJ20s1-FJ20s2',
'0DETA20-J50s1-Js2',
'27DPHI32-EMs1-EMs6',
#'350INVM9999-J30s6-J20s6',
#'300INVM9999-J30s6-J20s6',
#'250INVM9999-J30s6-J20s6',
#'200INVM9999-J30s6-J20s6',
'HT150-AJj15all.ETA49',
'0MATCH-4AJ20.ETA31-4AJj15.ETA31',
'100RATIO-0MATCH-TAU30si2-EMall',
'NOT-0MATCH-TAU30si1-EMall',
'0DR28-MU10ab-TAU12abi',
'1DISAMB-TAU12abi-J25ab',
'1DISAMB-EM15his2-TAU12abi-J25ab',
'DISAMB-0DR28-EM15his2-TAU12abi',
'1DISAMB-J25ab-0DR28-EM15his2-TAU12abi',
'1DISAMB-TAU20abi-TAU12abi-J25ab',
'0DR25-TAU20abi-TAU12abi',
'0DR28-TAU20abi-TAU12abi',
'0DETA20-0DPHI20-TAU20abi-TAU12abi',
'1DISAMB-J25ab-0DR25-TAU20abi-TAU12abi',
'1DISAMB-J25ab-0DR28-TAU20abi-TAU12abi',
'DISAMB-30INVM-EM20his2-TAU12ab',
'400INVM9999-AJ30s6.ETA31-AJ20s6.31ETA49',
'LAR-EM20shi1',
'LAR-J100s1',
'ZEE-EM20shi2',
'FTK-EM20s1',
'FTK-J100s1',
'FTK-MU10s1',
'2INVM9-2MU6ab',
'7INVM15-2MU4ab',
'2INVM8-ONEBARREL-MU6ab-MU4ab',
'0DR24-2CMU4ab',
'0DR22-2MU6ab',
'0DR34-2MU4ab',
'0DR24-2MU4ab',
'0DR24-CMU4ab-MU4ab',
'2INVM8-CMU4ab-MU4ab',
'0DR15-2MU4ab',
'0DR15-MU6ab-MU4ab',
'0DR22-MU6ab-MU4ab',
'8INVM15-MU6ab-MU4ab',
'8INVM15-2MU6ab',
'0INVM9-EM7ab-EMab',
'2INVM8-2MU4ab', # ATR-15197 '2INVM9-2MU4ab',
'2INVM8-MU6ab-MU4ab', # ATR-15197 '2INVM9-MU6ab-MU4ab',
'2INVM9-2MU4ab',
'2INVM9-MU6ab-MU4ab',
'KF-XE40-AJall',
'KF-XE50-AJall',
'KF-XE55-AJall',
'KF-XE60-AJall',
'KF-XE65-AJall',
'KF-XE75-AJall',
'LATE-MU10s1',
'SC111-CJ15ab.ETA26',
'SC85-CJ15ab.ETA26',
# ALFA (the replication is needed to build all the combinations in the CTPCore)
'ALFA_B7R1L', 'ALFA_B7R1U', 'ALFA_A7R1L', 'ALFA_A7R1U', 'ALFA_A7L1L', 'ALFA_A7L1U', 'ALFA_B7L1L', 'ALFA_B7L1U',
'ALFA2_B7R1L', 'ALFA2_B7R1U', 'ALFA2_A7R1L', 'ALFA2_A7R1U', 'ALFA2_A7L1L', 'ALFA2_A7L1U', 'ALFA2_B7L1L', 'ALFA2_B7L1U',
'ALFA3_B7R1L', 'ALFA3_B7R1U', 'ALFA3_A7R1L', 'ALFA3_A7R1U', 'ALFA3_A7L1L', 'ALFA3_A7L1U', 'ALFA3_B7L1L', 'ALFA3_B7L1U',
'ALFA4_B7R1L', 'ALFA4_B7R1U', 'ALFA4_A7R1L', 'ALFA4_A7R1U', 'ALFA4_A7L1L', 'ALFA4_A7L1U', 'ALFA4_B7L1L', 'ALFA4_B7L1U',
#ATR-13743
'ALFA_B7R1L_OD', 'ALFA_B7R1U_OD', 'ALFA_A7R1L_OD', 'ALFA_A7R1U_OD', 'ALFA_A7L1L_OD', 'ALFA_A7L1U_OD', 'ALFA_B7L1L_OD', 'ALFA_B7L1U_OD',
]
Lvl1Flags.items = [
'L1_EM7',
'L1_EM8',
'L1_EM10',
'L1_EM12',
'L1_EM14',
'L1_EM16',
'L1_EM18',
'L1_EM20',
'L1_EM22',
'L1_2EM10',
#'L1_2EM5',
'L1_2EM7',
# tau beam items
#'L1_TAU8',
#muons
#'L1_MU0',
'L1_MU11',
'L1_MU15',
'L1_MU20',
'L1_MU4',
'L1_MU6',
#'L1_2MU0',
'L1_2MU4',
#'L1_2MU0_MU6',
'L1_2MU11',
'L1_2MU20',
'L1_2MU6',
'L1_3MU6',
#jets
'L1_J12',
'L1_J15',
'L1_J20',
'L1_J30',
#'L1_J35',
'L1_J50',
'L1_J75',
'L1_J175',
#jet energy
'L1_2J15',
#'L1_JE200',
#'L1_JE300',
#'L1_JE500',
# forward jets
'L1_J10.31ETA49',
#'L1_FJ30',
#'L1_FJ55' ,
#'L1_FJ95',
#MinBias
'L1_LUCID_A',
'L1_LUCID_C',
'L1_LUCID_A_C',
'L1_LUCID',
#'L1_LUCID_COMM',
'L1_MBTS_2',
'L1_MBTS_2_BGRP9',
'L1_MBTS_2_BGRP11',
'L1_MBTS_2_EMPTY',
'L1_MBTS_2_UNPAIRED_ISO',
'L1_MBTS_2_UNPAIRED_NONISO',
'L1_MBTS_1',
'L1_MBTS_1_EMPTY',
'L1_MBTS_1_UNPAIRED_ISO',
'L1_MBTS_1_UNPAIRED_NONISO',
'L1_MBTS_1_1',
'L1_MBTS_2_2',
'L1_MBTS_3_3',
'L1_MBTS_4_4',
'L1_MBTS_1_1_BGRP11',
'L1_MBTS_1_1_VTE50',
'L1_MBTS_2_2_VTE50',
#TRT
'L1_TRT_FILLED',
'L1_TRT_EMPTY',
#randoms
'L1_RD0_BGRP9',
'L1_RD0_FILLED',
'L1_RD0_FIRSTEMPTY',
'L1_RD0_UNPAIRED_ISO',
'L1_RD0_EMPTY',
'L1_RD1_FILLED',
'L1_RD1_EMPTY',
#MET
#total energy
'L1_TE5',
'L1_TE20',
'L1_TE50',
'L1_TE100',
'L1_TE200',
'L1_TE10000',
'L1_TE12000',
'L1_TE14000',
'L1_TE5_NZ',
'L1_TE5_VTE200',
# restricted TE
'L1_TE500.0ETA49',
'L1_TE1500.0ETA49',
'L1_TE3000.0ETA49',
'L1_TE3500.0ETA49',
'L1_TE5000.0ETA49',
'L1_TE6500.0ETA49',
'L1_TE8000.0ETA49',
'L1_TE9000.0ETA49',
# restricted TE for overlay
'L1_TE500.0ETA49_OVERLAY',
'L1_TE1500.0ETA49_OVERLAY',
'L1_TE3000.0ETA49_OVERLAY',
'L1_TE3500.0ETA49_OVERLAY',
'L1_TE5000.0ETA49_OVERLAY',
'L1_TE6500.0ETA49_OVERLAY',
'L1_TE8000.0ETA49_OVERLAY',
'L1_TE9000.0ETA49_OVERLAY',
#Min Bias
'L1_ZDC',
'L1_ZDC_A',
'L1_ZDC_C',
'L1_ZDC_AND',
'L1_ZDC_AND_VTE50',
'L1_ZDC_A_C',
'L1_ZDC_A_C_BGRP11',
'L1_ZDC_A_C_OVERLAY',
'L1_ZDC_A_C_VTE50_OVERLAY',
'L1_TE50_OVERLAY',
'L1_ZDC_A_C_VTE50',
'L1_ZDC_A_C_TE50',
'L1_BCM_Wide',
'L1_BCM_HT_BGRP0','L1_BCM_Wide_BGRP0','L1_BCM_AC_CA_BGRP0',
'L1_ZDC_MBTS_1',
'L1_ZDC_MBTS_2',
'L1_ZDC_MBTS_1_1',
'L1_ZDC_MBTS_2_2',
'L1_ZDC_VTE200',
#ZDC one side
'L1_ZDC_A_VZDC_C_VTE200',
'L1_ZDC_C_VZDC_A_VTE200',
'L1_ZDC_XOR',
'L1_ZDC_XOR_VTE200',
'L1_ZDC_XOR_TE5_VTE200',
'L1_ZDC_XOR_TRT_VTE200',
#coincidence
'L1_ZDC_A_C_VTE200',
'L1_ZDC_A_C_TE5_VTE200',
#NIMDIR stuff
        # temporarily commented out in HI_v3: 'L1_NIM_S8C2B21', 'L1_NIM_S8C2B22', 'L1_NIM_S8C2B23',
        # vetoed by TE
#UPC triggers
#'L1_MU0_NZ',
'L1_J15_NZ',
#'L1_2MU0_NZ',
#'L1_2EM3_NZ',
'L1_2J15_NZ',
#'L1_MU0_NL',
#'L1_EM3_NL',
'L1_J15_NL',
#'L1_2MU0_NL',
#'L1_2EM3_NL',
'L1_2J15_NL',
#'L1_MU0_MV',
#'L1_2MU0_MV',
#'L1_MU0_MV_VTE50',
#'L1_MU0_VTE50',
#'L1_MU0_TE50',
'L1_MU4_MV_VTE50',
'L1_MU4_VTE50',
'L1_MU4_TE50',
#'L1_EM3_MV_VTE50',
#'L1_EM3_VTE50',
'L1_J12_VTE100',
'L1_J12_VTE200',
## VDM
'L1_ZDC_A_C_BGRP7','L1_LUCID_BGRP7','L1_BGRP7',
#MBTS 32 inputs
'L1_MBTSA0', 'L1_MBTSA1', 'L1_MBTSA2', 'L1_MBTSA3', 'L1_MBTSA4',
'L1_MBTSA5', 'L1_MBTSA6', 'L1_MBTSA7', 'L1_MBTSA8', ##'L1_MBTSA9', 11, 13, 15 not in run2 anymore
'L1_MBTSA10', ##'L1_MBTSA11',
'L1_MBTSA12', ##'L1_MBTSA13',
'L1_MBTSA14',##'L1_MBTSA15',
'L1_MBTSC0', 'L1_MBTSC1', 'L1_MBTSC2', 'L1_MBTSC3', 'L1_MBTSC4',
'L1_MBTSC5', 'L1_MBTSC6', 'L1_MBTSC7', 'L1_MBTSC8', ##'L1_MBTSC9',
'L1_MBTSC10', ##'L1_MBTSC11',
'L1_MBTSC12', ##'L1_MBTSC13',
'L1_MBTSC14',
##'L1_MBTSC15',
#background
'L1_EM7_UNPAIRED_ISO','L1_EM7_UNPAIRED_NONISO','L1_EM7_EMPTY','L1_EM7_FIRSTEMPTY',
#MU UNPAIRED-EMPTY-ETC
#'L1_MU0_UNPAIRED_ISO','L1_MU0_UNPAIRED_NONISO','L1_MU0_EMPTY','L1_MU0_FIRSTEMPTY',
'L1_MU4_UNPAIRED_ISO', 'L1_MU4_UNPAIRED_NONISO', 'L1_MU4_EMPTY',
'L1_MU4_FIRSTEMPTY',
'L1_MU6_FIRSTEMPTY','L1_MU11_EMPTY',
#'L1_2MU0_EMPTY',
'L1_2MU4_EMPTY',
'L1_2MU6_UNPAIRED_ISO','L1_2MU6_UNPAIRED_NONISO','L1_2MU6_EMPTY','L1_2MU6_FIRSTEMPTY',
#TAU UNPAIRED-EMPTY-ETC
'L1_TAU12_UNPAIRED_ISO','L1_TAU12_UNPAIRED_NONISO','L1_TAU12_EMPTY','L1_TAU12_FIRSTEMPTY',
#J UNPAIRED-EMPTY-ETC
'L1_J12_UNPAIRED_ISO','L1_J12_UNPAIRED_NONISO','L1_J12_EMPTY','L1_J12_FIRSTEMPTY',
'L1_J30_EMPTY', 'L1_J30_UNPAIRED', 'L1_J30_FIRSTEMPTY',
#FJ UNPAIRED-EMPTY-ETC
#'L1_FJ10_UNPAIRED_ISO', 'L1_FJ10_FIRSTEMPTY',
'L1_J10.31ETA49_EMPTY',
#ZDC
'L1_ZDC_EMPTY',
'L1_ZDC_UNPAIRED_ISO',
'L1_ZDC_UNPAIRED_NONISO',
#L1_ZDC_AND
'L1_ZDC_AND_EMPTY',
'L1_ZDC_AND_UNPAIRED_ISO',
'L1_ZDC_AND_UNPAIRED_NONISO',
#
'L1_ZDC_A_C_BGRP9',
'L1_ZDC_A_C_EMPTY',
'L1_ZDC_A_C_UNPAIRED_ISO',
'L1_ZDC_A_C_UNPAIRED_NONISO',
#MBTS
'L1_MBTS_1_1_BGRP9',
'L1_MBTS_1_1_EMPTY',
'L1_MBTS_2_2_EMPTY',
'L1_MBTS_3_3_EMPTY',
'L1_MBTS_1_1_UNPAIRED_ISO',
'L1_MBTS_2_2_UNPAIRED_ISO',
'L1_MBTS_3_3_UNPAIRED_ISO',
'L1_MBTS_4_4_UNPAIRED_ISO',
#LUCID
'L1_LUCID_EMPTY',
'L1_LUCID_UNPAIRED_ISO',#'L1_LUCID_COMM_UNPAIRED_ISO',
'L1_LUCID_A_C_EMPTY',
'L1_LUCID_A_C_UNPAIRED_ISO',
'L1_LUCID_A_C_UNPAIRED_NONISO',
#ZB
'L1_ZB',
# lumi measurements
'L1_MLZ_A', 'L1_MLZ_C', 'L1_MBLZ',
# BGRP and BPTX
'L1_BPTX0_BGRP0', 'L1_BPTX1_BGRP0',
'L1_BGRP0',
'L1_BGRP1',
#BCM
'L1_BCM_Wide_EMPTY','L1_BCM_Wide_UNPAIRED_ISO','L1_BCM_Wide_UNPAIRED_NONISO',
'L1_BCM_AC_CA_UNPAIRED_ISO',
######### Run-2 monitoring items taken from monitoring rules
'L1_BCM_AC_ABORTGAPNOTCALIB',
'L1_BCM_AC_CALIB',
'L1_BCM_AC_UNPAIRED_ISO',
'L1_BCM_AC_UNPAIRED_NONISO',
'L1_BCM_CA_ABORTGAPNOTCALIB',
'L1_BCM_CA_CALIB',
'L1_BCM_CA_UNPAIRED_ISO',
'L1_BCM_CA_UNPAIRED_NONISO',
'L1_BCM_Wide_ABORTGAPNOTCALIB',
'L1_BCM_Wide_CALIB',
'L1_J12_ABORTGAPNOTCALIB',
'L1_J12_BGRP12',
'L1_J30.31ETA49_UNPAIRED_ISO',
'L1_J30.31ETA49_UNPAIRED_NONISO',
'L1_J30.31ETA49_BGRP12',
'L1_J50_ABORTGAPNOTCALIB',
'L1_J50_UNPAIRED_ISO',
'L1_J50_UNPAIRED_NONISO',
'L1_CALREQ2',
'L1_EM10VH',
'L1_EM15',
'L1_EM15HI_2TAU12IM',
'L1_EM15HI_2TAU12IM_J25_3J12',
'L1_EM15HI_2TAU12IM_XE35',
'L1_EM15HI_TAU40_2TAU15',
#### NO-MU10 'L1_MU10_TAU12IM',
#### NO-MU10 'L1_MU10_TAU12IM_J25_2J12',
#### NO-MU10 'L1_MU10_TAU12IM_XE35',
#### NO-MU10 'L1_MU10_TAU12IM_XE40',
#### NO-MU10 'L1_MU10_TAU12_J25_2J12',
#### NO-MU10 'L1_MU10_TAU20',
#### NO-MU10 'L1_MU10_TAU20IM',
'L1_TAU12',
'L1_TAU12IL',
'L1_TAU12IM',
'L1_TAU12IT',
'L1_TAU20',
'L1_TAU20IL',
'L1_TAU20IL_2TAU12IL_J25_2J20_3J12',
'L1_TAU20IM',
'L1_TAU20IM_2J20_XE45',
'L1_TAU20IM_2J20_XE50',
'L1_TAU20IM_2TAU12IM',
'L1_TAU20IM_2TAU12IM_J25_2J20_3J12',
'L1_TAU20IM_2TAU12IM_XE35',
'L1_TAU20IM_2TAU12IM_XE40',
'L1_TAU20IT',
'L1_TAU20_2J20_XE45',
'L1_TAU20_2TAU12',
'L1_TAU20_2TAU12_XE35',
'L1_TAU25IT_2TAU12IT_2J25_3J12',
'L1_TAU30',
'L1_TAU40',
'L1_TAU60',
#'L1_TAU8',
'L1_EM20VH_FIRSTEMPTY',
'L1_EM20VHI',
'L1_EM7_EMPTY',
'L1_EM7_FIRSTEMPTY',
'L1_J100',
#'L1_J100.31ETA49',
#'L1_J100.31ETA49_FIRSTEMPTY',
'L1_J100_FIRSTEMPTY',
'L1_J30.31ETA49',
'L1_J30.31ETA49_EMPTY',
'L1_J40_XE50',
'L1_J75.31ETA49',
'L1_J75_XE40',
'L1_RD0_ABORTGAPNOTCALIB',
'L1_TGC_BURST',
'L1_XE35',
'L1_XE50',
#TOPO
'L1_LAR-EM',
'L1_LAR-J',
]
Lvl1Flags.CtpIdMap = {
'L1_EM18' : 0,
'L1_EM22' : 1,
'L1_EM7' : 2,
'L1_EM10' : 3,
'L1_EM12' : 4,
'L1_EM14' : 5,
'L1_2MU4_EMPTY' : 6,
'L1_RD0_UNPAIRED_NONISO' : 7,
'L1_BCM_AC_CA_UNPAIRED_NONISO': 8,
'L1_FJ10_UNPAIRED_NONISO' : 9,
'L1_2EM10' : 10,
#'L1_2EM5' : 11,
#'L1_MU0_VTE20' : 12,
'L1_LAR-EM' : 11,
'L1_LAR-J' : 12,
'L1_NIM_S8C2B21' : 13,#DIRNIM
'L1_NIM_S8C2B22' : 14,#DIRNIM
'L1_MBTS_4_4' : 15,
'L1_RD1_EMPTY': 16,
'L1_RD0_FILLED' : 17,
'L1_RD0_FIRSTEMPTY' : 18,
'L1_RD0_UNPAIRED_ISO' : 19,
'L1_MBTS_4_4_UNPAIRED_ISO': 20,
'L1_ZDC_AND_VTE50' : 27,
#'L1_EM3_VTE20': 28,
#'L1_MU0_MV_VTE50' : 34,
#'L1_MU0_VTE50' : 35,
'L1_ZDC_A_C_VTE50' : 36,
'L1_ZDC_A_C_UNPAIRED_NONISO' : 37,
'L1_MU4_VTE50' : 38,
'L1_MU4_MV_VTE50' : 39,
'L1_ZDC_A_C_OVERLAY' : 40,
#'L1_MU0_TE50' : 41,
'L1_ZDC_A_C_TE50' : 42,
'L1_MU4_TE50' : 43,
#'L1_EM3_VTE50' : 44,
#'L1_EM3_MV_VTE50' : 45,
'L1_J12_VTE100' : 46,
'L1_BGRP7' : 47,
'L1_LUCID_BGRP7' : 48,
'L1_MBTS_2_BGRP7' : 49,
'L1_MBTSC4' : 50,
'L1_MBTSC5' : 51,
'L1_MBTSC6' : 52,
'L1_MBTSC7' : 53,
'L1_MBTSC8' : 54,
'L1_MBTSC9' : 55,
'L1_MBTSC10' : 56,
'L1_MBTSC11' : 57,
'L1_MBTSC12' : 58,
'L1_MBTSC13' : 59,
'L1_MBTSC14' : 60,
'L1_MBTSC15' : 61,
'L1_RD0_EMPTY' : 62,
'L1_RD1_FILLED' : 63,
#'L1_TAU3' : 64,
'L1_MBTSC3' : 65,
'L1_MU4_FIRSTEMPTY' : 67,
'L1_MU6_FIRSTEMPTY' : 68,
#'L1_2MU0_EMPTY' : 69,
#'L1_MU0_FIRSTEMPTY' : 70,
'L1_2MU6' : 71,
'L1_2MU11' : 72,
'L1_2MU20' : 73,
'L1_MU11_EMPTY' : 75,
'L1_MBTSC2' : 76,
'L1_LUCID_UNPAIRED_NONISO' : 77,
'L1_BCM_Wide_BGRP0' : 80,
'L1_BCM_AC_CA_BGRP0' : 81,
'L1_MBTSC1' : 82,
'L1_J12_UNPAIRED' : 83,
'L1_EM20' : 84,
'L1_EM16' : 85,
'L1_MBTSC0' : 86,
'L1_J30_UNPAIRED' : 87,
'L1_MU15' : 88,
#'L1_MU0' : 89,
'L1_MU6' : 90,
'L1_MU11' : 91,
'L1_MU20' : 92,
'L1_MU4' : 93,
#'L1_2MU0' : 94,
'L1_2MU4' : 95,
'L1_J20' : 96,
#'L1_J12' : 97,
'L1_J15' : 98,
'L1_J30' : 99,
#'L1_J35' : 100,
'L1_J50' : 102,
'L1_J18' : 104,
#'L1_J5' : 105,
'L1_BCM_AC_CA_UNPAIRED_ISO' : 106,
'L1_BCM_Wide_EMPTY' : 107,
'L1_BCM_Wide_UNPAIRED_ISO' : 108,
'L1_L1_BCM_Wide_UNPAIRED_NONISO' : 109,
'L1_LUCID_UNPAIRED_ISO' : 113,
#'L1_TAU8_FIRSTEMPTY' : 114,
'L1_TAU8_UNPAIRED_ISO' : 115,
'L1_TAU8_UNPAIRED_NONISO' : 116,
'L1_ZDC_A_C_UNPAIRED_ISO' : 117,
'L1_MBTSA0' : 120,
'L1_MBTSA1' : 122,
'L1_FJ0' : 123,
'L1_2MU6_UNPAIRED_ISO' : 124,
'L1_2MU6_UNPAIRED_NONISO' : 125,
'L1_BCM_Wide_UNPAIRED_NONISO' : 126,
'L1_EM7_UNPAIRED_ISO' : 127,
'L1_EM7_UNPAIRED_NONISO' : 128,
'L1_J30_FIRSTEMPTY' : 130,
'L1_MBTSA2' : 131,
'L1_TE5' : 132,
'L1_TE14000' : 133,
'L1_TE20' : 134,
'L1_TE50' : 138,
'L1_TE100' : 136,
'L1_TE200' : 135,
'L1_MBTSA3' : 137,
'L1_2J5' : 139,
'L1_2J12' : 140,
'L1_TE12000' : 141,
'L1_TE10000' : 142,
'L1_2MU6_EMPTY' : 143,
'L1_2MU6_FIRSTEMPTY' : 144,
'L1_ZDC_MBTS_1' : 145,
'L1_ZDC_MBTS_2' : 146,
'L1_ZDC_MBTS_1_1' : 147,
'L1_ZDC_MBTS_2_2' : 148,
'L1_MBTS_1_EMPTY' : 149,
'L1_MBTS_1_1_EMPTY' : 150,
'L1_MBTS_2_EMPTY' : 151,
#'L1_TAU8_EMPTY' : 152,
'L1_MBTSA4' : 153,
'L1_MBTSA5' : 154,
'L1_MBTSA6' : 155,
'L1_MBTSA7' : 156,
'L1_NIM_S8C2B23' : 157,#DIRNIM
#'L1_MU0_UNPAIRED_NONISO' : 159,
#'L1_MU0_UNPAIRED_ISO' : 160,
'L1_MBTSA8' : 161,
#'L1_MU0_EMPTY' : 162,
'L1_MBTSA9' : 163,
'L1_MBTSA10' : 164,
'L1_MU4_UNPAIRED_NONISO': 165,
'L1_MU4_EMPTY' : 166,
'L1_MU4_UNPAIRED_ISO': 167,
'L1_MBTSA11' : 168,
#'L1_J10_EMPTY' : 171,
'L1_J30_EMPTY' : 172,
'L1_MBTSA12' : 173,
'L1_MBTSA13' : 174,
'L1_FJ0_EMPTY' : 175,
'L1_MBTSA14' : 176,
#'L1_EM3_EMPTY' : 177,
'L1_MBTSA15' : 178,
'L1_FJ0_UNPAIRED_ISO' : 180,
'L1_FJ5_UNPAIRED_ISO' : 181,
'L1_ZDC_UNPAIRED_ISO' : 182,
'L1_ZDC_UNPAIRED_NONISO' : 183,
#'L1_J12_EMPTY' : 184,
#'L1_J12_FIRSTEMPTY' : 185,
#'L1_J12_UNPAIRED_ISO' : 186,
#'L1_J12_UNPAIRED_NONISO' : 187,
'L1_ZDC_A_BGRP7' : 188,
'L1_ZDC_AND' : 189,
'L1_ZDC_A' : 190,
'L1_ZDC_C' : 191,
'L1_ZDC_A_C' : 192,
'L1_ZDC' : 193,
'L1_ZDC_C_BGRP7' : 194,
'L1_ZDC_A_C_EMPTY' : 196,
'L1_ZDC_EMPTY' : 197,
'L1_FJ5' : 198,
'L1_FJ10' : 199,
'L1_MBTS_1_UNPAIRED_ISO': 200,
'L1_MBTS_1_UNPAIRED_NONISO': 201,
'L1_MBTS_1_1_UNPAIRED_ISO': 202,
'L1_FJ15': 203,
'L1_MBTS_2_UNPAIRED_ISO': 204,
'L1_MBTS_2_UNPAIRED_NONISO': 205,
'L1_LUCID_A_C_UNPAIRED_NONISO': 206,
'L1_LUCID_A_C_UNPAIRED_ISO': 208,
'L1_LUCID_A_UNPAIRED': 209,
'L1_LUCID_C_UNPAIRED': 210,
'L1_LUCID_A_C_UNPAIRED': 211,
'L1_LUCID_A' : 212,
'L1_LUCID_C' : 213,
'L1_LUCID_A_C' : 214,
'L1_LUCID' : 215,
'L1_FJ5_EMPTY' : 218,
'L1_FJ0_C' : 219,
'L1_MBTS_2' : 222,
'L1_MBTS_2_2' : 223,
'L1_MBTS_3_3' : 224,
'L1_BCM_HT_BGRP0' : 225,
'L1_MBTS_1' : 226,
'L1_MBTS_1_1' : 228,
'L1_MBTS_2_2_EMPTY' : 229,
'L1_MBTS_3_3_EMPTY' : 230,
'L1_MBTS_2_2_UNPAIRED_ISO' : 231,
'L1_MBTS_3_3_UNPAIRED_ISO' : 232,
#'L1_J5_TE90' : 233,
'L1_2J5_TE90' : 234,
'L1_ZDC_A_C_BGRP11' : 235,
'L1_LUCID_A_EMPTY' : 236,
'L1_LUCID_C_EMPTY' : 237,
'L1_LUCID_A_C_EMPTY' : 238,
'L1_FJ0_A' : 239,
'L1_MBTS_1_1_BGRP11' : 240,
'L1_BPTX0_BGRP0' : 241,
'L1_BPTX1_BGRP0' : 242,
'L1_MBTS_2_BGRP9' : 243,
'L1_MBTS_1_1_BGRP9' : 244,
'L1_LUCID_EMPTY' : 245,
'L1_RD0_BGRP9' : 246,
'L1_LHCF' : 247,
'L1_ZDC_A_C_BGRP9' : 248,
'L1_MBTS_2_BGRP11' : 249,
'L1_ZB' : 250,
'L1_BGRP1' : 252,
#new in Run 2
'L1_ZDC_A_C_VTE50_OVERLAY' : 256,
'L1_TE50_OVERLAY' : 257,
'L1_J12_VTE200' : 258,
'L1_BCM_AC_ABORTGAPNOTCALIB': 259,
'L1_BCM_AC_CALIB': 260,
'L1_BCM_AC_UNPAIRED_ISO': 261,
'L1_BCM_AC_UNPAIRED_NONISO': 262,
'L1_BCM_CA_ABORTGAPNOTCALIB': 263,
'L1_BCM_CA_CALIB': 264,
'L1_BCM_CA_UNPAIRED_ISO': 265,
'L1_BCM_CA_UNPAIRED_NONISO': 266,
'L1_BCM_Wide_ABORTGAPNOTCALIB': 267,
'L1_BCM_Wide_CALIB': 268,
'L1_J12_ABORTGAPNOTCALIB': 269,
'L1_J12_UNPAIRED_ISO': 270,
'L1_J12_UNPAIRED_NONISO': 271,
'L1_J12_BGRP12': 493,
'L1_J30.31ETA49_UNPAIRED_ISO': 272,
'L1_J30.31ETA49_UNPAIRED_NONISO': 273,
'L1_J30.31ETA49_BGRP12': 494,
'L1_J50_ABORTGAPNOTCALIB': 274,
'L1_J50_UNPAIRED_ISO': 275,
'L1_J50_UNPAIRED_NONISO': 276,
#'L1_TAU8': 277,
'L1_TAU12': 278,
'L1_TAU12IL': 279,
'L1_TAU12IM': 280,
'L1_TAU12IT': 281,
'L1_TAU20': 282,
'L1_TAU20IL': 283,
'L1_TAU20IL_2TAU12IL_J25_2J20_3J12': 284,
'L1_TAU20IM': 285,
'L1_TAU20IM_2J20_XE45': 286,
'L1_TAU20IM_2J20_XE50': 287,
'L1_TAU20IM_2TAU12IM': 288,
'L1_TAU20IM_2TAU12IM_J25_2J20_3J12': 289,
'L1_TAU20IM_2TAU12IM_XE35': 290,
'L1_TAU20IM_2TAU12IM_XE40': 291,
'L1_TAU20IT': 292,
'L1_TAU20_2J20_XE45': 293,
'L1_TAU20_2TAU12': 294,
'L1_TAU20_2TAU12_XE35': 295,
'L1_TAU25IT_2TAU12IT_2J25_3J12': 296,
'L1_TAU30': 297,
'L1_TAU40': 298,
'L1_TAU60': 299,
'L1_EM20VH_FIRSTEMPTY': 300,
'L1_EM20VHI': 301,
'L1_EM7_EMPTY': 302,
'L1_EM7_FIRSTEMPTY': 303,
'L1_J100': 304,
#'L1_J100.31ETA49': 305,
#'L1_J100.31ETA49_FIRSTEMPTY': 306,
'L1_J100_FIRSTEMPTY': 307,
'L1_J12': 308,
'L1_J12_EMPTY': 309,
'L1_J12_FIRSTEMPTY': 310,
'L1_J30.31ETA49': 311,
'L1_J30.31ETA49_EMPTY': 312,
'L1_J40_XE50': 313,
'L1_J75.31ETA49': 314,
'L1_J75_XE40': 315,
'L1_RD0_ABORTGAPNOTCALIB': 316,
'L1_TGC_BURST': 317,
'L1_XE35': 318,
'L1_XE50': 319,
'L1_EM10VH': 320,
'L1_EM15': 321,
'L1_EM15HI_2TAU12IM': 322,
'L1_EM15HI_2TAU12IM_J25_3J12': 323,
'L1_EM15HI_2TAU12IM_XE35': 324,
'L1_EM15HI_TAU40_2TAU15': 325,
# restricted TE
'L1_TE500.0ETA49': 326,
'L1_TE1500.0ETA49': 327,
'L1_TE3000.0ETA49': 328,
'L1_TE3500.0ETA49': 329,
'L1_TE4500.0ETA49': 330,
'L1_TE6500.0ETA49': 331,
'L1_TE8000.0ETA49': 332,
'L1_TE9000.0ETA49': 333,
#'L1_J5': 334,
'L1_TE5_VTE200': 335,
# restricted TE for overlay
'L1_TE500.0ETA49_OVERLAY': 336,
'L1_TE1500.0ETA49_OVERLAY': 337,
'L1_TE3000.0ETA49_OVERLAY': 338,
'L1_TE3500.0ETA49_OVERLAY': 339,
'L1_TE4500.0ETA49_OVERLAY': 340,
        'L1_TE6500.0ETA49_OVERLAY': 341,
'L1_TE8000.0ETA49_OVERLAY': 342,
'L1_TE9000.0ETA49_OVERLAY': 343,
'L1_EM8': 344,
'L1_2EM7': 345,
'L1_CALREQ2' : 511,
'L1_TRT_FILLED' : 482,
'L1_TRT_EMPTY' : 483,
'L1_ZDC_A_VZDC_C_VTE200' : 484,
'L1_ZDC_C_VZDC_A_VTE200' : 485,
'L1_ZDC_XOR' : 486,
'L1_ZDC_XOR_VTE200' : 487,
'L1_ZDC_XOR_TE5_VTE200' : 488,
'L1_ZDC_XOR_TRT_VTE200' : 489,
'L1_ZDC_VTE200' : 490,
'L1_ZDC_A_C_VTE200' : 491,
'L1_ZDC_A_C_TE5_VTE200' : 492,
}
Lvl1Flags.prescales = {}
| ["[email protected]"] | |
7c30ca77ff7ab8d16b8eccdf763b818abbd72e45 | ac810c7e637afd67cf19704a1a724eaac56fed93 | /Hackerrank_python/4.sets/30.Symmetric Difference.py | 880ecccb9397cfde54a05b96340d08a4c960acc0 | ["MIT"] | permissive | Kushal997-das/Hackerrank | 57e8e422d2b47d1f2f144f303a04f32ca9f6f01c | 1256268bdc818d91931605f12ea2d81a07ac263a | refs/heads/master | 2021-10-28T06:27:58.153073 | 2021-10-18T04:11:18 | 2021-10-18T04:11:18 | 298,875,299 | 41 | 8 | MIT | 2021-03-01T04:40:57 | 2020-09-26T18:26:19 | Python | UTF-8 | Python | false | false | 194 | py |

# Enter your code here. Read input from STDIN. Print output to STDOUT
M = input()  # count of elements in the first set (consumed but not otherwise needed)
x = set(map(int, input().split()))
N = input()  # count of elements in the second set
y = set(map(int, input().split()))
f = x ^ y  # symmetric difference: values in exactly one of the two sets
for i in sorted(f):
    print(i)
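
# Worked example (illustrative): with sets {2, 4, 5, 9} and {2, 4, 11, 12},
# x ^ y = {5, 9, 11, 12}, printed one value per line in ascending order.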
| ["[email protected]"] | |
546f4b0a7b9e573b93414313b979be1eeb48b1b5 | b43c6c03eea348d68d6582c3594760bbe0ecaa08 | /gitlab/tests/conftest.py | 929f128062588569e26097e5ee90aebcc993b89f | ["MIT"] | permissive | imsardine/learning | 1b41a13a4c71c8d9cdd8bd4ba264a3407f8e05f5 | 925841ddd93d60c740a62e12d9f57ef15b6e0a20 | refs/heads/master | 2022-12-22T18:23:24.764273 | 2020-02-21T01:35:40 | 2020-02-21T01:35:40 | 24,145,674 | 0 | 0 | MIT | 2022-12-14T20:43:28 | 2014-09-17T13:24:37 | Python | UTF-8 | Python | false | false | 2,278 | py |

import os
from os import path
from subprocess import Popen, PIPE
import pytest
class DataFileHelper(object):
def __init__(self, base_dir):
self._base_dir = base_dir
def abspath(self, fn):
return path.join(self._base_dir, fn)
def relpath(self, fn):
return path.relpath(self.abspath(fn)) # relative to CWD
def read(self, fn, encoding=None):
with open(self.abspath(fn), 'rb') as f:
data = f.read()
return data.decode(encoding) if encoding else data
def json(self, fn, encoding='utf-8'):
import json
return json.loads(self.read(fn, encoding))
class CommandLine(object):
def __init__(self, base_dir):
self._base_dir = base_dir
def run(self, cmdline, cwd=None):
_cwd = os.getcwd()
assert path.isabs(_cwd), _cwd
os.chdir(self._base_dir)
if cwd:
os.chdir(cwd) # absolute or relative to base dir
try:
p = Popen(cmdline, stdout=PIPE, stderr=PIPE, shell=True)
out, err = p.communicate()
return CommandLineResult(
out.decode('utf-8'), err.decode('utf-8'), p.returncode)
finally:
os.chdir(_cwd)
class CommandLineResult(object):
def __init__(self, out, err, rc):
self.out = out
self.err = err
self.rc = rc
@pytest.fixture
def testdata(request):
base_dir = path.dirname(request.module.__file__)
return DataFileHelper(base_dir)
@pytest.fixture
def cli(request):
base_dir = path.dirname(request.module.__file__)
return CommandLine(base_dir)
import urllib, urllib2
import json
class GitLabAPI():
def __init__(self, url, access_token):
self._url = url
self._access_token = access_token
def _request(self, endpoint):
request = urllib2.Request(self._url + endpoint)
request.add_header('Private-Token', self._access_token)
return request
def get(self, endpoint, params={}):
qs = urllib.urlencode(params)
resp = urllib2.urlopen(self._request(endpoint + '?' + qs))
return json.loads(resp.read())
@pytest.fixture
def gitlab():
return GitLabAPI(
os.environ['GITLAB_URL'],
os.environ['GITLAB_ACCESS_TOKEN'])
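
# Illustrative sketch (not part of the original file): a test module in this suite
# could consume the fixtures above roughly like this. The endpoint is only an
# example, and GITLAB_URL / GITLAB_ACCESS_TOKEN must be set in the environment.
#
# def test_list_projects(gitlab):
#     projects = gitlab.get('/api/v4/projects', {'per_page': 5})
#     assert isinstance(projects, list)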
| ["[email protected]"] | |
e4b9f8ab5bb19544e331e60d7ba9441168e86c0f | 3c9727c4b5a89684b861fa90424e43c5a914ea45 | /Production/test/get_py.py | 5234fbfb2b6066772be282bce7ee1e8393f89862 | [] | no_license | vhegde91/TreeMaker | f51b453243081ccef0cfa721468ed7f7f9ca51f2 | e9dc3e3de793250980b29bebfef9b07c78bc97f7 | refs/heads/Run2 | 2021-08-11T03:45:45.430562 | 2018-04-11T14:30:28 | 2018-04-11T14:30:28 | 78,883,127 | 0 | 0 | null | 2017-01-13T20:14:01 | 2017-01-13T20:14:01 | null | UTF-8 | Python | false | false | 6,669 | py |

import re,sys,getopt,urllib2,json
from dbs.apis.dbsClient import DbsApi
from optparse import OptionParser
# Read parameters
parser = OptionParser()
parser.add_option("-d", "--dict", dest="dict", default="", help="check for samples listed in this dict (default = %default)")
parser.add_option("-p", "--py", dest="py", default=False, action="store_true", help="generate python w/ list of files (default = %default)")
parser.add_option("-w", "--wp", dest="wp", default=False, action="store_true", help="generate WeightProducer lines (default = %default)")
parser.add_option("-s", "--se", dest="se", default=False, action="store_true", help="make list of sites with 100% hosting (default = %default)")
(options, args) = parser.parse_args()
dictname = options.dict.replace(".py","");
flist = __import__(dictname).flist
makepy = options.py
makewp = options.wp
makese = options.se
if not makepy and not makewp and not makese:
parser.error("No operations selected!")
#interface with DBS
dbs3api = DbsApi("https://cmsweb.cern.ch/dbs/prod/global/DBSReader")
#format for dict entries:
# data: [['sample'] , []]
# MC: [['sample'] , [xsec]]
# MC w/ extended sample: [['sample','sample_ext'] , [xsec]]
# MC w/ negative weights (amcatnlo): [['sample'] , [xsec, neff]]
#MC w/ negative weights (amcatnlo) + extended sample: [['sample','sample_ext'] , [xsec, neff, neff_ext]]
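# Illustrative entry (hypothetical dataset names and numbers, not from this repo):
# flist = [
#     [['/TTJets_13TeV/RunII/MINIAODSIM'], [831.76]],                                     # plain MC
#     [['/DYJets_13TeV/.../MINIAODSIM', '/DYJets_13TeV_ext1/.../MINIAODSIM'],
#      [5765.4, 2.1e7, 1.8e7]],                                                           # amcatnlo MC + extension
# ]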
if makewp:
wname = "weights_"+dictname+".txt"
wfile = open(wname,'w')
if makese:
sname = "sites_"+dictname+".txt"
sfile = open(sname,'w')
for fitem in flist:
ff = fitem[0]
x = fitem[1]
nevents_all = []
for f in ff: # in case of extended samples
if makepy:
#get sample name
oname = f.split('/')[1]
#check for extended sample
extcheck = re.search("ext[0-9]",f.split('/')[2])
if not extcheck==None and len(extcheck.group(0))>0: oname = oname+"_"+extcheck.group(0)
#make python file with preamble
pfile = open(oname+"_cff.py",'w')
pfile.write("import FWCore.ParameterSet.Config as cms\n\n")
pfile.write("maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )\n")
pfile.write("readFiles = cms.untracked.vstring()\n")
pfile.write("secFiles = cms.untracked.vstring()\n")
pfile.write("source = cms.Source (\"PoolSource\",fileNames = readFiles, secondaryFileNames = secFiles)\n")
#get dataset info - detail only needed in makewp case
filelist = []
nevents = 0
print f
fileArrays = dbs3api.listFileArray(dataset=f,detail=makewp)
for fileArray in fileArrays:
if makepy:
filelist.append(fileArray["logical_file_name"])
if makewp:
nevents += fileArray["event_count"]
nevents_all.append(nevents)
# check for sites with 100% dataset presence (using PhEDEx API)
# refs:
# https://github.com/dmwm/DAS/blob/master/src/python/DAS/services/combined/combined_service.py
# https://github.com/gutsche/scripts/blob/master/PhEDEx/checkLocation.py
if makese:
url='https://cmsweb.cern.ch/phedex/datasvc/json/prod/blockreplicas?dataset=' + f
jstr = urllib2.urlopen(url).read()
jstr = jstr.replace("\n", " ")
result = json.loads(jstr)
site_list = {}
for block in result['phedex']['block']:
for replica in block['replica']:
site = replica['node']
addr = replica['se']
#safety checks
if site is None: continue
if addr is None: addr = ""
if (site,addr) not in site_list.keys(): site_list[(site,addr)] = 0
site_list[(site,addr)] += replica['files']
# get total number of expected files from DBS
nfiles_tot = len(fileArrays)
# calculate dataset fraction (presence) in % and check for completion
highest_percent = 0
for site,addr in site_list:
this_percent = float(site_list[(site,addr)])/float(nfiles_tot)*100
site_list[(site,addr)] = this_percent
if this_percent > highest_percent: highest_percent = this_percent
sfile.write(f+"\n")
if highest_percent < 100:
sfile.write(" !!! No site has complete dataset !!! ( Highest: "+str(highest_percent)+"% )\n")
for site,addr in site_list:
this_percent = site_list[(site,addr)]
if this_percent==highest_percent:
sfile.write(" "+site+" ("+addr+")\n")
if makepy:
#sort list of files for consistency
filelist.sort()
counter = 0
#split into chunks of 255
for lfn in filelist:
if counter==0: pfile.write("readFiles.extend( [\n")
pfile.write(" '"+lfn+"',\n")
if counter==254 or lfn==filelist[-1]:
pfile.write("] )\n")
counter = 0
else:
counter += 1
#only do weightproducer stuff for MC (w/ xsec provided)
if makewp and len(x)>0:
xsec = x[0]
nevents = nevents_all[0]
neff = 0
if len(x)>1: neff = x[1]
#handle combining extended samples
if len(ff)>1:
neff = sum(x[1:])
nevents = sum(nevents_all)
for i,f in enumerate(ff):
#make line for weightproducer
line = " MCSample(\""+f.split('/')[1]+"\", \""+"-".join(f.split('/')[2].split('-')[1:3])+"\", \""+f.split('/')[2].split('-')[0]+"\", \"Constant\", "+str(x[0])+", ";
if neff>0:
line = line+str(neff)+"),"
if len(ff)>1: line = line+" # subtotal = "+str(x[i+1])+", straight subtotal = "+str(nevents_all[i])+"\n"
else: line = line+" # straight total = "+str(nevents)+"\n"
else:
line = line+str(nevents)+"),"
if len(ff)>1: line = line+" # subtotal = "+str(nevents_all[i])+"\n"
else: line = line+"\n"
wfile.write(line)
| ["[email protected]"] | |
9ba94fdaa0336d97658bb817cac17daeacb40efa | 11841e8fb1e44c69ae7e50c0b85b324c4d90abda | /zipfile1.py | 57928210c0c0f271bff15ecb5d69c931b5a2dc9a3 | [] | no_license | chenlong2019/python | 1d7bf6fb60229221c79538234ad2f1a91bb03c50 | fc9e239754c5715a67cb6d743109800b64d74dc8 | refs/heads/master | 2020-12-08T11:11:49.951752 | 2020-01-10T04:58:29 | 2020-01-10T04:59:50 | 232,968,232 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py |

import os, zipfile
# Pack a directory into a zip file (stored, i.e. without compression)
def make_zip(source_dir, output_filename):
zipf = zipfile.ZipFile(output_filename, 'w')
for parent, dirnames, filenames in os.walk(source_dir):
for filename in filenames:
pathfile = os.path.join(parent, filename)
zipf.write(pathfile, filename)
zipf.close()
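
# Minimal variant sketch (assumption, not in the original file): pass ZIP_DEFLATED
# to actually compress the archive instead of just storing the files.
def make_zip_compressed(source_dir, output_filename):
    with zipfile.ZipFile(output_filename, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for parent, dirnames, filenames in os.walk(source_dir):
            for filename in filenames:
                zipf.write(os.path.join(parent, filename), filename)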
if __name__ == '__main__':
    make_zip("F:\\changshu\\state\\pm25\\PM252019_08_16_16", 'F:\\zip\\PM252019_08_190977_.zip')
| ["[email protected]"] | |
fcb806b070156928c2b03ad6d408e9055efc9a9a | 3cde5a749af89c9dc4d2aca3fb9bf7c56d9a4a7f | /website.py | 43a44ed6750624703414cd6a969170689fe73bba | [] | no_license | akrherz/kimthub | b211974c071f6ed5f2caa7349ba8ff8e2ec2f87b | 028894591841e83ddc35d2157fe4044049d20db8 | refs/heads/master | 2020-12-29T00:25:50.689178 | 2019-04-01T15:46:14 | 2019-04-01T15:46:14 | 16,999,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 654 | py |

# website tool to configure this application
from twisted.web import resource, server
class HomePage(resource.Resource):
def __init__(self, r):
resource.Resource.__init__(self)
self.r = r
def render(self, request):
s = self.r.dumpObs()
request.setHeader('Content-Length', len(s))
request.setHeader('Content-Type', 'text/plain')
request.setResponseCode(200)
request.write( s )
request.finish()
return server.NOT_DONE_YET
class RootResource(resource.Resource):
def __init__(self, r):
resource.Resource.__init__(self)
self.putChild('', HomePage(r))
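
# Illustrative wiring sketch (assumption, not in the original file): given an
# observation source `r` that implements dumpObs(), the site could be served with:
#
# from twisted.internet import reactor
# reactor.listenTCP(8080, server.Site(RootResource(r)))
# reactor.run()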
| ["[email protected]"] | |
148d3e817efcd11b28dcc6c13e49112239d6e335 | 39f95a7b4abe665f1b0e3a0f4b356db002ddce2e | /tests/test_exceptions.py | 3f14f4213f02a864daf23f5b39c6897b7609a298 | ["MIT"] | permissive | gitter-badger/tapioca-wrapper | d96a538071d44c36f93f0bbd7318510dfc9f7633 | 4e6dbd85da1a218d00f08fee84dfea29a83d61c3 | refs/heads/master | 2021-01-16T18:48:50.848519 | 2015-09-09T15:21:41 | 2015-09-09T15:21:41 | 42,362,017 | 0 | 0 | null | 2015-09-12T15:29:44 | 2015-09-12T15:29:44 | Python | UTF-8 | Python | false | false | 1,889 | py |

# coding: utf-8
from __future__ import unicode_literals
import unittest
import responses
import requests
from tapioca.exceptions import (
ClientError, ServerError, ResponseProcessException)
from tests.client import TesterClient, TesterClientAdapter
class TestExceptions(unittest.TestCase):
def setUp(self):
self.wrapper = TesterClient()
@responses.activate
def test_adapter_raises_response_process_exception_on_400s(self):
responses.add(responses.GET, self.wrapper.test().data(),
body='{"erros": "Server Error"}',
status=400,
content_type='application/json')
response = requests.get(self.wrapper.test().data())
with self.assertRaises(ResponseProcessException):
TesterClientAdapter().process_response(response)
@responses.activate
def test_adapter_raises_response_process_exception_on_500s(self):
responses.add(responses.GET, self.wrapper.test().data(),
body='{"erros": "Server Error"}',
status=500,
content_type='application/json')
response = requests.get(self.wrapper.test().data())
with self.assertRaises(ResponseProcessException):
TesterClientAdapter().process_response(response)
@responses.activate
def test_raises_request_error(self):
responses.add(responses.GET, self.wrapper.test().data(),
body='{"data": {"key": "value"}}',
status=400,
content_type='application/json')
with self.assertRaises(ClientError):
self.wrapper.test().get()
@responses.activate
def test_raises_server_error(self):
responses.add(responses.GET, self.wrapper.test().data(),
status=500,
content_type='application/json')
with self.assertRaises(ServerError):
self.wrapper.test().get()
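
# Usage note (illustrative, not part of the original file): the suite runs under
# standard unittest discovery, e.g. `python -m unittest tests.test_exceptions`.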
| ["[email protected]"] | |
1ce8eab6442ed03dd6f60806e1900e36fe0df0d2 | 1670dca534ef4fd7e8d9ca9e6d55b5885e4071f9 | /AlgoExpert/Day2.py | 4117f3dcd99612ad1bd1b4b1307b4ac6c8d06480 | [] | no_license | Tejas1510/Pythonary | 24512a6c5abfee17457397aa37849f3a5a739002 | 55c11f74d9f540bf696acecaa78febecd14d8422 | refs/heads/master | 2022-11-23T23:27:32.219513 | 2020-08-02T17:22:17 | 2020-08-02T17:22:17 | 264,151,076 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py |

#Modification of last Day Question
# Find a triplet that sums to a given value.
# The brute-force approach requires O(n^3).
# We solve it with the help of a hash map, for an O(n^2) approach.
# This question has been asked many times in FAANG company interviews.
def Solution(a, TargetSum):
    for i in range(0, len(a) - 1):
        nums = {}
        current_sum = TargetSum - a[i]
        # Only scan elements after index i, so a[i] itself is never reused
        # (the original range(1, len(a)) could pair an element with itself).
        for j in range(i + 1, len(a)):
            if current_sum - a[j] in nums:
                return [a[j], a[i], current_sum - a[j]]
            else:
                nums[a[j]] = True
    return -1

t = int(input())
for i in range(t):
    n = int(input())
    a = list(map(int, input().split()))
    TargetSum = int(input())
    a = Solution(a, TargetSum)
    if a == -1:
        print(-1)  # no triplet found; print(*a) would fail on an int
    else:
        print(*a)
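
# Worked example (illustrative): a = [1, 4, 45, 6, 10, 8], TargetSum = 22.
# When i sits on the value 4, current_sum = 18; scanning the remaining elements
# finds 10 + 8 = 18, so the function returns [8, 4, 10].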
| ["[email protected]"] | |
084af231761d48ccdf9950ed5fbab1a7a44f86ab | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/SjSzRZMR/YW_RZMR_SZSJ_150.py | 60204aa1a24e2cc122eacbff5931e061d9482cba | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,334 | py |

#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import json
sys.path.append("/home/yhl2/workspace/xtp_test")
from xtp.api.xtp_test_case import xtp_test_case, Api, unittest
from service.ServiceConfig import *
from financing.service.mainService import ParmIni, serviceTest
from financing.service.QueryStkPriceQty import QueryStkPriceQty
from service.log import *
from financing.service.CaseParmInsertMysql import *
from mysql.QueryOrderErrorMsg import queryOrderErrorMsg
reload(sys)
sys.setdefaultencoding('utf-8')
class YW_RZMR_SZSJ_150(xtp_test_case):
# YW_RZMR_SZSJ_150 YW_RZMR_SZSJ_150 YW_RZMR_SZSJ_150 YW_RZMR_SZSJ_150
def test_YW_RZMR_SZSJ_150(self):
        title = '对方最优转限价买——错误的价格(价格10亿)'  # Counterparty-best-to-limit buy: invalid price (1 billion)
        # Define the expected values for the current test case.
        # Expected status: initial, not filled, partially filled, fully filled, partial-cancel reported,
        # partially cancelled, reported and pending cancel, cancelled, rejected (invalid order),
        # cancel rejected, internal cancel.
        # xtp_ID and cancel_xtpID default to 0 and do not need to change.
case_goal = {
'期望状态': '全成',
'errorID': 0,
'errorMSG': '',
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameter information ------------------------------------------
        # Parameters: security code, market, security type, security status, trading status,
        # side (B = buy, S = sell), expected status, Api
stkparm = QueryStkPriceQty('999999', '2', '0', '2', '0', 'B', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails.
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_MARGIN'],
'order_client_id':2,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_MARGIN_TRADE'],
'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_REVERSE_BEST_LIMIT'],
'price': 1000000000,
'quantity': 200
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
if rs['用例测试结果']:
logger.warning('执行结果为{0}'.format(str(rs['用例测试结果'])))
else:
logger.warning('执行结果为{0},{1},{2}'.format(
str(rs['用例测试结果']), str(rs['用例错误源']),
json.dumps(rs['用例错误原因'], encoding='UTF-8', ensure_ascii=False)))
self.assertEqual(rs['用例测试结果'], True) # 0
if __name__ == '__main__':
unittest.main()
| ["[email protected]"] | |
f4a7e68daf70584049de24fcf0e3d0d1aa07c352 | 0ff99fc75b1f42811f72aa86f4b32d1e3f8b6b48 | /PR_inference/maskrcnn_benchmark/data/datasets/__init__.py | a9fdd2e7e4b7c500fbf9a66017cd0a5759e6581a | ["MIT"] | permissive | alwc/buildings2vec | f95a4468a0d5c21f2732c177474350e767d459f7 | bd5121e715bc28f6e88163f18407a762736d38d5 | refs/heads/master | 2022-04-25T05:26:50.053219 | 2020-04-28T20:49:49 | 2020-04-28T20:49:49 | 263,258,064 | 0 | 1 | null | 2020-05-12T06:57:24 | 2020-05-12T06:57:23 | null | UTF-8 | Python | false | false | 302 | py |

# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from .coco import COCODataset
from .voc import PascalVOCDataset
from .buildings import BuildingsDataset
from .concat_dataset import ConcatDataset
__all__ = ["COCODataset", "ConcatDataset", "PascalVOCDataset", "BuildingsDataset"]
| [
"[email protected]"
]
| |
346f9be15be33ed6d7a104aea444b4a2dc9ba754 | dccd1058e723b6617148824dc0243dbec4c9bd48 | /atcoder/abc083/c.py | 3bda8a9bf4626f9a95318a39ab49b5aaae251926 | []
| no_license | imulan/procon | 488e49de3bcbab36c624290cf9e370abfc8735bf | 2a86f47614fe0c34e403ffb35108705522785092 | refs/heads/master | 2021-05-22T09:24:19.691191 | 2021-01-02T14:27:13 | 2021-01-02T14:27:13 | 46,834,567 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | x,y = map(int,input().split())
ans = 0
while x<=y:
ans += 1
x *= 2
print(ans)
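
# Worked example (illustrative): x = 3, y = 20 -> the loop doubles 3 to 6, 12, 24,
# incrementing ans three times while x <= y, so the program prints 3.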
| ["[email protected]"] | |
c92eb73452c18a2297bb716fa73b4aeb74d7822b | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/223/users/4182/codes/1644_2703.py | 494d16f9af8086445803a09cef9f1f6afbb7ded8 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 108 | py |

idade = int(input())
if (idade >= 18):
mensagem = "eleitor"
else:
mensagem = "nao_eleitor"
print(mensagem)
| ["[email protected]"] | |
59aeeb5cfbbd52e95cf09691c8180bb4633af9c4 | 321b4ed83b6874eeb512027eaa0b17b0daf3c289 | /606/606.construct-string-from-binary-tree.234546044.Wrong-Answer.leetcode.py | 30bee179f09e3deb04c3c0ab49b7e971a008aac3 | [] | no_license | huangyingw/submissions | 7a610613bdb03f1223cdec5f6ccc4391149ca618 | bfac1238ecef8b03e54842b852f6fec111abedfa | refs/heads/master | 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 588 | py |

class Solution:
    def tree2str(self, t):
        if not t:
            return ''
        # Keep an explicit "()" for a missing left child whenever a right child
        # exists, so the string stays unambiguous.
        left = '({})'.format(self.tree2str(t.left)) if (t.left or t.right) else ''
        right = '({})'.format(self.tree2str(t.right)) if t.right else ''
        return '{}{}{}'.format(t.val, left, right)

    # NOTE: this second definition has the same name, so it shadows the correct
    # version above. Its trailing replace("()", "") also strips the required
    # placeholder for a missing left child, which is why this submission is
    # marked Wrong-Answer.
    def tree2str(self, t):
        if not t:
            return ""
        subleft = "(%s)" % (self.tree2str(t.left) if t.left or t.right else "")
        subright = "(%s)" % (self.tree2str(t.right) if t.right else "")
        return ("%s%s%s" % (str(t.val), subleft, subright)).replace("()", "")
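
# Worked example (illustrative): for the tree [1, 2, 3, None, 4], the expected
# answer is "1(2()(4))(3)"; the shadowing method returns "1(2(4))(3)" because
# the empty "()" marking the missing left child gets stripped.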
| ["[email protected]"] | |
05048d5f830df5ed0b5e43b5e6473d8c7b7d7246 | 0ff87e0a84dd8b9a198cebb59a5130fa7765b9dd | /tests/test_backtest.py | 606de235aebb5127e7941c9e643a0564ca164f4f | ["Apache-2.0"] | permissive | dxcv/moonshot | 470caf28cdb3bc5cd5864596e69875bf1810d05d | ca05aa347b061db05c0da221e80b125a5e9ddea1 | refs/heads/master | 2020-05-31T04:40:43.638058 | 2019-03-28T17:07:04 | 2019-03-28T17:07:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 76,137 | py |

# Copyright 2018 QuantRocket LLC - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To run: python3 -m unittest discover -s tests/ -p test_*.py -t . -v
import os
import unittest
from unittest.mock import patch
import glob
import pandas as pd
from moonshot import Moonshot
from moonshot.cache import TMP_DIR
class BacktestTestCase(unittest.TestCase):
def tearDown(self):
"""
Remove cached files.
"""
for file in glob.glob("{0}/moonshot*.pkl".format(TMP_DIR)):
os.remove(file)
def test_complain_if_prices_to_signals_not_implemented(self):
"""
Tests error handling when prices_to_signals hasn't been implemented.
"""
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03"])
fields = ["Close","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9,
11,
10.50,
# Volume
5000,
16000,
8800
],
23456: [
# Close
12,
11,
8.50,
# Volume
15000,
14000,
28800
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
with self.assertRaises(NotImplementedError) as cm:
Moonshot().backtest()
self.assertIn("strategies must implement prices_to_signals", repr(cm.exception))
def test_basic_long_only_strategy(self):
"""
Tests that the resulting DataFrames are correct after running a basic
long-only strategy that largely relies on the default methods.
"""
class BuyBelow10(Moonshot):
"""
A basic test strategy that buys below 10.
"""
def prices_to_signals(self, prices):
signals = prices.loc["Close"] < 10
return signals.astype(int)
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
fields = ["Close","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9,
11,
10.50,
9.99,
# Volume
5000,
16000,
8800,
9900
],
23456: [
# Close
9.89,
11,
8.50,
10.50,
# Volume
15000,
14000,
28800,
17000
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10().backtest()
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
# replace nan with "nan" to allow equality comparisons
results = results.round(7)
results = results.where(results.notnull(), "nan")
signals = results.loc["Signal"].reset_index()
signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
signals.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.0,
0.0,
0.0,
1.0],
23456: [1.0,
0.0,
1.0,
0.0]}
)
weights = results.loc["Weight"].reset_index()
weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.5,
0.0,
0.0,
1.0],
23456: [0.5,
0.0,
1.0,
0.0]}
)
abs_weights = results.loc["AbsWeight"].reset_index()
abs_weights.loc[:, "Date"] = abs_weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.5,
0.0,
0.0,
1.0],
23456: [0.5,
0.0,
1.0,
0.0]}
)
net_positions = results.loc["NetExposure"].reset_index()
net_positions.loc[:, "Date"] = net_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
net_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
0.0,
0.0],
23456: ["nan",
0.5,
0.0,
1.0]}
)
abs_positions = results.loc["AbsExposure"].reset_index()
abs_positions.loc[:, "Date"] = abs_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
0.0,
0.0],
23456: ["nan",
0.5,
0.0,
1.0]}
)
total_holdings = results.loc["TotalHoldings"].reset_index()
total_holdings.loc[:, "Date"] = total_holdings.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
total_holdings.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0,
1.0,
0,
0],
23456: [0,
1.0,
0,
1.0]}
)
turnover = results.loc["Turnover"].reset_index()
turnover.loc[:, "Date"] = turnover.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
turnover.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
0.5,
0.0],
23456: ["nan",
0.5,
0.5,
1.0]}
)
commissions = results.loc["Commission"].reset_index()
commissions.loc[:, "Date"] = commissions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
commissions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0]}
)
slippage = results.loc["Slippage"].reset_index()
slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
slippage.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0]}
)
returns = results.loc["Return"]
returns = returns.reset_index()
returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
returns.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
-0.0227273, # (10.50 - 11)/11 * 0.5
-0.0],
23456: [0.0,
0.0,
-0.1136364, # (8.50 - 11)/11 * 0.5
0.0]}
)
def test_basic_long_short_strategy(self):
"""
Tests that the resulting DataFrames are correct after running a basic
long-short strategy that largely relies on the default methods.
"""
class BuyBelow10ShortAbove10(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
fields = ["Close","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9,
11,
10.50,
9.99,
# Volume
5000,
16000,
8800,
9900
],
23456: [
# Close
9.89,
11,
8.50,
10.50,
# Volume
15000,
14000,
28800,
17000
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10ShortAbove10().backtest()
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
# replace nan with "nan" to allow equality comparisons
results = results.round(7)
results = results.where(results.notnull(), "nan")
signals = results.loc["Signal"].reset_index()
signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
signals.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.0,
-1.0,
-1.0,
1.0],
23456: [1.0,
-1.0,
1.0,
-1.0]}
)
weights = results.loc["Weight"].reset_index()
weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.5,
-0.5,
-0.5,
0.5],
23456: [0.5,
-0.5,
0.5,
-0.5]}
)
abs_weights = results.loc["AbsWeight"].reset_index()
abs_weights.loc[:, "Date"] = abs_weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.5,
0.5,
0.5,
0.5],
23456: [0.5,
0.5,
0.5,
0.5]}
)
net_positions = results.loc["NetExposure"].reset_index()
net_positions.loc[:, "Date"] = net_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
net_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
-0.5,
-0.5],
23456: ["nan",
0.5,
-0.5,
0.5]}
)
abs_positions = results.loc["AbsExposure"].reset_index()
abs_positions.loc[:, "Date"] = abs_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
0.5,
0.5],
23456: ["nan",
0.5,
0.5,
0.5]}
)
total_holdings = results.loc["TotalHoldings"].reset_index()
total_holdings.loc[:, "Date"] = total_holdings.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
total_holdings.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0,
1.0,
1.0,
1.0],
23456: [0,
1.0,
1.0,
1.0]}
)
turnover = results.loc["Turnover"].reset_index()
turnover.loc[:, "Date"] = turnover.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
turnover.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
1.0,
0.0],
23456: ["nan",
0.5,
1.0,
1.0]}
)
commissions = results.loc["Commission"].reset_index()
commissions.loc[:, "Date"] = commissions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
commissions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0]}
)
slippage = results.loc["Slippage"].reset_index()
slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
slippage.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0]}
)
returns = results.loc["Return"]
returns = returns.reset_index()
returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
returns.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
-0.0227273, # (10.50 - 11)/11 * 0.5
0.0242857], # (9.99 - 10.50)/10.50 * -0.5
23456: [0.0,
0.0,
-0.1136364, # (8.50 - 11)/11 * 0.5
-0.1176471] # (10.50 - 8.50)/8.50 * -0.5
}
)
def test_long_short_strategy_override_methods(self):
"""
Tests that the resulting DataFrames are correct after running a
long-short strategy that overrides the major backtesting methods.
"""
class BuyBelow10ShortAbove10Overnight(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10 and holds overnight.
"""
def prices_to_signals(self, prices):
long_signals = prices.loc["Open"] <= 10
short_signals = prices.loc["Open"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
def signals_to_target_weights(self, signals, prices):
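# allocate_fixed_weights scales each signal to a fixed-size weight
# (0.25 here), keeping the signal's sign.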
weights = self.allocate_fixed_weights(signals, 0.25)
return weights
def target_weights_to_positions(self, weights, prices):
# enter on close same day
positions = weights.copy()
return positions
def positions_to_gross_returns(self, positions, prices):
# hold from today's close until the next day's open
closes = prices.loc["Close"]
opens = prices.loc["Open"]
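# The position entered at yesterday's close earns the close-to-open move:
# return[t] = (open[t] - close[t-1]) / close[t-1] * position[t-1]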
pct_changes = (opens - closes.shift()) / closes.shift()
gross_returns = pct_changes * positions.shift()
return gross_returns
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
fields = ["Close","Open","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9.6,
10.45,
10.23,
8.67,
# Open
9,
11,
10.50,
9.99,
# Volume
5000,
16000,
8800,
9900
],
23456: [
# Close
10.56,
12.01,
10.50,
9.80,
# Open
9.89,
11,
8.50,
10.50,
# Volume
15000,
14000,
28800,
17000
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10ShortAbove10Overnight().backtest()
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
# replace nan with "nan" to allow equality comparisons
results = results.round(7)
results = results.where(results.notnull(), "nan")
signals = results.loc["Signal"].reset_index()
signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
signals.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.0,
-1.0,
-1.0,
1.0],
23456: [1.0,
-1.0,
1.0,
-1.0]}
)
weights = results.loc["Weight"].reset_index()
weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.25,
-0.25,
-0.25,
0.25],
23456: [0.25,
-0.25,
0.25,
-0.25]}
)
abs_weights = results.loc["AbsWeight"].reset_index()
abs_weights.loc[:, "Date"] = abs_weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.25,
0.25,
0.25,
0.25],
23456: [0.25,
0.25,
0.25,
0.25]}
)
net_positions = results.loc["NetExposure"].reset_index()
net_positions.loc[:, "Date"] = net_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
net_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.25,
-0.25,
-0.25,
0.25],
23456: [0.25,
-0.25,
0.25,
-0.25]}
)
abs_positions = results.loc["AbsExposure"].reset_index()
abs_positions.loc[:, "Date"] = abs_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.25,
0.25,
0.25,
0.25],
23456: [0.25,
0.25,
0.25,
0.25]}
)
total_holdings = results.loc["TotalHoldings"].reset_index()
total_holdings.loc[:, "Date"] = total_holdings.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
total_holdings.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.0,
1.0,
1.0,
1.0],
23456: [1.0,
1.0,
1.0,
1.0]}
)
turnover = results.loc["Turnover"].reset_index()
turnover.loc[:, "Date"] = turnover.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
turnover.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
0.5,
0.0,
0.5],
23456: ["nan",
0.5,
0.5,
0.5]}
)
commissions = results.loc["Commission"].reset_index()
commissions.loc[:, "Date"] = commissions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
commissions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0]}
)
slippage = results.loc["Slippage"].reset_index()
slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
slippage.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0]}
)
returns = results.loc["Return"]
returns = returns.reset_index()
returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
returns.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0364583, # (11 - 9.6)/9.6 * 0.25
-0.0011962, # (10.50 - 10.45)/10.45 * -0.25
0.0058651], # (9.99 - 10.23)/10.23 * -0.25
23456: [0.0,
0.0104167,# (11 - 10.56)/10.56 * 0.25
0.0730641, # (8.50 - 12.01)/12.01 * -0.25
0.0] # (10.50 - 10.50)/10.50 * 0.25
}
)
def test_short_only_once_a_day_intraday_strategy(self):
"""
Tests that the resulting DataFrames are correct after running a
short-only intraday strategy.
"""
class ShortAbove10Intraday(Moonshot):
"""
A basic test strategy that shorts above 10 and holds intraday.
"""
POSITIONS_CLOSED_DAILY = True
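# Positions are treated as closed out each session, so the Turnover
# assertions below count entry plus exit (2 x 0.25 = 0.5 per trade day).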
def prices_to_signals(self, prices):
morning_prices = prices.loc["Open"].xs("09:30:00", level="Time")
short_signals = morning_prices > 10
return -short_signals.astype(int)
def signals_to_target_weights(self, signals, prices):
weights = self.allocate_fixed_weights(signals, 0.25)
return weights
def target_weights_to_positions(self, weights, prices):
# enter on same day
positions = weights.copy()
return positions
def positions_to_gross_returns(self, positions, prices):
# hold from the 09:30 close to the 15:30 close
closes = prices.loc["Close"]
entry_prices = closes.xs("09:30:00", level="Time")
exit_prices = closes.xs("15:30:00", level="Time")
pct_changes = (exit_prices - entry_prices) / entry_prices
gross_returns = pct_changes * positions
return gross_returns
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03"])
fields = ["Close","Open"]
times = ["09:30:00", "15:30:00"]
idx = pd.MultiIndex.from_product(
[fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
12345: [
# Close
9.6,
10.45,
10.12,
15.45,
8.67,
12.30,
# Open
9.88,
10.34,
10.23,
16.45,
8.90,
11.30,
],
23456: [
# Close
10.56,
12.01,
10.50,
9.80,
13.40,
14.50,
# Open
9.89,
11,
8.50,
10.50,
14.10,
15.60
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = ShortAbove10Intraday().backtest()
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
# replace nan with "nan" to allow equality comparisons
results = results.round(7)
results = results.where(results.notnull(), "nan")
signals = results.loc["Signal"].reset_index()
signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
signals.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
-1.0,
0.0],
23456: [0.0,
0.0,
-1.0]}
)
weights = results.loc["Weight"].reset_index()
weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
-0.25,
0.0],
23456: [0.0,
0.0,
-0.25]}
)
abs_weights = results.loc["AbsWeight"].reset_index()
abs_weights.loc[:, "Date"] = abs_weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
0.25,
0.0],
23456: [0.0,
0.0,
0.25]}
)
net_positions = results.loc["NetExposure"].reset_index()
net_positions.loc[:, "Date"] = net_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
net_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
-0.25,
0.0],
23456: [0.0,
0.0,
-0.25]}
)
abs_positions = results.loc["AbsExposure"].reset_index()
abs_positions.loc[:, "Date"] = abs_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
0.25,
0.0],
23456: [0.0,
0.0,
0.25]}
)
total_holdings = results.loc["TotalHoldings"].reset_index()
total_holdings.loc[:, "Date"] = total_holdings.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
total_holdings.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
1.0,
0.0],
23456: [0.0,
0.0,
1.0]}
)
turnover = results.loc["Turnover"].reset_index()
turnover.loc[:, "Date"] = turnover.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
turnover.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
0.5,
0.0],
23456: [0.0,
0.0,
0.5]}
)
commissions = results.loc["Commission"].reset_index()
commissions.loc[:, "Date"] = commissions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
commissions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0]}
)
slippage = results.loc["Slippage"].reset_index()
slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
slippage.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0]}
)
returns = results.loc["Return"]
returns = returns.reset_index()
returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
returns.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00'],
12345: [0.0,
-0.13167, # (15.45 - 10.12)/10.12 * -0.25
0.0],
23456: [0.0,
0.0,
-0.0205224] # (14.50 - 13.40)/13.40 * -0.25
}
)
def test_continuous_intraday_strategy(self):
"""
Tests that the resulting DataFrames are correct after running a
long-short continuous intraday strategy.
"""
class BuyBelow10ShortAbove10ContIntraday(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02"])
fields = ["Close"]
times = ["10:00:00", "11:00:00", "12:00:00"]
idx = pd.MultiIndex.from_product([fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
12345: [
# Close
9.6,
10.45,
10.12,
15.45,
8.67,
12.30,
],
23456: [
# Close
10.56,
12.01,
10.50,
9.80,
13.40,
7.50,
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 hour'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10ShortAbove10ContIntraday().backtest()
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
# replace nan with "nan" to allow equality comparisons
results = results.round(7)
results = results.where(results.notnull(), "nan")
signals = results.loc["Signal"].reset_index()
signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
signals.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [1.0,
-1.0,
-1.0,
-1.0,
1.0,
-1.0],
23456: [-1.0,
-1.0,
-1.0,
1.0,
-1.0,
1.0]}
)
weights = results.loc["Weight"].reset_index()
weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [0.5,
-0.5,
-0.5,
-0.5,
0.5,
-0.5],
23456: [-0.5,
-0.5,
-0.5,
0.5,
-0.5,
0.5]}
)
abs_weights = results.loc["AbsWeight"].reset_index()
abs_weights.loc[:, "Date"] = abs_weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [0.5,
0.5,
0.5,
0.5,
0.5,
0.5],
23456: [0.5,
0.5,
0.5,
0.5,
0.5,
0.5]}
)
net_positions = results.loc["NetExposure"].reset_index()
net_positions.loc[:, "Date"] = net_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
net_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: ['nan',
0.5,
-0.5,
-0.5,
-0.5,
0.5],
23456: ['nan',
-0.5,
-0.5,
-0.5,
0.5,
-0.5]}
)
abs_positions = results.loc["AbsExposure"].reset_index()
abs_positions.loc[:, "Date"] = abs_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: ['nan',
0.5,
0.5,
0.5,
0.5,
0.5],
23456: ['nan',
0.5,
0.5,
0.5,
0.5,
0.5]}
)
total_holdings = results.loc["TotalHoldings"].reset_index()
total_holdings.loc[:, "Date"] = total_holdings.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
total_holdings.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [0,
1.0,
1.0,
1.0,
1.0,
1.0],
23456: [0,
1.0,
1.0,
1.0,
1.0,
1.0]}
)
turnover = results.loc["Turnover"].reset_index()
turnover.loc[:, "Date"] = turnover.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
turnover.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: ['nan',
0.5,
1.0,
0.0,
0.0,
1.0],
23456: ['nan',
0.5,
0.0,
0.0,
1.0,
1.0]}
)
commissions = results.loc["Commission"].reset_index()
commissions.loc[:, "Date"] = commissions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
commissions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [0.0,
0.0,
0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0,
0.0,
0.0]}
)
slippage = results.loc["Slippage"].reset_index()
slippage.loc[:, "Date"] = slippage.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
slippage.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [0.0,
0.0,
0.0,
0.0,
0.0,
0.0],
23456: [0.0,
0.0,
0.0,
0.0,
0.0,
0.0]}
)
returns = results.loc["Return"].reset_index()
returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
returns.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00',
'2018-05-02T00:00:00'],
'Time': ['10:00:00',
'11:00:00',
'12:00:00',
'10:00:00',
'11:00:00',
'12:00:00'],
12345: [0.0,
0.0,
-0.0157895, # (10.12-10.45)/10.45 * 0.5
-0.2633399, # (15.45-10.12)/10.12 * -0.5
0.2194175, # (8.67-15.45)/15.45 * -0.5
-0.2093426 # (12.30-8.67)/8.67 * -0.5
],
23456: [0.0,
0.0,
0.0628643, # (10.50-12.01)/12.01 * -0.5
0.0333333, # (9.80-10.50)/10.50 * -0.5
-0.1836735, # (13.40-9.80)/9.80 * -0.5
-0.2201493 # (7.50-13.40)/13.40 * 0.5
]}
)
def test_pass_allocation(self):
"""
Tests that the resulting DataFrames are correct after running a basic
long-short strategy and passing an allocation.
"""
class BuyBelow10ShortAbove10(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
fields = ["Close","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9,
11,
10.50,
9.99,
# Volume
5000,
16000,
8800,
9900
],
23456: [
# Close
9.89,
11,
8.50,
10.50,
# Volume
15000,
14000,
28800,
17000
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10ShortAbove10().backtest(allocation=3.0)
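# allocation=3.0 scales the strategy's default 0.5 per-position weights
# to 1.5, which the Weight/Exposure/Turnover assertions below expect.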
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
# replace nan with "nan" to allow equality comparisons
results = results.round(7)
results = results.where(results.notnull(), "nan")
signals = results.loc["Signal"].reset_index()
signals.loc[:, "Date"] = signals.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
signals.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.0,
-1.0,
-1.0,
1.0],
23456: [1.0,
-1.0,
1.0,
-1.0]}
)
weights = results.loc["Weight"].reset_index()
weights.loc[:, "Date"] = weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.5,
-1.5,
-1.5,
1.5],
23456: [1.5,
-1.5,
1.5,
-1.5]}
)
abs_weights = results.loc["AbsWeight"].reset_index()
abs_weights.loc[:, "Date"] = abs_weights.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_weights.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [1.5,
1.5,
1.5,
1.5],
23456: [1.5,
1.5,
1.5,
1.5]}
)
net_positions = results.loc["NetExposure"].reset_index()
net_positions.loc[:, "Date"] = net_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
net_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
1.5,
-1.5,
-1.5],
23456: ["nan",
1.5,
-1.5,
1.5]}
)
abs_positions = results.loc["AbsExposure"].reset_index()
abs_positions.loc[:, "Date"] = abs_positions.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
abs_positions.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
1.5,
1.5,
1.5],
23456: ["nan",
1.5,
1.5,
1.5]}
)
total_holdings = results.loc["TotalHoldings"].reset_index()
total_holdings.loc[:, "Date"] = total_holdings.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
total_holdings.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0,
1.0,
1.0,
1.0],
23456: [0,
1.0,
1.0,
1.0]}
)
turnover = results.loc["Turnover"].reset_index()
turnover.loc[:, "Date"] = turnover.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
turnover.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: ["nan",
1.5,
3.0,
0.0],
23456: ["nan",
1.5,
3.0,
3.0]}
)
returns = results.loc["Return"]
returns = returns.reset_index()
returns.loc[:, "Date"] = returns.Date.dt.strftime("%Y-%m-%dT%H:%M:%S%z")
self.assertDictEqual(
returns.to_dict(orient="list"),
{'Date': [
'2018-05-01T00:00:00',
'2018-05-02T00:00:00',
'2018-05-03T00:00:00',
'2018-05-04T00:00:00'],
12345: [0.0,
0.0,
-0.0681818, # (10.50 - 11)/11 * 1.5
0.0728571], # (9.99 - 10.50)/10.50 * -1.5
23456: [0.0,
0.0,
-0.3409091, # (8.50 - 11)/11 * 1.5
-0.3529412] # (10.50 - 8.50)/8.50 * -1.5
}
)
def test_label_conids(self):
"""
Tests that the label_conids param causes symbols to be included in
the resulting columns. For forex, symbol.currency should be used as
the label.
"""
class BuyBelow10(Moonshot):
"""
A basic test strategy that buys below 10.
"""
def prices_to_signals(self, prices):
signals = prices.loc["Close"] < 10
return signals.astype(int)
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
fields = ["Close","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9,
11,
10.50,
9.99,
# Volume
5000,
16000,
8800,
9900
],
23456: [
# Close
9.89,
11,
8.50,
10.50,
# Volume
15000,
14000,
28800,
17000
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"AAPL",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"EUR",
"CASH",
"JPY",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# control: run without label_conids
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10().backtest()
self.assertSetEqual(
set(results.columns),
{12345,
23456}
)
# experiment: run with label_conids
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10().backtest(label_conids=True)
self.assertSetEqual(
set(results.columns),
{"AAPL(12345)",
"EUR.JPY(23456)"}
)
def test_truncate_at_start_date(self):
"""
Tests that the resulting DataFrames are truncated at the requested
start date even if the data predates the start date due to lookback
window.
"""
class BuyBelow10(Moonshot):
"""
A basic test strategy that buys below 10.
"""
LOOKBACK_WINDOW = 10 # has no actual effect here because prices are mocked
def prices_to_signals(self, prices):
signals = prices.loc["Close"] < 10
return signals.astype(int)
def mock_get_historical_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02","2018-05-03", "2018-05-04"])
fields = ["Close","Volume"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
12345: [
# Close
9,
11,
10.50,
9.99,
# Volume
5000,
16000,
8800,
9900
],
23456: [
# Close
9.89,
11,
8.50,
10.50,
# Volume
15000,
14000,
28800,
17000
],
},
index=idx
)
return prices
def mock_get_db_config(db):
return {
'vendor': 'ib',
'domain': 'main',
'bar_size': '1 day'
}
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "Symbol", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
12345: [
"America/New_York",
"ABC",
"STK",
"USD",
None,
None
],
23456: [
"America/New_York",
"DEF",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "ConId"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
with patch("moonshot.strategies.base.get_historical_prices", new=mock_get_historical_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with patch("moonshot.strategies.base.get_db_config", new=mock_get_db_config):
results = BuyBelow10().backtest(start_date="2018-05-03")
self.assertSetEqual(
set(results.index.get_level_values("Field")),
{'Commission',
'AbsExposure',
'Signal',
'Return',
'Slippage',
'NetExposure',
'TotalHoldings',
'Turnover',
'AbsWeight',
'Weight'}
)
self.assertEqual(results.index.get_level_values("Date").min(), pd.Timestamp("2018-05-03"))
b21aab70f83a44383ba2584afdf1c8db013d0187 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/databoxedge/v20210201preview/get_role.py | 2b3f43979b4fd165e04ffde6de3dfaed95f863d9 | ["BSD-3-Clause", "Apache-2.0"] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,584 | py |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetRoleResult',
'AwaitableGetRoleResult',
'get_role',
'get_role_output',
]
warnings.warn("""Please use one of the variants: CloudEdgeManagementRole, IoTRole, KubernetesRole, MECRole.""", DeprecationWarning)
@pulumi.output_type
class GetRoleResult:
"""
Compute role.
"""
def __init__(__self__, id=None, kind=None, name=None, system_data=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
The path ID that uniquely identifies the object.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> str:
"""
Role type.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
The object name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Role configured on ASE resource
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
The hierarchical type of the object.
"""
return pulumi.get(self, "type")
class AwaitableGetRoleResult(GetRoleResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetRoleResult(
id=self.id,
kind=self.kind,
name=self.name,
system_data=self.system_data,
type=self.type)
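# The no-op __await__ lets the result be awaited in async Pulumi code
# while synchronous callers can use it as a plain result object.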
def get_role(device_name: Optional[str] = None,
name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRoleResult:
"""
Compute role.
:param str device_name: The device name.
:param str name: The role name.
:param str resource_group_name: The resource group name.
"""
pulumi.log.warn("""get_role is deprecated: Please use one of the variants: CloudEdgeManagementRole, IoTRole, KubernetesRole, MECRole.""")
__args__ = dict()
__args__['deviceName'] = device_name
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:databoxedge/v20210201preview:getRole', __args__, opts=opts, typ=GetRoleResult).value
return AwaitableGetRoleResult(
id=__ret__.id,
kind=__ret__.kind,
name=__ret__.name,
system_data=__ret__.system_data,
type=__ret__.type)
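# Illustrative call (a sketch; the device, role, and resource group names
# below are hypothetical):
#
#     role = get_role(device_name="testedgedevice",
#                     name="IoTRole1",
#                     resource_group_name="GroupForDataBoxEdge")
#     print(role.kind, role.name)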
@_utilities.lift_output_func(get_role)
def get_role_output(device_name: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetRoleResult]:
"""
Compute role.
:param str device_name: The device name.
:param str name: The role name.
:param str resource_group_name: The resource group name.
"""
pulumi.log.warn("""get_role is deprecated: Please use one of the variants: CloudEdgeManagementRole, IoTRole, KubernetesRole, MECRole.""")
...
b2890fee28b3469e99f5ae1c676d8500ba428280 | 48d1bdfe8ef88e9e24e26f05a07b61a220fd5663 | /tests/settings.py | d264de5677cb4acca69cc9729cd414a7b2c6905b | ["MIT"] | permissive | dejmail/django-data-wizard | b2680cf14564e4be3d74c5e63d17060665adfb8d | cfb4d00032c73d4b55abceb542b68563f3a79a05 | refs/heads/master | 2023-05-10T20:59:46.222978 | 2022-08-18T01:37:40 | 2022-08-18T01:37:40 | 278,087,179 | 0 | 0 | MIT | 2020-07-08T12:46:19 | 2020-07-08T12:46:19 | null | UTF-8 | Python | false | false | 2,248 | py |
import os
TEST_BACKEND = os.environ.get("TEST_BACKEND", "threading")
TEST_VARIANT = os.environ.get("TEST_VARIANT", "default")
WITH_WQDB = TEST_VARIANT == "wq.db"
SECRET_KEY = "1234"
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
if TEST_VARIANT == "wq.db":
WQ_APPS = (
"wq.db.rest",
"wq.db.rest.auth",
)
else:
WQ_APPS = tuple()
if TEST_VARIANT == "reversion":
REVERSION_APPS = ("reversion",)
else:
REVERSION_APPS = tuple()
INSTALLED_APPS = (
(
"django.contrib.contenttypes",
"django.contrib.admin",
"django.contrib.sessions",
"django.contrib.staticfiles",
"django.contrib.auth",
)
+ WQ_APPS
+ REVERSION_APPS
+ (
"data_wizard",
"data_wizard.sources",
"tests.data_app",
"tests.naturalkey_app",
"tests.eav_app",
"tests.source_app",
)
)
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "data_wizard_test.sqlite3",
}
}
ROOT_URLCONF = "tests.urls"
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), "media")
if TEST_BACKEND == "celery":
CELERY_RESULT_BACKEND = BROKER_URL = "redis://localhost/0"
if TEST_VARIANT == "wq.db":
from wq.db.default_settings import * # noqa
DATA_WIZARD = {
"BACKEND": f"data_wizard.backends.{TEST_BACKEND}",
}
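# TEST_BACKEND selects the data_wizard task runner module
# (data_wizard.backends.threading or data_wizard.backends.celery; the
# celery variant relies on the redis broker configured above).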
STATIC_URL = "/static/"
DEBUG = True
c2006c7cd89aca0775e2f8862c0c7c80d2818081 | 6ac683881a26231638ae77261bc1c2e962ed81e6 | /message/models.py | 7155156b1bd11413e82722ed09d6d44072e0ac20 | [] | no_license | tenshiPure/chat | a3deea994d106b27bdcf7c8ac6bc21987b853601 | c10489b87814033ffbd4f50d0eebc3b9e1c364d4 | refs/heads/master | 2016-09-06T02:24:40.094709 | 2014-02-06T03:37:06 | 2014-02-06T03:37:06 | 16,363,786 | 0 | 0 | null | 2016-02-20T02:14:08 | 2014-01-30T00:12:25 | Python | UTF-8 | Python | false | false | 2,828 | py |
#-*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.forms import ModelForm
from django.db import models
from django.contrib.auth.models import User, Group
class UserForm(UserCreationForm):
first_name = forms.CharField(max_length = 32)
last_name = forms.CharField(max_length = 32)
class Meta:
model = User
fields = ('first_name', 'last_name')
class Tag(models.Model):
body = models.CharField(max_length = 64)
last_used = models.DateTimeField(auto_now = True)
group = models.ForeignKey(Group)
def formatedDatetime(self):
return self.last_used.strftime('%Y-%m-%d %H:%M')
@staticmethod
def tagging(tag, create, group):
if not tag and not create:
return None
if tag:
result = Tag.objects.get(pk = tag)
elif create:
rows = Tag.objects.filter(body = create).filter(group = group)
if rows:
result = rows[0]
else:
result = Tag(body = create, group = group)
result.save()
return result
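# Illustrative calls (arguments are hypothetical): tagging(tag=5,
# create='', group=g) returns the tag with pk=5; tagging(tag=None,
# create=u'todo', group=g) reuses or creates the group's 'todo' tag;
# tagging(None, '', g) returns None.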
def __unicode__(self):
return self.body
class TagForm(ModelForm):
class Meta:
model = Tag
class Message(models.Model):
body = models.TextField()
datetime = models.DateTimeField(u'Sent at', auto_now = True)
ref = models.ForeignKey('self', null = True, blank = True)
tag = models.ForeignKey(Tag, null = True, blank = True)
user = models.ForeignKey(User)
group = models.ForeignKey(Group)
def formatedDatetime(self):
return self.datetime.strftime('%Y-%m-%d %H:%M')
def __unicode__(self):
return '%s - %s' % (self.user.username, self.body[0:40])
class MessageForm(ModelForm):
class Meta:
model = Message
exclude = ('user', 'group')
def __init__(self, *args, **kwargs):
group = kwargs.get('group', False)
if group:
kwargs.pop('group')
super(MessageForm, self).__init__(*args, **kwargs)
self.fields['body'] = forms.CharField(
label = '',
widget = forms.Textarea(
attrs = {
'class' : 'class_form_input',
'cols' : 80,
'rows' : 5
}
)
)
self.fields['tag_create'] = forms.CharField(
label = '',
required = False,
widget = forms.TextInput(
attrs = {
'class' : 'class_form_input'
}
)
)
self.fields['ref'] = forms.ModelChoiceField(
# queryset = Message.objects.filter(group = group).order_by('-id'),
queryset = Message.objects.all().order_by('-id'),
label = '',
required = False,
widget = forms.Select(
attrs = {
'class' : 'class_form_input'
}
)
)
self.fields['tag'] = forms.ModelChoiceField(
# queryset = Tag.objects.filter(group = group).order_by('last_used'),
queryset = Tag.objects.all().order_by('last_used'),
label = '',
required = False,
widget = forms.Select(
attrs = {
'class' : 'class_form_input'
}
)
)
6ce1c62e5908770f961a6f42807d4ed6711f56ab | 3cdc345a9cf34f028ce34e2a5d01a86e77b88a90 | /gevent/greentest/test_threading_2.py | 11413b663a6a6f6076906c3017ec3ffecbebb117 | ["MIT"] | permissive | WeilerWebServices/Reddit | 459ace5af417d7bd8b4552a3068ff64b3986a579 | b300835f5c78f83a89931cf2a1c4e9150ddb9a9c | refs/heads/master | 2023-01-01T18:25:00.196266 | 2020-10-21T12:27:25 | 2020-10-21T12:27:25 | 305,972,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,520 | py |
# testing gevent's Event, Lock, RLock, Semaphore, BoundedSemaphore with standard test_threading
from __future__ import print_function
from six import xrange
setup_ = '''from gevent import monkey; monkey.patch_all()
from gevent.event import Event
from gevent.lock import RLock, Semaphore, BoundedSemaphore
from gevent.thread import allocate_lock as Lock
import threading
threading.Event = Event
threading.Lock = Lock
threading.RLock = RLock
threading.Semaphore = Semaphore
threading.BoundedSemaphore = BoundedSemaphore
'''
exec(setup_)
setup_3 = '\n'.join(' %s' % line for line in setup_.split('\n'))
setup_4 = '\n'.join(' %s' % line for line in setup_.split('\n'))
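# setup_3 and setup_4 are re-indented copies of setup_, sized to slot into
# the indented "if 1:" subprocess script templates below via their %s
# placeholders.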
try:
from test import support
from test.support import verbose
except ImportError:
from test import test_support as support
from test.test_support import verbose
import random
import re
import sys
import threading
try:
import thread
except ImportError:
import _thread as thread
import time
import unittest
import weakref
import lock_tests
# A trivial mutable counter.
class Counter(object):
def __init__(self):
self.value = 0
def inc(self):
self.value += 1
def dec(self):
self.value -= 1
def get(self):
return self.value
class TestThread(threading.Thread):
def __init__(self, name, testcase, sema, mutex, nrunning):
threading.Thread.__init__(self, name=name)
self.testcase = testcase
self.sema = sema
self.mutex = mutex
self.nrunning = nrunning
def run(self):
delay = random.random() / 10000.0
if verbose:
print('task %s will run for %.1f usec' % (
self.name, delay * 1e6))
with self.sema:
with self.mutex:
self.nrunning.inc()
if verbose:
print(self.nrunning.get(), 'tasks are running')
self.testcase.assert_(self.nrunning.get() <= 3)
time.sleep(delay)
if verbose:
print('task', self.name, 'done')
with self.mutex:
self.nrunning.dec()
self.testcase.assert_(self.nrunning.get() >= 0)
if verbose:
print('%s is finished. %d tasks are running' % (
self.name, self.nrunning.get()))
class ThreadTests(unittest.TestCase):
# Create a bunch of threads, let each do some work, wait until all are
# done.
def test_various_ops(self):
# This takes about n/3 seconds to run (about n/3 clumps of tasks,
# times about 1 second per clump).
NUMTASKS = 10
# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
numrunning = Counter()
threads = []
for i in range(NUMTASKS):
t = TestThread("<thread %d>" % i, self, sema, mutex, numrunning)
threads.append(t)
t.daemon = False # Under PYPY we get daemon by default?
if hasattr(t, 'ident'):
self.failUnlessEqual(t.ident, None)
self.assertFalse(t.daemon)
self.assert_(re.match(r'<TestThread\(.*, initial\)>', repr(t)))
t.start()
if verbose:
print('waiting for all tasks to complete')
for t in threads:
t.join(NUMTASKS)
self.assert_(not t.is_alive())
if hasattr(t, 'ident'):
self.failIfEqual(t.ident, 0)
self.assertFalse(t.ident is None)
self.assert_(re.match(r'<TestThread\(.*, \w+ -?\d+\)>', repr(t)))
if verbose:
print('all tasks done')
self.assertEqual(numrunning.get(), 0)
def test_ident_of_no_threading_threads(self):
# The ident still must work for the main thread and dummy threads,
# as must the repr and str.
t = threading.currentThread()
self.assertFalse(t.ident is None)
str(t)
repr(t)
def f():
t = threading.currentThread()
ident.append(t.ident)
str(t)
repr(t)
done.set()
done = threading.Event()
ident = []
thread.start_new_thread(f, ())
done.wait()
self.assertFalse(ident[0] is None)
# Kill the "immortal" _DummyThread
del threading._active[ident[0]]
# run with a small(ish) thread stack size (256kB)
def test_various_ops_small_stack(self):
if verbose:
print('with 256kB thread stack size...')
try:
threading.stack_size(262144)
except thread.error:
if verbose:
print('platform does not support changing thread stack size')
return
self.test_various_ops()
threading.stack_size(0)
# run with a large thread stack size (1MB)
def test_various_ops_large_stack(self):
if verbose:
print('with 1MB thread stack size...')
try:
threading.stack_size(0x100000)
except thread.error:
if verbose:
print('platform does not support changing thread stack size')
return
self.test_various_ops()
threading.stack_size(0)
def test_foreign_thread(self):
# Check that a "foreign" thread can use the threading module.
def f(mutex):
# Calling current_thread() forces an entry for the foreign
# thread to get made in the threading._active map.
threading.current_thread()
mutex.release()
mutex = threading.Lock()
mutex.acquire()
tid = thread.start_new_thread(f, (mutex,))
# Wait for the thread to finish.
mutex.acquire()
self.assert_(tid in threading._active)
self.assert_(isinstance(threading._active[tid],
threading._DummyThread))
del threading._active[tid]
# in gevent, we actually clean up threading._active, but that hasn't happened here yet
# PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently)
# exposed at the Python level. This test relies on ctypes to get at it.
def SKIP_test_PyThreadState_SetAsyncExc(self):
try:
import ctypes
except ImportError:
if verbose:
print("test_PyThreadState_SetAsyncExc can't import ctypes")
return # can't do anything
set_async_exc = ctypes.pythonapi.PyThreadState_SetAsyncExc
class AsyncExc(Exception):
pass
exception = ctypes.py_object(AsyncExc)
# `worker_started` is set by the thread when it's inside a try/except
# block waiting to catch the asynchronously set AsyncExc exception.
# `worker_saw_exception` is set by the thread upon catching that
# exception.
worker_started = threading.Event()
worker_saw_exception = threading.Event()
class Worker(threading.Thread):
def run(self):
self.id = thread.get_ident()
self.finished = False
try:
while True:
worker_started.set()
time.sleep(0.1)
except AsyncExc:
self.finished = True
worker_saw_exception.set()
t = Worker()
t.daemon = True # so if this fails, we don't hang Python at shutdown
t.start()
if verbose:
print(" started worker thread")
# Try a thread id that doesn't make sense.
if verbose:
print(" trying nonsensical thread id")
result = set_async_exc(ctypes.c_long(-1), exception)
self.assertEqual(result, 0) # no thread states modified
# Now raise an exception in the worker thread.
if verbose:
print(" waiting for worker thread to get started")
worker_started.wait()
if verbose:
print(" verifying worker hasn't exited")
self.assert_(not t.finished)
if verbose:
print(" attempting to raise asynch exception in worker")
result = set_async_exc(ctypes.c_long(t.id), exception)
self.assertEqual(result, 1) # one thread state modified
if verbose:
print(" waiting for worker to say it caught the exception")
worker_saw_exception.wait(timeout=10)
self.assert_(t.finished)
if verbose:
print(" all OK -- joining worker")
if t.finished:
t.join()
# else the thread is still running, and we have no way to kill it
def test_limbo_cleanup(self):
# Issue 7481: Failure to start thread should cleanup the limbo map.
def fail_new_thread(*args):
raise thread.error()
_start_new_thread = threading._start_new_thread
threading._start_new_thread = fail_new_thread
try:
t = threading.Thread(target=lambda: None)
self.assertRaises(thread.error, t.start)
self.assertFalse(
t in threading._limbo,
"Failed to cleanup _limbo map on failure of Thread.start().")
finally:
threading._start_new_thread = _start_new_thread
def test_finalize_running_thread(self):
# Issue 1402: the PyGILState_Ensure / _Release functions may be called
# very late on python exit: on deallocation of a running thread for
# example.
try:
import ctypes
getattr(ctypes, 'pythonapi') # not available on PyPy
except (ImportError,AttributeError):
if verbose:
print("test_finalize_with_runnning_thread can't import ctypes")
return # can't do anything
del ctypes # pyflakes fix
import subprocess
rc = subprocess.call([sys.executable, "-c", """if 1:
%s
import ctypes, sys, time
try:
import thread
except ImportError:
import _thread as thread # Py3
# This lock is used as a simple event variable.
ready = thread.allocate_lock()
ready.acquire()
# Module globals are cleared before __del__ is run
# So we save the functions in class dict
class C:
ensure = ctypes.pythonapi.PyGILState_Ensure
release = ctypes.pythonapi.PyGILState_Release
def __del__(self):
state = self.ensure()
self.release(state)
def waitingThread():
x = C()
ready.release()
time.sleep(100)
thread.start_new_thread(waitingThread, ())
ready.acquire() # Be sure the other thread is waiting.
sys.exit(42)
""" % setup_3])
self.assertEqual(rc, 42)
def test_join_nondaemon_on_shutdown(self):
# Issue 1722344
# Raising SystemExit skipped threading._shutdown
import subprocess
p = subprocess.Popen([sys.executable, "-c", """if 1:
%s
import threading
from time import sleep
def child():
sleep(1)
# As a non-daemon thread we SHOULD wake up and nothing
# should be torn down yet
print("Woke up, sleep function is: %%r" %% sleep)
threading.Thread(target=child).start()
raise SystemExit
""" % setup_4],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.strip()
stdout = stdout.decode('utf-8')
stderr = stderr.decode('utf-8')
assert re.match('^Woke up, sleep function is: <.*?sleep.*?>$', stdout), repr(stdout)
stderr = re.sub(r"^\[\d+ refs\]", "", stderr, re.MULTILINE).strip()
self.assertEqual(stderr, "")
def test_enumerate_after_join(self):
# Try hard to trigger #1703448: a thread is still returned in
# threading.enumerate() after it has been join()ed.
enum = threading.enumerate
old_interval = sys.getcheckinterval()
try:
for i in xrange(1, 100):
# Try a couple times at each thread-switching interval
# to get more interleavings.
sys.setcheckinterval(i // 5)
t = threading.Thread(target=lambda: None)
t.start()
t.join()
l = enum()
self.assertFalse(t in l,
"#1703448 triggered after %d trials: %s" % (i, l))
finally:
sys.setcheckinterval(old_interval)
if not hasattr(sys, 'pypy_version_info'):
def test_no_refcycle_through_target(self):
class RunSelfFunction(object):
def __init__(self, should_raise):
# The links in this refcycle from Thread back to self
# should be cleaned up when the thread completes.
self.should_raise = should_raise
self.thread = threading.Thread(target=self._run,
args=(self,),
kwargs={'yet_another': self})
self.thread.start()
def _run(self, other_ref, yet_another):
if self.should_raise:
raise SystemExit
cyclic_object = RunSelfFunction(should_raise=False)
weak_cyclic_object = weakref.ref(cyclic_object)
cyclic_object.thread.join()
del cyclic_object
self.assertEquals(None, weak_cyclic_object(),
msg=('%d references still around' %
sys.getrefcount(weak_cyclic_object())))
raising_cyclic_object = RunSelfFunction(should_raise=True)
weak_raising_cyclic_object = weakref.ref(raising_cyclic_object)
raising_cyclic_object.thread.join()
del raising_cyclic_object
self.assertEquals(None, weak_raising_cyclic_object(),
msg=('%d references still around' %
sys.getrefcount(weak_raising_cyclic_object())))
class ThreadJoinOnShutdown(unittest.TestCase):
def _run_and_join(self, script):
script = """if 1:
%s
import sys, os, time, threading
# a thread, which waits for the main program to terminate
def joiningfunc(mainthread):
mainthread.join()
print('end of thread')
\n""" % setup_3 + script
import subprocess
p = subprocess.Popen([sys.executable, "-c", script], stdout=subprocess.PIPE)
rc = p.wait()
data = p.stdout.read().replace(b'\r', b'')
self.assertEqual(data, b"end of main\nend of thread\n")
self.failIf(rc == 2, b"interpreter was blocked")
self.failUnless(rc == 0, b"Unexpected error")
def test_1_join_on_shutdown(self):
# The usual case: on exit, wait for a non-daemon thread
script = """if 1:
import os
t = threading.Thread(target=joiningfunc,
args=(threading.current_thread(),))
t.start()
time.sleep(0.1)
print('end of main')
"""
self._run_and_join(script)
def test_2_join_in_forked_process(self):
# Like the test above, but from a forked interpreter
import os
if not hasattr(os, 'fork'):
return
script = """if 1:
childpid = os.fork()
if childpid != 0:
os.waitpid(childpid, 0)
sys.exit(0)
t = threading.Thread(target=joiningfunc,
args=(threading.current_thread(),))
t.start()
print('end of main')
"""
self._run_and_join(script)
def test_3_join_in_forked_from_thread(self):
# Like the test above, but fork() was called from a worker thread
# In the forked process, the main Thread object must be marked as stopped.
import os
if not hasattr(os, 'fork'):
return
# Skip platforms with known problems forking from a worker thread.
# See http://bugs.python.org/issue3863.
        # This skip is disabled because I think the bug shouldn't apply to gevent -- denis
#if sys.platform in ('freebsd4', 'freebsd5', 'freebsd6', 'os2emx'):
# print(('Skipping test_3_join_in_forked_from_thread'
# ' due to known OS bugs on'), sys.platform, file=sys.stderr)
# return
script = """if 1:
main_thread = threading.current_thread()
def worker():
childpid = os.fork()
if childpid != 0:
os.waitpid(childpid, 0)
sys.exit(0)
t = threading.Thread(target=joiningfunc,
args=(main_thread,))
print('end of main')
t.start()
t.join() # Should not block: main_thread is already stopped
w = threading.Thread(target=worker)
w.start()
"""
self._run_and_join(script)
class ThreadingExceptionTests(unittest.TestCase):
# A RuntimeError should be raised if Thread.start() is called
# multiple times.
def test_start_thread_again(self):
thread = threading.Thread()
thread.start()
self.assertRaises(RuntimeError, thread.start)
def test_joining_current_thread(self):
current_thread = threading.current_thread()
self.assertRaises(RuntimeError, current_thread.join)
def test_joining_inactive_thread(self):
thread = threading.Thread()
self.assertRaises(RuntimeError, thread.join)
def test_daemonize_active_thread(self):
thread = threading.Thread()
thread.start()
self.assertRaises(RuntimeError, setattr, thread, "daemon", True)
class LockTests(lock_tests.LockTests):
locktype = staticmethod(threading.Lock)
class RLockTests(lock_tests.RLockTests):
locktype = staticmethod(threading.RLock)
class EventTests(lock_tests.EventTests):
eventtype = staticmethod(threading.Event)
class ConditionAsRLockTests(lock_tests.RLockTests):
# An Condition uses an RLock by default and exports its API.
locktype = staticmethod(threading.Condition)
class ConditionTests(lock_tests.ConditionTests):
condtype = staticmethod(threading.Condition)
class SemaphoreTests(lock_tests.SemaphoreTests):
semtype = staticmethod(threading.Semaphore)
class BoundedSemaphoreTests(lock_tests.BoundedSemaphoreTests):
semtype = staticmethod(threading.BoundedSemaphore)
def main():
support.run_unittest(LockTests, RLockTests, EventTests,
ConditionAsRLockTests, ConditionTests,
SemaphoreTests, BoundedSemaphoreTests,
ThreadTests,
ThreadJoinOnShutdown,
ThreadingExceptionTests,
)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
a0a87ff1bb1928a162d29d5c0b92860320bbc4cb | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/base.py | 2954e4b53ef852369ec2fb2c2b6ac7f8f8469fda | [
"Apache-2.0"
]
| permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 11,551 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.cloud.location import locations_pb2 # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.oauth2 import service_account # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from google.cloud.dialogflowcx_v3 import gapic_version as package_version
from google.cloud.dialogflowcx_v3.types import environment
from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
)
class EnvironmentsTransport(abc.ABC):
"""Abstract transport class for Environments."""
AUTH_SCOPES = (
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
)
DEFAULT_HOST: str = "dialogflow.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The audience to use when the
                credentials support audience overrides (GDCH); defaults
                to the service ``host``.
        """
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# Don't apply audience if the credentials file passed from user.
if hasattr(credentials, "with_gdch_audience"):
credentials = credentials.with_gdch_audience(
api_audience if api_audience else host
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.list_environments: gapic_v1.method.wrap_method(
self.list_environments,
default_timeout=None,
client_info=client_info,
),
self.get_environment: gapic_v1.method.wrap_method(
self.get_environment,
default_timeout=None,
client_info=client_info,
),
self.create_environment: gapic_v1.method.wrap_method(
self.create_environment,
default_timeout=None,
client_info=client_info,
),
self.update_environment: gapic_v1.method.wrap_method(
self.update_environment,
default_timeout=None,
client_info=client_info,
),
self.delete_environment: gapic_v1.method.wrap_method(
self.delete_environment,
default_timeout=None,
client_info=client_info,
),
self.lookup_environment_history: gapic_v1.method.wrap_method(
self.lookup_environment_history,
default_timeout=None,
client_info=client_info,
),
self.run_continuous_test: gapic_v1.method.wrap_method(
self.run_continuous_test,
default_timeout=None,
client_info=client_info,
),
self.list_continuous_test_results: gapic_v1.method.wrap_method(
self.list_continuous_test_results,
default_timeout=None,
client_info=client_info,
),
self.deploy_flow: gapic_v1.method.wrap_method(
self.deploy_flow,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def operations_client(self):
"""Return the client designed to process long-running operations."""
raise NotImplementedError()
@property
def list_environments(
self,
) -> Callable[
[environment.ListEnvironmentsRequest],
Union[
environment.ListEnvironmentsResponse,
Awaitable[environment.ListEnvironmentsResponse],
],
]:
raise NotImplementedError()
@property
def get_environment(
self,
) -> Callable[
[environment.GetEnvironmentRequest],
Union[environment.Environment, Awaitable[environment.Environment]],
]:
raise NotImplementedError()
@property
def create_environment(
self,
) -> Callable[
[gcdc_environment.CreateEnvironmentRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def update_environment(
self,
) -> Callable[
[gcdc_environment.UpdateEnvironmentRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def delete_environment(
self,
) -> Callable[
[environment.DeleteEnvironmentRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def lookup_environment_history(
self,
) -> Callable[
[environment.LookupEnvironmentHistoryRequest],
Union[
environment.LookupEnvironmentHistoryResponse,
Awaitable[environment.LookupEnvironmentHistoryResponse],
],
]:
raise NotImplementedError()
@property
def run_continuous_test(
self,
) -> Callable[
[environment.RunContinuousTestRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def list_continuous_test_results(
self,
) -> Callable[
[environment.ListContinuousTestResultsRequest],
Union[
environment.ListContinuousTestResultsResponse,
Awaitable[environment.ListContinuousTestResultsResponse],
],
]:
raise NotImplementedError()
@property
def deploy_flow(
self,
) -> Callable[
[environment.DeployFlowRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def list_operations(
self,
) -> Callable[
[operations_pb2.ListOperationsRequest],
Union[
operations_pb2.ListOperationsResponse,
Awaitable[operations_pb2.ListOperationsResponse],
],
]:
raise NotImplementedError()
@property
def get_operation(
self,
) -> Callable[
[operations_pb2.GetOperationRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def cancel_operation(
self,
) -> Callable[[operations_pb2.CancelOperationRequest], None,]:
raise NotImplementedError()
@property
def get_location(
self,
) -> Callable[
[locations_pb2.GetLocationRequest],
Union[locations_pb2.Location, Awaitable[locations_pb2.Location]],
]:
raise NotImplementedError()
@property
def list_locations(
self,
) -> Callable[
[locations_pb2.ListLocationsRequest],
Union[
locations_pb2.ListLocationsResponse,
Awaitable[locations_pb2.ListLocationsResponse],
],
]:
raise NotImplementedError()
@property
def kind(self) -> str:
raise NotImplementedError()
__all__ = ("EnvironmentsTransport",)
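# A minimal sketch (not part of the generated file) of wiring up the
# abstract transport above. AnonymousCredentials comes from google.auth and
# sidesteps Application Default Credentials; the subclass exists only to
# show the extension point -- the real implementations live in the
# gRPC/REST transports of this package.
def _sketch_build_transport():
    class _NoopTransport(EnvironmentsTransport):
        def close(self):
            pass

    transport = _NoopTransport(credentials=ga_credentials.AnonymousCredentials())
    # __init__ normalizes the host to include a port.
    assert transport._host == "dialogflow.googleapis.com:443"
    return transport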
| [
"[email protected]"
]
| |
e25e3fb611bdf6fa99186813f21592c175ee2b99 | 53ee800e1cd6b4cd3e834e049a74c67c5e32eaca | /conftest.py | d7d40aca37ed6a8b6431be82ec5d473360206d71 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | sasobadovinac/ht | 482cd7e7c8ef351dd4bcb5bc9993ef3f74d8cab0 | de707506c00a3aefc2985008e98e9df0e7af9cb6 | refs/heads/master | 2023-02-09T04:42:11.961473 | 2023-01-23T02:21:06 | 2023-01-23T02:21:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,984 | py | import sys
import platform
is_pypy = 'PyPy' in sys.version
def pytest_ignore_collect(path):
path = str(path)
if 'manual_runner' in path or 'make_test_stubs' in path or 'plot' in path or 'prerelease' in path:
return True
if 'conf.py' in path:
return True
ver_tup = platform.python_version_tuple()[0:2]
ver_tup = tuple(int(i) for i in ver_tup)
if ver_tup < (3, 7) or ver_tup >= (3, 10) or is_pypy:
# numba does not yet run under pypy
if 'numba' in path:
return True
if '.rst' in path: # skip .rst tests as different rendering from pint and no support for NUMBER flag
return True
if sys.version[0] == '2':
if 'numba' in path or 'typing_utils' in path:
return True
if 'test' not in path:
return True
if 'ipynb' in path and 'bench' in path:
return True
return False
#def pytest_addoption(parser, pluginmanager):
# if sys.version[0] == '323523':
# parser.addoption("--doctest-modules")
# parser.addini(name="doctest_optionflags", help="", default="NORMALIZE_WHITESPACE NUMBER")
#def pytest_configure(config):
# print(config)
#open('/home/caleb/testoutput', 'w').write(str(1))
#if sys.version[0] == '2':
# args = []
# #print(args)
def pytest_load_initial_conftests(args):
    # Deliberately empty pytest hook, kept as a placeholder.
    pass
def pytest_configure(config):
if sys.version[0] == '3':
import pytest
        if int(pytest.__version__.split('.')[0]) >= 6:  # numeric compare; as strings '10' < '6'
config.addinivalue_line("addopts", '--doctest-modules')
config.option.doctestmodules = True
config.addinivalue_line("doctest_optionflags", "NUMBER")
# config.addinivalue_line("addopts", config.inicfg['addopts'].replace('//', '') + ' --doctest-modules')
#config.inicfg['addopts'] = config.inicfg['addopts'] + ' --doctest-modules'
#
config.addinivalue_line("doctest_optionflags", "NORMALIZE_WHITESPACE")
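# Why the integer cast in pytest_configure matters (a standalone sketch,
# independent of pytest itself): string comparison is lexicographic, so
# "10" sorts before "6" even though 10 > 6.
def _sketch_version_compare():
    assert ("10" >= "6") is False  # lexicographic: '1' < '6'
    assert int("10") >= 6          # numeric, as intended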
| [
"[email protected]"
]
| |
bfbdcb02acc6bbaaf28aed62a3a02c0364e3390f | 1e5f6ac1590fe64e2d5a2d8b036c0948847f668d | /codes/Module_3/lecture_14/lecture_14_1.py | 8e31229472bbd0149536f6ac5d764794c79ff078 | []
| no_license | Gedanke/Reptile_study_notes | 54a4f48820586b1784c139716c719cc9d614c91b | a9705ebc3a6f95160ad9571d48675bc59876bd32 | refs/heads/master | 2022-07-12T23:43:24.452049 | 2021-08-09T12:54:18 | 2021-08-09T12:54:18 | 247,996,275 | 5 | 1 | null | 2022-06-26T00:21:48 | 2020-03-17T14:50:42 | HTML | UTF-8 | Python | false | false | 728 | py | # -*- coding: utf-8 -*-
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
url = "https://www.baidu.com"
browser = webdriver.Chrome()
try:
browser.get(url)
    search_box = browser.find_element_by_id('kw')
    search_box.send_keys('Python')
    search_box.send_keys(Keys.ENTER)
wait = WebDriverWait(browser, 10)
wait.until(EC.presence_of_element_located((By.ID, 'content_left')))
time.sleep(5)
print(browser.current_url)
print(browser.get_cookies())
print(browser.page_source)
finally:
browser.close()
| [
"[email protected]"
]
| |
4cac7bbd91f2ee70771624bc6cc8a2c4bfff9f5f | 3ea45d6acd362a646e906eac31ab6d3ea019d727 | /qaeval/tests/scoring/scorers/lerc_test.py | cadff9977a9b00b52775e5e6b44447cb724e1300 | [
"Apache-2.0"
]
| permissive | rajhans/qaeval | 9747dea5dd0a234cc3df7837d6cbc0406b5d1b03 | dd7273183dd1b2c9995115310ef041daa953ca81 | refs/heads/master | 2023-07-10T04:15:05.399369 | 2021-08-03T02:22:15 | 2021-08-03T02:22:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,155 | py | import os
import pytest
from qaeval.scoring.scorers import LERCScorer
from qaeval.tests.scoring.scorers.scorer_test import TestScorer
@pytest.mark.skipif('LERC_MODEL' not in os.environ or 'LERC_PRETRAINED' not in os.environ, reason='LERC environment variables not set')
class TestLERCScorer(TestScorer):
@classmethod
def setUpClass(cls) -> None:
cls.scorer = LERCScorer(
model_path=os.environ['LERC_MODEL'],
pretrained_path=os.environ['LERC_PRETRAINED'],
cuda_device=0
)
def test_keys(self):
assert self.scorer.keys() == {'lerc'}
def test_default_scores(self):
assert self.scorer.default_scores() == {'lerc': 0.0}
def test_is_answered(self):
self.assert_expected_output(
# This is a regression test. It does not ensure these numbers are correct
self.scorer,
{'lerc': (2.5152266025543213 + 4.940724849700928) / 2},
[{'lerc': 2.5152266025543213}, {'lerc': 4.940724849700928}],
[[{'lerc': 2.5210483074188232}, {'lerc': 5.024631500244141}, {'lerc': 0.0}], [{'lerc': 4.940724849700928}]]
)
| [
"[email protected]"
]
| |
2eda323e1df29dba8b357199e32a196401cea08e | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_2_1_neat/16_2_1_latsyrc11235_1.py | da55e94217b12acb619e4ed1d23e38ecc1f4df14 | []
| no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 2,143 | py | f = [line.rstrip() for line in open('/Users/roshil/Desktop/A-small-attempt0 (2).in')]
out = open('/Users/roshil/Desktop/out.txt','w')
out.truncate()
line = 0
testcases = int(f[line])
line += 1
for i in range(1, testcases+1):
r1 = f[line]
line += 1
r1 = r1.lower()
word = [k for k in r1]
s = []
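    # Eliminate digits whose spelling contains a letter no other remaining
    # digit shares, in an order that keeps each probe unambiguous:
    # z->0, w->2, u->4, then r->3 (zero/four gone), x->6, g->8,
    # o->1 (zero/two/four gone), f->5, v->7, and nine last.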
while len(word) > 0:
#print word
if 'z' in word:
word.remove('z')
word.remove('e')
word.remove('r')
word.remove('o')
s.append(0)
elif 'w' in word:
word.remove('t')
word.remove('w')
word.remove('o')
s.append(2)
elif 'u' in word:
word.remove('f')
word.remove('o')
word.remove('u')
word.remove('r')
s.append(4)
elif 'r' in word:
word.remove('t')
word.remove('h')
word.remove('r')
word.remove('e')
word.remove('e')
s.append(3)
elif 'x' in word:
word.remove('s')
word.remove('i')
word.remove('x')
s.append(6)
elif 'g' in word:
word.remove('e')
word.remove('i')
word.remove('g')
word.remove('h')
word.remove('t')
s.append(8)
elif 'o' in word:
word.remove('o')
word.remove('n')
word.remove('e')
s.append(1)
elif 'f' in word:
word.remove('f')
word.remove('i')
word.remove('v')
word.remove('e')
s.append(5)
elif 'v' in word:
word.remove('s')
word.remove('e')
word.remove('v')
word.remove('e')
word.remove('n')
s.append(7)
else:
word.remove('n')
word.remove('i')
word.remove('n')
word.remove('e')
s.append(9)
s.sort()
ans = "".join([str(l) for l in s])
    print(ans)
out.write("Case #"+str(i)+": "+str(ans) + "\n")
out.close() | [
"[[email protected]]"
]
| |
023d9f5a2081647f38c2abb19c67c5d07e7f1bac | fb3f2c3f83fbfe894f01ea514c760371ef05d54f | /Algorithm/chapter5/flatten.py | 0b99312d0778169c809ff206410031189ac979eb | []
| no_license | jonXue92/PythonGit | 8160220a3d51fb6a317702a2b50e8ca3306a8f0e | a9358ac79a47b3d1fd072a4af603bf07a89b1a2c | refs/heads/master | 2020-04-02T05:25:51.032912 | 2019-04-12T04:18:15 | 2019-04-12T04:18:15 | 154,076,228 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,088 | py | # -*- coding: utf-8 -*-
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
class Flatten:
last_node = None
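    # Approach 1 (flatten): remember the last node visited in preorder and
    # rewire it so every node ends up on a single right-child chain.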
def flatten(self, root):
if root is None:
return
if self.last_node is not None:
self.last_node.left = None
self.last_node.right = root
self.last_node = root
right = root.right
self.flatten(root.left)
self.flatten(right)
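    # Approach 2 (flatten1/helper): the helper returns the last node of each
    # flattened subtree so parents can be stitched bottom-up without state.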
def flatten1(self, root):
self.helper(root)
# restructure and return last node in preorder
def helper(self, root):
if root is None:
return None
left_last = self.helper(root.left)
right_last = self.helper(root.right)
# connect
if left_last is not None:
left_last.right = root.right
root.right = root.left
root.left = None
if right_last is not None:
return right_last
if left_last is not None:
return left_last
return root | [
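# A small usage sketch (not part of the original solution): flattening
#       1                1
#      / \       ->       \
#     2   5                2 -> 3 -> 4 -> 5
#    / \
#   3   4
# strings the preorder sequence along the right pointers.
def _sketch_flatten_demo():
    root = TreeNode(1)
    root.left, root.right = TreeNode(2), TreeNode(5)
    root.left.left, root.left.right = TreeNode(3), TreeNode(4)
    Flatten().flatten(root)
    values, node = [], root
    while node:
        values.append(node.val)
        node = node.right
    assert values == [1, 2, 3, 4, 5]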
"[email protected]"
]
| |
edfb24502e388ee7e252a957ea60815238e99f0f | 29f8b7f92eb22cc3134a16c439d3180e254df4bb | /chp04_database_programming/04_65_sql.py | d81a91878a6b51f9b1bfd0ac8be6453d5ed66e59 | []
| no_license | Hemie143/realpython2 | 7df80dd5f61ce7cd8c31b8bf78111b8507cbdb36 | b8535ffe97594e1b18233bcd9aa0de664257cb09 | refs/heads/master | 2022-12-12T04:51:53.120131 | 2021-01-03T19:52:32 | 2021-01-03T19:52:32 | 208,735,855 | 0 | 0 | null | 2023-08-17T05:45:32 | 2019-09-16T07:22:16 | Python | UTF-8 | Python | false | false | 925 | py | import sqlite3
with sqlite3.connect("new.db") as connection:
c = connection.cursor()
c.execute("CREATE TABLE regions (city TEXT, region TEXT)")
cities = [
('New York City', 'Northeast'),
('San Francisco', 'West'),
('Chicago', 'Midwest'),
('Houston', 'South'),
('Phoenix', 'West'),
('Boston', 'Northeast'),
('Los Angeles', 'West'),
('Houston', 'South'),
('Philadelphia', 'Northeast'),
('San Antonio', 'South'),
('San Diego', 'West'),
('Dallas', 'South'),
('San Jose', 'West'),
('Jacksonville', 'South'),
('Indianapolis', 'Midwest'),
('Austin', 'South'),
('Detroit', 'Midwest')
]
c.executemany("INSERT INTO regions VALUES(?, ?)", cities)
c.execute("SELECT * FROM regions ORDER BY region ASC")
rows = c.fetchall()
for r in rows:
print(r[0], r[1])
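# A possible follow-up query (an editor's sketch, not in the original
# script): aggregate the same table by region, assuming new.db still holds
# the rows inserted above.
with sqlite3.connect("new.db") as connection:
    c = connection.cursor()
    c.execute("SELECT region, COUNT(*) FROM regions GROUP BY region ORDER BY region")
    for region, total in c.fetchall():
        print(region, total)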
| [
"[email protected]"
]
| |
537d39ea66e7cc44ae00acb9282f590cf9ffb326 | ae8074a50ee666e46484e33bed7eb1cc16dfd0b8 | /notebooks/CaseStudies/executor_1.py | 04316c053eba23dd3c842d1844e318ff17f821f8 | []
| no_license | ayyogg0628/AnomalyDetection_MEAD | 72edb3c5f222c1d8c1f4fc7fc6d2ae17a757e254 | 0df68f91568726c40f5ff7309cf8f74bcc2af74e | refs/heads/master | 2023-03-18T22:22:17.045809 | 2020-07-07T23:44:59 | 2020-07-07T23:44:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,089 | py | import operator
import pickle
import numpy as np
import os
import sys
import time
import pprint
import inspect
from collections import OrderedDict
import matplotlib.pyplot as plt
import yaml
from sklearn.metrics import auc
import logging
import logging.handlers
import tensorflow as tf
import pandas as pd
tf.logging.set_verbosity(tf.logging.ERROR)
# matplotlib.use('Agg')
sys.path.append('./..')
sys.path.append('./../../.')
try:
import src.m2_test_1layer.tf_model_3_withNorm as tf_model
except:
from .src.m2_test_1layer import tf_model_3_withNorm as tf_model
try:
from src.Eval import eval_v1 as eval
except:
from .src.Eval import eval_v1 as eval
# ------------------------------------ #
cur_path = '/'.join(
os.path.abspath(
inspect.stack()[0][1]
).split('/')[:-1]
)
sys.path.append(cur_path)
_author__ = "Debanjan Datta"
__email__ = "[email protected]"
__version__ = "5.0"
__processor__ = 'embedding'
_SAVE_DIR = 'save_dir'
MODEL_NAME = None
_DIR = None
DATA_DIR = None
MODEL_OP_FILE_PATH = None
CONFIG_FILE = 'config_caseStudy_1.yaml'
CONFIG = None
# ----------------------------------------- #
def get_domain_dims():
global DATA_DIR
f_path = os.path.join(DATA_DIR, 'domain_dims.pkl')
with open(f_path, 'rb') as fh:
res = pickle.load(fh)
return list(res.values())
# ----------------------------------------- #
# --------- Model Config --------- #
# ----------------------------------------- #
# embedding_dims = None
DOMAIN_DIMS = None
logger = None
def setup_general_config():
global MODEL_NAME
global _DIR
global SAVE_DIR
global OP_DIR
global _SAVE_DIR
global CONFIG
global logger
SAVE_DIR = os.path.join(CONFIG['SAVE_DIR'], _DIR)
OP_DIR = os.path.join(CONFIG['OP_DIR'], _DIR)
if not os.path.exists(CONFIG['SAVE_DIR']):
os.mkdir(os.path.join(CONFIG['SAVE_DIR']))
if not os.path.exists(SAVE_DIR):
os.mkdir(os.path.join(SAVE_DIR))
return
# --------------------------------------------- #
def set_up_model(config, _dir):
global embedding_dims
global SAVE_DIR
global OP_DIR
global MODEL_NAME
MODEL_NAME = config['MODEL_NAME']
if type(config[_dir]['op_dims']) == str:
embedding_dims = config[_dir]['op_dims']
embedding_dims = embedding_dims.split(',')
embedding_dims = [int(e) for e in embedding_dims]
else:
embedding_dims = [config[_dir]['op_dims']]
model_obj = tf_model.model(MODEL_NAME, SAVE_DIR, OP_DIR)
model_obj.set_model_options(
show_loss_figure=config[_dir]['show_loss_figure'],
save_loss_figure=config[_dir]['save_loss_figure']
)
domain_dims = get_domain_dims()
LR = config[_dir]['learning_rate']
model_obj.set_model_hyperparams(
domain_dims=domain_dims,
emb_dims=embedding_dims,
batch_size=config[_dir]['batchsize'],
num_epochs=config[_dir]['num_epochs'],
learning_rate=LR,
num_neg_samples=config[_dir]['num_neg_samples']
)
model_obj.set_l2_loss_flag(True)
model_obj.inference = False
model_obj.build_model()
return model_obj
def get_data():
global CONFIG
global DATA_DIR
global _DIR
DIR = _DIR
with open(os.path.join(
CONFIG['DATA_DIR'],
DIR,
'domain_dims.pkl'
), 'rb') as fh:
domain_dims = pickle.load(fh)
train_x_pos_file = os.path.join(
CONFIG['DATA_DIR'],
DIR,
'matrix_train_positive_v1.pkl'
)
with open(train_x_pos_file, 'rb') as fh:
train_x_pos = pickle.load(fh)
train_x_neg_file = os.path.join(
CONFIG['DATA_DIR'],
DIR,
'negative_samples_v1.pkl'
)
with open(train_x_neg_file, 'rb') as fh:
train_x_neg = pickle.load(fh)
train_x_neg = train_x_neg
test_x_file = os.path.join(
CONFIG['DATA_DIR'],
DIR,
'matrix_test_positive.pkl'
)
with open(test_x_file, 'rb') as fh:
test_x = pickle.load(fh)
    _df = pd.read_csv(os.path.join(CONFIG['DATA_DIR'], DIR, 'test_data.csv'), header=0)
test_id_list = list(_df['PanjivaRecordID'])
return train_x_pos, train_x_neg, test_x, test_id_list, domain_dims
def process(
CONFIG,
_DIR,
train_x_pos,
train_x_neg,
test_data_x,
test_id_list
):
global logger
num_neg_samples = train_x_neg.shape[1]
CONFIG[_DIR]['num_neg_samples'] = num_neg_samples
model_obj = set_up_model(CONFIG, _DIR)
_use_pretrained = CONFIG[_DIR]['use_pretrained']
if _use_pretrained is True:
saved_file_path = None
pretrained_file = CONFIG[_DIR]['saved_model_file']
print('Pretrained File :', pretrained_file)
saved_file_path = os.path.join(
SAVE_DIR,
'checkpoints',
pretrained_file
)
if saved_file_path is not None:
model_obj.set_pretrained_model_file(saved_file_path)
else:
model_obj.train_model(
train_x_pos,
train_x_neg
)
elif _use_pretrained is False:
model_obj.train_model(
train_x_pos,
train_x_neg
)
print(' Len of test_ids ', len(test_id_list))
print('Length of test data', test_data_x.shape)
res = model_obj.get_event_score(test_data_x)
print('Length of results ', len(res))
res = list(res)
_id_score_dict = {
id: _res for id, _res in zip(
test_id_list,
res
)
}
    # Sort ascending: a lower likelihood score means a more anomalous
    # event, so the most anomalous records come first.
tmp = sorted(
_id_score_dict.items(),
key=operator.itemgetter(1)
)
sorted_id_score_dict = OrderedDict()
for e in tmp:
sorted_id_score_dict[e[0]] = e[1][0]
_ID = []
_SCORE = []
for k,v in sorted_id_score_dict.items():
_ID.append(k)
_SCORE.append(v)
_df = pd.DataFrame(columns=['PanjivaRecordID','score'])
_df['PanjivaRecordID'] = _ID
_df['score'] = _SCORE
_df.to_csv(os.path.join(OP_DIR,'result_1.csv'))
# get embeddings
emb_res = model_obj.get_record_embeddings(train_x_pos)
with open(os.path.join(OP_DIR,'train_embeddings.pkl'),'wb') as fh:
pickle.dump(emb_res,fh,pickle.HIGHEST_PROTOCOL)
return
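# A quick way to inspect the scores written above (a sketch, not part of
# the pipeline): result_1.csv is sorted ascending, so the lowest-likelihood
# -- i.e. most anomalous -- records come first.
def _sketch_top_anomalies(op_dir, k=10):
    df = pd.read_csv(os.path.join(op_dir, 'result_1.csv'))
    return df.sort_values('score').head(k)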
def main():
global embedding_dims
global SAVE_DIR
global _DIR
global DATA_DIR
global CONFIG
global CONFIG_FILE
global MODEL_NAME
global DOMAIN_DIMS
global logger
with open(CONFIG_FILE) as f:
CONFIG = yaml.safe_load(f)
DATA_DIR = os.path.join(CONFIG['DATA_DIR'], _DIR)
setup_general_config()
if not os.path.exists(os.path.join(SAVE_DIR, 'checkpoints')):
os.mkdir(
os.path.join(SAVE_DIR, 'checkpoints')
)
# ------------ #
if not os.path.exists(os.path.join(SAVE_DIR, 'checkpoints')):
os.mkdir(os.path.join(SAVE_DIR, 'checkpoints'))
# ------------ #
logger.info('-------------------')
train_x_pos, train_x_neg, test_x, test_id_list, domain_dims = get_data()
process(
CONFIG,
_DIR,
train_x_pos,
train_x_neg,
test_x,
test_id_list
)
logger.info('-------------------')
# ----------------------------------------------------------------- #
# find out which model works best
# ----------------------------------------------------------------- #
with open(CONFIG_FILE) as f:
CONFIG = yaml.safe_load(f)
log_file = 'case_studies_1.log'
_DIR = 'us_import'
logger = logging.getLogger('main')
logger.setLevel(logging.INFO)
OP_DIR = os.path.join(CONFIG['OP_DIR'], _DIR)
if not os.path.exists(CONFIG['OP_DIR']):
os.mkdir(CONFIG['OP_DIR'])
if not os.path.exists(OP_DIR):
os.mkdir(OP_DIR)
handler = logging.FileHandler(os.path.join(OP_DIR, log_file))
handler.setLevel(logging.INFO)
logger.addHandler(handler)
logger.info(' Info start ')
logger.info(' -----> ' + _DIR)
main()
| [
"[email protected]"
]
| |
70772f2adcd137ef04c0dd0f83df8264fa9192f8 | 72db8db1a513dfa01ce81bf88b39c10c662bfae2 | /annoying/tests/models.py | 099d3338d0b17db45ce4bfc6d5fbc2b27c37d152 | [
"MIT"
]
| permissive | colorstheforce/insta-clonewars | ec6053853505db26e9e931c531e531b5e6754740 | 2e8e6fc2e5ef7d2401d7902679e64d8859918d3a | refs/heads/master | 2022-03-30T13:28:39.755391 | 2019-01-17T12:55:11 | 2019-01-17T12:55:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | from django.db import models
from annoying.fields import AutoOneToOneField
from annoying.fields import JSONField
class SuperVillain(models.Model):
name = models.CharField(max_length=20, default="Dr Horrible")
stats = JSONField(default=None, blank=True, null=True)
class SuperHero(models.Model):
name = models.CharField(max_length=20, default="Captain Hammer")
mortal_enemy = AutoOneToOneField(SuperVillain, related_name='mortal_enemy')
| [
"[email protected]"
]
| |
e9b5cf2445399642b2b7c925cbf7645c8e7e2f58 | 5864e86954a221d52d4fa83a607c71bacf201c5a | /trinity/renderjobs.py | 394ddcb28608b85b76dfb5fc0412e2471051f7de | []
| no_license | connoryang/1v1dec | e9a2303a01e5a26bf14159112b112be81a6560fd | 404f2cebf13b311e754d45206008918881496370 | refs/heads/master | 2021-05-04T02:34:59.627529 | 2016-10-19T08:56:26 | 2016-10-19T08:56:26 | 71,334,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,474 | py | #Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\packages\trinity\renderjobs.py
import decometaclass
from . import _trinity as trinity
class RenderJobs(object):
__cid__ = 'trinity.Tr2RenderJobs'
__metaclass__ = decometaclass.BlueWrappedMetaclass
def __init__(self):
pass
def UnscheduleByName(self, name):
for rj in self.recurring:
if rj.name == name:
self.recurring.remove(rj)
return True
return False
def FindByName(self, name):
for rj in self.recurring:
if rj.name == name:
return rj
def FindStepByName(self, name):
def FindInJob(rj):
for step in rj.steps:
if step.name == name:
return step
for rj in self.recurring:
ret = FindInJob(rj)
if ret is not None:
return ret
def FindScenes(self, sceneType, filter = lambda x: True):
results = set({})
def RecursiveSearch(job):
for step in job.steps:
if hasattr(step, 'object') and type(step.object) is sceneType and filter(step.object):
                    results.add(step.object)
                    # The decompiled 'return' here would abort the whole
                    # search after the first match; continue keeps scanning
                    # the remaining steps and nested jobs.
                    continue
if type(step) is trinity.TriStepRunJob:
RecursiveSearch(step.job)
for job in self.recurring:
RecursiveSearch(job)
return results
| [
"[email protected]"
]
| |
d4a4f7cad1ae98a307e8097d46ba07924f6a4adb | 1f85142263a08d2e20080f18756059f581d524df | /chromium_extension/branches/timeline/src/build/common.gypi | 764680d3d99285d67d4b0c6a767afa03d918f377 | []
| no_license | songlibo/page-speed | 60edce572136a4b35f4d939fd11cc4d3cfd04567 | 8776e0441abd3f061da969644a9db6655fe01855 | refs/heads/master | 2021-01-22T08:27:40.145133 | 2016-02-03T15:34:40 | 2016-02-03T15:34:40 | 43,261,473 | 0 | 0 | null | 2015-09-27T19:32:17 | 2015-09-27T19:32:17 | null | UTF-8 | Python | false | false | 1,260 | gypi | # Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
# Make sure we link statically so everything gets linked into a
# single shared object.
'library': 'static_library',
# The nacl toolchain fails to build valid nexes when we enable gc
# sections, at least on 64 bit builds. TODO: revisit this to see
# if a newer nacl toolchain supports it.
'no_gc_sections': 1,
# We're building a shared library, so everything needs to be built
# with Position-Independent Code.
'linux_fpic': 1,
},
'includes': [
'../third_party/libpagespeed/src/build/common.gypi',
],
# 'target_defaults': {
# 'include_dirs': [
# '<(DEPTH)/build/nacl_header_stubs',
# ],
# },
}
| [
"[email protected]"
]
| |
499899b07cb558bc9dd599794ace8b8746cee9ba | 06c9edb02884ced68c62b5527d2be0e1a2e65bf1 | /9012.py | 3ce6c980e1d0a4c7ae29f246559b2957d47c7fc6 | []
| no_license | 0x232/BOJ | 3c5d3973b62036bfe9b761c88c822cf7fe909bce | 5f135ac51b1c304eff4630798fb5c516b666a5c6 | refs/heads/master | 2021-07-03T02:56:00.132987 | 2020-10-31T02:18:19 | 2020-10-31T02:18:19 | 191,161,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | n = int(input())
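# counter tracks opens minus closes: a balanced string never dips below
# zero mid-scan and must end at exactly zero. The early break catches the
# first failure mode; the final counter != 0 check catches the second.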
for _ in range(n):
paren = input()
counter = 0
answer = True
for p in paren:
if counter < 0:
answer = False
break
if p == '(':
counter += 1
if p == ')':
counter -= 1
if counter != 0:
answer = False
if answer:
print('YES')
else:
print('NO')
| [
"[email protected]"
]
| |
158391a0ca82c0639608e6f98dede3195bd12b40 | 9d862dd68f8b4ea4e7de9397fef8592824c77449 | /app/top/api/rest/FenxiaoDiscountsGetRequest.py | 2989652b695920224f032670cc2c84c122f36215 | []
| no_license | hi-noikiy/tmall-sku-outer_id | ffaca630dfb288ca33d962b8a050932d1047b9c8 | 1bcf29386a513bcb210bf5d91016e0dcb1ebc1ad | refs/heads/master | 2021-05-09T18:20:27.150316 | 2017-03-08T06:43:57 | 2017-03-08T06:43:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | '''
Created by auto_sdk on 2016.03.05
'''
from app.top.api.base import RestApi
class FenxiaoDiscountsGetRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.discount_id = None
self.ext_fields = None
def getapiname(self):
return 'taobao.fenxiao.discounts.get'
| [
"[email protected]"
]
| |
e925cae9746d4510a5277d88ffa5e8a07c3c90e6 | 4eaab9327d25f851f9e9b2cf4e9687d5e16833f7 | /problems/critical_connections_in_a_network/solution.py | 7ddf628a29bc4208c9823e84011f61a218c0010c | []
| no_license | kadhirash/leetcode | 42e372d5e77d7b3281e287189dcc1cd7ba820bc0 | 72aea7d43471e529ee757ff912b0267ca0ce015d | refs/heads/master | 2023-01-21T19:05:15.123012 | 2020-11-28T13:53:11 | 2020-11-28T13:53:11 | 250,115,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 949 | py | class Solution:
    from typing import List  # implicit in the LeetCode runtime; needed for the annotations below
    def criticalConnections(self, n: int, connections: List[List[int]]) -> List[List[int]]:
def dfs(previous = -1, current = 0, depth = 1):
nonlocal depths, output
temp_depth = depth
depths[current] = depth
for neighbor in graph[current]:
if neighbor == previous:
continue
neighbor_depth = depths[neighbor] or dfs(current, neighbor, depth + 1)
if depth < neighbor_depth:
output.append((current, neighbor))
elif neighbor_depth < temp_depth:
temp_depth = neighbor_depth
depths[current] = temp_depth
return temp_depth
graph = [[] for _ in range(n)]
depths = [0] * n
output = []
for u, v in connections:
graph[u].append(v)
graph[v].append(u)
dfs()
return output | [
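# A small sanity check (editor's sketch): in a triangle 0-1-2 with a tail
# edge 1-3, every triangle edge lies on a cycle, so the only bridge is (1, 3).
def _sketch_bridges_demo():
    edges = [[0, 1], [1, 2], [2, 0], [1, 3]]
    assert sorted(map(sorted, Solution().criticalConnections(4, edges))) == [[1, 3]]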
"[email protected]"
]
| |
8bc175401c234330dcca0e841f43babb1b91a34e | e831c22c8834030c22c54b63034e655e395d4efe | /Strings/409-LongestPalindrome.py | a7c78ae605311f965fabd78f56853df5f5a2ed97 | []
| no_license | szhmery/leetcode | a5eb1a393422b21f9fd4304b3bdc4a9db557858c | 9fcd1ec0686db45d24e2c52a7987d58c6ef545a0 | refs/heads/master | 2023-08-16T00:27:56.866626 | 2021-10-23T07:35:37 | 2021-10-23T07:35:37 | 331,875,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | from collections import Counter
class Solution:
#https://leetcode.com/problems/longest-palindrome/solution/
def longestPalindrome(self, s: str) -> int:
ans = 0
map = Counter(s)
for v in map.values():
ans += v // 2 * 2
if ans % 2 == 0 and v % 2 != 0: # aaaccc, if a is 3, add 1 more.
ans += 1
return ans
solution = Solution()
result = solution.longestPalindrome('abccb')
print(result)
result = solution.longestPalindrome('ccc')
print(result)
result = solution.longestPalindrome('cccaaadde')
print(result)
| [
"[email protected]"
]
| |
eadd064afcb20f96f92a1dd01fffdcfba42712a5 | 24dd3c272457110b2b51bb783715d1245afcd9ce | /eth_dev/infura.py | 73181d7325cfb92aa1ccb3a2719e9daa434c82ab | []
| no_license | fubuloubu/eth-dev | 81761da7942927a97830c426cccf650046e6db74 | 383e51bba0b4471ef1c7a5d6ee2d1ff6a0562f8a | refs/heads/master | 2020-04-30T04:24:29.606074 | 2019-03-19T23:15:56 | 2019-03-19T23:15:56 | 176,610,133 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 595 | py | import os
import sys
from importlib import import_module
def get_web3(network: str, project_id: str):
# Infura websocket API requires Project ID token as of March 23rd
print("Setting Infura Project ID to", project_id, file=sys.stderr)
os.environ['WEB3_INFURA_PROJECT_ID'] = project_id
# Dynamically load the correct autoloader (based on network)
print("Connecting to the", network, "network (using Infura)", file=sys.stderr)
infura_module = import_module("web3.auto.infura.%s" % network)
# Return w3 autoloader for network
return getattr(infura_module, 'w3')
| [
"[email protected]"
]
| |
b28a6e9427e27b1ccb8fa350686110b8a21e74e3 | 68c4805ad01edd612fa714b1e0d210115e28bb7d | /venv/Lib/site-packages/numba/cuda/tests/cudapy/test_print.py | 59513d127a2aebd9b1461428f48b504cac50b75b | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Happy-Egg/redesigned-happiness | ac17a11aecc7459f4ebf0afd7d43de16fb37ae2c | 08b705e3569f3daf31e44254ebd11dd8b4e6fbb3 | refs/heads/master | 2022-12-28T02:40:21.713456 | 2020-03-03T09:04:30 | 2020-03-03T09:04:30 | 204,904,444 | 2 | 1 | Apache-2.0 | 2022-12-08T06:19:04 | 2019-08-28T10:18:05 | Python | UTF-8 | Python | false | false | 1,919 | py | from __future__ import print_function
import numpy as np
from numba import cuda
from numba import unittest_support as unittest
from numba.cuda.testing import captured_cuda_stdout, SerialMixin
def cuhello():
i = cuda.grid(1)
print(i, 999)
print(-42)
def printfloat():
i = cuda.grid(1)
print(i, 23, 34.75, 321)
def printstring():
i = cuda.grid(1)
print(i, "hop!", 999)
def printempty():
print()
class TestPrint(SerialMixin, unittest.TestCase):
def test_cuhello(self):
jcuhello = cuda.jit('void()', debug=False)(cuhello)
with captured_cuda_stdout() as stdout:
jcuhello[2, 3]()
# The output of GPU threads is intermingled, but each print()
# call is still atomic
out = stdout.getvalue()
lines = sorted(out.splitlines(True))
expected = ['-42\n'] * 6 + ['%d 999\n' % i for i in range(6)]
self.assertEqual(lines, expected)
def test_printfloat(self):
jprintfloat = cuda.jit('void()', debug=False)(printfloat)
with captured_cuda_stdout() as stdout:
jprintfloat()
# CUDA and the simulator use different formats for float formatting
self.assertIn(stdout.getvalue(), ["0 23 34.750000 321\n",
"0 23 34.75 321\n"])
def test_printempty(self):
cufunc = cuda.jit('void()', debug=False)(printempty)
with captured_cuda_stdout() as stdout:
cufunc()
self.assertEqual(stdout.getvalue(), "\n")
def test_string(self):
cufunc = cuda.jit('void()', debug=False)(printstring)
with captured_cuda_stdout() as stdout:
cufunc[1, 3]()
out = stdout.getvalue()
lines = sorted(out.splitlines(True))
expected = ['%d hop! 999\n' % i for i in range(3)]
self.assertEqual(lines, expected)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
4ad18edeba3a472fa88ee13931a6c5ad42d6a3dc | d7779408c44502a0cb8da4e3923e1b68492b1610 | /apps/organization/forms.py | ccfe66e97ba094e2b0233dc63e529b03fbcc07b3 | [
"MIT"
]
| permissive | codelieche/moocweb | 5c4429d3ebee43452d42db63fdd364935e2d6eee | 0e25efa597a79a38066ec41559334be604388f30 | refs/heads/master | 2021-01-13T11:49:08.444658 | 2017-02-26T16:36:08 | 2017-02-26T16:36:08 | 81,343,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | # _*_ coding:utf-8 _*_
import re
from django import forms
from operation.models import UserAsk
class UserAskForm(forms.ModelForm):
    '''User consultation form'''
class Meta:
model = UserAsk
fields = ['name', 'mobile', 'course_name']
    def clean_mobile(self):
        '''
        Validate that the mobile number is well formed.
        :return: the cleaned mobile number
        '''
        # Django invokes clean_<fieldname> hooks, so the method must be
        # named clean_mobile to run against the 'mobile' field.
        mobile = self.cleaned_data['mobile']
        REGEX_MOBILE = r'^1[358]\d{9}$|^147\d{8}$|^176\d{8}$'
        p = re.compile(REGEX_MOBILE)
        if p.match(mobile):
            return mobile
        # fall through: reject anything the pattern does not match
raise forms.ValidationError("手机号码非法", code="mobile_invalid")
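# Regex sanity check (an editor's sketch mirroring REGEX_MOBILE above):
def _sketch_mobile_regex():
    pattern = re.compile(r'^1[358]\d{9}$|^147\d{8}$|^176\d{8}$')
    assert pattern.match('13812345678')      # 13x range, 11 digits
    assert pattern.match('17612345678')      # 176 range
    assert not pattern.match('12012345678')  # 120 prefix rejected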
| [
"[email protected]"
]
| |
bf6f236aa05ce0ae841dd0b933b1930625d39351 | a75d4e8ff5e2d0641e539af3980768c10298dfb9 | /main.py | d8058f25dc7e3b8e940ce79bf28746f3235b9492 | []
| no_license | vitvara/tk-space-1 | c70942af4c235ebabc8648d7d49efc9c31feb961 | 57f668f3137ce893d576f03c8f7c6ffc0cb794c3 | refs/heads/main | 2023-03-23T23:35:07.711907 | 2021-03-24T23:17:02 | 2021-03-24T23:17:02 | 351,682,625 | 1 | 0 | null | 2021-03-26T06:20:18 | 2021-03-26T06:20:17 | null | UTF-8 | Python | false | false | 5,424 | py | import math
from random import randint, random
import tkinter as tk
from gamelib import Sprite, GameApp, Text
from consts import *
from elements import Ship, Bullet, Enemy
from utils import random_edge_position, normalize_vector, direction_to_dxdy, vector_len, distance
class SpaceGame(GameApp):
def init_game(self):
self.ship = Ship(self, CANVAS_WIDTH // 2, CANVAS_HEIGHT // 2)
self.level = 1
self.level_text = Text(self, '', 100, 580)
self.update_level_text()
self.score = 0
self.score_wait = 0
self.score_text = Text(self, '', 100, 20)
self.update_score_text()
self.bomb_power = BOMB_FULL_POWER
self.bomb_wait = 0
self.bomb_power_text = Text(self, '', 700, 20)
self.update_bomb_power_text()
self.elements.append(self.ship)
self.enemies = []
self.bullets = []
def add_enemy(self, enemy):
self.enemies.append(enemy)
def add_bullet(self, bullet):
self.bullets.append(bullet)
def bullet_count(self):
return len(self.bullets)
def bomb(self):
if self.bomb_power == BOMB_FULL_POWER:
self.bomb_power = 0
self.bomb_canvas_id = self.canvas.create_oval(
self.ship.x - BOMB_RADIUS,
self.ship.y - BOMB_RADIUS,
self.ship.x + BOMB_RADIUS,
self.ship.y + BOMB_RADIUS
)
self.after(200, lambda: self.canvas.delete(self.bomb_canvas_id))
for e in self.enemies:
if self.ship.distance_to(e) <= BOMB_RADIUS:
e.to_be_deleted = True
self.update_bomb_power_text()
def update_score_text(self):
self.score_text.set_text('Score: %d' % self.score)
def update_bomb_power_text(self):
self.bomb_power_text.set_text('Power: %d%%' % self.bomb_power)
def update_level_text(self):
self.level_text.set_text('Level: %d' % self.level)
def update_score(self):
self.score_wait += 1
if self.score_wait >= SCORE_WAIT:
self.score += 1
self.score_wait = 0
self.update_score_text()
def update_bomb_power(self):
self.bomb_wait += 1
if (self.bomb_wait >= BOMB_WAIT) and (self.bomb_power != BOMB_FULL_POWER):
self.bomb_power += 1
self.bomb_wait = 0
self.update_bomb_power_text()
def create_enemy_star(self):
enemies = []
x = randint(100, CANVAS_WIDTH - 100)
y = randint(100, CANVAS_HEIGHT - 100)
while vector_len(x - self.ship.x, y - self.ship.y) < 200:
x = randint(100, CANVAS_WIDTH - 100)
y = randint(100, CANVAS_HEIGHT - 100)
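        # 18 directions x 20 degrees = a full 360-degree ring of enemies
        # radiating from the spawn point.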
for d in range(18):
dx, dy = direction_to_dxdy(d * 20)
enemy = Enemy(self, x, y, dx * ENEMY_BASE_SPEED, dy * ENEMY_BASE_SPEED)
enemies.append(enemy)
return enemies
def create_enemy_from_edges(self):
x, y = random_edge_position()
vx, vy = normalize_vector(self.ship.x - x, self.ship.y - y)
vx *= ENEMY_BASE_SPEED
vy *= ENEMY_BASE_SPEED
enemy = Enemy(self, x, y, vx, vy)
return [enemy]
def create_enemies(self):
if random() < 0.2:
enemies = self.create_enemy_star()
else:
enemies = self.create_enemy_from_edges()
for e in enemies:
self.add_enemy(e)
def pre_update(self):
if random() < 0.1:
self.create_enemies()
def process_bullet_enemy_collisions(self):
for b in self.bullets:
for e in self.enemies:
if b.is_colliding_with_enemy(e):
b.to_be_deleted = True
e.to_be_deleted = True
def process_ship_enemy_collision(self):
for e in self.enemies:
if self.ship.is_colliding_with_enemy(e):
self.stop_animation()
def process_collisions(self):
self.process_bullet_enemy_collisions()
self.process_ship_enemy_collision()
def update_and_filter_deleted(self, elements):
new_list = []
for e in elements:
e.update()
e.render()
if e.to_be_deleted:
e.delete()
else:
new_list.append(e)
return new_list
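    # Elements flag themselves via to_be_deleted during update(); this pass
    # renders survivors and drops the rest, so deletion never mutates a
    # list while it is being iterated.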
def post_update(self):
self.process_collisions()
self.bullets = self.update_and_filter_deleted(self.bullets)
self.enemies = self.update_and_filter_deleted(self.enemies)
self.update_score()
self.update_bomb_power()
def on_key_pressed(self, event):
if event.keysym == 'Left':
self.ship.start_turn('LEFT')
elif event.keysym == 'Right':
self.ship.start_turn('RIGHT')
elif event.char == ' ':
self.ship.fire()
elif event.char.upper() == 'Z':
self.bomb()
def on_key_released(self, event):
if event.keysym == 'Left':
self.ship.stop_turn('LEFT')
elif event.keysym == 'Right':
self.ship.stop_turn('RIGHT')
if __name__ == "__main__":
root = tk.Tk()
root.title("Space Fighter")
# do not allow window resizing
root.resizable(False, False)
app = SpaceGame(root, CANVAS_WIDTH, CANVAS_HEIGHT, UPDATE_DELAY)
app.start()
root.mainloop()
| [
"[email protected]"
]
| |
49bffe80d5dc1bd97ce084a22875362795285f16 | a7e89bc0436f67e2160905e7d1becd681acc42c1 | /manage.py | ea86dc2ce3543cb9faa1c82848b799de048a0cc7 | []
| no_license | supermanfeng/shengxianproject | 6cc718a99d17054a959af264aae88c02d75be10b | dba59227e918653c5e6b5d4dd892afc4477eccd1 | refs/heads/master | 2020-03-10T13:30:47.214241 | 2018-04-26T13:20:34 | 2018-04-26T13:20:34 | 129,401,909 | 1 | 0 | null | 2018-04-27T12:26:08 | 2018-04-13T12:52:08 | JavaScript | UTF-8 | Python | false | false | 810 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vueshengxian.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
4f12b2cc59d6c1796f624bc5b10d8d35fa779390 | 22749c6a569661b2637233cc0aebdc1701033b26 | /src/python/pants/backend/codegen/protobuf/python/python_protobuf_module_mapper_test.py | d1a882ed3ab9459719226cada03e3667f28f2afd | [
"Apache-2.0"
]
| permissive | akk5597/pants | 2eceb226c39b8ef7f603dfa96684b7522e1a9065 | 7ad295f71d2990eebbbe9c778bbf70f7d9e66584 | refs/heads/main | 2023-08-27T02:40:54.753545 | 2021-11-10T03:42:18 | 2021-11-10T03:42:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,124 | py | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import pytest
from pants.backend.codegen.protobuf.python import additional_fields, python_protobuf_module_mapper
from pants.backend.codegen.protobuf.python.python_protobuf_module_mapper import (
PythonProtobufMappingMarker,
)
from pants.backend.codegen.protobuf.target_types import ProtobufSourcesGeneratorTarget
from pants.backend.codegen.protobuf.target_types import rules as python_protobuf_target_types_rules
from pants.backend.python.dependency_inference.module_mapper import FirstPartyPythonMappingImpl
from pants.core.util_rules import stripped_source_files
from pants.engine.addresses import Address
from pants.testutil.rule_runner import QueryRule, RuleRunner
from pants.util.frozendict import FrozenDict
@pytest.fixture
def rule_runner() -> RuleRunner:
return RuleRunner(
rules=[
*additional_fields.rules(),
*stripped_source_files.rules(),
*python_protobuf_module_mapper.rules(),
*python_protobuf_target_types_rules(),
QueryRule(FirstPartyPythonMappingImpl, [PythonProtobufMappingMarker]),
],
target_types=[ProtobufSourcesGeneratorTarget],
)
def test_map_first_party_modules_to_addresses(rule_runner: RuleRunner) -> None:
rule_runner.set_options(["--source-root-patterns=['root1', 'root2', 'root3']"])
rule_runner.write_files(
{
"root1/protos/f1.proto": "",
"root1/protos/f2.proto": "",
"root1/protos/BUILD": "protobuf_sources()",
# These protos would result in the same module name, so neither should be used.
"root1/two_owners/f.proto": "",
"root1/two_owners/BUILD": "protobuf_sources()",
"root2/two_owners/f.proto": "",
"root2/two_owners/BUILD": "protobuf_sources()",
# A file with grpc. This also uses the `python_source_root` mechanism, which should be
# irrelevant to the module mapping because we strip source roots.
"root1/tests/f.proto": "",
"root1/tests/BUILD": "protobuf_sources(grpc=True, python_source_root='root3')",
}
)
result = rule_runner.request(FirstPartyPythonMappingImpl, [PythonProtobufMappingMarker()])
assert result == FirstPartyPythonMappingImpl(
mapping=FrozenDict(
{
"protos.f1_pb2": (Address("root1/protos", relative_file_path="f1.proto"),),
"protos.f2_pb2": (Address("root1/protos", relative_file_path="f2.proto"),),
"tests.f_pb2": (Address("root1/tests", relative_file_path="f.proto"),),
"tests.f_pb2_grpc": (Address("root1/tests", relative_file_path="f.proto"),),
}
),
ambiguous_modules=FrozenDict(
{
"two_owners.f_pb2": (
Address("root1/two_owners", relative_file_path="f.proto"),
Address("root2/two_owners", relative_file_path="f.proto"),
)
}
),
)
| [
"[email protected]"
]
| |
edf41798fa0e01ff97c9f048dd79ff4eb088c77a | a7d5fad9c31dc2678505e2dcd2166ac6b74b9dcc | /dlkit/authz_adapter/learning/managers.py | 89a47c8f2c81a8efae575f3c791d4a82dc5e30e3 | [
"MIT"
]
| permissive | mitsei/dlkit | 39d5fddbb8cc9a33e279036e11a3e7d4fa558f70 | 445f968a175d61c8d92c0f617a3c17dc1dc7c584 | refs/heads/master | 2022-07-27T02:09:24.664616 | 2018-04-18T19:38:17 | 2018-04-18T19:38:17 | 88,057,460 | 2 | 1 | MIT | 2022-07-06T19:24:50 | 2017-04-12T13:53:10 | Python | UTF-8 | Python | false | false | 62,360 | py | """AuthZ Adapter implementations of learning managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from . import sessions
from ..osid import managers as osid_managers
from ..osid.osid_errors import Unimplemented, OperationFailed, Unsupported
from ..primitives import Id
from ..utilities import raise_null_argument
from dlkit.manager_impls.learning import managers as learning_managers
class LearningProfile(osid_managers.OsidProfile, learning_managers.LearningProfile):
"""Adapts underlying LearningProfile methodswith authorization checks."""
def __init__(self):
osid_managers.OsidProfile.__init__(self)
def _get_hierarchy_session(self, proxy=None):
if proxy is not None:
try:
return self._provider_manager.get_objective_bank_hierarchy_session(proxy)
except Unimplemented:
return None
try:
return self._provider_manager.get_objective_bank_hierarchy_session()
except Unimplemented:
return None
def supports_objective_lookup(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_lookup()
def supports_objective_query(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_query()
def supports_objective_admin(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_admin()
def supports_objective_hierarchy(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_hierarchy()
def supports_objective_hierarchy_design(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_hierarchy_design()
def supports_objective_sequencing(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_sequencing()
def supports_objective_objective_bank(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_objective_bank()
def supports_objective_objective_bank_assignment(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_objective_bank_assignment()
def supports_objective_requisite(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_requisite()
def supports_objective_requisite_assignment(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_requisite_assignment()
def supports_activity_lookup(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_activity_lookup()
def supports_activity_query(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_activity_query()
def supports_activity_admin(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_activity_admin()
def supports_activity_objective_bank(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_activity_objective_bank()
def supports_activity_objective_bank_assignment(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_activity_objective_bank_assignment()
def supports_proficiency_lookup(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_proficiency_lookup()
def supports_proficiency_query(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_proficiency_query()
def supports_proficiency_admin(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_proficiency_admin()
def supports_proficiency_objective_bank_assignment(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_proficiency_objective_bank_assignment()
def supports_objective_bank_lookup(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_bank_lookup()
def supports_objective_bank_admin(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_bank_admin()
def supports_objective_bank_hierarchy(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_bank_hierarchy()
def supports_objective_bank_hierarchy_design(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.supports_resource_lookup
return self._provider_manager.supports_objective_bank_hierarchy_design()
def get_objective_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_objective_record_types()
objective_record_types = property(fget=get_objective_record_types)
def get_objective_search_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_objective_search_record_types()
objective_search_record_types = property(fget=get_objective_search_record_types)
def get_activity_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_activity_record_types()
activity_record_types = property(fget=get_activity_record_types)
def get_activity_search_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_activity_search_record_types()
activity_search_record_types = property(fget=get_activity_search_record_types)
def get_proficiency_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_proficiency_record_types()
proficiency_record_types = property(fget=get_proficiency_record_types)
def get_proficiency_search_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_proficiency_search_record_types()
proficiency_search_record_types = property(fget=get_proficiency_search_record_types)
def get_objective_bank_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_objective_bank_record_types()
objective_bank_record_types = property(fget=get_objective_bank_record_types)
def get_objective_bank_search_record_types(self):
# Implemented from azosid template for -
# osid.resource.ResourceProfile.get_resource_record_types
return self._provider_manager.get_objective_bank_search_record_types()
objective_bank_search_record_types = property(fget=get_objective_bank_search_record_types)
class LearningManager(osid_managers.OsidManager, LearningProfile, learning_managers.LearningManager):
"""Adapts underlying LearningManager methodswith authorization checks."""
def __init__(self):
LearningProfile.__init__(self)
def initialize(self, runtime):
osid_managers.OsidManager.initialize(self, runtime)
config = self._my_runtime.get_configuration()
parameter_id = Id('parameter:learningProviderImpl@authz_adapter')
provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
self._provider_manager = runtime.get_manager('LEARNING', provider_impl)
# need to add version argument
def get_objective_lookup_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_objective_query_session()
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveLookupSession')(
provider_session=self._provider_manager.get_objective_lookup_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
objective_lookup_session = property(fget=get_objective_lookup_session)
@raise_null_argument
def get_objective_lookup_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_objective_query_session_for_objective_bank(objective_bank_id)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveLookupSession')(
provider_session=self._provider_manager.get_objective_lookup_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
def get_objective_query_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_objective_query_session()
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveQuerySession')(
provider_session=self._provider_manager.get_objective_query_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
objective_query_session = property(fget=get_objective_query_session)
@raise_null_argument
def get_objective_query_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_objective_query_session_for_objective_bank(objective_bank_id)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveQuerySession')(
provider_session=self._provider_manager.get_objective_query_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
def get_objective_admin_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveAdminSession')(
provider_session=self._provider_manager.get_objective_admin_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_admin_session = property(fget=get_objective_admin_session)
@raise_null_argument
def get_objective_admin_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveAdminSession')(
provider_session=self._provider_manager.get_objective_admin_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_objective_hierarchy_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveHierarchySession')(
provider_session=self._provider_manager.get_objective_hierarchy_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_hierarchy_session = property(fget=get_objective_hierarchy_session)
@raise_null_argument
def get_objective_hierarchy_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveHierarchySession')(
provider_session=self._provider_manager.get_objective_hierarchy_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_objective_hierarchy_design_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveHierarchyDesignSession')(
provider_session=self._provider_manager.get_objective_hierarchy_design_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_hierarchy_design_session = property(fget=get_objective_hierarchy_design_session)
@raise_null_argument
def get_objective_hierarchy_design_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveHierarchyDesignSession')(
provider_session=self._provider_manager.get_objective_hierarchy_design_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_objective_sequencing_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveSequencingSession')(
provider_session=self._provider_manager.get_objective_sequencing_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_sequencing_session = property(fget=get_objective_sequencing_session)
@raise_null_argument
def get_objective_sequencing_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveSequencingSession')(
provider_session=self._provider_manager.get_objective_sequencing_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_objective_objective_bank_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveObjectiveBankSession')(
provider_session=self._provider_manager.get_objective_objective_bank_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_objective_bank_session = property(fget=get_objective_objective_bank_session)
def get_objective_objective_bank_assignment_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveObjectiveBankAssignmentSession')(
provider_session=self._provider_manager.get_objective_objective_bank_assignment_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_objective_bank_assignment_session = property(fget=get_objective_objective_bank_assignment_session)
def get_objective_requisite_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveRequisiteSession')(
provider_session=self._provider_manager.get_objective_requisite_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_requisite_session = property(fget=get_objective_requisite_session)
@raise_null_argument
def get_objective_requisite_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveRequisiteSession')(
provider_session=self._provider_manager.get_objective_requisite_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_objective_requisite_assignment_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveRequisiteAssignmentSession')(
provider_session=self._provider_manager.get_objective_requisite_assignment_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_requisite_assignment_session = property(fget=get_objective_requisite_assignment_session)
@raise_null_argument
def get_objective_requisite_assignment_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveRequisiteAssignmentSession')(
provider_session=self._provider_manager.get_objective_requisite_assignment_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_activity_lookup_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_activity_query_session()
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityLookupSession')(
provider_session=self._provider_manager.get_activity_lookup_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
activity_lookup_session = property(fget=get_activity_lookup_session)
@raise_null_argument
def get_activity_lookup_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_activity_query_session_for_objective_bank(objective_bank_id)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityLookupSession')(
provider_session=self._provider_manager.get_activity_lookup_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
def get_activity_query_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_activity_query_session()
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityQuerySession')(
provider_session=self._provider_manager.get_activity_query_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
activity_query_session = property(fget=get_activity_query_session)
@raise_null_argument
def get_activity_query_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_activity_query_session_for_objective_bank(objective_bank_id)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityQuerySession')(
provider_session=self._provider_manager.get_activity_query_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
def get_activity_admin_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ActivityAdminSession')(
provider_session=self._provider_manager.get_activity_admin_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
activity_admin_session = property(fget=get_activity_admin_session)
@raise_null_argument
def get_activity_admin_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ActivityAdminSession')(
provider_session=self._provider_manager.get_activity_admin_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_activity_objective_bank_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ActivityObjectiveBankSession')(
provider_session=self._provider_manager.get_activity_objective_bank_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
activity_objective_bank_session = property(fget=get_activity_objective_bank_session)
def get_activity_objective_bank_assignment_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ActivityObjectiveBankAssignmentSession')(
provider_session=self._provider_manager.get_activity_objective_bank_assignment_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
activity_objective_bank_assignment_session = property(fget=get_activity_objective_bank_assignment_session)
def get_proficiency_lookup_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_proficiency_query_session()
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyLookupSession')(
provider_session=self._provider_manager.get_proficiency_lookup_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
proficiency_lookup_session = property(fget=get_proficiency_lookup_session)
@raise_null_argument
def get_proficiency_lookup_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_proficiency_query_session_for_objective_bank(objective_bank_id)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyLookupSession')(
provider_session=self._provider_manager.get_proficiency_lookup_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
def get_proficiency_query_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_proficiency_query_session()
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyQuerySession')(
provider_session=self._provider_manager.get_proficiency_query_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
proficiency_query_session = property(fget=get_proficiency_query_session)
@raise_null_argument
def get_proficiency_query_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_proficiency_query_session_for_objective_bank(objective_bank_id)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyQuerySession')(
provider_session=self._provider_manager.get_proficiency_query_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
hierarchy_session=self._get_hierarchy_session(),
query_session=query_session)
def get_proficiency_admin_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ProficiencyAdminSession')(
provider_session=self._provider_manager.get_proficiency_admin_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
proficiency_admin_session = property(fget=get_proficiency_admin_session)
@raise_null_argument
def get_proficiency_admin_session_for_objective_bank(self, objective_bank_id):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ProficiencyAdminSession')(
provider_session=self._provider_manager.get_proficiency_admin_session_for_objective_bank(objective_bank_id),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
def get_proficiency_objective_bank_assignment_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ProficiencyObjectiveBankAssignmentSession')(
provider_session=self._provider_manager.get_proficiency_objective_bank_assignment_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
proficiency_objective_bank_assignment_session = property(fget=get_proficiency_objective_bank_assignment_session)
def get_objective_bank_lookup_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankLookupSession')(
provider_session=self._provider_manager.get_objective_bank_lookup_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_bank_lookup_session = property(fget=get_objective_bank_lookup_session)
def get_objective_bank_admin_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankAdminSession')(
provider_session=self._provider_manager.get_objective_bank_admin_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_bank_admin_session = property(fget=get_objective_bank_admin_session)
def get_objective_bank_hierarchy_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankHierarchySession')(
provider_session=self._provider_manager.get_objective_bank_hierarchy_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_bank_hierarchy_session = property(fget=get_objective_bank_hierarchy_session)
def get_objective_bank_hierarchy_design_session(self):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankHierarchyDesignSession')(
provider_session=self._provider_manager.get_objective_bank_hierarchy_design_session(),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager)
objective_bank_hierarchy_design_session = property(fget=get_objective_bank_hierarchy_design_session)
def get_learning_batch_manager(self):
raise Unimplemented()
learning_batch_manager = property(fget=get_learning_batch_manager)
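# A minimal, hypothetical usage sketch for the adapter above. It assumes a
# configured dlkit runtime whose 'parameter:learningProviderImpl@authz_adapter'
# entry names a concrete provider; neither 'runtime' nor that provider is
# defined in this module:
#
#     manager = LearningManager()
#     manager.initialize(runtime)  # runtime supplied by the hosting application
#     lookup = manager.get_objective_lookup_session()  # authz-wrapped session
#     for objective in lookup.get_objectives():
#         print(objective.display_name.text)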
class LearningProxyManager(osid_managers.OsidProxyManager, LearningProfile, learning_managers.LearningProxyManager):
"""Adapts underlying LearningProxyManager methodswith authorization checks."""
def __init__(self):
LearningProfile.__init__(self)
def initialize(self, runtime):
osid_managers.OsidProxyManager.initialize(self, runtime)
config = self._my_runtime.get_configuration()
parameter_id = Id('parameter:learningProviderImpl@authz_adapter')
provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
self._provider_manager = runtime.get_proxy_manager('LEARNING', provider_impl)
# need to add version argument
@raise_null_argument
def get_objective_lookup_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_objective_query_session(proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveLookupSession')(
provider_session=self._provider_manager.get_objective_lookup_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_objective_lookup_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_objective_query_session_for_objective_bank(objective_bank_id, proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveLookupSession')(
provider_session=self._provider_manager.get_objective_lookup_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_objective_query_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_objective_query_session(proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveQuerySession')(
provider_session=self._provider_manager.get_objective_query_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_objective_query_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_objective_query_session_for_objective_bank(objective_bank_id, proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ObjectiveQuerySession')(
provider_session=self._provider_manager.get_objective_query_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_objective_admin_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveAdminSession')(
provider_session=self._provider_manager.get_objective_admin_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_admin_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveAdminSession')(
provider_session=self._provider_manager.get_objective_admin_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_hierarchy_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveHierarchySession')(
provider_session=self._provider_manager.get_objective_hierarchy_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_hierarchy_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveHierarchySession')(
provider_session=self._provider_manager.get_objective_hierarchy_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_hierarchy_design_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveHierarchyDesignSession')(
provider_session=self._provider_manager.get_objective_hierarchy_design_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_hierarchy_design_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveHierarchyDesignSession')(
provider_session=self._provider_manager.get_objective_hierarchy_design_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_sequencing_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveSequencingSession')(
provider_session=self._provider_manager.get_objective_sequencing_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_sequencing_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveSequencingSession')(
provider_session=self._provider_manager.get_objective_sequencing_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_objective_bank_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveObjectiveBankSession')(
provider_session=self._provider_manager.get_objective_objective_bank_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_objective_bank_assignment_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveObjectiveBankAssignmentSession')(
provider_session=self._provider_manager.get_objective_objective_bank_assignment_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_requisite_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveRequisiteSession')(
provider_session=self._provider_manager.get_objective_requisite_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_requisite_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveRequisiteSession')(
provider_session=self._provider_manager.get_objective_requisite_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_requisite_assignment_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveRequisiteAssignmentSession')(
provider_session=self._provider_manager.get_objective_requisite_assignment_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_requisite_assignment_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ObjectiveRequisiteAssignmentSession')(
provider_session=self._provider_manager.get_objective_requisite_assignment_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_activity_lookup_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_activity_query_session(proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityLookupSession')(
provider_session=self._provider_manager.get_activity_lookup_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_activity_lookup_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_activity_query_session_for_objective_bank(objective_bank_id, proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityLookupSession')(
provider_session=self._provider_manager.get_activity_lookup_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_activity_query_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_activity_query_session(proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityQuerySession')(
provider_session=self._provider_manager.get_activity_query_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_activity_query_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_activity_query_session_for_objective_bank(objective_bank_id, proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ActivityQuerySession')(
provider_session=self._provider_manager.get_activity_query_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_activity_admin_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ActivityAdminSession')(
provider_session=self._provider_manager.get_activity_admin_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_activity_admin_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ActivityAdminSession')(
provider_session=self._provider_manager.get_activity_admin_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_activity_objective_bank_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ActivityObjectiveBankSession')(
provider_session=self._provider_manager.get_activity_objective_bank_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_activity_objective_bank_assignment_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ActivityObjectiveBankAssignmentSession')(
provider_session=self._provider_manager.get_activity_objective_bank_assignment_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_proficiency_lookup_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_proficiency_query_session(proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyLookupSession')(
provider_session=self._provider_manager.get_proficiency_lookup_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_proficiency_lookup_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_proficiency_query_session_for_objective_bank(objective_bank_id, proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyLookupSession')(
provider_session=self._provider_manager.get_proficiency_lookup_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_proficiency_query_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_template
try:
query_session = self._provider_manager.get_proficiency_query_session(proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyQuerySession')(
provider_session=self._provider_manager.get_proficiency_query_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_proficiency_query_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
try:
query_session = self._provider_manager.get_proficiency_query_session_for_objective_bank(objective_bank_id, proxy)
query_session.use_federated_objective_bank_view()
except Unimplemented:
query_session = None
return getattr(sessions, 'ProficiencyQuerySession')(
provider_session=self._provider_manager.get_proficiency_query_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
proxy=proxy,
hierarchy_session=self._get_hierarchy_session(proxy),
query_session=query_session)
@raise_null_argument
def get_proficiency_admin_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ProficiencyAdminSession')(
provider_session=self._provider_manager.get_proficiency_admin_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_proficiency_admin_session_for_objective_bank(self, objective_bank_id, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_lookup_session_for_bin_template
return getattr(sessions, 'ProficiencyAdminSession')(
provider_session=self._provider_manager.get_proficiency_admin_session_for_objective_bank(objective_bank_id, proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_proficiency_objective_bank_assignment_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ProficiencyObjectiveBankAssignmentSession')(
provider_session=self._provider_manager.get_proficiency_objective_bank_assignment_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_bank_lookup_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankLookupSession')(
provider_session=self._provider_manager.get_objective_bank_lookup_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_bank_admin_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankAdminSession')(
provider_session=self._provider_manager.get_objective_bank_admin_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_bank_hierarchy_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankHierarchySession')(
provider_session=self._provider_manager.get_objective_bank_hierarchy_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
@raise_null_argument
def get_objective_bank_hierarchy_design_session(self, proxy):
# Implemented from azosid template for -
# osid.resource.ResourceManager.get_resource_admin_session_template
return getattr(sessions, 'ObjectiveBankHierarchyDesignSession')(
provider_session=self._provider_manager.get_objective_bank_hierarchy_design_session(proxy),
authz_session=self._get_authz_session(),
override_lookup_session=self._get_override_lookup_session(),
provider_manager=self._provider_manager,
proxy=proxy)
def get_learning_batch_proxy_manager(self):
raise Unimplemented()
learning_batch_proxy_manager = property(fget=get_learning_batch_proxy_manager)
| [
"[email protected]"
]
| |
7b512c468b007c8b2f336f735e4eb125dfc4082e | a03eba726a432d8ef133f2dc55894ba85cdc4a08 | /config/hostsconf/views.py | f6ec260509c9276a8b978a80bd4cf61bb22bcbaf | [
"MIT"
]
| permissive | mansonul/events | 2546c9cfe076eb59fbfdb7b4ec8bcd708817d59b | 4f6ca37bc600dcba3f74400d299826882d53b7d2 | refs/heads/master | 2021-01-15T08:53:22.442929 | 2018-01-30T16:14:20 | 2018-01-30T16:14:20 | 99,572,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | from django.http import HttpResponseRedirect
from django_hosts.resolvers import reverse as host_reverse
def www_root_redirect(request, path=None):
    url_ = host_reverse("home", host='www')
    if path is not None:
        url_ = url_ + path
    # redirect to the computed www URL (the original discarded url_ here)
    return HttpResponseRedirect(url_)
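# Hypothetical wiring for the redirect above (not part of this file): the root
# urlconf of a non-www host could catch all paths and hand them to it, e.g.
#     re_path(r'^(?P<path>.*)$', www_root_redirect)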
| [
"[email protected]"
]
| |
90e783ea257a3f30cbf5ecd45264e3e1bfb0f5e5 | dc221edce0ad617aac3b9ad8f4f347ff84f56bf9 | /.history/client_20200807180109.py | 54c6da2132910d7f0425fdabfa0c1da205eccabc | []
| no_license | zlm05170/cacontroller | 310014c83ecf130643230eba87990e635fe1575f | e76d2eb5d58d6adfe7823e0dcd0059027c52b6bc | refs/heads/master | 2022-12-21T08:05:58.315017 | 2020-09-23T11:45:07 | 2020-09-23T11:45:07 | 284,527,141 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,720 | py | import asyncio
import websockets
import time
import json
import traceback
def view_actor_data(actor, port_type, port_name):
    # placeholder hook for inspecting a single actor port; not implemented yet
    pass
def get_port_value_by_name(port_list, name):
for port in port_list:
if port['port']['name'] == name:
return port['value']
def find_port_index_by_name(actor, port_type, port_name):
port_list = actor[port_type]
num_port = len(port_list)
for i in range(num_port):
if port_list[i]['port']['name'] == port_name:
return i
def print_port_data_by_index(actor, port_type, index):
    entry = actor[port_type][index]
    # the value lives next to the 'port' descriptor, not inside it
    print(entry['port']['name'] + ': ' + str(entry['value']))
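# The three helpers above assume port entries of the (observed) shape
#     {'port': {'name': 'LONGITUDE'}, 'value': {'value': 10.4}}
# so, for example (hypothetical data):
#     ports = [{'port': {'name': 'LONGITUDE'}, 'value': {'value': 10.4}}]
#     get_port_value_by_name(ports, 'LONGITUDE')  # -> {'value': 10.4}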
async def start():
uri = "ws://192.168.114.18:8887"
actor_info = {
'clazz' : '',
'name' : '',
'uuid' : None,
'parent_uuid' : None
}
gps_gunnerus = actor_info.copy()
gps_gunnerus['clazz'] = 'GPSController'
gps_gunnerus['name'] = 'GPS1'
gps_target_ship_1 = actor_info.copy()
gps_target_ship_1['clazz'] = 'GPSController'
gps_target_ship_1['name'] = 'Target Ship 1'
gps_target_ship_2 = actor_info.copy()
gps_target_ship_2['clazz'] = 'GPSController'
gps_target_ship_2['name'] = 'Target Ship 2'
gunnerus_thruster_port = actor_info.copy()
gunnerus_thruster_port['clazz'] = 'ThrusterActor'
gunnerus_thruster_port['name'] = 'Port'
gunnerus_thruster_starboard = actor_info.copy()
gunnerus_thruster_starboard['clazz'] = 'ThrusterActor'
gunnerus_thruster_starboard['name'] = 'Starboard'
actor_info_list = [gps_gunnerus, gps_target_ship_1, gps_target_ship_2, gunnerus_thruster_port, gunnerus_thruster_starboard]
actor_list = [None for i in range(5)]
async with websockets.connect(uri, ping_timeout=None) as websocket:
        while True:
            if not websocket.open:
                print('reconnecting')
                websocket = await websockets.connect(uri)
                continue
            resp = await websocket.recv()
            try:
                # strip any leading framing and decode the JSON payload
                data_dic = json.loads(resp[resp.index('{'):])
            except ValueError:
                traceback.print_exc()
                continue
            # match the message against every tracked actor and cache it
            for i in range(len(actor_info_list)):
                actor_info = actor_info_list[i]
                actor = evaluate(data_dic, actor_info['clazz'], actor_info['name'])
                if actor is not None:
                    actor_list[i] = actor
                    actor_info['uuid'] = actor['uuid']
                    actor_info['parent_uuid'] = get_port_value_by_name(
                        actor['output'], 'PARENT')
            # no outbound messages are sent; the connection is receive-only
            if actor_list[0] is not None:
                index = find_port_index_by_name(
                    actor_list[0], 'output', 'longitude'.upper())
                print_port_data_by_index(actor_list[0], 'output', index)
def evaluate(data_dic, clazz, name):
    # match a decoded message against an actor by class substring and exact name
    matches_clazz = data_dic.get('clazz', '').find(clazz) != -1
    matches_name = data_dic.get('name') == name
    if matches_clazz and matches_name:
        return data_dic
    return None
def clazz_ls(data_dic):
#print(data_dic['output']) # list
lon, lat, east, north, course, speed, rpm, alpha = 0.0, 0.0, 0.0, 0.0, 0.0, [], [], []
for message in data_dic['output']:
port = message['port']['name']
if port == "longitude".upper():
lon = message['value']['value']
elif port == "latitude".upper():
lat = message['value']['value']
elif port == "easting".upper():
east = message['value']['value']
elif port == "northing".upper():
north = message['value']['value']
elif port == "bearing".upper():
course = message['value']['value']
elif port == "WORLD_VELOCITY".upper():
value_ls = message['value']['valueObjects']
for v in value_ls:
speed.append(v['value'])
elif port == "ACTUAL_RPM".upper():
rpm = message['value']['value']
elif port == "ANGLE".upper():
alpha = message['value']['value']
else:
pass
all_data = [lon, lat, east, north, course, speed, rpm, alpha]
#return all_data
print(all_data)
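# Sketch of the message shape clazz_ls expects (field names taken from the
# branches above; the numbers are made up):
#
#     sample = {'output': [
#         {'port': {'name': 'LONGITUDE'}, 'value': {'value': 10.40}},
#         {'port': {'name': 'LATITUDE'}, 'value': {'value': 63.44}},
#         {'port': {'name': 'WORLD_VELOCITY'},
#          'value': {'valueObjects': [{'value': 1.2}, {'value': 0.1}]}},
#     ]}
#     clazz_ls(sample)  # prints [10.4, 63.44, 0.0, 0.0, 0.0, [1.2, 0.1], [], []]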
async def savefile(receivedata):
    # persist the last raw server message for offline inspection
    with open('serverdata.json', 'w') as json_file:
        json_file.write(receivedata)
if __name__ == '__main__':
#rospy.init_node("simulator_drl")
asyncio.get_event_loop().run_until_complete(start())
asyncio.get_event_loop().run_forever()
| [
"[email protected]"
]
| |
5bbb829967936246b03da49dbe2c58f523568660 | 6959d1dec46a490ac1469e21c68506157efea0ee | /Pandora/apps.py | 02ebc2445ba9bb709fd38d97417e67ff5d5ceeb7 | []
| no_license | andyshu6/Nuclear | 305f589a7d81cd90f7f19c3b28cb50d5d7867af3 | 88c68e50dc7506f495120313536e3cfa48329e8f | refs/heads/master | 2020-03-08T18:29:33.430204 | 2018-07-11T16:27:18 | 2018-07-11T16:27:18 | 107,992,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from django.apps import AppConfig
class PandoraConfig(AppConfig):
name = 'Pandora'
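# Activated by listing the app in the project settings, e.g. (assumed
# settings module, not part of this file):
#     INSTALLED_APPS = [..., 'Pandora.apps.PandoraConfig']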
| [
"[email protected]"
]
| |
769a920462f74093deebe33e6db9ca5f4ce57734 | bc6e2056500afdd5d11a28a613d6d73f5dd05447 | /moneycash/produccion/admin.py | 28741262c044e84e45e8db2088d83ef264941422 | []
| no_license | xangcastle/respaldo | d0540fabc089f947f052019431d55a9c3c85f131 | 48c5f53b2a2bce0bfa79b1fcc657aa40268e702b | refs/heads/master | 2021-01-10T01:52:48.102689 | 2015-12-16T15:42:02 | 2015-12-16T15:42:02 | 48,118,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,700 | py | from django.contrib import admin
from .models import *
from moneycash.entidad import entidad_admin
from moneycash.documento import documento_admin
from django.template.context import RequestContext
from django.shortcuts import render_to_response
class contadores_tabular(admin.TabularInline):
model = equipo_periodo
extra = 0
classes = ('grp-collapse grp-open',)
#fields = ('equipo', 'contador_inicial', 'contador_final')
class detalle_recibo_tabular(admin.TabularInline):
model = recibo_detalle
extra = 0
classes = ('grp-collapse grp-open',)
class recibo_admin(documento_admin):
list_display = ('numero', 'fecha', 'area', 'copias', 'importe')
inlines = [detalle_recibo_tabular]
fieldsets = (('Datos del Recibo',
{'classes': ('grp-collapse grp-open',),
'fields': (('numero', 'fecha'), 'area'), }),
("Detalle Inlines", {"classes":
("placeholder recibo_detalle_set-group",), "fields": ()}),
('Datos de Facturacion',
{'classes': ('grp-collapse grp-open',),
'fields': (('copias', 'importe', 'tc'),), }),)
actions = ['generar_imprimir', 'facturar']
list_filter = ('periodo', 'area')
def generar_imprimir(self, request, queryset):
for r in queryset:
r.impreso = True
r.save()
id_unico = False
if queryset.count() == 1:
id_unico = True
ctx = {'queryset': queryset, 'id_unico': id_unico}
return render_to_response('moneycash/produccion/recibo.html', ctx,
context_instance=RequestContext(request))
generar_imprimir.short_description = "Imprimir recibos selecionados"
    def facturar(self, request, queryset):
        # resolves to a module-level facturar helper (presumably pulled in by
        # the star import above), not to this admin action itself
        facturar(queryset)
class periodo_admin(admin.ModelAdmin):
list_display = ('short_name', 'inicio_produccion', 'fin_produccion',
'copias_equipos', 'copias_areas', 'importe_produccion', 'cerrado')
inlines = [contadores_tabular]
fieldsets = (('Datos del Periodo', {'classes': ('grp-collapse grp-open',),
'fields': (('fecha_inicial', 'fecha_final'),
('inicio_produccion', 'fin_produccion'),)}),)
def generar_recibos(self, request, queryset):
for p in queryset:
crear_recibos(p)
generar_recibos.short_description = \
'generar recibos de los periodos seleccionados'
def cargar_copias(self, request, queryset):
for p in queryset:
cargar_copias(p)
cargar_copias.short_description = \
'cargar copias de los periodos seleccionados'
def activar_equipos(self, request, queryset):
for p in queryset:
activar_equipos(p)
activar_equipos.short_description = \
'activar equipos de los periodos seleccionados'
def cerrar_(self, request, queryset):
for p in queryset:
cerrar(p)
    cerrar_.short_description = \
        'cerrar periodos seleccionados'
actions = [generar_recibos, cargar_copias, activar_equipos, cerrar_]
class equipo_admin(entidad_admin):
list_display = ('code', 'modelo', 'serie', 'marca', 'contador_inicial',
'contador_actual', 'vida_util', 'costo_compra', 'depreciacion_copia',
'valor_depreciado', 'precio_venta', 'activo',)
search_fields = ('code', 'name', 'modelo', 'serie')
list_filter = ('activo', 'marca', 'ubicacion')
fieldsets = (('Datos Generales',
{'classes': ('grp-collapse grp-open',),
'fields': (('code', 'modelo'), ('serie', 'marca'),
('velocidad', 'ubicacion')), }),
('Datos de Facturacion',
{'classes': ('grp-collapse grp-open',),
'fields': (('contador_inicial', 'contador_actual', 'vida_util'),
('costo_compra', 'depreciacion_copia', 'valor_depreciado'),
('precio_venta', 'activo'), ('costo_copia',
'precio_copia')), }),)
ordering = ['code']
class cliente_admin(entidad_admin):
list_display = ('code', 'name', 'identificacion', 'telefono', 'direccion',
'activo')
search_fields = ('code', 'name', 'telefono')
list_filter = ('activo', )
fieldsets = (('Datos Generales',
{'classes': ('grp-collapse grp-open',),
'fields': (('code', 'name'), ('identificacion', 'telefono'),
('direccion',), ('contacto', 'nombre_area'), 'activo'), }),)
class area_admin(entidad_admin):
list_display = ('code', 'name', 'encargado', 'unidad_ejecutora',
'ubicacion', 'activo')
search_fields = ('code', 'name', 'encargado')
list_filter = ('activo', 'cliente', 'ubicacion')
fieldsets = (('Datos del Area',
{'classes': ('grp-collapse grp-open',),
'fields': (('code', 'name'), ('encargado', 'unidad_ejecutora'),
('equipos', 'activo'), ('ubicacion', 'cliente'), 'item'), }),)
class factura_detalle_admin(admin.TabularInline):
model = factura_detalle
extra = 0
classes = ('grp-collapse grp-open',)
class factura_admin(documento_admin):
    list_display = ('numero', 'fecha', 'cliente', 'subtotal', 'descuento',
        'iva', 'total', 'tc', 'ir', 'al', 'impreso')
fieldsets = (
('Datos de la Factura',
{'classes': ('grp-collapse grp-open',),
'fields': (('numero', 'fecha'), 'cliente',
('exento_iva', 'exento_ir', 'exento_al')), }),
("Detalle Inlines",
{"classes": ("placeholder factura_detalle_set-group",),
'fields': ()}),
('Totales de la Factura',
{'classes': ('grp-collapse grp-open',),
'fields': (('subtotal', 'descuento'),
('iva', 'total'), ('ir', 'al'), 'tc'), }),
)
inlines = [factura_detalle_admin]
actions = ['generar_imprimir']
def generar_imprimir(self, request, queryset):
id_unico = False
if queryset.count() == 1:
id_unico = True
ctx = {'queryset': queryset, 'id_unico': id_unico}
queryset.update(impreso=True)
return render_to_response('moneycash/produccion/factura.html', ctx,
context_instance=RequestContext(request))
generar_imprimir.short_description = "Imprimir Facturas Selecionadas"
admin.site.register(Marca, entidad_admin)
admin.site.register(Equipo, equipo_admin)
admin.site.register(Area, area_admin)
admin.site.register(Ubicacion, entidad_admin)
admin.site.register(Cliente, cliente_admin)
admin.site.register(Periodo, periodo_admin)
admin.site.register(Recibo, recibo_admin)
admin.site.register(Factura, factura_admin)
admin.site.register(Item, entidad_admin)
admin.site.register(Categoria, entidad_admin)
| [
"[email protected]"
]
| |
762c5f01dc26bf85b36b2cda337b1e05fd67f44e | 22f96e07b22e3ca89ee757badd1f35ed9efcc034 | /docs/conf.py | d4b5fe98e2c13e6412c9c4feeec2f5eaf200fdf8 | [
"MIT"
]
| permissive | Duc98f/MangAdventure | 83e341ecbdb6592c947f77e32848346dcc23e861 | fe69c850f6adce1d9a8755e5aa63db358a6084f6 | refs/heads/master | 2023-06-09T23:08:25.595545 | 2021-06-13T10:55:16 | 2021-06-13T11:16:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,077 | py | # -- Setup Django --
from os import environ as env
from os.path import dirname, join
from sys import path
path.insert(0, dirname(dirname(__file__)))
path.insert(1, join(dirname(__file__), '_ext'))
env['DJANGO_SETTINGS_MODULE'] = 'MangAdventure.tests.settings'
__import__('django').setup()
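# Django must be fully set up before Sphinx autodoc imports any app modules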
# -- Project information --
import MangAdventure as MA # noqa: E402
project = 'MangAdventure'
author = MA.__author__
release = MA.__version__
copyright = f'2018-2021, {project}, {MA.__license__} license'
# -- General configuration --
extensions = [
'sphinx.ext.autodoc',
'mangadventure_patches',
'sphinx_autodoc_typehints',
'sphinx.ext.intersphinx',
'sphinx.ext.extlinks',
'sphinx.ext.viewcode',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = 'en'
pygments_style = 'manni'
needs_sphinx = '3.3'
# -- InterSphinx & extlinks configuration --
_django = 'https://docs.djangoproject.com/en/3.2/'
_mdn = 'https://developer.mozilla.org/en-US/docs/Web/'
intersphinx_mapping = {
'django': (_django, f'{_django}_objects/'),
'python': ('https://docs.python.org/3.6/', None),
}
extlinks = {
'setting': (f'{_django}ref/settings/#std:setting-%s', ''),
'tag': (f'{_django}ref/templates/builtins/#%s', ''),
'auth': ('https://django-allauth.rtfd.io/en/latest/%s', ''),
'csp': (f'{_mdn}HTTP/Headers/Content-Security-Policy/%s', ''),
'status': (f'{_mdn}HTTP/Status/%s', ''),
'header': (f'{_mdn}HTTP/Headers/%s', ''),
'schema': ('https://schema.org/%s', ''),
}
# -- Autodoc configuration --
autodoc_default_options = {
'member-order': 'bysource',
'special-members': True,
'undoc-members': True,
'exclude-members': ','.join((
'__new__',
'__dict__',
'__repr__',
'__init__',
'__slots__',
'__module__',
'__weakref__',
'__slotnames__',
'__annotations__',
))
}
autodoc_mock_imports = ['pytest']
autodoc_inherit_docstrings = True
always_document_param_types = True
set_type_checking_flag = True
typehints_fully_qualified = False
typehints_document_rtype = True
# disable sphinx.ext.autodoc.typehints
autodoc_typehints = 'none'
# -- Options for HTML output --
html_theme = 'sphinx_rtd_theme'
html_theme_path = [__import__(html_theme).get_html_theme_path()]
html_theme_options = {
'logo_only': True,
'display_version': False,
'collapse_navigation': True,
}
html_static_path = ['_static']
html_logo = '_static/logo.png'
# html_sidebars = {}
# -- Options for HTMLHelp output --
htmlhelp_basename = f'{project}Doc'
# -- Options for LaTeX output --
latex_elements = {}
latex_documents = [(
master_doc, f'{project}.tex',
f'{project} Documentation', author, 'manual'
)]
# -- Options for manual page output --
man_pages = [(
master_doc, project.lower(),
f'{project} Documentation', author.split(', '), 7
)]
# -- Options for Texinfo output --
texinfo_documents = [(
master_doc, project, f'{project} Documentation',
author, project, MA.__doc__, 'Miscellaneous'
)]
| [
"[email protected]"
]
| |
f3a43ef0015900475f2c2da760ba827c2fe933df | 923f1c7bd149d37c23c5b2f067baab3f5b95a4cf | /setup.py | 309502c9f88be647a041ae202762971497a89441 | [
"BSD-2-Clause"
]
| permissive | Lokeshburade007/python-mammoth | 7467d08ad906e932fbdba720557ee5fd8d862c28 | f8eb2e1214b7ef1749f2cf73a91b09c9f3adf6a8 | refs/heads/master | 2023-08-21T00:35:06.783844 | 2021-10-12T18:52:31 | 2021-10-12T18:52:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,514 | py | #!/usr/bin/env python
import os
import sys
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='1.4.17',
description='Convert Word documents from docx to simple and clean HTML and Markdown',
long_description=read("README"),
author='Michael Williamson',
author_email='[email protected]',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.html', 'mammoth.styles', 'mammoth.styles.parser', 'mammoth.writers'],
entry_points={
"console_scripts": [
"mammoth=mammoth.cli:main"
]
},
keywords="docx word office clean html markdown md",
install_requires=[
"cobble>=0.1.3,<0.2",
],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
license="BSD-2-Clause",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
)
| [
"[email protected]"
]
| |
24b1dd003a704844352756f3fd2812733e0fd7d8 | 6955cf08b26ddce910f4932374d9b5242680009f | /tasks/toplevel.py | e3710f3b479bba0c705d153c244b825c31850b05 | [
"MIT"
]
| permissive | ADicksonLab/wepy | 6c4cea39dacecf4597e0278a0a7e4a50aa3641e5 | 3a029510114db6e66db6a264bd213c9f06559b41 | refs/heads/master | 2023-04-30T03:26:22.365330 | 2023-04-21T15:50:39 | 2023-04-21T15:50:39 | 101,077,926 | 43 | 21 | MIT | 2023-08-31T04:01:04 | 2017-08-22T15:24:10 | Python | UTF-8 | Python | false | false | 88 | py | """User editable top-level commands"""
from invoke import task
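# `task` is imported (currently unused) so user-defined commands added to
# this file can use the @task decorator directly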
from .config import *
| [
"[email protected]"
]
| |
452d6a1116be732f045e520d350dc705407e2c81 | 8fcc27160f8700be46296568260fa0017a0b3004 | /client/eve/client/script/ui/shared/fitting/panels/offensePanel.py | 2f426e8e743c0e2dd09191bd7a22606f6464d826 | []
| no_license | connoryang/dec-eve-serenity | 5d867f4eedfa896a4ef60f92556356cafd632c96 | b670aec7c8b4514fc47cd52e186d7ccf3aabb69e | refs/heads/master | 2021-01-22T06:33:16.303760 | 2016-03-16T15:15:32 | 2016-03-16T15:15:32 | 56,389,750 | 1 | 0 | null | 2016-04-16T15:05:24 | 2016-04-16T15:05:24 | null | UTF-8 | Python | false | false | 3,079 | py | #Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\eve\client\script\ui\shared\fitting\panels\offensePanel.py
from carbonui import const as uiconst
from carbonui.primitives.container import Container
from carbonui.primitives.sprite import Sprite
from eve.client.script.ui.control.eveLabel import EveLabelMedium
from eve.client.script.ui.station.fitting.fittingTooltipUtils import SetFittingTooltipInfo
from eve.client.script.ui.shared.fitting.panels.basePanel import BaseMenuPanel
from localization import GetByLabel
import uthread
class OffensePanel(BaseMenuPanel):
damageStats = (('turretDps', 'res:/UI/Texture/Icons/26_64_1.png', 'UI/Fitting/FittingWindow/TurretDpsTooltip', 'DamagePerSecondTurrets'), ('droneDps', 'res:/UI/Texture/Icons/drones.png', 'UI/Fitting/FittingWindow/DroneDpsTooltip', 'DamagePerSecondDrones'), ('missileDps', 'res:/UI/Texture/Icons/81_64_16.png', 'UI/Fitting/FittingWindow/MissileDpsTooltip', 'DamagePerSecondMissiles'))
iconSize = 26
def ApplyAttributes(self, attributes):
BaseMenuPanel.ApplyAttributes(self, attributes)
def LoadPanel(self, initialLoad = False):
self.Flush()
self.ResetStatsDicts()
self.display = True
parentGrid = self.GetValueParentGrid(columns=len(self.damageStats))
for dps, texturePath, hintPath, tooltipName in self.damageStats:
hint = GetByLabel(hintPath)
c = self.GetValueCont(self.iconSize)
parentGrid.AddCell(cellObject=c)
icon = Sprite(texturePath=texturePath, parent=c, align=uiconst.CENTERLEFT, pos=(0,
0,
self.iconSize,
self.iconSize), state=uiconst.UI_DISABLED)
SetFittingTooltipInfo(targetObject=c, tooltipName=tooltipName)
c.hint = hint
label = EveLabelMedium(text='', parent=c, state=uiconst.UI_DISABLED, align=uiconst.CENTERLEFT)
self.statsLabelsByIdentifier[dps] = label
self.statsIconsByIdentifier[dps] = icon
self.statsContsByIdentifier[dps] = c
BaseMenuPanel.FinalizePanelLoading(self, initialLoad)
def UpdateOffenseStats(self):
uthread.new(self._UpdateOffenseStats)
def _UpdateOffenseStats(self):
itemID = self.controller.GetItemID()
turretDps, missileDps = self.dogmaLocation.GetTurretAndMissileDps(itemID)
dpsText = GetByLabel('UI/Fitting/FittingWindow/DpsLabel', dps=turretDps)
self.SetLabel('turretDps', dpsText)
missileText = GetByLabel('UI/Fitting/FittingWindow/DpsLabel', dps=missileDps)
self.SetLabel('missileDps', missileText)
droneDps, drones = self.dogmaLocation.GetOptimalDroneDamage(itemID)
droneText = GetByLabel('UI/Fitting/FittingWindow/DpsLabel', dps=droneDps)
self.SetLabel('droneDps', droneText)
totalDps = turretDps + missileDps + droneDps
totalDpsText = GetByLabel('UI/Fitting/FittingWindow/DpsLabel', dps=totalDps)
self.SetStatusText(totalDpsText)
| [
"[email protected]"
]
| |
90d61a45791a4c4fca451ce3958912b1271ff667 | f71d67025b732e66e1a37c02c05392c3dd116d65 | /Lessons/ITP1/08_Character/d.py | 81a5619fd4674529f96b237cb3fef6f221b7ee12 | []
| no_license | clarinet758/aoj | 2829f92137dd1a93734445e1e92513f8e3e0b5c0 | 21787ffee1a6dd60c717d7b880b63107187e4710 | refs/heads/main | 2023-06-25T12:04:40.127040 | 2023-06-19T16:36:38 | 2023-06-19T16:36:38 | 34,978,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import time
import sys
import io
import re
import math
import itertools
#sys.stdin=file('input.txt')
#sys.stdout=file('output.txt','w')
#10**9+7
mod=1000000007
#mod=1777777777
pi=3.141592653589
xy=[(1,0),(-1,0),(0,1),(0,-1)]
bs=[(-1,-1),(-1,1),(1,1),(1,-1)]
#start = time.clock()
n=raw_input()*2
print'Yes' if raw_input() in n else'No'
ans=chk=0
#end = time.clock()
#print end - start
| [
"[email protected]"
]
| |
262a46b28e0f81a173486d6faa14c8be88a61e79 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2738/60598/281309.py | b37b2756d6e576c77a144d36933054d39da07823 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 850 | py | input()
matrix = []
while 1:
s = input().replace(" ","")
if s == "]":
break
if s[-1] == ',':
matrix.append(s[1:-2].split(","))
else:
matrix.append(s[1:-1].split(","))
row = len(matrix)
col = len(matrix[0])
result = 0
are = []
for i in range(row):
for j in range(col):
if matrix[i][j] == "\"1\"":
high = 0
wides = []
for h in range(i, row):
high += 1
wide = 0
for s in range(j, col):
if matrix[h][s] == "\"1\"":
wide += 1
else:
break
wides.append(wide)
                # area with (i, j) as the top-left corner and height `high`;
                # a zero-width row means no taller rectangle is possible
                tempAre = high * min(wides)
                if tempAre == 0:
                    break
                are.append(tempAre)
print(max(are) if are else 0)
| [
"[email protected]"
]
| |
4fa11bf0bf80e1c45ba384816c50e106b6e37996 | 63eb05febaac75f781a266d48d1cfff2debe64ea | /the_tale/game/actions/tests/test_meta_action_arena_pvp_1x1.py | cc1908c6fc5a58626d98e062129badc099331957 | [
"BSD-2-Clause-Views"
]
| permissive | MadRiw/the-tale | 185ca33e410a59de63a594daf15fc8a5701338d2 | 1801beab2ed149556c0b3380e8adaaa976f74e6c | refs/heads/master | 2021-01-15T23:45:34.873857 | 2015-06-17T13:06:12 | 2015-06-17T13:06:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,832 | py | # coding: utf-8
import mock
from the_tale.common.utils import testcase
from the_tale.accounts.prototypes import AccountPrototype
from the_tale.accounts.logic import register_user
from the_tale.game.logic_storage import LogicStorage
from the_tale.game.logic import create_test_map
from the_tale.game.prototypes import TimePrototype
from the_tale.game.balance import constants as c
from the_tale.game.actions.meta_actions import MetaActionArenaPvP1x1Prototype
from the_tale.game.actions.models import MetaAction, MetaActionMember
from the_tale.game.pvp.models import Battle1x1, Battle1x1Result
from the_tale.game.pvp.relations import BATTLE_1X1_STATE
from the_tale.game.pvp.prototypes import Battle1x1Prototype
from the_tale.game.pvp.tests.helpers import PvPTestsMixin
from the_tale.game.pvp.abilities import ABILITIES, Flame
class ArenaPvP1x1MetaActionTest(testcase.TestCase, PvPTestsMixin):
def setUp(self):
super(ArenaPvP1x1MetaActionTest, self).setUp()
create_test_map()
result, account_1_id, bundle_id = register_user('test_user_1')
result, account_2_id, bundle_id = register_user('test_user_2')
self.account_1 = AccountPrototype.get_by_id(account_1_id)
self.account_2 = AccountPrototype.get_by_id(account_2_id)
self.storage = LogicStorage()
self.storage.load_account_data(self.account_1)
self.storage.load_account_data(self.account_2)
self.hero_1 = self.storage.accounts_to_heroes[self.account_1.id]
self.hero_2 = self.storage.accounts_to_heroes[self.account_2.id]
# for test data reset
self.hero_1.health = self.hero_1.max_health / 2
self.hero_1.pvp.set_advantage(1)
self.hero_1.pvp.set_effectiveness(0.5)
# for test data reset
self.hero_2.pvp.set_advantage(1)
self.hero_2.pvp.set_effectiveness(0.5)
self.battle_1 = self.pvp_create_battle(self.account_1, self.account_2, BATTLE_1X1_STATE.PROCESSING)
self.battle_1.calculate_rating = True
self.battle_1.save()
self.battle_2 = self.pvp_create_battle(self.account_2, self.account_1, BATTLE_1X1_STATE.PROCESSING)
self.battle_2.calculate_rating = True
self.battle_2.save()
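        # two mirrored battle records, one per participant, so each hero
        # sees the other as the opponent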
self.bundle_id = 666
self.meta_action_battle = MetaActionArenaPvP1x1Prototype.create(self.storage, self.hero_1, self.hero_2, bundle_id=self.bundle_id)
self.meta_action_battle.set_storage(self.storage)
def test_initialization(self):
self.assertEqual(MetaAction.objects.all().count(), 1)
self.assertEqual(MetaActionMember.objects.all().count(), 2)
self.assertEqual(len(self.meta_action_battle.members), 2)
self.assertEqual(len(self.meta_action_battle.members_by_roles), 2)
self.assertTrue(self.meta_action_battle.storage)
self.assertEqual(self.meta_action_battle.hero_1, self.hero_1)
self.assertEqual(self.meta_action_battle.hero_2, self.hero_2)
# test reset of pvp_data
self.assertEqual(self.meta_action_battle.hero_1.health, self.hero_1.max_health)
self.assertEqual(self.meta_action_battle.hero_1.pvp.advantage, 0)
self.assertEqual(self.meta_action_battle.hero_1.pvp.effectiveness, c.PVP_EFFECTIVENESS_INITIAL)
self.assertEqual(self.meta_action_battle.hero_1.pvp.energy, 0)
self.assertEqual(self.meta_action_battle.hero_1.pvp.energy_speed, 1)
self.assertEqual(self.meta_action_battle.hero_1.pvp.turn_advantage, 0)
self.assertEqual(self.meta_action_battle.hero_1.pvp.turn_effectiveness, c.PVP_EFFECTIVENESS_INITIAL)
self.assertEqual(self.meta_action_battle.hero_1.pvp.turn_energy, 0)
self.assertEqual(self.meta_action_battle.hero_1.pvp.turn_energy_speed, 1)
self.assertTrue(self.meta_action_battle.hero_1_context.pvp_advantage_strike_damage > 0)
self.assertEqual(self.meta_action_battle.hero_2.health, self.hero_2.max_health)
self.assertEqual(self.meta_action_battle.hero_2.pvp.advantage, 0)
self.assertEqual(self.meta_action_battle.hero_2.pvp.effectiveness, c.PVP_EFFECTIVENESS_INITIAL)
self.assertEqual(self.meta_action_battle.hero_2.pvp.energy, 0)
self.assertEqual(self.meta_action_battle.hero_2.pvp.energy_speed, 1)
self.assertEqual(self.meta_action_battle.hero_2.pvp.turn_advantage, 0)
self.assertEqual(self.meta_action_battle.hero_2.pvp.turn_effectiveness, c.PVP_EFFECTIVENESS_INITIAL)
self.assertEqual(self.meta_action_battle.hero_2.pvp.turn_energy, 0)
self.assertEqual(self.meta_action_battle.hero_2.pvp.turn_energy_speed, 1)
self.assertTrue(self.meta_action_battle.hero_2_context.pvp_advantage_strike_damage > 0)
def test_one_hero_killed(self):
current_time = TimePrototype.get_current_time()
self.hero_1.health = 0
self.meta_action_battle.process()
self.assertEqual(self.meta_action_battle.state, MetaActionArenaPvP1x1Prototype.STATE.BATTLE_ENDING)
current_time.increment_turn()
self.meta_action_battle.process()
self.assertEqual(self.meta_action_battle.state, MetaActionArenaPvP1x1Prototype.STATE.PROCESSED)
self.assertTrue(self.hero_1.is_alive and self.hero_2.is_alive)
self.assertEqual(self.hero_1.health, self.hero_1.max_health / 2)
self.assertEqual(self.hero_2.health, self.hero_2.max_health)
def check_hero_pvp_statistics(self, hero, battles, victories, draws, defeats):
self.assertEqual(hero.statistics.pvp_battles_1x1_number, battles)
self.assertEqual(hero.statistics.pvp_battles_1x1_victories, victories)
self.assertEqual(hero.statistics.pvp_battles_1x1_draws, draws)
self.assertEqual(hero.statistics.pvp_battles_1x1_defeats, defeats)
def _end_battle(self, hero_1_health, hero_2_health):
self.hero_1.health = hero_1_health
self.hero_2.health = hero_2_health
current_time = TimePrototype.get_current_time()
self.meta_action_battle.process()
current_time.increment_turn()
self.meta_action_battle.process()
def test_hero_1_win(self):
self._end_battle(hero_1_health=self.hero_1.max_health, hero_2_health=0)
self.assertEqual(Battle1x1Prototype._model_class.objects.all().count(), 0)
self.check_hero_pvp_statistics(self.hero_1, 1, 1, 0, 0)
self.check_hero_pvp_statistics(self.hero_2, 1, 0, 0, 1)
def test_hero_2_win(self):
self._end_battle(hero_1_health=0, hero_2_health=self.hero_2.max_health)
self.assertEqual(Battle1x1Prototype._model_class.objects.all().count(), 0)
self.check_hero_pvp_statistics(self.hero_1, 1, 0, 0, 1)
self.check_hero_pvp_statistics(self.hero_2, 1, 1, 0, 0)
def test_draw(self):
self._end_battle(hero_1_health=0, hero_2_health=0)
self.assertEqual(Battle1x1Prototype._model_class.objects.all().count(), 0)
self.check_hero_pvp_statistics(self.hero_1, 1, 0, 1, 0)
self.check_hero_pvp_statistics(self.hero_2, 1, 0, 1, 0)
@mock.patch('the_tale.game.pvp.prototypes.Battle1x1Prototype.calculate_rating', False)
def test_hero_1_win_no_stats(self):
self._end_battle(hero_1_health=self.hero_1.max_health, hero_2_health=0)
self.check_hero_pvp_statistics(self.hero_1, 0, 0, 0, 0)
self.check_hero_pvp_statistics(self.hero_2, 0, 0, 0, 0)
@mock.patch('the_tale.game.pvp.prototypes.Battle1x1Prototype.calculate_rating', False)
def test_hero_2_win_no_stats(self):
self._end_battle(hero_1_health=0, hero_2_health=self.hero_1.max_health)
self.check_hero_pvp_statistics(self.hero_1, 0, 0, 0, 0)
self.check_hero_pvp_statistics(self.hero_2, 0, 0, 0, 0)
@mock.patch('the_tale.game.pvp.prototypes.Battle1x1Prototype.calculate_rating', False)
def test_draw_no_stats(self):
self._end_battle(hero_1_health=0, hero_2_health=0)
self.check_hero_pvp_statistics(self.hero_1, 0, 0, 0, 0)
self.check_hero_pvp_statistics(self.hero_2, 0, 0, 0, 0)
def test_second_process_call_in_one_turn(self):
with mock.patch('the_tale.game.actions.meta_actions.MetaActionArenaPvP1x1Prototype._process') as meta_action_process_counter:
self.meta_action_battle.process()
self.meta_action_battle.process()
self.assertEqual(meta_action_process_counter.call_count, 1)
def test_update_hero_pvp_info(self):
self.hero_2.pvp.set_effectiveness(50)
self.meta_action_battle.update_hero_pvp_info(self.hero_2)
self.assertTrue(self.hero_2.pvp.energy > self.hero_1.pvp.energy)
self.assertTrue(0 < self.hero_2.pvp.effectiveness < 50)
def test_advantage_after_turn(self):
self.hero_1.pvp.set_effectiveness(50)
self.hero_2.pvp.set_effectiveness(25)
self.meta_action_battle.process()
self.assertTrue(self.hero_1.pvp.advantage > 0)
self.assertTrue(self.hero_2.pvp.advantage < 0)
def test_full_battle(self):
current_time = TimePrototype.get_current_time()
self.assertEqual(Battle1x1.objects.filter(state=BATTLE_1X1_STATE.PROCESSING).count(), 2)
while self.meta_action_battle.state != MetaActionArenaPvP1x1Prototype.STATE.PROCESSED:
self.meta_action_battle.process()
current_time.increment_turn()
self.assertEqual(self.meta_action_battle.state, MetaActionArenaPvP1x1Prototype.STATE.PROCESSED)
self.assertTrue(self.hero_1.is_alive and self.hero_2.is_alive)
self.assertEqual(self.hero_1.health, self.hero_1.max_health / 2)
self.assertEqual(self.hero_2.health, self.hero_2.max_health)
self.assertEqual(Battle1x1.objects.all().count(), 0)
self.assertEqual(Battle1x1Result.objects.all().count(), 1)
battle_result = Battle1x1Result.objects.all()[0]
self.assertNotEqual(battle_result.participant_1_id, battle_result.participant_2_id)
def test_remove(self):
self.assertEqual(MetaAction.objects.all().count(), 1)
self.assertEqual(MetaActionMember.objects.all().count(), 2)
self.meta_action_battle.remove()
self.assertEqual(MetaAction.objects.all().count(), 0)
self.assertEqual(MetaActionMember.objects.all().count(), 0)
def test_get_bot_pvp_properties(self):
properties = self.meta_action_battle.get_bot_pvp_properties()
self.meta_action_battle.save()
self.meta_action_battle.reload()
self.assertEqual(set(properties.keys()), set(('ability_chance', 'priorities')))
self.assertTrue('bot_pvp_properties' in self.meta_action_battle.data)
self.assertEqual(set(properties.keys()), set(self.meta_action_battle.data['bot_pvp_properties']))
        self.assertTrue(0 < properties['ability_chance'] <= 1)
self.assertEqual(set(properties['priorities']), set(ABILITIES.keys()))
self.assertEqual(properties, self.meta_action_battle.get_bot_pvp_properties())
for ability_priority in properties['priorities']:
self.assertTrue(ability_priority > 0)
def test_process_bot_called__hero_1(self):
self.hero_1._model.is_bot = True
self.meta_action_battle.reload()
with mock.patch('the_tale.game.actions.meta_actions.MetaActionArenaPvP1x1Prototype.process_bot') as process_bot:
self.meta_action_battle.process()
self.assertEqual(process_bot.call_count, 1)
self.assertEqual(process_bot.call_args[1]['bot'].id, self.hero_1.id )
self.assertEqual(process_bot.call_args[1]['enemy'].id, self.hero_2.id )
def test_process_bot_called__hero_2(self):
self.hero_2._model.is_bot = True
self.meta_action_battle.reload()
with mock.patch('the_tale.game.actions.meta_actions.MetaActionArenaPvP1x1Prototype.process_bot') as process_bot:
self.meta_action_battle.process()
self.assertEqual(process_bot.call_count, 1)
self.assertEqual(process_bot.call_args[1]['bot'].id, self.hero_2.id )
self.assertEqual(process_bot.call_args[1]['enemy'].id, self.hero_1.id )
def test_process_bot_called__use_ability(self):
self.hero_1._model.is_bot = True
self.hero_1.pvp.set_energy(10)
properties = self.meta_action_battle.get_bot_pvp_properties()
properties['ability_chance'] = 1.0
        self.hero_2.pvp.set_energy_speed(2) # the flame ability will not be used if the enemy's energy speed is 1
self.meta_action_battle.process()
self.assertTrue(self.hero_1.pvp.energy in (1, 2))
def test_initialize_bots__bot_is_second(self):
result, account_1_id, bundle_id = register_user('test_user_3')
result, account_2_id, bundle_id = register_user('bot', '[email protected]', '111111', is_bot=True)
account_1 = AccountPrototype.get_by_id(account_1_id)
account_2 = AccountPrototype.get_by_id(account_2_id)
storage = LogicStorage()
storage.load_account_data(account_1)
storage.load_account_data(account_2)
hero_1 = storage.accounts_to_heroes[account_1.id]
hero_2 = storage.accounts_to_heroes[account_2.id]
hero_1._model.level = 50
self.assertEqual(hero_2.level, 1)
MetaActionArenaPvP1x1Prototype.create(storage, hero_1, hero_2, bundle_id=self.bundle_id+1)
self.assertEqual(hero_2.level, 50)
self.assertTrue(len(hero_2.abilities.all) > 1)
self.assertEqual(hero_2.health, hero_2.max_health)
def test_initialize_bots__bot_is_first(self):
result, account_1_id, bundle_id = register_user('bot', '[email protected]', '111111', is_bot=True)
result, account_2_id, bundle_id = register_user('test_user_3')
account_1 = AccountPrototype.get_by_id(account_1_id)
account_2 = AccountPrototype.get_by_id(account_2_id)
storage = LogicStorage()
storage.load_account_data(account_1)
storage.load_account_data(account_2)
hero_1 = storage.accounts_to_heroes[account_1.id]
hero_2 = storage.accounts_to_heroes[account_2.id]
hero_2._model.level = 50
self.assertEqual(hero_1.level, 1)
MetaActionArenaPvP1x1Prototype.create(storage, hero_1, hero_2, bundle_id=self.bundle_id+1)
self.assertEqual(hero_1.level, 50)
self.assertTrue(len(hero_1.abilities.all) > 1)
self.assertEqual(hero_1.health, hero_1.max_health)
def test_initialize_bots__second_create(self):
result, account_1_id, bundle_id = register_user('test_user_3')
result, account_2_id, bundle_id = register_user('bot', '[email protected]', '111111', is_bot=True)
account_1 = AccountPrototype.get_by_id(account_1_id)
account_2 = AccountPrototype.get_by_id(account_2_id)
storage = LogicStorage()
storage.load_account_data(account_1)
storage.load_account_data(account_2)
hero_1 = storage.accounts_to_heroes[account_1.id]
hero_2 = storage.accounts_to_heroes[account_2.id]
hero_1._model.level = 50
self.assertEqual(hero_2.level, 1)
self.pvp_create_battle(account_1, account_2, BATTLE_1X1_STATE.PROCESSING)
self.pvp_create_battle(account_2, account_1, BATTLE_1X1_STATE.PROCESSING)
meta_action = MetaActionArenaPvP1x1Prototype.create(storage, hero_1, hero_2, bundle_id=self.bundle_id+1)
meta_action.process_battle_ending()
MetaActionArenaPvP1x1Prototype.create(storage, hero_1, hero_2, bundle_id=self.bundle_id+2)
self.assertEqual(hero_2.level, 50)
self.assertTrue(len(hero_2.abilities.all) > 1)
self.assertEqual(hero_2.health, hero_2.max_health)
def test_process_bot__flame_ability_not_used(self):
result, account_1_id, bundle_id = register_user('bot', '[email protected]', '111111', is_bot=True)
result, account_2_id, bundle_id = register_user('test_user_3')
account_1 = AccountPrototype.get_by_id(account_1_id)
account_2 = AccountPrototype.get_by_id(account_2_id)
storage = LogicStorage()
storage.load_account_data(account_1)
storage.load_account_data(account_2)
hero_1 = storage.accounts_to_heroes[account_1.id]
hero_2 = storage.accounts_to_heroes[account_2.id]
MetaActionArenaPvP1x1Prototype.create(storage, hero_1, hero_2, bundle_id=self.bundle_id+1)
self.assertEqual(hero_2.pvp.energy_speed, 1)
with mock.patch('the_tale.game.actions.meta_actions.MetaActionArenaPvP1x1Prototype.get_bot_pvp_properties',
lambda a: {'priorities': {Flame.TYPE: 1}, 'ability_chance': 1}):
with mock.patch('the_tale.game.pvp.abilities.Flame.use') as use:
for i in xrange(100):
self.meta_action_battle.process_bot(hero_1, hero_2)
self.assertEqual(use.call_count, 0)
| [
"[email protected]"
]
| |
1160fe2c4176a9a8392411959eb0d17929231848 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-gaussdbforopengauss/huaweicloudsdkgaussdbforopengauss/v3/model/gauss_d_bfor_open_gauss_user_for_list.py | ffed016aea17b6dc6eec320863498e523cbc368d | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,944 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class GaussDBforOpenGaussUserForList:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'name': 'str',
'attributes': 'GaussDBforOpenGaussUserForListAttributes',
'memberof': 'str'
}
attribute_map = {
'name': 'name',
'attributes': 'attributes',
'memberof': 'memberof'
}
def __init__(self, name=None, attributes=None, memberof=None):
"""GaussDBforOpenGaussUserForList
The model defined in huaweicloud sdk
:param name: 帐号名。
:type name: str
:param attributes:
:type attributes: :class:`huaweicloudsdkgaussdbforopengauss.v3.GaussDBforOpenGaussUserForListAttributes`
:param memberof: 用户的默认权限。
:type memberof: str
"""
self._name = None
self._attributes = None
self._memberof = None
self.discriminator = None
self.name = name
if attributes is not None:
self.attributes = attributes
if memberof is not None:
self.memberof = memberof
@property
def name(self):
"""Gets the name of this GaussDBforOpenGaussUserForList.
帐号名。
:return: The name of this GaussDBforOpenGaussUserForList.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this GaussDBforOpenGaussUserForList.
帐号名。
:param name: The name of this GaussDBforOpenGaussUserForList.
:type name: str
"""
self._name = name
@property
def attributes(self):
"""Gets the attributes of this GaussDBforOpenGaussUserForList.
:return: The attributes of this GaussDBforOpenGaussUserForList.
:rtype: :class:`huaweicloudsdkgaussdbforopengauss.v3.GaussDBforOpenGaussUserForListAttributes`
"""
return self._attributes
@attributes.setter
def attributes(self, attributes):
"""Sets the attributes of this GaussDBforOpenGaussUserForList.
:param attributes: The attributes of this GaussDBforOpenGaussUserForList.
:type attributes: :class:`huaweicloudsdkgaussdbforopengauss.v3.GaussDBforOpenGaussUserForListAttributes`
"""
self._attributes = attributes
@property
def memberof(self):
"""Gets the memberof of this GaussDBforOpenGaussUserForList.
用户的默认权限。
:return: The memberof of this GaussDBforOpenGaussUserForList.
:rtype: str
"""
return self._memberof
@memberof.setter
def memberof(self, memberof):
"""Sets the memberof of this GaussDBforOpenGaussUserForList.
用户的默认权限。
:param memberof: The memberof of this GaussDBforOpenGaussUserForList.
:type memberof: str
"""
self._memberof = memberof
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GaussDBforOpenGaussUserForList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
af85091132f201033888c47fc10b43a4b7e8d32d | a8b37bd399dd0bad27d3abd386ace85a6b70ef28 | /airbyte-integrations/connectors/source-aircall/setup.py | 25b830a1e3cce6526bed07734eb77ef89e7f7d8b | [
"MIT",
"LicenseRef-scancode-free-unknown",
"Elastic-2.0"
]
| permissive | thomas-vl/airbyte | 5da2ba9d189ba0b202feb952cadfb550c5050871 | 258a8eb683634a9f9b7821c9a92d1b70c5389a10 | refs/heads/master | 2023-09-01T17:49:23.761569 | 2023-08-25T13:13:11 | 2023-08-25T13:13:11 | 327,604,451 | 1 | 0 | MIT | 2021-01-07T12:24:20 | 2021-01-07T12:24:19 | null | UTF-8 | Python | false | false | 628 | py | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from setuptools import find_packages, setup
MAIN_REQUIREMENTS = [
"airbyte-cdk~=0.1",
]
TEST_REQUIREMENTS = [
"requests-mock~=1.9.3",
"pytest~=6.2",
"pytest-mock~=3.6.1",
]
setup(
name="source_aircall",
description="Source implementation for Aircall.",
author="Airbyte",
author_email="[email protected]",
packages=find_packages(),
install_requires=MAIN_REQUIREMENTS,
package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
extras_require={
"tests": TEST_REQUIREMENTS,
},
)
| [
"[email protected]"
]
| |
f060ef31d43c3220db23ba2d5f5b9638358bec69 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_nitpicked.py | c24415023c45b6fa685872e33916c3f83b705177 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py |
# class header
class _NITPICKED():
    def __init__(self):
        self.name = "NITPICKED"
        # the bare name `nitpick` in the original was an undefined
        # reference; store the base form as a list, mirroring self.basic
        self.definitions = ['nitpick']
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.basic = ['nitpick']
| [
"[email protected]"
]
| |
98491800978764c42bde1d1d36a77b8dc13c9ef3 | 1e249067ab2dabc17cb7ebda46f9f23a5cfad552 | /tests/test_processor.py | 149ae513f1fdcfc1b09bbec275c63aac1d55b556 | [
"BSD-2-Clause"
]
| permissive | STIRLIN6/indra_cogex | 6e4cba84ee1ce82a404154e7370f88fc340400cb | 552cefd71431b08b8118b2cc0428fd8681e6fc83 | refs/heads/main | 2023-08-14T01:28:14.852108 | 2021-09-14T04:18:13 | 2021-09-14T04:18:13 | 377,100,238 | 0 | 0 | BSD-2-Clause | 2021-06-15T09:01:23 | 2021-06-15T09:01:23 | null | UTF-8 | Python | false | false | 184 | py | from indra_cogex.representation import norm_id
def test_norm_id():
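    # norm_id maps the namespace to its registry prefix (UP -> uniprot) and
    # avoids doubling the prefix when the identifier already contains it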
assert norm_id("UP", "P12345") == "uniprot:P12345"
assert norm_id("CHEBI", "CHEBI:12345") == "chebi:12345"
| [
"[email protected]"
]
| |
2a99b3bb613dba1885dc7a069898c4d69a501f7e | 833b43575815ce6c5fa8cbac2628cb774331eda7 | /chap20_p371_code3.py | dcda8aea075beb81ff7c9027d10c117c90dfe210 | []
| no_license | ai-times/infinitybook_python | d9529dfe7d486bf5c713d52b530915a23cbf1812 | 1c011c31994d07fe959bba9b519c4365f5f40e7f | refs/heads/main | 2023-03-01T12:18:20.695888 | 2021-02-14T04:22:40 | 2021-02-14T04:22:40 | 338,578,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from datetime import datetime
birth = datetime(2002, 6, 30, 10, 15, 3, 56765)
now = datetime.now()
print(now - birth)  # elapsed time since `birth`, shown as a timedelta
| [
"[email protected]"
]
| |
29689d82e65139fffd325b2517ea32a511041d38 | 9734c93c86c982b1ce046340bac9e53645b261b8 | /tests/formatters/yaml_formatters_file.py | f69655c1080aa94b6d70e50bbc0002921e12694b | [
"Apache-2.0"
]
| permissive | log2timeline/plaso | cd72dd407d6c5627506c14f58cb8f6a6926aa808 | d6022f8cfebfddf2d08ab2d300a41b61f3349933 | refs/heads/main | 2023-09-02T08:43:48.241198 | 2023-08-19T07:28:12 | 2023-08-19T07:28:12 | 23,812,315 | 1,506 | 421 | Apache-2.0 | 2023-09-04T08:24:53 | 2014-09-08T23:29:28 | Python | UTF-8 | Python | false | false | 3,363 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the YAML-based formatters file."""
import io
import unittest
from plaso.formatters import yaml_formatters_file
from plaso.lib import errors
from tests import test_lib as shared_test_lib
class YAMLFormattersFileTest(shared_test_lib.BaseTestCase):
"""Tests for the YAML-based formatters file."""
# pylint: disable=protected-access
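  # a minimal, valid conditional formatter definition shared by the tests below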
_FORMATTERS_YAML = {
'type': 'conditional',
'data_type': 'test:fs:stat',
'message': [
'{display_name}',
'Type: {file_entry_type}',
'({unallocated})'],
'short_message': [
'{filename}'],
'short_source': 'SOURCE',
'source': 'My Custom Log Source'}
def testReadFormatterDefinition(self):
"""Tests the _ReadFormatterDefinition function."""
test_formatters_file = yaml_formatters_file.YAMLFormattersFile()
formatter = test_formatters_file._ReadFormatterDefinition(
self._FORMATTERS_YAML)
self.assertIsNotNone(formatter)
self.assertEqual(formatter.data_type, 'test:fs:stat')
with self.assertRaises(errors.ParseError):
test_formatters_file._ReadFormatterDefinition({})
with self.assertRaises(errors.ParseError):
test_formatters_file._ReadFormatterDefinition({'type': 'bogus'})
with self.assertRaises(errors.ParseError):
test_formatters_file._ReadFormatterDefinition({'type': 'conditional'})
with self.assertRaises(errors.ParseError):
test_formatters_file._ReadFormatterDefinition({
'type': 'conditional',
'data_type': 'test:fs:stat'})
with self.assertRaises(errors.ParseError):
test_formatters_file._ReadFormatterDefinition({
'type': 'conditional',
'data_type': 'test:fs:stat',
'message': [
'{display_name}',
'Type: {file_entry_type}',
'({unallocated})']})
with self.assertRaises(errors.ParseError):
test_formatters_file._ReadFormatterDefinition({'bogus': 'error'})
def testReadFromFileObject(self):
"""Tests the _ReadFromFileObject function."""
test_file_path = self._GetTestFilePath(['formatters', 'format_test.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_formatters_file = yaml_formatters_file.YAMLFormattersFile()
with io.open(test_file_path, 'r', encoding='utf-8') as file_object:
formatters = list(test_formatters_file._ReadFromFileObject(file_object))
self.assertEqual(len(formatters), 2)
def testReadFromFile(self):
"""Tests the ReadFromFile function."""
test_file_path = self._GetTestFilePath(['formatters', 'format_test.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_formatters_file = yaml_formatters_file.YAMLFormattersFile()
formatters = list(test_formatters_file.ReadFromFile(test_file_path))
self.assertEqual(len(formatters), 2)
self.assertEqual(formatters[0].data_type, 'test:event')
self.assertEqual(formatters[1].data_type, 'test:fs:stat')
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
0e1cdca49f5eeb7315a63e0091ae55029d0eece7 | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/third_party/kubernetes/client/models/v1_watch_event.py | eeac0514753ca0d2cfe0c9ba717f53e73fabf2aa | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
]
| permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 3,880 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen
https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1WatchEvent(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name and the value is attribute
type.
attribute_map (dict): The key is attribute name and the value is json key
in definition.
"""
swagger_types = {'object': 'RuntimeRawExtension', 'type': 'str'}
attribute_map = {'object': 'object', 'type': 'type'}
def __init__(self, object=None, type=None):
"""
V1WatchEvent - a model defined in Swagger
"""
self._object = None
self._type = None
self.discriminator = None
self.object = object
self.type = type
@property
def object(self):
"""
Gets the object of this V1WatchEvent.
Object is: * If Type is Added or Modified: the new state of the object.
* If Type is Deleted: the state of the object immediately before
deletion. * If Type is Error: *Status is recommended; other types may
make sense depending on context.
:return: The object of this V1WatchEvent.
:rtype: RuntimeRawExtension
"""
return self._object
@object.setter
def object(self, object):
"""
Sets the object of this V1WatchEvent.
Object is: * If Type is Added or Modified: the new state of the object.
* If Type is Deleted: the state of the object immediately before
deletion. * If Type is Error: *Status is recommended; other types may
make sense depending on context.
:param object: The object of this V1WatchEvent.
:type: RuntimeRawExtension
"""
if object is None:
raise ValueError('Invalid value for `object`, must not be `None`')
self._object = object
@property
def type(self):
"""
Gets the type of this V1WatchEvent.
:return: The type of this V1WatchEvent.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1WatchEvent.
:param type: The type of this V1WatchEvent.
:type: str
"""
if type is None:
raise ValueError('Invalid value for `type`, must not be `None`')
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, 'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], 'to_dict') else item, value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1WatchEvent):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
]
| |
541be20181d90c2f788955ed7c94c8e307b6d08e | a7da58ad91b007b3650003708eb91928f1e3684a | /bt5/erp5_banking_cash/WorkflowTemplateItem/portal_workflow/internal_money_payment_workflow/scripts/validateCounter.py | 1259c1c7a0143dad30f158e310e8328d81adaa3d | []
| no_license | jgpjuniorj/j | 042d1bd7710fa2830355d4312a6b76103e29639d | dc02bfa887ffab9841abebc3f5c16d874388cef5 | refs/heads/master | 2021-01-01T09:26:36.121339 | 2020-01-31T10:34:17 | 2020-02-07T04:39:18 | 239,214,398 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,447 | py | from Products.DCWorkflow.DCWorkflow import ValidationFailed
from Products.ERP5Type.Message import Message
transaction = state_change['object']
date = transaction.getStartDate()
source = transaction.getSource(None)
# check we are in an opened accounting day
transaction.Baobab_checkCounterDateOpen(site=source, date=transaction.getStartDate())
# check again that the counter is open
context.Baobab_checkCounterOpened(source)
if transaction.getPaymentType() in (None, ""):
msg = Message(domain="ui", message="No payment type defined.")
raise ValidationFailed, (msg,)
#test if the source or the destination is correct
transaction.Base_checkBaobabSourceAndDestination()
# Get price and total_price.
amount = transaction.getSourceTotalAssetPrice()
total_price = transaction.getTotalPrice(portal_type=('Cash Delivery Line','Cash Delivery Cell'), fast=0)
if amount != total_price:
msg = Message(domain="ui", message="Amount differ from total price.")
raise ValidationFailed, (msg,)
if source is None:
msg = Message(domain='ui', message='No counter defined.')
raise ValidationFailed, (msg,)
site = transaction.getSourceValue()
vault = transaction.getBaobabSource()
resource = transaction.CashDelivery_checkCounterInventory(source=vault, portal_type='Cash Delivery Line',same_source=1)
#context.log('resource',resource)
if resource == 2:
msg = Message(domain="ui", message="No Resource.")
raise ValidationFailed, (msg,)
| [
"[email protected]"
]
| |
c4167281b5e6283bb6cd67dd447b40152c61100c | f36fc94a1ac5ffbfb6d2a78807992347a7e9f6e2 | /assignment1/cs231n/classifiers/linear_classifier.py | 844826318d20b5e2114d43a0cfb20aa6ca31046a | []
| no_license | Dipeshtamboli/CS231n-Assignments | d2f60504410499aed96da9f988fc69c239096abe | 146b3ce885867c81dd609abdbaedabeafa23f7b7 | refs/heads/master | 2020-04-11T09:10:45.563002 | 2019-01-01T20:56:18 | 2019-01-01T20:56:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,966 | py | from __future__ import print_function
import numpy as np
from cs231n.classifiers.linear_svm import *
from cs231n.classifiers.softmax import *
class LinearClassifier(object):
def __init__(self):
self.W = None
def train(self, X, y, learning_rate=1e-3, reg=1e-5, num_iters=100,
batch_size=200, verbose=False):
"""
Train this linear classifier using stochastic gradient descent.
Inputs:
- X: A numpy array of shape (N, D) containing training data; there are N
training samples each of dimension D.
- y: A numpy array of shape (N,) containing training labels; y[i] = c
means that X[i] has label 0 <= c < C for C classes.
- learning_rate: (float) learning rate for optimization.
- reg: (float) regularization strength.
- num_iters: (integer) number of steps to take when optimizing
- batch_size: (integer) number of training examples to use at each step.
- verbose: (boolean) If true, print progress during optimization.
Outputs:
A list containing the value of the loss function at each training iteration.
"""
num_train, dim = X.shape
num_classes = np.max(y) + 1 # assume y takes values 0...K-1 where K is number of classes
if self.W is None:
# lazily initialize W
self.W = 0.001 * np.random.randn(dim, num_classes)
# Run stochastic gradient descent to optimize W
loss_history = []
for it in range(num_iters):
X_batch = None
y_batch = None
#########################################################################
# TODO: #
# Sample batch_size elements from the training data and their #
# corresponding labels to use in this round of gradient descent. #
# Store the data in X_batch and their corresponding labels in #
# y_batch; after sampling X_batch should have shape (dim, batch_size) # @@ X_batch should have shape (batch_size,dim)
# and y_batch should have shape (batch_size,) # @@ instead of (dim,batch_size)
# #
# Hint: Use np.random.choice to generate indices. Sampling with #
# replacement is faster than sampling without replacement. #
#########################################################################
#######
#CODE
#######
ids=np.arange(batch_size)
ids=np.random.choice(ids,batch_size,replace=True)
X_batch=X[ids]
y_batch=y[ids]
#######
pass
#########################################################################
# END OF YOUR CODE #
#########################################################################
# evaluate loss and gradient
loss, grad = self.loss(X_batch, y_batch, reg)
loss_history.append(loss)
# perform parameter update
#########################################################################
# TODO: #
# Update the weights using the gradient and the learning rate. #
#########################################################################
#######
#CODE
#######
self.W-=learning_rate*grad
#######
pass
#########################################################################
# END OF YOUR CODE #
#########################################################################
if verbose and it % 100 == 0:
print('iteration %d / %d: loss %f' % (it, num_iters, loss))
return loss_history
def predict(self, X):
"""
Use the trained weights of this linear classifier to predict labels for
data points.
Inputs:
- X: A numpy array of shape (N, D) containing training data; there are N
training samples each of dimension D.
Returns:
- y_pred: Predicted labels for the data in X. y_pred is a 1-dimensional
array of length N, and each element is an integer giving the predicted
class.
"""
y_pred = np.zeros(X.shape[0])
###########################################################################
# TODO: #
# Implement this method. Store the predicted labels in y_pred. #
###########################################################################
#######
#CODE
#######
score=X.dot(self.W)
y_pred=np.argmax(score,axis=1)
#######
pass
###########################################################################
# END OF YOUR CODE #
###########################################################################
return y_pred
def loss(self, X_batch, y_batch, reg):
"""
Compute the loss function and its derivative.
Subclasses will override this.
Inputs:
- X_batch: A numpy array of shape (N, D) containing a minibatch of N
data points; each point has dimension D.
- y_batch: A numpy array of shape (N,) containing labels for the minibatch.
- reg: (float) regularization strength.
Returns: A tuple containing:
- loss as a single float
- gradient with respect to self.W; an array of the same shape as W
"""
pass
class LinearSVM(LinearClassifier):
""" A subclass that uses the Multiclass SVM loss function """
def loss(self, X_batch, y_batch, reg):
return svm_loss_vectorized(self.W, X_batch, y_batch, reg)
class Softmax(LinearClassifier):
""" A subclass that uses the Softmax + Cross-entropy loss function """
def loss(self, X_batch, y_batch, reg):
return softmax_loss_vectorized(self.W, X_batch, y_batch, reg)
| [
"[email protected]"
]
| |
d243c506f63f7cc1780806923f5d78de5943116b | 08ee36e0bb1c250f7f2dfda12c1a73d1984cd2bc | /src/mnistk/networks/linearrelu_5.py | efcfbec738c0a5f4fad45d439a1de52528caf7c2 | []
| no_license | ahgamut/mnistk | 58dadffad204602d425b18549e9b3d245dbf5486 | 19a661185e6d82996624fc6fcc03de7ad9213eb0 | refs/heads/master | 2021-11-04T07:36:07.394100 | 2021-10-27T18:37:12 | 2021-10-27T18:37:12 | 227,103,881 | 2 | 1 | null | 2020-02-19T22:07:24 | 2019-12-10T11:33:09 | Python | UTF-8 | Python | false | false | 675 | py | # -*- coding: utf-8 -*-
"""
linearrelu_5.py
:copyright: (c) 2019 by Gautham Venkatasubramanian.
:license: MIT
"""
import torch
from torch import nn
class LinearReLU_5(nn.Module):
def __init__(self):
nn.Module.__init__(self)
self.f0 = nn.Linear(in_features=784, out_features=70, bias=True)
self.f1 = nn.ReLU(inplace=False)
self.f2 = nn.Linear(in_features=70, out_features=10, bias=False)
self.f3 = nn.LogSoftmax(dim=1)
def forward(self, *inputs):
x = inputs[0]
x = x.view(x.shape[0],784)
x = self.f0(x)
x = self.f1(x)
x = self.f2(x)
x = self.f3(x)
return x
| [
"[email protected]"
]
| |
8e65f1218388ca45500e4bd62348647c2fbb7197 | 344e2956b4e2a30a8ef7532d951f96d995d1dd1e | /21_maskrcnn/lib/cfgs/cascade_mask_rcnn_r101_64x4d_fpn_coco.py | d43096d1dc7c09bd4ed5fbb76ee8a9fbe1c09e25 | [
"Apache-2.0",
"LGPL-3.0-only",
"MIT",
"LicenseRef-scancode-proprietary-license",
"BSD-3-Clause",
"GPL-3.0-only"
]
| permissive | karndeepsingh/Monk_Object_Detection | e64199705326e4cd65e4b29946cae210a4ef9649 | 425fa50a3236cb9097389646275da06bf9185f6b | refs/heads/master | 2022-12-22T18:26:53.933397 | 2020-09-28T12:49:50 | 2020-09-28T12:49:50 | 299,307,843 | 1 | 1 | Apache-2.0 | 2020-09-28T12:52:18 | 2020-09-28T12:52:17 | null | UTF-8 | Python | false | false | 9,156 | py | # model settings
model = dict(
type='CascadeRCNN',
pretrained='torchvision://resnext101_64x4d',
backbone=dict(
type='ResNeXt',
depth=101,
groups=64,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
roi_head=dict(
type='CascadeRoIHead',
num_stages=3,
stage_loss_weights=[1, 0.5, 0.25],
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
],
mask_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mask_head=dict(
type='FCNMaskHead',
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=80,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False)
])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100,
mask_thr_binary=0.5))
# Dataset Settings
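# NOTE: every value marked with "#changeN" below is deliberately left blank
# in this template and must be filled in before the config will parse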
dataset_type = 'CocoDataset'
data_root = ''
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
    samples_per_gpu=2, #change1 (placeholder default; tune to GPU memory)
    workers_per_gpu=2, #change2 (placeholder default)
train=dict(
type=dataset_type,
        classes=None, #change3 (placeholder; None = stock COCO classes)
        ann_file='data/coco/annotations/instances_train2017.json', #change4 (placeholder path)
        img_prefix='data/coco/train2017/', #change5 (placeholder path)
pipeline=train_pipeline),
val=dict(
type=dataset_type,
        classes=None, #change6 (placeholder; None = stock COCO classes)
        ann_file='data/coco/annotations/instances_val2017.json', #change7 (placeholder path)
        img_prefix='data/coco/val2017/', #change8 (placeholder path)
pipeline=test_pipeline),
test=dict(
type=dataset_type,
        classes=None, #change9 (placeholder; None = stock COCO classes)
        ann_file='data/coco/annotations/instances_val2017.json', #change10 (placeholder path)
        img_prefix='data/coco/val2017/', #change11 (placeholder path)
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox') #change9 (placeholder eval interval)
# Schedule Settings
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) #change12 (stock 1x-schedule defaults; placeholders)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
    step=[8, 11]) #change13 (stock 1x-schedule decay epochs; placeholder)
total_epochs = 12 #change14 (placeholder)
# Runtime Settings
checkpoint_config = dict(interval=1) #change15 (placeholder)
# yapf:disable
log_config = dict(
interval=50, #change16
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None #change17 (set to a checkpoint path/URL to fine-tune)
resume_from = None
workflow = [('train', 1)]
gpu_ids = None #change18
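# Illustrative launch commands for this config (assumes a stock MMDetection
# checkout, whose standard entry points are referenced below; adjust paths):
#   python tools/train.py path/to/this_config.py
#   bash tools/dist_train.sh path/to/this_config.py 8  # 8-GPU distributed run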
| [
"[email protected]"
]
| |
3603592a43f6cb57493b90f261bc46ecb00ef171 | 1f936103af336af6bbd335f45d6baa55c426922b | /monatbx/generate_random_image_list.py | 8a9ca4dabc8f05852c8bdb56a7c99cb54b3732fe | []
| no_license | monarin/monatbx | 2ec342d67f1fbccb82656218ffd136f2eb7d96ab | 43f56974f811e5b2b0dcc428d4f9b36043ed9d04 | refs/heads/master | 2020-06-18T13:08:58.893701 | 2016-11-30T00:58:18 | 2016-11-30T00:58:18 | 75,136,381 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | import os
import sys
import random
p = sys.argv[1]
n_images = int(sys.argv[2])
frame_files = []
if os.path.isdir(p):
for pickle_filename in os.listdir(p):
if pickle_filename.endswith('.pickle'):
frame_files.append(p+'/'+pickle_filename)
i_rand = random.sample(range(len(frame_files)),n_images)
frame_files_sel = [frame_files[i] for i in i_rand]
txt_out = ''
for frame in frame_files_sel:
txt_out += frame + '\n'
f = open('frame_rand_'+str(n_images)+'.lst', 'w')
f.write(txt_out)
f.close()
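# Assumed invocation (argv[1] = directory of .pickle frames, argv[2] = sample size):
#   python generate_random_image_list.py /path/to/pickles 100
# For reproducible sampling, random.seed(<int>) could be called before random.sample().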
| [
"[email protected]"
]
| |
947dced367bd8dde73a91f39443a0f7b80bda3a8 | 86319aad3690906f614ac1af28b8843529e9e0da | /thwackbin/data/__init__.py | a156485bd18d0e80a766cdfa5aabbee5f290dab9 | []
| no_license | sohgoh/thwackbin | b5828783a6179e96784bed0bdb894b179e3bea07 | ba9fedc4bcec598f367aa6d4f2567d1840c65c51 | refs/heads/master | 2021-01-21T03:14:08.261732 | 2014-04-16T03:53:51 | 2014-04-16T04:02:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 410 | py | """
thwackbin.data
~~~~~~~~~~~~~~
Package which contains mock results data stored on the file system.
"""
__author__ = 'Andrew Hawker <[email protected]>'
import json
import os
RESULTS = None
ROOT = os.path.dirname(__file__)
def init():
"""
Load and cache our results.json data on startup.
"""
global RESULTS
    with open(os.path.join(ROOT, 'results.json')) as f:
        RESULTS = json.load(f)
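# Assumed usage: call init() once at application startup, after which the
# parsed mock data is available as thwackbin.data.RESULTS.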
| [
"[email protected]"
]
| |
6f4f86844af5579493a6f13c1a0dcd95fafe0bd1 | c79e7e691c9fa5cc05bd227209762f735e6263e7 | /pyy1/.pycharm_helpers/python_stubs/-1550516950/apt_pkg/Hashes.py | 92a3d2de462fdebf1fdaf1414141fd87e81bd746 | [
"Apache-2.0"
]
| permissive | pyy1988/pyy_test1 | 27fd5fbd41935ba907e26f4f4d2546ca502f29a6 | 6bea878409e658aa87441384419be51aaab061e7 | refs/heads/master | 2020-04-05T07:01:58.745653 | 2018-11-08T12:51:00 | 2018-11-08T12:51:00 | 156,660,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | # encoding: utf-8
# module apt_pkg
# from /usr/lib/python3/dist-packages/apt_pkg.cpython-35m-x86_64-linux-gnu.so
# by generator 1.145
"""
Classes and functions wrapping the apt-pkg library.
The apt_pkg module provides several classes and functions for accessing
the functionality provided by the apt-pkg library. Typical uses might
include reading APT index files and configuration files and installing
or removing packages.
"""
# no imports
from .object import object
class Hashes(object):
"""
Hashes([object: (bytes, file)])
Calculate hashes for the given object. It can be used to create all
supported hashes for a file.
The parameter 'object' can be a bytestring, an object providing the
fileno() method, or an integer describing a file descriptor.
"""
def __init__(self, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
md5 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The MD5Sum of the file as a string."""
sha1 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The SHA1Sum of the file as a string."""
sha256 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The SHA256Sum of the file as a string."""
| [
"[email protected]"
]
| |
2b880886119cd49ba10dd9ed027ea26772f13106 | b1fe732c6abb51d44bd965cbbf259bb2d93e4514 | /Day3/problemSet.py | 14560c6d9c0b5f77cfebb8b4be2faea25056a2f5 | []
| no_license | RahulSinghDhek/GettingStartedWithPython | 04c85c2c370e7ea93b16dade44e5eea633ec284c | c655e3376707b8e4e14ed352a8bc07b010c31e12 | refs/heads/master | 2020-05-07T17:15:38.120491 | 2019-04-11T05:39:12 | 2019-04-11T05:39:12 | 180,721,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | __author__ = 'rdhek'
a=[1,3,5,7]
b=[1,2,3,4,5]
x= set(a)
y= set(b)
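# The three prints below show, in order: the common elements, the union, and
# the elements only in a. Expected CPython 2 output (small-int ordering):
#   [1, 3, 5] / set([1, 2, 3, 4, 5, 7]) / set([7])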
print list(x.intersection(y))
print x.union(y)
print x-y | [
"[email protected]"
]
| |
57b977f2ae53db87282b285aa878effd453face0 | 2a46ad4e83dcd903451fb5fba8d04da266dbd49e | /Algorithm/Leetcode/Codes/ConstructBinaryTreeFromInorderAndPostorderTraversal.py | 769cc7114912f93e7e81cf26965025c23ac1cdbd | []
| no_license | chloeeekim/TIL | e248801508340cb2eb9f3cfddc486b7dd7250386 | c5a94e81aa2f2dfcc626820205ca9feaad069fad | refs/heads/master | 2022-03-02T04:05:24.439271 | 2022-02-22T01:25:14 | 2022-02-22T01:25:14 | 190,150,063 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,522 | py | """
106. Construct Binary Tree from Inorder and Postorder Traversal : https://leetcode.com/problems/construct-binary-tree-from-inorder-and-postorder-traversal/
어떤 트리의 inorder, postorder traversal 결과가 리스트로 주어졌을 때, 트리를 복원하는 문제
- 트리 내에 중복된 값은 없다고 가정한다
Example:
- Input : inorder = [9,3,15,20,7], postorder = [9,15,7,20,3]
- Output : [3,9,20,null,null,15,7]
Note:
recursive하게 해결
inorder와 preorder로 트리를 복원하는 문제에서 약간만 변형
postorder 리스트의 마지막 값이 root가 되고, inorder 리스트에서 root 값을 기준으로 left children과 right children으로 구분된다
위 조건이 모든 subtree에 대해서도 만족
preorder에서는 left children을 먼저 구하고, right children을 구하는 순서였으나,
postorder에서는 반대로 right children을 먼저 구하고, left children을 구하는 순서
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode:
if inorder:
rootval = postorder.pop(-1)
root = TreeNode(rootval)
idx = inorder.index(rootval)
root.right = self.buildTree(inorder[idx+1:], postorder)
root.left = self.buildTree(inorder[:idx], postorder)
return root | [
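# Illustrative check (uses the TreeNode stub above):
#   s = Solution()
#   root = s.buildTree([9, 3, 15, 20, 7], [9, 15, 7, 20, 3])
#   root.val == 3; root.left.val == 9; root.right.val == 20
#   root.right.left.val == 15; root.right.right.val == 7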
"[email protected]"
]
| |
fdba2e38a7275b27bf739668f77984e9aad554b6 | d5fd936e7346844a1b7c5ea81dfa9adf5bb647d0 | /datasets/load_data.py | c547ebd91f699327cac78ca35d0dbe0f0094489e | []
| no_license | isaachenrion/graphs | 098e7098a894a3d1d9d18cf0ce1054e5910afa15 | 2ba6d50a7f61233fa8cc92ba03256691abb889de | refs/heads/master | 2021-01-02T09:10:49.686240 | 2017-09-11T19:52:48 | 2017-09-11T19:52:48 | 99,154,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,013 | py | import os
import pickle
from .datasets import BatchedFixedOrderGraphDataset, FixedOrderGraphDataset, GraphDataset, BatchedGraphDataset
from .add_virtual_node import add_virtual_node, add_target_nodes
from .path import DATA_DIR
def load_from_path(data_path, args):
with open(data_path, 'rb') as f:
dataset = pickle.load(f)
if isinstance(dataset, FixedOrderGraphDataset):
dataset = BatchedFixedOrderGraphDataset(dataset, args.batch_size)
elif isinstance(dataset, GraphDataset):
dataset = BatchedGraphDataset(dataset, args.batch_size)
if args.model == 'vcn':
add_target_nodes(dataset)
dataset = dataset.preprocess()
return dataset
def load_data(args):
train_data_path = os.path.join(DATA_DIR, args.problem + '-train.pkl')
eval_data_path = os.path.join(DATA_DIR, args.problem + '-eval.pkl')
training_set = load_from_path(train_data_path, args)
validation_set = load_from_path(eval_data_path, args)
return training_set, validation_set
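# Minimal usage sketch (illustrative; the Namespace fields mirror the attributes
# read above, but the concrete values here are assumptions):
#   from argparse import Namespace
#   args = Namespace(problem='qm9', batch_size=32, model='mpnn')
#   training_set, validation_set = load_data(args)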
| [
"[email protected]"
]
| |
02af9acedfd8eb63a76f63c93c109e539acb1fa4 | 0f9f8e8478017da7c8d408058f78853d69ac0171 | /python2/l0064_minimum_path_sum.py | e5eed8adafa9b21abd66ed0af9541fba57e42edd | []
| no_license | sprax/1337 | dc38f1776959ec7965c33f060f4d43d939f19302 | 33b6b68a8136109d2aaa26bb8bf9e873f995d5ab | refs/heads/master | 2022-09-06T18:43:54.850467 | 2020-06-04T17:19:51 | 2020-06-04T17:19:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 748 | py | class Solution(object):
def minPathSum(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
if not grid:
return 0
m = len(grid)
n = len(grid[0])
dp = [[0 for _ in range(n)] for _ in range(m)]
# Initialize.
dp[m-1][n-1] = grid[m-1][n-1]
for i in range(m-2, -1, -1):
dp[i][n-1] = grid[i][n-1] + dp[i+1][n-1]
for j in range(n-2, -1, -1):
dp[m-1][j] = grid[m-1][j] + dp[m-1][j+1]
# Solve.
for i in range(m-2, -1, -1):
for j in range(n-2, -1, -1):
dp[i][j] = min(dp[i+1][j], dp[i][j+1]) + grid[i][j]
return dp[0][0]
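# Quick sanity check (illustrative): the classic example grid.
#   Solution().minPathSum([[1, 3, 1], [1, 5, 1], [4, 2, 1]])  # -> 7 (1->3->1->1->1)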
| [
"[email protected]"
]
| |
ed777a2b20b0c94e0469882347bedeaacedfd55e | 876a1b7b7c898c826b94ff34f3d9a1d22ee5459b | /QUANTAXIS/QAUtil/QAWebutil.py | 8a2a75459233fd85e3744b092b8ba3babacb56ca | [
"MIT"
]
| permissive | pm58/QUANTAXIS | 6db63c461d18f13f7340f7d46e42cde3bc3f40cb | 03c526f640f48f4a153e9c4e0e27f74ccd18a345 | refs/heads/master | 2020-04-27T08:17:42.227150 | 2019-03-09T05:56:05 | 2019-03-09T05:56:05 | 174,165,118 | 5 | 0 | MIT | 2019-03-09T05:56:06 | 2019-03-06T14:55:39 | Python | UTF-8 | Python | false | false | 1,967 | py | # coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2018 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import datetime
from subprocess import PIPE, Popen
def QA_util_web_ping(url):
    """Ping `url` with the system `ping` command and return the last
    round-trip time in ms found in its output (9999999 if none was parsed).
    The parsing assumes Windows-style `time=XXms` output."""
    ms_list = []
    # Passing an argument list together with shell=True is unreliable on
    # POSIX, so the command is executed directly.
    p = Popen(["ping", url],
              stdin=PIPE, stdout=PIPE, stderr=PIPE)
    out = p.stdout.read()
    list_ = str(out).split('=')
    for item in list_:
        if 'ms' in item:
            ms_list.append(int(item.split('ms')[0]))
    if len(ms_list) < 1:
        # Bad request: no round-trip time could be parsed.
        ms_list.append(9999999)
    return ms_list[-1]
class QA_Util_web_pool():
def __init__(self):
pass
def hot_update(self):
pass
def dynamic_optimics(self):
pass
def task_queue(self):
pass
if __name__ == "__main__":
print(datetime.datetime.now())
print(QA_util_web_ping('www.baidu.com'))
print(datetime.datetime.now())
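    # Expected output (environment-dependent): two timestamps bracketing the
    # measured round-trip time in ms, or 9999999 when no time could be parsed.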
| [
"[email protected]"
]
| |
a41ee74e0d74a2f619205675cb265d0c888b3d01 | 9645bdfbb15742e0d94e3327f94471663f32061a | /Python/235 - Lowest Common Ancestor of a Binary Search Tree/235_lowest-common-ancestor-of-a-binary-search-tree.py | 863b29d2d3d70572b919bc045ab5e6b412efb394 | []
| no_license | aptend/leetcode-rua | f81c080b2260adb2da677612e5c437eda256781d | 80e44f4e9d3a5b592fdebe0bf16d1df54e99991e | refs/heads/master | 2023-06-22T00:40:05.533424 | 2021-03-17T13:51:28 | 2021-03-17T13:51:28 | 186,434,133 | 2 | 0 | null | 2023-06-21T22:12:51 | 2019-05-13T14:17:27 | HTML | UTF-8 | Python | false | false | 1,554 | py | from leezy import Solution, solution
from leezy.assists import TreeContext
class Q235(Solution):
@solution
def lowestCommonAncestor(self, root, p, q):
# 68ms
if p < root.val > q:
return self.lowestCommonAncestor(root.left, p, q)
if p > root.val < q:
return self.lowestCommonAncestor(root.right, p, q)
return root
@solution
def lca_iter(self, root, p, q):
# 76ms 40.62%
while root:
if root.val > p and root.val > q:
root = root.left
elif root.val < p and root.val < q:
root = root.right
else:
return root
def lca_dumb(self, root, p, q):
ppath, qpath = [], []
self.search(root, p, ppath)
self.search(root, q, qpath)
prev = x = y = None
for x, y in zip(ppath, qpath):
if x.val != y.val:
return prev
prev = x
return x
def search(self, node, v, path):
if node is None:
path.clear()
return
if v == node.val:
path.append(node)
return
path.append(node)
if v > node.val:
self.search(node.right, v, path)
else:
self.search(node.left, v, path)
def main():
q = Q235()
q.set_context(TreeContext)
t1 = [6, 2, 8, 0, 4, 7, 9, None, None, 3, 5]
q.add_args(t1, 2, 8)
q.add_args(t1, 2, 4)
q.add_args(t1, 3, 7)
q.run()
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
8e990b308f624c1525603f9ab92945fda7fb8ce2 | 5167f77d96d1dc5412a8a0a91c95e3086acd05dc | /test/functional/wallet_implicitsegwit.py | 553ce7367502b4851bea035523dbb7026ed2072f | [
"MIT"
]
| permissive | ocvcoin/ocvcoin | 04fb0cea7c11bf52e07ea06ddf9df89631eced5f | 79c3803e330f32ed50c02ae657ff9aded6297b9d | refs/heads/master | 2023-04-30T10:42:05.457630 | 2023-04-15T11:49:40 | 2023-04-15T11:49:40 | 406,011,904 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,424 | py | #!/usr/bin/env python3
# Copyright (c) 2019 The Ocvcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet implicit segwit feature."""
import test_framework.address as address
from test_framework.test_framework import OcvcoinTestFramework
# TODO: Might be nice to test p2pk here too
address_types = ('legacy', 'bech32', 'p2sh-segwit')
def key_to_address(key, address_type):
if address_type == 'legacy':
return address.key_to_p2pkh(key)
elif address_type == 'p2sh-segwit':
return address.key_to_p2sh_p2wpkh(key)
elif address_type == 'bech32':
return address.key_to_p2wpkh(key)
def send_a_to_b(receive_node, send_node):
keys = {}
for a in address_types:
a_address = receive_node.getnewaddress(address_type=a)
pubkey = receive_node.getaddressinfo(a_address)['pubkey']
keys[a] = pubkey
for b in address_types:
b_address = key_to_address(pubkey, b)
send_node.sendtoaddress(address=b_address, amount=1)
return keys
def check_implicit_transactions(implicit_keys, implicit_node):
# The implicit segwit node allows conversion all possible ways
txs = implicit_node.listtransactions(None, 99999)
for a in address_types:
pubkey = implicit_keys[a]
for b in address_types:
b_address = key_to_address(pubkey, b)
assert(('receive', b_address) in tuple((tx['category'], tx['address']) for tx in txs))
class ImplicitSegwitTest(OcvcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info("Manipulating addresses and sending transactions to all variations")
implicit_keys = send_a_to_b(self.nodes[0], self.nodes[1])
self.sync_all()
self.log.info("Checking that transactions show up correctly without a restart")
check_implicit_transactions(implicit_keys, self.nodes[0])
self.log.info("Checking that transactions still show up correctly after a restart")
self.restart_node(0)
self.restart_node(1)
check_implicit_transactions(implicit_keys, self.nodes[0])
if __name__ == '__main__':
ImplicitSegwitTest().main()
| [
"[email protected]"
]
| |
7365f9c8cd8272ff89d5a4395be8d501ab43d64b | 514f11a1e5643459594a90485c93332a3a53a338 | /barf/barf/analysis/basicblock/basicblock.py | a2460e0ef5f0d7dfa4ff9d3839b0abfb3a3e113a | [
"BSD-2-Clause"
]
| permissive | ksmaheshkumar/barf-project | b17cdfb3bd9f7eaeabba69ddd1f048d90d3d43ef | f5a4e081b458987676e1abf1f5f54f0ff49133b1 | refs/heads/master | 2021-01-17T21:55:27.477007 | 2015-01-16T14:34:56 | 2015-01-16T14:34:56 | 31,496,855 | 1 | 0 | null | 2015-03-01T13:10:51 | 2015-03-01T13:10:50 | Python | UTF-8 | Python | false | false | 18,362 | py | import bisect
import itertools
import networkx
from Queue import Queue
from pydot import Dot
from pydot import Edge
from pydot import Node
from barf.core.reil import DualInstruction
from barf.core.reil import ReilMnemonic
from barf.core.reil import ReilImmediateOperand
# CFG recovery mode
BARF_DISASM_LINEAR = 0 # linear sweep
BARF_DISASM_RECURSIVE = 1 # recursive descent
BARF_DISASM_MIXED = 2 # linear sweep + recursive descent
verbose = False
class BasicBlock(object):
"""Basic block representation.
"""
def __init__(self):
        # List of instructions within the basic block. Each instruction
        # is a 'dual' instruction, i.e. it pairs an assembler
# instruction with its REIL translation.
self._instrs = []
# Start address of the basic block.
self._address = None
# Taken branch address. If a basic block ends in a conditional
# instruction, this field has the address of the taken branch
# (condition equals True)
self._taken_branch = None
# Similar to taken branch but it holds the target address of
# the jump when the condition is false.
self._not_taken_branch = None
# If a basic block ends in a direct jump or in an instruction
# different from a conditional jump, this fields holds the
# address of the jump or next instruction.
self._direct_branch = None
@property
def instrs(self):
"""Get basic block instructions.
"""
return self._instrs
@property
def address(self):
"""Get basic block start address.
"""
if self._instrs == []:
return None
return self._instrs[0].address
@property
def start_address(self):
"""Get basic block start address.
"""
        if self._instrs == []:
return None
return self._instrs[0].address
@property
def end_address(self):
"""Get basic block end address.
"""
        if self._instrs == []:
return None
return self._instrs[-1].address + self._instrs[-1].asm_instr.size - 1
@property
def size(self):
"""Get basic block size.
"""
        if self._instrs == []:
return None
return sum([dinstr.asm_instr.size for dinstr in self._instrs])
@property
def taken_branch(self):
"""Get basic block taken branch.
"""
return self._taken_branch
@taken_branch.setter
def taken_branch(self, value):
"""Set basic block taken branch.
"""
self._taken_branch = value
@property
def not_taken_branch(self):
"""Get basic block not taken branch.
"""
return self._not_taken_branch
@not_taken_branch.setter
def not_taken_branch(self, value):
"""Set basic block not taken branch.
"""
self._not_taken_branch = value
@property
def direct_branch(self):
"""Get basic block direct branch.
"""
return self._direct_branch
@direct_branch.setter
def direct_branch(self, value):
"""Set basic block direct branch.
"""
self._direct_branch = value
@property
def branches(self):
"""Get basic block branches.
"""
branches = []
if self._taken_branch:
branches += [(self._taken_branch, 'taken')]
if self._not_taken_branch:
branches += [(self._not_taken_branch, 'not-taken')]
if self._direct_branch:
branches += [(self._direct_branch, 'direct')]
return branches
def contains(self, address):
"""Check if an address is within the range of a basic block.
"""
return address >= self.address and address <= self.end_address
def empty(self):
"""Check if a basic block is empty.
"""
return len(self._instrs) == 0
def __str__(self):
lines = ["Basic Block @ 0x%08x" % (self.address if self.address else 0)]
for instr in self._instrs:
lines += [" %s ; %s" % (str(instr.ir_instrs[0]).ljust(25), str(instr.asm_instr))]
for ir_instr in instr.ir_instrs[1:]:
lines += [" %s" % str(ir_instr)]
return "\n".join(lines)
def __eq__(self, other):
# Assumes that you are comparing basic block from the same binary
return self.address == other.address and self.end_address == other.end_address
def __ne__(self, other):
return not self.__eq__(other)
class BasicBlockGraph(object):
"""Basic block graph representation.
"""
def __init__(self, basic_blocks):
# List of basic blocks.
self._basic_blocks = basic_blocks
# Basic block accessed by address
self._bb_by_addr = dict([(bb.address, bb) for bb in basic_blocks])
# Basic block graph
self._graph = self._build_graph(basic_blocks)
def all_simple_bb_paths(self, start_address, end_address):
"""Return a list of path between start and end address.
"""
bb_start = self._find_basic_block(start_address)
bb_end = self._find_basic_block(end_address)
paths = networkx.all_simple_paths(self._graph, \
source=bb_start.address, target=bb_end.address)
return (map(lambda addr : self._bb_by_addr[addr], path) for path in paths)
def save(self, filename, print_ir=False, format='dot'):
"""Save basic block graph into a file.
"""
node_format = {
'shape' : 'Mrecord',
'rankdir' : 'LR',
'fontname' : 'monospace',
'fontsize' : '9.0'
}
edge_format = {
'fontname' : 'monospace',
'fontsize' : '8.0'
}
edge_colors = {
'taken' : 'green',
'not-taken' : 'red',
'direct' : 'blue'
}
try:
            # for each connected component
for idx, gr in enumerate(networkx.connected_component_subgraphs(self._graph.to_undirected())):
graph = Dot(graph_type="digraph", rankdir="TB")
# add nodes
nodes = {}
for bb_addr in gr.node.keys():
dump = self._dump_bb(self._bb_by_addr[bb_addr], print_ir)
label = "{<f0> 0x%08x | %s}" % (bb_addr, dump)
# html-encode colon character
label = label.replace(":", ":")
nodes[bb_addr] = Node(bb_addr, label=label, **node_format)
graph.add_node(nodes[bb_addr])
# add edges
for bb_src_addr in gr.node.keys():
for bb_dst_addr, branch_type in self._bb_by_addr[bb_src_addr].branches:
graph.add_edge(Edge(nodes[bb_src_addr],
nodes[bb_dst_addr], label=branch_type, \
color=edge_colors[branch_type], **edge_format))
graph.write("%s_%03d.%s" % (filename, idx, format), format=format)
except Exception as err:
import traceback
import sys
print("[E] Error loading BARF (%s:%d) : '%s'" %
(__name__, sys.exc_traceback.tb_lineno, str(err)))
print("")
print(traceback.format_exc())
# Auxiliary functions
# ======================================================================== #
def _build_graph(self, basic_blocks):
graph = networkx.DiGraph()
# add nodes
for bb_addr in self._bb_by_addr.keys():
graph.add_node(bb_addr, address=bb_addr)
# add edges
for bb_src_addr in self._bb_by_addr.keys():
for bb_dst_addr, branch_type in self._bb_by_addr[bb_src_addr].branches:
graph.add_edge(bb_src_addr, bb_dst_addr, branch_type=branch_type)
return graph
def _find_basic_block(self, address):
bb_rv = None
for bb in self._basic_blocks:
if address >= bb.address and address <= bb.end_address:
bb_rv = bb
break
return bb_rv
def _dump_bb(self, basic_block, print_ir=False):
lines = []
base_addr = basic_block.instrs[0].address
for instr in basic_block.instrs:
lines += ["0x%08x (%2d) " % (instr.address, instr.asm_instr.size) + str(instr.asm_instr) + "\\l"]
# lines += ["+%02x " % (instr.address - base_addr) + str(instr.asm_instr) + "\\l"]
# lines += [str(instr.asm_instr) + "\\l"]
if print_ir:
for ir_instr in instr.ir_instrs:
lines += [" " + str(ir_instr) + "\\l"]
return "".join(lines)
@property
def basic_blocks(self):
return self._basic_blocks
class BasicBlockBuilder(object):
"""Basic block builder.
"""
def __init__(self, disassembler, memory, translator):
# An instance of a disassembler.
self._disasm = disassembler
        # An instance of a REIL translator.
        self._ir_trans = translator
        # Maximum number of bytes fetched from memory per disassembly step.
        self._lookahead_max = 16
        # Memory of the program being analyzed.
self._mem = memory
def build(self, start_address, end_address):
"""Return the list of basic blocks.
Linear Sweep Disassembly.
@param start_address: Address of the first byte to start disassembling
basic blocks.
@param end_address: Address of the last byte (inclusive) to finish
disassembling basic blocks.
"""
if verbose:
print("[+] Recovering Basic Blocks :")
if verbose:
print(" Finding candidate BBs...")
bbs = self._find_candidate_bbs(start_address, end_address)
if verbose:
print(" %d" % len(bbs))
# print " Number of instrs..."
# asm_count = 0
# ir_count = 0
# for bb in bbs:
# asm_count += len(bb.instrs)
# ir_count += sum(map(lambda i : len(i.ir_instrs), bb.instrs))
# print " asm : %d" % asm_count
# print " ir : %d" % ir_count
if verbose:
print(" Refining BBs...")
bbs = self._refine_bbs(bbs)
if verbose:
print(" %d" % len(bbs))
# print " Checking gaps..."
# for curr, next in zip(bbs[:-1], bbs[1:]):
# if curr.address + curr.size != next.address:
# print "gap found @ %s" % hex(curr.address + curr.size)
if verbose:
print(" Stripping BBs...")
bbs = self._strip_bbs(bbs)
if verbose:
print(" %d" % len(bbs))
if verbose:
print(" Updating branches...")
self._update_branches(bbs)
if verbose:
print(" %d" % len(bbs))
return bbs
def _find_candidate_bbs(self, start_address, end_address, mode=BARF_DISASM_MIXED):
bbs = []
addrs_to_process = Queue()
addrs_processed = set()
addrs_to_process.put(start_address)
while not addrs_to_process.empty():
curr_addr = addrs_to_process.get()
            # There is no standard way to check whether an item is already in
            # the queue before pushing it, so it is necessary to check whether
            # the popped address has already been processed.
if curr_addr in addrs_processed:
continue
# print "curr_addr : ", hex(curr_addr)
bb = self._disassemble_bb(curr_addr, end_address + 0x1)
if bb.empty():
# print " empty bb"
continue
# print " valid bb"
# add bb to the list
bbs += [bb]
addrs_processed.add(curr_addr)
# linear sweep mode: add next addr to process queue
if mode in [BARF_DISASM_LINEAR, BARF_DISASM_MIXED]:
next_addr = bb.address + bb.size
# print "next_addr : ", hex(next_addr)
if next_addr < end_address and not next_addr in addrs_processed:
addrs_to_process.put(next_addr)
# recursive descent mode: add branches to process queue
if mode in [BARF_DISASM_RECURSIVE, BARF_DISASM_MIXED]:
for addr, branch_type in bb.branches:
if not addr in addrs_processed:
addrs_to_process.put(addr)
return bbs
def _refine_bbs(self, bbs):
bbs.sort(key=lambda x : x.address)
bbs_addrs = map(lambda x : x.address, bbs)
bbs_new = []
for idx, bb1 in enumerate(bbs):
# sys.stdout.write("\r Processing : %d/%d" % (idx, len(bbs)))
# sys.stdout.flush()
bb_divided = False
lower = bisect.bisect_left(bbs_addrs, bb1.start_address)
upper = bisect.bisect_right(bbs_addrs, bb1.end_address)
for bb2 in bbs[lower:upper]:
if bb1.contains(bb2.address) and bb1 != bb2:
# print "split!!", hex(bb2.address)
bba = self._divide_bb(bb1, bb2.address)
if len(bba.instrs) > 0 and bba not in bbs_new:
bbs_new += [bba]
bb_divided = True
break
if not bb_divided:
if bb1 not in bbs_new:
bbs_new += [bb1]
return bbs_new
def _strip_bbs(self, bbs):
return [bb for bb in map(self._strip_bb, bbs) if len(bb.instrs) > 0]
def _update_branches(self, bbs):
bb_addrs = [bb.address for bb in bbs]
for bb in bbs:
if not bb.taken_branch in bb_addrs:
bb.taken_branch = None
if not bb.not_taken_branch in bb_addrs:
bb.not_taken_branch = None
if not bb.direct_branch in bb_addrs:
bb.direct_branch = None
def _strip_bb(self, bb):
# top
while len(bb.instrs) > 0:
if bb.instrs[0].ir_instrs[0].mnemonic == ReilMnemonic.NOP:
del bb.instrs[0]
else:
break
# bottom
while len(bb.instrs) > 0:
if bb.instrs[-1].ir_instrs[0].mnemonic == ReilMnemonic.NOP:
del bb.instrs[-1]
else:
break
return bb
def _divide_bb(self, bb, address):
bb_new = BasicBlock()
for dinstr in bb.instrs:
if dinstr.address == address:
break
bb_new.instrs.append(dinstr)
bb_new.direct_branch = address
return bb_new
def _disassemble_bb(self, start_address, end_address):
bb_current = BasicBlock()
if start_address > end_address:
return bb_current
addr = start_address
taken = None
not_taken = None
direct = None
while addr < end_address:
start, end = addr, min(addr + self._lookahead_max, end_address)
asm, size = self._disasm.disassemble(self._mem[start:end], addr)
if not asm:
break
ir = self._ir_trans.translate(asm)
bb_current.instrs.append(DualInstruction(addr, asm, ir))
# if there is an 'end' instruction process it accordingly
if ir[-1].mnemonic == ReilMnemonic.RET:
break
# TODO: Manage 'call' instruction properly (without
# resorting to 'asm.mnemonic == "call"').
if ir[-1].mnemonic == ReilMnemonic.JCC and not asm.mnemonic == "call":
taken, not_taken, direct = self._extract_branches(addr, asm, size, ir)
break
# if ir[-1].mnemonic == ReilMnemonic.JCC and asm.mnemonic == "call":
# direct_branch = addr + size
# break
# update instruction pointer and iterate
addr += size
bb_current.taken_branch = taken
bb_current.not_taken_branch = not_taken
bb_current.direct_branch = direct
# print "bb addr : ", hex(bb_current.address), " bb end addr : ", hex(bb_current.end_address)
# print " taken :", hex(taken) if taken else ""
# print " not_taken :", hex(not_taken) if not_taken else ""
# print " direct :", hex(direct) if direct else ""
return bb_current
def _resolve_branch_address(self, jmp_instr, instrs):
dst = jmp_instr.operands[2]
if isinstance(dst, ReilImmediateOperand):
# branch address is an immediate
# Transform Reil address back to source arch address
return dst.immediate >> 8
else:
# try to resolve branch address
for instr in instrs[::-1]:
if instr.mnemonic == ReilMnemonic.STR and \
isinstance(instr.operands[0], ReilImmediateOperand) and \
instr.dst == dst:
# Transform Reil address back to source arch address
return instr.operands[0].immediate >> 8
def _extract_branches(self, addr, asm, size, ir):
taken_branch = None
not_taken_branch = None
direct_branch = None
instr_last = ir[-1]
if instr_last.mnemonic == ReilMnemonic.JCC:
cond = instr_last.operands[0]
dst = instr_last.operands[2]
branch_addr = self._resolve_branch_address(instr_last, ir)
# set branch address according to its type
if isinstance(cond, ReilImmediateOperand):
if cond.immediate == 0x0:
taken_branch = addr + size
not_taken_branch = branch_addr
if cond.immediate == 0x1 and asm.mnemonic == 'call':
direct_branch = addr + size
if cond.immediate == 0x1 and asm.mnemonic != 'call':
direct_branch = branch_addr
else:
taken_branch = branch_addr
not_taken_branch = addr + size
return taken_branch, not_taken_branch, direct_branch
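# Hypothetical end-to-end sketch (the disasm/memory/translator names below are
# assumed stand-ins for concrete BARF components, not defined in this file):
#   builder = BasicBlockBuilder(disasm, memory, translator)
#   bbs = builder.build(0x400000, 0x400fff)
#   cfg = BasicBlockGraph(bbs)
#   cfg.save('cfg_out', print_ir=False, format='dot')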
| [
"[email protected]"
]
| |
bbc89e8e7645a694b405dccb4acd25b4f0cc9544 | 84cfe9b0ca7209487231e0725f7ad0d233f09544 | /smv/views.py | e0abea56ca1a13c1798a6cffabfed45f0991342d | []
| no_license | archit-dwevedi/M4Plan | 3eefc12ea447d624bae6f758c3648d7caf825c1a | d162592748ea37bc070b6217365e8601a6ccdd9a | refs/heads/master | 2021-10-26T23:22:04.456014 | 2019-04-14T20:02:17 | 2019-04-14T20:02:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,397 | py | from django.shortcuts import render,redirect
from django.http import HttpResponse
from django.contrib import messages
from absenteeism.models import *
from skill_matrix.models import *
from leave_calendar.models import *
from .forms import *
from .models import *
import datetime
def smv(request):
if(request.method=='POST'):
form = Smv(request.POST)
if form.is_valid():
form.save()
return HttpResponse("<h1>SMV is submitted</h1>")
else:
messages.error(request,"Error")
else:
form = Smv()
return render(request,'smv/smv.html',{'form':form})
def dashsmv(request):
a=SMV.objects.all()
return render(request,'smv/dash_smv.html',{'a':a})
def dashpfm(request):
a=SMV.objects.all()
sam=[]
mcs=[]
for i in a:
#time=((i.pick_in_sec+i.main_Process_in_sec+i.turn_in_sec+i.dispose_in_sec)*i.s_P_I.s_p_i)/12
sam.append((((((((i.pick_in_sec+i.main_Process_in_sec+i.turn_in_sec+i.dispose_in_sec)/60)/12)*i.s_P_I.s_p_i)/20)*i.stitch_Length.stitch_length)*int(i.complexity.complx))*(1+int(i.personal_Allowance+i.fatigue_Allowance+i.delay_Allowance))*0.02*0.9)
print(sam)
for i in sam:
mcs.append(560/(480/(i*0.85)))
print(mcs)
return render(request,'smv/dash_pfm.html',{'a':a,'sam':sam,'mcs':mcs})
def newdashpfm(request):
a=PFM.objects.all()
return render(request,'smv/new_dash_pfm.html',{'a':a})
def ob(request):
if(request.method=='POST'):
form=Pfm(request.POST)
if(form.is_valid()):
global a
global d
global s
s=request.POST.get('section')
a=PFM.objects.filter(sec__name=s)
d=a
return redirect('/newob')
else:
messages.error(request,"Error")
else:
form=Pfm()
return render(request,'smv/ob.html',{'form':form})
def newob(request):
if(request.method=='POST'):
global d
myself=Ob(request.POST,operation=d)
if(myself.is_valid()):
global get
cat=myself.cleaned_data['category']
sub=myself.cleaned_data['subcategory']
get=myself.cleaned_data['Add Neccessary Operation']
print(get)
print(cat,sub)
return redirect('/dashob')
else:
messages.error(request,"Error")
else:
global a
global s
form = Ob(operation=a)
return render(request,'smv/ob.html',{'form':form,'s':s})
def dashob(request):
global get
global q
q=[]
sam=[]
for i in get:
q.append(SMV.objects.get(operation=i))
for i in q:
print(i.operation)
print(i.s_P_I)
sam.append((((((((i.pick_in_sec + i.main_Process_in_sec + i.turn_in_sec + i.dispose_in_sec) / 60) / 12) * i.s_P_I.s_p_i) / 20) * i.stitch_Length.stitch_length) * int(i.complexity.complx)) * ( 1 + int(i.personal_Allowance + i.fatigue_Allowance + i.delay_Allowance)) * 0.02 * 0.9)
return render(request,'smv/dashob.html',{'a':q,'sam':sam})
def layout(request):
global s
global q
return render(request,'smv/layout.html',{'a':s,'q':q})
def dashboard(request):
global s
global q
global get
ab=[]
d=datetime.datetime.now().date()
a=LeaveApplication.objects.all()
for i in a:
if(d<=i.end_date):
ab.append(i.key.user)
print(ab)
b=Person.objects.all()
ab2=[]
for j in b:
if(j.date==d):
if(j.status=='Absent' or j.status=='Leave' or j.status==None):
ab2.append(User.objects.get(username=j.name))
print(ab2)
c=Scale.objects.all()
#e=Employee.objects.all()
ss=ab+ab2
for m in ss:
for n in c:
if(m==n.use):
c=c.exclude(use=m)
print(c)
print(get)
for i in get:
for j in c:
if(str(j.operation)==i):
print(j.use,j.operation,j.level)
## m=lambda x:x==y
## for i in c:
## y=str(i.operation)
## print(list(map(m,get)))
list=zip(c,q)
return render(request,'smv/dashboard.html',{'a':s,'q':q,'c':c,'get':get,'list':list})
def desc(request):
return render(request,'smv/desc.html')
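# Refactoring note (illustrative only, not applied above): the SAM expression
# duplicated in dashpfm() and dashob() could be centralised in one helper:
#   def compute_sam(i):
#       handling = (i.pick_in_sec + i.main_Process_in_sec
#                   + i.turn_in_sec + i.dispose_in_sec)
#       return (((((handling / 60) / 12) * i.s_P_I.s_p_i) / 20)
#               * i.stitch_Length.stitch_length * int(i.complexity.complx)
#               * (1 + int(i.personal_Allowance + i.fatigue_Allowance
#                          + i.delay_Allowance)) * 0.02 * 0.9)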
| [
"[email protected]"
]
| |
45b49213838540d4cfa9b40c36aa8caf8d58558d | 38445323b49947266d72645ec973b02e96879eed | /harshad number.py | 8a99c9850563a0ad3ee51f2ed7074159f804f964 | []
| no_license | pooja-pichad/loop | 2d9989b472a2fbacf0a85da06d869016b2d74083 | 47dafba1253da98f98c8fa389e13283ce1e14dee | refs/heads/main | 2023-04-22T02:58:49.274211 | 2021-05-22T07:13:39 | 2021-05-22T07:13:39 | 369,741,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | # harshad number :
# it is take any number and add this two digit number and check the
# addition value is divisible bye this two digit number then it is divisible then its harshad
# number then it not divisiblr then it not harshad number
# forEx; 43
# 4+3=7
# 7/43
# num=int(input("enter a number "))
# i=0
# while i<1:
# a=num%10
# b=(num//10)%10
# c=(num//10)//10
# d=a+b+c
# i=i+1
# if num%d==0:
# print("harshad number")
# else:
# print("not harshad number")
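# A digit-sum check that works for any number of digits
# (illustrative alternative, not part of the original exercise):
#   def is_harshad(n):
#       return n % sum(int(ch) for ch in str(n)) == 0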
i = 1
while i < 1000:
    a = i % 10           # ones digit
    b = (i // 10) % 10   # tens digit
    c = (i // 10) // 10  # hundreds digit (valid because i < 1000)
    d = a + b + c        # digit sum
    # Test i before advancing it; the original incremented first, which
    # compared each number against the previous number's digit sum.
    if i % d == 0:
        print("harshad number", i)
    else:
        print("not harshad number", i)
    i = i + 1
| [
"[email protected]"
]
| |
0d3b899d072571d9b6f47263ee86838fd0b208a6 | 6ecc1d05bbd9ca2c1d21322faef076c1f28454db | /chrome/browser/ui/webui/chromeos/login/DEPS | 52acfb6a38a1c062632e3dbccf09ecbcc162ff4b | [
"BSD-3-Clause"
]
| permissive | pandareen/chromium | 0e3a9fb92bb9ad027d5b3482a6b03d0bb51c16a1 | 3ea799335afb5178c519f9e12db8b31390375736 | refs/heads/master | 2023-03-14T05:47:29.433132 | 2018-06-27T07:21:08 | 2018-06-27T07:21:08 | 138,843,522 | 0 | 0 | null | 2018-06-27T07:09:52 | 2018-06-27T07:09:52 | null | UTF-8 | Python | false | false | 863 | specific_include_rules = {
# TODO(mash): Fix. https://crbug.com/770866
"core_oobe_handler\.cc": [
"+ash/shell.h",
],
"oobe_display_chooser\.cc": [
"+ash/display/window_tree_host_manager.h",
"+ash/shell.h",
],
# TODO(mash): Fix. https://crbug.com/678990
"signin_screen_handler\.cc": [
"+ash/detachable_base",
"+ash/shell.h",
],
"signin_screen_handler\.h": [
"+ash/detachable_base/detachable_base_observer.h",
],
# Tests.
"oobe_display_chooser_browsertest\.cc": [
"+ash/shell.h",
],
"oobe_display_chooser_unittest.cc": [
"+ash/display/display_configuration_controller.h",
"+ash/shell.h",
"+ash/test/ash_test_base.h",
# TODO(mash): Remove. http://crbug.com/720917.
"+ui/events/devices/device_data_manager.h",
],
"signin_userlist_unittest\.cc": [
"+ash/test/ash_test_base.h"
],
}
| [
"[email protected]"
]
| ||
1984950eeeabd376b7d534bbc788f09949c9ea71 | f3416956f9bfc7af870867e2fe8644f08d513b23 | /combine/contest_20150310a/data_prep/prepare_pgmodel.py | 18a14ff2cbcfdb41cfe5e56133323bb4b304d6ed | []
| no_license | dsjoerg/blundercheck | a71012c0d3ded929599d191d4f73dcb14f94030a | 04fb39ba0dd1591b387f573f767973518b688822 | refs/heads/master | 2021-01-18T18:35:21.992359 | 2015-03-24T18:11:11 | 2015-03-24T18:11:11 | 27,928,453 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,108 | py | #!/usr/bin/env python
from pandas import *
from numpy import *
from djeval import *
import csv, code
import cPickle as pickle
from sklearn.externals import joblib
GAMELIMIT=60000
NUM_GAMES=100000
def shell():
vars = globals()
vars.update(locals())
shell = code.InteractiveConsole(vars)
shell.interact()
msg("Hi! Reading eheaders")
eheaders_filename = '/data/eheaders.p'
eheaders_file = open(eheaders_filename, 'r')
eheaders = pickle.load(eheaders_file)
elos = eheaders['elos']
result = eheaders['result']
checkmate = eheaders['checkmate']
openings = eheaders['openings']
ocount = eheaders['opening_count']
msg("Hi! Reading crunched movescores from %s" % sys.argv[1])
crunched_path = sys.argv[1]
crunched_df = read_csv(crunched_path, sep=',', engine='c', index_col=['gamenum', 'side'])
msg("Hi! Reading GB scores from %s" % sys.argv[2])
gb_path = sys.argv[2]
gb_df = read_csv(gb_path, sep=',', engine='c', index_col=['gamenum'])
msg("Hi! Reading depthstats")
depthstats_path = '/data/depthstats.csv'
columns = [
'gamenum',
'side',
'mean_depth',
'mean_seldepth',
'mean_depths_agreeing_ratio',
'mean_deepest_agree_ratio',
'pct_sanemoves',
'gamelength',
'mean_num_bestmoves',
'mean_num_bestmove_changes',
'mean_bestmove_depths_agreeing',
'mean_deepest_change',
'mean_deepest_change_ratio',
]
depthstats_df = read_csv(depthstats_path, sep=' ', engine='c', header=None, names=columns, index_col=False)
depthstats_df = depthstats_df.set_index(['gamenum', 'side'])
# we have the gamelength column in another df, drop it here to avoid conflicts
depthstats_df.drop('gamelength', axis=1, inplace=True)
msg("Hi! Reading material")
material_path = '/data/material.csv'
columns = [
'gamenum',
'material_break_0',
'material_break_1',
'material_break_2',
'material_break_3',
'material_break_4',
'opening_length',
'midgame_length',
'endgame_length',
'mean_acwsa',
'mean_acwsa_0',
'mean_acwsa_1',
'mean_acwsa_2',
'mean_acwsa_3',
'mean_acwsa_4',
'mean_acwsa_5',
'mean_acwsa_6',
'mean_acwsa_7',
'mean_acwsa_8',
'mean_acwsa_9',
]
material_df = read_csv(material_path, sep=' ', engine='c', header=None, names=columns, index_col=False)
material_df = material_df.set_index(['gamenum'])
material_df = material_df.reindex(range(1, NUM_GAMES+1))
material_df = material_df.fillna(material_df.mean())
msg("Reading ELOscored data")
eloscored_cols = [
'gamenum',
'final_elo',
'final_ply',
'final_num_games',
'final_elo_stdev',
'elopath_min',
'elopath_max',
]
eloscored_df = read_csv('/data/data.pgn.eloscored21', sep=',', engine='c', header=None, names=eloscored_cols, index_col=False)
eloscored_df = eloscored_df.set_index(['gamenum'])
msg("Reading ELOscored data 4")
eloscored4_cols = [
'gamenum',
'final_elo',
'final_ply',
'final_num_games',
'final_elo_stdev',
]
eloscored4_cols[1:] = [x + '_elo4' for x in eloscored4_cols[1:]]
eloscored4_df = read_csv('/data/data.pgn.eloscored4', sep=',', engine='c', header=None, names=eloscored4_cols, index_col=False)
eloscored4_df = eloscored4_df.set_index(['gamenum'])
msg("Reading ELOscored data 10")
eloscored10_cols = [
'gamenum',
'final_elo',
'final_ply',
'final_num_games',
'final_elo_stdev',
]
eloscored10_cols[1:] = [x + '_elo10' for x in eloscored10_cols[1:]]
eloscored10_df = read_csv('/data/data.pgn.eloscored10', sep=',', engine='c', header=None, names=eloscored10_cols, index_col=False)
eloscored10_df = eloscored10_df.set_index(['gamenum'])
msg("Hi! Reading moveaggs")
move_aggs = joblib.load('/data/move_aggs.p')
move_aggs.fillna(move_aggs.mean(), inplace=True)
msg("Hi! Reading wmoveaggs")
wmove_aggs = joblib.load('/data/wmove_aggs.p')
wmove_aggs.fillna(wmove_aggs.mean(), inplace=True)
wmove_aggs.rename(columns={'elo_pred': 'moveelo_weighted'}, inplace=True)
do_elochunk = True
if do_elochunk:
ch_agg_df = joblib.load('/data/chunk_aggs.p')
ch_agg_df.index = ch_agg_df.index.droplevel('elo')
ch_agg_df.columns = ['elochunk_' + x for x in ch_agg_df.columns]
msg("Hi! Setting up playergame rows")
if do_elochunk:
elorange_cols = list(ch_agg_df.columns.values)
msg("elorange cols are %s" % elorange_cols)
msg('Preparing ELO df')
elo_rows = [[x[0][0], x[0][1], x[1]] for x in elos.items()]
elo_df = DataFrame(elo_rows, columns=['gamenum','side','elo'])
elo_df.set_index(['gamenum','side'], inplace=True)
msg('Joining DFs')
supplemental_dfs = [move_aggs[['mean', 'median', '25', '10', 'min', 'max', 'stdev']], wmove_aggs['moveelo_weighted'], depthstats_df, elo_df, crunched_df]
if do_elochunk:
supplemental_dfs.append(ch_agg_df)
mega_df = concat(supplemental_dfs, axis=1)
mega_df = mega_df.join(material_df, how='outer')
mega_df = mega_df.join(eloscored_df, how='outer')
mega_df = mega_df.join(eloscored4_df, how='outer')
mega_df = mega_df.join(eloscored10_df, how='outer')
mega_df = mega_df.join(gb_df, how='outer')
yy_df = mega_df
msg("hi, columns are %s" % yy_df.columns)
# TODO confirm that all columns are there
def opening_feature(opening):
if ocount[opening] < 20:
return 'rare'
if ocount[opening] < 200:
return 'uncommon'
return opening
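# Behaviour of opening_feature, derived from the thresholds above:
#   count < 20  -> 'rare'
#   count < 200 -> 'uncommon'
#   otherwise   -> the opening name itself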
msg("Hi! Computing additional features")
yy_df['opening_feature'] = [opening_feature(openings[x]) for x in yy_df.index.get_level_values('gamenum')]
yy_df['opening_count'] = [ocount[openings[x]] for x in yy_df.index.get_level_values('gamenum')]
yy_df['any_grit'] = (yy_df['grit'] > 0)
yy_df['major_grit'] = (yy_df['grit'] > 5)
yy_df['nmerror'] = log((-1 * yy_df['meanerror']).clip(1,60)).clip(1,4) - 2.53
yy_df['premature_quit'] = (yy_df['gameoutcome'] == -1) & (yy_df['my_final_equity'] > -100)
yy_df['drawn_game'] = (yy_df['gameoutcome'] == 0)
yy_df['ended_by_checkmate'] = yy_df['won_by_checkmate'] | yy_df['lost_by_checkmate']
yy_df['noblunders'] = (yy_df['blunderrate'] == 0)
yy_df['final_equity'] = yy_df['my_final_equity'].abs().clip(0,300)
yy_df['early_lead'] = yy_df['early_lead'].clip(0,100)
yy_df['mean_depth_clipped'] = yy_df['mean_depth'].clip(0,25)
yy_df['gamelength_clipped'] = yy_df['gamelength'].clip(20,200)
# prepare opponent_df with selected info about opponent
opponent_columns = ['meanerror', 'blunderrate', 'perfectrate', 'grit', 'meanecho', 'mate_created', 'mate_destroyed', 'q_error_one', 'q_error_two', 'stdeverror', 'elo', 'any_grit', 'noblunders', 'nmerror', 'mean_depths_agreeing_ratio', 'mean_deepest_agree_ratio']
if do_elochunk:
opponent_columns.extend(elorange_cols)
opponent_df = yy_df[opponent_columns]
opponent_df = opponent_df.reset_index()
opponent_df['side'] = opponent_df['side'] * -1
opponent_df.set_index(['gamenum', 'side'], inplace=True)
opponent_df.columns = ['opponent_' + x for x in opponent_df.columns]
yy_df = concat([yy_df, opponent_df], axis=1)
# more derived columns that use opponent comparisons
yy_df['elo_advantage'] = (yy_df['elo'] - yy_df['opponent_elo']).clip(-500, 500)
yy_df['max_nmerror'] = yy_df[['nmerror', 'opponent_nmerror']].max(axis=1)
yy_df['min_nmerror'] = yy_df[['nmerror', 'opponent_nmerror']].min(axis=1)
yy_df['max_meanecho'] = yy_df[['meanecho', 'opponent_meanecho']].max(axis=1)
yy_df['elo_avg'] = (yy_df['elo'] + yy_df['opponent_elo'])/2.0
yy_df['elo_advantage'] = (yy_df['elo'] - yy_df['opponent_elo'])
yy_df['winner_elo_advantage'] = yy_df['elo_advantage'] * yy_df['gameoutcome']
msg("Hi! Computing dummy variables")
categorical_features = ['opening_feature']
dummies = get_dummies(yy_df[categorical_features]).astype(np.int8)
yy_df = yy_df.join(dummies)
# fill in missing values
msg("Hi! Filling in missing values")
full_index = pandas.MultiIndex.from_product([range(1,NUM_GAMES + 1), [1,-1]], names=['gamenum', 'side'])
yy_df = yy_df.reindex(full_index)
yy_elo = yy_df['elo'].copy(True)
yy_df.fillna(yy_df.mean(numeric_only=True), inplace=True)
yy_df.fillna(False, inplace=True)
yy_df['elo'] = yy_elo
# stupid patch for some stupid opening feature that got assigned to False by fillna ?!!?!?!?
yy_df.loc[yy_df['opening_feature'] == False,'opening_feature'] = 'rare'
msg("Hi! Writing yy_df to disk")
yy_df.to_pickle(sys.argv[3])
msg("Column counts are:")
counts = yy_df.count(axis=0)
print counts
| [
"[email protected]"
]
| |
c76e7b57a50d1595e23179c5dde7838452d683e2 | 95789a6503101b98548570f48e80ae12b964fff1 | /rango/views.py | 5fb1ab348828e249de3637bdc7da82f5648f0859 | []
| no_license | NikolayBorovenskiy/How-to-Tango-with-Django | be7a2d0b8354f17e1ec98a7bc5714ea00b386f7a | f04c9e534c84de2b8885dbaaa5144f4d748c33a0 | refs/heads/master | 2020-05-14T15:49:32.317274 | 2015-08-09T06:46:07 | 2015-08-09T06:46:07 | 39,078,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,340 | py | from django.template import RequestContext
from django.shortcuts import render_to_response, render, redirect
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from rango.models import Category, Page
from rango.forms import CategoryForm, PageForm, UserForm, UserProfileForm
from datetime import datetime
from rango.bing_search import run_query
def index(request):
#test cookies
request.session.set_test_cookie()
# Request the context of the request.
# The context contains information such as the client's machine details, for example.
context = RequestContext(request)
# Construct a dictionary to pass to the template engine as its context.
# Note the key boldmessage is the same as {{ boldmessage }} in the template!
category_list = Category.objects.order_by('-views')[:5]
context_dict = {'categories': category_list}
for category in category_list:
category.url = category.name.replace(' ', '_')
# Get the number of visits to the site.
# We use the COOKIES.get() function to obtain the visits cookie.
# If the cookie exists, the value returned is casted to an integer.
# If the cookie doesn't exist, we default to zero and cast that.
visits = int(request.COOKIES.get('visits', '1'))
reset_last_visit_time = False
response = render(request, 'rango/index.html', context_dict)
# Does the cookie last_visit exist?
if 'last_visit' in request.COOKIES:
# Yes it does! Get the cookie's value.
last_visit = request.COOKIES['last_visit']
# Cast the value to a Python date/time object.
last_visit_time = datetime.strptime(last_visit[:-7], "%Y-%m-%d %H:%M:%S")
# If it's been more than a day since the last visit...
if (datetime.now() - last_visit_time).seconds > 10:
visits = visits + 1
context_dict['visits'] = visits
response = render(request, 'rango/index.html', context_dict)
# ...and flag that the cookie last visit needs to be updated
reset_last_visit_time = True
else:
# Cookie last_visit doesn't exist, so flag that it should be set.
reset_last_visit_time = True
context_dict['visits'] = visits
print visits
#Obtain our Response object early so we can add cookie information.
response = render(request, 'rango/index.html', context_dict)
if reset_last_visit_time:
response.set_cookie('last_visit', datetime.now())
response.set_cookie('visits', visits)
# Return response back to the user, updating any cookies that need changed.
return response
def category(request, category_name_slug):
context_dict = {}
context_dict['result_list'] = None
context_dict['query'] = None
context = RequestContext(request)
category_name = category_name_slug.replace('_', ' ')
context_dict['category_name'] = category_name
if request.method == 'POST':
query = request.POST['query'].strip()
if query:
# Run our Bing function to get the results list!
result_list = run_query(query)
context_dict['result_list'] = result_list
context_dict['query'] = query
try:
category = Category.objects.get(name=category_name)
context_dict['category_name'] = category.name
pages = Page.objects.filter(category=category).order_by('-views')
context_dict['pages'] = pages
context_dict['category'] = category
except Category.DoesNotExist:
pass
if not context_dict['query']:
context_dict['query'] = category.name
return render(request, 'rango/category.html', context_dict)
def add_category(request):
# A HTTP POST?
if request.method == 'POST':
form = CategoryForm(request.POST)
# Have we been provided with a valid form?
if form.is_valid():
# Save the new category to the database.
form.save(commit=True)
# Now call the index() view.
# The user will be shown the homepage.
return index(request)
else:
# The supplied form contained errors - just print them to the terminal.
print form.errors
else:
# If the request was not a POST, display the form to enter details.
form = CategoryForm()
# Bad form (or form details), no form supplied...
# Render the form with error messages (if any).
return render(request, 'rango/add_category.html', {'form': form})
def add_page(request, category_name_slug):
try:
cat = Category.objects.get(name=category_name_slug)
except Category.DoesNotExist:
cat = None
if request.method == 'POST':
form = PageForm(request.POST)
if form.is_valid():
if cat:
page = form.save(commit=False)
page.category = cat
page.views = 0
page.save()
# probably better to use a redirect here.
return category(request, category_name_slug)
else:
print form.errors
else:
form = PageForm()
context_dict = {'form':form, 'category': cat}
return render(request, 'rango/add_page.html', context_dict)
def register(request):
if request.session.test_cookie_worked():
print ">>>> TEST COOKIE WORKED!"
request.session.delete_test_cookie()
# A boolean value for telling the template whether the registration was successful.
# Set to False initially. Code changes value to True when registration succeeds.
registered = False
# If it's a HTTP POST, we're interested in processing form data.
if request.method == 'POST':
# Attempt to grab information from the raw form information.
# Note that we make use of both UserForm and UserProfileForm.
user_form = UserForm(data=request.POST)
profile_form = UserProfileForm(data=request.POST)
# If the two forms are valid...
if user_form.is_valid() and profile_form.is_valid():
# Save the user's form data to the database.
user = user_form.save()
# Now we hash the password with the set_password method.
# Once hashed, we can update the user object.
user.set_password(user.password)
user.save()
# Now sort out the UserProfile instance.
# Since we need to set the user attribute ourselves, we set commit=False.
# This delays saving the model until we're ready to avoid integrity problems.
profile = profile_form.save(commit=False)
profile.user = user
# Did the user provide a profile picture?
# If so, we need to get it from the input form and put it in the UserProfile model.
if 'picture' in request.FILES:
profile.picture = request.FILES['picture']
# Now we save the UserProfile model instance.
profile.save()
# Update our variable to tell the template registration was successful.
registered = True
# Invalid form or forms - mistakes or something else?
# Print problems to the terminal.
# They'll also be shown to the user.
else:
print user_form.errors, profile_form.errors
# Not a HTTP POST, so we render our form using two ModelForm instances.
# These forms will be blank, ready for user input.
else:
user_form = UserForm()
profile_form = UserProfileForm()
# Render the template depending on the context.
return render(request,
'rango/register.html',
{'user_form': user_form, 'profile_form': profile_form, 'registered': registered} )
def user_login(request):
# If the request is a HTTP POST, try to pull out the relevant information.
if request.method == 'POST':
# Gather the username and password provided by the user.
# This information is obtained from the login form.
# We use request.POST.get('<variable>') as opposed to request.POST['<variable>'],
# because the request.POST.get('<variable>') returns None, if the value does not exist,
# while the request.POST['<variable>'] will raise key error exception
username = request.POST.get('username')
password = request.POST.get('password')
# Use Django's machinery to attempt to see if the username/password
# combination is valid - a User object is returned if it is.
user = authenticate(username=username, password=password)
# If we have a User object, the details are correct.
# If None (Python's way of representing the absence of a value), no user
# with matching credentials was found.
if user:
# Is the account active? It could have been disabled.
if user.is_active:
# If the account is valid and active, we can log the user in.
# We'll send the user back to the homepage.
login(request, user)
return HttpResponseRedirect('/rango/')
else:
# An inactive account was used - no logging in!
return HttpResponse("Your Rango account is disabled.")
else:
# Bad login details were provided. So we can't log the user in.
print "Invalid login details: {0}, {1}".format(username, password)
return HttpResponse("Invalid login details supplied.")
# The request is not a HTTP POST, so display the login form.
# This scenario would most likely be a HTTP GET.
else:
# No context variables to pass to the template system, hence the
# blank dictionary object...
return render(request, 'rango/login.html', {})
@login_required
def restricted(request):
return HttpResponse("Since you're logged in, you can see this text!")
# Use the login_required() decorator to ensure only those logged in can access the view.
@login_required
def user_logout(request):
# Since we know the user is logged in, we can now just log them out.
logout(request)
# Take the user back to the homepage.
return HttpResponseRedirect('/rango/')
def search(request):
result_list = []
if request.method == 'POST':
query = request.POST['query'].strip()
if query:
# Run our Bing function to get the results list!
result_list = run_query(query)
return render(request, 'rango/search.html', {'result_list': result_list})
def track_url(request):
page_id = None
url = '/rango/'
if request.method == 'GET':
if 'page_id' in request.GET:
page_id = request.GET['page_id']
try:
page = Page.objects.get(id=page_id)
page.views = page.views + 1
page.save()
url = page.url
            except Page.DoesNotExist:
                pass
return redirect(url)
@login_required
def like_category(request):
cat_id = None
if request.method == 'GET':
cat_id = request.GET['category_id']
likes = 0
if cat_id:
cat = Category.objects.get(id=int(cat_id))
if cat:
likes = cat.likes + 1
cat.likes = likes
cat.save()
return HttpResponse(likes)
def get_category_list(max_results=0, starts_with=''):
cat_list = []
if starts_with:
cat_list = Category.objects.filter(name__istartswith=starts_with)
if max_results > 0:
if len(cat_list) > max_results:
cat_list = cat_list[:max_results]
return cat_list
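# Example (illustrative): get_category_list(8, 'Py') returns up to eight
# Category objects whose names start with "Py" (case-insensitive); an empty
# starts_with yields an empty list.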
def suggest_category(request):
cat_list = []
starts_with = ''
if request.method == 'GET':
starts_with = request.GET['suggestion']
cat_list = get_category_list(8, starts_with)
print 'Hello', cat_list
return render(request, 'rango/index.html', {'cat_list': cat_list }) | [
"[email protected]"
]
| |
9905b137698c4c8f41b452623054c57528b70709 | f09dc121f213f2881df3572288b7ee5b39246d73 | /aliyun-python-sdk-ddoscoo/aliyunsdkddoscoo/request/v20200101/DeleteAsyncTaskRequest.py | 55e30bb35da17abec88db8989a885f42142441d6 | [
"Apache-2.0"
]
| permissive | hetw/aliyun-openapi-python-sdk | 2f31378ad6be0896fb8090423f607e9c7d3ae774 | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | refs/heads/master | 2023-01-19T22:42:36.214770 | 2020-12-04T10:55:14 | 2020-12-04T10:55:14 | 318,689,093 | 1 | 0 | NOASSERTION | 2020-12-05T03:03:03 | 2020-12-05T03:03:03 | null | UTF-8 | Python | false | false | 1,620 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkddoscoo.endpoint import endpoint_data
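# Illustrative usage sketch (the credentials, region and task id below are
# placeholders, not values from this SDK):
#
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = DeleteAsyncTaskRequest()
#     request.set_TaskId(123)
#     response = client.do_action_with_exception(request)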
class DeleteAsyncTaskRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ddoscoo', '2020-01-01', 'DeleteAsyncTask')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_TaskId(self):
return self.get_query_params().get('TaskId')
def set_TaskId(self,TaskId):
self.add_query_param('TaskId',TaskId) | [
"[email protected]"
]
| |
307b39b476091ab984dde86e503be570839f4667 | 77a7508c3a647711191b924959db80fb6d2bd146 | /src/gamesbyexample/countingquiz.py | 8b3131533dd87c5a56493d1814d27b3cca90f27e | [
"MIT"
]
| permissive | surlydev/PythonStdioGames | ff7edb4c8c57a5eb6e2036e2b6ebc7e23ec994e0 | d54c2509c12a5b1858eda275fd07d0edd456f23f | refs/heads/master | 2021-05-22T21:01:15.529159 | 2020-03-26T07:34:10 | 2020-03-26T07:34:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,345 | py | """Counting Quiz, by Al Sweigart [email protected]
Use multiplication and subtraction to count the number of stars shown
as fast as possible.
Tags: short, math"""
import math, random, time
def main():
print('''Counting Quiz, by Al Sweigart [email protected]
Use multiplication and subtraction to count the number of stars shown
as fast as possible. The quiz is 60 seconds long. For example:
* * * * * *
* * * * *
* * * * *
This is a 6 x 3 star field with 2 missing stars.
The answer is 6 x 3 - 2 = 16
''')
while True:
input('Press Enter to begin...')
runQuiz()
print('Would you like to play again? Y/N')
response = input().upper()
if not response.startswith('Y'):
print('Thanks for playing!')
break
def runQuiz():
correct = 0
startTime = time.time()
while time.time() < startTime + 60:
print('\n' * 40) # Clear the screen by printing several newlines.
# Generate the problem and the star field to display:
width = random.randint(1, 10)
height = random.randint(1, 10)
canvas = {}
for x in range(width):
for y in range(height):
canvas[(x, y)] = '*'
        # int() keeps both bounds integral; randint() does not accept the float that sqrt() returns.
        numMissing = random.randint(0, int(math.sqrt(width * height) // 2))
for i in range(numMissing):
while True:
x = random.randint(0, width - 1)
y = random.randint(0, height - 1)
if canvas[(x, y)] == '*':
break
canvas[(x, y)] = ' '
answer = width * height - numMissing
# Display the star field:
for y in range(height):
for x in range(width):
print(canvas[(x, y)] + ' ', end='')
print() # Print a newline.
# Let the player answer and determine if they're right or wrong.
response = input('Enter the number of stars. > ')
if response.isdecimal() and int(response) == answer:
correct += 1
else:
print('Wrong:', answer)
time.sleep(1)
print('Time\'s up!')
print('You were able to count', correct, 'star fields correctly.')
print()
# If the program is run (instead of imported), run the game:
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
8fd5e717b4d06d2f26535413e07fae832635769d | 72e463c26daf79b7d380db59a58849e3cd095a7e | /week7/day1_api.py | f7bcb6d95489339333501141914115cb6d9975ba | []
| no_license | tdhuynh/tiy_class_notes | dcc5454af63ca888cfdb99e85f4370cabce88f88 | a254d77f52cc438476d80ff58bfa9759de7826fa | refs/heads/master | 2020-04-15T12:19:30.045552 | 2016-11-09T14:30:06 | 2016-11-09T14:30:06 | 68,213,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | import requests
# result = requests.get("http://swapi.co/api/people/")
# # print(result.text)
# json_result = result.json()
# # print(json_result)
# # print(json_result["name"])
# for person in json_result["results"]:
# print(person["name"])
# result = requests.get(json_result["next"])
# json_result = result.json()
#
# for person in json_result["results"]:
# print(person["name"])
###################
def get_data(endpoint, lookup="name"):
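    # Page through the endpoint: SWAPI responses carry a "next" URL, which is
    # None on the last page and therefore ends the while loop.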
url = "http://swapi.co/api/{}/".format(endpoint)
while url:
result = requests.get(url)
json_result = result.json()
for person in json_result["results"]:
print(person[lookup])
if input("Press Enter to keep going, type 'n' to stop " ):
break
url = json_result["next"]
while True:
value = input("What do you want to search for? (films) or (people)? ")
    if value == "films":
        get_data(value, lookup="title")
    else:
        get_data(value)
| [
"[email protected]"
]
| |
c4ef0a5ad842febae7dc7d0f6b86210f665d8c52 | 71b7b6d84a61f514b038fac7741e6d16973fcaa9 | /devel/lib/python2.7/dist-packages/object_manipulation_msgs/msg/_GraspHandPostureExecutionFeedback.py | 239a1161abae63cd1fec56383bbec0f1c7153957 | []
| no_license | YiKangJ/perception_driven_ws | 15c02e523f1a708fe63b216d73019c8c2bde97a1 | 0a0f8fcbe3f5fed26439f449999b85f1e38c0f70 | refs/heads/master | 2020-04-01T19:47:48.372111 | 2018-10-18T06:17:57 | 2018-10-18T06:17:57 | 153,571,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | /home/jyk/perception_driven_ws/devel/.private/object_manipulation_msgs/lib/python2.7/dist-packages/object_manipulation_msgs/msg/_GraspHandPostureExecutionFeedback.py | [
"[email protected]"
]
| |
40be0ddf55f39cfcc4482a4bd777e333af9190e2 | 8ef8e6818c977c26d937d09b46be0d748022ea09 | /cv/distiller/CWD/pytorch/mmrazor/tests/test_models/test_losses/test_distillation_losses.py | 77233b81fce0cffa1c0bce23a3ba60bdeed31133 | [
"Apache-2.0"
]
| permissive | Deep-Spark/DeepSparkHub | eb5996607e63ccd2c706789f64b3cc0070e7f8ef | 9d643e88946fc4a24f2d4d073c08b05ea693f4c5 | refs/heads/master | 2023-09-01T11:26:49.648759 | 2023-08-25T01:50:18 | 2023-08-25T01:50:18 | 534,133,249 | 7 | 6 | Apache-2.0 | 2023-03-28T02:54:59 | 2022-09-08T09:07:01 | Python | UTF-8 | Python | false | false | 8,154 | py | # Copyright (c) OpenMMLab. All rights reserved.
from unittest import TestCase
import torch
from mmengine.structures import BaseDataElement
from mmrazor import digit_version
from mmrazor.models import (ABLoss, ActivationLoss, ATLoss, CRDLoss, DKDLoss,
FBKDLoss, FTLoss, InformationEntropyLoss,
KDSoftCELoss, MGDLoss, OFDLoss, OnehotLikeLoss,
PKDLoss)
class TestLosses(TestCase):
@classmethod
def setUpClass(cls):
cls.feats_1d = torch.randn(5, 6)
cls.feats_2d = torch.randn(5, 2, 3)
cls.feats_3d = torch.randn(5, 2, 3, 3)
num_classes = 6
cls.labels = torch.randint(0, num_classes, [5])
def test_ofd_loss(self):
ofd_loss = OFDLoss()
self.normal_test_1d(ofd_loss)
self.normal_test_3d(ofd_loss)
# test the calculation
s_feat_0 = torch.Tensor([[1, 1], [2, 2], [3, 3]])
t_feat_0 = torch.Tensor([[0, 0], [1, 1], [2, 2]])
ofd_loss_num_0 = ofd_loss.forward(s_feat_0, t_feat_0)
assert ofd_loss_num_0 != torch.tensor(0.0)
s_feat_1 = torch.Tensor([[1, 1], [2, 2], [3, 3]])
t_feat_1 = torch.Tensor([[2, 2], [3, 3], [4, 4]])
ofd_loss_num_1 = ofd_loss.forward(s_feat_1, t_feat_1)
assert ofd_loss_num_1 != torch.tensor(0.0)
s_feat_2 = torch.Tensor([[-3, -3], [-2, -2], [-1, -1]])
t_feat_2 = torch.Tensor([[-2, -2], [-1, -1], [0, 0]])
ofd_loss_num_2 = ofd_loss.forward(s_feat_2, t_feat_2)
assert ofd_loss_num_2 == torch.tensor(0.0)
def normal_test_1d(self, loss_instance, labels=False):
args = tuple([self.feats_1d, self.feats_1d])
if labels:
args += (self.labels, )
loss_1d = loss_instance.forward(*args)
self.assertTrue(loss_1d.numel() == 1)
def normal_test_2d(self, loss_instance, labels=False):
args = tuple([self.feats_2d, self.feats_2d])
if labels:
args += (self.labels, )
loss_2d = loss_instance.forward(*args)
self.assertTrue(loss_2d.numel() == 1)
def normal_test_3d(self, loss_instance, labels=False):
args = tuple([self.feats_3d, self.feats_3d])
if labels:
args += (self.labels, )
loss_3d = loss_instance.forward(*args)
self.assertTrue(loss_3d.numel() == 1)
def test_ab_loss(self):
ab_loss_cfg = dict(loss_weight=1.0, margin=1.0)
ab_loss = ABLoss(**ab_loss_cfg)
self.normal_test_1d(ab_loss)
self.normal_test_2d(ab_loss)
self.normal_test_3d(ab_loss)
def _mock_crd_data_sample(self, sample_idx_list):
data_samples = []
for _idx in sample_idx_list:
data_sample = BaseDataElement()
data_sample.set_data(dict(sample_idx=_idx))
data_samples.append(data_sample)
return data_samples
def test_crd_loss(self):
crd_loss = CRDLoss(**dict(neg_num=5, sample_n=10, dim_out=6))
sample_idx_list = torch.tensor(list(range(5)))
data_samples = self._mock_crd_data_sample(sample_idx_list)
loss = crd_loss.forward(self.feats_1d, self.feats_1d, data_samples)
self.assertTrue(loss.numel() == 1)
# test the calculation
s_feat_0 = torch.randn((5, 6))
t_feat_0 = torch.randn((5, 6))
crd_loss_num_0 = crd_loss.forward(s_feat_0, t_feat_0, data_samples)
assert crd_loss_num_0 != torch.tensor(0.0)
s_feat_1 = torch.randn((5, 6))
t_feat_1 = torch.rand((5, 6))
sample_idx_list_1 = torch.tensor(list(range(5)))
data_samples_1 = self._mock_crd_data_sample(sample_idx_list_1)
crd_loss_num_1 = crd_loss.forward(s_feat_1, t_feat_1, data_samples_1)
assert crd_loss_num_1 != torch.tensor(0.0)
def test_dkd_loss(self):
dkd_loss_cfg = dict(loss_weight=1.0)
dkd_loss = DKDLoss(**dkd_loss_cfg)
# dkd requires label logits
self.normal_test_1d(dkd_loss, labels=True)
def test_ft_loss(self):
ft_loss_cfg = dict(loss_weight=1.0)
ft_loss = FTLoss(**ft_loss_cfg)
assert ft_loss.loss_weight == 1.0
self.normal_test_1d(ft_loss)
self.normal_test_2d(ft_loss)
self.normal_test_3d(ft_loss)
def test_dafl_loss(self):
dafl_loss_cfg = dict(loss_weight=1.0)
ac_loss = ActivationLoss(**dafl_loss_cfg, norm_type='abs')
oh_loss = OnehotLikeLoss(**dafl_loss_cfg)
ie_loss = InformationEntropyLoss(**dafl_loss_cfg, gather=False)
# normal test with only one input
loss_ac = ac_loss.forward(self.feats_1d)
self.assertTrue(loss_ac.numel() == 1)
loss_oh = oh_loss.forward(self.feats_1d)
self.assertTrue(loss_oh.numel() == 1)
loss_ie = ie_loss.forward(self.feats_1d)
self.assertTrue(loss_ie.numel() == 1)
with self.assertRaisesRegex(AssertionError,
'"norm_type" must be "norm" or "abs"'):
_ = ActivationLoss(**dafl_loss_cfg, norm_type='random')
# test gather_tensors
ie_loss = InformationEntropyLoss(**dafl_loss_cfg, gather=True)
ie_loss.world_size = 2
if digit_version(torch.__version__) >= digit_version('1.8.0'):
with self.assertRaisesRegex(
RuntimeError,
'Default process group has not been initialized'):
loss_ie = ie_loss.forward(self.feats_1d)
else:
with self.assertRaisesRegex(
AssertionError,
'Default process group is not initialized'):
loss_ie = ie_loss.forward(self.feats_1d)
def test_kdSoftce_loss(self):
kdSoftce_loss_cfg = dict(loss_weight=1.0)
kdSoftce_loss = KDSoftCELoss(**kdSoftce_loss_cfg)
# kd soft ce loss requires label logits
self.normal_test_1d(kdSoftce_loss, labels=True)
def test_at_loss(self):
at_loss_cfg = dict(loss_weight=1.0)
at_loss = ATLoss(**at_loss_cfg)
assert at_loss.loss_weight == 1.0
self.normal_test_1d(at_loss)
self.normal_test_2d(at_loss)
self.normal_test_3d(at_loss)
def test_fbkdloss(self):
fbkdloss_cfg = dict(loss_weight=1.0)
fbkdloss = FBKDLoss(**fbkdloss_cfg)
spatial_mask = torch.randn(1, 1, 3, 3)
channel_mask = torch.randn(1, 4, 1, 1)
channel_pool_adapt = torch.randn(1, 4)
relation_adpt = torch.randn(1, 4, 3, 3)
s_input = (spatial_mask, channel_mask, channel_pool_adapt,
spatial_mask, channel_mask, relation_adpt)
t_input = (spatial_mask, channel_mask, spatial_mask, channel_mask,
relation_adpt)
fbkd_loss = fbkdloss(s_input, t_input)
self.assertTrue(fbkd_loss.numel() == 1)
def test_pkdloss(self):
pkd_loss = PKDLoss(loss_weight=1.0)
feats_S, feats_T = torch.rand(2, 256, 4, 4), torch.rand(2, 256, 4, 4)
loss = pkd_loss(feats_S, feats_T)
self.assertTrue(loss.numel() == 1)
self.assertTrue(0. <= loss <= 1.)
num_stages = 4
feats_S = (torch.rand(2, 256, 4, 4) for _ in range(num_stages))
feats_T = (torch.rand(2, 256, 4, 4) for _ in range(num_stages))
loss = pkd_loss(feats_S, feats_T)
self.assertTrue(loss.numel() == 1)
self.assertTrue(0. <= loss <= num_stages * 1.)
feats_S, feats_T = torch.rand(2, 256, 2, 2), torch.rand(2, 256, 4, 4)
loss = pkd_loss(feats_S, feats_T)
self.assertTrue(loss.numel() == 1)
self.assertTrue(0. <= loss <= 1.)
pkd_loss = PKDLoss(loss_weight=1.0, resize_stu=False)
feats_S, feats_T = torch.rand(2, 256, 2, 2), torch.rand(2, 256, 4, 4)
loss = pkd_loss(feats_S, feats_T)
self.assertTrue(loss.numel() == 1)
self.assertTrue(0. <= loss <= 1.)
def test_mgd_loss(self):
mgd_loss = MGDLoss(alpha_mgd=0.00002)
feats_S, feats_T = torch.rand(2, 256, 4, 4), torch.rand(2, 256, 4, 4)
loss = mgd_loss(feats_S, feats_T)
self.assertTrue(loss.numel() == 1)
| [
"[email protected]"
]
| |
8a8b8ba79006a28316ef9aa505f79d5b02b4b33a | 2481cde6506743565dff2b405a2396daf208ab3e | /src/ranking/management/modules/algorithm_yandex.py | 704ac764e959da19a6154d23d666eb6a1279c8cb | [
"Apache-2.0"
]
| permissive | aropan/clist | 4819a3036d179595e4df8c646aff2ed593b9dad3 | 5c805b2af71acee97f993f19d8d4e229f7f5b411 | refs/heads/master | 2023-08-31T11:15:17.987776 | 2023-08-27T21:51:14 | 2023-08-27T21:52:16 | 187,111,853 | 276 | 35 | Apache-2.0 | 2023-09-06T18:42:53 | 2019-05-16T22:57:03 | Python | UTF-8 | Python | false | false | 586 | py | # -*- coding: utf-8 -*-
import re
from ranking.management.modules import yandex
class Statistic(yandex.Statistic):
def get_standings(self, *args, **kwargs):
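        # For finals standings, fall back to one gold/silver/bronze medal each
        # when no medal configuration is provided elsewhere.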
standings = super().get_standings(*args, **kwargs)
if re.search(r'\bfinals?\b', self.name, re.I):
if 'medals' not in standings.get('options', {}) and 'medals' not in self.info.get('standings', {}):
options = standings.setdefault('options', {})
options['medals'] = [{'name': name, 'count': 1} for name in ('gold', 'silver', 'bronze')]
return standings
| [
"[email protected]"
]
| |
8a97071f6de0931196876b2e68bc90a3e6b3f411 | 6acdc11c60e874e85adba173abb90f279049ab89 | /coding.py | 38bacad180c9b8b5d530c8e109dc4a1c43653658 | []
| no_license | bala977/balaji | 3ce9123709b1be1a24b8fd2235405e9a9be68382 | ba73e7591e7c86daef86081a15a19760dcac16b8 | refs/heads/master | 2020-06-11T16:58:56.256340 | 2019-08-05T10:03:27 | 2019-08-05T10:03:27 | 194,030,685 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | #B
N1,P2=input().split()
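# The difference of two integers is even exactly when both share the same parity.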
if (int(N1)-int(P2))%2==0:
print("even")
else:
print("odd")
| [
"[email protected]"
]
| |
1bf2158bc437ca181fbc66a1c3e55214a6f792ff | 7ed9b1d87012cd11ecc1625cadbea861223e82c5 | /plugins/devices/FakeCamera.py | cfad8329be69fc9640b4f45f25b6f0e84cc7df71 | []
| no_license | dsblank/pyrobot | 577bdcb1cd68777b76aaada11ff3d3c3c5231c38 | d9c19947767a97980ec31d2096ec157bafa55f0d | refs/heads/master | 2021-01-21T21:19:48.788998 | 2019-04-01T03:55:06 | 2019-04-01T03:55:06 | 94,819,207 | 2 | 2 | null | 2019-03-30T17:03:32 | 2017-06-19T20:43:18 | Python | UTF-8 | Python | false | false | 174 | py | from pyrobot.camera.fake import FakeCamera
from pyrobot.vision.cvision import VisionSystem
def INIT(robot):
return {"camera": FakeCamera(visionSystem = VisionSystem())}
| [
"[email protected]"
]
| |
aee96f6de4e6fd2ecd47ec453c188c2895fc41c9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/173/usersdata/268/81652/submittedfiles/moedas.py | b3b9d67c5c88d783592b6b36e092be5012cfea8b | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | # -*- coding: utf-8 -*-
a=int(input('Digite o valor de a: '))
b=int(input('Digite o valor de b: '))
c=int(input('Digite o valor da cédula: '))
w=0
x9=0
# Try every count w of 'a' coins; the remainder must then be payable with 'b' coins.
while(w<c):
    duvida= a*w
    comprovacao= (c-a*w)
    if comprovacao>=0 and (comprovacao%b)==0:
        print(duvida)
        print(b*(comprovacao//b))
    else:
        x9=x9+1
    w=w+1
# If every iteration failed, the note cannot be paid exactly with 'a' and 'b'.
if(x9==c):
    print('N')
| [
"[email protected]"
]
| |
ff00a04615743bcd931d99ee7f9a21cade5d3410 | 3ea3f46bd4d7231c5eb5c1e1c02625f5290cac76 | /heart/migrations/0003_auto_20170317_1846.py | 0573637431da4546fd61ccdbdfb05f3edf19ea1b | []
| no_license | moonclearner/simpleDjangoProject | 0340b0a744651bcc9dbd7a52b12c4827d40a7a5f | 51fc70d4c499aa64e82a6f02c913f44c45cad323 | refs/heads/master | 2021-01-23T01:41:25.481027 | 2017-04-11T14:29:09 | 2017-04-11T14:29:09 | 85,927,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,117 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-17 10:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('heart', '0002_auto_20170317_1841'),
]
operations = [
migrations.AlterField(
model_name='hbeat',
name='Created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='hpluse',
name='Created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='hpres',
name='Created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='hrelax',
name='Created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='htem',
name='Created_at',
field=models.DateTimeField(auto_now_add=True),
),
]
| [
"[email protected]"
]
| |
2d80f7c9fbbe827f590fe956913c72d4b5e11451 | 0ce587a8932592fd989e0be9bf3ee65469875078 | /quantdsl/syntax.py | 6b91619504be69797bc17073add5820c3e8cf245 | [
"BSD-3-Clause"
]
| permissive | caiorss/quantdsl | 98bdb73426a874e49ee71b7f030b528c4d479e02 | eaf72f5656628530e51a3ef4d0e83a903c55b86b | refs/heads/master | 2021-01-17T21:26:14.519914 | 2014-09-17T21:25:38 | 2014-09-17T21:25:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,662 | py | import ast
from quantdsl.exceptions import DslSyntaxError
class DslParser(object):
def parse(self, dslSource, filename='<unknown>', dslClasses=None):
"""
Creates a DSL Module object from a DSL source text.
"""
self.dslClasses = {}
if dslClasses:
assert isinstance(dslClasses, dict)
self.dslClasses.update(dslClasses)
if not isinstance(dslSource, basestring):
raise DslSyntaxError("Can't parse non-string object", dslSource)
assert isinstance(dslSource, basestring)
try:
# Parse as Python source code, into a Python abstract syntax tree.
astModule = ast.parse(dslSource, filename=filename, mode='exec')
except SyntaxError, e:
raise DslSyntaxError("DSL source code is not valid Python code", e)
# Generate Quant DSL from Python AST.
return self.visitAstNode(astModule)
def visitAstNode(self, node):
"""
Identifies which "visit" method to call, according to type of node being visited.
Returns the result of calling the identified "visit" method.
"""
assert isinstance(node, ast.AST)
# Construct the "visit" method name.
dslElementName = node.__class__.__name__
methodName = 'visit' + dslElementName
# Try to get the "visit" method object.
try:
method = getattr(self, methodName)
except AttributeError:
msg = "element '%s' is not supported (visit method '%s' not found on parser): %s" % (
dslElementName, methodName, node)
raise DslSyntaxError(msg)
# Call the "visit" method object, and return the result of visiting the node.
return method(node=node)
def visitReturn(self, node):
"""
Visitor method for ast.Return nodes.
Returns the result of visiting the expression held by the return statement.
"""
assert isinstance(node, ast.Return)
return self.visitAstNode(node.value)
def visitModule(self, node):
"""
Visitor method for ast.Module nodes.
Returns a DSL Module, with a list of DSL expressions as the body.
"""
assert isinstance(node, ast.Module)
body = [self.visitAstNode(n) for n in node.body]
return self.dslClasses['Module'](body, node=node)
def visitExpr(self, node):
"""
Visitor method for ast.Expr nodes.
Returns the result of visiting the contents of the expression node.
"""
assert isinstance(node, ast.Expr)
if isinstance(node.value, ast.AST):
return self.visitAstNode(node.value)
else:
raise DslSyntaxError
def visitNum(self, node):
"""
        Visitor method for ast.Num.
Returns a DSL Number object, with the number value.
"""
assert isinstance(node, ast.Num)
return self.dslClasses['Number'](node.n, node=node)
def visitStr(self, node):
"""
Visitor method for ast.Str.
Returns a DSL String object, with the string value.
"""
assert isinstance(node, ast.Str)
return self.dslClasses['String'](node.s, node=node)
def visitUnaryOp(self, node):
"""
Visitor method for ast.UnaryOp.
Returns a specific DSL UnaryOp object (e.g UnarySub), along with the operand.
"""
assert isinstance(node, ast.UnaryOp)
args = [self.visitAstNode(node.operand)]
if isinstance(node.op, ast.USub):
dslUnaryOpClass = self.dslClasses['UnarySub']
else:
raise DslSyntaxError("Unsupported unary operator token: %s" % node.op)
return dslUnaryOpClass(node=node, *args)
def visitBinOp(self, node):
"""
Visitor method for ast.BinOp.
Returns a specific DSL BinOp object (e.g Add), along with the left and right operands.
"""
assert isinstance(node, ast.BinOp)
typeMap = {
ast.Add: self.dslClasses['Add'],
ast.Sub: self.dslClasses['Sub'],
ast.Mult: self.dslClasses['Mult'],
ast.Div: self.dslClasses['Div'],
ast.Pow: self.dslClasses['Pow'],
ast.Mod: self.dslClasses['Mod'],
ast.FloorDiv: self.dslClasses['FloorDiv'],
}
try:
dslClass = typeMap[type(node.op)]
except KeyError:
raise DslSyntaxError("Unsupported binary operator token", node.op, node=node)
args = [self.visitAstNode(node.left), self.visitAstNode(node.right)]
return dslClass(node=node, *args)
def visitBoolOp(self, node):
"""
Visitor method for ast.BoolOp.
Returns a specific DSL BoolOp object (e.g And), along with the left and right operands.
"""
assert isinstance(node, ast.BoolOp)
typeMap = {
ast.And: self.dslClasses['And'],
ast.Or: self.dslClasses['Or'],
}
try:
dslClass = typeMap[type(node.op)]
except KeyError:
raise DslSyntaxError("Unsupported boolean operator token: %s" % node.op)
else:
values = [self.visitAstNode(v) for v in node.values]
args = [values]
return dslClass(node=node, *args)
def visitName(self, node):
"""
Visitor method for ast.Name.
Returns a DSL Name object, along with the name's string.
"""
return self.dslClasses['Name'](node.id, node=node)
def visitCall(self, node):
"""
Visitor method for ast.Call.
Returns a built-in DSL expression, or a DSL FunctionCall if the name refers to a user
defined function.
"""
if node.keywords:
raise DslSyntaxError("Calling with keywords is not currently supported (positional args only).")
if node.starargs:
raise DslSyntaxError("Calling with starargs is not currently supported (positional args only).")
if node.kwargs:
raise DslSyntaxError("Calling with kwargs is not currently supported (positional args only).")
# Collect the call arg expressions (whose values will be passed into the call when it is made).
callArgExprs = [self.visitAstNode(arg) for arg in node.args]
# Check the called node is an ast.Name.
calledNode = node.func
assert isinstance(calledNode, ast.Name)
calledNodeName = calledNode.id
# Construct a DSL object for this call.
try:
# Resolve the name with a new instance of a DSL class.
dslClass = self.dslClasses[calledNodeName]
except KeyError:
# Resolve as a FunctionCall, and expect
# to resolve the name to a function def later.
dslNameClass = self.dslClasses['Name']
dslArgs = [dslNameClass(calledNodeName, node=calledNode), callArgExprs]
return self.dslClasses['FunctionCall'](node=node, *dslArgs)
else:
baseDslObjectClass = self.dslClasses['DslObject']
assert issubclass(dslClass, baseDslObjectClass), dslClass
return dslClass(node=node, *callArgExprs)
def visitFunctionDef(self, node):
"""
Visitor method for ast.FunctionDef.
Returns a named DSL FunctionDef, with a definition of the expected call argument values.
"""
name = node.name
dslFunctionArgClass = self.dslClasses['FunctionArg']
callArgDefs = [dslFunctionArgClass(arg.id, '') for arg in node.args.args]
assert len(node.body) == 1, "Function defs with more than one body statement are not supported at the moment."
decoratorNames = [astName.id for astName in node.decorator_list]
body = self.visitAstNode(node.body[0])
dslArgs = [name, callArgDefs, body, decoratorNames]
functionDef = self.dslClasses['FunctionDef'](node=node, *dslArgs)
return functionDef
def visitIfExp(self, node):
"""
Visitor method for ast.IfExp.
Returns a named DSL IfExp, with a test DSL expression and expressions whose usage is
conditional upon the test.
"""
test = self.visitAstNode(node.test)
body = self.visitAstNode(node.body)
orelse = self.visitAstNode(node.orelse)
args = [test, body, orelse]
return self.dslClasses['IfExp'](node=node, *args)
def visitIf(self, node):
"""
Visitor method for ast.If.
Returns a named DSL If object, with a test DSL expression and expressions whose usage is
conditional upon the test.
"""
test = self.visitAstNode(node.test)
assert len(node.body) == 1, "If statements with more than one body statement are not supported at the moment."
body = self.visitAstNode(node.body[0])
assert len(
node.orelse) == 1, "If statements with more than one orelse statement are not supported at the moment."
orelse = self.visitAstNode(node.orelse[0])
args = [test, body, orelse]
return self.dslClasses['If'](node=node, *args)
def visitCompare(self, node):
"""
Visitor method for ast.Compare.
Returns a named DSL Compare object, with operators (ops) and operands (comparators).
"""
left = self.visitAstNode(node.left)
opNames = [o.__class__.__name__ for o in node.ops]
comparators = [self.visitAstNode(c) for c in node.comparators]
args = [left, opNames, comparators]
return self.dslClasses['Compare'](node=node, *args)
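# Illustrative usage (a sketch, not from the project docs): the dslClasses
# mapping normally comes from the Quant DSL semantic model, so the name
# 'semanticModelClasses' below is an assumption.
#
#     parser = DslParser()
#     module = parser.parse('1 + 2', dslClasses=semanticModelClasses)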
| [
"[email protected]"
]
| |
b5097dc639ce1b85de30e5898a505721e3bb28f1 | a2e638cd0c124254e67963bda62c21351881ee75 | /Extensions/Deal Capture Examples/FPythonCode/MiniFutureInsDef.py | f25b228e0539ca28ceaaee4e44dc3bd4a628ca06 | []
| no_license | webclinic017/fa-absa-py3 | 1ffa98f2bd72d541166fdaac421d3c84147a4e01 | 5e7cc7de3495145501ca53deb9efee2233ab7e1c | refs/heads/main | 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,795 | py |
from __future__ import print_function
import acm, ael
def SetUpMiniFuture(definitionSetUp):
from DealCaptureSetup import AddInfoSetUp, CustomMethodSetUp
definitionSetUp.AddSetupItems(
AddInfoSetUp( recordType='Instrument',
fieldName='MiniFuture',
dataType='Boolean',
description='CustomInsdef',
dataTypeGroup='Standard',
subTypes=['Warrant'],
defaultValue='',
mandatory=False),
AddInfoSetUp( recordType='Instrument',
fieldName='RateMargin',
dataType='Double',
description='CustomInsdef',
dataTypeGroup='Standard',
subTypes=['Warrant'],
defaultValue='',
mandatory=False)
)
definitionSetUp.AddSetupItems(
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFuture',
methodName='MiniFuture'),
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFutureFinancingLevel',
methodName='MiniFutureFinancingLevel'),
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFutureFinancingSpread',
methodName='MiniFutureFinancingSpread'),
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFutureInterestRateMargin',
methodName='MiniFutureInterestRateMargin'),
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFutureStopLoss',
methodName='MiniFutureStopLoss'),
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFutureUnderlyingType',
methodName='MiniFutureUnderlyingType'),
CustomMethodSetUp( className='FWarrant',
customMethodName='GetMiniFutureUnderlyingType',
methodName='MiniFutureUnderlyingType'),
CustomMethodSetUp( className='FWarrant',
customMethodName='SetMiniFuture',
methodName='SetMiniFuture'),
CustomMethodSetUp( className='FWarrant',
customMethodName='SetMiniFutureFinancingLevel',
methodName='MiniFutureFinancingLevel'),
CustomMethodSetUp( className='FWarrant',
customMethodName='SetMiniFutureInterestRateMargin',
methodName='MiniFutureInterestRateMargin'),
CustomMethodSetUp( className='FWarrant',
customMethodName='SetMiniFutureStopLoss',
methodName='MiniFutureStopLoss'),
CustomMethodSetUp( className='FWarrant',
customMethodName='SetMiniFutureUnderlyingType',
methodName='MiniFutureUnderlyingType')
)
def SetUnderlyingType(instrument, underlyingType):
instrument.UnderlyingType(underlyingType)
return
def GetUnderlyingType(instrument):
return instrument.UnderlyingType()
def GetMiniFuture(instrument):
isMiniFuture = None
try:
isMiniFuture = instrument.AdditionalInfo().MiniFuture()
except Exception as e:
print ("Additional Info field missing. Please create an Additional Info field on Instrument (Warrant) of type boolean called MiniFuture and restart system.")
return isMiniFuture
def GetFinancingSpread(instrument):
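    # Spread (%) = (stop-loss barrier - financing level) / financing level * 100.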
if instrument.StrikePrice():
premium=instrument.Barrier()-instrument.StrikePrice()
premiumPercent=premium/instrument.StrikePrice()*100
return premiumPercent
else:
return 0
def SetStopLoss(instrument, stopLoss):
instrument.Barrier(stopLoss)
if instrument.StrikePrice():
premium=instrument.Barrier()-instrument.StrikePrice()
if premium < 0:
instrument.SuggestOptionType(False)
else:
instrument.SuggestOptionType(True)
return
def GetStopLoss(instrument):
return instrument.Barrier()
def SetFinancingLevel(instrument, financingLevel):
instrument.StrikePrice(financingLevel)
if instrument.StrikePrice():
premium=instrument.Barrier()-instrument.StrikePrice()
if premium < 0:
instrument.SuggestOptionType(False)
else:
instrument.SuggestOptionType(True)
return
def GetFinancingLevel(instrument):
return instrument.StrikePrice()
def SetMiniFuture(instrument, miniFuture):
try:
instrument.AdditionalInfo().MiniFuture(miniFuture)
except:
print ("Additional Info field missing. Please create an Additional Info field on Instrument (Warrant) of type boolean called MiniFuture and restart system.")
return
def SetRateMargin(instrument, rateMargin):
try:
instrument.AdditionalInfo().RateMargin(rateMargin)
except:
print ("Additional Info field missing. Please create an Additional Info field on Instrument (Warrant) of type double called RateMargin and restart system.")
def GetRateMargin(instrument):
try:
if instrument.AdditionalInfo().RateMargin():
return instrument.AdditionalInfo().RateMargin()
else:
return 0.0
except:
print ("Additional Info field missing. Please create an Additional Info field on Instrument (Warrant) of type double called RateMargin and restart system.")
def UpdateDefaultInstrument(ins):
# Not possible to set AddInfo fields on default instrument. Set Mini Future field to true.
try:
ins.AdditionalInfo().MiniFuture(True)
except:
print ("Additional Info field missing. Please create an Additional Info field on Instrument (Warrant) of type boolean called MiniFuture and restart system.")
if not ins.Exotic():
# This code will set up the Barrier if no default barrier instrument exists
ins.ExoticType('Other')
e=acm.FExotic()
ins.Exotics().Add(e)
e.RegisterInStorage()
e.BarrierOptionType("Up & In")
| [
"[email protected]"
]
| |
b506ffdba484cd5f343426bd1f96c751004ba9fa | eaeb685d13ef6c58364c5497c911f3e2f8c49a43 | /Solution/922_Sort_Array_By_Parity_II.py | 09d03afec572e8518220ff839a03cf91f98b8253 | []
| no_license | raririn/LeetCodePractice | 8b3a18e34a2e3524ec9ae8163e4be242c2ab6d64 | 48cf4f7d63f2ba5802c41afc2a0f75cc71b58f03 | refs/heads/master | 2023-01-09T06:09:02.017324 | 2020-09-10T02:34:46 | 2020-09-10T02:34:46 | 123,109,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | class Solution:
def sortArrayByParityII(self, A: List[int]) -> List[int]:
ret = [0] * len(A)
pos_e = 0
pos_o = 1
for i in A:
if i % 2 == 0:
ret[pos_e] = i
pos_e += 2
else:
ret[pos_o] = i
pos_o += 2
return ret
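# Example: Solution().sortArrayByParityII([4, 2, 5, 7]) may return [4, 5, 2, 7];
# evens land on even indices and odds on odd indices (any such answer is valid).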
'''
Runtime: 244 ms, faster than 81.55% of Python3 online submissions for Sort Array By Parity II.
Memory Usage: 16 MB, less than 8.70% of Python3 online submissions for Sort Array By Parity II.
''' | [
"[email protected]"
]
| |
8c4e0732907c0a50c71b4fd46d7db075c8ad46a5 | 760fbdca58de7e2fb146ec60905ded7497b1812b | /ibm_whcs_sdk/insights_for_medical_literature/tests/integration/test_search_typeahead.py | ee752832dabdf1753e9150dd069ef924defa1b65 | [
"Apache-2.0"
]
| permissive | dmansjur/whcs-python-sdk | c5d28742cefc65e19a7eb5de0027fe9f59b1e689 | 110a847c91d5779df91c6562394bde557ee132e5 | refs/heads/master | 2021-05-26T21:49:44.515561 | 2020-04-07T17:17:36 | 2020-04-07T17:17:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,587 | py | # coding: utf-8
# Copyright 2018 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is an example of invoking the /v1/corpora/{corpus}/search/{corpus}/typeahead REST API
# of Insights for Medical Literature.
import configparser
import ibm_whcs_sdk.insights_for_medical_literature as wh
# To access a secure environment additional parameters are needed on the constructor which are listed below
CONFIG = configparser.RawConfigParser()
CONFIG.read('./ibm_whcs_sdk/insights_for_medical_literature/tests/config.ini')
BASE_URL = CONFIG.get('settings', 'base_url')
APIKEY = CONFIG.get('settings', 'key')
IAMURL = CONFIG.get('settings', 'iam_URL')
LEVEL = CONFIG.get('settings', 'logging_level')
VERSION = CONFIG.get('settings', 'version')
DISABLE_SSL = CONFIG.get('settings', 'disable_ssl')
VERSION = CONFIG.get('settings', 'version')
CORPUS = CONFIG.get('settings', 'corpus')
ONTOLGOY = CONFIG.get('search', 'umls')
QUERY = CONFIG.get('search', 'typeahead_query')
TYPE = CONFIG.get('search', 'typeahead_type')
IML_TEST = wh.InsightsForMedicalLiteratureServiceV1(BASE_URL, APIKEY, IAMURL, VERSION, LEVEL, DISABLE_SSL)
# This test can only succeed against a custom plan instance.
def test_search_typeahead():
types = [TYPE]
ontologies = [ONTOLGOY]
response = IML_TEST.typeahead(corpus=CORPUS, query=QUERY, types=types, category='disorders', verbose=False,
limit=10, max_hit_count=1000, no_duplicates=True, ontologies=ontologies)
concept_list = wh.ConceptListModel._from_dict(response.get_result())
assert concept_list is not None
concepts = concept_list.concepts
for concept in concepts:
assert concept.cui is not None
assert concept.ontology is not None
def test_search_typeahead_verbose():
types = [TYPE]
ontologies = [ONTOLGOY]
response = IML_TEST.typeahead(corpus=CORPUS, query=QUERY, types=types, category='disorders', verbose=True,
limit=10, max_hit_count=1000, no_duplicates=True, ontologies=ontologies)
concept_list = wh.ConceptListModel._from_dict(response.get_result())
assert concept_list is not None
concepts = concept_list.concepts
for concept in concepts:
assert concept.cui is not None
assert concept.ontology is not None
def test_search_typeahead_no_corpus():
types = [TYPE]
ontologies = [ONTOLGOY]
try:
response = IML_TEST.typeahead(corpus=None, query=QUERY, types=types, category='disorders', verbose=True,
limit=10, max_hit_count=1000, no_duplicates=True, ontologies=ontologies)
except ValueError as imle:
assert imle is not None
def test_search_typeahead_verbose_no_query():
types = [TYPE]
ontologies = [ONTOLGOY]
try:
response = IML_TEST.typeahead(corpus=CORPUS, query=None, types=types, category='disorders', verbose=True,
limit=10, max_hit_count=1000, no_duplicates=True, ontologies=ontologies)
except ValueError as imle:
assert imle is not None
| [
"[email protected]"
]
| |
286e6fc6fadb3b6f3acf790a8ff9b86949ce9f42 | c9b1e04ba65ba3e0af2a8ae86b88187b72bcaa0b | /.svn/pristine/28/286e6fc6fadb3b6f3acf790a8ff9b86949ce9f42.svn-base | 906aef8455f144197811b24c002a226a851f74cc | []
| no_license | feitianyiren/TaskCoach | 7762a89d5b521cfba0827323a9e8a91d1579810b | 0b7427562074845ac771e59e24a750aa5b432589 | refs/heads/master | 2020-04-08T04:56:35.491490 | 2016-01-12T13:29:03 | 2016-01-12T13:29:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,438 | import patterns, time, copy
import domain.date as date
class TaskProperty(property):
pass
class Task(patterns.ObservableComposite):
def __init__(self, subject='', description='', dueDate=None,
startDate=None, completionDate=None, budget=None,
priority=0, id_=None, lastModificationTime=None, hourlyFee=0,
fixedFee=0, reminder=None, attachments=None, categories=None,
efforts=None,
shouldMarkCompletedWhenAllChildrenCompleted=None, *args, **kwargs):
super(Task, self).__init__(*args, **kwargs)
self._subject = subject
self._description = description
self._dueDate = dueDate or date.Date()
self._startDate = startDate or date.Today()
self._completionDate = completionDate or date.Date()
self._budget = budget or date.TimeDelta()
self._id = id_ or '%s:%s'%(id(self), time.time()) # FIXME: Not a valid XML id
self._efforts = efforts or []
for effort in self._efforts:
effort.setTask(self)
self._categories = set(categories or [])
self._priority = priority
self._hourlyFee = hourlyFee
self._fixedFee = fixedFee
self._reminder = reminder
self._attachments = attachments or []
self._shouldMarkCompletedWhenAllChildrenCompleted = \
shouldMarkCompletedWhenAllChildrenCompleted
self.setLastModificationTime(lastModificationTime)
def __setstate__(self, state):
self.setSubject(state['subject'])
self.setDescription(state['description'])
self.setId(state['id'])
self.setStartDate(state['startDate'])
self.setDueDate(state['dueDate'])
self.setCompletionDate(state['completionDate'])
self.replaceChildren(state['children'])
self.replaceParent(state['parent'])
self.setEfforts(state['efforts'])
self.setBudget(state['budget'])
self.setCategories(state['categories'])
self.setPriority(state['priority'])
self.setAttachments(state['attachments'])
self.setHourlyFee(state['hourlyFee'])
self.setFixedFee(state['fixedFee'])
self.shouldMarkCompletedWhenAllChildrenCompleted = \
state['shouldMarkCompletedWhenAllChildrenCompleted']
def __getstate__(self):
return dict(subject=self._subject, description=self._description,
id=self._id, dueDate=self._dueDate, startDate=self._startDate,
completionDate=self._completionDate, children=self.children(),
parent=self.parent(), efforts=self._efforts, budget=self._budget,
categories=set(self._categories), priority=self._priority,
attachments=self._attachments[:], hourlyFee=self._hourlyFee,
fixedFee=self._fixedFee,
shouldMarkCompletedWhenAllChildrenCompleted=\
self._shouldMarkCompletedWhenAllChildrenCompleted)
def __repr__(self):
return self._subject
def id(self):
return self._id
def setId(self, id):
self._id = id
def __notifyObservers(self, event):
patterns.Publisher().notifyObservers(event)
# I want to use properties more, but I still need to make all the changes.
# So, only description is a property right now.
def __getDescription(self):
return self.__description
def __setDescription(self, description):
self.__description = description
_description = TaskProperty(__getDescription, __setDescription)
def description(self):
return self._description
def setDescription(self, description):
if description != self._description:
self._description = description
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.description',
description))
def allChildrenCompleted(self):
if not self.children():
return False
for child in self.children():
if not child.completed():
return False
return True
def copy(self):
''' Copy constructor '''
return self.__class__(self.subject(), self.description(),
self.dueDate(), self.startDate(), self.completionDate(),
parent=self.parent(),
budget=self.budget(), priority=self.priority(),
categories=set(self.categories()), fixedFee=self.fixedFee(),
hourlyFee=self.hourlyFee(), attachments=self.attachments()[:],
reminder=self.reminder(),
shouldMarkCompletedWhenAllChildrenCompleted=\
self.shouldMarkCompletedWhenAllChildrenCompleted,
children=[child.copy() for child in self.children()])
def newChild(self, subject='New subtask'):
''' Subtask constructor '''
return super(Task, self).newChild(subject=subject,
dueDate=self.dueDate(),
startDate=max(date.Today(), self.startDate()), parent=self)
def addChild(self, child):
if child in self.children():
return
oldTotalBudgetLeft = self.budgetLeft(recursive=True)
oldTotalPriority = self.priority(recursive=True)
super(Task, self).addChild(child)
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, Task.addChildEventType(), child))
newTotalBudgetLeft = self.budgetLeft(recursive=True)
if child.budget(recursive=True):
self.notifyObserversOfTotalBudgetChange()
if newTotalBudgetLeft != oldTotalBudgetLeft:
self.notifyObserversOfTotalBudgetLeftChange()
if child.timeSpent(recursive=True):
self.notifyObserversOfTotalTimeSpentChange()
if child.priority(recursive=True) > oldTotalPriority:
self.notifyObserversOfTotalPriorityChange()
if child.revenue(recursive=True):
self.notifyObserversOfTotalRevenueChange()
if child.isBeingTracked(recursive=True):
self.notifyObserversOfStartTracking(*child.activeEfforts(recursive=True))
def removeChild(self, child):
if child not in self.children():
return
oldTotalBudgetLeft = self.budgetLeft(recursive=True)
oldTotalPriority = self.priority(recursive=True)
super(Task, self).removeChild(child)
self.setLastModificationTime()
newTotalBudgetLeft = self.budgetLeft(recursive=True)
if child.budget(recursive=True):
self.notifyObserversOfTotalBudgetChange()
if newTotalBudgetLeft != oldTotalBudgetLeft:
self.notifyObserversOfTotalBudgetLeftChange()
if child.timeSpent(recursive=True):
self.notifyObserversOfTotalTimeSpentChange()
if child.priority(recursive=True) == oldTotalPriority:
self.notifyObserversOfTotalPriorityChange()
if child.revenue(recursive=True):
self.notifyObserversOfTotalRevenueChange()
if child.isBeingTracked(recursive=True) and not \
self.isBeingTracked(recursive=True):
self.notifyObserversOfStopTracking(*child.activeEfforts(recursive=True))
def subject(self, recursive=False):
''' The recursive flag is allowed, but ignored. This makes
task.sorter.Sorter.__createRegularSortKey easier. '''
return self._subject
def setSubject(self, subject):
if subject != self._subject:
self._subject = subject
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.subject', subject))
def dueDate(self, recursive=False):
if recursive:
childrenDueDates = [child.dueDate(recursive=True) for child in self.children() if not child.completed()]
return min(childrenDueDates + [self._dueDate])
else:
return self._dueDate
def setDueDate(self, dueDate):
if dueDate != self._dueDate:
self._dueDate = dueDate
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.dueDate', dueDate))
def startDate(self, recursive=False):
if recursive:
childrenStartDates = [child.startDate(recursive=True) for child in self.children() if not child.completed()]
return min(childrenStartDates+[self._startDate])
else:
return self._startDate
def setStartDate(self, startDate):
if startDate != self._startDate:
self._startDate = startDate
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.startDate',
startDate))
def timeLeft(self, recursive=False):
return self.dueDate(recursive) - date.Today()
def completionDate(self, recursive=False):
if recursive:
childrenCompletionDates = [child.completionDate(recursive=True) \
for child in self.children() if child.completed()]
return max(childrenCompletionDates+[self._completionDate])
else:
return self._completionDate
def setCompletionDate(self, completionDate=None):
completionDate = completionDate or date.Today()
if completionDate != self._completionDate:
self._completionDate = completionDate
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.completionDate',
completionDate))
if completionDate != date.Date():
self.setReminder(None)
def completed(self):
return self.completionDate() != date.Date()
def overdue(self):
return self.dueDate() < date.Today() and not self.completed()
def inactive(self):
return (self.startDate() > date.Today()) and not self.completed()
def active(self):
return not self.inactive() and not self.completed()
def dueToday(self):
return (self.dueDate() == date.Today() and not self.completed())
def dueTomorrow(self):
return (self.dueDate() == date.Tomorrow() and not self.completed())
# effort related methods:
def efforts(self, recursive=False):
childEfforts = []
if recursive:
for child in self.children():
childEfforts.extend(child.efforts(recursive=True))
return self._efforts + childEfforts
def activeEfforts(self, recursive=False):
return [effort for effort in self.efforts(recursive) \
if effort.isBeingTracked()]
def nrActiveEfforts(self):
return len(self.activeEfforts())
def isBeingTracked(self, recursive=False):
return self.activeEfforts(recursive)
def addEffort(self, effort):
wasTracking = self.isBeingTracked()
if effort not in self._efforts:
self._efforts.append(effort)
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.effort.add',
effort))
if effort.isBeingTracked() and not wasTracking:
self.notifyObserversOfStartTracking(effort)
self.notifyObserversOfTimeSpentChange()
def removeEffort(self, effort):
if effort in self._efforts:
self._efforts.remove(effort)
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.effort.remove',
effort))
if effort.isBeingTracked() and not self.isBeingTracked():
self.notifyObserversOfStopTracking(effort)
self.notifyObserversOfTimeSpentChange()
def setEfforts(self, efforts):
self._efforts = efforts # FIXME: no notification?
def timeSpent(self, recursive=False):
if recursive:
return self._myTimeSpent() + self._childrenTimeSpent()
else:
return self._myTimeSpent()
def stopTracking(self):
stoppedEfforts = []
for effort in self.activeEfforts():
effort.setStop()
stoppedEfforts.append(effort)
if stoppedEfforts:
self.setLastModificationTime()
return stoppedEfforts
def budget(self, recursive=False):
result = self._budget
if recursive:
for task in self.children():
result += task.budget(recursive)
return result
def setBudget(self, budget):
if budget != self._budget:
self._budget = budget
self.setLastModificationTime()
self.notifyObserversOfBudgetChange()
self.notifyObserversOfBudgetLeftChange()
def budgetLeft(self, recursive=False):
budget = self.budget(recursive)
if budget:
return budget - self.timeSpent(recursive)
else:
return budget
def _myTimeSpent(self):
return sum([effort.duration() for effort in self.efforts()],
date.TimeDelta())
def _childrenTimeSpent(self):
return sum([child.timeSpent(recursive=True) \
for child in self.children()], date.TimeDelta())
def notifyObserversOfBudgetChange(self):
self.__notifyObservers(patterns.Event(self, 'task.budget', self.budget()))
self.notifyObserversOfTotalBudgetChange()
def notifyObserversOfTotalBudgetChange(self):
self.__notifyObservers(patterns.Event(self, 'task.totalBudget',
self.budget(recursive=True)))
parent = self.parent()
if parent:
parent.notifyObserversOfTotalBudgetChange()
def notifyObserversOfBudgetLeftChange(self):
self.__notifyObservers(patterns.Event(self, 'task.budgetLeft',
self.budgetLeft()))
self.notifyObserversOfTotalBudgetLeftChange()
def notifyObserversOfTotalBudgetLeftChange(self):
self.__notifyObservers(patterns.Event(self, 'task.totalBudgetLeft',
self.budgetLeft(recursive=True)))
parent = self.parent()
if parent:
parent.notifyObserversOfTotalBudgetLeftChange()
def notifyObserversOfTimeSpentChange(self):
self.__notifyObservers(patterns.Event(self, 'task.timeSpent',
self.timeSpent()))
self.notifyObserversOfTotalTimeSpentChange()
if self.budget():
self.notifyObserversOfBudgetLeftChange()
elif self.budget(recursive=True):
self.notifyObserversOfTotalBudgetLeftChange()
if self.hourlyFee() > 0:
self.notifyObserversOfRevenueChange()
def notifyObserversOfTotalTimeSpentChange(self):
self.__notifyObservers(patterns.Event(self, 'task.totalTimeSpent',
self.timeSpent(recursive=True)))
parent = self.parent()
if parent:
parent.notifyObserversOfTotalTimeSpentChange()
def notifyObserversOfStartTracking(self, *trackedEfforts):
self.__notifyObservers(patterns.Event(self, 'task.track.start',
*trackedEfforts))
parent = self.parent()
if parent:
parent.notifyObserversOfStartTracking(*trackedEfforts)
def notifyObserversOfStopTracking(self, *trackedEfforts):
self.__notifyObservers(patterns.Event(self, 'task.track.stop',
*trackedEfforts))
parent = self.parent()
if parent:
parent.notifyObserversOfStopTracking(*trackedEfforts)
# categories
def categories(self, recursive=False):
result = set(self._categories)
if recursive and self.parent() is not None:
result |= self.parent().categories(recursive=True)
return result
def addCategory(self, category):
if category not in self._categories:
self._categories.add(category)
category.addTask(self)
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.category.add',
category))
def removeCategory(self, category):
if category in self._categories:
self._categories.discard(category)
category.removeTask(self)
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.category.remove',
category))
def setCategories(self, categories):
self._categories = categories # FIXME: no notification?
# priority
def priority(self, recursive=False):
if recursive:
childPriorities = [child.priority(recursive=True) \
for child in self.children()]
return max(childPriorities + [self._priority])
else:
return self._priority
def setPriority(self, priority):
if priority != self._priority:
self._priority = priority
self.setLastModificationTime()
self.notifyObserversOfPriorityChange()
def notifyObserversOfPriorityChange(self):
self.__notifyObservers(patterns.Event(self, 'task.priority',
self.priority()))
self.notifyObserversOfTotalPriorityChange()
def notifyObserversOfTotalPriorityChange(self):
myTotalPriority = self.priority(recursive=True)
self.__notifyObservers(patterns.Event(self, 'task.totalPriority',
myTotalPriority))
parent = self.parent()
if parent and myTotalPriority == parent.priority(recursive=True):
parent.notifyObserversOfTotalPriorityChange()
# modifications
def lastModificationTime(self, recursive=False):
if recursive:
childModificationTimes = [child.lastModificationTime(recursive=True) for child in self.children()]
return max(childModificationTimes + [self._lastModificationTime])
else:
return self._lastModificationTime
def setLastModificationTime(self, time=None):
self._lastModificationTime = time or date.DateTime.now()
# revenue
def hourlyFee(self, recursive=False):
return self._hourlyFee
def setHourlyFee(self, hourlyFee):
if hourlyFee != self._hourlyFee:
self._hourlyFee = hourlyFee
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.hourlyFee',
hourlyFee))
if self.timeSpent() > date.TimeDelta():
self.notifyObserversOfRevenueChange()
def revenue(self, recursive=False):
if recursive:
childRevenues = sum(child.revenue(recursive) for child in self.children())
else:
childRevenues = 0
return self.timeSpent().hours() * self.hourlyFee() + self.fixedFee() + childRevenues
def fixedFee(self, recursive=False):
if recursive:
childFixedFees = sum(child.fixedFee(recursive) for child in self.children())
else:
childFixedFees = 0
return self._fixedFee + childFixedFees
def setFixedFee(self, fixedFee):
if fixedFee != self._fixedFee:
self._fixedFee = fixedFee
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.fixedFee',
fixedFee))
self.notifyObserversOfRevenueChange()
def notifyObserversOfRevenueChange(self):
self.__notifyObservers(patterns.Event(self, 'task.revenue',
self.revenue()))
self.notifyObserversOfTotalRevenueChange()
def notifyObserversOfTotalRevenueChange(self):
self.__notifyObservers(patterns.Event(self, 'task.totalRevenue',
self.revenue(recursive=True)))
parent = self.parent()
if parent:
parent.notifyObserversOfTotalRevenueChange()
# reminder
def reminder(self):
return self._reminder
def setReminder(self, reminderDateTime=None):
if reminderDateTime == date.DateTime.max:
reminderDateTime = None
if reminderDateTime != self._reminder:
self._reminder = reminderDateTime
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.reminder',
self._reminder))
# attachments
def attachments(self):
return self._attachments
def addAttachments(self, *attachments):
if attachments:
self._attachments.extend(attachments)
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.attachment.add',
*attachments))
def removeAttachments(self, *attachments):
attachmentsRemoved = []
for attachment in attachments:
if attachment in self._attachments:
self._attachments.remove(attachment)
attachmentsRemoved.append(attachment)
if attachmentsRemoved:
self.setLastModificationTime()
self.__notifyObservers(patterns.Event(self, 'task.attachment.remove',
*attachmentsRemoved))
def removeAllAttachments(self):
self.removeAttachments(*self._attachments)
def setAttachments(self, attachments):
self._attachments = attachments # FIXME: no notification?
# behavior
    # To experiment, this attribute is coded by means of a property, which
# means you can set it like this: task.shouldMark... = True
def __setShouldMarkCompletedWhenAllChildrenCompleted(self, newValue):
if newValue == self._shouldMarkCompletedWhenAllChildrenCompleted:
return
self._shouldMarkCompletedWhenAllChildrenCompleted = newValue
self.__notifyObservers(patterns.Event(self,
'task.setting.shouldMarkCompletedWhenAllChildrenCompleted',
newValue))
def __getShouldMarkCompletedWhenAllChildrenCompleted(self):
return self._shouldMarkCompletedWhenAllChildrenCompleted
shouldMarkCompletedWhenAllChildrenCompleted = \
property(fget=__getShouldMarkCompletedWhenAllChildrenCompleted,
fset=__setShouldMarkCompletedWhenAllChildrenCompleted)
| [
"[email protected]"
]
| ||
6dbabae65ed075a12913a12e8af2019751371a24 | fdd2ed32e45ca3dcc978cf7e5af76d2afd8cb9f9 | /87.py | ecdbdd1ab3aee85a6e5da158c4511eb5ef0c7440 | []
| no_license | Narendon123/python | e5295e5b71867fd6a90d080c01e2db6930659f95 | cf0b3dd4ff4eb4d6d44f061b45d00baa25de5a38 | refs/heads/master | 2020-05-31T06:06:19.230781 | 2019-07-11T12:51:25 | 2019-07-11T12:51:25 | 190,134,515 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | w=input()
w=w.split()
a=int(w[0])
b=int(w[1])
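# Brute-force GCD: test every i up to min(a, b); the last common divisor found is the answer.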
i=1
while(i<=a and i<=b):
if(a%i==0 and b%i==0):
gcd=i
i=i+1
print(gcd)
| [
"[email protected]"
]
| |
8a33374b9c01ded55865a5c9464ca843e32074d6 | 37220d7b60d682eb1abf40326d061485581aab36 | /ajax/urls.py | b06af246996089bc8452ee5a25eabcdc705623a1 | [
"BSD-3-Clause"
]
| permissive | lautarianoo/LautAvito | 547fba9a0bb3a65aac6132e00382a8876bca4a28 | 106dcb6f04230af2540bd3883c85713828cd051c | refs/heads/master | 2023-06-24T11:08:17.889875 | 2021-07-26T17:30:08 | 2021-07-26T17:30:08 | 377,897,865 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | from django.urls import path
from . import views
urlpatterns = [
path('get_districts', views.get_districts, name='get_districts')
]
| [
"neonchick1"
]
| neonchick1 |
b3589abd67feb9c0e4a4504c8763190db14f3597 | c08721ea0ab07fbf866b0612d315fed770a4e260 | /docs/source/conf.py | 5d8789ab32ae4f9a3d934bae3cd3c78ab84299bf | [
"MIT"
]
| permissive | gunnarx/pyfranca | 84262c7f760eeeb60285fae401ff4f3d9b1d6f67 | d986e4b97229f9673d8349b2de77c541a9377faf | refs/heads/master | 2022-05-03T13:07:35.027609 | 2022-04-12T07:51:30 | 2022-04-12T21:15:10 | 88,442,258 | 3 | 1 | MIT | 2022-04-12T21:15:12 | 2017-04-16T21:00:37 | Python | UTF-8 | Python | false | false | 9,369 | py | # -*- coding: utf-8 -*-
#
# pyfranca documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 13 03:43:16 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from recommonmark.parser import CommonMarkParser
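# Note (added, hedged): the source_parsers setting used at the bottom of this
# file was deprecated in Sphinx 1.8; newer recommonmark releases register as a
# Sphinx extension instead, e.g. by adding 'recommonmark' to extensions.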
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
]
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyFranca'
copyright = u'2016-2017, Kaloyan Tenchov'
author = u'Kaloyan Tenchov'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.4.1'
# The full version, including alpha/beta/rc tags.
release = u'0.4.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["**tests**"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyfrancadoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pyfranca.tex', u'PyFranca Documentation',
u'Kaloyan Tenchov', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pyfranca', u'PyFranca Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pyfranca', u'PyFranca Documentation',
author, 'pyfranca', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
source_parsers = {
'.md': CommonMarkParser
} | [
"[email protected]"
]
| |
49c119a4c7491a7b5b8bcf0c18b0dbbd7e0c9b34 | 19ac8aa8ee916cef99ddc85b6565c4d6fbe40749 | /FunctionsAndFunctionalProgramming/functionalFizzing.py | 953c863c737f9d99921591c2b75d1cc537db621e | []
| no_license | Darrenrodricks/IntermediatePythonNanodegree | 53570bb1b97d9d10d6e6bd19d3a1f8f654a1cfe9 | 5e597fbe147c23b694fc9b354797e443f0a87a67 | refs/heads/main | 2023-06-25T18:54:38.962503 | 2021-07-28T17:15:44 | 2021-07-28T17:15:44 | 389,006,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | import helper
def generate_cases():
    """Generate an infinite stream of successively larger random lists."""
a = 0
while True:
yield helper.random_list(a)
a += 1
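def first_cases(n):
    # Added helper (hedged sketch): itertools.islice is the idiomatic way to
    # take a fixed number of items from an infinite generator without writing
    # an explicit break, as the __main__ block below does.
    import itertools
    return list(itertools.islice(generate_cases(), n))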
if __name__ == '__main__':
for case in generate_cases():
if len(case) > 10:
break
print(case) | [
"[email protected]"
]
| |
d5f638b16f492c7594a3fcea541c45e8aae9fab2 | 2435099201902a12689621baba62f7799a260ae3 | /backend/red_frost_25038/urls.py | 990009da86771f717c4f523d324c2d500dcb88a5 | []
| no_license | crowdbotics-apps/red-frost-25038 | eab0bada99927f8f7d76f4866bbcf042be762a0d | cfb48c84f707a558d0cf6405f5057371bdcb2778 | refs/heads/master | 2023-03-30T10:07:45.116090 | 2021-03-15T15:46:25 | 2021-03-15T15:46:25 | 348,029,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,217 | py | """red_frost_25038 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Red Frost"
admin.site.site_title = "Red Frost Admin Portal"
admin.site.index_title = "Red Frost Admin"
# swagger
api_info = openapi.Info(
title="Red Frost API",
default_version="v1",
description="API documentation for Red Frost App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
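# Note (added): with permission_classes=(permissions.IsAuthenticated,) above,
# /api-docs/ is only reachable by logged-in users; permissions.AllowAny would
# make the documentation public.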
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
TemplateView.as_view(template_name='index.html'))]
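# Note (added): Django resolves URL patterns in order, so this catch-all
# re_path must remain the last entry or it will shadow every route above it.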
| [
"[email protected]"
]
|