Dataset column schema (min–max ranges as reported; ⌀ marks columns containing null values):

| Column | Type | Range / Classes |
|---|---|---|
| blob_id | string | lengths 40–40 |
| directory_id | string | lengths 40–40 |
| path | string | lengths 3–616 |
| content_id | string | lengths 40–40 |
| detected_licenses | sequence | lengths 0–112 |
| license_type | string | 2 classes |
| repo_name | string | lengths 5–115 |
| snapshot_id | string | lengths 40–40 |
| revision_id | string | lengths 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M, ⌀ |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, ⌀ |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, ⌀ |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | lengths 3 – 10.2M |
| authors | sequence | lengths 1–1 |
| author_id | string | lengths 1–132 |

blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dd97094e0e53418b16229ca0ca1a5efacd5e520f | 1b53325f6976bd2697f1d9678054b8a1e5dd059c | /update/without_expansion/2.run_calculate_concept_map.py | d0f902e4761716435b798ad4bda40a5255298bc5 | [
"MIT"
] | permissive | vsoch/semantic-image-comparison | d34150b4fed36d55f934e727297ee188951e3ed9 | ab029ad124fc6d6e7ae840c24a8e9471d8737525 | refs/heads/master | 2020-04-06T07:04:21.726094 | 2016-08-13T23:13:10 | 2016-08-13T23:13:10 | 48,921,431 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,208 | py | #!/usr/bin/python
from glob import glob
import sys
import pandas
import os
# Classification framework
# for image1 in all images:
#     for image2 in all images:
#         if image1 != image2:
#             hold out image 1 and image 2, generate regression parameter matrix using other images
#             generate predicted image for image 1 [PR1]
#             generate predicted image for image 2 [PR2]
#             classify image 1 as fitting best to PR1 or PR2
#             classify image 2 as fitting best to PR1 or PR2
base = sys.argv[1]
update = "%s/update" %base
output_folder = "%s/classification" %update # any kind of tsv/result file
results = "%s/results" %update # any kind of tsv/result file
for x in [output_folder,results]:
if not os.path.exists(x):
os.mkdir(x)
# Images by Concepts data frame (NOT including all levels of ontology)
labels_tsv = "%s/concepts_binary_df.tsv" %update
image_lookup = "%s/image_nii_lookup.pkl" %update
df = pandas.read_csv(labels_tsv,sep="\t",index_col=0)
for image1_holdout in df.index.tolist():
    print("Parsing %s" % image1_holdout)
for image2_holdout in df.index.tolist():
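        # handle each unordered pair exactly once and skip self-comparisons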
if (image1_holdout != image2_holdout) and (image1_holdout < image2_holdout):
output_file = "%s/%s_%s_predict.pkl" %(output_folder,image1_holdout,image2_holdout)
if not os.path.exists(output_file):
job_id = "%s_%s" %(image1_holdout,image2_holdout)
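            # the .job and .out directories are assumed to already exist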
filey = ".job/class_%s.job" %(job_id)
filey = open(filey,"w")
filey.writelines("#!/bin/bash\n")
filey.writelines("#SBATCH --job-name=%s\n" %(job_id))
filey.writelines("#SBATCH --output=.out/%s.out\n" %(job_id))
filey.writelines("#SBATCH --error=.out/%s.err\n" %(job_id))
filey.writelines("#SBATCH --time=2-00:00\n")
filey.writelines("#SBATCH --mem=32000\n")
filey.writelines("python 2.calculate_concept_map.py %s %s %s %s %s" %(image1_holdout, image2_holdout, output_file, labels_tsv, image_lookup))
filey.close()
os.system("sbatch -p russpold --qos russpold " + ".job/class_%s.job" %(job_id))
| [
"[email protected]"
] | |
f44574379435b1f2cd4ce38956cd022587c8a169 | f64fde1c4ae338987b76c10c1029468143f1d83a | /Test_programs/stacking_arm/main.py | 86a75d8333a3fe74d564dc64820892d75fccba01 | [] | no_license | abhijithneilabraham/Project-ANTON | 56a21941042034c9c2b407e25d4e75925a158e71 | 03478d9c9a537c2507a06e3c022a1092587cdc06 | refs/heads/master | 2023-04-01T21:01:14.568164 | 2020-05-01T14:19:24 | 2020-05-01T14:19:24 | 203,203,760 | 2 | 0 | null | 2023-03-24T22:42:40 | 2019-08-19T15:52:11 | Python | UTF-8 | Python | false | false | 1,285 | py | """
Make it more robust.
Stop the episode once the finger stops at the final position for 50 steps.
Feature & reward engineering.
"""
from env import ArmEnv
from rl import DDPG
MAX_EPISODES = 900
MAX_EP_STEPS = 200
ON_TRAIN = False
# set env
env = ArmEnv()
s_dim = env.state_dim
a_dim = env.action_dim
a_bound = env.action_bound
rl = DDPG(a_dim, s_dim, a_bound)
steps = []
print(s_dim)
def train():
# start training
for i in range(MAX_EPISODES):
s = env.reset()
ep_r = 0.
for j in range(MAX_EP_STEPS):
# env.render()
a = rl.choose_action(s)
s_, r, done = env.step(a)
rl.store_transition(s, a, r, s_)
ep_r += r
if rl.memory_full:
# start to learn once has fulfilled the memory
rl.learn()
s = s_
if done or j == MAX_EP_STEPS-1:
print('Ep: %i | %s | ep_r: %.1f | step: %i' % (i, '---' if not done else 'done', ep_r, j))
break
rl.save()
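# Note: this eval shadows the built-in eval(); it restores the saved policy and replays it in the viewer.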
def eval():
rl.restore()
env.render()
env.viewer.set_vsync(True)
s = env.reset()
while True:
env.render()
a = rl.choose_action(s)
s, r, done = env.step(a)
if ON_TRAIN:
    train()
else:
    eval()
| [
"[email protected]"
] | |
96a2c8ceb28ab064438abaa8b14ad96c713bff9c | b1d921644161105c3fa12d51702565a22b3e0d1e | /typeidea/blog/migrations/0001_initial.py | 84095c3a37f3779d83ece9dee0a3985fb3718f2e | [] | no_license | FATE-0/blog | 01e74a1f105ea2fc1b27e69be376ce4270e32f13 | fca878f68f8dc67a4e8b75d9c8f109d6e820375d | refs/heads/master | 2020-06-19T10:17:35.152719 | 2019-07-19T11:17:26 | 2019-07-19T11:17:26 | 196,675,430 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,347 | py | # Generated by Django 2.2.3 on 2019-07-14 08:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
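    # Auto-generated initial migration: creates the Category, Tag, and Post models, each owned via a ForeignKey to the project's user model.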
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, verbose_name='Name')),
                ('status', models.PositiveIntegerField(choices=[(1, 'Normal'), (0, 'Deleted')], default=1, verbose_name='Status')),
                ('is_nav', models.BooleanField(default=False, verbose_name='Is navigation')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='Created time')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Author')),
            ],
            options={
                'verbose_name': 'Category',
                'verbose_name_plural': 'Categories',
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=10, verbose_name='Name')),
                ('status', models.PositiveIntegerField(choices=[(1, 'Normal'), (0, 'Deleted')], default=1, verbose_name='Status')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='Created time')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Author')),
            ],
            options={
                'verbose_name': 'Tag',
                'verbose_name_plural': 'Tags',
},
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255, verbose_name='Title')),
                ('desc', models.CharField(blank=True, max_length=1024, verbose_name='Summary')),
                ('content', models.TextField(help_text='The body text must be in Markdown format', verbose_name='Body')),
                ('status', models.PositiveIntegerField(choices=[(1, 'Normal'), (0, 'Deleted'), (2, 'Draft')], default=1, verbose_name='Status')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='Created time')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Category', verbose_name='Category')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Author')),
                ('tag', models.ManyToManyField(to='blog.Tag', verbose_name='Tags')),
            ],
            options={
                'verbose_name': 'Post',
                'verbose_name_plural': 'Posts',
'ordering': ['-id'],
},
),
]
| [
"[email protected]"
] | |
79ce3d8730d723b0fcac829ec4d82d885bbddb31 | a2e638cd0c124254e67963bda62c21351881ee75 | /Extensions/StructuredProductsDealPackage/FPythonCode/SP_TrfExerciseCalculations.py | 908ccec668aabaa4a82d6a6f0e52e0a4cb029113 | [] | no_license | webclinic017/fa-absa-py3 | 1ffa98f2bd72d541166fdaac421d3c84147a4e01 | 5e7cc7de3495145501ca53deb9efee2233ab7e1c | refs/heads/main | 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,027 | py |
#***************************************************************
#
# Module calculating the settlement amounts
#
#***************************************************************
import acm
from SP_TrfUtils import BuySellMapping, TrfHasBarrier, TrfExpiryEvent, TrfExpiryEventsSortedByDate
from SP_DealPackageHelper import GetCurrencyPairPointsDomesticPerForeign, GetCurrencyPairPointsForeignPerDomestic
epsilon = 0.000001
def IsDomesticPerForeign(rateDirection):
if rateDirection not in ('DomesticPerForeign', 'ForeignPerDomestic'):
raise RuntimeError ('Invalid value for parameter "rateDirection"')
return rateDirection == 'DomesticPerForeign'
def BuySellAsMultiplier(buySellForeign):
if buySellForeign == 'SELL':
return -1.0
elif buySellForeign == 'BUY':
return 1.0
else:
raise RuntimeError ('Invalid value for parameter "buySellForeign"')
def RateDirectionAsMultiplier(rateDirection):
return 1.0 if IsDomesticPerForeign(rateDirection) else -1.0
def InverseTargetAsMultiplier(inverseTarget):
return -1.0 if inverseTarget is True else 1.0
def FxRateValueToUse(rate, inverse, inverseMultiplier = 1.0):
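    # invert the rate when requested, guarding against near-zero rates; e.g. FxRateValueToUse(1.25, True) == 0.8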
return inverseMultiplier * (rate if inverse is False or abs(rate) < epsilon else 1.0/rate)
def IntrinsicForAccumulation(strike, fixing, inverseTarget, buySellForeign, rateDirection):
accumulationStrike = FxRateValueToUse(strike, inverseTarget == IsDomesticPerForeign(rateDirection), -1.0 if inverseTarget else 1.0 )
accumulationFixing = FxRateValueToUse(fixing, inverseTarget == IsDomesticPerForeign(rateDirection), -1.0 if inverseTarget else 1.0 )
return IntrinsicValue(accumulationStrike, accumulationFixing, buySellForeign)
def AdjustedStrike(strike, fixing, target, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection):
if exactTarget is False or target < epsilon:
return strike
else:
accumulation = IntrinsicForAccumulation(strike, fixing, inverseTarget, buySellForeign, rateDirection)
return strike if accumulation < (target - previousTarget) else AdjustStrikeWithRemainingTarget(fixing, target - previousTarget, inverseTarget, buySellForeign, rateDirection)
def AdjustedStrikeCommodity(strike, fixing, target, previousTarget, exactTarget):
if exactTarget is False or target < epsilon:
return strike
else:
accumulation = fixing - strike
if accumulation < (target - previousTarget):
return strike
else:
return fixing - (target - previousTarget)
def AdjustStrikeMultiplier(inverseTarget, buySellForeign):
return BuySellAsMultiplier(buySellForeign) * InverseTargetAsMultiplier(inverseTarget)
def AdjustStrikeWithRemainingTarget(fixing, remainingTarget, inverseTarget, buySellForeign, rateDirection):
fixingForAdjustment = FxRateValueToUse(fixing, inverseTarget == IsDomesticPerForeign(rateDirection), 1.0)
adjustedStrike = fixingForAdjustment - (remainingTarget * AdjustStrikeMultiplier(inverseTarget, buySellForeign))
return FxRateValueToUse(adjustedStrike, inverseTarget == IsDomesticPerForeign(rateDirection), 1.0)
def IntrinsicValue(strike, fixing, buySellForeign):
intrinsicValue = fixing - strike
if buySellForeign != None:
intrinsicValue *= BuySellAsMultiplier(buySellForeign)
return intrinsicValue
def IntrinsicValueForPayOff(strike, fixing, buySellForeign, rateDirection):
if rateDirection == None:
strikeToUse = strike
fixingToUse = fixing
else:
strikeToUse = FxRateValueToUse(strike, not IsDomesticPerForeign(rateDirection))
fixingToUse = FxRateValueToUse(fixing, not IsDomesticPerForeign(rateDirection))
return IntrinsicValue(strikeToUse, fixingToUse, buySellForeign)
def StrikeAdjustedIntrinsicValue(strike, fixing, target, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection):
adjustedStrike = AdjustedStrike(strike, fixing, target, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection)
return IntrinsicValueForPayOff(adjustedStrike, fixing, buySellForeign, rateDirection)
def StrikeAdjustedIntrinsicValueCommodity(strike, fixing, target, previousTarget, exactTarget):
adjustedStrike = AdjustedStrikeCommodity(strike, fixing, target, previousTarget, exactTarget)
return IntrinsicValueForPayOff(adjustedStrike, fixing, None, None)
def NotionalAtStrike(notional1, notional2, notionalAtStrike, hasBarrier):
if notionalAtStrike == 'Notional 1':
return notional1
elif notionalAtStrike == 'Notional 2' and hasBarrier is False:
return notional2
return 0.0
def NotionalAmount(notional1, notional2, strike, fixing, buySellForeign, strikeDirection, hasBarrier, notionalAtStrike = None):
intrinsic = IntrinsicValueForPayOff(strike, fixing, buySellForeign, strikeDirection)
if abs(intrinsic) < epsilon:
return NotionalAtStrike(notional1, notional2, notionalAtStrike, hasBarrier)
return notional1 if intrinsic > 0.0 else notional2
def CurrencyConversion(fixing, settleInDomesticCurrency, rateDirection):
return 1.0 if settleInDomesticCurrency is True else FxRateValueToUse(fixing, rateDirection == 'DomesticPerForeign')
def BarrierHit(fixing, barrierLevel, barrierInterpretation, buySellForeign, rateDirection):
if barrierInterpretation == 'Past':
return IntrinsicValueForPayOff(barrierLevel, fixing, buySellForeign, rateDirection) < 0
else:
return IntrinsicValueForPayOff(barrierLevel, fixing, buySellForeign, rateDirection) <= 0
def BarrierHitOrIgnored(fixing, hasBarrier, barrierLevel, barrierInterpretation, buySellForeign, rateDirection):
return (not hasBarrier) or BarrierHit(fixing, barrierLevel, barrierInterpretation, buySellForeign, rateDirection)
def BarrierMultiplier(fixing, hasBarrier, barrierLevel, barrierInterpretation, buySellForeign, rateDirection):
return 1.0 if BarrierHitOrIgnored(fixing, hasBarrier, barrierLevel, barrierInterpretation, buySellForeign, rateDirection) else 0.0
def PhysicalStrikeToUse(strike, fixing, target, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection):
adjustedStrike = AdjustedStrike(strike, fixing, target, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection)
return FxRateValueToUse(adjustedStrike, not IsDomesticPerForeign(rateDirection))
def TargetMultiplier(previousTarget, targetLevel):
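    # 1.0 while no target is set or the accumulated value is still below the target; 0.0 once the target has been reached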
return 1.0 if targetLevel < epsilon or (targetLevel - previousTarget) > epsilon else 0.0
def TakeBarrierIntoAccount(hasBarrier, intrinsicValue):
return hasBarrier and intrinsicValue < -epsilon
def CalculateCashAmount(fixing,
strike,
rateDirection,
targetLevel,
inverseTarget,
previousTarget,
exactTarget,
notional1,
notional2,
settleInDomesticCurrency,
buySellForeign,
hasBarrier,
barrierLevel,
barrierInterpretation):
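    # cash settlement: strike-adjusted intrinsic value scaled by notional and currency conversion, gated by barrier and target multipliers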
intrinsicValue = StrikeAdjustedIntrinsicValue(strike, fixing, targetLevel, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection)
notional = NotionalAmount(notional1, notional2, strike, fixing, buySellForeign, rateDirection, hasBarrier)
currencyConversion = CurrencyConversion(fixing, settleInDomesticCurrency, rateDirection)
barrierMultiplier = BarrierMultiplier(fixing, TakeBarrierIntoAccount(hasBarrier, intrinsicValue), barrierLevel, barrierInterpretation, buySellForeign, rateDirection)
targetMultiplier = TargetMultiplier(previousTarget, targetLevel)
return intrinsicValue * notional * currencyConversion * barrierMultiplier * targetMultiplier
def CalculatePhysicalAmounts(fixing,
strike,
rateDirection,
targetLevel,
inverseTarget,
previousTarget,
exactTarget,
notional1,
notional2,
buySellForeign,
hasBarrier,
barrierLevel,
barrierInterpretation,
notionalAtStrike):
intrinsicValue = StrikeAdjustedIntrinsicValue(strike, fixing, targetLevel, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection)
notionalDomestic = NotionalAmount(notional1, notional2, strike, fixing, buySellForeign, rateDirection, hasBarrier, notionalAtStrike)
strikeToUse = PhysicalStrikeToUse(strike, fixing, targetLevel, previousTarget, inverseTarget, exactTarget, buySellForeign, rateDirection)
barrierMultiplier = BarrierMultiplier(fixing, TakeBarrierIntoAccount(hasBarrier, intrinsicValue), barrierLevel, barrierInterpretation, buySellForeign, rateDirection)
targetMultiplier = TargetMultiplier(previousTarget, targetLevel)
amountDomestic = notionalDomestic * BuySellAsMultiplier(buySellForeign) * barrierMultiplier * targetMultiplier
amountForeign = -amountDomestic * strikeToUse
return amountDomestic, amountForeign
def CalculateCommodityCashAmount(fixing,
strike,
targetLevel,
previousTarget,
exactTarget,
notional1,
notional2):
intrinsicValue = StrikeAdjustedIntrinsicValueCommodity(strike, fixing, targetLevel, previousTarget, exactTarget)
notional = NotionalAmount(notional1, notional2, strike, fixing, None, None, None)
targetMultiplier = TargetMultiplier(previousTarget, targetLevel)
return intrinsicValue * notional * targetMultiplier
def GetStrikeDecimals(instrument, rateDirection):
if not hasattr(instrument, 'DecoratedObject'):
instrument = acm.FBusinessLogicDecorator.WrapObject(instrument)
if rateDirection == 'DomesticPerForeign':
return GetCurrencyPairPointsDomesticPerForeign(instrument.ForeignCurrency(), instrument.DomesticCurrency())
else:
return GetCurrencyPairPointsForeignPerDomestic(instrument.ForeignCurrency(), instrument.DomesticCurrency())
def GetFixingValue(instrument, date, rateDirection):
fixing = TrfExpiryEvent(instrument, date)
if fixing is not None:
if acm.Time.DateDifference(date, fixing.Date()) == 0.0:
if fixing.EventValue() > epsilon:
if rateDirection == 'ForeignPerDomestic':
return 1.0 / fixing.EventValue()
else:
return fixing.EventValue()
else:
raise RuntimeError ('No fixing entered for %s' % date)
raise RuntimeError ('Date %s is not a valid fixing date for %s' % (date, instrument.Name()))
def GetPreviousTarget(instrument, date):
allFixings = TrfExpiryEventsSortedByDate(instrument)
accumulation = 0.0
for fixing in allFixings:
if acm.Time.DateDifference(fixing.Date(), date) >= 0:
break
accumulation = fixing.TrfAccTarget()
return accumulation
def BaseSettlementParameters(instrument, date):
rateDirection = 'ForeignPerDomestic' if instrument.StrikeQuotation() and instrument.StrikeQuotation().Name() == 'Per Unit Inverse' else 'DomesticPerForeign'
rateDecimals = GetStrikeDecimals(instrument, rateDirection)
fixing = round(GetFixingValue(instrument, date, rateDirection), rateDecimals)
strike = round(instrument.StrikePrice(), rateDecimals)
barrier = round(instrument.Barrier(), rateDecimals)
return {
'fixing' : fixing,
'strike' : strike,
'rateDirection' : rateDirection,
'targetLevel' : instrument.AdditionalInfo().Sp_TargetLevel(),
'inverseTarget' : instrument.AdditionalInfo().Sp_InvertedTarget(),
'previousTarget' : GetPreviousTarget(instrument, date),
'exactTarget' : instrument.AdditionalInfo().Sp_AdjustedStrike(),
'notional1' : instrument.ContractSize(),
'notional2' : instrument.AdditionalInfo().Sp_LeverageNotional(),
'buySellForeign' : BuySellMapping(instrument, 'Foreign'),
'hasBarrier' : TrfHasBarrier(instrument),
'barrierLevel' : barrier,
'barrierInterpretation' : instrument.AdditionalInfo().Sp_BarrierCondition()
}
def BaseCommoditySettlementParameters(instrument, date):
return {
'fixing': GetFixingValue(instrument, date, None),
'strike': instrument.StrikePrice(),
'targetLevel': instrument.AdditionalInfo().Sp_TargetLevel(),
'previousTarget': GetPreviousTarget(instrument, date),
'exactTarget': instrument.AdditionalInfo().Sp_AdjustedStrike(),
'notional1': instrument.ContractSizeInQuotation(),
'notional2': instrument.AdditionalInfo().Sp_LeverageNotional(),
}
def CashSettlementParameters(instrument, date):
params = BaseSettlementParameters(instrument, date)
params['settleInDomesticCurrency'] = instrument.AdditionalInfo().Sp_SettleInCurr2()
return params
def PhysicalSettlementParameters(instrument, date):
params = BaseSettlementParameters(instrument, date)
params['notionalAtStrike'] = instrument.AdditionalInfo().Sp_StrikeSettle()
return params
def CommodityCashSettlementParameters(instrument, date):
params = BaseCommoditySettlementParameters(instrument, date)
return params
def CalculateTRFSettlementAmounts(trade, date):
instrument = trade.Instrument()
if instrument.AdditionalInfo().StructureType() != 'Target Redemption Forward':
raise RuntimeError('TRF settlement calculations only implemented for Target Redemption Forward')
if instrument.SettlementType() == 'Cash':
return CalculateCashAmount(**CashSettlementParameters(instrument, date))
else:
return CalculatePhysicalAmounts(**PhysicalSettlementParameters(instrument, date))
def CalculateCommodityTRFSettlementAmounts(trade, date):
instrument = trade.Instrument()
if instrument.AdditionalInfo().StructureType() != 'Target Redemption Forward':
raise RuntimeError('TRF settlement calculations only implemented for Target Redemption Forward')
if instrument.SettlementType() == 'Cash':
return CalculateCommodityCashAmount(**CommodityCashSettlementParameters(instrument, date))
else:
raise RuntimeError('TRF settlement calculations only implemented for Cash settlement')
| [
"[email protected]"
] | |
a264914ada26cf2cef65b45470569fb9c72b51bb | 01dc09fdf4a9203da336b893650235f16ff5380f | /Backtest/Historical_BackTest/Neat/tf_neat-trader-intraday/no_hidden_layer/Tech_Input/simple/genome_test.py | 91c0fbe7c5d8937396ad29d1897557fa3872d7e4 | [] | no_license | webclinic017/RayTrader_v3 | 2b15228881bf7a08e90682a2364905317c282f65 | 2ea39946a2654dbc3b05b41abcaf5a4a4082a1b6 | refs/heads/master | 2023-03-16T04:40:41.392465 | 2019-06-04T04:46:46 | 2019-06-04T04:46:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,544 | py | import glob
import multiprocessing
import trader_env
import trader_data
import visualize
import reporter
from statistics import mean
import numpy as np
import neat
import pickle
import matplotlib.pyplot as plt
file_name = "G:\\AI Trading\\Code\\RayTrader_v3\\HistoricalData\\Min_data\\ADANIPORTS-EQ.csv"
data = trader_data.csv_to_df(file_name)
train_data, test_data = trader_data.split_data(data)
env = trader_env.Weighted_Unrealized_BS_Env(train_data)
max_env_steps = len(env.data) - env.t - 1
config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction,
neat.DefaultSpeciesSet, neat.DefaultStagnation,
'config.cfg')
def eval_genome(genome, config):
global env, max_env_steps
ob = env.reset()
net = neat.nn.recurrent.RecurrentNetwork.create(genome, config)
current_max_fitness = 0
fitness_current = 0
counter = 0
step = 0
step_max = max_env_steps
done = False
while not done:
# inputs = trader_data.get_inputs(signals, step)
nnOutput = net.activate(ob)
ob, rew, done, _ = env.step(np.argmax(nnOutput))
# print("id",genome_id,"Step:",step,"act:",np.argmax(nnOutput),"reward:",rew)
fitness_current += rew
step += 1
if fitness_current > current_max_fitness:
current_max_fitness = fitness_current
counter = 0
else:
counter += 1
if step >= step_max:
done = True
if done or env.amt<=0:
done = True
print("Genome id#: ", genome.key)
message = "Fitness :{} Max Fitness :{} Avg Daily Profit :{} %".format(fitness_current,
current_max_fitness,
round(mean(env.daily_profit_per), 3))
print("Initial Value: ",2000)
print("Final Value: ",env.amt)
print("Days: ",len(env.daily_profit_per))
print(message)
plt.title(genome.key)
plt.plot(env.daily_profit_per)
plt.show()
# logger.info(message)
genome.fitness = fitness_current
def run_tests(genome):
global env, max_env_steps, config
env = trader_env.Weighted_Unrealized_BS_Env(train_data)
max_env_steps = len(env.data) - env.t - 1
eval_genome(genome,config)
env = trader_env.Weighted_Unrealized_BS_Env(test_data)
max_env_steps = len(env.data) - env.t - 1
eval_genome(genome,config)
def run_files(files_set):
for genomeFile in files_set:
genome = pickle.load(open(genomeFile, 'rb'))
run_tests(genome)
print("#"*50)
def chunks(seq, num):
avg = len(seq) / float(num)
out = []
last = 0.0
while last < len(seq):
out.append(seq[int(last):int(last + avg)])
last += avg
return out
# Load all the genomes
files = glob.glob(".\\genomes\\*.pkl")
n_processes = 3
threads = []
if __name__ == "__main__":
# divide the file-list
chunks_list = chunks(files, n_processes)
for i in range(n_processes):
threads.append(multiprocessing.Process(target=run_files, args=(chunks_list[i],)))
# start all threads
for t in threads:
t.start()
# Join all threads
for t in threads:
t.join()
#
# if __name__ == "__main__":
# genomeFile = '.\\genomes\\594.pkl'
# genome = pickle.load(open(genomeFile, 'rb'))
# run_tests(genome)
| [
"[email protected]"
] | |
487890ec6dfa248593a93530920bc2c0b559b453 | 3cdb4faf34d8375d6aee08bcc523adadcb0c46e2 | /web/env/lib/python3.6/site-packages/django/contrib/messages/storage/base.py | fd5d0c24aa8037c6beb35ed14e85fda6851aa798 | [
"MIT",
"GPL-3.0-only"
] | permissive | rizwansoaib/face-attendence | bc185d4de627ce5adab1cda7da466cb7a5fddcbe | 59300441b52d32f3ecb5095085ef9d448aef63af | refs/heads/master | 2020-04-25T23:47:47.303642 | 2019-09-12T14:26:17 | 2019-09-12T14:26:17 | 173,157,284 | 45 | 12 | MIT | 2020-02-11T23:47:55 | 2019-02-28T17:33:14 | Python | UTF-8 | Python | false | false | 5,643 | py | from django.conf import settings
from django.contrib.messages import constants, utils
LEVEL_TAGS = utils.get_level_tags()
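# mapping from message level to tag, resolved once at import time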
class Message:
"""
Represent an actual message that can be stored in any of the supported
storage classes (typically session- or cookie-based) and rendered in a view
or template.
"""
def __init__(self, level, message, extra_tags=None):
self.level = int(level)
self.message = message
self.extra_tags = extra_tags
def _prepare(self):
"""
Prepare the message for serialization by forcing the ``message``
and ``extra_tags`` to str in case they are lazy translations.
"""
self.message = str(self.message)
self.extra_tags = str(self.extra_tags) if self.extra_tags is not None else None
def __eq__(self, other):
return isinstance(other, Message) and self.level == other.level and \
self.message == other.message
def __str__(self):
return str(self.message)
@property
def tags(self):
return ' '.join(tag for tag in [self.extra_tags, self.level_tag] if tag)
@property
def level_tag(self):
return LEVEL_TAGS.get(self.level, '')
class BaseStorage:
"""
This is the base backend for temporary message storage.
This is not a complete class; to be a usable storage backend, it must be
subclassed and the two methods ``_get`` and ``_store`` overridden.
"""
def __init__(self, request, *args, **kwargs):
self.request = request
self._queued_messages = []
self.used = False
self.added_new = False
super().__init__(*args, **kwargs)
def __len__(self):
return len(self._loaded_messages) + len(self._queued_messages)
def __iter__(self):
self.used = True
if self._queued_messages:
self._loaded_messages.extend(self._queued_messages)
self._queued_messages = []
return iter(self._loaded_messages)
def __contains__(self, item):
return item in self._loaded_messages or item in self._queued_messages
@property
def _loaded_messages(self):
"""
Return a list of loaded messages, retrieving them first if they have
not been loaded yet.
"""
if not hasattr(self, '_loaded_data'):
messages, all_retrieved = self._get()
self._loaded_data = messages or []
return self._loaded_data
def _get(self, *args, **kwargs):
"""
Retrieve a list of stored messages. Return a tuple of the messages
and a flag indicating whether or not all the messages originally
intended to be stored in this storage were, in fact, stored and
retrieved; e.g., ``(messages, all_retrieved)``.
**This method must be implemented by a subclass.**
If it is possible to tell if the backend was not used (as opposed to
just containing no messages) then ``None`` should be returned in
place of ``messages``.
"""
raise NotImplementedError('subclasses of BaseStorage must provide a _get() method')
def _store(self, messages, response, *args, **kwargs):
"""
Store a list of messages and return a list of any messages which could
not be stored.
One type of object must be able to be stored, ``Message``.
**This method must be implemented by a subclass.**
"""
raise NotImplementedError('subclasses of BaseStorage must provide a _store() method')
def _prepare_messages(self, messages):
"""
Prepare a list of messages for storage.
"""
for message in messages:
message._prepare()
def update(self, response):
"""
Store all unread messages.
If the backend has yet to be iterated, store previously stored messages
again. Otherwise, only store messages added after the last iteration.
"""
self._prepare_messages(self._queued_messages)
if self.used:
return self._store(self._queued_messages, response)
elif self.added_new:
messages = self._loaded_messages + self._queued_messages
return self._store(messages, response)
def add(self, level, message, extra_tags=''):
"""
Queue a message to be stored.
The message is only queued if it contained something and its level is
not less than the recording level (``self.level``).
"""
if not message:
return
# Check that the message level is not less than the recording level.
level = int(level)
if level < self.level:
return
# Add the message.
self.added_new = True
message = Message(level, message, extra_tags=extra_tags)
self._queued_messages.append(message)
def _get_level(self):
"""
Return the minimum recorded level.
The default level is the ``MESSAGE_LEVEL`` setting. If this is
not found, the ``INFO`` level is used.
"""
if not hasattr(self, '_level'):
self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO)
return self._level
def _set_level(self, value=None):
"""
Set a custom minimum recorded level.
If set to ``None``, the default level will be used (see the
``_get_level`` method).
"""
if value is None and hasattr(self, '_level'):
del self._level
else:
self._level = int(value)
level = property(_get_level, _set_level, _set_level)
| [
"[email protected]"
] | |
5b6746bc96796294065d58ec98028daa3d44bbf9 | 2f5ab43956b947b836e8377370d786e5ee16e4b0 | /sklearn2code/sym/test/test_printers.py | d1f8d27e37ac139c656be81f1359268ce15271d4 | [
"MIT"
] | permissive | modusdatascience/sklearn2code | b175fb268fa2871c95f0e319f3cd35dd54561de9 | 3ab82d82aa89b18b18ff77a49d0a524f069d24b9 | refs/heads/master | 2022-09-11T06:16:37.604407 | 2022-08-24T04:43:59 | 2022-08-24T04:43:59 | 115,747,326 | 4 | 2 | MIT | 2018-05-01T00:11:51 | 2017-12-29T19:05:03 | Python | UTF-8 | Python | false | false | 874 | py | from sklearn2code.sym.expression import FiniteMap, Integer, false, true,\
IntegerVariable, RealPiecewise, RealNumber
from sklearn2code.sym.printers import JavascriptPrinter
from nose.tools import assert_equal
def test_javascript_finite_map():
expr = FiniteMap({Integer(0): false, Integer(1): true}, IntegerVariable('x'))
assert_equal(JavascriptPrinter()(expr), '(x===0?false:(x===1?true:null))')
def test_javascript_piecewise():
expr = RealPiecewise((RealNumber(0), false), (RealNumber(1), true))
assert_equal(JavascriptPrinter()(expr), '(false?0.0:(true?1.0:null))')
if __name__ == '__main__':
import sys
import nose
# This code will run the test in this file.'
module_name = sys.modules[__name__].__file__
result = nose.run(argv=[sys.argv[0],
module_name,
'-s', '-v'])
| [
"[email protected]"
] | |
1a01f5c2747cdd429c329c7250f34280b5f686d2 | 412b699e0f497ac03d6618fe349f4469646c6f2d | /env/lib/python3.8/site-packages/web3/_utils/threads.py | ba45d8775e0e35fd72ae6117133e9d50ea23bdc3 | [
"MIT"
] | permissive | EtienneBrJ/Portfolio | 7c70573f02a5779f9070d6d9df58d460828176e3 | 6b8d8cf9622eadef47bd10690c1bf1e7fd892bfd | refs/heads/main | 2023-09-03T15:03:43.698518 | 2021-11-04T01:02:33 | 2021-11-04T01:02:33 | 411,076,325 | 1 | 0 | MIT | 2021-10-31T13:43:09 | 2021-09-27T23:48:59 | HTML | UTF-8 | Python | false | false | 3,979 | py | """
A minimal implementation of the various gevent APIs used within this codebase.
"""
import threading
import time
from types import (
TracebackType,
)
from typing import (
Any,
Callable,
Generic,
Type,
)
from web3._utils.compat import (
Literal,
)
from web3.types import (
TReturn,
)
class Timeout(Exception):
"""
A limited subset of the `gevent.Timeout` context manager.
"""
seconds = None
exception = None
begun_at = None
is_running = None
def __init__(
self, seconds: float = None, exception: Type[BaseException] = None, *args: Any,
**kwargs: Any
) -> None:
self.seconds = seconds
self.exception = exception
def __enter__(self) -> 'Timeout':
self.start()
return self
def __exit__(
self, exc_type: Type[BaseException], exc_val: BaseException, exc_tb: TracebackType
) -> Literal[False]:
return False
def __str__(self) -> str:
if self.seconds is None:
return ''
return "{0} seconds".format(self.seconds)
@property
def expire_at(self) -> int:
if self.seconds is None:
raise ValueError("Timeouts with `seconds == None` do not have an expiration time")
elif self.begun_at is None:
raise ValueError("Timeout has not been started")
return self.begun_at + self.seconds
def start(self) -> None:
if self.is_running is not None:
raise ValueError("Timeout has already been started")
self.begun_at = time.time()
self.is_running = True
def check(self) -> None:
if self.is_running is None:
raise ValueError("Timeout has not been started")
elif self.is_running is False:
raise ValueError("Timeout has already been cancelled")
elif self.seconds is None:
return
elif time.time() > self.expire_at:
self.is_running = False
if isinstance(self.exception, type):
raise self.exception(str(self))
elif isinstance(self.exception, Exception):
raise self.exception
else:
raise self
def cancel(self) -> None:
self.is_running = False
def sleep(self, seconds: float) -> None:
time.sleep(seconds)
self.check()
class ThreadWithReturn(threading.Thread, Generic[TReturn]):
def __init__(
self, target: Callable[..., TReturn] = None, args: Any = None, kwargs: Any = None
) -> None:
super().__init__(
target=target,
args=args or tuple(),
kwargs=kwargs or {},
)
self.target = target
self.args = args
self.kwargs = kwargs
def run(self) -> None:
self._return = self.target(*self.args, **self.kwargs)
def get(self, timeout: float = None) -> TReturn:
self.join(timeout)
try:
return self._return
except AttributeError:
raise RuntimeError("Something went wrong. No `_return` property was set")
class TimerClass(threading.Thread):
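    # Repeatedly invokes callback(*args) every 'interval' seconds until stop() is called.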
def __init__(self, interval: int, callback: Callable[..., Any], *args: Any) -> None:
threading.Thread.__init__(self)
self.callback = callback
self.terminate_event = threading.Event()
self.interval = interval
self.args = args
def run(self) -> None:
while not self.terminate_event.is_set():
self.callback(*self.args)
self.terminate_event.wait(self.interval)
def stop(self) -> None:
self.terminate_event.set()
def spawn(
target: Callable[..., TReturn],
*args: Any,
thread_class: Type[ThreadWithReturn[TReturn]] = ThreadWithReturn,
**kwargs: Any,
) -> ThreadWithReturn[TReturn]:
thread = thread_class(
target=target,
args=args,
kwargs=kwargs,
)
thread.daemon = True
thread.start()
return thread
| [
"[email protected]"
] | |
d5439756a472a776f6e2de4f77152fbc8854b8cf | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/280/97935/submittedfiles/testes.py | d88ecb76f5e989a6ee41f07dc266207edd3ddf88 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,328 | py | """
valor=["X","O"]
symh=valor[0]
sympc=valor[1]
print(symh)
print(sympc)
line1=[" "," "," "]
line2=[" "," "," "]
line3=[" "," "," "]
print("|%s|%s|%s|" % (line1[0],line1[1],line1[2]) )
print("|%s|%s|%s|" % (line2[0],line2[1],line2[2]) )
print("|%s|%s|%s|" % (line3[0],line3[1],line3[2]) )
line1[2]=symh
print("|%s|%s|%s|" % (line1[0],line1[1],line1[2]) )
print("|%s|%s|%s|" % (line2[0],line2[1],line2[2]) )
print("|%s|%s|%s|" % (line3[0],line3[1],line3[2]) )
line2[1]=sympc
print("|%s|%s|%s|" % (line1[0],line1[1],line1[2]) )
print("|%s|%s|%s|" % (line2[0],line2[1],line2[2]) )
print("|%s|%s|%s|" % (line3[0],line3[1],line3[2]) )
line3[2]=symh
print("|%s|%s|%s|" % (line1[0],line1[1],line1[2]) )
print("|%s|%s|%s|" % (line2[0],line2[1],line2[2]) )
print("|%s|%s|%s|" % (line3[0],line3[1],line3[2]) )
"""
"""
x=int(input("Number of grades: "))
while x <= 1:
    x=int(input("Number of grades: "))
notas=[]
for i in range (0,x,1):
    notas.append(float(input("Enter grade %d: " %(i+1))))
soma=sum(notas)
res=soma/x
print(res)
"""
"""
n=int(input("Enter n: "))
a=[]
for i in range (0,n,1):
    a.append(int(input("Enter element %d of vector a: " %(i+1))))
med=sum(a)/len(a)
somat=0
for i in range (0,len(a),1):
somat=somat + ((a[i]-med)**2)
desvpad=(((1/(n-1))*(somat))**0.5)
print(desvpad)
"""
import numpy as np
cont1=0
cont2=0
cont3=0
dim=int(input("Matrix dimension n: "))
matriz=np.empty([dim,dim])
matriztrans=np.empty([dim,dim])
matrizdiag=np.empty([2,dim])
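# row 0 will hold the main diagonal, row 1 the anti-diagonal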
for i in range (0,dim,1):
for j in range (0,dim,1):
        matriz[i][j]=float(input("Enter the value at row %d, column %d: " % (i+1, j+1)))
# transpose
for i in range(0,dim,1):
for j in range(0,dim,1):
matriztrans[i][j] = matriz[j][i]
# diagonals
for i in range(0,dim,1):
matrizdiag[0][i]=matriz[i][i]
for i in range(0,dim,1):
    matrizdiag[1][i]=matriz[i][dim-1-i]
print(matriz)
print(matriztrans)
print(matrizdiag)
for i in range (0,dim-1,1):
if sum(matriz[i]) == sum(matriz[i+1]):
cont1=cont1+1
for i in range (0,dim-1,1):
    if sum(matriztrans[i]) == sum(matriztrans[i+1]):
cont2=cont2+1
for i in range (0,dim-1,1):
if matriz[i][i] == sum(matriz[i+1]):
cont3=cont3+1 | [
"[email protected]"
] | |
3b86e81c1aefa746ea0b2327c9bc1e620689dd0a | 7a013424c82b71bc82aa312e0165a1af4170ac23 | /ABC/ABC173/C.py | c0f86d46455b822b965fac48b703f8bf73750487 | [] | no_license | kikugawa-shoma/Atcoder | fe3405e36dd3e4e25127b6110d6009db507e7095 | 7299116b7beb84815fe34d41f640a2ad1e74ba29 | refs/heads/master | 2020-12-21T19:10:12.471507 | 2020-10-10T16:38:18 | 2020-10-10T16:38:18 | 236,531,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | import copy
H,W,K = map(int,input().split())
C = [list(input()) for _ in range(H)]
M = [[0]*W for _ in range(H)]
for i in range(H):
for j in range(W):
if C[i][j] == "#":
M[i][j] = 1
def bit_01(keta):
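    # enumerate all 2**keta bitmasks as zero-padded binary strings, e.g. bit_01(2) -> ['00', '01', '10', '11']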
ans = []
for i in range(2**(keta)):
ans.append("".join(["{:0", str(keta), "b}"]).format(i))
return ans
vert = bit_01(H)
hori = bit_01(W)
def check(v,h,M):
M = copy.deepcopy(M)
for i in range(len(v)):
if v[i] == "1":
for ii in range(W):
M[i][ii] = 0
for j in range(len(h)):
if h[j] == "1":
for jj in range(H):
M[jj][j] = 0
S = 0
for i in range(W):
for j in range(H):
S += M[j][i]
return S == K
ans = 0
for vp in vert:
for hp in hori:
if check(vp,hp,M):
ans += 1
print(ans)
| [
"[email protected]"
] | |
9176d3e53da70f0692fbab648cb4c76f58216f6d | 059c4606fd93b70c244a0017cc1727d1b951e75a | /5-packages/http-examples/httpie-notes/httpie/context.py | c0840c9d051252a44b25937acfd607e94db2b7e7 | [
"BSD-3-Clause"
] | permissive | andyguwc/python-resources | 1f6850b1fde243912644530ee8985ae09773c68e | d8ab7e54d287a697e4763a36b10136af461ec820 | refs/heads/master | 2021-06-24T13:30:25.196129 | 2021-03-02T03:11:49 | 2021-03-02T03:11:49 | 210,958,803 | 1 | 1 | null | 2019-10-25T03:12:31 | 2019-09-25T23:29:29 | Python | UTF-8 | Python | false | false | 3,005 | py | import os
import sys
from pathlib import Path
from typing import Union, IO, Optional
try:
import curses
except ImportError:
curses = None # Compiled w/o curses
from httpie.compat import is_windows
from httpie.config import DEFAULT_CONFIG_DIR, Config, ConfigFileError
from httpie.utils import repr_dict
# use this to manage all things environment related
class Environment:
"""
Information about the execution context
(standard streams, config directory, etc).
By default, it represents the actual environment.
All of the attributes can be overwritten though, which
is used by the test suite to simulate various scenarios.
"""
is_windows: bool = is_windows
config_dir: Path = DEFAULT_CONFIG_DIR
stdin: Optional[IO] = sys.stdin
stdin_isatty: bool = stdin.isatty() if stdin else False
stdin_encoding: str = None
stdout: IO = sys.stdout
stdout_isatty: bool = stdout.isatty()
stdout_encoding: str = None
stderr: IO = sys.stderr
stderr_isatty: bool = stderr.isatty()
colors = 256
program_name: str = 'http'
def __init__(self, **kwargs):
"""
Use keyword arguments to overwrite
any of the class attributes for this instance.
"""
# making sure all the keyword args are actually attributes of this class
assert all(hasattr(type(self), attr) for attr in kwargs.keys())
self.__dict__.update(**kwargs) # easy way to update all attributes
# Keyword arguments > stream.encoding > default utf8
if self.stdin and self.stdin_encoding is None:
self.stdin_encoding = getattr(
self.stdin, 'encoding', None) or 'utf8'
if self.stdout_encoding is None:
actual_stdout = self.stdout
self.stdout_encoding = getattr(
actual_stdout, 'encoding', None) or 'utf8'
def __str__(self):
defaults = dict(type(self).__dict__)
actual = dict(defaults)
actual.update(self.__dict__)
actual['config'] = self.config
return repr_dict({
key: value
for key, value in actual.items()
if not key.startswith('_')
})
def __repr__(self):
return f'<{type(self).__name__} {self}>'
_config = None # this is a cache for config
# core part of Environment
# Support loading config from the config file directory https://httpie.org/doc#config-file-directory
@property
def config(self) -> Config:
config = self._config
if not config:
self._config = config = Config(directory=self.config_dir)
if not config.is_new():
try:
config.load()
except ConfigFileError as e:
                    self.log_error(e, level='warning')
        return config
def log_error(self, msg, level='error'):
assert level in ['error', 'warning']
self.stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')
| [
"[email protected]"
] | |
da2ea8e51998c780767ce2552d82184f69db07fe | 8015f1c62a2cb4efd21aa8938336913bf8117868 | /bamap/ba4199.pngMap.py | 1ad5d97a840efedf5f02dfe430ab6575dc397483 | [] | no_license | GamerNoTitle/Beepers-and-OLED | 675b5e3c179df0f0e27b42bf594c43860d03b9af | afe1340e5394ae96bda5f9022a8a66824368091e | refs/heads/master | 2020-04-20T00:09:47.122471 | 2019-04-29T04:59:35 | 2019-04-29T04:59:35 | 168,515,579 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 8,468 | py | ba4199.pngMap = [
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111100000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111110000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000010011111111111111111111111111111111111111110000000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000001011111111111111111111111111111111111111111110000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111000000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111110000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111110000000000',
'00000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111100000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111100000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111000000',
'00000000000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111100000',
'11000000000000000000000000000000000000101111000000000000000000000000000000001111111111111111111111111111111111111111111111110000',
'11000000000000000000000000000000000000001111110000000000000000000000000000000111111111111111111111111111111111111111111111110000',
'10000000000000000000000000000000000111111111111000000000000000000000000000000111111111111111111111111111111111111111111111111000',
'10000000000000000000000000000000001111111111111100000000000000000000000000000111111111111111111111111111111111111111111111111100',
'00000000000000000000000000000000001111111111111100000000000000000000000000000111111111111111111111111111111111111111111111111100',
'00000000000000000000000000000000001111111111111110000000000000000000000000000111111111111111111111111111111111111111111111111100',
'00000000000000000000000000000000000111111111111110000000000000000000000000000111111111111111111111111111111111111111111111111100',
'00000000000000000000000000000000000011111111111110000000000000000000000000100111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000111111110000000000000000000000000000001111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000001111100000000000000000000000000000001111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000101111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000101111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000011111111111111111111111111000011111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000100011111111111111111111111000000001111111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000000111111111111111111111000000000000001111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000111111111111111111111111000000000000000111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000001111111111111111111111100000000000000000111111111111111111111111111111111',
'00000000000000000000000000000000000000000000000000000000111111111111111111111100000000000000000011111111111111111111111111111110',
'00000000000000000000000000000000000000000000000000000000111111111111111111111000000000000000000011111111111111111111111111111110',
'00000000000000000000000000000000000000000000000000000000111111111111111111111000000000000000000111111111111111111111111111111100',
'00000000000000000000000000000000000000000000000000000001111111111111111111111000000000000000000111111111111111111111111111111000',
'00000000000000000000000000000000000000000000000000000001111111111111111111111000000000000000001111111111111111111111111111111000',
'10000000000000000000000000000000000000000000000000000000111111111111111111111000000000000000001111111111111111111111111111110000',
'00000000000000000000000000000000000000000000000000000000111111111111111111111100000000000000001111111111111111111111111111110000',
'11110000000000000000000000000000000000000000000000000000011111111111111111111111000000000011111111111111111111111111111111000000',
'11110000000000000000000000000000000000000000000000000000011111111111111111111100111111101011111111111111111111111111111111000000',
'11110000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111111010000000',
'11110000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111100000110',
'11111100000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111111000001111',
'11111110000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111110000001111',
'11111111000000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111111111100000111111',
'11111111100000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111110000000111111',
'11111111111000000000000000000000000000000000000000000000000000101111111111111111111111111111111111111111111111111100001111111111',
'11111111111100000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111111111000111111111111',
'11111111111110000000000000000000000000000000000000000000000000000111111111111111111111111111111111111111111111110011111111111111',
'11111111111111110000000000000000000000000000000000000000000000000011111111111111111111111111111111111111111111101111111111111111',
]
| [
"[email protected]"
] | |
05eacae54547837444451aba6a9ab0c685add15e | 03198f075072bfb9d5c5afab2fef99d3ec5f37db | /source/api_v2/serializers/advert.py | 8c9cf5e5ce4d0f747676fb2b5908d2bbc2e61240 | [] | no_license | Azer-Denker/Ex_12 | 2c402dffddbf726bfaab61f5022ea0cf6b6b3562 | 97d4eda2d621163c6e12ea388569b50157d09fd5 | refs/heads/main | 2023-07-14T19:05:39.763400 | 2021-08-21T13:30:31 | 2021-08-21T13:30:31 | 398,558,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | from rest_framework import serializers
from webapp.models import Advert
class AdvertSerializer(serializers.ModelSerializer):
class Meta:
model = Advert
fields = ('id', 'title', 'text', 'author', 'created_at')
read_only_fields = ('author', 'id')
def create(self, validated_data):
return Advert.objects.create(**validated_data)
def update(self, instance, validated_data):
for field, value in validated_data.items():
setattr(instance, field, value)
instance.save()
return instance
def delete(self, instance):
instance.delete()
return instance.pk
| [
"[email protected]"
] | |
efceb7bfde0ca3da44a812a43f838b7ac79170bb | 79eb159b3ee36eb76bd921be24081708f44ac735 | /tests/test_codec.py | d9f73bcd0a34d29503f034d3199e6d5a2172f9d8 | [] | no_license | osh/PyAV | d7139f8faf7ee0973376db807e3b917863e9fb73 | 5fa85fd142ee8dabf01f4873e29678aeca153b4f | refs/heads/master | 2021-01-18T13:24:52.202662 | 2015-12-11T04:01:52 | 2015-12-11T04:01:52 | 47,802,016 | 1 | 0 | null | 2015-12-11T03:12:18 | 2015-12-11T03:12:18 | null | UTF-8 | Python | false | false | 620 | py | from .common import *
from av.codec import Codec
from av.video.format import VideoFormat
class TestCodecs(TestCase):
def test_codec_mpeg4(self):
c = Codec('mpeg4')
self.assertEqual(c.name, 'mpeg4')
self.assertEqual(c.long_name, 'MPEG-4 part 2')
self.assertEqual(c.type, 'video')
self.assertEqual(c.id, 13)
self.assertTrue(c.is_encoder)
self.assertTrue(c.is_decoder)
formats = c.video_formats
self.assertTrue(formats)
self.assertIsInstance(formats[0], VideoFormat)
self.assertTrue(any(f.name == 'yuv420p' for f in formats))
| [
"[email protected]"
] | |
13d80f56e85681da4140ed64b47d36026edbf212 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/subscriptions/v2019_11_01/operations/_subscriptions_operations.py | 44a1d6b996b444a635fd1f5eca5b7f564867551d | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 10,390 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SubscriptionsOperations(object):
"""SubscriptionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.subscriptions.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_locations(
self,
subscription_id, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.LocationListResult"]
"""Gets all available geo-locations.
This operation provides all the locations that are available for resource providers; however,
each resource provider may support a subset of this list.
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LocationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resource.subscriptions.v2019_11_01.models.LocationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_locations.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('LocationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_locations.metadata = {'url': '/subscriptions/{subscriptionId}/locations'} # type: ignore
def get(
self,
subscription_id, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.Subscription"
"""Gets details about a specified subscription.
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Subscription, or the result of cls(response)
:rtype: ~azure.mgmt.resource.subscriptions.v2019_11_01.models.Subscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subscription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Subscription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.SubscriptionListResult"]
"""Gets all subscriptions for a tenant.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SubscriptionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resource.subscriptions.v2019_11_01.models.SubscriptionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SubscriptionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions'} # type: ignore
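# --- Illustrative usage (not generated code) ---
# A minimal sketch of driving this operation group through the versioned
# client; the client class and credential type are assumptions based on the
# usual azure-mgmt-resource / azure-identity layout:
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.resource.subscriptions.v2019_11_01 import SubscriptionClient
#
#     client = SubscriptionClient(credential=DefaultAzureCredential())
#     for sub in client.subscriptions.list():
#         print(sub.subscription_id, sub.display_name)
#     for loc in client.subscriptions.list_locations("<subscription-id>"):
#         print(loc.name)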
| [
"[email protected]"
] | |
2614cf1f44792beeb55c2a2e4257282366b8da9c | f33b30743110532ddae286ba1b34993e61669ab7 | /869. Reordered Power of 2.py | 9fecfa97156c715493e1bcf2e58aab9b47cf8034 | [] | no_license | c940606/leetcode | fe9dcee7a5daa4d52999d5f53253dd6dd33c348b | 631df2ce6892a6fbb3e435f57e90d85f8200d125 | refs/heads/master | 2021-07-10T14:01:26.164966 | 2020-08-16T10:46:16 | 2020-08-16T10:46:16 | 186,588,449 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | class Solution:
def reorderedPowerOf2(self, N):
"""
:type N: int
:rtype: bool
"""
from collections import Counter
c = Counter(str(N))
return any(c == Counter(str(1 << i)) for i in range(32))
a = Solution()
print(a.reorderedPowerOf2(16))
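# Why this works: reordering digits preserves the multiset of digits, so N can
# be rearranged into a power of two iff some 2**i has exactly the same digit
# counts; i < 32 suffices under the usual constraint N <= 10**9 (2**30 > 10**9).
# A few extra checks, with expected values from the problem's examples:
print(a.reorderedPowerOf2(1))   # True  (1 == 2**0)
print(a.reorderedPowerOf2(10))  # False
print(a.reorderedPowerOf2(46))  # True  (46 rearranges to 64)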
| [
"[email protected]"
] | |
100195dfd715236cf3362301bc411a12a0be41c5 | 693567f042c6bd93ecdda41cb5db81c55ccf3158 | /List/swap two elements in a list (another approach).py | c013b3bb504c2f734752ab41623c4161c62e0bf9 | [] | no_license | supriyo-pal/Python-Practice | 5806e0045ebfeb04856246a245430e2ab7921ba9 | 2025369f0d23d603ad27eaff149500137e98dbcf | refs/heads/main | 2023-01-25T05:31:58.404283 | 2020-12-09T19:08:22 | 2020-12-09T19:08:22 | 317,021,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 796 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 2 21:57:25 2020
@author: Supriyo
"""
number=input("enter the numbers between 0-9:")
number_list=list()
length=0
for i in range(len(number)):
number_list.append(number[i])
length=len(number_list)
print("Choose one position between 0 to",length)
pos1=int(input())
print("Choose another position except ",pos1)
pos2=int(input())
def swapPositions(lst, pos1, pos2):
    # 'lst' avoids shadowing the built-in 'list'; the pop(pos1)/pop(pos2-1)
    # sequence below is only valid when pos1 < pos2, so normalise the order
    if pos1 > pos2:
        pos1, pos2 = pos2, pos1
    # popping both the elements from the list
    first_ele = lst.pop(pos1)
    second_ele = lst.pop(pos2-1)
    # inserting each one in the other's position
    lst.insert(pos1, second_ele)
    lst.insert(pos2, first_ele)
    return lst
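# Alternative for comparison: tuple assignment swaps in place with no
# pop/insert bookkeeping and no index-order concerns. A minimal sketch:
def swap_by_assignment(lst, pos1, pos2):
    lst[pos1], lst[pos2] = lst[pos2], lst[pos1]
    return lst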
# Driver function
print(swapPositions(number_list, pos1-1, pos2-1)) | [
"[email protected]"
] | |
7281be76556fd8d629e4800ad5a8fe24b6f8e645 | 2880ec0276330b0d3248ad21476769a4959a9f66 | /life_expectancy/views.py | 78b6b7477419a6eec68e53b376c7cb384fadaf8f | [
"MIT"
] | permissive | Cally99/Data-Labs-with-Python-Tableau | 5a13c3b4d241633848c7a16d83977e9ee9ca9599 | 6fe58cd45d6c2b058abe771b3ffcf108b8832a49 | refs/heads/master | 2020-07-06T00:47:27.801260 | 2019-07-03T06:50:27 | 2019-07-03T06:50:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | from django.shortcuts import render
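# A matching URL pattern (route and name assumed for illustration) would be
# wired up in the project's urls.py roughly like this:
#
#     from django.urls import path
#     from .views import life_expectancy
#
#     urlpatterns = [
#         path('life-expectancy/', life_expectancy, name='life_expectancy'),
#     ]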
def life_expectancy(request):
return render(request, "life_expectancy.html") | [
"[email protected]"
] | |
f735e541e2852a473ab392064cf9429ac1a90ffc | 0db19410e9751790af8ce4a0a9332293e379c02f | /configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py | b4263f25e741e25a0ec5b85900ff1b2587d2805d | [
"Apache-2.0"
] | permissive | open-mmlab/mmpose | 2c9986521d35eee35d822fb255e8e68486026d94 | 537bd8e543ab463fb55120d5caaa1ae22d6aaf06 | refs/heads/main | 2023-08-30T19:44:21.349410 | 2023-07-04T13:18:22 | 2023-07-04T13:18:22 | 278,003,645 | 4,037 | 1,171 | Apache-2.0 | 2023-09-14T09:44:55 | 2020-07-08T06:02:55 | Python | UTF-8 | Python | false | false | 6,656 | py | _base_ = ['../../../_base_/default_runtime.py']
# runtime
max_epochs = 420
stage2_num_epochs = 30
base_lr = 4e-3
train_cfg = dict(max_epochs=max_epochs, val_interval=10)
randomness = dict(seed=21)
# optimizer
optim_wrapper = dict(
type='OptimWrapper',
optimizer=dict(type='AdamW', lr=base_lr, weight_decay=0.),
paramwise_cfg=dict(
norm_decay_mult=0, bias_decay_mult=0, bypass_duplicate=True))
# learning rate
param_scheduler = [
dict(
type='LinearLR',
start_factor=1.0e-5,
by_epoch=False,
begin=0,
end=1000),
dict(
# use cosine lr from 210 to 420 epoch
type='CosineAnnealingLR',
eta_min=base_lr * 0.05,
begin=max_epochs // 2,
end=max_epochs,
T_max=max_epochs // 2,
by_epoch=True,
convert_to_iter_based=True),
]
# automatically scaling LR based on the actual training batch size
auto_scale_lr = dict(base_batch_size=1024)
# codec settings
codec = dict(
type='SimCCLabel',
input_size=(192, 256),
sigma=(4.9, 5.66),
simcc_split_ratio=2.0,
normalize=False,
use_dark=False)
# model settings
model = dict(
type='TopdownPoseEstimator',
data_preprocessor=dict(
type='PoseDataPreprocessor',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
bgr_to_rgb=True),
backbone=dict(
_scope_='mmdet',
type='CSPNeXt',
arch='P5',
expand_ratio=0.5,
deepen_factor=0.33,
widen_factor=0.5,
out_indices=(4, ),
channel_attention=True,
norm_cfg=dict(type='SyncBN'),
act_cfg=dict(type='SiLU'),
init_cfg=dict(
type='Pretrained',
prefix='backbone.',
checkpoint='https://download.openmmlab.com/mmpose/v1/projects/'
'rtmpose/cspnext-s_udp-aic-coco_210e-256x192-92f5a029_20230130.pth' # noqa
)),
head=dict(
type='RTMCCHead',
in_channels=512,
out_channels=17,
input_size=codec['input_size'],
in_featuremap_size=(6, 8),
simcc_split_ratio=codec['simcc_split_ratio'],
final_layer_kernel_size=7,
gau_cfg=dict(
hidden_dims=256,
s=128,
expansion_factor=2,
dropout_rate=0.,
drop_path=0.,
act_fn='SiLU',
use_rel_bias=False,
pos_enc=False),
loss=dict(
type='KLDiscretLoss',
use_target_weight=True,
beta=10.,
label_softmax=True),
decoder=codec),
test_cfg=dict(flip_test=True))
# base dataset settings
dataset_type = 'HumanArtDataset'
data_mode = 'topdown'
data_root = 'data/'
backend_args = dict(backend='local')
# backend_args = dict(
# backend='petrel',
# path_mapping=dict({
# f'{data_root}': 's3://openmmlab/datasets/detection/coco/',
# f'{data_root}': 's3://openmmlab/datasets/detection/coco/'
# }))
# pipelines
train_pipeline = [
dict(type='LoadImage', backend_args=backend_args),
dict(type='GetBBoxCenterScale'),
dict(type='RandomFlip', direction='horizontal'),
dict(type='RandomHalfBody'),
dict(
type='RandomBBoxTransform', scale_factor=[0.6, 1.4], rotate_factor=80),
dict(type='TopdownAffine', input_size=codec['input_size']),
dict(type='mmdet.YOLOXHSVRandomAug'),
dict(
type='Albumentation',
transforms=[
dict(type='Blur', p=0.1),
dict(type='MedianBlur', p=0.1),
dict(
type='CoarseDropout',
max_holes=1,
max_height=0.4,
max_width=0.4,
min_holes=1,
min_height=0.2,
min_width=0.2,
p=1.),
]),
dict(type='GenerateTarget', encoder=codec),
dict(type='PackPoseInputs')
]
val_pipeline = [
dict(type='LoadImage', backend_args=backend_args),
dict(type='GetBBoxCenterScale'),
dict(type='TopdownAffine', input_size=codec['input_size']),
dict(type='PackPoseInputs')
]
train_pipeline_stage2 = [
dict(type='LoadImage', backend_args=backend_args),
dict(type='GetBBoxCenterScale'),
dict(type='RandomFlip', direction='horizontal'),
dict(type='RandomHalfBody'),
dict(
type='RandomBBoxTransform',
shift_factor=0.,
scale_factor=[0.75, 1.25],
rotate_factor=60),
dict(type='TopdownAffine', input_size=codec['input_size']),
dict(type='mmdet.YOLOXHSVRandomAug'),
dict(
type='Albumentation',
transforms=[
dict(type='Blur', p=0.1),
dict(type='MedianBlur', p=0.1),
dict(
type='CoarseDropout',
max_holes=1,
max_height=0.4,
max_width=0.4,
min_holes=1,
min_height=0.2,
min_width=0.2,
p=0.5),
]),
dict(type='GenerateTarget', encoder=codec),
dict(type='PackPoseInputs')
]
# data loaders
train_dataloader = dict(
batch_size=256,
num_workers=10,
persistent_workers=True,
sampler=dict(type='DefaultSampler', shuffle=True),
dataset=dict(
type=dataset_type,
data_root=data_root,
data_mode=data_mode,
ann_file='HumanArt/annotations/training_humanart_coco.json',
data_prefix=dict(img=''),
pipeline=train_pipeline,
))
val_dataloader = dict(
batch_size=64,
num_workers=10,
persistent_workers=True,
drop_last=False,
sampler=dict(type='DefaultSampler', shuffle=False, round_up=False),
dataset=dict(
type=dataset_type,
data_root=data_root,
data_mode=data_mode,
ann_file='HumanArt/annotations/validation_humanart.json',
# bbox_file=f'{data_root}HumanArt/person_detection_results/'
# 'HumanArt_validation_detections_AP_H_56_person.json',
data_prefix=dict(img=''),
test_mode=True,
pipeline=val_pipeline,
))
test_dataloader = val_dataloader
# hooks
default_hooks = dict(
checkpoint=dict(save_best='coco/AP', rule='greater', max_keep_ckpts=1))
custom_hooks = [
dict(
type='EMAHook',
ema_type='ExpMomentumEMA',
momentum=0.0002,
update_buffers=True,
priority=49),
dict(
type='mmdet.PipelineSwitchHook',
switch_epoch=max_epochs - stage2_num_epochs,
switch_pipeline=train_pipeline_stage2)
]
# evaluators
val_evaluator = dict(
type='CocoMetric',
ann_file=data_root + 'HumanArt/annotations/validation_humanart.json')
test_evaluator = val_evaluator
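# Illustrative launch commands, assuming the standard mmpose tooling layout
# (tools/train.py and tools/dist_train.sh at the repository root):
#   python tools/train.py configs/body_2d_keypoint/rtmpose/humanart/rtmpose-s_8xb256-420e_humanart-256x192.py
#   bash tools/dist_train.sh <this-config> 8   # 8 GPUs, matching the 8xb256 in the config name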
| [
"[email protected]"
] | |
1cc3c3e0a40e800b3eca55bc1f2adf1f5bbcee2a | 0fb867b48b5a0bd88f9fefb5cdcad0b4abe720b6 | /calculator.spec | 646bfee63aa626e392df8c449574bd17d9edbe61 | [] | no_license | sparshjaincs/Simple-Calculator | c010181d0ad0bc09719f813e6d91f7b87d990d5d | 76c597c2e59a806c8d8a93ad8b798288639e7da1 | refs/heads/master | 2020-06-21T17:10:16.012591 | 2019-07-18T19:15:22 | 2019-07-18T19:15:22 | 197,511,687 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 817 | spec | # -*- mode: python -*-
block_cipher = None
a = Analysis(['calculator.py'],
pathex=['G:\\Projects\\Calculator'],
binaries=[],
datas=[],
hiddenimports=[],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='calculator',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
runtime_tmpdir=None,
console=True )
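# To build from this spec (standard PyInstaller workflow; run from the project
# root so the pathex above resolves):
#   pyinstaller calculator.spec
# With runtime_tmpdir=None and no COLLECT step this produces a one-file
# executable under dist/ (dist/calculator.exe on Windows).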
| [
"[email protected]"
] | |
eb07d2a3f8f793245785b8e732d7d785d49671b6 | 282d0a84b45b12359b96bbf0b1d7ca9ee0cb5d19 | /Malware1/venv/Lib/site-packages/scipy/interpolate/fitpack2.py | 0f14d84f30435f315adac039526c16ae5d5cd92f | [] | no_license | sameerakhtar/CyberSecurity | 9cfe58df98495eac6e4e2708e34e70b7e4c055d3 | 594973df27b4e1a43f8faba0140ce7d6c6618f93 | refs/heads/master | 2022-12-11T11:53:40.875462 | 2020-09-07T23:13:22 | 2020-09-07T23:13:22 | 293,598,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:422178a9adf00766a95a781c4d8e1837d120bc65542ddd936c9f14decc375ae8
size 62749
| [
"[email protected]"
] | |
f046f12d7b3f16ea03cc78bebd1b08607193f082 | c086a38a366b0724d7339ae94d6bfb489413d2f4 | /PythonEnv/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py | 9e74114d102460a9401b98c1320ac20636a4a733 | [] | no_license | FlowkoHinti/Dionysos | 2dc06651a4fc9b4c8c90d264b2f820f34d736650 | d9f8fbf3bb0713527dc33383a7f3e135b2041638 | refs/heads/master | 2021-03-02T01:14:18.622703 | 2020-06-09T08:28:44 | 2020-06-09T08:28:44 | 245,826,041 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,164 | py | # frame.py - The MDI frame window for an editor.
import pywin.framework.window
import win32ui
import win32con
import afxres
from . import ModuleBrowser
class EditorFrame(pywin.framework.window.MDIChildWnd):
def OnCreateClient(self, cp, context):
# Create the default view as specified by the template (ie, the editor view)
view = context.template.MakeView(context.doc)
# Create the browser view.
browserView = ModuleBrowser.BrowserView(context.doc)
view2 = context.template.MakeView(context.doc)
splitter = win32ui.CreateSplitter()
style = win32con.WS_CHILD | win32con.WS_VISIBLE
splitter.CreateStatic(self, 1, 2, style, win32ui.AFX_IDW_PANE_FIRST)
sub_splitter = self.sub_splitter = win32ui.CreateSplitter()
sub_splitter.CreateStatic(splitter, 2, 1, style, win32ui.AFX_IDW_PANE_FIRST + 1)
# Note we must add the default view first, so that doc.GetFirstView() returns the editor view.
sub_splitter.CreateView(view, 1, 0, (0, 0))
splitter.CreateView(browserView, 0, 0, (0, 0))
sub_splitter.CreateView(view2, 0, 0, (0, 0))
## print "First view is", context.doc.GetFirstView()
## print "Views are", view, view2, browserView
## print "Parents are", view.GetParent(), view2.GetParent(), browserView.GetParent()
## print "Splitter is", splitter
## print "sub splitter is", sub_splitter
## Old
## splitter.CreateStatic (self, 1, 2)
## splitter.CreateView(view, 0, 1, (0,0)) # size ignored.
## splitter.CreateView (browserView, 0, 0, (0, 0))
# Restrict the size of the browser splitter (and we can avoid filling
# it until it is shown)
splitter.SetColumnInfo(0, 10, 20)
# And the active view is our default view (so it gets initial focus)
self.SetActiveView(view)
def GetEditorView(self):
# In a multi-view (eg, splitter) environment, get
# an editor (ie, scintilla) view
# Heuristic: return the view in the taller pane, i.e. the one the user has expanded the most
if self.sub_splitter is None:
return self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST)
v1 = self.sub_splitter.GetPane(0, 0)
v2 = self.sub_splitter.GetPane(1, 0)
r1 = v1.GetWindowRect()
r2 = v2.GetWindowRect()
if r1[3] - r1[1] > r2[3] - r2[1]:
return v1
return v2
def GetBrowserView(self):
# XXX - should fix this :-)
return self.GetActiveDocument().GetAllViews()[1]
def OnClose(self):
doc = self.GetActiveDocument()
if not doc.SaveModified():
## Cancel button selected from Save dialog, do not actually close
## print 'close cancelled'
return 0
## So the 'Save' dialog doesn't come up twice
doc._obj_.SetModifiedFlag(False)
# Must force the module browser to close itself here (OnDestroy for the view itself is too late!)
self.sub_splitter = None # ensure no circles!
self.GetBrowserView().DestroyBrowser()
return self._obj_.OnClose()
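# Pane layout produced by OnCreateClient above, as a sketch:
#
#   +-----------+----------------------+
#   | browser   | editor view 2        |  <- sub_splitter row 0
#   | (module   +----------------------+
#   |  tree)    | default editor view  |  <- sub_splitter row 1 (doc.GetFirstView())
#   +-----------+----------------------+
#
# The outer static splitter holds the ModuleBrowser in column 0; the nested
# sub_splitter stacks the two editor views in column 1.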
| [
"="
] | = |
1c10b542571142609fd929c7bb5db2a96ef660cd | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/H3C-PORTAL-MIB.py | 64fd13746f0c6c4162f4bf5d9b6313b40e2af202 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 30,326 | py | #
# PySNMP MIB module H3C-PORTAL-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/H3C-PORTAL-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:10:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion")
h3cCommon, = mibBuilder.importSymbols("HUAWEI-3COM-OID-MIB", "h3cCommon")
ifIndex, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "ifIndex", "InterfaceIndex")
InetAddressType, InetAddressPrefixLength, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddressPrefixLength", "InetAddress")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
NotificationType, Bits, TimeTicks, iso, IpAddress, MibIdentifier, Counter64, Unsigned32, Counter32, ModuleIdentity, Integer32, Gauge32, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Bits", "TimeTicks", "iso", "IpAddress", "MibIdentifier", "Counter64", "Unsigned32", "Counter32", "ModuleIdentity", "Integer32", "Gauge32", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TruthValue, TextualConvention, DisplayString, MacAddress, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "DisplayString", "MacAddress", "RowStatus")
h3cPortal = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99))
if mibBuilder.loadTexts: h3cPortal.setLastUpdated('201111080000Z')
if mibBuilder.loadTexts: h3cPortal.setOrganization('Hangzhou H3C Tech. Co., Ltd.')
h3cPortalConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 1))
h3cPortalMaxUserNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cPortalMaxUserNumber.setStatus('current')
h3cPortalCurrentUserNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalCurrentUserNumber.setStatus('current')
h3cPortalStatus = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatus.setStatus('current')
h3cPortalUserNumberUpperLimit = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalUserNumberUpperLimit.setStatus('current')
h3cPortalNasId = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 1, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cPortalNasId.setStatus('current')
h3cPortalTables = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2))
h3cPortalServerTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 1), )
if mibBuilder.loadTexts: h3cPortalServerTable.setStatus('current')
h3cPortalServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 1, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalServerName"))
if mibBuilder.loadTexts: h3cPortalServerEntry.setStatus('current')
h3cPortalServerName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPortalServerName.setStatus('current')
h3cPortalServerUrl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cPortalServerUrl.setStatus('current')
h3cPortalServerPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65534))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cPortalServerPort.setStatus('current')
h3cPortalIfInfoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 2), )
if mibBuilder.loadTexts: h3cPortalIfInfoTable.setStatus('current')
h3cPortalIfInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: h3cPortalIfInfoEntry.setStatus('current')
h3cPortalAuthReqNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalAuthReqNumber.setStatus('current')
h3cPortalAuthSuccNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalAuthSuccNumber.setStatus('current')
h3cPortalAuthFailNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalAuthFailNumber.setStatus('current')
h3cPortalIfServerTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 3), )
if mibBuilder.loadTexts: h3cPortalIfServerTable.setStatus('current')
h3cPortalIfServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 3, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalIfServerIndex"))
if mibBuilder.loadTexts: h3cPortalIfServerEntry.setStatus('current')
h3cPortalIfServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cPortalIfServerIndex.setStatus('current')
h3cPortalIfServerUrl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 3, 1, 2), OctetString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalIfServerUrl.setStatus('current')
h3cPortalIfServerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 3, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalIfServerRowStatus.setStatus('current')
h3cPortalIfVlanNasIDTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 4), )
if mibBuilder.loadTexts: h3cPortalIfVlanNasIDTable.setStatus('current')
h3cPortalIfVlanNasIDEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 4, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalIfVlanNasIDIfIndex"), (0, "H3C-PORTAL-MIB", "h3cPortalIfVlanNasIDVlanID"))
if mibBuilder.loadTexts: h3cPortalIfVlanNasIDEntry.setStatus('current')
h3cPortalIfVlanNasIDIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 4, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: h3cPortalIfVlanNasIDIfIndex.setStatus('current')
h3cPortalIfVlanNasIDVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: h3cPortalIfVlanNasIDVlanID.setStatus('current')
h3cPortalIfVlanNasIDNasID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 4, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalIfVlanNasIDNasID.setStatus('current')
h3cPortalSSIDFreeRuleTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 5), )
if mibBuilder.loadTexts: h3cPortalSSIDFreeRuleTable.setStatus('current')
h3cPortalSSIDFreeRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 5, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalSSIDFreeRuleIndex"))
if mibBuilder.loadTexts: h3cPortalSSIDFreeRuleEntry.setStatus('current')
h3cPortalSSIDFreeRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cPortalSSIDFreeRuleIndex.setStatus('current')
h3cPortalSSIDFreeRuleSrcSSID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 5, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalSSIDFreeRuleSrcSSID.setStatus('current')
h3cPortalSSIDFreeRuleRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 5, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalSSIDFreeRuleRowStatus.setStatus('current')
h3cPortalMacTriggerSrvTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6), )
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvTable.setStatus('current')
h3cPortalMacTriggerSrvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalMacTriggerSrvIndex"))
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvEntry.setStatus('current')
h3cPortalMacTriggerSrvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvIndex.setStatus('current')
h3cPortalMacTriggerSrvIPAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6, 1, 2), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvIPAddrType.setStatus('current')
h3cPortalMacTriggerSrvIP = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvIP.setStatus('current')
h3cPortalMacTriggerSrvPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65534))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvPort.setStatus('current')
h3cPortalMacTriggerSrvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 6, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerSrvRowStatus.setStatus('current')
h3cPortalMacTriggerOnIfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 7), )
if mibBuilder.loadTexts: h3cPortalMacTriggerOnIfTable.setStatus('current')
h3cPortalMacTriggerOnIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 7, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalMacTriggerOnIfIfIndex"))
if mibBuilder.loadTexts: h3cPortalMacTriggerOnIfEntry.setStatus('current')
h3cPortalMacTriggerOnIfIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 7, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: h3cPortalMacTriggerOnIfIfIndex.setStatus('current')
h3cPortalMacTriggerOnIfDetctFlowPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 7, 1, 2), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerOnIfDetctFlowPeriod.setStatus('current')
h3cPortalMacTriggerOnIfThresholdFlow = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 7, 1, 3), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerOnIfThresholdFlow.setStatus('current')
h3cPortalMacTriggerOnIfRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 7, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalMacTriggerOnIfRowStatus.setStatus('current')
h3cPortalFreeRuleTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8), )
if mibBuilder.loadTexts: h3cPortalFreeRuleTable.setStatus('current')
h3cPortalFreeRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalFreeRuleIndex"))
if mibBuilder.loadTexts: h3cPortalFreeRuleEntry.setStatus('current')
h3cPortalFreeRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cPortalFreeRuleIndex.setStatus('current')
h3cPortalFreeRuleSrcIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 2), InterfaceIndex()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleSrcIfIndex.setStatus('current')
h3cPortalFreeRuleSrcVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 3), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleSrcVlanID.setStatus('current')
h3cPortalFreeRuleSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 4), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleSrcMac.setStatus('current')
h3cPortalFreeRuleAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 5), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleAddrType.setStatus('current')
h3cPortalFreeRuleSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 6), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleSrcAddr.setStatus('current')
h3cPortalFreeRuleSrcPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 7), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleSrcPrefix.setStatus('current')
h3cPortalFreeRuleDstAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 8), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleDstAddr.setStatus('current')
h3cPortalFreeRuleDstPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 9), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleDstPrefix.setStatus('current')
h3cPortalFreeRuleProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 6, 17))).clone(namedValues=NamedValues(("invalid", 0), ("tcp", 6), ("udp", 17)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleProtocol.setStatus('current')
h3cPortalFreeRuleSrcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 11), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleSrcPort.setStatus('current')
h3cPortalFreeRuleDstPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 12), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleDstPort.setStatus('current')
h3cPortalFreeRuleRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 8, 1, 13), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalFreeRuleRowStatus.setStatus('current')
h3cPortalForbiddenRuleTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9), )
if mibBuilder.loadTexts: h3cPortalForbiddenRuleTable.setStatus('current')
h3cPortalForbiddenRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1), ).setIndexNames((0, "H3C-PORTAL-MIB", "h3cPortalForbiddenRuleIndex"))
if mibBuilder.loadTexts: h3cPortalForbiddenRuleEntry.setStatus('current')
h3cPortalForbiddenRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cPortalForbiddenRuleIndex.setStatus('current')
h3cPortalForbiddenRuleSrcIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 2), InterfaceIndex()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleSrcIfIndex.setStatus('current')
h3cPortalForbiddenRuleSrcVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 3), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleSrcVlanID.setStatus('current')
h3cPortalForbiddenRuleSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 4), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleSrcMac.setStatus('current')
h3cPortalForbiddenRuleAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 5), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleAddrType.setStatus('current')
h3cPortalForbiddenRuleSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 6), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleSrcAddr.setStatus('current')
h3cPortalForbiddenRuleSrcPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 7), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleSrcPrefix.setStatus('current')
h3cPortalForbiddenRuleDstAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 8), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleDstAddr.setStatus('current')
h3cPortalForbiddenRuleDstPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 9), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleDstPrefix.setStatus('current')
h3cPortalForbiddenRuleProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 6, 17))).clone(namedValues=NamedValues(("invalid", 0), ("tcp", 6), ("udp", 17)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleProtocol.setStatus('current')
h3cPortalForbiddenRuleSrcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 11), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleSrcPort.setStatus('current')
h3cPortalForbiddenRuleDstPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 12), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleDstPort.setStatus('current')
h3cPortalForbiddenRuleRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 2, 9, 1, 13), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPortalForbiddenRuleRowStatus.setStatus('current')
h3cPortalTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 3))
h3cPortalTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 3, 0))
h3cPortalServerLost = NotificationType((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 3, 0, 1)).setObjects(("H3C-PORTAL-MIB", "h3cPortalServerName"), ("H3C-PORTAL-MIB", "h3cPortalFirstTrapTime"))
if mibBuilder.loadTexts: h3cPortalServerLost.setStatus('current')
h3cPortalServerGet = NotificationType((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 3, 0, 2)).setObjects(("H3C-PORTAL-MIB", "h3cPortalServerName"), ("H3C-PORTAL-MIB", "h3cPortalFirstTrapTime"))
if mibBuilder.loadTexts: h3cPortalServerGet.setStatus('current')
h3cPortalTrapVarObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 3, 1))
h3cPortalFirstTrapTime = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 3, 1, 1), TimeTicks()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPortalFirstTrapTime.setStatus('current')
h3cPortalStatistic = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4))
h3cPortalStatAuthReq = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthReq.setStatus('current')
h3cPortalStatAckLogout = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAckLogout.setStatus('current')
h3cPortalStatNotifyLogout = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatNotifyLogout.setStatus('current')
h3cPortalStatChallengeTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatChallengeTimeout.setStatus('current')
h3cPortalStatChallengeBusy = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatChallengeBusy.setStatus('current')
h3cPortalStatChallengeFail = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatChallengeFail.setStatus('current')
h3cPortalStatAuthTimeout = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthTimeout.setStatus('current')
h3cPortalStatAuthFail = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthFail.setStatus('current')
h3cPortalStatPwdError = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatPwdError.setStatus('current')
h3cPortalStatAuthBusy = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthBusy.setStatus('current')
h3cPortalStatAuthDisordered = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthDisordered.setStatus('current')
h3cPortalStatAuthUnknownError = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthUnknownError.setStatus('current')
h3cPortalStatAuthResp = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatAuthResp.setStatus('current')
h3cPortalStatChallengeReq = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatChallengeReq.setStatus('current')
h3cPortalStatChallengeResp = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 4, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalStatChallengeResp.setStatus('current')
h3cPortalPktStatistic = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5))
h3cPortalPktStaReqAuthNum = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaReqAuthNum.setStatus('current')
h3cPortalPktStaAckAuthSuccess = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckAuthSuccess.setStatus('current')
h3cPortalPktStaAckAuthReject = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckAuthReject.setStatus('current')
h3cPortalPktStaAckAuthEstablish = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckAuthEstablish.setStatus('current')
h3cPortalPktStaAckAuthBusy = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckAuthBusy.setStatus('current')
h3cPortalPktStaAckAuthAuthFail = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckAuthAuthFail.setStatus('current')
h3cPortalPktStaReqChallengeNum = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaReqChallengeNum.setStatus('current')
h3cPortalPktStaAckChallengeSuccess = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckChallengeSuccess.setStatus('current')
h3cPortalPktStaAckChallengeReject = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckChallengeReject.setStatus('current')
h3cPortalPktStaAckChallengeEstablish = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckChallengeEstablish.setStatus('current')
h3cPortalPktStaAckChallengeBusy = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckChallengeBusy.setStatus('current')
h3cPortalPktStaAckChallengeAuthFail = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 99, 5, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPortalPktStaAckChallengeAuthFail.setStatus('current')
mibBuilder.exportSymbols("H3C-PORTAL-MIB", h3cPortalStatistic=h3cPortalStatistic, h3cPortalTables=h3cPortalTables, h3cPortalMacTriggerSrvEntry=h3cPortalMacTriggerSrvEntry, h3cPortalForbiddenRuleSrcPort=h3cPortalForbiddenRuleSrcPort, h3cPortalFirstTrapTime=h3cPortalFirstTrapTime, h3cPortalForbiddenRuleDstPrefix=h3cPortalForbiddenRuleDstPrefix, h3cPortalFreeRuleEntry=h3cPortalFreeRuleEntry, h3cPortalForbiddenRuleRowStatus=h3cPortalForbiddenRuleRowStatus, h3cPortalPktStaAckAuthEstablish=h3cPortalPktStaAckAuthEstablish, h3cPortalStatAuthTimeout=h3cPortalStatAuthTimeout, h3cPortalForbiddenRuleDstAddr=h3cPortalForbiddenRuleDstAddr, h3cPortalPktStaAckAuthAuthFail=h3cPortalPktStaAckAuthAuthFail, h3cPortalForbiddenRuleSrcIfIndex=h3cPortalForbiddenRuleSrcIfIndex, h3cPortalServerLost=h3cPortalServerLost, h3cPortalStatAuthReq=h3cPortalStatAuthReq, h3cPortalPktStaReqChallengeNum=h3cPortalPktStaReqChallengeNum, h3cPortalPktStaAckChallengeBusy=h3cPortalPktStaAckChallengeBusy, h3cPortalFreeRuleAddrType=h3cPortalFreeRuleAddrType, h3cPortalFreeRuleRowStatus=h3cPortalFreeRuleRowStatus, h3cPortalFreeRuleDstPort=h3cPortalFreeRuleDstPort, h3cPortalForbiddenRuleProtocol=h3cPortalForbiddenRuleProtocol, h3cPortalMacTriggerOnIfRowStatus=h3cPortalMacTriggerOnIfRowStatus, h3cPortalStatAuthFail=h3cPortalStatAuthFail, h3cPortalIfVlanNasIDIfIndex=h3cPortalIfVlanNasIDIfIndex, h3cPortalIfInfoTable=h3cPortalIfInfoTable, h3cPortalForbiddenRuleAddrType=h3cPortalForbiddenRuleAddrType, h3cPortalIfVlanNasIDEntry=h3cPortalIfVlanNasIDEntry, h3cPortalMacTriggerOnIfTable=h3cPortalMacTriggerOnIfTable, h3cPortalServerTable=h3cPortalServerTable, h3cPortalFreeRuleDstAddr=h3cPortalFreeRuleDstAddr, h3cPortalMacTriggerSrvRowStatus=h3cPortalMacTriggerSrvRowStatus, h3cPortalFreeRuleSrcPort=h3cPortalFreeRuleSrcPort, h3cPortalFreeRuleTable=h3cPortalFreeRuleTable, h3cPortalIfServerIndex=h3cPortalIfServerIndex, h3cPortalNasId=h3cPortalNasId, h3cPortalIfInfoEntry=h3cPortalIfInfoEntry, h3cPortalForbiddenRuleSrcVlanID=h3cPortalForbiddenRuleSrcVlanID, h3cPortalMacTriggerSrvTable=h3cPortalMacTriggerSrvTable, h3cPortalMacTriggerSrvIP=h3cPortalMacTriggerSrvIP, h3cPortalFreeRuleDstPrefix=h3cPortalFreeRuleDstPrefix, h3cPortalPktStaAckAuthSuccess=h3cPortalPktStaAckAuthSuccess, h3cPortalStatAuthResp=h3cPortalStatAuthResp, h3cPortalTrapPrefix=h3cPortalTrapPrefix, h3cPortalStatAuthUnknownError=h3cPortalStatAuthUnknownError, h3cPortalFreeRuleProtocol=h3cPortalFreeRuleProtocol, h3cPortalStatAuthBusy=h3cPortalStatAuthBusy, h3cPortalFreeRuleSrcIfIndex=h3cPortalFreeRuleSrcIfIndex, h3cPortalStatChallengeBusy=h3cPortalStatChallengeBusy, h3cPortalStatAuthDisordered=h3cPortalStatAuthDisordered, h3cPortalServerGet=h3cPortalServerGet, h3cPortalAuthFailNumber=h3cPortalAuthFailNumber, h3cPortalMacTriggerOnIfEntry=h3cPortalMacTriggerOnIfEntry, h3cPortalIfServerTable=h3cPortalIfServerTable, h3cPortalIfVlanNasIDNasID=h3cPortalIfVlanNasIDNasID, h3cPortalCurrentUserNumber=h3cPortalCurrentUserNumber, h3cPortalIfVlanNasIDVlanID=h3cPortalIfVlanNasIDVlanID, h3cPortalPktStaReqAuthNum=h3cPortalPktStaReqAuthNum, PYSNMP_MODULE_ID=h3cPortal, h3cPortalPktStaAckChallengeAuthFail=h3cPortalPktStaAckChallengeAuthFail, h3cPortalTraps=h3cPortalTraps, h3cPortalSSIDFreeRuleEntry=h3cPortalSSIDFreeRuleEntry, h3cPortalSSIDFreeRuleSrcSSID=h3cPortalSSIDFreeRuleSrcSSID, h3cPortalIfServerRowStatus=h3cPortalIfServerRowStatus, h3cPortalPktStaAckAuthReject=h3cPortalPktStaAckAuthReject, h3cPortalPktStaAckAuthBusy=h3cPortalPktStaAckAuthBusy, h3cPortalForbiddenRuleTable=h3cPortalForbiddenRuleTable, 
h3cPortalFreeRuleSrcVlanID=h3cPortalFreeRuleSrcVlanID, h3cPortalTrapVarObjects=h3cPortalTrapVarObjects, h3cPortalMacTriggerOnIfThresholdFlow=h3cPortalMacTriggerOnIfThresholdFlow, h3cPortalPktStaAckChallengeEstablish=h3cPortalPktStaAckChallengeEstablish, h3cPortalStatAckLogout=h3cPortalStatAckLogout, h3cPortalStatChallengeFail=h3cPortalStatChallengeFail, h3cPortalAuthReqNumber=h3cPortalAuthReqNumber, h3cPortalServerEntry=h3cPortalServerEntry, h3cPortalForbiddenRuleSrcPrefix=h3cPortalForbiddenRuleSrcPrefix, h3cPortalForbiddenRuleSrcMac=h3cPortalForbiddenRuleSrcMac, h3cPortalForbiddenRuleIndex=h3cPortalForbiddenRuleIndex, h3cPortalForbiddenRuleSrcAddr=h3cPortalForbiddenRuleSrcAddr, h3cPortalStatPwdError=h3cPortalStatPwdError, h3cPortalFreeRuleSrcAddr=h3cPortalFreeRuleSrcAddr, h3cPortalStatus=h3cPortalStatus, h3cPortalMaxUserNumber=h3cPortalMaxUserNumber, h3cPortalStatChallengeResp=h3cPortalStatChallengeResp, h3cPortalFreeRuleSrcPrefix=h3cPortalFreeRuleSrcPrefix, h3cPortalStatNotifyLogout=h3cPortalStatNotifyLogout, h3cPortalIfServerUrl=h3cPortalIfServerUrl, h3cPortal=h3cPortal, h3cPortalPktStaAckChallengeSuccess=h3cPortalPktStaAckChallengeSuccess, h3cPortalForbiddenRuleEntry=h3cPortalForbiddenRuleEntry, h3cPortalSSIDFreeRuleTable=h3cPortalSSIDFreeRuleTable, h3cPortalMacTriggerSrvIPAddrType=h3cPortalMacTriggerSrvIPAddrType, h3cPortalMacTriggerOnIfIfIndex=h3cPortalMacTriggerOnIfIfIndex, h3cPortalFreeRuleIndex=h3cPortalFreeRuleIndex, h3cPortalFreeRuleSrcMac=h3cPortalFreeRuleSrcMac, h3cPortalSSIDFreeRuleIndex=h3cPortalSSIDFreeRuleIndex, h3cPortalStatChallengeTimeout=h3cPortalStatChallengeTimeout, h3cPortalMacTriggerSrvPort=h3cPortalMacTriggerSrvPort, h3cPortalAuthSuccNumber=h3cPortalAuthSuccNumber, h3cPortalPktStaAckChallengeReject=h3cPortalPktStaAckChallengeReject, h3cPortalUserNumberUpperLimit=h3cPortalUserNumberUpperLimit, h3cPortalServerPort=h3cPortalServerPort, h3cPortalStatChallengeReq=h3cPortalStatChallengeReq, h3cPortalPktStatistic=h3cPortalPktStatistic, h3cPortalServerUrl=h3cPortalServerUrl, h3cPortalIfVlanNasIDTable=h3cPortalIfVlanNasIDTable, h3cPortalForbiddenRuleDstPort=h3cPortalForbiddenRuleDstPort, h3cPortalMacTriggerOnIfDetctFlowPeriod=h3cPortalMacTriggerOnIfDetctFlowPeriod, h3cPortalServerName=h3cPortalServerName, h3cPortalIfServerEntry=h3cPortalIfServerEntry, h3cPortalMacTriggerSrvIndex=h3cPortalMacTriggerSrvIndex, h3cPortalConfig=h3cPortalConfig, h3cPortalSSIDFreeRuleRowStatus=h3cPortalSSIDFreeRuleRowStatus)
| [
"[email protected]"
] | |
a2a2518930512317c83f34ef6273bff3efd67fe4 | 88a54c5e2cf3d16e5288261a37840428bf6c4834 | /src/article_loader.py | 8654a31e9a45bdbf8fdbf8d3c4253eac3d4185af | [] | no_license | VitalyRomanov/document-clustering | f2fa1c617ef8f4e2ba69ba0c152d80c919361b25 | 412a21b857b79a644f77b728b8798dda9e854e29 | refs/heads/master | 2022-04-07T22:04:30.804892 | 2018-02-03T18:50:25 | 2018-02-03T18:50:25 | 104,849,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,666 | py | import json
import os
# import pickle as p
import joblib as p
from datetime import datetime
import urllib.request
import numpy as np
def date2int(date):
return int(datetime.strptime(date, '%Y-%m-%d %H:%M:%S').timestamp())
def get_date(ts):
return datetime.fromtimestamp(
int(ts)  # ts is an integer (or numeric) Unix timestamp
).strftime('%Y-%m-%d %H:%M:%S')
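# Round-trip property of the two helpers above (local-timezone dependent):
#   get_date(date2int(s)) == s  for any s in '%Y-%m-%d %H:%M:%S' format,
# e.g. date2int('2017-10-26 18:21:17') yields the epoch seconds that
# get_date() converts back to the same string on the same machine.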
def load_latest():
dump_file = "articles_dump.dat"
l_time = 1509031277
if os.path.isfile(dump_file):
articles = p.load(open(dump_file, "rb"))
else:
articles = []
return articles
# def retreive_articles(l_time):
# data = json.load(open('1509031277.json'))
# # retreive articles' dates
# dates = list(map(date2int, map(lambda x: x['public_date'], data)))
# # sort articles by date
# s_ind = sorted(range(len(dates)), key=lambda k: dates[k])
# s_data = [data[ind] for ind in s_ind]
# return s_data
def retreive_articles_url(time):
"""
:param time: Unix timestamp (integer seconds) of the newest record already held; only newer articles are returned
:return: list of article records sorted by date
"""
url_addr = "https://www.business-gazeta.ru/index/monitoring/timestamp/%d" % time
data = None
with urllib.request.urlopen(url_addr) as url:
data = json.loads(url.read().decode())
dates = list(map(date2int, map(lambda x: x['public_date'], data)))
# sort articles by date
s_ind = sorted(range(len(dates)), key=lambda k: dates[k])
s_data = [data[ind] for ind in s_ind]
return s_data
def post_json(data_json):
url_addr = "https://www.business-gazeta.ru/index/similar"
enc_json = data_json.encode('utf-8')
req = urllib.request.Request(url_addr, data=enc_json,
headers={'content-type': 'application/json'})
response = urllib.request.urlopen(req)
print(response.read())
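# The body this endpoint expects matches what AData.make_json() below
# produces, i.e. a JSON object shaped like (ids assumed for illustration):
#   {"article_id": 123, "similar_id": [120, 121]}
# so a minimal call is:
#   post_json(json.dumps({"article_id": 123, "similar_id": [120, 121]}))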
# def get_last_time(articles):
# return articles[-1] if len(articles) != 0 else 0
# latest = 0
# for article in articles:
# candidate = date2int(article['public_date'])
# if candidate > latest:
# latest = candidate
# return latest
def get_sections(s_data):
# split data into sections
ids = list(map(lambda x: x['id'], s_data))
titles = list(map(lambda x: x['title'], s_data))
content = list(map(lambda x: x['content'], s_data))
dates = list(map(date2int, map(lambda x: x['public_date'], s_data)))
links = list(map(lambda x: x['link'], s_data))
return ids, titles, content, dates, links
class AData:
ids = None
titles = None
content = None
dates = None
links = None
_TWO_DAYS = 60 * 60 * 24 * 2 # sec*min*hr*2d
def __init__(self):
self.ids = []
self.titles = []
self.content = []
self.dates = []
self.links = []
articles_data = get_sections(load_latest())
self.join_sections(articles_data)
self._latest = self.get_last_time()
self.new = len(self.ids)
def load_new(self):
self._latest = self.get_last_time()
self.new = len(self.ids)
print("Retreiving after %s" % get_date(self._latest), end=": ")
new_articles = retreive_articles_url(self._latest)
articles_data = get_sections(new_articles)
self.join_sections(articles_data)
self.new = len(new_articles)
if self.new == 0:
print("Nothing new")
else:
print("%d added" % self.new)
def join_sections(self, articles_data):
ids, titles, content, dates, links = articles_data
self.ids += ids
self.titles += titles
self.content += content
self.dates += dates
self.links += links
def get_article(self, a_id):
return self.content[a_id]
def get_last_time(self):
return self.dates[-1] if len(self.dates) > 0 else 1509031277
def two_days_range(self, id1, id2):
return abs(self.dates[id1] - self.dates[id2]) < self._TWO_DAYS
def get_last_two_days(self, a_id):
begin_with = self.ids.index(a_id)
ids = []
for i in range(begin_with, -1, -1):
if self.two_days_range(begin_with, i):
ids.append(i)
else:
break
return np.array(ids)
def make_json(self, doc_id, similar_id):
return json.dumps({"article_id": self.ids[doc_id],
"similar_id": [self.ids[s_id] for s_id in similar_id]},
indent=4)
def get_latest(self, last_id, content_type='titles', filter_bl=True):
"""
Input: last_id - the id in self.ids.
content_type - optional. Specifies whether to return titles or articles'
body
filter_bl - specifies whether to apply blacklist filtering or not
Returns: all documents and ids that appear after the doc with last_id
"""
try:
last_pos = self.ids.index(last_id)
except:
if last_id != -1:
raise Exception("No document with such id")
last_pos = last_id
if content_type == 'titles':
content_source = self.titles
elif content_type == 'content':
content_source = self.content
else:
raise NotImplementedError(content_type)  # NotImplemented is a constant, not an exception
latest_ids = []
latest_content = []
for i in range(last_pos + 1, len(self.ids)):
if filter_bl and self.is_blacklisted(i):
continue
latest_ids.append(self.ids[i])
latest_content.append(content_source[i])
return {'ids': latest_ids, 'docs': latest_content}
def get_titles(self, last_n=-1):
"""
:param last_n: the number of latest titles to return
:return: dictionary that contains ids and the content of titles
"""
titles_total = len(self.titles)
if last_n == -1:
titles_range = range(titles_total)
else:
titles_range = range(max(titles_total - last_n, 0), titles_total)
titles_ids = []
titles_content = []
for i in titles_range:
if not self.is_blacklisted(i):
titles_ids.append(self.ids[i])
titles_content.append(self.titles[i])
return {'ids': titles_ids, 'titles': titles_content}
def is_blacklisted(self, ind: int) -> bool:
black_list = ['realnoevremya.ru', 'tatcenter.ru']
url = self.links[ind].split("/")[2]
return url in black_list
@staticmethod
def load(path):
    # assumed to be a static factory (it takes no self): unpickles an AData instance
    return p.load(open(path, "rb"))

def save(self, path):
    p.dump(self, open(path, "wb"))
| [
"[email protected]"
] | |
be1d104b2f9883aeb4d68360c1c230337ff776cd | 3740de0d6e43ea140fc09ab314e4c492603ba185 | /scripts/sources/S_EstimateQuantileMixtureStressTest.py | fb9d5a602ba8a987d718463a4796f474687762ad | [
"MIT"
] | permissive | s0ap/arpmRes | 29c60c65fd3e11be1cc31d46494e5b3ebf6e05ab | ddcc4de713b46e3e9dcb77cc08c502ce4df54f76 | refs/heads/master | 2022-02-16T05:01:22.118959 | 2019-08-20T16:45:02 | 2019-08-20T16:45:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,523 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.4'
# jupytext_version: 1.1.4
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# # S_EstimateQuantileMixtureStressTest [<img src="https://www.arpm.co/lab/icons/icon_permalink.png" width=30 height=30 style="display: inline;">](https://www.arpm.co/lab/redirect.php?code=S_EstimateQuantileMixtureStressTest&codeLang=Python)
# For details, see [here](https://www.arpm.co/lab/redirect.php?permalink=e-sta-ssessq-uant-copy-1).
# ## Prepare the environment
# +
import os
import os.path as path
import sys
sys.path.append(path.abspath('../../functions-legacy'))
import numpy as np
from numpy import arange, zeros, var, mean
from numpy.random import rand
import matplotlib.pyplot as plt
from matplotlib.pyplot import plot, bar, legend, subplots, title
plt.style.use('seaborn')
from ARPM_utils import save_plot
from QuantileMixture import QuantileMixture
# -
# ## Compute error, bias and inefficiency for every estimator and for every DGP within the stress-test set
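# Recall the error decomposition used throughout this script: for an estimator G
# of the true value g_f, the mean squared error splits as
# E[(G - g_f)^2] = (E[G] - g_f)^2 + Var(G), i.e. error = bias^2 + inefficiency^2,
# which is exactly what the per-DGP quantities computed below measure.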
# +
# define estimators
g_b = lambda X: mean(X, 1, keepdims=True)
g_e = lambda X: np.median(X, 1, keepdims=True)
# generate the scenarios for the time series
t_ = 50
j_ = 10 ** 4
alpha = 0.5
sigma_Y = 0.2
mu_Z = 0
sigma_Z = 0.15
S = arange(0,0.22,0.02) # stress-test set for parameter mu_Y
k_ = len(S)
I = zeros((j_, t_))
er_b = zeros(k_)
er_e = zeros(k_)
bias2_b = zeros(k_)
bias2_e = zeros(k_)
inef2_b = zeros(k_)
inef2_e = zeros(k_)
for k in range(k_):
# compute the true value of the property
mu_Y = S[k]
g_f = QuantileMixture(0.5, alpha, mu_Y, sigma_Y, mu_Z, sigma_Z)
# generate j_ simulations of the time series
P = rand(j_, t_)
for j in range(j_):
I[j,:] = QuantileMixture(P[j, :], alpha, mu_Y, sigma_Y, mu_Z, sigma_Z)
# compute simulations of the estimators
G_b = g_b(I)
G_e = g_e(I)
# compute the losses of the estimators
L_b = (G_b - g_f) ** 2
L_e = (G_e - g_f) ** 2
# compute errors
er_b[k] = mean(L_b)
er_e[k] = mean(L_e)
# compute square bias
bias2_b[k] = (mean((G_b) - g_f)) ** 2
bias2_e[k] = (mean((G_e) - g_f)) ** 2
# compute square inefficiency
inef2_b[k] = var(G_b, ddof=1)
inef2_e[k] = var(G_e, ddof=1)
# -
# ## Compute robust and ensemble errors
# +
er_rob_b = max(er_b)
er_rob_e = max(er_e)
er_ens_b = mean(er_b)
er_ens_e = mean(er_e)
# -
# ## Determine the optimal estimator
# best robust estimator
er_rob = min([er_rob_b, er_rob_e])
# best ensemble estimator
er_ens = min([er_ens_b, er_ens_e])
# ## plot error, bias and inefficiency for each DGP within the stress-test set
# +
red = [.9, .4, 0]
blue = [0, .45, .7]
f, ax = subplots(2,1)
plt.sca(ax[0])
b = bar(range(1, k_+1), bias2_b.T + inef2_b.T, facecolor=red, label='bias$^2$')
b = bar(range(1, k_+1), inef2_b.T, facecolor=blue, label='ineff$^2$')
plot(range(1, k_+1), er_b, 'k', lw=1.5, label='error')
plt.xticks(range(0, k_+2, 2))
legend()
title('stress-test of estimator b')
plt.sca(ax[1])
b = bar(range(1, k_+1), bias2_e.T + inef2_e.T, facecolor=red)
b = bar(range(1, k_+1), inef2_e.T, facecolor=blue)
plot(range(1, k_+1), er_e, 'k', lw=1.5)
plt.xticks(range(0,k_+2,2))
title('stress-test of estimator e')
plt.tight_layout();
plt.show()
# save_plot(ax=plt.gca(), extension='png', scriptname=os.path.basename('.')[:-3], count=plt.get_fignums()[-1])
| [
"[email protected]"
] | |
78143c4e6942051b155a1e0dc181ef0d38715934 | c67f2d0677f8870bc1d970891bbe31345ea55ce2 | /zippy/lib-python/3/test/test_genexps.py | cc75ac26ee667116ef05274e3e3a41516ae62aeb | [
"BSD-3-Clause"
] | permissive | securesystemslab/zippy | a5a1ecf5c688504d8d16128ce901406ffd6f32c2 | ff0e84ac99442c2c55fe1d285332cfd4e185e089 | refs/heads/master | 2022-07-05T23:45:36.330407 | 2018-07-10T22:17:32 | 2018-07-10T22:17:32 | 67,824,983 | 324 | 27 | null | null | null | null | UTF-8 | Python | false | false | 7,149 | py | doctests = """
Test simple loop with conditional
>>> sum(i*i for i in range(100) if i&1 == 1)
166650
Test simple nesting
>>> list((i,j) for i in range(3) for j in range(4) )
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Test nesting with the inner expression dependent on the outer
>>> list((i,j) for i in range(4) for j in range(i) )
[(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
Make sure the induction variable is not exposed
>>> i = 20
>>> sum(i*i for i in range(100))
328350
>>> i
20
Test first class
>>> g = (i*i for i in range(4))
>>> type(g)
<class 'generator'>
>>> list(g)
[0, 1, 4, 9]
Test direct calls to next()
>>> g = (i*i for i in range(3))
>>> next(g)
0
>>> next(g)
1
>>> next(g)
4
>>> next(g)
Traceback (most recent call last):
File "<pyshell#21>", line 1, in -toplevel-
next(g)
StopIteration
Does it stay stopped?
>>> next(g)
Traceback (most recent call last):
File "<pyshell#21>", line 1, in -toplevel-
next(g)
StopIteration
>>> list(g)
[]
Test running gen when defining function is out of scope
>>> def f(n):
... return (i*i for i in range(n))
>>> list(f(10))
[0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
>>> def f(n):
... return ((i,j) for i in range(3) for j in range(n))
>>> list(f(4))
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
>>> def f(n):
... return ((i,j) for i in range(3) for j in range(4) if j in range(n))
>>> list(f(4))
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
>>> list(f(2))
[(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)]
Verify that parenthesis are required in a statement
>>> def f(n):
... return i*i for i in range(n)
Traceback (most recent call last):
...
SyntaxError: invalid syntax
Verify that parenthesis are required when used as a keyword argument value
>>> dict(a = i for i in range(10))
Traceback (most recent call last):
...
SyntaxError: invalid syntax
Verify that parenthesis are required when used as a keyword argument value
>>> dict(a = (i for i in range(10))) #doctest: +ELLIPSIS
{'a': <generator object <genexpr> at ...>}
Verify early binding for the outermost for-expression
>>> x=10
>>> g = (i*i for i in range(x))
>>> x = 5
>>> list(g)
[0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
Verify that the outermost for-expression makes an immediate check
for iterability
>>> (i for i in 6)
Traceback (most recent call last):
File "<pyshell#4>", line 1, in -toplevel-
(i for i in 6)
TypeError: 'int' object is not iterable
Verify late binding for the outermost if-expression
>>> include = (2,4,6,8)
>>> g = (i*i for i in range(10) if i in include)
>>> include = (1,3,5,7,9)
>>> list(g)
[1, 9, 25, 49, 81]
Verify late binding for the innermost for-expression
>>> g = ((i,j) for i in range(3) for j in range(x))
>>> x = 4
>>> list(g)
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Verify re-use of tuples (a side benefit of using genexps over listcomps)
>>> from test.support import check_impl_detail
>>> tupleids = list(map(id, ((i,i) for i in range(10))))
>>> int(max(tupleids) - min(tupleids)) if check_impl_detail() else 0
0
Verify that syntax errors are raised for genexps used as lvalues
>>> (y for y in (1,2)) = 10
Traceback (most recent call last):
...
SyntaxError: can't assign to generator expression
>>> (y for y in (1,2)) += 10
Traceback (most recent call last):
...
SyntaxError: can't assign to generator expression
########### Tests borrowed from or inspired by test_generators.py ############
Make a generator that acts like range()
>>> yrange = lambda n: (i for i in range(n))
>>> list(yrange(10))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Generators always return to the most recent caller:
>>> def creator():
... r = yrange(5)
... print("creator", next(r))
... return r
>>> def caller():
... r = creator()
... for i in r:
... print("caller", i)
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4
Generators can call other generators:
>>> def zrange(n):
... for i in yrange(n):
... yield i
>>> list(zrange(5))
[0, 1, 2, 3, 4]
Verify that a gen exp cannot be resumed while it is actively running:
>>> g = (next(me) for i in range(10))
>>> me = g
>>> next(me)
Traceback (most recent call last):
File "<pyshell#30>", line 1, in -toplevel-
next(me)
File "<pyshell#28>", line 1, in <generator expression>
g = (next(me) for i in range(10))
ValueError: generator already executing
Verify exception propagation
>>> g = (10 // i for i in (5, 0, 2))
>>> next(g)
2
>>> next(g)
Traceback (most recent call last):
File "<pyshell#37>", line 1, in -toplevel-
next(g)
File "<pyshell#35>", line 1, in <generator expression>
g = (10 // i for i in (5, 0, 2))
ZeroDivisionError: integer division or modulo by zero
>>> next(g)
Traceback (most recent call last):
File "<pyshell#38>", line 1, in -toplevel-
next(g)
StopIteration
Make sure that None is a valid return value
>>> list(None for i in range(10))
[None, None, None, None, None, None, None, None, None, None]
Check that generator attributes are present
>>> g = (i*i for i in range(3))
>>> expected = set(['gi_frame', 'gi_running'])
>>> set(attr for attr in dir(g) if not attr.startswith('__')) >= expected
True
>>> print(g.__next__.__doc__)
x.__next__() <==> next(x)
>>> import types
>>> isinstance(g, types.GeneratorType)
True
Check the __iter__ slot is defined to return self
>>> iter(g) is g
True
Verify that the running flag is set properly
>>> g = (me.gi_running for i in (0,1))
>>> me = g
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0
Verify that genexps are weakly referencable
>>> import weakref
>>> g = (i*i for i in range(4))
>>> wr = weakref.ref(g)
>>> wr() is g
True
>>> p = weakref.proxy(g)
>>> list(p)
[0, 1, 4, 9]
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=None):
import sys
from test import support
from test import test_genexps
support.run_doctest(test_genexps, verbose)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_doctest(test_genexps, verbose)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
| [
"[email protected]"
] | |
43d2678fe00adbaa6aeb89d3ac85cee449782bf5 | 2ed86a79d0fcd299ad4a01310954c5eddcf01edf | /homeassistant/components/tankerkoenig/binary_sensor.py | 5f10b54f7042763cd7b371c8f9cef7f5b76c43ec | [
"Apache-2.0"
] | permissive | konnected-io/home-assistant | 037f12c87bb79e19220192eb918e49db1b1a8b3e | 2e65b77b2b5c17919939481f327963abdfdc53f0 | refs/heads/dev | 2023-05-11T08:57:41.891518 | 2023-05-07T20:03:37 | 2023-05-07T20:03:37 | 109,931,626 | 24 | 10 | Apache-2.0 | 2023-02-22T06:24:01 | 2017-11-08T05:27:21 | Python | UTF-8 | Python | false | false | 2,257 | py | """Tankerkoenig binary sensor integration."""
from __future__ import annotations
import logging
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import TankerkoenigCoordinatorEntity, TankerkoenigDataUpdateCoordinator
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the tankerkoenig binary sensors."""
coordinator: TankerkoenigDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
stations = coordinator.stations.values()
entities = []
for station in stations:
sensor = StationOpenBinarySensorEntity(
station,
coordinator,
coordinator.show_on_map,
)
entities.append(sensor)
_LOGGER.debug("Added sensors %s", entities)
async_add_entities(entities)
class StationOpenBinarySensorEntity(TankerkoenigCoordinatorEntity, BinarySensorEntity):
"""Shows if a station is open or closed."""
_attr_device_class = BinarySensorDeviceClass.DOOR
def __init__(
self,
station: dict,
coordinator: TankerkoenigDataUpdateCoordinator,
show_on_map: bool,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator, station)
self._station_id = station["id"]
self._attr_name = (
f"{station['brand']} {station['street']} {station['houseNumber']} status"
)
self._attr_unique_id = f"{station['id']}_status"
if show_on_map:
self._attr_extra_state_attributes = {
ATTR_LATITUDE: station["lat"],
ATTR_LONGITUDE: station["lng"],
}
@property
def is_on(self) -> bool | None:
"""Return true if the station is open."""
data: dict = self.coordinator.data[self._station_id]
return data is not None and data.get("status") == "open"
| [
"[email protected]"
] | |
878ea199022c142618b146acd39e6a8e298d8e7d | 2b42b40ae2e84b438146003bf231532973f1081d | /spec/mgm4456629.3.spec | 8a2485d2abe4621730ed59fee93f7b17ba12906e | [] | no_license | MG-RAST/mtf | 0ea0ebd0c0eb18ec6711e30de7cc336bdae7215a | e2ddb3b145068f22808ef43e2bbbbaeec7abccff | refs/heads/master | 2020-05-20T15:32:04.334532 | 2012-03-05T09:51:49 | 2012-03-05T09:51:49 | 3,625,755 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 14,301 | spec | {
"id": "mgm4456629.3",
"metadata": {
"mgm4456629.3.metadata.json": {
"format": "json",
"provider": "metagenomics.anl.gov"
}
},
"providers": {
"metagenomics.anl.gov": {
"files": {
"100.preprocess.info": {
"compression": null,
"description": null,
"size": 736,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/100.preprocess.info"
},
"100.preprocess.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 38759,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/100.preprocess.passed.fna.gz"
},
"100.preprocess.passed.fna.stats": {
"compression": null,
"description": null,
"size": 309,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/100.preprocess.passed.fna.stats"
},
"100.preprocess.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 491,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/100.preprocess.removed.fna.gz"
},
"100.preprocess.removed.fna.stats": {
"compression": null,
"description": null,
"size": 305,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/100.preprocess.removed.fna.stats"
},
"205.screen.h_sapiens_asm.info": {
"compression": null,
"description": null,
"size": 448,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/205.screen.h_sapiens_asm.info"
},
"299.screen.info": {
"compression": null,
"description": null,
"size": 410,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/299.screen.info"
},
"299.screen.passed.fna.gcs": {
"compression": null,
"description": null,
"size": 1065,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/299.screen.passed.fna.gcs"
},
"299.screen.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 38060,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/299.screen.passed.fna.gz"
},
"299.screen.passed.fna.lens": {
"compression": null,
"description": null,
"size": 346,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/299.screen.passed.fna.lens"
},
"299.screen.passed.fna.stats": {
"compression": null,
"description": null,
"size": 309,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/299.screen.passed.fna.stats"
},
"440.cluster.rna97.fna.gz": {
"compression": "gzip",
"description": null,
"size": 8236,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/440.cluster.rna97.fna.gz"
},
"440.cluster.rna97.fna.stats": {
"compression": null,
"description": null,
"size": 307,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/440.cluster.rna97.fna.stats"
},
"440.cluster.rna97.info": {
"compression": null,
"description": null,
"size": 947,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/440.cluster.rna97.info"
},
"440.cluster.rna97.mapping": {
"compression": null,
"description": null,
"size": 40949,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/440.cluster.rna97.mapping"
},
"440.cluster.rna97.mapping.stats": {
"compression": null,
"description": null,
"size": 48,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/440.cluster.rna97.mapping.stats"
},
"450.rna.expand.lca.gz": {
"compression": "gzip",
"description": null,
"size": 56575,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/450.rna.expand.lca.gz"
},
"450.rna.expand.rna.gz": {
"compression": "gzip",
"description": null,
"size": 14500,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/450.rna.expand.rna.gz"
},
"450.rna.sims.filter.gz": {
"compression": "gzip",
"description": null,
"size": 10174,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/450.rna.sims.filter.gz"
},
"450.rna.sims.gz": {
"compression": "gzip",
"description": null,
"size": 104770,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/450.rna.sims.gz"
},
"900.abundance.function.gz": {
"compression": "gzip",
"description": null,
"size": 5859,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.abundance.function.gz"
},
"900.abundance.lca.gz": {
"compression": "gzip",
"description": null,
"size": 5364,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.abundance.lca.gz"
},
"900.abundance.md5.gz": {
"compression": "gzip",
"description": null,
"size": 8554,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.abundance.md5.gz"
},
"900.abundance.ontology.gz": {
"compression": "gzip",
"description": null,
"size": 43,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.abundance.ontology.gz"
},
"900.abundance.organism.gz": {
"compression": "gzip",
"description": null,
"size": 12814,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.abundance.organism.gz"
},
"900.loadDB.sims.filter.seq": {
"compression": null,
"description": null,
"size": 812824,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.loadDB.sims.filter.seq"
},
"900.loadDB.source.stats": {
"compression": null,
"description": null,
"size": 94,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/900.loadDB.source.stats"
},
"999.done.COG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.COG.stats"
},
"999.done.KO.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.KO.stats"
},
"999.done.NOG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.NOG.stats"
},
"999.done.Subsystems.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.Subsystems.stats"
},
"999.done.class.stats": {
"compression": null,
"description": null,
"size": 355,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.class.stats"
},
"999.done.domain.stats": {
"compression": null,
"description": null,
"size": 35,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.domain.stats"
},
"999.done.family.stats": {
"compression": null,
"description": null,
"size": 1491,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.family.stats"
},
"999.done.genus.stats": {
"compression": null,
"description": null,
"size": 1790,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.genus.stats"
},
"999.done.order.stats": {
"compression": null,
"description": null,
"size": 633,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.order.stats"
},
"999.done.phylum.stats": {
"compression": null,
"description": null,
"size": 206,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.phylum.stats"
},
"999.done.rarefaction.stats": {
"compression": null,
"description": null,
"size": 36222,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.rarefaction.stats"
},
"999.done.sims.stats": {
"compression": null,
"description": null,
"size": 79,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.sims.stats"
},
"999.done.species.stats": {
"compression": null,
"description": null,
"size": 4498,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4456629.3/file/999.done.species.stats"
}
},
"id": "mgm4456629.3",
"provider": "metagenomics.anl.gov",
"providerId": "mgm4456629.3"
}
},
"raw": {
"mgm4456629.3.fna.gz": {
"compression": "gzip",
"format": "fasta",
"provider": "metagenomics.anl.gov",
"url": "http://api.metagenomics.anl.gov/reads/mgm4456629.3"
}
}
} | [
"[email protected]"
] | |
a0ba0f8d5ac0bf71c18e90491831eea9e884eea3 | 2a91a64f5464c48fb56bd2f0e01668737d4eafa9 | /google-cloud-sdk/lib/googlecloudsdk/third_party/apis/resourcesettings/v1alpha1/resourcesettings_v1alpha1_messages.py | b11a7c1f6f7f92eb4e127e2fa1b46f7f49f45edf | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | salewski/google-cloud-sdk | 8513faf8c2f5b9180361efb567c4cfb9986d1e21 | 060174026ac068b6442b6c58bedf5adc7bc549cb | refs/heads/master | 2023-08-16T09:44:57.948620 | 2021-10-05T00:00:00 | 2021-10-05T16:15:40 | 417,465,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,806 | py | """Generated message classes for resourcesettings version v1alpha1.
The Resource Settings API allows users to control and modify the behavior of
their GCP resources (e.g., VM, firewall, Project, etc.) across the Cloud
Resource Hierarchy.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
package = 'resourcesettings'
class GoogleCloudResourcesettingsV1alpha1ListSettingsResponse(_messages.Message):
r"""The response from ListSettings.
Fields:
nextPageToken: Unused. A page token used to retrieve the next page.
settings: A list of settings that are available at the specified Cloud
resource.
"""
nextPageToken = _messages.StringField(1)
settings = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1Setting', 2, repeated=True)
class GoogleCloudResourcesettingsV1alpha1SearchSettingValuesResponse(_messages.Message):
r"""The response from SearchSettingValues.
Fields:
nextPageToken: Unused. A page token used to retrieve the next page.
settingValues: All setting values that exist on the specified Cloud
resource.
"""
nextPageToken = _messages.StringField(1)
settingValues = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 2, repeated=True)
class GoogleCloudResourcesettingsV1alpha1Setting(_messages.Message):
r"""The schema for setting values. At a given Cloud resource, a setting can
parent at most one setting value.
Enums:
DataTypeValueValuesEnum: The data type for this setting.
Fields:
dataType: The data type for this setting.
defaultValue: The value received by LookupEffectiveSettingValue if no
setting value is explicitly set. Note: not all settings have a default
value.
description: A detailed description of what this setting does.
displayName: The human readable name for this setting.
name: The resource name of the setting. Must be in one of the following
forms: * `projects/{project_number}/settings/{setting_name}` *
`folders/{folder_id}/settings/{setting_name}` *
`organizations/{organization_id}/settings/{setting_name}` For example,
"/projects/123/settings/gcp-enableMyFeature"
readOnly: A flag indicating that values of this setting cannot be modified
(see documentation of the specific setting for updates and reasons);
however, it may be deleted using DeleteSettingValue if
DeleteSettingValueRequest.ignore_read_only is set to true. Using this
flag is considered an acknowledgement that the setting value cannot be
recreated. See DeleteSettingValueRequest.ignore_read_only for more
details.
"""
class DataTypeValueValuesEnum(_messages.Enum):
r"""The data type for this setting.
Values:
DATA_TYPE_UNSPECIFIED: Unspecified data type.
BOOLEAN: A boolean setting.
STRING: A string setting.
STRING_SET: A string set setting.
ENUM_VALUE: A Enum setting
DURATION_VALUE: A Duration setting
STRING_MAP: A string->string map setting
"""
DATA_TYPE_UNSPECIFIED = 0
BOOLEAN = 1
STRING = 2
STRING_SET = 3
ENUM_VALUE = 4
DURATION_VALUE = 5
STRING_MAP = 6
dataType = _messages.EnumField('DataTypeValueValuesEnum', 1)
defaultValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1Value', 2)
description = _messages.StringField(3)
displayName = _messages.StringField(4)
name = _messages.StringField(5)
readOnly = _messages.BooleanField(6)
class GoogleCloudResourcesettingsV1alpha1SettingValue(_messages.Message):
r"""The instantiation of a setting. Every setting value is parented by its
corresponding setting.
Fields:
etag: A fingerprint used for optimistic concurrency. See
UpdateSettingValue for more details.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
readOnly: Output only. A flag indicating that this setting value cannot be
modified; however, it may be deleted using DeleteSettingValue if
DeleteSettingValueRequest.ignore_read_only is set to true. Using this
flag is considered an acknowledgement that the setting value cannot be
recreated. This flag is inherited from its parent setting and is for
convenience purposes. See Setting.read_only for more details.
updateTime: Output only. The timestamp indicating when the setting value
was last updated.
value: The value of the setting. The data type of Value must always be
consistent with the data type defined by the parent setting.
"""
etag = _messages.StringField(1)
name = _messages.StringField(2)
readOnly = _messages.BooleanField(3)
updateTime = _messages.StringField(4)
value = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1Value', 5)
class GoogleCloudResourcesettingsV1alpha1Value(_messages.Message):
r"""The data in a setting value.
Fields:
booleanValue: Defines this value as being a boolean value.
durationValue: Defines this value as being a Duration.
enumValue: Defines this value as being a Enum.
stringMapValue: Defines this value as being a StringMap.
stringSetValue: Defines this value as being a StringSet.
stringValue: Defines this value as being a string value.
"""
booleanValue = _messages.BooleanField(1)
durationValue = _messages.StringField(2)
enumValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1ValueEnumValue', 3)
stringMapValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1ValueStringMap', 4)
stringSetValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1ValueStringSet', 5)
stringValue = _messages.StringField(6)
class GoogleCloudResourcesettingsV1alpha1ValueEnumValue(_messages.Message):
r"""A enum value that can hold any enum type setting values. Each enum type
is represented by a number, this representation is stored in the
definitions.
Fields:
value: The value of this enum
"""
value = _messages.StringField(1)
class GoogleCloudResourcesettingsV1alpha1ValueStringMap(_messages.Message):
r"""A string->string map value that can hold a map of string keys to string
values. The maximum length of each string is 200 characters and there can be
a maximum of 50 key-value pairs in the map.
Messages:
MappingsValue: The key-value pairs in the map
Fields:
mappings: The key-value pairs in the map
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class MappingsValue(_messages.Message):
r"""The key-value pairs in the map
Messages:
AdditionalProperty: An additional property for a MappingsValue object.
Fields:
additionalProperties: Additional properties of type MappingsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a MappingsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
mappings = _messages.MessageField('MappingsValue', 1)
class GoogleCloudResourcesettingsV1alpha1ValueStringSet(_messages.Message):
r"""A string set value that can hold a set of strings. The maximum length of
each string is 60 characters and there can be a maximum of 50 strings in the
string set.
Fields:
values: The strings in the set
"""
values = _messages.StringField(1, repeated=True)
class GoogleProtobufEmpty(_messages.Message):
r"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo { rpc
Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON
representation for `Empty` is empty JSON object `{}`.
"""
class ResourcesettingsFoldersSettingsDeleteValueRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsDeleteValueRequest object.
Fields:
ignoreReadOnly: A flag that allows the deletion of the value of a
`read_only` setting. WARNING: use at your own risk. Deleting the value
of a read only setting is an irreversible action (i.e., it cannot be
created again).
name: The name of the setting value to delete. See SettingValue for naming
requirements.
"""
ignoreReadOnly = _messages.BooleanField(1)
name = _messages.StringField(2, required=True)
class ResourcesettingsFoldersSettingsGetValueRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsGetValueRequest object.
Fields:
name: The name of the setting value to get. See SettingValue for naming
requirements.
"""
name = _messages.StringField(1, required=True)
class ResourcesettingsFoldersSettingsListRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsListRequest object.
Fields:
pageSize: Unused. The size of the page to be returned.
pageToken: Unused. A page token used to retrieve the next page.
parent: The Cloud resource that parents the setting. Must be in one of the
following forms: * `projects/{project_number}` * `projects/{project_id}`
* `folders/{folder_id}` * `organizations/{organization_id}`
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class ResourcesettingsFoldersSettingsLookupEffectiveValueRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsLookupEffectiveValueRequest object.
Fields:
parent: The setting for which an effective value will be evaluated. See
Setting for naming requirements.
"""
parent = _messages.StringField(1, required=True)
class ResourcesettingsFoldersSettingsSearchRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsSearchRequest object.
Fields:
pageSize: Unused. The size of the page to be returned.
pageToken: Unused. A page token used to retrieve the next page.
parent: The Cloud resource that parents the setting. Must be in one of the
following forms: * `projects/{project_number}` * `projects/{project_id}`
* `folders/{folder_id}` * `organizations/{organization_id}`
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class ResourcesettingsFoldersSettingsUpdateValueRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsUpdateValueRequest object.
Fields:
googleCloudResourcesettingsV1alpha1SettingValue: A
GoogleCloudResourcesettingsV1alpha1SettingValue resource to be passed as
the request body.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
"""
googleCloudResourcesettingsV1alpha1SettingValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 1)
name = _messages.StringField(2, required=True)
class ResourcesettingsFoldersSettingsValueCreateRequest(_messages.Message):
r"""A ResourcesettingsFoldersSettingsValueCreateRequest object.
Fields:
googleCloudResourcesettingsV1alpha1SettingValue: A
GoogleCloudResourcesettingsV1alpha1SettingValue resource to be passed as
the request body.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
"""
googleCloudResourcesettingsV1alpha1SettingValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 1)
name = _messages.StringField(2, required=True)
class ResourcesettingsOrganizationsSettingsDeleteValueRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsDeleteValueRequest object.
Fields:
ignoreReadOnly: A flag that allows the deletion of the value of a
`read_only` setting. WARNING: use at your own risk. Deleting the value
of a read only setting is an irreversible action (i.e., it cannot be
created again).
name: The name of the setting value to delete. See SettingValue for naming
requirements.
"""
ignoreReadOnly = _messages.BooleanField(1)
name = _messages.StringField(2, required=True)
class ResourcesettingsOrganizationsSettingsGetValueRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsGetValueRequest object.
Fields:
name: The name of the setting value to get. See SettingValue for naming
requirements.
"""
name = _messages.StringField(1, required=True)
class ResourcesettingsOrganizationsSettingsListRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsListRequest object.
Fields:
pageSize: Unused. The size of the page to be returned.
pageToken: Unused. A page token used to retrieve the next page.
parent: The Cloud resource that parents the setting. Must be in one of the
following forms: * `projects/{project_number}` * `projects/{project_id}`
* `folders/{folder_id}` * `organizations/{organization_id}`
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class ResourcesettingsOrganizationsSettingsLookupEffectiveValueRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsLookupEffectiveValueRequest
object.
Fields:
parent: The setting for which an effective value will be evaluated. See
Setting for naming requirements.
"""
parent = _messages.StringField(1, required=True)
class ResourcesettingsOrganizationsSettingsSearchRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsSearchRequest object.
Fields:
pageSize: Unused. The size of the page to be returned.
pageToken: Unused. A page token used to retrieve the next page.
parent: The Cloud resource that parents the setting. Must be in one of the
following forms: * `projects/{project_number}` * `projects/{project_id}`
* `folders/{folder_id}` * `organizations/{organization_id}`
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class ResourcesettingsOrganizationsSettingsUpdateValueRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsUpdateValueRequest object.
Fields:
googleCloudResourcesettingsV1alpha1SettingValue: A
GoogleCloudResourcesettingsV1alpha1SettingValue resource to be passed as
the request body.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
"""
googleCloudResourcesettingsV1alpha1SettingValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 1)
name = _messages.StringField(2, required=True)
class ResourcesettingsOrganizationsSettingsValueCreateRequest(_messages.Message):
r"""A ResourcesettingsOrganizationsSettingsValueCreateRequest object.
Fields:
googleCloudResourcesettingsV1alpha1SettingValue: A
GoogleCloudResourcesettingsV1alpha1SettingValue resource to be passed as
the request body.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
"""
googleCloudResourcesettingsV1alpha1SettingValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 1)
name = _messages.StringField(2, required=True)
class ResourcesettingsProjectsSettingsDeleteValueRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsDeleteValueRequest object.
Fields:
ignoreReadOnly: A flag that allows the deletion of the value of a
`read_only` setting. WARNING: use at your own risk. Deleting the value
of a read only setting is an irreversible action (i.e., it cannot be
created again).
name: The name of the setting value to delete. See SettingValue for naming
requirements.
"""
ignoreReadOnly = _messages.BooleanField(1)
name = _messages.StringField(2, required=True)
class ResourcesettingsProjectsSettingsGetValueRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsGetValueRequest object.
Fields:
name: The name of the setting value to get. See SettingValue for naming
requirements.
"""
name = _messages.StringField(1, required=True)
class ResourcesettingsProjectsSettingsListRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsListRequest object.
Fields:
pageSize: Unused. The size of the page to be returned.
pageToken: Unused. A page token used to retrieve the next page.
parent: The Cloud resource that parents the setting. Must be in one of the
following forms: * `projects/{project_number}` * `projects/{project_id}`
* `folders/{folder_id}` * `organizations/{organization_id}`
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class ResourcesettingsProjectsSettingsLookupEffectiveValueRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsLookupEffectiveValueRequest object.
Fields:
parent: The setting for which an effective value will be evaluated. See
Setting for naming requirements.
"""
parent = _messages.StringField(1, required=True)
class ResourcesettingsProjectsSettingsSearchRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsSearchRequest object.
Fields:
pageSize: Unused. The size of the page to be returned.
pageToken: Unused. A page token used to retrieve the next page.
parent: The Cloud resource that parents the setting. Must be in one of the
following forms: * `projects/{project_number}` * `projects/{project_id}`
* `folders/{folder_id}` * `organizations/{organization_id}`
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class ResourcesettingsProjectsSettingsUpdateValueRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsUpdateValueRequest object.
Fields:
googleCloudResourcesettingsV1alpha1SettingValue: A
GoogleCloudResourcesettingsV1alpha1SettingValue resource to be passed as
the request body.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
"""
googleCloudResourcesettingsV1alpha1SettingValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 1)
name = _messages.StringField(2, required=True)
class ResourcesettingsProjectsSettingsValueCreateRequest(_messages.Message):
r"""A ResourcesettingsProjectsSettingsValueCreateRequest object.
Fields:
googleCloudResourcesettingsV1alpha1SettingValue: A
GoogleCloudResourcesettingsV1alpha1SettingValue resource to be passed as
the request body.
name: The resource name of the setting value. Must be in one of the
following forms: *
`projects/{project_number}/settings/{setting_name}/value` *
`folders/{folder_id}/settings/{setting_name}/value` *
`organizations/{organization_id}/settings/{setting_name}/value` For
example, "/projects/123/settings/gcp-enableMyFeature/value"
"""
googleCloudResourcesettingsV1alpha1SettingValue = _messages.MessageField('GoogleCloudResourcesettingsV1alpha1SettingValue', 1)
name = _messages.StringField(2, required=True)
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
FXgafvValueValuesEnum: V1 error format.
AltValueValuesEnum: Data format for response.
Fields:
f__xgafv: V1 error format.
access_token: OAuth access token.
alt: Data format for response.
callback: JSONP
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for response.
Values:
json: Responses with Content-Type of application/json
media: Media download with context-dependent Content-Type
proto: Responses with Content-Type of application/x-protobuf
"""
json = 0
media = 1
proto = 2
class FXgafvValueValuesEnum(_messages.Enum):
r"""V1 error format.
Values:
_1: v1 error format
_2: v2 error format
"""
_1 = 0
_2 = 1
f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
access_token = _messages.StringField(2)
alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
callback = _messages.StringField(4)
fields = _messages.StringField(5)
key = _messages.StringField(6)
oauth_token = _messages.StringField(7)
prettyPrint = _messages.BooleanField(8, default=True)
quotaUser = _messages.StringField(9)
trace = _messages.StringField(10)
uploadType = _messages.StringField(11)
upload_protocol = _messages.StringField(12)
encoding.AddCustomJsonFieldMapping(
StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
| [
"[email protected]"
] | |
94c88e893fab70eb22becd4d8470f07518bbf6a5 | 8acffb8c4ddca5bfef910e58d3faa0e4de83fce8 | /ml-flask/Lib/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py | ea1ba9e9b7e48392782524321a3dcf960ee5d629 | [
"MIT"
] | permissive | YaminiHP/SimilitudeApp | 8cbde52caec3c19d5fa73508fc005f38f79b8418 | 005c59894d8788c97be16ec420c0a43aaec99b80 | refs/heads/master | 2023-06-27T00:03:00.404080 | 2021-07-25T17:51:27 | 2021-07-25T17:51:27 | 389,390,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:831ddc7c07f1bc0699f90cfd23a91ebe38264e16cd0c35dcf82dab49654d5e00
size 1601
| [
"[email protected]"
] | |
d00f8a5113df64077c306d43ae28a8fd05eda42a | ae4e517aebe74a851df977af1a11d2a67120050c | /h2o-py/tests/testdir_munging/unop/pyunit_expr_math_ops.py | 970952c2b4e4d21a1f40dda8da7beca2cf42bea5 | [
"Apache-2.0"
] | permissive | StephaneFeniar/h2o-dev | 8dd06549ddee490d6db5b7dd41f043e061cee121 | 2c0c69aeda69d08be5edce330bf34898e9b2ab2b | refs/heads/master | 2021-01-14T08:51:40.694426 | 2015-04-18T21:01:23 | 2015-04-18T21:01:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,544 | py | import sys
sys.path.insert(1, "../../../")
import h2o
import numpy as np
import random
import math
import scipy.special
def expr_math_ops(ip,port):
# Connect to h2o
h2o.init(ip,port)
sin_cos_tan_atan_sinh_cosh_tanh_asinh_data = [[random.uniform(-10,10) for r in range(10)] for c in range(10)]
asin_acos_atanh_data = [[random.uniform(-1,1) for r in range(10)] for c in range(10)]
acosh_data = [[random.uniform(1,10) for r in range(10)] for c in range(10)]
abs_data = [[random.uniform(-100000,0) for r in range(10)] for c in range(10)]
h2o_data1 = h2o.H2OFrame(python_obj=sin_cos_tan_atan_sinh_cosh_tanh_asinh_data)
h2o_data2 = h2o.H2OFrame(python_obj=asin_acos_atanh_data)
h2o_data3 = h2o.H2OFrame(python_obj=acosh_data)
h2o_data4 = h2o.H2OFrame(python_obj=abs_data)
np_data1 = np.array(sin_cos_tan_atan_sinh_cosh_tanh_asinh_data)
np_data2 = np.array(asin_acos_atanh_data)
np_data3 = np.array(acosh_data)
np_data4 = np.array(abs_data)
row, col = h2o_data1.dim()
def check_values(h2o_data, numpy_data):
success = True
for i in range(10):
r = random.randint(0,row-1)
c = random.randint(0,col-1)
h2o_val = h2o.as_list(h2o_data[r,c])[0][0]
num_val = numpy_data[r,c]
if not abs(h2o_val - num_val) < 1e-06:
success = False
print "check unsuccessful! h2o computed {0} and numpy computed {1}".format(h2o_val,num_val)
return success
h2o_data1 = h2o_data1 + 2
h2o_data2 = h2o_data2 / 1.01
h2o_data3 = h2o_data3 * 1.5
h2o_data4 = h2o_data4 - 1.5
np_data1 = np_data1 + 2
np_data2 = np_data2 / 1.01
np_data3 = np_data3 * 1.5
np_data4 = np_data4 - 1.5
assert check_values(h2o.cos(h2o_data1), np.cos(np_data1)), "expected equal cos values between h2o and numpy"
assert check_values(h2o.sin(h2o_data1), np.sin(np_data1)), "expected equal sin values between h2o and numpy"
assert check_values(h2o.tan(h2o_data1), np.tan(np_data1)), "expected equal tan values between h2o and numpy"
assert check_values(h2o.acos(h2o_data2), np.arccos(np_data2)), "expected equal acos values between h2o and numpy"
assert check_values(h2o.asin(h2o_data2), np.arcsin(np_data2)), "expected equal asin values between h2o and numpy"
assert check_values(h2o.atan(h2o_data1), np.arctan(np_data1)), "expected equal atan values between h2o and numpy"
assert check_values(h2o.cosh(h2o_data1), np.cosh(np_data1)), "expected equal cosh values between h2o and numpy"
assert check_values(h2o.sinh(h2o_data1), np.sinh(np_data1)), "expected equal sinh values between h2o and numpy"
assert check_values(h2o.tanh(h2o_data1), np.tanh(np_data1)), "expected equal tanh values between h2o and numpy"
assert check_values(h2o.acosh(h2o_data3), np.arccosh(np_data3)), "expected equal acosh values between h2o and numpy"
assert check_values(h2o.asinh(h2o_data1), np.arcsinh(np_data1)), "expected equal asinh values between h2o and numpy"
assert check_values(h2o.atanh(h2o_data2), np.arctanh(np_data2)), "expected equal atanh values between h2o and numpy"
assert check_values(h2o.cospi(h2o_data2/math.pi), np.cos(np_data2)), "expected equal cospi values between h2o and numpy"
assert check_values(h2o.sinpi(h2o_data2/math.pi), np.sin(np_data2)), "expected equal sinpi values between h2o and numpy"
assert check_values(h2o.tanpi(h2o_data2/math.pi), np.tan(np_data2)), "expected equal tanpi values between h2o and numpy"
assert check_values(h2o.abs(h2o_data4), np.fabs(np_data4)), "expected equal abs values between h2o and numpy"
assert check_values(h2o.sign(h2o_data2), np.sign(np_data2)), "expected equal sign values between h2o and numpy"
assert check_values(h2o.sqrt(h2o_data3), np.sqrt(np_data3)), "expected equal sqrt values between h2o and numpy"
assert check_values(h2o.trunc(h2o_data3), np.trunc(np_data3)), "expected equal trunc values between h2o and numpy"
assert check_values(h2o.ceil(h2o_data3), np.ceil(np_data3)), "expected equal ceil values between h2o and numpy"
assert check_values(h2o.floor(h2o_data3), np.floor(np_data3)), "expected equal floor values between h2o and numpy"
assert check_values(h2o.log(h2o_data3), np.log(np_data3)), "expected equal log values between h2o and numpy"
assert check_values(h2o.log10(h2o_data3), np.log10(np_data3)), "expected equal log10 values between h2o and numpy"
assert check_values(h2o.log1p(h2o_data3), np.log1p(np_data3)), "expected equal log1p values between h2o and numpy"
assert check_values(h2o.log2(h2o_data3), np.log2(np_data3)), "expected equal log2 values between h2o and numpy"
assert check_values(h2o.exp(h2o_data3), np.exp(np_data3)), "expected equal exp values between h2o and numpy"
assert check_values(h2o.expm1(h2o_data3), np.expm1(np_data3)), "expected equal expm1 values between h2o and numpy"
h2o_val = h2o.as_list(h2o.gamma(h2o_data3))[5][5]
num_val = math.gamma(h2o.as_list(h2o_data3)[5][5])
assert abs(h2o_val - num_val) < max(abs(h2o_val), abs(num_val)) * 1e-6, \
"check unsuccessful! h2o computed {0} and math computed {1}. expected equal gamma values between h2o and math".format(h2o_val,num_val)
h2o_val = h2o.as_list(h2o.lgamma(h2o_data3))[5][5]
num_val = math.lgamma(h2o.as_list(h2o_data3)[5][5])
assert abs(h2o_val - num_val) < max(abs(h2o_val), abs(num_val)) * 1e-6, \
"check unsuccessful! h2o computed {0} and math computed {1}. expected equal lgamma values between h2o and math".format(h2o_val,num_val)
h2o_val = h2o.as_list(h2o.digamma(h2o_data3))[5][5]
num_val = scipy.special.polygamma(0,h2o.as_list(h2o_data3)[5][5])
assert abs(h2o_val - num_val) < max(abs(h2o_val), abs(num_val)) * 1e-6, \
"check unsuccessful! h2o computed {0} and math computed {1}. expected equal digamma values between h2o and math".format(h2o_val,num_val)
h2o_val = h2o.as_list(h2o.trigamma(h2o_data3))[5][5]
num_val = scipy.special.polygamma(1,h2o.as_list(h2o_data3)[5][5])
assert abs(h2o_val - num_val) < max(abs(h2o_val), abs(num_val)) * 1e-6, \
"check unsuccessful! h2o computed {0} and math computed {1}. expected equal trigamma values between h2o and math".format(h2o_val,num_val)
if __name__ == "__main__":
h2o.run_test(sys.argv, expr_math_ops)
| [
"[email protected]"
] | |
54784ae241ebb27af2105733d27895990c63c635 | d024ccbb4cc04af3866a4db1ac1d8c1d7395d909 | /boj/4673.py | 28d025abbfa54b5cb36be7af6190215810610b63 | [] | no_license | demetoir/ps-solved-code | ff0418dddd10f3b053c9b8d32af48027b10c8481 | f4d4fd2183176b083f2287c9d89c6d5a1e983cc5 | refs/heads/master | 2022-10-14T20:11:34.581439 | 2020-06-12T11:24:11 | 2020-06-12T11:24:11 | 68,782,768 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | l=lambda n:n%10+l(n//10) if n>0 else 0
a=[1]*20002  # a[n] stays 1 iff n is never generated, i.e. n is a self number
for i in range(1,10000):a[l(i)+i]=0  # mark d(i) = i + digit_sum(i) as generated
for i in range(1,10000):
    if a[i]==1:print(i)
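# A readable equivalent of the golfed solution above (illustrative sketch,
# kept commented out so the submission's output is unchanged):
#
# def digit_sum(n):
#     return sum(int(d) for d in str(n))
#
# generated = {i + digit_sum(i) for i in range(1, 10000)}
# for i in range(1, 10000):
#     if i not in generated:
#         print(i)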
"[email protected]"
] | |
5df196843c25b81138c44c75987e86e0af7debc1 | 3dfa65c42241d866dcf82d2f6faf603e5aec096c | /gladweb/views/index.py | 340e3f77f04331b417dcf26cd0303c55a194ac4b | [] | no_license | Dav1dde/glad-web | 0ad5f11f4ca0966ae29b4c1972a02295bdd6c47c | ff05bd08efca97c2f40fbf3e9f8fde265b7c8e7d | refs/heads/master | 2023-03-07T18:31:52.638325 | 2023-02-25T16:14:12 | 2023-02-25T16:14:12 | 35,337,528 | 396 | 74 | null | null | null | null | UTF-8 | Python | false | false | 4,874 | py | import json
import os
import sys
import tempfile
import zipfile
from collections import namedtuple
from flask import Blueprint, request, render_template, g, url_for, redirect, flash, current_app
import glad.lang.c.generator
from glad.spec import SPECS
from gladweb.views.exception import InvalidUserInput
if sys.version_info >= (3, 0):
from itertools import zip_longest, chain
from urllib.parse import urlencode
else:
from itertools import izip_longest as zip_longest, chain
from urllib import urlencode
Version = namedtuple('Version', ['major', 'minor'])
index = Blueprint('index', __name__)
@index.route('/', methods=['GET'])
def landing():
return render_template(
'index.html', **g.metadata.as_dict()
)
def validate_form():
language = request.form.get('language')
specification = request.form.get('specification')
profile = request.form.get('profile', 'compatibility')
apis = request.form.getlist('api')
extensions = request.form.getlist('extensions')
loader = request.form.get('loader') is not None
omitkhr = request.form.get('omitkhr') is not None
local_files = request.form.get('localfiles') is not None
messages = list()
if language not in (l.id for l in g.metadata.languages):
raise InvalidUserInput('Invalid language "{0}"'.format(language))
if specification not in (s.id for s in g.metadata.specifications):
raise InvalidUserInput('Invalid specification "{0}"'.format(specification))
if profile not in (p.id for p in g.metadata.profiles):
raise InvalidUserInput('Invalid profile "{0}"'.format(profile))
apis_parsed = dict()
for api in apis:
name, version = api.split('=')
if version == 'none':
continue
apis_parsed[name] = Version(*map(int, version.split('.')))
if len(apis_parsed) == 0:
raise InvalidUserInput(
            'No API for specification "{0}" selected'.format(specification)
)
return messages, language, specification, profile, apis_parsed, extensions, loader, omitkhr, local_files
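# For reference, a POST body validate_form() accepts could look like the following
# (illustrative values only; the field names match the parsing above):
#   language=c&specification=gl&profile=core&api=gl%3D3.3&extensions=GL_ARB_debug_output&loader=on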
def write_dir_to_zipfile(path, zipf, exclude=None):
if exclude is None:
exclude = []
for root, dirs, files in os.walk(path):
for file_ in files:
if file_ in exclude:
continue
zipf.write(
os.path.join(root, file_),
os.path.relpath(os.path.join(root, file_), path)
)
def glad_generate():
# this is really getting ugly, where did my code quality standards go?
messages, language, specification, profile, apis, extensions, loader_enabled, omitkhr, local_files = validate_form()
cls = SPECS[specification]
spec = cls.fromstring(g.cache.open_specification(specification).read())
if spec.NAME == 'gl':
spec.profile = profile
generator_cls, loader_cls = glad.lang.get_generator(
language, spec.NAME.lower()
)
if loader_cls is None:
raise InvalidUserInput('API/Spec not yet supported')
loader = loader_cls(apis)
loader.disabled = not loader_enabled
loader.local_files = local_files
glad.lang.c.generator.KHRPLATFORM = 'file:' + g.cache.get_khrplatform()
# the suffix is required because mkdtemp sometimes creates directories with an
# underscore at the end, we later use werkzeug.utils.secure_filename on that directory,
# this function happens to strip underscores...
directory = tempfile.mkdtemp(dir=current_app.config['TEMP'], suffix='glad')
os.chmod(directory, 0o750)
with generator_cls(directory, spec, apis, extensions, loader, local_files=local_files, omit_khrplatform=omitkhr) as generator:
generator.generate()
zip_path = os.path.join(directory, 'glad.zip')
with open(zip_path, 'wb') as fobj:
zipf = zipfile.ZipFile(fobj, mode='w')
write_dir_to_zipfile(directory, zipf, exclude=['glad.zip'])
zipf.close()
serialized = urlencode(list(chain.from_iterable(
zip_longest('', x[1], fillvalue=x[0]) for x in request.form.lists())
))
serialized_path = os.path.join(directory, '.serialized')
with open(serialized_path, 'w') as fobj:
json.dump({'params': serialized, 'messages': messages}, fobj)
name = os.path.split(directory)[1]
if current_app.config['FREEZE']:
current_app.freezer.freeze(name)
return url_for('generated.autoindex', root=name)
@index.route('/generate', methods=['POST'])
def generate():
try:
url = glad_generate()
except Exception as e:
import gladweb
if gladweb.sentry is not None:
gladweb.sentry.captureException()
current_app.logger.exception(e)
current_app.logger.error(request.form)
flash(str(e), category='error')
return redirect(url_for('index.landing'))
return redirect(url)
| [
"[email protected]"
] | |
e44e9989565a9d1ffcbc9142748500ff5a274785 | e0980f704a573894350e285f66f4cf390837238e | /.history/streams/blocks_20201022114431.py | 652f1b7cfbfba3d9c2325e1ea062cb799ef42b97 | [] | no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,583 | py | from wagtail.core import blocks
from wagtail.images.blocks import ImageChooserBlock
class TitleBlock(blocks.StructBlock):
text = blocks.CharBlock(
required = True,
        help_text='Tekst do wyświetlenia',
)
class Meta:
template = 'streams/title_block.html'
        icon = 'edit'
label = 'Tytuł'
help_text = 'Wyśrodkowany tekst do wyświetlenia na stronie.'
class LinkValue(blocks.StructValue):
    """Additional logic for links."""
    def url(self):
        internal_page = self.get('internal_page')
        external_link = self.get('external_link')
        if internal_page:
            return internal_page.url
        elif external_link:
            return external_link
        return ''
class Link(blocks.StructBlock):
link_text = blocks.CharBlock(
max_length=50,
default='Więcej szczegółów'
)
    internal_page = blocks.PageChooserBlock(
required=False
)
external_link = blocks.URLBlock(
required=False
)
class Meta:
value_class = LinkValue
class Card(blocks.StructBlock):
title = blocks.CharBlock(
max_length=100,
help_text = 'Pogrubiony tytuł tej karty. Maksymalnie 100 znaków.'
)
text = blocks.TextBlock(
max_length=255,
help_text='Opcjonalny tekst tej karty. Maksymalnie 255 znaków.'
)
image = ImageChooserBlock(
help_text = 'Obraz zostanie automatycznie przycięty o 570 na 370 pikseli'
)
link = Link(help_text = 'Wwybierz link')
class CardsBlock(blocks.StructBlock):
cards = blocks.ListBlock(
Card()
)
class Meta:
template = 'streams/card_block.html'
icon = 'image'
label = 'Karty standardowe'
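# Minimal usage sketch (illustrative only; the page model and field name are
# assumptions, not part of this file): these blocks would typically be wired
# into a StreamField on a Wagtail page, e.g.
#   body = StreamField([('title', TitleBlock()), ('cards', CardsBlock())])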
| [
"[email protected]"
] | |
566b949d5b6105ffa0ac3812e25ae751a59de219 | fdd67d3733d3db2fb381f25b0985952e3f7c9a4f | /epdAlarm.py | 9fc35afaa5fac12a5bf4482d115b6d71392bd049 | [] | no_license | star-controls/epdAlarm | 56c6ef50616ea4290217b41d0daf3c4ebf7ee952 | 4a966e38116344b9d209dd8efc9abfbbc0e4db5a | refs/heads/master | 2020-03-21T07:51:27.655488 | 2019-03-05T15:04:12 | 2019-03-05T15:04:12 | 138,303,832 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,072 | py |
from epdchan import epdchan
import paho.mqtt.client as mqtt
from watchdog import watchdog
from softioc import builder
import time
import pandas as pd
#EPD PVs
builder.SetDeviceName('EPD')
#list of all EPD channels as 3-index list
npp = 12
ntile = 31
elist = []
#east/west loop
for ew in range(0,2):
elist.append([])
#PP loop
for ipp in range(0,npp+1):
elist[ew].append([])
#tile loop
for itile in range(ntile+1):
#PP starts at 1, handled in epdchan constructor
elist[ew][ipp].append( epdchan(ew, ipp, itile) )
#watchdog timer for 60 seconds
wdt = watchdog(60, elist)
#file holding alarm limit values
csvlim = "limits.csv"
lframe = pd.read_csv(csvlim)
#set initial alarm values
elist[0][0][0].limits.imon_max = lframe['imon_max'][0]
elist[0][0][0].limits.rmon_min = lframe['rmon_min'][0]
elist[0][0][0].limits.rmon_max = lframe['rmon_max'][0]
elist[0][0][0].limits.temp_max = lframe['temp_max'][0]
#functions to show alarm limits
#_____________________________________________________________________________
def get_imon_max():
return elist[0][0][0].limits.imon_max
#_____________________________________________________________________________
def get_rmon_min():
return elist[0][0][0].limits.rmon_min
#_____________________________________________________________________________
def get_rmon_max():
return elist[0][0][0].limits.rmon_max
#_____________________________________________________________________________
def get_temp_max():
return elist[0][0][0].limits.temp_max
#_____________________________________________________________________________
def put_limit(key, val):
#put limit value to file
lframe[key][0] = val
lframe.to_csv(csvlim, index=False)
#PVs to set alarm limits
#_____________________________________________________________________________
def set_imon_max(val):
elist[0][0][0].limits.imon_max = val
put_limit('imon_max', val)
imon_max_pv = builder.aOut("imon_max", on_update=set_imon_max, initial_value=get_imon_max(), PREC=2)
#_____________________________________________________________________________
def set_rmon_min(val):
elist[0][0][0].limits.rmon_min = val
put_limit('rmon_min', val)
rmon_min_pv = builder.aOut("rmon_min", on_update=set_rmon_min, initial_value=get_rmon_min(), PREC=1)
#_____________________________________________________________________________
def set_rmon_max(val):
elist[0][0][0].limits.rmon_max = val
put_limit('rmon_max', val)
rmon_max_pv = builder.aOut("rmon_max", on_update=set_rmon_max, initial_value=get_rmon_max(), PREC=1)
#_____________________________________________________________________________
def set_temp_max(val):
elist[0][0][0].limits.temp_max = val
put_limit('temp_max', val)
temp_max_pv = builder.aOut("temp_max", on_update=set_temp_max, initial_value=get_temp_max(), PREC=1)
#_____________________________________________________________________________
def init_alarm_limits():
#put initial values to alarm limits PVs
#imon_max_pv.set(get_imon_max())
#rmon_min_pv.set(get_rmon_min())
#rmon_max_pv.set(get_rmon_max())
#temp_max_pv.set(get_temp_max())
pass
#functions for mqtt message
#_____________________________________________________________________________
def get_msg_id(msg, idnam):
#get message id
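    # the slice starts at the key name itself, so after splitting on '"'
    # index 2 holds the quoted value (e.g. 'epd_controller' for "dcs_id")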
return ( msg[msg.find(idnam):] ).split('"')[2]
#_____________________________________________________________________________
def process_msg(msg):
#parse the message, get the values, put them to EPD channel objects
#check message validity
if get_msg_id(msg, "dcs_id") != "epd_controller" or get_msg_id(msg, "dcs_uid") != "tonko":
return
wdt.reset()
#message header
hstart = msg.find("[", msg.find("dcs_header")) + 1
hend = msg.find("]")
hlist = msg[hstart:hend].split(",")
id_ew = hlist.index('"fps_quad"')
id_pp = hlist.index('"fps_layer"')
id_tile = hlist.index('"fps_channel"')
id_vslope = hlist.index('"vslope"')
id_vcomp = hlist.index('"temp"')
id_imon = hlist.index('"imon0"')
id_rmon = hlist.index('"rmon0"')
id_state = hlist.index('"state"')
#get values table
vstart = msg.find("{", msg.find("dcs_values")) + 1
vend = msg.find("}", vstart)
vtab = msg[vstart:vend].split("]")
#table lines loop
for i in range(len(vtab)):
if vtab[i] == "":
continue
#list of values
vlist = vtab[i][vtab[i].find("[")+1:].split(",")
#EPD indices
ew = int(vlist[id_ew])
pp = int(vlist[id_pp])
tile = int(vlist[id_tile])
#print repr(ew), repr(pp), repr(tile)
#voltage and current values
epd = elist[ew][pp][tile]
epd.vslope = float(vlist[id_vslope])
epd.vcomp = float(vlist[id_vcomp])
epd.imon = float(vlist[id_imon])
epd.rmon = float(vlist[id_rmon])
epd.state = str(vlist[id_state]).lower().strip('"')
#print repr(epd.ew), repr(epd.pp), repr(epd.tile), repr(epd.vslope), repr(epd.vcomp), repr(epd.imon), repr(epd.rmon)
#put values to PVs in EPD object
epd.pvput()
#mqtt client functions
#_____________________________________________________________________________
def on_connect(client, userdata, flags, rc):
# The callback for when the client receives a CONNACK response from the server.
print("MQTT connected with result code "+str(rc))
client.subscribe("dcs/set/Control/epd/epd_control_fee")
#_____________________________________________________________________________
def on_message(client, userdata, msg):
# The callback for when a PUBLISH message is received from the server.
process_msg(msg.payload)
#_____________________________________________________________________________
def read_mqtt():
#initialize alarm limits PVs
init_alarm_limits()
#main mqtt loop
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("mq01.starp.bnl.gov")
client.loop_start()
wdt.start()
#watchdog test, 10 sec timeout
#time.sleep(10)
#client.loop_stop()
#print "alarm on 0, 1, 0"
#elist[0][1][0].set_invalid()
#time.sleep(20)
#print "running again"
#client.loop_start()
| [
"[email protected]"
] | |
a06b4cdb26e979978b7442a5953e6661148f9c4d | 90c6262664d013d47e9a3a9194aa7a366d1cabc4 | /scripts/make_contract_tests.py | c5a713b158970664e7323b7f9745d351a8a8b188 | [
"MIT"
] | permissive | tqtezos/pytezos | 3942fdab7aa7851e9ea81350fa360180229ec082 | a4ac0b022d35d4c9f3062609d8ce09d584b5faa8 | refs/heads/master | 2021-07-10T12:24:24.069256 | 2020-04-04T12:46:24 | 2020-04-04T12:46:24 | 227,664,211 | 1 | 0 | MIT | 2020-12-30T16:44:56 | 2019-12-12T17:47:53 | Python | UTF-8 | Python | false | false | 5,441 | py | from os.path import join, dirname, exists
from os import mkdir
import json
from conseil import conseil
from conseil.api import ConseilApi
from pytezos import pytezos
from tests import relpath
from tests.templates import michelson_coding_test_case, micheline_coding_test_case, \
test_michelson_parse,test_michelson_format, test_michelson_inverse, test_micheline_inverse
data_dir = join(dirname(dirname(__file__)), 'tests/contracts')
Account = conseil.tezos.babylonnet.accounts
Operation = conseil.tezos.babylonnet.operations
def get_accounts(limit=1):
operations = Operation.query(Operation.destination,
Operation.operation_group_hash.count()) \
.filter(Operation.destination.startswith('KT1'),
Operation.parameters.isnot(None),
Operation.parameters.notlike('Unparsable'),
Operation.kind == 'transaction',
Operation.status == 'applied') \
.order_by(Operation.operation_group_hash.count().desc()) \
.limit(limit) \
.all()
addresses = list(map(lambda x: x['destination'], operations))
accounts = Account.query(Account.account_id, Account.script, Account.storage) \
.filter(Account.account_id.in_(*addresses),
Account.storage.notlike('Unparsable'),
Account.script.notlike('Unparsable')) \
.all()
return accounts
def get_operations(account_id, limit=1):
operations = Operation.query(Operation.block_level.max().label('level'),
Operation.parameters) \
.filter(Operation.destination == account_id,
Operation.parameters.isnot(None),
Operation.parameters.notlike('Unparsable'),
Operation.kind == 'transaction',
Operation.status == 'applied',
Operation.internal.is_(False)) \
.limit(limit) \
.all()
return operations
def find_operation(block_level, destination):
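    # scan the block's manager operation groups for a parameterized call to
    # `destination` and return its (parameters, operation group hash)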
opg_list = pytezos.shell.blocks[block_level].operations.managers()
for opg in opg_list:
for content in opg['contents']:
if content.get('parameters') and content['destination'] == destination:
return content['parameters'], opg['hash']
assert False
def make_package(account, operations=1):
account_dir = join(data_dir, account["account_id"])
if exists(account_dir):
return
else:
mkdir(account_dir)
files = {
'dir': account_dir,
'name': account['account_id'][:6],
'code': [],
'storage': [],
'parameter': []
}
def write_files(michelson, micheline, section, name):
tz_path = join(account_dir, f'{section}_{name}.tz')
json_path = join(account_dir, f'{section}_{name}.json')
with open(tz_path, 'w+') as f:
f.write(michelson)
with open(json_path, 'w+') as f:
f.write(json.dumps(micheline, indent=2))
files[section].append((name, tz_path, json_path))
contract = pytezos.shell.contracts[account['account_id']]()
write_files(
michelson=account['script'],
micheline=contract['script']['code'],
section='code',
name=account['account_id'][:6]
)
write_files(
michelson=account['storage'],
micheline=contract['script']['storage'],
section='storage',
name=account['account_id'][:6]
)
operations = get_operations(account['account_id'], limit=operations)
for operation in operations:
parameters, opg_hash = find_operation(operation['level'], account['account_id'])
write_files(
michelson=operation['parameters'],
micheline=parameters,
section='parameter',
name=opg_hash[:6]
)
return files
def make_michelson_tests(files: dict):
test_case = [
michelson_coding_test_case.format(case=files['name'])
]
for section in ['code', 'storage', 'parameter']:
for name, tz_path, json_path in files[section]:
case = f'{section}_{name}'
test_case.extend([
test_michelson_parse.format(case=case, json_path=relpath(json_path), tz_path=relpath(tz_path)),
test_michelson_format.format(case=case, json_path=relpath(json_path), tz_path=relpath(tz_path)),
test_michelson_inverse.format(case=case, json_path=relpath(json_path))
])
with open(join(files['dir'], f'test_michelson_coding_{files["name"]}.py'), 'w+') as f:
f.write(''.join(test_case))
def make_micheline_tests(files: dict):
test_case = [
micheline_coding_test_case.format(case=files['name'], json_path=relpath(files['code'][0][2]))
]
for section in ['storage', 'parameter']:
for name, tz_path, json_path in files[section]:
case = f'{section}_{name}'
test_case.append(
test_micheline_inverse.format(case=case, json_path=relpath(json_path), section=section)
)
with open(join(files['dir'], f'test_micheline_coding_{files["name"]}.py'), 'w+') as f:
f.write(''.join(test_case))
if __name__ == '__main__':
accounts = get_accounts(limit=100)
for acc in accounts:
package = make_package(acc, operations=7)
if package:
make_michelson_tests(package)
make_micheline_tests(package)
| [
"[email protected]"
] | |
a3b4529f2a8af100e1863c8d7f61d0522f76b1ce | a46646a707b9d747fcf29a86f67a4ccbcbd0ddb9 | /week10/book/76prayme.py | 9a15021619c3da6599d23328531d5d56030c674c | [] | no_license | DevooKim/algorithm-study | 5720642bb43ea364dae924ee038f97379f2ef85b | 830b148defc7f0097abe2f5d3f4e9d8f3333efb0 | refs/heads/main | 2023-02-23T18:40:28.978111 | 2021-01-28T12:09:06 | 2021-01-28T12:09:06 | 302,206,505 | 2 | 1 | null | 2021-01-28T12:09:07 | 2020-10-08T01:54:08 | Python | UTF-8 | Python | false | false | 2,155 | py | import collections
import heapq
import functools
import itertools
import re
import sys
import math
import bisect
from typing import List
class Solution:
def minWindow(self, s: str, t: str) -> str:
# T의 크기부터 점점 키워가기
def contains(s_substr_lst: List, t_lst: List):
for t_elem in t_lst:
if t_elem in s_substr_lst:
s_substr_lst.remove(t_elem)
else:
return False
return True
if not s or not t:
return ''
window_size = len(t)
for size in range(window_size, len(s) + 1):
for left in range(len(s) - size + 1):
s_substr = s[left:left+size]
if contains(list(s_substr), list(t)):
return s_substr
return ''
def two_pointer_with_window(self, s: str, t: str) -> str:
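        # sliding window: `missing` counts required characters not yet covered;
        # once it reaches zero, shrink `left` as far as possible and keep the
        # smallest valid window seen so far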
need = collections.Counter(t)
missing = len(t)
left = start = end = 0
for right, char in enumerate(s, 1):
missing -= need[char] > 0
need[char] -= 1
if missing == 0:
while left < right and need[s[left]] < 0:
need[s[left]] += 1
left += 1
if not end or right - left <= end - start:
start, end = left, right
need[s[left]] += 1
missing += 1
left += 1
return s[start:end]
def boo_counter(self, s: str, t: str) -> str:
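        # Counter variant: `current_count & t_count == t_count` holds exactly
        # when the window contains at least the required count of every
        # character in t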
t_count = collections.Counter(t)
current_count = collections.Counter()
start = float('-inf')
end = float('inf')
left = 0
for right, char in enumerate(s, 1):
current_count[char] += 1
while current_count & t_count == t_count:
if right - left < end - start:
start,end = left, right
current_count[s[left]] -= 1
left += 1
return s[start:end] if end-start <= len(s) else ''
print(Solution().minWindow("ADOBECODEBANC", "ABC")) # "BANC
print(Solution().minWindow("a", "a")) | [
"[email protected]"
] | |
ba739e1e9487460532edf7325747f1c35b66b048 | 1e9ad304868c2bda918c19eba3d7b122bac3923b | /kubernetes/client/models/v1beta1_http_ingress_rule_value.py | 168b201cfa6cc6450e6154e0ffdd4d11d9e0805c | [
"Apache-2.0"
] | permissive | pineking/client-python | c77e5bd3d476ac852e6dffa96056008baa0f597f | 74a64d7325518f4298600d4bb300f92843c29347 | refs/heads/master | 2021-01-22T22:16:27.368406 | 2017-03-15T08:21:21 | 2017-03-15T08:21:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,219 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.5.1-660c2a2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1HTTPIngressRuleValue(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, paths=None):
"""
V1beta1HTTPIngressRuleValue - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'paths': 'list[V1beta1HTTPIngressPath]'
}
self.attribute_map = {
'paths': 'paths'
}
self._paths = paths
@property
def paths(self):
"""
Gets the paths of this V1beta1HTTPIngressRuleValue.
A collection of paths that map requests to backends.
:return: The paths of this V1beta1HTTPIngressRuleValue.
:rtype: list[V1beta1HTTPIngressPath]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this V1beta1HTTPIngressRuleValue.
A collection of paths that map requests to backends.
:param paths: The paths of this V1beta1HTTPIngressRuleValue.
:type: list[V1beta1HTTPIngressPath]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
] | |
16b6d2a2bb371aec5835e7f3d24bccfd8b4ec178 | b005d794cfd8e3b063b08d6a266b1e07f0f0f5e9 | /src/webapp/geolist/forms.py | 37418890337878c9eab7f4a4c60577f54493ef96 | [] | no_license | GeoRemindMe/GeoRemindMe_Web | 593c957faa5babb3040da86d94a5d884ad4b2db3 | d441693eedb32c36fe853895110df808a9959941 | refs/heads/master | 2021-01-16T18:29:39.633445 | 2011-11-05T23:50:37 | 2011-11-05T23:50:37 | 1,841,418 | 8 | 5 | null | null | null | null | UTF-8 | Python | false | false | 928 | py | # coding=utf-8
from django import forms
from django.utils.translation import gettext_lazy as _
from georemindme.models_utils import VISIBILITY_CHOICES
class ListRequestedForm(forms.Form):
name = forms.CharField(required=True)
description = forms.CharField(required=False,widget=forms.Textarea())
visibility = forms.ChoiceField(required=True, choices=VISIBILITY_CHOICES)
# only save if it is valid
def save(self, **kwargs):
from geouser.models import User
if not isinstance(kwargs['user'], User):
raise TypeError
from models import ListRequested
if kwargs['id'] is None:
            return ListRequested.insert_list(user=kwargs['user'],
name=self.cleaned_data['name'],
description=self.cleaned_data['description']
) | [
"[email protected]"
] | |
e564cbb6e5bd4a5146b48e57490b98887aa49bcc | bde6ed092b7b29703737e11c5a5ff90934af3d74 | /AtCoder/tkppc/c.py | c001eb99b71d1c90cfe2d44eb70b9b13d6f44518 | [] | no_license | takecian/ProgrammingStudyLog | 2ab7ea601e0996b3fa502b81ec141bc3772442b6 | 94485d131c0cc9842f1f4799da2d861dbf09b12a | refs/heads/master | 2023-04-28T16:56:18.943574 | 2023-04-18T06:34:58 | 2023-04-18T06:34:58 | 128,525,713 | 4 | 0 | null | 2022-12-09T06:15:19 | 2018-04-07T12:21:29 | Python | UTF-8 | Python | false | false | 321 | py | # https://tkppc.contest.atcoder.jp/tasks/tkppc2015_c
N, M = map(int, input().split())
S = int(input())
T = [0 for _ in range(10000)]
for _ in range(N):
t, k = map(int, input().split())
T[t-1] = k
# print(T)
total = 0
d = 0
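# count how many of the first S-1 days have a running total of at least M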
for i in range(S - 1):
total += T[i]
if total >= M:
d += 1
print(d)
| [
"[email protected]"
] | |
4ab92065962d53964ce2f930d220837337ee3eac | c318bd15c40063639edc95bb8419f4c0f4a2b54f | /update_s3_configuration.py | e520e1541db8ce977a5e0513f0439b48d7e25a29 | [
"MIT"
] | permissive | cwestleyj/HearthstoneJSON | 716fa1b05782d311a04c16c5917ad6e6ae15749a | ed30c943983a4ee0da3a80562655d5a274faad39 | refs/heads/master | 2021-01-19T10:36:59.554294 | 2017-02-10T16:29:13 | 2017-02-10T16:29:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,954 | py | #!/usr/bin/env python
import sys
import boto3
from pprint import pprint
API_BUCKET = "api.hearthstonejson.com"
ART_BUCKET = "art.hearthstonejson.com"
def update_website_configuration(s3, build, bucket=API_BUCKET):
print("Querying website configuration for %r" % (bucket))
orig_config = s3.get_bucket_website(Bucket=bucket)
pprint(orig_config)
if "ResponseMetadata" in orig_config:
del orig_config["ResponseMetadata"]
config = orig_config.copy()
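	# redirect the floating "latest" prefix to the concrete build number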
config["RoutingRules"] = [{
"Condition": {
"KeyPrefixEquals": "v1/latest/"
},
"Redirect": {
"ReplaceKeyPrefixWith": "v1/%i/" % (build),
"HttpRedirectCode": "302",
"Protocol": "https",
},
}]
if config != orig_config:
print("Updating website configuration")
pprint(config)
s3.put_bucket_website(Bucket=bucket, WebsiteConfiguration=config)
else:
print("Website configuration up-to-date")
def update_art_404_redirects(s3, bucket=ART_BUCKET):
orig_config = s3.get_bucket_website(Bucket=bucket)
if "ResponseMetadata" in orig_config:
del orig_config["ResponseMetadata"]
config = orig_config.copy()
prefixes = [
("v1/orig/", "png", "XXX_001"),
("v1/tiles/", "png", "HERO_01"),
("v1/256x/", "jpg", "XXX_001"),
("v1/512x/", "jpg", "XXX_001"),
]
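	# each (prefix, extension, fallback) row maps 404s under that prefix
	# to a placeholder card image in the matching format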
config["RoutingRules"] = []
for prefix, ext, fallback in prefixes:
config["RoutingRules"].append({
"Condition": {
"HttpErrorCodeReturnedEquals": "404",
"KeyPrefixEquals": prefix,
},
"Redirect": {
"ReplaceKeyWith": prefix + "%s.%s" % (fallback, ext),
"HttpRedirectCode": "302",
"Protocol": "https",
}
})
if config != orig_config:
print("Updating 404 redirects")
pprint(config)
s3.put_bucket_website(Bucket=bucket, WebsiteConfiguration=config)
else:
print("404 redirects up-to-date")
def main():
build = int(sys.argv[1])
s3 = boto3.client("s3")
update_website_configuration(s3, build)
update_art_404_redirects(s3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
731800828469aa1b78563d3dae74e8f7ed296abf | 2bdedcda705f6dcf45a1e9a090377f892bcb58bb | /src/main/output/point/president/way/kerberos_place_part.py | a02fe413ce8add5d86a79fbfa0f688adb60943f7 | [] | no_license | matkosoric/GenericNameTesting | 860a22af1098dda9ea9e24a1fc681bb728aa2d69 | 03f4a38229c28bc6d83258e5a84fce4b189d5f00 | refs/heads/master | 2021-01-08T22:35:20.022350 | 2020-02-21T11:28:21 | 2020-02-21T11:28:21 | 242,123,053 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,105 | py | using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Translator.API;
namespace CSharp_TranslateSample
{
public class Program
{
public static string traducida;
public static void Main(string[] args)
{
//TranslateAsync().Wait();
//Console.ReadKey();
}
public static void iniciar() {
TranslateAsync().Wait();
Console.ReadKey();
}
/// Demonstrates getting an access token and using the token to translate.
private static async Task TranslateAsync()
{
var translatorService = new TranslatorService.LanguageServiceClient();
var authTokenSource = new AzureAuthToken(SubscriptionKey);
var token = string.Empty;
try
{
token = await authTokenSource.GetAccessTokenAsync();
}
catch (HttpRequestException)
{
switch (authTokenSource.RequestStatusCode)
{
case HttpStatusCode.Unauthorized:
Console.WriteLine("Request to token service is not authorized (401). Check that the Azure subscription key is valid.");
break;
case HttpStatusCode.Forbidden:
Console.WriteLine("Request to token service is not authorized (403). For accounts in the free-tier, check that the account quota is not exceeded.");
break;
}
throw;
}
            traducida = translatorService.Translate(token, "Hello World", "en", "fr", "text/plain", "general", string.Empty);
            //Console.WriteLine("Translated to French: {0}", translatorService.Translate(token, "Hello World", "en", "fr", "text/plain", "general", string.Empty));
        }

        //Enter here the Key from your Microsoft Translator Text subscription on http://portal.azure.com
        private const string SubscriptionKey = "a82656b8f3060cebdca7483b1bf557d2";
}
}
| [
"[email protected]"
] | |
f5693ba9d5f5661315af0ff316348508bfffa665 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_42606.py | 4c7ba5374406b23f4f9662574ff7bdd96180c47f | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,833 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((480.133, 574.617, 353.855), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((472.89, 506.186, 366.499), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((461.36, 426.318, 371.916), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((463.5, 488.736, 247.655), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((445.537, 242.226, 430.618), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((466.957, 531.167, 363.335), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((466.273, 532.349, 363.508), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((440.013, 528.239, 372.237), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((425.324, 537.967, 393.901), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((399.261, 527.122, 392.677), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((377.08, 531.438, 376.089), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((377.278, 555.174, 361.198), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((483.905, 553.219, 369.612), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((271.035, 559.086, 359.439), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((314.991, 374.058, 427.888), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((314.991, 374.058, 427.888), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((341.953, 381.36, 436.356), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((370.147, 390.326, 439.651), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((394.479, 406.023, 436.611), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((411.069, 429.236, 429.918), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((424.036, 453.57, 419.548), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((435.58, 476.678, 405.281), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((189.494, 422.769, 348.469), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((682.681, 550.891, 440.684), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((457.67, 451.265, 412.51), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((457.67, 451.265, 412.51), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((477.136, 449.251, 391.197), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((489.693, 440.589, 366.536), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((475.566, 433.51, 341.909), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((497.788, 549.921, 303.974), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((447.684, 314.825, 371.61), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((481.892, 514.219, 340.56), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((482.006, 514.319, 340.367), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((487.412, 542.546, 339.741), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((476.438, 563.475, 355.169), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((456.629, 567.891, 375.9), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((433.885, 571.824, 392.34), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((408.443, 573.851, 404.449), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((387.496, 559.47, 392.46), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((453.421, 527.079, 436.448), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((320.645, 588.786, 344.08), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((491.899, 503.154, 427.54), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((490.244, 493.314, 402.88), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((484.724, 469.799, 350.456), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((478.939, 445.038, 298.961), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((532.383, 502.052, 278.523), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((432.292, 368.905, 244.95), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((505.686, 525.311, 392.624), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((505.669, 516.009, 365.984), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((519.318, 495.81, 380.205), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((521.203, 467.974, 383.863), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((522.364, 439.958, 383.551), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((514.033, 413.155, 380.166), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((497.001, 492.839, 378.454), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((531.46, 333.91, 382.133), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
] | |
b5d7d906ca1b952f86510b73dd4b2ab3e980c6db | 283815445952a37e0124801b456844774355733f | /app/models/__init__.py | ffca30aca69e6931817c66d4933a609c5d6bf330 | [] | no_license | paulosjd/woodrecs | 7aa4bec22f2c126bd51023e141f1a113c8faf3d8 | 19a8a53c753ae0978fc092d9a2f6f560dc8644bf | refs/heads/master | 2022-07-07T08:36:06.902801 | 2020-11-08T21:59:00 | 2020-11-08T21:59:00 | 251,587,209 | 0 | 0 | null | 2022-06-22T01:36:43 | 2020-03-31T11:47:53 | Python | UTF-8 | Python | false | false | 183 | py | from .profile import Profile
from .profile_board import ProfileBoard
from .route import Route
from .user import User
__all__ = [
Profile,
ProfileBoard,
Route,
User
]
| [
"[email protected]"
] | |
061b46322d284653c94c803921d86a35f31c4c3a | 8c067089ac94844919c4dc37681c898c0f93819e | /jenkins-master/jobs/scripts/workspace/config.py | c477a45df06ebcddcd194c10096182da65606db8 | [] | no_license | Ramireddyashok/mozmill-ci | 9ac1a5762fa8c14c4802447a9d5878422d2e164a | 0b8c6417e596235cca403ca80947fc328bd2fe8b | refs/heads/master | 2021-04-30T01:27:42.502771 | 2017-06-14T18:24:50 | 2017-06-14T18:24:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,089 | py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
here = os.path.dirname(os.path.abspath(__file__))
config = {
'test_types': {
'functional': {
'harness_config': os.path.join('firefox_ui_tests', 'qa_jenkins.py'),
'harness_script': os.path.join('firefox_ui_tests', 'functional.py'),
'treeherder': {
'group_name': 'Firefox UI Functional Tests',
'group_symbol': 'Fxfn',
'job_name': 'Firefox UI Functional Tests ({locale})',
'job_symbol': '{locale}',
'tier': 3,
'artifacts': {
'log_info.log': os.path.join(here, 'build', 'upload', 'logs', 'log_info.log'),
'report.html': os.path.join(here, 'build', 'upload', 'reports', 'report.html'),
},
'log_reference': 'log_info.log',
},
},
'update': {
'harness_config': os.path.join('firefox_ui_tests', 'qa_jenkins.py'),
'harness_script': os.path.join('firefox_ui_tests', 'update.py'),
'treeherder': {
'group_name': 'Firefox UI Update Tests - {update_channel}',
'group_symbol': 'Fxup-{update_channel}',
'job_name': 'Firefox UI Update Tests - {update_channel} {locale}-{update_number}',
'job_symbol': '{locale}-{update_number}',
'tier': 3,
'artifacts': {
'log_info.log': os.path.join(here, 'build', 'upload', 'logs', 'log_info.log'),
'report.html': os.path.join(here, 'build', 'upload', 'reports', 'report.html'),
# TODO: Bug 1210753: Move generation of log as option to mozharness
'http.log': os.path.join(here, 'build', 'http.log'),
},
'log_reference': 'log_info.log',
},
},
},
}
| [
"[email protected]"
] | |
b969aff50964ebae5ecd9541c8ed4af2b0ec93fa | 4d99350a527a88110b7bdc7d6766fc32cf66f211 | /OpenGLCffi/GLX/EXT/NV/copy_image.py | 1981ca497e85e50301da73e66cd5b08f9e4f85dd | [
"MIT"
] | permissive | cydenix/OpenGLCffi | e790ef67c2f6c9877badd5c38b7d58961c8739cd | c78f51ae5e6b655eb2ea98f072771cf69e2197f3 | refs/heads/master | 2021-01-11T07:31:10.591188 | 2017-04-17T11:04:55 | 2017-04-17T11:04:55 | 80,312,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | from OpenGLCffi.GLX import params
@params(api='glx', prms=['dpy', 'srcCtx', 'srcName', 'srcTarget', 'srcLevel', 'srcX', 'srcY', 'srcZ', 'dstCtx', 'dstName', 'dstTarget', 'dstLevel', 'dstX', 'dstY', 'dstZ', 'width', 'height', 'depth'])
def glXCopyImageSubDataNV(dpy, srcCtx, srcName, srcTarget, srcLevel, srcX, srcY, srcZ, dstCtx, dstName, dstTarget, dstLevel, dstX, dstY, dstZ, width, height, depth):
pass
| [
"[email protected]"
] | |
651930fd736184cb7f793d23885d3a0c3a2be442 | 67c3c2a310a4d129a45739ca6351052f36f6d5f4 | /venv/lib/python3.7/tarfile.py | 1ac8bdb103bbf0d353b2ffa45630fbbea77736ed | [] | no_license | cyobero/django-blog | a743203bdaf1d8ae9e6bd47c6e7b33a213a7abfd | 307335c84a0fa9eba6d3f69172a47580144cc066 | refs/heads/master | 2022-12-09T20:25:51.396813 | 2020-03-10T14:52:26 | 2020-03-10T14:52:26 | 245,950,344 | 0 | 0 | null | 2022-11-22T05:22:50 | 2020-03-09T05:20:31 | Python | UTF-8 | Python | false | false | 48 | py | /home/cyobero/anaconda3/lib/python3.7/tarfile.py | [
"[email protected]"
] | |
f3d1fe716956a41dcaccd88cddd806332ba54e33 | 1b5c3039c05427ad5e731a18e06e0e0accb5ce98 | /scripts/creatematches.py | 2c4bb9fd8b280c5439cdaa0f3eddc508cad483bc | [] | no_license | matthew-brett/beatbased | 1df43cb7f16b4d6cde18acecd7d2b7209887ed89 | f6c7c6bd0fb62efcb3397d512f70717b49f5cccd | refs/heads/master | 2021-01-23T21:42:29.063883 | 2014-05-30T19:05:06 | 2014-05-30T19:05:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | #!/bin/env python
'''Creates as many matches as possible for metric sequences, with 5-9 intervals'''
import beatsequence as BS
#First, create a list of all combinations of intervals, taking those which add up to 12
print "calculating possible combinations"
S=[]
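# S collects each sorted interval pattern (summing to 12) exactly once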
for length in range(5,10):
L=[4 for n in range(length)]
for i in BS.valueperm(L):
        #work out total, go to next if not 12
total=0
for n in i:
total+=n
if total!=12:continue
i.sort()
if i not in S:
print "added",i
S.append(i)
#now run the match creator on S:
for i in S:
BS.getmatches(i,debug=True)
print i,"completed"
i=raw_input("Finished. Press enter to close")
| [
"[email protected]"
] | |
b2a97343f96ca9246962933acc173b23375b9a5c | 3474b315da3cc5cb3f7823f19a18b63a8da6a526 | /scratch/KRAMS/src/apps/scratch/rch/mlab/yarn_cs.py | 2d8a38ca1f2761ea2b42d42e5d831bb3cf157889 | [] | no_license | h4ck3rm1k3/scratch | 8df97462f696bc2be00f1e58232e1cd915f0fafd | 0a114a41b0d1e9b2d68dbe7af7cf34db11512539 | refs/heads/master | 2021-01-21T15:31:38.718039 | 2013-09-19T10:48:24 | 2013-09-19T10:48:24 | 29,173,525 | 0 | 0 | null | 2015-01-13T04:58:57 | 2015-01-13T04:58:56 | null | UTF-8 | Python | false | false | 3,840 | py | #-------------------------------------------------------------------------------
#
# Copyright (c) 2009, IMB, RWTH Aachen.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in simvisage/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.simvisage.com/licenses/BSD.txt
#
# Thanks for using Simvisage open source!
#
# Created on Jul 22, 2010 by: rch
from numpy import \
loadtxt, ones_like, vstack, c_, hstack, array, cumsum, \
zeros_like, zeros
import wxversion
wxversion.select( '2.8' )
from os.path import join
from promod.simdb import SimDB
simdb = SimDB()
data_dir = join( simdb.exdata_dir, 'trc', 'bond_structure' )
from enthought.tvtk.api import tvtk
from enthought.mayavi.scripts import mayavi2
from enthought.mayavi import mlab
n_slices = 15
start_slice = 4
slice_range = range( start_slice, start_slice + n_slices )
slice_distance = 500 # micrometers
def read_yarn_structure():
slice_point_list = []
slice_radius_list = []
slice_len_list = []
cut_off_start = zeros( ( n_slices, ), dtype = 'int' )
cut_off_start[ 1: ] += 0
for slice_idx, cut_off_idx in zip( slice_range, cut_off_start ):
data_file = join( data_dir, '1cOrientiertSchnitt%d.txt' % slice_idx )
print 'reading data_file'
points = loadtxt( data_file ,
skiprows = 1,
usecols = ( 1, 2, 3 ) )
y = points[ cut_off_idx:, 0]
z = points[ cut_off_idx:, 1]
x = ones_like( y ) * slice_idx * slice_distance
r = points[ cut_off_idx:, 2]
slice_point_list.append( c_[ x, y, z ] )
slice_radius_list.append( r )
slice_len_list.append( points.shape[0] )
lens_arr = array( slice_len_list )
print 'slice lens', lens_arr
offset_arr = cumsum( lens_arr )
slice_offset_arr = zeros_like( offset_arr )
slice_offset_arr[1:] = offset_arr[:-1]
print 'slice offsets', slice_offset_arr
data_file = join( data_dir, 'connectivity.txt' )
filam_connect_arr = loadtxt( data_file )
print filam_connect_arr.shape
print filam_connect_arr.shape
print slice_offset_arr.shape
fil_map = array( filam_connect_arr + slice_offset_arr, dtype = 'int' )
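    # fil_map holds global point indices: per-slice filament ids shifted by
    # each slice's offset into the stacked point array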
points = vstack( slice_point_list )
radius = hstack( slice_radius_list )
print points.shape
print max( fil_map.flatten() )
p = points[ fil_map.flatten() ]
r = radius[ fil_map.flatten() ]
mlab.plot3d( p[:, 0], p[:, 1], p[:, 2], r,
tube_radius = 20, colormap = 'Spectral' )
offset = array( [0, 3, 6] )
cells = array( [10, 4000, 20, 5005, 20, 4080, 4000, 20, 404 ] )
# line_type = tvtk.Line().cell_type # VTKLine == 10
# cell_types = array( [line_type] )
# # Create the array of cells unambiguously.
# cell_array = tvtk.CellArray()
# cell_array.set_cells( 3, cells )
# Now create the UG.
ug = tvtk.UnstructuredGrid( points = points )
# Now just set the cell types and reuse the ug locations and cells.
# ug.set_cells( cell_types, offset, cell_array )
ug.point_data.scalars = radius
ug.point_data.scalars.name = 'radius'
return ug
# Now view the data.
@mayavi2.standalone
def view( ug ):
from enthought.mayavi.sources.vtk_data_source import VTKDataSource
from enthought.mayavi.modules.outline import Outline
from enthought.mayavi.modules.surface import Surface
from enthought.mayavi.modules.vectors import Vectors
mayavi.new_scene()
src = VTKDataSource( data = ug )
mayavi.add_source( src )
s = Surface()
mayavi.add_module( s )
if __name__ == '__main__':
ug = read_yarn_structure()
mlab.show() # view( ug )
| [
"Axel@Axel-Pc"
] | Axel@Axel-Pc |
df4de3c89e3e0456ec62e028fb88040009f9c36e | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/lib/googlecloudsdk/third_party/appengine/api/taskqueue/taskqueue_service_pb.py | bb940144f8a18ef4f4b1e6b825080cea0ed6df38 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 262,899 | py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: apphosting/api/taskqueue/taskqueue_service.proto
from googlecloudsdk.third_party.appengine.proto import ProtocolBuffer
import array
import thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from googlecloudsdk.third_party.appengine.datastore.datastore_v3_pb import *
import googlecloudsdk.third_party.appengine.datastore.datastore_v3_pb
from googlecloudsdk.third_party.appengine.proto.message_set import MessageSet
class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
# ErrorCode values
OK = 0
UNKNOWN_QUEUE = 1
TRANSIENT_ERROR = 2
INTERNAL_ERROR = 3
TASK_TOO_LARGE = 4
INVALID_TASK_NAME = 5
INVALID_QUEUE_NAME = 6
INVALID_URL = 7
INVALID_QUEUE_RATE = 8
PERMISSION_DENIED = 9
TASK_ALREADY_EXISTS = 10
TOMBSTONED_TASK = 11
INVALID_ETA = 12
INVALID_REQUEST = 13
UNKNOWN_TASK = 14
TOMBSTONED_QUEUE = 15
DUPLICATE_TASK_NAME = 16
SKIPPED = 17
TOO_MANY_TASKS = 18
INVALID_PAYLOAD = 19
INVALID_RETRY_PARAMETERS = 20
INVALID_QUEUE_MODE = 21
ACL_LOOKUP_ERROR = 22
TRANSACTIONAL_REQUEST_TOO_LARGE = 23
INCORRECT_CREATOR_NAME = 24
TASK_LEASE_EXPIRED = 25
QUEUE_PAUSED = 26
INVALID_TAG = 27
DATASTORE_ERROR = 10000
_ErrorCode_NAMES = {
0: "OK",
1: "UNKNOWN_QUEUE",
2: "TRANSIENT_ERROR",
3: "INTERNAL_ERROR",
4: "TASK_TOO_LARGE",
5: "INVALID_TASK_NAME",
6: "INVALID_QUEUE_NAME",
7: "INVALID_URL",
8: "INVALID_QUEUE_RATE",
9: "PERMISSION_DENIED",
10: "TASK_ALREADY_EXISTS",
11: "TOMBSTONED_TASK",
12: "INVALID_ETA",
13: "INVALID_REQUEST",
14: "UNKNOWN_TASK",
15: "TOMBSTONED_QUEUE",
16: "DUPLICATE_TASK_NAME",
17: "SKIPPED",
18: "TOO_MANY_TASKS",
19: "INVALID_PAYLOAD",
20: "INVALID_RETRY_PARAMETERS",
21: "INVALID_QUEUE_MODE",
22: "ACL_LOOKUP_ERROR",
23: "TRANSACTIONAL_REQUEST_TOO_LARGE",
24: "INCORRECT_CREATOR_NAME",
25: "TASK_LEASE_EXPIRED",
26: "QUEUE_PAUSED",
27: "INVALID_TAG",
10000: "DATASTORE_ERROR",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueServiceError'
class TaskQueueRetryParameters(ProtocolBuffer.ProtocolMessage):
has_retry_limit_ = 0
retry_limit_ = 0
has_age_limit_sec_ = 0
age_limit_sec_ = 0
has_min_backoff_sec_ = 0
min_backoff_sec_ = 0.1
has_max_backoff_sec_ = 0
max_backoff_sec_ = 3600.0
has_max_doublings_ = 0
max_doublings_ = 16
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def retry_limit(self): return self.retry_limit_
def set_retry_limit(self, x):
self.has_retry_limit_ = 1
self.retry_limit_ = x
def clear_retry_limit(self):
if self.has_retry_limit_:
self.has_retry_limit_ = 0
self.retry_limit_ = 0
def has_retry_limit(self): return self.has_retry_limit_
def age_limit_sec(self): return self.age_limit_sec_
def set_age_limit_sec(self, x):
self.has_age_limit_sec_ = 1
self.age_limit_sec_ = x
def clear_age_limit_sec(self):
if self.has_age_limit_sec_:
self.has_age_limit_sec_ = 0
self.age_limit_sec_ = 0
def has_age_limit_sec(self): return self.has_age_limit_sec_
def min_backoff_sec(self): return self.min_backoff_sec_
def set_min_backoff_sec(self, x):
self.has_min_backoff_sec_ = 1
self.min_backoff_sec_ = x
def clear_min_backoff_sec(self):
if self.has_min_backoff_sec_:
self.has_min_backoff_sec_ = 0
self.min_backoff_sec_ = 0.1
def has_min_backoff_sec(self): return self.has_min_backoff_sec_
def max_backoff_sec(self): return self.max_backoff_sec_
def set_max_backoff_sec(self, x):
self.has_max_backoff_sec_ = 1
self.max_backoff_sec_ = x
def clear_max_backoff_sec(self):
if self.has_max_backoff_sec_:
self.has_max_backoff_sec_ = 0
self.max_backoff_sec_ = 3600.0
def has_max_backoff_sec(self): return self.has_max_backoff_sec_
def max_doublings(self): return self.max_doublings_
def set_max_doublings(self, x):
self.has_max_doublings_ = 1
self.max_doublings_ = x
def clear_max_doublings(self):
if self.has_max_doublings_:
self.has_max_doublings_ = 0
self.max_doublings_ = 16
def has_max_doublings(self): return self.has_max_doublings_
def MergeFrom(self, x):
assert x is not self
if (x.has_retry_limit()): self.set_retry_limit(x.retry_limit())
if (x.has_age_limit_sec()): self.set_age_limit_sec(x.age_limit_sec())
if (x.has_min_backoff_sec()): self.set_min_backoff_sec(x.min_backoff_sec())
if (x.has_max_backoff_sec()): self.set_max_backoff_sec(x.max_backoff_sec())
if (x.has_max_doublings()): self.set_max_doublings(x.max_doublings())
def Equals(self, x):
if x is self: return 1
if self.has_retry_limit_ != x.has_retry_limit_: return 0
if self.has_retry_limit_ and self.retry_limit_ != x.retry_limit_: return 0
if self.has_age_limit_sec_ != x.has_age_limit_sec_: return 0
if self.has_age_limit_sec_ and self.age_limit_sec_ != x.age_limit_sec_: return 0
if self.has_min_backoff_sec_ != x.has_min_backoff_sec_: return 0
if self.has_min_backoff_sec_ and self.min_backoff_sec_ != x.min_backoff_sec_: return 0
if self.has_max_backoff_sec_ != x.has_max_backoff_sec_: return 0
if self.has_max_backoff_sec_ and self.max_backoff_sec_ != x.max_backoff_sec_: return 0
if self.has_max_doublings_ != x.has_max_doublings_: return 0
if self.has_max_doublings_ and self.max_doublings_ != x.max_doublings_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_retry_limit_): n += 1 + self.lengthVarInt64(self.retry_limit_)
if (self.has_age_limit_sec_): n += 1 + self.lengthVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_): n += 9
if (self.has_max_backoff_sec_): n += 9
if (self.has_max_doublings_): n += 1 + self.lengthVarInt64(self.max_doublings_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_retry_limit_): n += 1 + self.lengthVarInt64(self.retry_limit_)
if (self.has_age_limit_sec_): n += 1 + self.lengthVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_): n += 9
if (self.has_max_backoff_sec_): n += 9
if (self.has_max_doublings_): n += 1 + self.lengthVarInt64(self.max_doublings_)
return n
def Clear(self):
self.clear_retry_limit()
self.clear_age_limit_sec()
self.clear_min_backoff_sec()
self.clear_max_backoff_sec()
self.clear_max_doublings()
def OutputUnchecked(self, out):
if (self.has_retry_limit_):
out.putVarInt32(8)
out.putVarInt32(self.retry_limit_)
if (self.has_age_limit_sec_):
out.putVarInt32(16)
out.putVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_):
out.putVarInt32(25)
out.putDouble(self.min_backoff_sec_)
if (self.has_max_backoff_sec_):
out.putVarInt32(33)
out.putDouble(self.max_backoff_sec_)
if (self.has_max_doublings_):
out.putVarInt32(40)
out.putVarInt32(self.max_doublings_)
def OutputPartial(self, out):
if (self.has_retry_limit_):
out.putVarInt32(8)
out.putVarInt32(self.retry_limit_)
if (self.has_age_limit_sec_):
out.putVarInt32(16)
out.putVarInt64(self.age_limit_sec_)
if (self.has_min_backoff_sec_):
out.putVarInt32(25)
out.putDouble(self.min_backoff_sec_)
if (self.has_max_backoff_sec_):
out.putVarInt32(33)
out.putDouble(self.max_backoff_sec_)
if (self.has_max_doublings_):
out.putVarInt32(40)
out.putVarInt32(self.max_doublings_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_retry_limit(d.getVarInt32())
continue
if tt == 16:
self.set_age_limit_sec(d.getVarInt64())
continue
if tt == 25:
self.set_min_backoff_sec(d.getDouble())
continue
if tt == 33:
self.set_max_backoff_sec(d.getDouble())
continue
if tt == 40:
self.set_max_doublings(d.getVarInt32())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_retry_limit_: res+=prefix+("retry_limit: %s\n" % self.DebugFormatInt32(self.retry_limit_))
if self.has_age_limit_sec_: res+=prefix+("age_limit_sec: %s\n" % self.DebugFormatInt64(self.age_limit_sec_))
if self.has_min_backoff_sec_: res+=prefix+("min_backoff_sec: %s\n" % self.DebugFormat(self.min_backoff_sec_))
if self.has_max_backoff_sec_: res+=prefix+("max_backoff_sec: %s\n" % self.DebugFormat(self.max_backoff_sec_))
if self.has_max_doublings_: res+=prefix+("max_doublings: %s\n" % self.DebugFormatInt32(self.max_doublings_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kretry_limit = 1
kage_limit_sec = 2
kmin_backoff_sec = 3
kmax_backoff_sec = 4
kmax_doublings = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "retry_limit",
2: "age_limit_sec",
3: "min_backoff_sec",
4: "max_backoff_sec",
5: "max_doublings",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.DOUBLE,
5: ProtocolBuffer.Encoder.NUMERIC,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueRetryParameters'
class TaskQueueAcl(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.user_email_ = []
self.writer_email_ = []
if contents is not None: self.MergeFromString(contents)
def user_email_size(self): return len(self.user_email_)
def user_email_list(self): return self.user_email_
def user_email(self, i):
return self.user_email_[i]
def set_user_email(self, i, x):
self.user_email_[i] = x
def add_user_email(self, x):
self.user_email_.append(x)
def clear_user_email(self):
self.user_email_ = []
def writer_email_size(self): return len(self.writer_email_)
def writer_email_list(self): return self.writer_email_
def writer_email(self, i):
return self.writer_email_[i]
def set_writer_email(self, i, x):
self.writer_email_[i] = x
def add_writer_email(self, x):
self.writer_email_.append(x)
def clear_writer_email(self):
self.writer_email_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.user_email_size()): self.add_user_email(x.user_email(i))
for i in xrange(x.writer_email_size()): self.add_writer_email(x.writer_email(i))
def Equals(self, x):
if x is self: return 1
if len(self.user_email_) != len(x.user_email_): return 0
for e1, e2 in zip(self.user_email_, x.user_email_):
if e1 != e2: return 0
if len(self.writer_email_) != len(x.writer_email_): return 0
for e1, e2 in zip(self.writer_email_, x.writer_email_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.user_email_)
for i in xrange(len(self.user_email_)): n += self.lengthString(len(self.user_email_[i]))
n += 1 * len(self.writer_email_)
for i in xrange(len(self.writer_email_)): n += self.lengthString(len(self.writer_email_[i]))
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.user_email_)
for i in xrange(len(self.user_email_)): n += self.lengthString(len(self.user_email_[i]))
n += 1 * len(self.writer_email_)
for i in xrange(len(self.writer_email_)): n += self.lengthString(len(self.writer_email_[i]))
return n
def Clear(self):
self.clear_user_email()
self.clear_writer_email()
def OutputUnchecked(self, out):
for i in xrange(len(self.user_email_)):
out.putVarInt32(10)
out.putPrefixedString(self.user_email_[i])
for i in xrange(len(self.writer_email_)):
out.putVarInt32(18)
out.putPrefixedString(self.writer_email_[i])
def OutputPartial(self, out):
for i in xrange(len(self.user_email_)):
out.putVarInt32(10)
out.putPrefixedString(self.user_email_[i])
for i in xrange(len(self.writer_email_)):
out.putVarInt32(18)
out.putPrefixedString(self.writer_email_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.add_user_email(d.getPrefixedString())
continue
if tt == 18:
self.add_writer_email(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.user_email_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("user_email%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.writer_email_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("writer_email%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kuser_email = 1
kwriter_email = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "user_email",
2: "writer_email",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAcl'
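# Example round trip for the repeated string fields above (a sketch; it
# assumes Encode()/MergeFromString() from the ProtocolMessage base class in
# google.net.proto.ProtocolBuffer):
#
#   acl = TaskQueueAcl()
#   acl.add_user_email('alice@example.com')
#   acl.add_writer_email('bob@example.com')
#   clone = TaskQueueAcl(acl.Encode())
#   assert clone.user_email_list() == ['alice@example.com']
#   assert clone.writer_email_list() == ['bob@example.com']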
class TaskQueueHttpHeader(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(18)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_key(d.getPrefixedString())
continue
if tt == 18:
self.set_value(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kvalue = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "value",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueHttpHeader'
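# Both fields above are required, which IsInitialized() reports through its
# optional debug_strs argument.  Illustrative sketch:
#
#   hdr = TaskQueueHttpHeader()
#   hdr.set_key('Host')
#   errs = []
#   assert hdr.IsInitialized(errs) == 0
#   assert errs == ['Required field: value not set.']
#   hdr.set_value('example.com')
#   assert hdr.IsInitialized() == 1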
class TaskQueueMode(ProtocolBuffer.ProtocolMessage):
# Mode values
PUSH = 0
PULL = 1
_Mode_NAMES = {
0: "PUSH",
1: "PULL",
}
def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
Mode_Name = classmethod(Mode_Name)
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueMode'
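# The enum helper above returns '' for values outside the name table:
#
#   assert TaskQueueMode.Mode_Name(TaskQueueMode.PULL) == 'PULL'
#   assert TaskQueueMode.Mode_Name(42) == ''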
class TaskQueueAddRequest_Header(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(58)
out.putPrefixedString(self.key_)
out.putVarInt32(66)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(58)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(66)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 52: break
if tt == 58:
self.set_key(d.getPrefixedString())
continue
if tt == 66:
self.set_value(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
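# TaskQueueAddRequest_Header is encoded as a *group* inside
# TaskQueueAddRequest: the parent writes the STARTGROUP tag
# (6 << 3 | 3 == 51) before calling OutputUnchecked() here and the ENDGROUP
# tag (6 << 3 | 4 == 52) after it, which is why TryMerge() above loops until
# it sees 52 rather than checking d.avail().  The key/value fields use tags
# 58 (7 << 3 | 2) and 66 (8 << 3 | 2).  The CronTimetable group below
# follows the same scheme with group tags 99/100 and field tags 106/114.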
class TaskQueueAddRequest_CronTimetable(ProtocolBuffer.ProtocolMessage):
has_schedule_ = 0
schedule_ = ""
has_timezone_ = 0
timezone_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def schedule(self): return self.schedule_
def set_schedule(self, x):
self.has_schedule_ = 1
self.schedule_ = x
def clear_schedule(self):
if self.has_schedule_:
self.has_schedule_ = 0
self.schedule_ = ""
def has_schedule(self): return self.has_schedule_
def timezone(self): return self.timezone_
def set_timezone(self, x):
self.has_timezone_ = 1
self.timezone_ = x
def clear_timezone(self):
if self.has_timezone_:
self.has_timezone_ = 0
self.timezone_ = ""
def has_timezone(self): return self.has_timezone_
def MergeFrom(self, x):
assert x is not self
if (x.has_schedule()): self.set_schedule(x.schedule())
if (x.has_timezone()): self.set_timezone(x.timezone())
def Equals(self, x):
if x is self: return 1
if self.has_schedule_ != x.has_schedule_: return 0
if self.has_schedule_ and self.schedule_ != x.schedule_: return 0
if self.has_timezone_ != x.has_timezone_: return 0
if self.has_timezone_ and self.timezone_ != x.timezone_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_schedule_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: schedule not set.')
if (not self.has_timezone_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: timezone not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.schedule_))
n += self.lengthString(len(self.timezone_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_schedule_):
n += 1
n += self.lengthString(len(self.schedule_))
if (self.has_timezone_):
n += 1
n += self.lengthString(len(self.timezone_))
return n
def Clear(self):
self.clear_schedule()
self.clear_timezone()
def OutputUnchecked(self, out):
out.putVarInt32(106)
out.putPrefixedString(self.schedule_)
out.putVarInt32(114)
out.putPrefixedString(self.timezone_)
def OutputPartial(self, out):
if (self.has_schedule_):
out.putVarInt32(106)
out.putPrefixedString(self.schedule_)
if (self.has_timezone_):
out.putVarInt32(114)
out.putPrefixedString(self.timezone_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 100: break
if tt == 106:
self.set_schedule(d.getPrefixedString())
continue
if tt == 114:
self.set_timezone(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_schedule_: res+=prefix+("schedule: %s\n" % self.DebugFormatString(self.schedule_))
if self.has_timezone_: res+=prefix+("timezone: %s\n" % self.DebugFormatString(self.timezone_))
return res
class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
# RequestMethod values
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5
_RequestMethod_NAMES = {
1: "GET",
2: "POST",
3: "HEAD",
4: "PUT",
5: "DELETE",
}
def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
RequestMethod_Name = classmethod(RequestMethod_Name)
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_method_ = 0
method_ = 2
has_url_ = 0
url_ = ""
has_body_ = 0
body_ = ""
has_transaction_ = 0
transaction_ = None
has_datastore_transaction_ = 0
datastore_transaction_ = ""
has_app_id_ = 0
app_id_ = ""
has_crontimetable_ = 0
crontimetable_ = None
has_description_ = 0
description_ = ""
has_payload_ = 0
payload_ = None
has_retry_parameters_ = 0
retry_parameters_ = None
has_mode_ = 0
mode_ = 0
has_tag_ = 0
tag_ = ""
has_cron_retry_parameters_ = 0
cron_retry_parameters_ = None
def __init__(self, contents=None):
self.header_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def method(self): return self.method_
def set_method(self, x):
self.has_method_ = 1
self.method_ = x
def clear_method(self):
if self.has_method_:
self.has_method_ = 0
self.method_ = 2
def has_method(self): return self.has_method_
def url(self): return self.url_
def set_url(self, x):
self.has_url_ = 1
self.url_ = x
def clear_url(self):
if self.has_url_:
self.has_url_ = 0
self.url_ = ""
def has_url(self): return self.has_url_
def header_size(self): return len(self.header_)
def header_list(self): return self.header_
def header(self, i):
return self.header_[i]
def mutable_header(self, i):
return self.header_[i]
def add_header(self):
x = TaskQueueAddRequest_Header()
self.header_.append(x)
return x
def clear_header(self):
self.header_ = []
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
# Warning: this method does not acquire the lock.
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def datastore_transaction(self): return self.datastore_transaction_
def set_datastore_transaction(self, x):
self.has_datastore_transaction_ = 1
self.datastore_transaction_ = x
def clear_datastore_transaction(self):
if self.has_datastore_transaction_:
self.has_datastore_transaction_ = 0
self.datastore_transaction_ = ""
def has_datastore_transaction(self): return self.has_datastore_transaction_
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def crontimetable(self):
if self.crontimetable_ is None:
self.lazy_init_lock_.acquire()
try:
if self.crontimetable_ is None: self.crontimetable_ = TaskQueueAddRequest_CronTimetable()
finally:
self.lazy_init_lock_.release()
return self.crontimetable_
def mutable_crontimetable(self): self.has_crontimetable_ = 1; return self.crontimetable()
def clear_crontimetable(self):
# Warning: this method does not acquire the lock.
if self.has_crontimetable_:
self.has_crontimetable_ = 0;
if self.crontimetable_ is not None: self.crontimetable_.Clear()
def has_crontimetable(self): return self.has_crontimetable_
def description(self): return self.description_
def set_description(self, x):
self.has_description_ = 1
self.description_ = x
def clear_description(self):
if self.has_description_:
self.has_description_ = 0
self.description_ = ""
def has_description(self): return self.has_description_
def payload(self):
if self.payload_ is None:
self.lazy_init_lock_.acquire()
try:
if self.payload_ is None: self.payload_ = MessageSet()
finally:
self.lazy_init_lock_.release()
return self.payload_
def mutable_payload(self): self.has_payload_ = 1; return self.payload()
def clear_payload(self):
# Warning: this method does not acquire the lock.
if self.has_payload_:
self.has_payload_ = 0;
if self.payload_ is not None: self.payload_.Clear()
def has_payload(self): return self.has_payload_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_retry_parameters_:
self.has_retry_parameters_ = 0;
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def cron_retry_parameters(self):
if self.cron_retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cron_retry_parameters_ is None: self.cron_retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.cron_retry_parameters_
def mutable_cron_retry_parameters(self): self.has_cron_retry_parameters_ = 1; return self.cron_retry_parameters()
def clear_cron_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_cron_retry_parameters_:
self.has_cron_retry_parameters_ = 0;
if self.cron_retry_parameters_ is not None: self.cron_retry_parameters_.Clear()
def has_cron_retry_parameters(self): return self.has_cron_retry_parameters_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_method()): self.set_method(x.method())
if (x.has_url()): self.set_url(x.url())
for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
if (x.has_body()): self.set_body(x.body())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_datastore_transaction()): self.set_datastore_transaction(x.datastore_transaction())
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_description()): self.set_description(x.description())
if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_tag()): self.set_tag(x.tag())
if (x.has_cron_retry_parameters()): self.mutable_cron_retry_parameters().MergeFrom(x.cron_retry_parameters())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_method_ != x.has_method_: return 0
if self.has_method_ and self.method_ != x.method_: return 0
if self.has_url_ != x.has_url_: return 0
if self.has_url_ and self.url_ != x.url_: return 0
if len(self.header_) != len(x.header_): return 0
for e1, e2 in zip(self.header_, x.header_):
if e1 != e2: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_datastore_transaction_ != x.has_datastore_transaction_: return 0
if self.has_datastore_transaction_ and self.datastore_transaction_ != x.datastore_transaction_: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_crontimetable_ != x.has_crontimetable_: return 0
if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
if self.has_payload_ != x.has_payload_: return 0
if self.has_payload_ and self.payload_ != x.payload_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
if self.has_cron_retry_parameters_ != x.has_cron_retry_parameters_: return 0
if self.has_cron_retry_parameters_ and self.cron_retry_parameters_ != x.cron_retry_parameters_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
for p in self.header_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
if (self.has_cron_retry_parameters_ and not self.cron_retry_parameters_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
n += 2 * len(self.header_)
for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
if (self.has_datastore_transaction_): n += 2 + self.lengthString(len(self.datastore_transaction_))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_mode_): n += 2 + self.lengthVarInt64(self.mode_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_cron_retry_parameters_): n += 2 + self.lengthString(self.cron_retry_parameters_.ByteSize())
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
n += 2 * len(self.header_)
for i in xrange(len(self.header_)): n += self.header_[i].ByteSizePartial()
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
if (self.has_datastore_transaction_): n += 2 + self.lengthString(len(self.datastore_transaction_))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSizePartial()
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSizePartial())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_mode_): n += 2 + self.lengthVarInt64(self.mode_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_cron_retry_parameters_): n += 2 + self.lengthString(self.cron_retry_parameters_.ByteSizePartial())
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_eta_usec()
self.clear_method()
self.clear_url()
self.clear_header()
self.clear_body()
self.clear_transaction()
self.clear_datastore_transaction()
self.clear_app_id()
self.clear_crontimetable()
self.clear_description()
self.clear_payload()
self.clear_retry_parameters()
self.clear_mode()
self.clear_tag()
self.clear_cron_retry_parameters()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
for i in xrange(len(self.header_)):
out.putVarInt32(51)
self.header_[i].OutputUnchecked(out)
out.putVarInt32(52)
if (self.has_body_):
out.putVarInt32(74)
out.putPrefixedString(self.body_)
if (self.has_transaction_):
out.putVarInt32(82)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_app_id_):
out.putVarInt32(90)
out.putPrefixedString(self.app_id_)
if (self.has_crontimetable_):
out.putVarInt32(99)
self.crontimetable_.OutputUnchecked(out)
out.putVarInt32(100)
if (self.has_description_):
out.putVarInt32(122)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(130)
out.putVarInt32(self.payload_.ByteSize())
self.payload_.OutputUnchecked(out)
if (self.has_retry_parameters_):
out.putVarInt32(138)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_mode_):
out.putVarInt32(144)
out.putVarInt32(self.mode_)
if (self.has_tag_):
out.putVarInt32(154)
out.putPrefixedString(self.tag_)
if (self.has_cron_retry_parameters_):
out.putVarInt32(162)
out.putVarInt32(self.cron_retry_parameters_.ByteSize())
self.cron_retry_parameters_.OutputUnchecked(out)
if (self.has_datastore_transaction_):
out.putVarInt32(170)
out.putPrefixedString(self.datastore_transaction_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
for i in xrange(len(self.header_)):
out.putVarInt32(51)
self.header_[i].OutputPartial(out)
out.putVarInt32(52)
if (self.has_body_):
out.putVarInt32(74)
out.putPrefixedString(self.body_)
if (self.has_transaction_):
out.putVarInt32(82)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
if (self.has_app_id_):
out.putVarInt32(90)
out.putPrefixedString(self.app_id_)
if (self.has_crontimetable_):
out.putVarInt32(99)
self.crontimetable_.OutputPartial(out)
out.putVarInt32(100)
if (self.has_description_):
out.putVarInt32(122)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(130)
out.putVarInt32(self.payload_.ByteSizePartial())
self.payload_.OutputPartial(out)
if (self.has_retry_parameters_):
out.putVarInt32(138)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_mode_):
out.putVarInt32(144)
out.putVarInt32(self.mode_)
if (self.has_tag_):
out.putVarInt32(154)
out.putPrefixedString(self.tag_)
if (self.has_cron_retry_parameters_):
out.putVarInt32(162)
out.putVarInt32(self.cron_retry_parameters_.ByteSizePartial())
self.cron_retry_parameters_.OutputPartial(out)
if (self.has_datastore_transaction_):
out.putVarInt32(170)
out.putPrefixedString(self.datastore_transaction_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 34:
self.set_url(d.getPrefixedString())
continue
if tt == 40:
self.set_method(d.getVarInt32())
continue
if tt == 51:
self.add_header().TryMerge(d)
continue
if tt == 74:
self.set_body(d.getPrefixedString())
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 90:
self.set_app_id(d.getPrefixedString())
continue
if tt == 99:
self.mutable_crontimetable().TryMerge(d)
continue
if tt == 122:
self.set_description(d.getPrefixedString())
continue
if tt == 130:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_payload().TryMerge(tmp)
continue
if tt == 138:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 144:
self.set_mode(d.getVarInt32())
continue
if tt == 154:
self.set_tag(d.getPrefixedString())
continue
if tt == 162:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cron_retry_parameters().TryMerge(tmp)
continue
if tt == 170:
self.set_datastore_transaction(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
cnt=0
for e in self.header_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Header%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_datastore_transaction_: res+=prefix+("datastore_transaction: %s\n" % self.DebugFormatString(self.datastore_transaction_))
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_crontimetable_:
res+=prefix+"CronTimetable {\n"
res+=self.crontimetable_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
if self.has_payload_:
res+=prefix+"payload <\n"
res+=self.payload_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
if self.has_cron_retry_parameters_:
res+=prefix+"cron_retry_parameters <\n"
res+=self.cron_retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kqueue_name = 1
ktask_name = 2
keta_usec = 3
kmethod = 5
kurl = 4
kHeaderGroup = 6
kHeaderkey = 7
kHeadervalue = 8
kbody = 9
ktransaction = 10
kdatastore_transaction = 21
kapp_id = 11
kCronTimetableGroup = 12
kCronTimetableschedule = 13
kCronTimetabletimezone = 14
kdescription = 15
kpayload = 16
kretry_parameters = 17
kmode = 18
ktag = 19
kcron_retry_parameters = 20
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "task_name",
3: "eta_usec",
4: "url",
5: "method",
6: "Header",
7: "key",
8: "value",
9: "body",
10: "transaction",
11: "app_id",
12: "CronTimetable",
13: "schedule",
14: "timezone",
15: "description",
16: "payload",
17: "retry_parameters",
18: "mode",
19: "tag",
20: "cron_retry_parameters",
21: "datastore_transaction",
}, 21)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STARTGROUP,
7: ProtocolBuffer.Encoder.STRING,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.STARTGROUP,
13: ProtocolBuffer.Encoder.STRING,
14: ProtocolBuffer.Encoder.STRING,
15: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.STRING,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.STRING,
20: ProtocolBuffer.Encoder.STRING,
21: ProtocolBuffer.Encoder.STRING,
}, 21, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddRequest'
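# Sketch of building a minimal, fully-initialized add request.  Field
# semantics in the inline comments are inferred from the accessors above and
# from task queue conventions (e.g. method_ defaults to 2 == POST); treat
# them as assumptions rather than API guarantees:
#
#   req = TaskQueueAddRequest()
#   req.set_queue_name('default')
#   req.set_task_name('')       # empty: presumably lets the backend pick a name
#   req.set_eta_usec(0)         # an ETA in the past should run as soon as possible
#   req.set_url('/work')
#   hdr = req.add_header()
#   hdr.set_key('Content-Type')
#   hdr.set_value('text/plain')
#   assert req.IsInitialized()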
class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
has_chosen_task_name_ = 0
chosen_task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def chosen_task_name(self): return self.chosen_task_name_
def set_chosen_task_name(self, x):
self.has_chosen_task_name_ = 1
self.chosen_task_name_ = x
def clear_chosen_task_name(self):
if self.has_chosen_task_name_:
self.has_chosen_task_name_ = 0
self.chosen_task_name_ = ""
def has_chosen_task_name(self): return self.has_chosen_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
def Equals(self, x):
if x is self: return 1
if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n
def Clear(self):
self.clear_chosen_task_name()
def OutputUnchecked(self, out):
if (self.has_chosen_task_name_):
out.putVarInt32(10)
out.putPrefixedString(self.chosen_task_name_)
def OutputPartial(self, out):
if (self.has_chosen_task_name_):
out.putVarInt32(10)
out.putPrefixedString(self.chosen_task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_chosen_task_name(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kchosen_task_name = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "chosen_task_name",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddResponse'
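# Reading the response (sketch; `raw_bytes` is a placeholder for whatever
# the RPC layer returned):
#
#   resp = TaskQueueAddResponse(raw_bytes)
#   if resp.has_chosen_task_name():
#     task_name = resp.chosen_task_name()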
class TaskQueueBulkAddRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.add_request_ = []
if contents is not None: self.MergeFromString(contents)
def add_request_size(self): return len(self.add_request_)
def add_request_list(self): return self.add_request_
def add_request(self, i):
return self.add_request_[i]
def mutable_add_request(self, i):
return self.add_request_[i]
def add_add_request(self):
x = TaskQueueAddRequest()
self.add_request_.append(x)
return x
def clear_add_request(self):
self.add_request_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.add_request_size()): self.add_add_request().CopyFrom(x.add_request(i))
def Equals(self, x):
if x is self: return 1
if len(self.add_request_) != len(x.add_request_): return 0
for e1, e2 in zip(self.add_request_, x.add_request_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.add_request_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.add_request_)
for i in xrange(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.add_request_)
for i in xrange(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_add_request()
def OutputUnchecked(self, out):
for i in xrange(len(self.add_request_)):
out.putVarInt32(10)
out.putVarInt32(self.add_request_[i].ByteSize())
self.add_request_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.add_request_)):
out.putVarInt32(10)
out.putVarInt32(self.add_request_[i].ByteSizePartial())
self.add_request_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_add_request().TryMerge(tmp)
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.add_request_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("add_request%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kadd_request = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "add_request",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddRequest'
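# Unlike the Header group above, each add_request is nested as an ordinary
# length-delimited submessage (tag 10 followed by a varint byte count), as
# OutputUnchecked()/TryMerge() above show.  Usage sketch, assuming Encode()
# from the ProtocolMessage base class:
#
#   bulk = TaskQueueBulkAddRequest()
#   for i in xrange(3):
#     r = bulk.add_add_request()
#     r.set_queue_name('default')
#     r.set_task_name('t%d' % i)
#     r.set_eta_usec(0)
#   payload = bulk.Encode()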
class TaskQueueBulkAddResponse_TaskResult(ProtocolBuffer.ProtocolMessage):
has_result_ = 0
result_ = 0
has_chosen_task_name_ = 0
chosen_task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def result(self): return self.result_
def set_result(self, x):
self.has_result_ = 1
self.result_ = x
def clear_result(self):
if self.has_result_:
self.has_result_ = 0
self.result_ = 0
def has_result(self): return self.has_result_
def chosen_task_name(self): return self.chosen_task_name_
def set_chosen_task_name(self, x):
self.has_chosen_task_name_ = 1
self.chosen_task_name_ = x
def clear_chosen_task_name(self):
if self.has_chosen_task_name_:
self.has_chosen_task_name_ = 0
self.chosen_task_name_ = ""
def has_chosen_task_name(self): return self.has_chosen_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_result()): self.set_result(x.result())
if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
def Equals(self, x):
if x is self: return 1
if self.has_result_ != x.has_result_: return 0
if self.has_result_ and self.result_ != x.result_: return 0
if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_result_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.result_)
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_result_):
n += 1
n += self.lengthVarInt64(self.result_)
if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
return n
def Clear(self):
self.clear_result()
self.clear_chosen_task_name()
def OutputUnchecked(self, out):
out.putVarInt32(16)
out.putVarInt32(self.result_)
if (self.has_chosen_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.chosen_task_name_)
def OutputPartial(self, out):
if (self.has_result_):
out.putVarInt32(16)
out.putVarInt32(self.result_)
if (self.has_chosen_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.chosen_task_name_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 16:
self.set_result(d.getVarInt32())
continue
if tt == 26:
self.set_chosen_task_name(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
return res
class TaskQueueBulkAddResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.taskresult_ = []
if contents is not None: self.MergeFromString(contents)
def taskresult_size(self): return len(self.taskresult_)
def taskresult_list(self): return self.taskresult_
def taskresult(self, i):
return self.taskresult_[i]
def mutable_taskresult(self, i):
return self.taskresult_[i]
def add_taskresult(self):
x = TaskQueueBulkAddResponse_TaskResult()
self.taskresult_.append(x)
return x
def clear_taskresult(self):
self.taskresult_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.taskresult_size()): self.add_taskresult().CopyFrom(x.taskresult(i))
def Equals(self, x):
if x is self: return 1
if len(self.taskresult_) != len(x.taskresult_): return 0
for e1, e2 in zip(self.taskresult_, x.taskresult_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.taskresult_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.taskresult_)
for i in xrange(len(self.taskresult_)): n += self.taskresult_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.taskresult_)
for i in xrange(len(self.taskresult_)): n += self.taskresult_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_taskresult()
def OutputUnchecked(self, out):
for i in xrange(len(self.taskresult_)):
out.putVarInt32(11)
self.taskresult_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in xrange(len(self.taskresult_)):
out.putVarInt32(11)
self.taskresult_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_taskresult().TryMerge(d)
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.taskresult_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("TaskResult%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kTaskResultGroup = 1
kTaskResultresult = 2
kTaskResultchosen_task_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "TaskResult",
2: "result",
3: "chosen_task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddResponse'
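# Iterating the per-task results (sketch; `raw_bytes` is again a placeholder
# for the RPC layer's reply; chosen_task_name() returns '' when unset):
#
#   resp = TaskQueueBulkAddResponse(raw_bytes)
#   for i in xrange(resp.taskresult_size()):
#     tr = resp.taskresult(i)
#     print tr.result(), tr.chosen_task_name()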
class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
self.task_name_ = []
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name_size(self): return len(self.task_name_)
def task_name_list(self): return self.task_name_
def task_name(self, i):
return self.task_name_[i]
def set_task_name(self, i, x):
self.task_name_[i] = x
def add_task_name(self, x):
self.task_name_.append(x)
def clear_task_name(self):
self.task_name_ = []
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
for i in xrange(x.task_name_size()): self.add_task_name(x.task_name(i))
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if len(self.task_name_) != len(x.task_name_): return 0
for e1, e2 in zip(self.task_name_, x.task_name_):
if e1 != e2: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += 1 * len(self.task_name_)
for i in xrange(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
n += 1 * len(self.task_name_)
for i in xrange(len(self.task_name_)): n += self.lengthString(len(self.task_name_[i]))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_app_id()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
for i in xrange(len(self.task_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_[i])
if (self.has_app_id_):
out.putVarInt32(26)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
for i in xrange(len(self.task_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_[i])
if (self.has_app_id_):
out.putVarInt32(26)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.add_task_name(d.getPrefixedString())
continue
if tt == 26:
self.set_app_id(d.getPrefixedString())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
cnt=0
for e in self.task_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("task_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kqueue_name = 1
ktask_name = 2
kapp_id = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "task_name",
3: "app_id",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteRequest'
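# Deleting several tasks in one call (sketch).  Pairing each entry of the
# response's repeated result field with the task name at the same index is
# an assumption about the service, not something this file guarantees:
#
#   req = TaskQueueDeleteRequest()
#   req.set_queue_name('default')
#   req.add_task_name('t0')
#   req.add_task_name('t1')
#   assert req.IsInitialized()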
class TaskQueueDeleteResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.result_ = []
if contents is not None: self.MergeFromString(contents)
def result_size(self): return len(self.result_)
def result_list(self): return self.result_
def result(self, i):
return self.result_[i]
def set_result(self, i, x):
self.result_[i] = x
def add_result(self, x):
self.result_.append(x)
def clear_result(self):
self.result_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.result_size()): self.add_result(x.result(i))
def Equals(self, x):
if x is self: return 1
if len(self.result_) != len(x.result_): return 0
for e1, e2 in zip(self.result_, x.result_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.result_)
for i in xrange(len(self.result_)): n += self.lengthVarInt64(self.result_[i])
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.result_)
for i in xrange(len(self.result_)): n += self.lengthVarInt64(self.result_[i])
return n
def Clear(self):
self.clear_result()
def OutputUnchecked(self, out):
for i in xrange(len(self.result_)):
out.putVarInt32(24)
out.putVarInt32(self.result_[i])
def OutputPartial(self, out):
for i in xrange(len(self.result_)):
out.putVarInt32(24)
out.putVarInt32(self.result_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 24:
self.add_result(d.getVarInt32())
continue
      # Tag 0 is special: it indicates an error, so raise an exception if we see it.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.result_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("result%s: %s\n" % (elm, self.DebugFormatInt32(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kresult = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
3: "result",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteResponse'
class TaskQueueForceRunRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_task_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_task_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
ktask_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunRequest'
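# Illustrative construction (a sketch; the queue and task names are
# hypothetical). queue_name and task_name are required, app_id is optional,
# and IsInitialized() reports what is missing via its debug_strs argument:
#
#   req = TaskQueueForceRunRequest()
#   req.set_queue_name('default')
#   req.set_task_name('task-0001')
#   problems = []
#   assert req.IsInitialized(problems), problems
#   data = req.Encode()  # assuming Encode() from the ProtocolMessage base class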
class TaskQueueForceRunResponse(ProtocolBuffer.ProtocolMessage):
has_result_ = 0
result_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def result(self): return self.result_
def set_result(self, x):
self.has_result_ = 1
self.result_ = x
def clear_result(self):
if self.has_result_:
self.has_result_ = 0
self.result_ = 0
def has_result(self): return self.has_result_
def MergeFrom(self, x):
assert x is not self
if (x.has_result()): self.set_result(x.result())
def Equals(self, x):
if x is self: return 1
if self.has_result_ != x.has_result_: return 0
if self.has_result_ and self.result_ != x.result_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_result_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.result_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_result_):
n += 1
n += self.lengthVarInt64(self.result_)
return n
def Clear(self):
self.clear_result()
def OutputUnchecked(self, out):
out.putVarInt32(24)
out.putVarInt32(self.result_)
def OutputPartial(self, out):
if (self.has_result_):
out.putVarInt32(24)
out.putVarInt32(self.result_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 24:
self.set_result(d.getVarInt32())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kresult = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
3: "result",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunResponse'
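# Illustrative decode (a sketch; `data` is a hypothetical wire string). The
# single required `result` field is a varint status code:
#
#   resp = TaskQueueForceRunResponse(data)  # __init__ calls MergeFromString
#   if resp.has_result():
#     print resp.result()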
class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_bucket_refill_per_second_ = 0
bucket_refill_per_second_ = 0.0
has_bucket_capacity_ = 0
bucket_capacity_ = 0
has_user_specified_rate_ = 0
user_specified_rate_ = ""
has_retry_parameters_ = 0
retry_parameters_ = None
has_max_concurrent_requests_ = 0
max_concurrent_requests_ = 0
has_mode_ = 0
mode_ = 0
has_acl_ = 0
acl_ = None
def __init__(self, contents=None):
self.header_override_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def bucket_refill_per_second(self): return self.bucket_refill_per_second_
def set_bucket_refill_per_second(self, x):
self.has_bucket_refill_per_second_ = 1
self.bucket_refill_per_second_ = x
def clear_bucket_refill_per_second(self):
if self.has_bucket_refill_per_second_:
self.has_bucket_refill_per_second_ = 0
self.bucket_refill_per_second_ = 0.0
def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
def bucket_capacity(self): return self.bucket_capacity_
def set_bucket_capacity(self, x):
self.has_bucket_capacity_ = 1
self.bucket_capacity_ = x
def clear_bucket_capacity(self):
if self.has_bucket_capacity_:
self.has_bucket_capacity_ = 0
self.bucket_capacity_ = 0
def has_bucket_capacity(self): return self.has_bucket_capacity_
def user_specified_rate(self): return self.user_specified_rate_
def set_user_specified_rate(self, x):
self.has_user_specified_rate_ = 1
self.user_specified_rate_ = x
def clear_user_specified_rate(self):
if self.has_user_specified_rate_:
self.has_user_specified_rate_ = 0
self.user_specified_rate_ = ""
def has_user_specified_rate(self): return self.has_user_specified_rate_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def max_concurrent_requests(self): return self.max_concurrent_requests_
def set_max_concurrent_requests(self, x):
self.has_max_concurrent_requests_ = 1
self.max_concurrent_requests_ = x
def clear_max_concurrent_requests(self):
if self.has_max_concurrent_requests_:
self.has_max_concurrent_requests_ = 0
self.max_concurrent_requests_ = 0
def has_max_concurrent_requests(self): return self.has_max_concurrent_requests_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def acl(self):
if self.acl_ is None:
self.lazy_init_lock_.acquire()
try:
if self.acl_ is None: self.acl_ = TaskQueueAcl()
finally:
self.lazy_init_lock_.release()
return self.acl_
def mutable_acl(self): self.has_acl_ = 1; return self.acl()
def clear_acl(self):
# Warning: this method does not acquire the lock.
if self.has_acl_:
      self.has_acl_ = 0
if self.acl_ is not None: self.acl_.Clear()
def has_acl(self): return self.has_acl_
def header_override_size(self): return len(self.header_override_)
def header_override_list(self): return self.header_override_
def header_override(self, i):
return self.header_override_[i]
def mutable_header_override(self, i):
return self.header_override_[i]
def add_header_override(self):
x = TaskQueueHttpHeader()
self.header_override_.append(x)
return x
def clear_header_override(self):
self.header_override_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_max_concurrent_requests()): self.set_max_concurrent_requests(x.max_concurrent_requests())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
for i in xrange(x.header_override_size()): self.add_header_override().CopyFrom(x.header_override(i))
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_max_concurrent_requests_ != x.has_max_concurrent_requests_: return 0
if self.has_max_concurrent_requests_ and self.max_concurrent_requests_ != x.max_concurrent_requests_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_acl_ != x.has_acl_: return 0
if self.has_acl_ and self.acl_ != x.acl_: return 0
if len(self.header_override_) != len(x.header_override_): return 0
for e1, e2 in zip(self.header_override_, x.header_override_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_bucket_refill_per_second_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_refill_per_second not set.')
if (not self.has_bucket_capacity_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_capacity not set.')
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
for p in self.header_override_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
n += self.lengthVarInt64(self.bucket_capacity_)
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
n += 1 * len(self.header_override_)
for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSize())
return n + 11
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_bucket_refill_per_second_):
n += 9
if (self.has_bucket_capacity_):
n += 1
n += self.lengthVarInt64(self.bucket_capacity_)
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
n += 1 * len(self.header_override_)
for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_bucket_refill_per_second()
self.clear_bucket_capacity()
self.clear_user_specified_rate()
self.clear_retry_parameters()
self.clear_max_concurrent_requests()
self.clear_mode()
self.clear_acl()
self.clear_header_override()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
out.putVarInt32(32)
out.putVarInt32(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
if (self.has_retry_parameters_):
out.putVarInt32(50)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(56)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(64)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(74)
out.putVarInt32(self.acl_.ByteSize())
self.acl_.OutputUnchecked(out)
for i in xrange(len(self.header_override_)):
out.putVarInt32(82)
out.putVarInt32(self.header_override_[i].ByteSize())
self.header_override_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_bucket_refill_per_second_):
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
if (self.has_bucket_capacity_):
out.putVarInt32(32)
out.putVarInt32(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
if (self.has_retry_parameters_):
out.putVarInt32(50)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(56)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(64)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(74)
out.putVarInt32(self.acl_.ByteSizePartial())
self.acl_.OutputPartial(out)
for i in xrange(len(self.header_override_)):
out.putVarInt32(82)
out.putVarInt32(self.header_override_[i].ByteSizePartial())
self.header_override_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 25:
self.set_bucket_refill_per_second(d.getDouble())
continue
if tt == 32:
self.set_bucket_capacity(d.getVarInt32())
continue
if tt == 42:
self.set_user_specified_rate(d.getPrefixedString())
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 56:
self.set_max_concurrent_requests(d.getVarInt32())
continue
if tt == 64:
self.set_mode(d.getVarInt32())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_acl().TryMerge(tmp)
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_header_override().TryMerge(tmp)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormatInt32(self.bucket_capacity_))
if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_max_concurrent_requests_: res+=prefix+("max_concurrent_requests: %s\n" % self.DebugFormatInt32(self.max_concurrent_requests_))
if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
if self.has_acl_:
res+=prefix+"acl <\n"
res+=self.acl_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.header_override_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("header_override%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kbucket_refill_per_second = 3
kbucket_capacity = 4
kuser_specified_rate = 5
kretry_parameters = 6
kmax_concurrent_requests = 7
kmode = 8
kacl = 9
kheader_override = 10
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "bucket_refill_per_second",
4: "bucket_capacity",
5: "user_specified_rate",
6: "retry_parameters",
7: "max_concurrent_requests",
8: "mode",
9: "acl",
10: "header_override",
}, 10)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
}, 10, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueRequest'
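# Illustrative update request (a sketch; all values are hypothetical). Nested
# messages go through the lazily created mutable_*() accessors defined above,
# and header_override is a repeated message populated via add_header_override().
# The retry_limit and key/value setters below are assumptions about
# TaskQueueRetryParameters and TaskQueueHttpHeader, which are defined elsewhere:
#
#   req = TaskQueueUpdateQueueRequest()
#   req.set_queue_name('default')
#   req.set_bucket_refill_per_second(5.0)   # required, double (tag 25)
#   req.set_bucket_capacity(100)            # required, varint (tag 32)
#   req.mutable_retry_parameters().set_retry_limit(3)
#   hdr = req.add_header_override()
#   hdr.set_key('Host')
#   hdr.set_value('example.com')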
class TaskQueueUpdateQueueResponse(ProtocolBuffer.ProtocolMessage):
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueResponse'
class TaskQueueFetchQueuesRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_max_rows_ = 0
max_rows_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def max_rows(self): return self.max_rows_
def set_max_rows(self, x):
self.has_max_rows_ = 1
self.max_rows_ = x
def clear_max_rows(self):
if self.has_max_rows_:
self.has_max_rows_ = 0
self.max_rows_ = 0
def has_max_rows(self): return self.has_max_rows_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_max_rows()): self.set_max_rows(x.max_rows())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_max_rows_ != x.has_max_rows_: return 0
if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_max_rows_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: max_rows not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthVarInt64(self.max_rows_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_max_rows_):
n += 1
n += self.lengthVarInt64(self.max_rows_)
return n
def Clear(self):
self.clear_app_id()
self.clear_max_rows()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(16)
out.putVarInt32(self.max_rows_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_max_rows_):
out.putVarInt32(16)
out.putVarInt32(self.max_rows_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 16:
self.set_max_rows(d.getVarInt32())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kmax_rows = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "max_rows",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesRequest'
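# Illustrative sizing (a sketch). ByteSize() assumes required fields are set
# and folds their tag bytes into the trailing constant, while ByteSizePartial()
# only counts fields that are actually present:
#
#   req = TaskQueueFetchQueuesRequest()
#   req.set_max_rows(50)                            # required
#   assert req.ByteSize() == req.ByteSizePartial()  # equal once max_rows is set
#   req.clear_max_rows()
#   assert req.ByteSizePartial() == 0               # nothing set, nothing counted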
class TaskQueueFetchQueuesResponse_Queue(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_bucket_refill_per_second_ = 0
bucket_refill_per_second_ = 0.0
has_bucket_capacity_ = 0
bucket_capacity_ = 0.0
has_user_specified_rate_ = 0
user_specified_rate_ = ""
has_paused_ = 0
paused_ = 0
has_retry_parameters_ = 0
retry_parameters_ = None
has_max_concurrent_requests_ = 0
max_concurrent_requests_ = 0
has_mode_ = 0
mode_ = 0
has_acl_ = 0
acl_ = None
has_creator_name_ = 0
creator_name_ = "apphosting"
def __init__(self, contents=None):
self.header_override_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def bucket_refill_per_second(self): return self.bucket_refill_per_second_
def set_bucket_refill_per_second(self, x):
self.has_bucket_refill_per_second_ = 1
self.bucket_refill_per_second_ = x
def clear_bucket_refill_per_second(self):
if self.has_bucket_refill_per_second_:
self.has_bucket_refill_per_second_ = 0
self.bucket_refill_per_second_ = 0.0
def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
def bucket_capacity(self): return self.bucket_capacity_
def set_bucket_capacity(self, x):
self.has_bucket_capacity_ = 1
self.bucket_capacity_ = x
def clear_bucket_capacity(self):
if self.has_bucket_capacity_:
self.has_bucket_capacity_ = 0
self.bucket_capacity_ = 0.0
def has_bucket_capacity(self): return self.has_bucket_capacity_
def user_specified_rate(self): return self.user_specified_rate_
def set_user_specified_rate(self, x):
self.has_user_specified_rate_ = 1
self.user_specified_rate_ = x
def clear_user_specified_rate(self):
if self.has_user_specified_rate_:
self.has_user_specified_rate_ = 0
self.user_specified_rate_ = ""
def has_user_specified_rate(self): return self.has_user_specified_rate_
def paused(self): return self.paused_
def set_paused(self, x):
self.has_paused_ = 1
self.paused_ = x
def clear_paused(self):
if self.has_paused_:
self.has_paused_ = 0
self.paused_ = 0
def has_paused(self): return self.has_paused_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_retry_parameters_:
      self.has_retry_parameters_ = 0
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def max_concurrent_requests(self): return self.max_concurrent_requests_
def set_max_concurrent_requests(self, x):
self.has_max_concurrent_requests_ = 1
self.max_concurrent_requests_ = x
def clear_max_concurrent_requests(self):
if self.has_max_concurrent_requests_:
self.has_max_concurrent_requests_ = 0
self.max_concurrent_requests_ = 0
def has_max_concurrent_requests(self): return self.has_max_concurrent_requests_
def mode(self): return self.mode_
def set_mode(self, x):
self.has_mode_ = 1
self.mode_ = x
def clear_mode(self):
if self.has_mode_:
self.has_mode_ = 0
self.mode_ = 0
def has_mode(self): return self.has_mode_
def acl(self):
if self.acl_ is None:
self.lazy_init_lock_.acquire()
try:
if self.acl_ is None: self.acl_ = TaskQueueAcl()
finally:
self.lazy_init_lock_.release()
return self.acl_
def mutable_acl(self): self.has_acl_ = 1; return self.acl()
def clear_acl(self):
# Warning: this method does not acquire the lock.
if self.has_acl_:
      self.has_acl_ = 0
if self.acl_ is not None: self.acl_.Clear()
def has_acl(self): return self.has_acl_
def header_override_size(self): return len(self.header_override_)
def header_override_list(self): return self.header_override_
def header_override(self, i):
return self.header_override_[i]
def mutable_header_override(self, i):
return self.header_override_[i]
def add_header_override(self):
x = TaskQueueHttpHeader()
self.header_override_.append(x)
return x
def clear_header_override(self):
self.header_override_ = []
def creator_name(self): return self.creator_name_
def set_creator_name(self, x):
self.has_creator_name_ = 1
self.creator_name_ = x
def clear_creator_name(self):
if self.has_creator_name_:
self.has_creator_name_ = 0
self.creator_name_ = "apphosting"
def has_creator_name(self): return self.has_creator_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
if (x.has_paused()): self.set_paused(x.paused())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_max_concurrent_requests()): self.set_max_concurrent_requests(x.max_concurrent_requests())
if (x.has_mode()): self.set_mode(x.mode())
if (x.has_acl()): self.mutable_acl().MergeFrom(x.acl())
for i in xrange(x.header_override_size()): self.add_header_override().CopyFrom(x.header_override(i))
if (x.has_creator_name()): self.set_creator_name(x.creator_name())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
if self.has_paused_ != x.has_paused_: return 0
if self.has_paused_ and self.paused_ != x.paused_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_max_concurrent_requests_ != x.has_max_concurrent_requests_: return 0
if self.has_max_concurrent_requests_ and self.max_concurrent_requests_ != x.max_concurrent_requests_: return 0
if self.has_mode_ != x.has_mode_: return 0
if self.has_mode_ and self.mode_ != x.mode_: return 0
if self.has_acl_ != x.has_acl_: return 0
if self.has_acl_ and self.acl_ != x.acl_: return 0
if len(self.header_override_) != len(x.header_override_): return 0
for e1, e2 in zip(self.header_override_, x.header_override_):
if e1 != e2: return 0
if self.has_creator_name_ != x.has_creator_name_: return 0
if self.has_creator_name_ and self.creator_name_ != x.creator_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_bucket_refill_per_second_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_refill_per_second not set.')
if (not self.has_bucket_capacity_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: bucket_capacity not set.')
if (not self.has_paused_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: paused not set.')
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
if (self.has_acl_ and not self.acl_.IsInitialized(debug_strs)): initialized = 0
for p in self.header_override_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSize())
n += 1 * len(self.header_override_)
for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSize())
if (self.has_creator_name_): n += 1 + self.lengthString(len(self.creator_name_))
return n + 21
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_bucket_refill_per_second_):
n += 9
if (self.has_bucket_capacity_):
n += 9
if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
if (self.has_paused_):
n += 2
if (self.has_retry_parameters_): n += 1 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_max_concurrent_requests_): n += 1 + self.lengthVarInt64(self.max_concurrent_requests_)
if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
if (self.has_acl_): n += 1 + self.lengthString(self.acl_.ByteSizePartial())
n += 1 * len(self.header_override_)
for i in xrange(len(self.header_override_)): n += self.lengthString(self.header_override_[i].ByteSizePartial())
if (self.has_creator_name_): n += 1 + self.lengthString(len(self.creator_name_))
return n
def Clear(self):
self.clear_queue_name()
self.clear_bucket_refill_per_second()
self.clear_bucket_capacity()
self.clear_user_specified_rate()
self.clear_paused()
self.clear_retry_parameters()
self.clear_max_concurrent_requests()
self.clear_mode()
self.clear_acl()
self.clear_header_override()
self.clear_creator_name()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
out.putVarInt32(33)
out.putDouble(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
out.putVarInt32(48)
out.putBoolean(self.paused_)
if (self.has_retry_parameters_):
out.putVarInt32(58)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(64)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(72)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(82)
out.putVarInt32(self.acl_.ByteSize())
self.acl_.OutputUnchecked(out)
for i in xrange(len(self.header_override_)):
out.putVarInt32(90)
out.putVarInt32(self.header_override_[i].ByteSize())
self.header_override_[i].OutputUnchecked(out)
if (self.has_creator_name_):
out.putVarInt32(98)
out.putPrefixedString(self.creator_name_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_bucket_refill_per_second_):
out.putVarInt32(25)
out.putDouble(self.bucket_refill_per_second_)
if (self.has_bucket_capacity_):
out.putVarInt32(33)
out.putDouble(self.bucket_capacity_)
if (self.has_user_specified_rate_):
out.putVarInt32(42)
out.putPrefixedString(self.user_specified_rate_)
if (self.has_paused_):
out.putVarInt32(48)
out.putBoolean(self.paused_)
if (self.has_retry_parameters_):
out.putVarInt32(58)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_max_concurrent_requests_):
out.putVarInt32(64)
out.putVarInt32(self.max_concurrent_requests_)
if (self.has_mode_):
out.putVarInt32(72)
out.putVarInt32(self.mode_)
if (self.has_acl_):
out.putVarInt32(82)
out.putVarInt32(self.acl_.ByteSizePartial())
self.acl_.OutputPartial(out)
for i in xrange(len(self.header_override_)):
out.putVarInt32(90)
out.putVarInt32(self.header_override_[i].ByteSizePartial())
self.header_override_[i].OutputPartial(out)
if (self.has_creator_name_):
out.putVarInt32(98)
out.putPrefixedString(self.creator_name_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
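      # 12 is the ENDGROUP tag for the enclosing Queue group (field 1 of
      # TaskQueueFetchQueuesResponse, wire type 4); it terminates this entry.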
if tt == 12: break
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 25:
self.set_bucket_refill_per_second(d.getDouble())
continue
if tt == 33:
self.set_bucket_capacity(d.getDouble())
continue
if tt == 42:
self.set_user_specified_rate(d.getPrefixedString())
continue
if tt == 48:
self.set_paused(d.getBoolean())
continue
if tt == 58:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 64:
self.set_max_concurrent_requests(d.getVarInt32())
continue
if tt == 72:
self.set_mode(d.getVarInt32())
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_acl().TryMerge(tmp)
continue
if tt == 90:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_header_override().TryMerge(tmp)
continue
if tt == 98:
self.set_creator_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormat(self.bucket_capacity_))
if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
if self.has_paused_: res+=prefix+("paused: %s\n" % self.DebugFormatBool(self.paused_))
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_max_concurrent_requests_: res+=prefix+("max_concurrent_requests: %s\n" % self.DebugFormatInt32(self.max_concurrent_requests_))
if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
if self.has_acl_:
res+=prefix+"acl <\n"
res+=self.acl_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.header_override_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("header_override%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_creator_name_: res+=prefix+("creator_name: %s\n" % self.DebugFormatString(self.creator_name_))
return res
class TaskQueueFetchQueuesResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.queue_ = []
if contents is not None: self.MergeFromString(contents)
def queue_size(self): return len(self.queue_)
def queue_list(self): return self.queue_
def queue(self, i):
return self.queue_[i]
def mutable_queue(self, i):
return self.queue_[i]
def add_queue(self):
x = TaskQueueFetchQueuesResponse_Queue()
self.queue_.append(x)
return x
def clear_queue(self):
self.queue_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.queue_size()): self.add_queue().CopyFrom(x.queue(i))
def Equals(self, x):
if x is self: return 1
if len(self.queue_) != len(x.queue_): return 0
for e1, e2 in zip(self.queue_, x.queue_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.queue_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.queue_)
for i in xrange(len(self.queue_)): n += self.queue_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.queue_)
for i in xrange(len(self.queue_)): n += self.queue_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_queue()
def OutputUnchecked(self, out):
for i in xrange(len(self.queue_)):
out.putVarInt32(11)
self.queue_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in xrange(len(self.queue_)):
out.putVarInt32(11)
self.queue_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_queue().TryMerge(d)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.queue_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Queue%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kQueueGroup = 1
kQueuequeue_name = 2
kQueuebucket_refill_per_second = 3
kQueuebucket_capacity = 4
kQueueuser_specified_rate = 5
kQueuepaused = 6
kQueueretry_parameters = 7
kQueuemax_concurrent_requests = 8
kQueuemode = 9
kQueueacl = 10
kQueueheader_override = 11
kQueuecreator_name = 12
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Queue",
2: "queue_name",
3: "bucket_refill_per_second",
4: "bucket_capacity",
5: "user_specified_rate",
6: "paused",
7: "retry_parameters",
8: "max_concurrent_requests",
9: "mode",
10: "acl",
11: "header_override",
12: "creator_name",
}, 12)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.DOUBLE,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.STRING,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.NUMERIC,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.STRING,
}, 12, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesResponse'
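# Illustrative iteration (a sketch; `data` is a hypothetical wire string).
# Queue entries are encoded as a protobuf *group*: tag 11 (STARTGROUP, field 1)
# opens each entry and tag 12 (ENDGROUP) closes it, as OutputUnchecked() above
# shows:
#
#   resp = TaskQueueFetchQueuesResponse(data)
#   for i in xrange(resp.queue_size()):
#     q = resp.queue(i)
#     print q.queue_name(), q.bucket_refill_per_second()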
class TaskQueueFetchQueueStatsRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_max_num_tasks_ = 0
max_num_tasks_ = 0
def __init__(self, contents=None):
self.queue_name_ = []
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name_size(self): return len(self.queue_name_)
def queue_name_list(self): return self.queue_name_
def queue_name(self, i):
return self.queue_name_[i]
def set_queue_name(self, i, x):
self.queue_name_[i] = x
def add_queue_name(self, x):
self.queue_name_.append(x)
def clear_queue_name(self):
self.queue_name_ = []
def max_num_tasks(self): return self.max_num_tasks_
def set_max_num_tasks(self, x):
self.has_max_num_tasks_ = 1
self.max_num_tasks_ = x
def clear_max_num_tasks(self):
if self.has_max_num_tasks_:
self.has_max_num_tasks_ = 0
self.max_num_tasks_ = 0
def has_max_num_tasks(self): return self.has_max_num_tasks_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
for i in xrange(x.queue_name_size()): self.add_queue_name(x.queue_name(i))
if (x.has_max_num_tasks()): self.set_max_num_tasks(x.max_num_tasks())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if len(self.queue_name_) != len(x.queue_name_): return 0
for e1, e2 in zip(self.queue_name_, x.queue_name_):
if e1 != e2: return 0
if self.has_max_num_tasks_ != x.has_max_num_tasks_: return 0
if self.has_max_num_tasks_ and self.max_num_tasks_ != x.max_num_tasks_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += 1 * len(self.queue_name_)
for i in xrange(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
if (self.has_max_num_tasks_): n += 1 + self.lengthVarInt64(self.max_num_tasks_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += 1 * len(self.queue_name_)
for i in xrange(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
if (self.has_max_num_tasks_): n += 1 + self.lengthVarInt64(self.max_num_tasks_)
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_max_num_tasks()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
for i in xrange(len(self.queue_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_[i])
if (self.has_max_num_tasks_):
out.putVarInt32(24)
out.putVarInt32(self.max_num_tasks_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
for i in xrange(len(self.queue_name_)):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_[i])
if (self.has_max_num_tasks_):
out.putVarInt32(24)
out.putVarInt32(self.max_num_tasks_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.add_queue_name(d.getPrefixedString())
continue
if tt == 24:
self.set_max_num_tasks(d.getVarInt32())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
cnt=0
for e in self.queue_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("queue_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kmax_num_tasks = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "max_num_tasks",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsRequest'
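# Illustrative stats request (a sketch; the queue names are hypothetical).
# Unlike the message-valued repeated fields above, queue_name here is a
# repeated string, so entries are appended directly with add_queue_name():
#
#   req = TaskQueueFetchQueueStatsRequest()
#   req.add_queue_name('default')
#   req.add_queue_name('background')
#   req.set_max_num_tasks(10)   # optional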
class TaskQueueScannerQueueInfo(ProtocolBuffer.ProtocolMessage):
has_executed_last_minute_ = 0
executed_last_minute_ = 0
has_executed_last_hour_ = 0
executed_last_hour_ = 0
has_sampling_duration_seconds_ = 0
sampling_duration_seconds_ = 0.0
has_requests_in_flight_ = 0
requests_in_flight_ = 0
has_enforced_rate_ = 0
enforced_rate_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def executed_last_minute(self): return self.executed_last_minute_
def set_executed_last_minute(self, x):
self.has_executed_last_minute_ = 1
self.executed_last_minute_ = x
def clear_executed_last_minute(self):
if self.has_executed_last_minute_:
self.has_executed_last_minute_ = 0
self.executed_last_minute_ = 0
def has_executed_last_minute(self): return self.has_executed_last_minute_
def executed_last_hour(self): return self.executed_last_hour_
def set_executed_last_hour(self, x):
self.has_executed_last_hour_ = 1
self.executed_last_hour_ = x
def clear_executed_last_hour(self):
if self.has_executed_last_hour_:
self.has_executed_last_hour_ = 0
self.executed_last_hour_ = 0
def has_executed_last_hour(self): return self.has_executed_last_hour_
def sampling_duration_seconds(self): return self.sampling_duration_seconds_
def set_sampling_duration_seconds(self, x):
self.has_sampling_duration_seconds_ = 1
self.sampling_duration_seconds_ = x
def clear_sampling_duration_seconds(self):
if self.has_sampling_duration_seconds_:
self.has_sampling_duration_seconds_ = 0
self.sampling_duration_seconds_ = 0.0
def has_sampling_duration_seconds(self): return self.has_sampling_duration_seconds_
def requests_in_flight(self): return self.requests_in_flight_
def set_requests_in_flight(self, x):
self.has_requests_in_flight_ = 1
self.requests_in_flight_ = x
def clear_requests_in_flight(self):
if self.has_requests_in_flight_:
self.has_requests_in_flight_ = 0
self.requests_in_flight_ = 0
def has_requests_in_flight(self): return self.has_requests_in_flight_
def enforced_rate(self): return self.enforced_rate_
def set_enforced_rate(self, x):
self.has_enforced_rate_ = 1
self.enforced_rate_ = x
def clear_enforced_rate(self):
if self.has_enforced_rate_:
self.has_enforced_rate_ = 0
self.enforced_rate_ = 0.0
def has_enforced_rate(self): return self.has_enforced_rate_
def MergeFrom(self, x):
assert x is not self
if (x.has_executed_last_minute()): self.set_executed_last_minute(x.executed_last_minute())
if (x.has_executed_last_hour()): self.set_executed_last_hour(x.executed_last_hour())
if (x.has_sampling_duration_seconds()): self.set_sampling_duration_seconds(x.sampling_duration_seconds())
if (x.has_requests_in_flight()): self.set_requests_in_flight(x.requests_in_flight())
if (x.has_enforced_rate()): self.set_enforced_rate(x.enforced_rate())
def Equals(self, x):
if x is self: return 1
if self.has_executed_last_minute_ != x.has_executed_last_minute_: return 0
if self.has_executed_last_minute_ and self.executed_last_minute_ != x.executed_last_minute_: return 0
if self.has_executed_last_hour_ != x.has_executed_last_hour_: return 0
if self.has_executed_last_hour_ and self.executed_last_hour_ != x.executed_last_hour_: return 0
if self.has_sampling_duration_seconds_ != x.has_sampling_duration_seconds_: return 0
if self.has_sampling_duration_seconds_ and self.sampling_duration_seconds_ != x.sampling_duration_seconds_: return 0
if self.has_requests_in_flight_ != x.has_requests_in_flight_: return 0
if self.has_requests_in_flight_ and self.requests_in_flight_ != x.requests_in_flight_: return 0
if self.has_enforced_rate_ != x.has_enforced_rate_: return 0
if self.has_enforced_rate_ and self.enforced_rate_ != x.enforced_rate_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_executed_last_minute_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: executed_last_minute not set.')
if (not self.has_executed_last_hour_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: executed_last_hour not set.')
if (not self.has_sampling_duration_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: sampling_duration_seconds not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.executed_last_minute_)
n += self.lengthVarInt64(self.executed_last_hour_)
if (self.has_requests_in_flight_): n += 1 + self.lengthVarInt64(self.requests_in_flight_)
if (self.has_enforced_rate_): n += 9
return n + 11
def ByteSizePartial(self):
n = 0
if (self.has_executed_last_minute_):
n += 1
n += self.lengthVarInt64(self.executed_last_minute_)
if (self.has_executed_last_hour_):
n += 1
n += self.lengthVarInt64(self.executed_last_hour_)
if (self.has_sampling_duration_seconds_):
n += 9
if (self.has_requests_in_flight_): n += 1 + self.lengthVarInt64(self.requests_in_flight_)
if (self.has_enforced_rate_): n += 9
return n
def Clear(self):
self.clear_executed_last_minute()
self.clear_executed_last_hour()
self.clear_sampling_duration_seconds()
self.clear_requests_in_flight()
self.clear_enforced_rate()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.executed_last_minute_)
out.putVarInt32(16)
out.putVarInt64(self.executed_last_hour_)
out.putVarInt32(25)
out.putDouble(self.sampling_duration_seconds_)
if (self.has_requests_in_flight_):
out.putVarInt32(32)
out.putVarInt32(self.requests_in_flight_)
if (self.has_enforced_rate_):
out.putVarInt32(41)
out.putDouble(self.enforced_rate_)
def OutputPartial(self, out):
if (self.has_executed_last_minute_):
out.putVarInt32(8)
out.putVarInt64(self.executed_last_minute_)
if (self.has_executed_last_hour_):
out.putVarInt32(16)
out.putVarInt64(self.executed_last_hour_)
if (self.has_sampling_duration_seconds_):
out.putVarInt32(25)
out.putDouble(self.sampling_duration_seconds_)
if (self.has_requests_in_flight_):
out.putVarInt32(32)
out.putVarInt32(self.requests_in_flight_)
if (self.has_enforced_rate_):
out.putVarInt32(41)
out.putDouble(self.enforced_rate_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_executed_last_minute(d.getVarInt64())
continue
if tt == 16:
self.set_executed_last_hour(d.getVarInt64())
continue
if tt == 25:
self.set_sampling_duration_seconds(d.getDouble())
continue
if tt == 32:
self.set_requests_in_flight(d.getVarInt32())
continue
if tt == 41:
self.set_enforced_rate(d.getDouble())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_executed_last_minute_: res+=prefix+("executed_last_minute: %s\n" % self.DebugFormatInt64(self.executed_last_minute_))
if self.has_executed_last_hour_: res+=prefix+("executed_last_hour: %s\n" % self.DebugFormatInt64(self.executed_last_hour_))
if self.has_sampling_duration_seconds_: res+=prefix+("sampling_duration_seconds: %s\n" % self.DebugFormat(self.sampling_duration_seconds_))
if self.has_requests_in_flight_: res+=prefix+("requests_in_flight: %s\n" % self.DebugFormatInt32(self.requests_in_flight_))
if self.has_enforced_rate_: res+=prefix+("enforced_rate: %s\n" % self.DebugFormat(self.enforced_rate_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kexecuted_last_minute = 1
kexecuted_last_hour = 2
ksampling_duration_seconds = 3
krequests_in_flight = 4
kenforced_rate = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "executed_last_minute",
2: "executed_last_hour",
3: "sampling_duration_seconds",
4: "requests_in_flight",
5: "enforced_rate",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.DOUBLE,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueScannerQueueInfo'
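# Illustrative debug dump (a sketch; the numbers are hypothetical). The three
# required fields must be set before ByteSize()/Encode() are meaningful, and
# __str__ renders one "name: value" line per populated field:
#
#   info = TaskQueueScannerQueueInfo()
#   info.set_executed_last_minute(12)
#   info.set_executed_last_hour(480)
#   info.set_sampling_duration_seconds(60.0)
#   print info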
class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
has_num_tasks_ = 0
num_tasks_ = 0
has_oldest_eta_usec_ = 0
oldest_eta_usec_ = 0
has_scanner_info_ = 0
scanner_info_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def num_tasks(self): return self.num_tasks_
def set_num_tasks(self, x):
self.has_num_tasks_ = 1
self.num_tasks_ = x
def clear_num_tasks(self):
if self.has_num_tasks_:
self.has_num_tasks_ = 0
self.num_tasks_ = 0
def has_num_tasks(self): return self.has_num_tasks_
def oldest_eta_usec(self): return self.oldest_eta_usec_
def set_oldest_eta_usec(self, x):
self.has_oldest_eta_usec_ = 1
self.oldest_eta_usec_ = x
def clear_oldest_eta_usec(self):
if self.has_oldest_eta_usec_:
self.has_oldest_eta_usec_ = 0
self.oldest_eta_usec_ = 0
def has_oldest_eta_usec(self): return self.has_oldest_eta_usec_
def scanner_info(self):
if self.scanner_info_ is None:
self.lazy_init_lock_.acquire()
try:
if self.scanner_info_ is None: self.scanner_info_ = TaskQueueScannerQueueInfo()
finally:
self.lazy_init_lock_.release()
return self.scanner_info_
def mutable_scanner_info(self): self.has_scanner_info_ = 1; return self.scanner_info()
def clear_scanner_info(self):
# Warning: this method does not acquire the lock.
if self.has_scanner_info_:
      self.has_scanner_info_ = 0
if self.scanner_info_ is not None: self.scanner_info_.Clear()
def has_scanner_info(self): return self.has_scanner_info_
def MergeFrom(self, x):
assert x is not self
if (x.has_num_tasks()): self.set_num_tasks(x.num_tasks())
if (x.has_oldest_eta_usec()): self.set_oldest_eta_usec(x.oldest_eta_usec())
if (x.has_scanner_info()): self.mutable_scanner_info().MergeFrom(x.scanner_info())
def Equals(self, x):
if x is self: return 1
if self.has_num_tasks_ != x.has_num_tasks_: return 0
if self.has_num_tasks_ and self.num_tasks_ != x.num_tasks_: return 0
if self.has_oldest_eta_usec_ != x.has_oldest_eta_usec_: return 0
if self.has_oldest_eta_usec_ and self.oldest_eta_usec_ != x.oldest_eta_usec_: return 0
if self.has_scanner_info_ != x.has_scanner_info_: return 0
if self.has_scanner_info_ and self.scanner_info_ != x.scanner_info_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_num_tasks_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: num_tasks not set.')
if (not self.has_oldest_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: oldest_eta_usec not set.')
if (self.has_scanner_info_ and not self.scanner_info_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.num_tasks_)
n += self.lengthVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_num_tasks_):
n += 1
n += self.lengthVarInt64(self.num_tasks_)
if (self.has_oldest_eta_usec_):
n += 1
n += self.lengthVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_): n += 1 + self.lengthString(self.scanner_info_.ByteSizePartial())
return n
def Clear(self):
self.clear_num_tasks()
self.clear_oldest_eta_usec()
self.clear_scanner_info()
def OutputUnchecked(self, out):
out.putVarInt32(16)
out.putVarInt32(self.num_tasks_)
out.putVarInt32(24)
out.putVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_):
out.putVarInt32(34)
out.putVarInt32(self.scanner_info_.ByteSize())
self.scanner_info_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_num_tasks_):
out.putVarInt32(16)
out.putVarInt32(self.num_tasks_)
if (self.has_oldest_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.oldest_eta_usec_)
if (self.has_scanner_info_):
out.putVarInt32(34)
out.putVarInt32(self.scanner_info_.ByteSizePartial())
self.scanner_info_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 16:
self.set_num_tasks(d.getVarInt32())
continue
if tt == 24:
self.set_oldest_eta_usec(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_scanner_info().TryMerge(tmp)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_num_tasks_: res+=prefix+("num_tasks: %s\n" % self.DebugFormatInt32(self.num_tasks_))
if self.has_oldest_eta_usec_: res+=prefix+("oldest_eta_usec: %s\n" % self.DebugFormatInt64(self.oldest_eta_usec_))
if self.has_scanner_info_:
res+=prefix+"scanner_info <\n"
res+=self.scanner_info_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
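# Illustrative note (editor-added): scanner_info() above lazily allocates its
# optional submessage under lazy_init_lock_ (a thread.allocate_lock() taken in
# __init__), so concurrent readers of an unset field cannot race the
# allocation. clear_scanner_info() intentionally skips the lock, as its
# warning comment states, so callers must not clear concurrently with reads.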
class TaskQueueFetchQueueStatsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.queuestats_ = []
if contents is not None: self.MergeFromString(contents)
def queuestats_size(self): return len(self.queuestats_)
def queuestats_list(self): return self.queuestats_
def queuestats(self, i):
return self.queuestats_[i]
def mutable_queuestats(self, i):
return self.queuestats_[i]
def add_queuestats(self):
x = TaskQueueFetchQueueStatsResponse_QueueStats()
self.queuestats_.append(x)
return x
def clear_queuestats(self):
self.queuestats_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.queuestats_size()): self.add_queuestats().CopyFrom(x.queuestats(i))
def Equals(self, x):
if x is self: return 1
if len(self.queuestats_) != len(x.queuestats_): return 0
for e1, e2 in zip(self.queuestats_, x.queuestats_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.queuestats_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.queuestats_)
for i in xrange(len(self.queuestats_)): n += self.queuestats_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.queuestats_)
for i in xrange(len(self.queuestats_)): n += self.queuestats_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_queuestats()
def OutputUnchecked(self, out):
for i in xrange(len(self.queuestats_)):
out.putVarInt32(11)
self.queuestats_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in xrange(len(self.queuestats_)):
out.putVarInt32(11)
self.queuestats_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_queuestats().TryMerge(d)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.queuestats_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("QueueStats%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kQueueStatsGroup = 1
kQueueStatsnum_tasks = 2
kQueueStatsoldest_eta_usec = 3
kQueueStatsscanner_info = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "QueueStats",
2: "num_tasks",
3: "oldest_eta_usec",
4: "scanner_info",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsResponse'
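# Illustrative sketch (editor-added): decoding a fetch-stats response and
# walking its repeated, group-encoded QueueStats entries. Each element is
# framed by the STARTGROUP tag 11 and ENDGROUP tag 12 written by
# OutputUnchecked() above; 'serialized' below is a placeholder byte string.
def _example_read_queue_stats(serialized):
  response = TaskQueueFetchQueueStatsResponse(serialized)  # parses via MergeFromString
  for stats in response.queuestats_list():
    # num_tasks and oldest_eta_usec are required on every group element.
    print stats.num_tasks(), stats.oldest_eta_usec()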
class TaskQueuePauseQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_pause_ = 0
pause_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def pause(self): return self.pause_
def set_pause(self, x):
self.has_pause_ = 1
self.pause_ = x
def clear_pause(self):
if self.has_pause_:
self.has_pause_ = 0
self.pause_ = 0
def has_pause(self): return self.has_pause_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_pause()): self.set_pause(x.pause())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_pause_ != x.has_pause_: return 0
if self.has_pause_ and self.pause_ != x.pause_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_pause_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: pause not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_pause_):
n += 2
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_pause()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(24)
out.putBoolean(self.pause_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_pause_):
out.putVarInt32(24)
out.putBoolean(self.pause_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 24:
self.set_pause(d.getBoolean())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_pause_: res+=prefix+("pause: %s\n" % self.DebugFormatBool(self.pause_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kpause = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "pause",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueRequest'
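# Illustrative sketch (editor-added): building a pause request. All three
# fields are required, so IsInitialized() must pass before serialization.
# Encode() is assumed here from the ProtocolMessage base class in the App
# Engine SDK; it is not defined in this file.
def _example_build_pause_request(app_id, queue_name):
  request = TaskQueuePauseQueueRequest()
  request.set_app_id(app_id)
  request.set_queue_name(queue_name)
  request.set_pause(1)  # booleans are carried as 0/1 in this module
  errors = []
  assert request.IsInitialized(errors), errors
  return request.Encode()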
class TaskQueuePauseQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueResponse'
class TaskQueuePurgeQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueRequest'
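# Illustrative note (editor-added): ByteSize() above folds the tag bytes of
# required fields into its trailing constant (the "n + 1" covers the required
# queue_name tag) and so is only meaningful once every required field is set,
# while ByteSizePartial() re-checks each presence bit and stays correct on
# partially built messages:
def _example_partial_size():
  request = TaskQueuePurgeQueueRequest()  # queue_name deliberately left unset
  return request.ByteSizePartial()        # 0; ByteSize() would still count queue_name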
class TaskQueuePurgeQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueResponse'
class TaskQueueDeleteQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueRequest'
class TaskQueueDeleteQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueResponse'
class TaskQueueDeleteGroupRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_app_id()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupRequest'
class TaskQueueDeleteGroupResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupResponse'
class TaskQueueQueryTasksRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_start_task_name_ = 0
start_task_name_ = ""
has_start_eta_usec_ = 0
start_eta_usec_ = 0
has_start_tag_ = 0
start_tag_ = ""
has_max_rows_ = 0
max_rows_ = 1
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def start_task_name(self): return self.start_task_name_
def set_start_task_name(self, x):
self.has_start_task_name_ = 1
self.start_task_name_ = x
def clear_start_task_name(self):
if self.has_start_task_name_:
self.has_start_task_name_ = 0
self.start_task_name_ = ""
def has_start_task_name(self): return self.has_start_task_name_
def start_eta_usec(self): return self.start_eta_usec_
def set_start_eta_usec(self, x):
self.has_start_eta_usec_ = 1
self.start_eta_usec_ = x
def clear_start_eta_usec(self):
if self.has_start_eta_usec_:
self.has_start_eta_usec_ = 0
self.start_eta_usec_ = 0
def has_start_eta_usec(self): return self.has_start_eta_usec_
def start_tag(self): return self.start_tag_
def set_start_tag(self, x):
self.has_start_tag_ = 1
self.start_tag_ = x
def clear_start_tag(self):
if self.has_start_tag_:
self.has_start_tag_ = 0
self.start_tag_ = ""
def has_start_tag(self): return self.has_start_tag_
def max_rows(self): return self.max_rows_
def set_max_rows(self, x):
self.has_max_rows_ = 1
self.max_rows_ = x
def clear_max_rows(self):
if self.has_max_rows_:
self.has_max_rows_ = 0
self.max_rows_ = 1
def has_max_rows(self): return self.has_max_rows_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_start_task_name()): self.set_start_task_name(x.start_task_name())
if (x.has_start_eta_usec()): self.set_start_eta_usec(x.start_eta_usec())
if (x.has_start_tag()): self.set_start_tag(x.start_tag())
if (x.has_max_rows()): self.set_max_rows(x.max_rows())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_start_task_name_ != x.has_start_task_name_: return 0
if self.has_start_task_name_ and self.start_task_name_ != x.start_task_name_: return 0
if self.has_start_eta_usec_ != x.has_start_eta_usec_: return 0
if self.has_start_eta_usec_ and self.start_eta_usec_ != x.start_eta_usec_: return 0
if self.has_start_tag_ != x.has_start_tag_: return 0
if self.has_start_tag_ and self.start_tag_ != x.start_tag_: return 0
if self.has_max_rows_ != x.has_max_rows_: return 0
if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
if (self.has_start_tag_): n += 1 + self.lengthString(len(self.start_tag_))
if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_start_task_name_): n += 1 + self.lengthString(len(self.start_task_name_))
if (self.has_start_eta_usec_): n += 1 + self.lengthVarInt64(self.start_eta_usec_)
if (self.has_start_tag_): n += 1 + self.lengthString(len(self.start_tag_))
if (self.has_max_rows_): n += 1 + self.lengthVarInt64(self.max_rows_)
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_start_task_name()
self.clear_start_eta_usec()
self.clear_start_tag()
self.clear_max_rows()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_start_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.start_task_name_)
if (self.has_start_eta_usec_):
out.putVarInt32(32)
out.putVarInt64(self.start_eta_usec_)
if (self.has_max_rows_):
out.putVarInt32(40)
out.putVarInt32(self.max_rows_)
if (self.has_start_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.start_tag_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_start_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.start_task_name_)
if (self.has_start_eta_usec_):
out.putVarInt32(32)
out.putVarInt64(self.start_eta_usec_)
if (self.has_max_rows_):
out.putVarInt32(40)
out.putVarInt32(self.max_rows_)
if (self.has_start_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.start_tag_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_start_task_name(d.getPrefixedString())
continue
if tt == 32:
self.set_start_eta_usec(d.getVarInt64())
continue
if tt == 40:
self.set_max_rows(d.getVarInt32())
continue
if tt == 50:
self.set_start_tag(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_start_task_name_: res+=prefix+("start_task_name: %s\n" % self.DebugFormatString(self.start_task_name_))
if self.has_start_eta_usec_: res+=prefix+("start_eta_usec: %s\n" % self.DebugFormatInt64(self.start_eta_usec_))
if self.has_start_tag_: res+=prefix+("start_tag: %s\n" % self.DebugFormatString(self.start_tag_))
if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
kstart_task_name = 3
kstart_eta_usec = 4
kstart_tag = 6
kmax_rows = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "start_task_name",
4: "start_eta_usec",
5: "max_rows",
6: "start_tag",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STRING,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksRequest'
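# Illustrative sketch (editor-added): paging through tasks by tag. queue_name
# is the only required field; max_rows defaults to 1 (see max_rows_ above) and
# start_tag, though declared as field 6, is emitted after max_rows (field 5)
# because this module writes fields in tag order.
def _example_query_by_tag(queue_name, tag, page_size):
  request = TaskQueueQueryTasksRequest()
  request.set_queue_name(queue_name)
  request.set_start_tag(tag)
  request.set_max_rows(page_size)
  return request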
class TaskQueueQueryTasksResponse_TaskHeader(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def key(self): return self.key_
def set_key(self, x):
self.has_key_ = 1
self.key_ = x
def clear_key(self):
if self.has_key_:
self.has_key_ = 0
self.key_ = ""
def has_key(self): return self.has_key_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.set_key(x.key())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: key not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.key_))
n += self.lengthString(len(self.value_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_key_):
n += 1
n += self.lengthString(len(self.key_))
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_key()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(66)
out.putPrefixedString(self.key_)
out.putVarInt32(74)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(66)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(74)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 60: break
if tt == 66:
self.set_key(d.getPrefixedString())
continue
if tt == 74:
self.set_value(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
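# Illustrative note (editor-added): TaskQueueQueryTasksResponse_TaskHeader is
# group-encoded inside its parent Task message. Its fields use tags 66 (key,
# field 8) and 74 (value, field 9), and TryMerge() returns on tag 60, the
# ENDGROUP marker that the parent Task writes after each header (see
# TaskQueueQueryTasksResponse_Task below).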
class TaskQueueQueryTasksResponse_TaskCronTimetable(ProtocolBuffer.ProtocolMessage):
has_schedule_ = 0
schedule_ = ""
has_timezone_ = 0
timezone_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def schedule(self): return self.schedule_
def set_schedule(self, x):
self.has_schedule_ = 1
self.schedule_ = x
def clear_schedule(self):
if self.has_schedule_:
self.has_schedule_ = 0
self.schedule_ = ""
def has_schedule(self): return self.has_schedule_
def timezone(self): return self.timezone_
def set_timezone(self, x):
self.has_timezone_ = 1
self.timezone_ = x
def clear_timezone(self):
if self.has_timezone_:
self.has_timezone_ = 0
self.timezone_ = ""
def has_timezone(self): return self.has_timezone_
def MergeFrom(self, x):
assert x is not self
if (x.has_schedule()): self.set_schedule(x.schedule())
if (x.has_timezone()): self.set_timezone(x.timezone())
def Equals(self, x):
if x is self: return 1
if self.has_schedule_ != x.has_schedule_: return 0
if self.has_schedule_ and self.schedule_ != x.schedule_: return 0
if self.has_timezone_ != x.has_timezone_: return 0
if self.has_timezone_ and self.timezone_ != x.timezone_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_schedule_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: schedule not set.')
if (not self.has_timezone_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: timezone not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.schedule_))
n += self.lengthString(len(self.timezone_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_schedule_):
n += 1
n += self.lengthString(len(self.schedule_))
if (self.has_timezone_):
n += 1
n += self.lengthString(len(self.timezone_))
return n
def Clear(self):
self.clear_schedule()
self.clear_timezone()
def OutputUnchecked(self, out):
out.putVarInt32(114)
out.putPrefixedString(self.schedule_)
out.putVarInt32(122)
out.putPrefixedString(self.timezone_)
def OutputPartial(self, out):
if (self.has_schedule_):
out.putVarInt32(114)
out.putPrefixedString(self.schedule_)
if (self.has_timezone_):
out.putVarInt32(122)
out.putPrefixedString(self.timezone_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 108: break
if tt == 114:
self.set_schedule(d.getPrefixedString())
continue
if tt == 122:
self.set_timezone(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_schedule_: res+=prefix+("schedule: %s\n" % self.DebugFormatString(self.schedule_))
if self.has_timezone_: res+=prefix+("timezone: %s\n" % self.DebugFormatString(self.timezone_))
return res
class TaskQueueQueryTasksResponse_TaskRunLog(ProtocolBuffer.ProtocolMessage):
has_dispatched_usec_ = 0
dispatched_usec_ = 0
has_lag_usec_ = 0
lag_usec_ = 0
has_elapsed_usec_ = 0
elapsed_usec_ = 0
has_response_code_ = 0
response_code_ = 0
has_retry_reason_ = 0
retry_reason_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def dispatched_usec(self): return self.dispatched_usec_
def set_dispatched_usec(self, x):
self.has_dispatched_usec_ = 1
self.dispatched_usec_ = x
def clear_dispatched_usec(self):
if self.has_dispatched_usec_:
self.has_dispatched_usec_ = 0
self.dispatched_usec_ = 0
def has_dispatched_usec(self): return self.has_dispatched_usec_
def lag_usec(self): return self.lag_usec_
def set_lag_usec(self, x):
self.has_lag_usec_ = 1
self.lag_usec_ = x
def clear_lag_usec(self):
if self.has_lag_usec_:
self.has_lag_usec_ = 0
self.lag_usec_ = 0
def has_lag_usec(self): return self.has_lag_usec_
def elapsed_usec(self): return self.elapsed_usec_
def set_elapsed_usec(self, x):
self.has_elapsed_usec_ = 1
self.elapsed_usec_ = x
def clear_elapsed_usec(self):
if self.has_elapsed_usec_:
self.has_elapsed_usec_ = 0
self.elapsed_usec_ = 0
def has_elapsed_usec(self): return self.has_elapsed_usec_
def response_code(self): return self.response_code_
def set_response_code(self, x):
self.has_response_code_ = 1
self.response_code_ = x
def clear_response_code(self):
if self.has_response_code_:
self.has_response_code_ = 0
self.response_code_ = 0
def has_response_code(self): return self.has_response_code_
def retry_reason(self): return self.retry_reason_
def set_retry_reason(self, x):
self.has_retry_reason_ = 1
self.retry_reason_ = x
def clear_retry_reason(self):
if self.has_retry_reason_:
self.has_retry_reason_ = 0
self.retry_reason_ = ""
def has_retry_reason(self): return self.has_retry_reason_
def MergeFrom(self, x):
assert x is not self
if (x.has_dispatched_usec()): self.set_dispatched_usec(x.dispatched_usec())
if (x.has_lag_usec()): self.set_lag_usec(x.lag_usec())
if (x.has_elapsed_usec()): self.set_elapsed_usec(x.elapsed_usec())
if (x.has_response_code()): self.set_response_code(x.response_code())
if (x.has_retry_reason()): self.set_retry_reason(x.retry_reason())
def Equals(self, x):
if x is self: return 1
if self.has_dispatched_usec_ != x.has_dispatched_usec_: return 0
if self.has_dispatched_usec_ and self.dispatched_usec_ != x.dispatched_usec_: return 0
if self.has_lag_usec_ != x.has_lag_usec_: return 0
if self.has_lag_usec_ and self.lag_usec_ != x.lag_usec_: return 0
if self.has_elapsed_usec_ != x.has_elapsed_usec_: return 0
if self.has_elapsed_usec_ and self.elapsed_usec_ != x.elapsed_usec_: return 0
if self.has_response_code_ != x.has_response_code_: return 0
if self.has_response_code_ and self.response_code_ != x.response_code_: return 0
if self.has_retry_reason_ != x.has_retry_reason_: return 0
if self.has_retry_reason_ and self.retry_reason_ != x.retry_reason_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_dispatched_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: dispatched_usec not set.')
if (not self.has_lag_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lag_usec not set.')
if (not self.has_elapsed_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: elapsed_usec not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.dispatched_usec_)
n += self.lengthVarInt64(self.lag_usec_)
n += self.lengthVarInt64(self.elapsed_usec_)
if (self.has_response_code_): n += 2 + self.lengthVarInt64(self.response_code_)
if (self.has_retry_reason_): n += 2 + self.lengthString(len(self.retry_reason_))
return n + 6
def ByteSizePartial(self):
n = 0
if (self.has_dispatched_usec_):
n += 2
n += self.lengthVarInt64(self.dispatched_usec_)
if (self.has_lag_usec_):
n += 2
n += self.lengthVarInt64(self.lag_usec_)
if (self.has_elapsed_usec_):
n += 2
n += self.lengthVarInt64(self.elapsed_usec_)
if (self.has_response_code_): n += 2 + self.lengthVarInt64(self.response_code_)
if (self.has_retry_reason_): n += 2 + self.lengthString(len(self.retry_reason_))
return n
def Clear(self):
self.clear_dispatched_usec()
self.clear_lag_usec()
self.clear_elapsed_usec()
self.clear_response_code()
self.clear_retry_reason()
def OutputUnchecked(self, out):
out.putVarInt32(136)
out.putVarInt64(self.dispatched_usec_)
out.putVarInt32(144)
out.putVarInt64(self.lag_usec_)
out.putVarInt32(152)
out.putVarInt64(self.elapsed_usec_)
if (self.has_response_code_):
out.putVarInt32(160)
out.putVarInt64(self.response_code_)
if (self.has_retry_reason_):
out.putVarInt32(218)
out.putPrefixedString(self.retry_reason_)
def OutputPartial(self, out):
if (self.has_dispatched_usec_):
out.putVarInt32(136)
out.putVarInt64(self.dispatched_usec_)
if (self.has_lag_usec_):
out.putVarInt32(144)
out.putVarInt64(self.lag_usec_)
if (self.has_elapsed_usec_):
out.putVarInt32(152)
out.putVarInt64(self.elapsed_usec_)
if (self.has_response_code_):
out.putVarInt32(160)
out.putVarInt64(self.response_code_)
if (self.has_retry_reason_):
out.putVarInt32(218)
out.putPrefixedString(self.retry_reason_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 132: break
if tt == 136:
self.set_dispatched_usec(d.getVarInt64())
continue
if tt == 144:
self.set_lag_usec(d.getVarInt64())
continue
if tt == 152:
self.set_elapsed_usec(d.getVarInt64())
continue
if tt == 160:
self.set_response_code(d.getVarInt64())
continue
if tt == 218:
self.set_retry_reason(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_dispatched_usec_: res+=prefix+("dispatched_usec: %s\n" % self.DebugFormatInt64(self.dispatched_usec_))
if self.has_lag_usec_: res+=prefix+("lag_usec: %s\n" % self.DebugFormatInt64(self.lag_usec_))
if self.has_elapsed_usec_: res+=prefix+("elapsed_usec: %s\n" % self.DebugFormatInt64(self.elapsed_usec_))
if self.has_response_code_: res+=prefix+("response_code: %s\n" % self.DebugFormatInt64(self.response_code_))
if self.has_retry_reason_: res+=prefix+("retry_reason: %s\n" % self.DebugFormatString(self.retry_reason_))
return res
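# Illustrative note (editor-added): TaskRunLog's fields sit at numbers 17-20
# and 27, and field numbers of 16 or more need two varint bytes per tag on the
# wire; that is why ByteSize() above charges 2 bytes per tag ("n + 6" for the
# three required varints) where lower-numbered messages charge 1.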
class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
# RequestMethod values
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5
_RequestMethod_NAMES = {
1: "GET",
2: "POST",
3: "HEAD",
4: "PUT",
5: "DELETE",
}
def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
RequestMethod_Name = classmethod(RequestMethod_Name)
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_url_ = 0
url_ = ""
has_method_ = 0
method_ = 0
has_retry_count_ = 0
retry_count_ = 0
has_body_size_ = 0
body_size_ = 0
has_body_ = 0
body_ = ""
has_creation_time_usec_ = 0
creation_time_usec_ = 0
has_crontimetable_ = 0
crontimetable_ = None
has_runlog_ = 0
runlog_ = None
has_description_ = 0
description_ = ""
has_payload_ = 0
payload_ = None
has_retry_parameters_ = 0
retry_parameters_ = None
has_first_try_usec_ = 0
first_try_usec_ = 0
has_tag_ = 0
tag_ = ""
has_execution_count_ = 0
execution_count_ = 0
def __init__(self, contents=None):
self.header_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def url(self): return self.url_
def set_url(self, x):
self.has_url_ = 1
self.url_ = x
def clear_url(self):
if self.has_url_:
self.has_url_ = 0
self.url_ = ""
def has_url(self): return self.has_url_
def method(self): return self.method_
def set_method(self, x):
self.has_method_ = 1
self.method_ = x
def clear_method(self):
if self.has_method_:
self.has_method_ = 0
self.method_ = 0
def has_method(self): return self.has_method_
def retry_count(self): return self.retry_count_
def set_retry_count(self, x):
self.has_retry_count_ = 1
self.retry_count_ = x
def clear_retry_count(self):
if self.has_retry_count_:
self.has_retry_count_ = 0
self.retry_count_ = 0
def has_retry_count(self): return self.has_retry_count_
def header_size(self): return len(self.header_)
def header_list(self): return self.header_
def header(self, i):
return self.header_[i]
def mutable_header(self, i):
return self.header_[i]
def add_header(self):
x = TaskQueueQueryTasksResponse_TaskHeader()
self.header_.append(x)
return x
def clear_header(self):
self.header_ = []
def body_size(self): return self.body_size_
def set_body_size(self, x):
self.has_body_size_ = 1
self.body_size_ = x
def clear_body_size(self):
if self.has_body_size_:
self.has_body_size_ = 0
self.body_size_ = 0
def has_body_size(self): return self.has_body_size_
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def creation_time_usec(self): return self.creation_time_usec_
def set_creation_time_usec(self, x):
self.has_creation_time_usec_ = 1
self.creation_time_usec_ = x
def clear_creation_time_usec(self):
if self.has_creation_time_usec_:
self.has_creation_time_usec_ = 0
self.creation_time_usec_ = 0
def has_creation_time_usec(self): return self.has_creation_time_usec_
def crontimetable(self):
if self.crontimetable_ is None:
self.lazy_init_lock_.acquire()
try:
if self.crontimetable_ is None: self.crontimetable_ = TaskQueueQueryTasksResponse_TaskCronTimetable()
finally:
self.lazy_init_lock_.release()
return self.crontimetable_
def mutable_crontimetable(self): self.has_crontimetable_ = 1; return self.crontimetable()
def clear_crontimetable(self):
# Warning: this method does not acquire the lock.
if self.has_crontimetable_:
self.has_crontimetable_ = 0
if self.crontimetable_ is not None: self.crontimetable_.Clear()
def has_crontimetable(self): return self.has_crontimetable_
def runlog(self):
if self.runlog_ is None:
self.lazy_init_lock_.acquire()
try:
if self.runlog_ is None: self.runlog_ = TaskQueueQueryTasksResponse_TaskRunLog()
finally:
self.lazy_init_lock_.release()
return self.runlog_
def mutable_runlog(self): self.has_runlog_ = 1; return self.runlog()
def clear_runlog(self):
# Warning: this method does not acquire the lock.
if self.has_runlog_:
self.has_runlog_ = 0
if self.runlog_ is not None: self.runlog_.Clear()
def has_runlog(self): return self.has_runlog_
def description(self): return self.description_
def set_description(self, x):
self.has_description_ = 1
self.description_ = x
def clear_description(self):
if self.has_description_:
self.has_description_ = 0
self.description_ = ""
def has_description(self): return self.has_description_
def payload(self):
if self.payload_ is None:
self.lazy_init_lock_.acquire()
try:
if self.payload_ is None: self.payload_ = MessageSet()
finally:
self.lazy_init_lock_.release()
return self.payload_
def mutable_payload(self): self.has_payload_ = 1; return self.payload()
def clear_payload(self):
# Warning: this method does not acquire the lock.
if self.has_payload_:
self.has_payload_ = 0
if self.payload_ is not None: self.payload_.Clear()
def has_payload(self): return self.has_payload_
def retry_parameters(self):
if self.retry_parameters_ is None:
self.lazy_init_lock_.acquire()
try:
if self.retry_parameters_ is None: self.retry_parameters_ = TaskQueueRetryParameters()
finally:
self.lazy_init_lock_.release()
return self.retry_parameters_
def mutable_retry_parameters(self): self.has_retry_parameters_ = 1; return self.retry_parameters()
def clear_retry_parameters(self):
# Warning: this method does not acquire the lock.
if self.has_retry_parameters_:
self.has_retry_parameters_ = 0
if self.retry_parameters_ is not None: self.retry_parameters_.Clear()
def has_retry_parameters(self): return self.has_retry_parameters_
def first_try_usec(self): return self.first_try_usec_
def set_first_try_usec(self, x):
self.has_first_try_usec_ = 1
self.first_try_usec_ = x
def clear_first_try_usec(self):
if self.has_first_try_usec_:
self.has_first_try_usec_ = 0
self.first_try_usec_ = 0
def has_first_try_usec(self): return self.has_first_try_usec_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def execution_count(self): return self.execution_count_
def set_execution_count(self, x):
self.has_execution_count_ = 1
self.execution_count_ = x
def clear_execution_count(self):
if self.has_execution_count_:
self.has_execution_count_ = 0
self.execution_count_ = 0
def has_execution_count(self): return self.has_execution_count_
def MergeFrom(self, x):
assert x is not self
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_url()): self.set_url(x.url())
if (x.has_method()): self.set_method(x.method())
if (x.has_retry_count()): self.set_retry_count(x.retry_count())
for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
if (x.has_body_size()): self.set_body_size(x.body_size())
if (x.has_body()): self.set_body(x.body())
if (x.has_creation_time_usec()): self.set_creation_time_usec(x.creation_time_usec())
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_runlog()): self.mutable_runlog().MergeFrom(x.runlog())
if (x.has_description()): self.set_description(x.description())
if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
if (x.has_retry_parameters()): self.mutable_retry_parameters().MergeFrom(x.retry_parameters())
if (x.has_first_try_usec()): self.set_first_try_usec(x.first_try_usec())
if (x.has_tag()): self.set_tag(x.tag())
if (x.has_execution_count()): self.set_execution_count(x.execution_count())
def Equals(self, x):
if x is self: return 1
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_url_ != x.has_url_: return 0
if self.has_url_ and self.url_ != x.url_: return 0
if self.has_method_ != x.has_method_: return 0
if self.has_method_ and self.method_ != x.method_: return 0
if self.has_retry_count_ != x.has_retry_count_: return 0
if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
if len(self.header_) != len(x.header_): return 0
for e1, e2 in zip(self.header_, x.header_):
if e1 != e2: return 0
if self.has_body_size_ != x.has_body_size_: return 0
if self.has_body_size_ and self.body_size_ != x.body_size_: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_creation_time_usec_ != x.has_creation_time_usec_: return 0
if self.has_creation_time_usec_ and self.creation_time_usec_ != x.creation_time_usec_: return 0
if self.has_crontimetable_ != x.has_crontimetable_: return 0
if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
if self.has_runlog_ != x.has_runlog_: return 0
if self.has_runlog_ and self.runlog_ != x.runlog_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
if self.has_payload_ != x.has_payload_: return 0
if self.has_payload_ and self.payload_ != x.payload_: return 0
if self.has_retry_parameters_ != x.has_retry_parameters_: return 0
if self.has_retry_parameters_ and self.retry_parameters_ != x.retry_parameters_: return 0
if self.has_first_try_usec_ != x.has_first_try_usec_: return 0
if self.has_first_try_usec_ and self.first_try_usec_ != x.first_try_usec_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
if self.has_execution_count_ != x.has_execution_count_: return 0
if self.has_execution_count_ and self.execution_count_ != x.execution_count_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
for p in self.header_:
if not p.IsInitialized(debug_strs): initialized=0
if (not self.has_creation_time_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: creation_time_usec not set.')
if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
if (self.has_runlog_ and not self.runlog_.IsInitialized(debug_strs)): initialized = 0
if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
if (self.has_retry_parameters_ and not self.retry_parameters_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
n += 2 * len(self.header_)
for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
n += self.lengthVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
if (self.has_runlog_): n += 4 + self.runlog_.ByteSize()
if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSize())
if (self.has_first_try_usec_): n += 2 + self.lengthVarInt64(self.first_try_usec_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_execution_count_): n += 2 + self.lengthVarInt64(self.execution_count_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
n += 2 * len(self.header_)
for i in xrange(len(self.header_)): n += self.header_[i].ByteSizePartial()
if (self.has_body_size_): n += 1 + self.lengthVarInt64(self.body_size_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_creation_time_usec_):
n += 1
n += self.lengthVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSizePartial()
if (self.has_runlog_): n += 4 + self.runlog_.ByteSizePartial()
if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSizePartial())
if (self.has_retry_parameters_): n += 2 + self.lengthString(self.retry_parameters_.ByteSizePartial())
if (self.has_first_try_usec_): n += 2 + self.lengthVarInt64(self.first_try_usec_)
if (self.has_tag_): n += 2 + self.lengthString(len(self.tag_))
if (self.has_execution_count_): n += 2 + self.lengthVarInt64(self.execution_count_)
return n
def Clear(self):
self.clear_task_name()
self.clear_eta_usec()
self.clear_url()
self.clear_method()
self.clear_retry_count()
self.clear_header()
self.clear_body_size()
self.clear_body()
self.clear_creation_time_usec()
self.clear_crontimetable()
self.clear_runlog()
self.clear_description()
self.clear_payload()
self.clear_retry_parameters()
self.clear_first_try_usec()
self.clear_tag()
self.clear_execution_count()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
if (self.has_retry_count_):
out.putVarInt32(48)
out.putVarInt32(self.retry_count_)
for i in xrange(len(self.header_)):
out.putVarInt32(59)
self.header_[i].OutputUnchecked(out)
out.putVarInt32(60)
if (self.has_body_size_):
out.putVarInt32(80)
out.putVarInt32(self.body_size_)
if (self.has_body_):
out.putVarInt32(90)
out.putPrefixedString(self.body_)
out.putVarInt32(96)
out.putVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_):
out.putVarInt32(107)
self.crontimetable_.OutputUnchecked(out)
out.putVarInt32(108)
if (self.has_runlog_):
out.putVarInt32(131)
self.runlog_.OutputUnchecked(out)
out.putVarInt32(132)
if (self.has_description_):
out.putVarInt32(170)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(178)
out.putVarInt32(self.payload_.ByteSize())
self.payload_.OutputUnchecked(out)
if (self.has_retry_parameters_):
out.putVarInt32(186)
out.putVarInt32(self.retry_parameters_.ByteSize())
self.retry_parameters_.OutputUnchecked(out)
if (self.has_first_try_usec_):
out.putVarInt32(192)
out.putVarInt64(self.first_try_usec_)
if (self.has_tag_):
out.putVarInt32(202)
out.putPrefixedString(self.tag_)
if (self.has_execution_count_):
out.putVarInt32(208)
out.putVarInt32(self.execution_count_)
def OutputPartial(self, out):
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_url_):
out.putVarInt32(34)
out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
if (self.has_retry_count_):
out.putVarInt32(48)
out.putVarInt32(self.retry_count_)
for i in xrange(len(self.header_)):
out.putVarInt32(59)
self.header_[i].OutputPartial(out)
out.putVarInt32(60)
if (self.has_body_size_):
out.putVarInt32(80)
out.putVarInt32(self.body_size_)
if (self.has_body_):
out.putVarInt32(90)
out.putPrefixedString(self.body_)
if (self.has_creation_time_usec_):
out.putVarInt32(96)
out.putVarInt64(self.creation_time_usec_)
if (self.has_crontimetable_):
out.putVarInt32(107)
self.crontimetable_.OutputPartial(out)
out.putVarInt32(108)
if (self.has_runlog_):
out.putVarInt32(131)
self.runlog_.OutputPartial(out)
out.putVarInt32(132)
if (self.has_description_):
out.putVarInt32(170)
out.putPrefixedString(self.description_)
if (self.has_payload_):
out.putVarInt32(178)
out.putVarInt32(self.payload_.ByteSizePartial())
self.payload_.OutputPartial(out)
if (self.has_retry_parameters_):
out.putVarInt32(186)
out.putVarInt32(self.retry_parameters_.ByteSizePartial())
self.retry_parameters_.OutputPartial(out)
if (self.has_first_try_usec_):
out.putVarInt32(192)
out.putVarInt64(self.first_try_usec_)
if (self.has_tag_):
out.putVarInt32(202)
out.putPrefixedString(self.tag_)
if (self.has_execution_count_):
out.putVarInt32(208)
out.putVarInt32(self.execution_count_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 34:
self.set_url(d.getPrefixedString())
continue
if tt == 40:
self.set_method(d.getVarInt32())
continue
if tt == 48:
self.set_retry_count(d.getVarInt32())
continue
if tt == 59:
self.add_header().TryMerge(d)
continue
if tt == 80:
self.set_body_size(d.getVarInt32())
continue
if tt == 90:
self.set_body(d.getPrefixedString())
continue
if tt == 96:
self.set_creation_time_usec(d.getVarInt64())
continue
if tt == 107:
self.mutable_crontimetable().TryMerge(d)
continue
if tt == 131:
self.mutable_runlog().TryMerge(d)
continue
if tt == 170:
self.set_description(d.getPrefixedString())
continue
if tt == 178:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_payload().TryMerge(tmp)
continue
if tt == 186:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_retry_parameters().TryMerge(tmp)
continue
if tt == 192:
self.set_first_try_usec(d.getVarInt64())
continue
if tt == 202:
self.set_tag(d.getPrefixedString())
continue
if tt == 208:
self.set_execution_count(d.getVarInt32())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
cnt=0
for e in self.header_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Header%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_body_size_: res+=prefix+("body_size: %s\n" % self.DebugFormatInt32(self.body_size_))
if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
if self.has_creation_time_usec_: res+=prefix+("creation_time_usec: %s\n" % self.DebugFormatInt64(self.creation_time_usec_))
if self.has_crontimetable_:
res+=prefix+"CronTimetable {\n"
res+=self.crontimetable_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_runlog_:
res+=prefix+"RunLog {\n"
res+=self.runlog_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
if self.has_payload_:
res+=prefix+"payload <\n"
res+=self.payload_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_retry_parameters_:
res+=prefix+"retry_parameters <\n"
res+=self.retry_parameters_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_first_try_usec_: res+=prefix+("first_try_usec: %s\n" % self.DebugFormatInt64(self.first_try_usec_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
if self.has_execution_count_: res+=prefix+("execution_count: %s\n" % self.DebugFormatInt32(self.execution_count_))
return res
class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.task_ = []
if contents is not None: self.MergeFromString(contents)
def task_size(self): return len(self.task_)
def task_list(self): return self.task_
def task(self, i):
return self.task_[i]
def mutable_task(self, i):
return self.task_[i]
def add_task(self):
x = TaskQueueQueryTasksResponse_Task()
self.task_.append(x)
return x
def clear_task(self):
self.task_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.task_size()): self.add_task().CopyFrom(x.task(i))
def Equals(self, x):
if x is self: return 1
if len(self.task_) != len(x.task_): return 0
for e1, e2 in zip(self.task_, x.task_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.task_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.task_)
for i in xrange(len(self.task_)): n += self.task_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.task_)
for i in xrange(len(self.task_)): n += self.task_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_task()
def OutputUnchecked(self, out):
for i in xrange(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in xrange(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_task().TryMerge(d)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.task_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Task%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kTaskGroup = 1
kTasktask_name = 2
kTasketa_usec = 3
kTaskurl = 4
kTaskmethod = 5
kTaskretry_count = 6
kTaskHeaderGroup = 7
kTaskHeaderkey = 8
kTaskHeadervalue = 9
kTaskbody_size = 10
kTaskbody = 11
kTaskcreation_time_usec = 12
kTaskCronTimetableGroup = 13
kTaskCronTimetableschedule = 14
kTaskCronTimetabletimezone = 15
kTaskRunLogGroup = 16
kTaskRunLogdispatched_usec = 17
kTaskRunLoglag_usec = 18
kTaskRunLogelapsed_usec = 19
kTaskRunLogresponse_code = 20
kTaskRunLogretry_reason = 27
kTaskdescription = 21
kTaskpayload = 22
kTaskretry_parameters = 23
kTaskfirst_try_usec = 24
kTasktag = 25
kTaskexecution_count = 26
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Task",
2: "task_name",
3: "eta_usec",
4: "url",
5: "method",
6: "retry_count",
7: "Header",
8: "key",
9: "value",
10: "body_size",
11: "body",
12: "creation_time_usec",
13: "CronTimetable",
14: "schedule",
15: "timezone",
16: "RunLog",
17: "dispatched_usec",
18: "lag_usec",
19: "elapsed_usec",
20: "response_code",
21: "description",
22: "payload",
23: "retry_parameters",
24: "first_try_usec",
25: "tag",
26: "execution_count",
27: "retry_reason",
}, 27)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.STARTGROUP,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.STRING,
12: ProtocolBuffer.Encoder.NUMERIC,
13: ProtocolBuffer.Encoder.STARTGROUP,
14: ProtocolBuffer.Encoder.STRING,
15: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.STARTGROUP,
17: ProtocolBuffer.Encoder.NUMERIC,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.NUMERIC,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.STRING,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.STRING,
24: ProtocolBuffer.Encoder.NUMERIC,
25: ProtocolBuffer.Encoder.STRING,
26: ProtocolBuffer.Encoder.NUMERIC,
27: ProtocolBuffer.Encoder.STRING,
}, 27, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksResponse'
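# Hedged usage sketch (not part of the generated message code): decoding a
# serialized TaskQueueQueryTasksResponse and walking its repeated Task group.
# `raw` below is a hypothetical byte string returned by the task queue stub.
#
#   response = TaskQueueQueryTasksResponse(raw)  # parses via MergeFromString
#   for i in xrange(response.task_size()):
#       task = response.task(i)
#       print task.task_name(), task.eta_usec()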
class TaskQueueFetchTaskRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
return n
def Clear(self):
self.clear_app_id()
self.clear_queue_name()
self.clear_task_name()
def OutputUnchecked(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_queue_name_):
out.putVarInt32(18)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(26)
out.putPrefixedString(self.task_name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 18:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 26:
self.set_task_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
kqueue_name = 2
ktask_name = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "queue_name",
3: "task_name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskRequest'
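# Illustrative sketch (names and values hypothetical), showing that both
# required fields must be set before the request is usable; IsInitialized()
# collects the missing ones:
#
#   request = TaskQueueFetchTaskRequest()
#   request.set_queue_name('default')
#   request.set_task_name('task-123')
#   errors = []
#   assert request.IsInitialized(errors), errors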
class TaskQueueFetchTaskResponse(ProtocolBuffer.ProtocolMessage):
has_task_ = 0
def __init__(self, contents=None):
self.task_ = TaskQueueQueryTasksResponse()
if contents is not None: self.MergeFromString(contents)
def task(self): return self.task_
def mutable_task(self): self.has_task_ = 1; return self.task_
def clear_task(self):self.has_task_ = 0; self.task_.Clear()
def has_task(self): return self.has_task_
def MergeFrom(self, x):
assert x is not self
if (x.has_task()): self.mutable_task().MergeFrom(x.task())
def Equals(self, x):
if x is self: return 1
if self.has_task_ != x.has_task_: return 0
if self.has_task_ and self.task_ != x.task_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_task_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task not set.')
elif not self.task_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(self.task_.ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_task_):
n += 1
n += self.lengthString(self.task_.ByteSizePartial())
return n
def Clear(self):
self.clear_task()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.task_.ByteSize())
self.task_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_task_):
out.putVarInt32(10)
out.putVarInt32(self.task_.ByteSizePartial())
self.task_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_task().TryMerge(tmp)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_task_:
res+=prefix+"task <\n"
res+=self.task_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktask = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "task",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskResponse'
class TaskQueueUpdateStorageLimitRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
has_limit_ = 0
limit_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def MergeFrom(self, x):
assert x is not self
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_limit()): self.set_limit(x.limit())
def Equals(self, x):
if x is self: return 1
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_app_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app_id not set.')
if (not self.has_limit_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: limit not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.app_id_))
n += self.lengthVarInt64(self.limit_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_app_id_):
n += 1
n += self.lengthString(len(self.app_id_))
if (self.has_limit_):
n += 1
n += self.lengthVarInt64(self.limit_)
return n
def Clear(self):
self.clear_app_id()
self.clear_limit()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
out.putVarInt32(16)
out.putVarInt64(self.limit_)
def OutputPartial(self, out):
if (self.has_app_id_):
out.putVarInt32(10)
out.putPrefixedString(self.app_id_)
if (self.has_limit_):
out.putVarInt32(16)
out.putVarInt64(self.limit_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app_id(d.getPrefixedString())
continue
if tt == 16:
self.set_limit(d.getVarInt64())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt64(self.limit_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kapp_id = 1
klimit = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app_id",
2: "limit",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitRequest'
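# Note on the size arithmetic above: ByteSize() adds a constant "+ 2" for the
# one-byte wire tags of the two required fields, while optional fields add
# their own "1 + ..." only when set. A worked example:
#
#   req = TaskQueueUpdateStorageLimitRequest()
#   req.set_app_id('app')  # tag (1 byte) + length prefix (1) + 3 payload bytes
#   req.set_limit(1)       # tag (1 byte) + one varint byte
#   assert req.ByteSize() == 7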
class TaskQueueUpdateStorageLimitResponse(ProtocolBuffer.ProtocolMessage):
has_new_limit_ = 0
new_limit_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def new_limit(self): return self.new_limit_
def set_new_limit(self, x):
self.has_new_limit_ = 1
self.new_limit_ = x
def clear_new_limit(self):
if self.has_new_limit_:
self.has_new_limit_ = 0
self.new_limit_ = 0
def has_new_limit(self): return self.has_new_limit_
def MergeFrom(self, x):
assert x is not self
if (x.has_new_limit()): self.set_new_limit(x.new_limit())
def Equals(self, x):
if x is self: return 1
if self.has_new_limit_ != x.has_new_limit_: return 0
if self.has_new_limit_ and self.new_limit_ != x.new_limit_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_new_limit_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: new_limit not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.new_limit_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_new_limit_):
n += 1
n += self.lengthVarInt64(self.new_limit_)
return n
def Clear(self):
self.clear_new_limit()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.new_limit_)
def OutputPartial(self, out):
if (self.has_new_limit_):
out.putVarInt32(8)
out.putVarInt64(self.new_limit_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_new_limit(d.getVarInt64())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_new_limit_: res+=prefix+("new_limit: %s\n" % self.DebugFormatInt64(self.new_limit_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
knew_limit = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "new_limit",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitResponse'
class TaskQueueQueryAndOwnTasksRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_lease_seconds_ = 0
lease_seconds_ = 0.0
has_max_tasks_ = 0
max_tasks_ = 0
has_group_by_tag_ = 0
group_by_tag_ = 0
has_tag_ = 0
tag_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def lease_seconds(self): return self.lease_seconds_
def set_lease_seconds(self, x):
self.has_lease_seconds_ = 1
self.lease_seconds_ = x
def clear_lease_seconds(self):
if self.has_lease_seconds_:
self.has_lease_seconds_ = 0
self.lease_seconds_ = 0.0
def has_lease_seconds(self): return self.has_lease_seconds_
def max_tasks(self): return self.max_tasks_
def set_max_tasks(self, x):
self.has_max_tasks_ = 1
self.max_tasks_ = x
def clear_max_tasks(self):
if self.has_max_tasks_:
self.has_max_tasks_ = 0
self.max_tasks_ = 0
def has_max_tasks(self): return self.has_max_tasks_
def group_by_tag(self): return self.group_by_tag_
def set_group_by_tag(self, x):
self.has_group_by_tag_ = 1
self.group_by_tag_ = x
def clear_group_by_tag(self):
if self.has_group_by_tag_:
self.has_group_by_tag_ = 0
self.group_by_tag_ = 0
def has_group_by_tag(self): return self.has_group_by_tag_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_lease_seconds()): self.set_lease_seconds(x.lease_seconds())
if (x.has_max_tasks()): self.set_max_tasks(x.max_tasks())
if (x.has_group_by_tag()): self.set_group_by_tag(x.group_by_tag())
if (x.has_tag()): self.set_tag(x.tag())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_lease_seconds_ != x.has_lease_seconds_: return 0
if self.has_lease_seconds_ and self.lease_seconds_ != x.lease_seconds_: return 0
if self.has_max_tasks_ != x.has_max_tasks_: return 0
if self.has_max_tasks_ and self.max_tasks_ != x.max_tasks_: return 0
if self.has_group_by_tag_ != x.has_group_by_tag_: return 0
if self.has_group_by_tag_ and self.group_by_tag_ != x.group_by_tag_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_lease_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lease_seconds not set.')
if (not self.has_max_tasks_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: max_tasks not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthVarInt64(self.max_tasks_)
if (self.has_group_by_tag_): n += 2
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n + 11
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_lease_seconds_):
n += 9
if (self.has_max_tasks_):
n += 1
n += self.lengthVarInt64(self.max_tasks_)
if (self.has_group_by_tag_): n += 2
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n
def Clear(self):
self.clear_queue_name()
self.clear_lease_seconds()
self.clear_max_tasks()
self.clear_group_by_tag()
self.clear_tag()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(17)
out.putDouble(self.lease_seconds_)
out.putVarInt32(24)
out.putVarInt64(self.max_tasks_)
if (self.has_group_by_tag_):
out.putVarInt32(32)
out.putBoolean(self.group_by_tag_)
if (self.has_tag_):
out.putVarInt32(42)
out.putPrefixedString(self.tag_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_lease_seconds_):
out.putVarInt32(17)
out.putDouble(self.lease_seconds_)
if (self.has_max_tasks_):
out.putVarInt32(24)
out.putVarInt64(self.max_tasks_)
if (self.has_group_by_tag_):
out.putVarInt32(32)
out.putBoolean(self.group_by_tag_)
if (self.has_tag_):
out.putVarInt32(42)
out.putPrefixedString(self.tag_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 17:
self.set_lease_seconds(d.getDouble())
continue
if tt == 24:
self.set_max_tasks(d.getVarInt64())
continue
if tt == 32:
self.set_group_by_tag(d.getBoolean())
continue
if tt == 42:
self.set_tag(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_lease_seconds_: res+=prefix+("lease_seconds: %s\n" % self.DebugFormat(self.lease_seconds_))
if self.has_max_tasks_: res+=prefix+("max_tasks: %s\n" % self.DebugFormatInt64(self.max_tasks_))
if self.has_group_by_tag_: res+=prefix+("group_by_tag: %s\n" % self.DebugFormatBool(self.group_by_tag_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kqueue_name = 1
klease_seconds = 2
kmax_tasks = 3
kgroup_by_tag = 4
ktag = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "lease_seconds",
3: "max_tasks",
4: "group_by_tag",
5: "tag",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.DOUBLE,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksRequest'
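# Hedged sketch of leasing tasks from a pull queue (field values illustrative):
#
#   lease = TaskQueueQueryAndOwnTasksRequest()
#   lease.set_queue_name('pull-queue')
#   lease.set_lease_seconds(60.0)  # written via putDouble as a fixed 8-byte field
#   lease.set_max_tasks(10)
#   lease.set_group_by_tag(True)   # optional; restricts the lease to a single tag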
class TaskQueueQueryAndOwnTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_retry_count_ = 0
retry_count_ = 0
has_body_ = 0
body_ = ""
has_tag_ = 0
tag_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def retry_count(self): return self.retry_count_
def set_retry_count(self, x):
self.has_retry_count_ = 1
self.retry_count_ = x
def clear_retry_count(self):
if self.has_retry_count_:
self.has_retry_count_ = 0
self.retry_count_ = 0
def has_retry_count(self): return self.has_retry_count_
def body(self): return self.body_
def set_body(self, x):
self.has_body_ = 1
self.body_ = x
def clear_body(self):
if self.has_body_:
self.has_body_ = 0
self.body_ = ""
def has_body(self): return self.has_body_
def tag(self): return self.tag_
def set_tag(self, x):
self.has_tag_ = 1
self.tag_ = x
def clear_tag(self):
if self.has_tag_:
self.has_tag_ = 0
self.tag_ = ""
def has_tag(self): return self.has_tag_
def MergeFrom(self, x):
assert x is not self
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_retry_count()): self.set_retry_count(x.retry_count())
if (x.has_body()): self.set_body(x.body())
if (x.has_tag()): self.set_tag(x.tag())
def Equals(self, x):
if x is self: return 1
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_retry_count_ != x.has_retry_count_: return 0
if self.has_retry_count_ and self.retry_count_ != x.retry_count_: return 0
if self.has_body_ != x.has_body_: return 0
if self.has_body_ and self.body_ != x.body_: return 0
if self.has_tag_ != x.has_tag_: return 0
if self.has_tag_ and self.tag_ != x.tag_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
if (self.has_tag_): n += 1 + self.lengthString(len(self.tag_))
return n
def Clear(self):
self.clear_task_name()
self.clear_eta_usec()
self.clear_retry_count()
self.clear_body()
self.clear_tag()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_retry_count_):
out.putVarInt32(32)
out.putVarInt32(self.retry_count_)
if (self.has_body_):
out.putVarInt32(42)
out.putPrefixedString(self.body_)
if (self.has_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.tag_)
def OutputPartial(self, out):
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_retry_count_):
out.putVarInt32(32)
out.putVarInt32(self.retry_count_)
if (self.has_body_):
out.putVarInt32(42)
out.putPrefixedString(self.body_)
if (self.has_tag_):
out.putVarInt32(50)
out.putPrefixedString(self.tag_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 32:
self.set_retry_count(d.getVarInt32())
continue
if tt == 42:
self.set_body(d.getPrefixedString())
continue
if tt == 50:
self.set_tag(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_retry_count_: res+=prefix+("retry_count: %s\n" % self.DebugFormatInt32(self.retry_count_))
if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
if self.has_tag_: res+=prefix+("tag: %s\n" % self.DebugFormatString(self.tag_))
return res
class TaskQueueQueryAndOwnTasksResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.task_ = []
if contents is not None: self.MergeFromString(contents)
def task_size(self): return len(self.task_)
def task_list(self): return self.task_
def task(self, i):
return self.task_[i]
def mutable_task(self, i):
return self.task_[i]
def add_task(self):
x = TaskQueueQueryAndOwnTasksResponse_Task()
self.task_.append(x)
return x
def clear_task(self):
self.task_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.task_size()): self.add_task().CopyFrom(x.task(i))
def Equals(self, x):
if x is self: return 1
if len(self.task_) != len(x.task_): return 0
for e1, e2 in zip(self.task_, x.task_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.task_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.task_)
for i in xrange(len(self.task_)): n += self.task_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.task_)
for i in xrange(len(self.task_)): n += self.task_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_task()
def OutputUnchecked(self, out):
for i in xrange(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputUnchecked(out)
out.putVarInt32(12)
def OutputPartial(self, out):
for i in xrange(len(self.task_)):
out.putVarInt32(11)
self.task_[i].OutputPartial(out)
out.putVarInt32(12)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_task().TryMerge(d)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.task_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Task%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kTaskGroup = 1
kTasktask_name = 2
kTasketa_usec = 3
kTaskretry_count = 4
kTaskbody = 5
kTasktag = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Task",
2: "task_name",
3: "eta_usec",
4: "retry_count",
5: "body",
6: "tag",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksResponse'
class TaskQueueModifyTaskLeaseRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
has_task_name_ = 0
task_name_ = ""
has_eta_usec_ = 0
eta_usec_ = 0
has_lease_seconds_ = 0
lease_seconds_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def queue_name(self): return self.queue_name_
def set_queue_name(self, x):
self.has_queue_name_ = 1
self.queue_name_ = x
def clear_queue_name(self):
if self.has_queue_name_:
self.has_queue_name_ = 0
self.queue_name_ = ""
def has_queue_name(self): return self.has_queue_name_
def task_name(self): return self.task_name_
def set_task_name(self, x):
self.has_task_name_ = 1
self.task_name_ = x
def clear_task_name(self):
if self.has_task_name_:
self.has_task_name_ = 0
self.task_name_ = ""
def has_task_name(self): return self.has_task_name_
def eta_usec(self): return self.eta_usec_
def set_eta_usec(self, x):
self.has_eta_usec_ = 1
self.eta_usec_ = x
def clear_eta_usec(self):
if self.has_eta_usec_:
self.has_eta_usec_ = 0
self.eta_usec_ = 0
def has_eta_usec(self): return self.has_eta_usec_
def lease_seconds(self): return self.lease_seconds_
def set_lease_seconds(self, x):
self.has_lease_seconds_ = 1
self.lease_seconds_ = x
def clear_lease_seconds(self):
if self.has_lease_seconds_:
self.has_lease_seconds_ = 0
self.lease_seconds_ = 0.0
def has_lease_seconds(self): return self.has_lease_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_queue_name()): self.set_queue_name(x.queue_name())
if (x.has_task_name()): self.set_task_name(x.task_name())
if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
if (x.has_lease_seconds()): self.set_lease_seconds(x.lease_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_queue_name_ != x.has_queue_name_: return 0
if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
if self.has_task_name_ != x.has_task_name_: return 0
if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
if self.has_eta_usec_ != x.has_eta_usec_: return 0
if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
if self.has_lease_seconds_ != x.has_lease_seconds_: return 0
if self.has_lease_seconds_ and self.lease_seconds_ != x.lease_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_queue_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: queue_name not set.')
if (not self.has_task_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: task_name not set.')
if (not self.has_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
if (not self.has_lease_seconds_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lease_seconds not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.queue_name_))
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
return n + 12
def ByteSizePartial(self):
n = 0
if (self.has_queue_name_):
n += 1
n += self.lengthString(len(self.queue_name_))
if (self.has_task_name_):
n += 1
n += self.lengthString(len(self.task_name_))
if (self.has_eta_usec_):
n += 1
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_lease_seconds_):
n += 9
return n
def Clear(self):
self.clear_queue_name()
self.clear_task_name()
self.clear_eta_usec()
self.clear_lease_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
out.putVarInt32(33)
out.putDouble(self.lease_seconds_)
def OutputPartial(self, out):
if (self.has_queue_name_):
out.putVarInt32(10)
out.putPrefixedString(self.queue_name_)
if (self.has_task_name_):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
if (self.has_eta_usec_):
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
if (self.has_lease_seconds_):
out.putVarInt32(33)
out.putDouble(self.lease_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_queue_name(d.getPrefixedString())
continue
if tt == 18:
self.set_task_name(d.getPrefixedString())
continue
if tt == 24:
self.set_eta_usec(d.getVarInt64())
continue
if tt == 33:
self.set_lease_seconds(d.getDouble())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
if self.has_lease_seconds_: res+=prefix+("lease_seconds: %s\n" % self.DebugFormat(self.lease_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kqueue_name = 1
ktask_name = 2
keta_usec = 3
klease_seconds = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "queue_name",
2: "task_name",
3: "eta_usec",
4: "lease_seconds",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.DOUBLE,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseRequest'
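# Sketch of extending a lease (values hypothetical; `task` is a previously
# leased TaskQueueQueryAndOwnTasksResponse_Task). The request repeats the
# task's eta_usec, presumably so the service can check the caller still holds
# the lease before granting lease_seconds more:
#
#   extend = TaskQueueModifyTaskLeaseRequest()
#   extend.set_queue_name('pull-queue')
#   extend.set_task_name(task.task_name())
#   extend.set_eta_usec(task.eta_usec())
#   extend.set_lease_seconds(120.0)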
class TaskQueueModifyTaskLeaseResponse(ProtocolBuffer.ProtocolMessage):
has_updated_eta_usec_ = 0
updated_eta_usec_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def updated_eta_usec(self): return self.updated_eta_usec_
def set_updated_eta_usec(self, x):
self.has_updated_eta_usec_ = 1
self.updated_eta_usec_ = x
def clear_updated_eta_usec(self):
if self.has_updated_eta_usec_:
self.has_updated_eta_usec_ = 0
self.updated_eta_usec_ = 0
def has_updated_eta_usec(self): return self.has_updated_eta_usec_
def MergeFrom(self, x):
assert x is not self
if (x.has_updated_eta_usec()): self.set_updated_eta_usec(x.updated_eta_usec())
def Equals(self, x):
if x is self: return 1
if self.has_updated_eta_usec_ != x.has_updated_eta_usec_: return 0
if self.has_updated_eta_usec_ and self.updated_eta_usec_ != x.updated_eta_usec_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_updated_eta_usec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: updated_eta_usec not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.updated_eta_usec_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_updated_eta_usec_):
n += 1
n += self.lengthVarInt64(self.updated_eta_usec_)
return n
def Clear(self):
self.clear_updated_eta_usec()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.updated_eta_usec_)
def OutputPartial(self, out):
if (self.has_updated_eta_usec_):
out.putVarInt32(8)
out.putVarInt64(self.updated_eta_usec_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_updated_eta_usec(d.getVarInt64())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_updated_eta_usec_: res+=prefix+("updated_eta_usec: %s\n" % self.DebugFormatInt64(self.updated_eta_usec_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kupdated_eta_usec = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "updated_eta_usec",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseResponse'
if _extension_runtime:
pass
__all__ = ['TaskQueueServiceError','TaskQueueRetryParameters','TaskQueueAcl','TaskQueueHttpHeader','TaskQueueMode','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueBulkAddRequest','TaskQueueBulkAddResponse','TaskQueueBulkAddResponse_TaskResult','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueForceRunRequest','TaskQueueForceRunResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePauseQueueRequest','TaskQueuePauseQueueResponse','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueDeleteGroupRequest','TaskQueueDeleteGroupResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task','TaskQueueFetchTaskRequest','TaskQueueFetchTaskResponse','TaskQueueUpdateStorageLimitRequest','TaskQueueUpdateStorageLimitResponse','TaskQueueQueryAndOwnTasksRequest','TaskQueueQueryAndOwnTasksResponse','TaskQueueQueryAndOwnTasksResponse_Task','TaskQueueModifyTaskLeaseRequest','TaskQueueModifyTaskLeaseResponse']
# server/applibs/account/models/phoneuser.py
import json
import logging
import phonenumbers
from django.db import models, transaction
from django.utils.functional import cached_property
from phonenumbers import PhoneNumberFormat as PNFormat
from phonenumbers import carrier, geocoder
from mirage import fields as mg_fields
from server.constant import mochoice as mc
from server.corelib.sequence import idshift
from server.corelib.hash_id import pk_hashid_decode
from server.djextend.basemodel import BasicModel, BIDModel
from server.third.aliyun.dayu import sms_action, sms_constant
from server.corelib.dealer.deal_time import get_now
from server.corelib.dealer import format_phonenumber
from server.constant.normal import COUNTRY_CODES
logger = logging.getLogger('kubrick.debug')
class PhoneManager(models.Manager):
def get_phone(self, number, **kwargs):
""" 手机号获取 """
number, msg = format_phonenumber(number)
if not number:
logger.warning(f'get_phone__parse_error {number} {msg}')
raise ValueError(f'Phone number parse error: {msg}.')
kwargs['shahash'] = idshift.hash_sha1(number)
inst, is_created = self.get_or_create(number=number, defaults=kwargs)
logger.info(f'get_phone__created {inst.pk} {is_created} {inst.usrid} {inst.show}')
if is_created:
inst.check_context()
return inst
def user_phone_qs(self, usrid):
""" 用户手机号列表 """
qs = self.filter(usrid=usrid, is_verified=True).order_by('order', 'pk')
return qs
def user_phone_main(self, usrid):
""" 用户主手机号 """
phone = self.user_phone_qs(usrid=usrid).first()
return phone
def check_phone_exist(self, number):
""" 手机号是否已注册 """
number, msg = format_phonenumber(number)
if not number:
logger.warning(f'check_phone_exist__parse_error {number} {msg}')
return False
is_exists = self.filter(number=number, usrid__gt=0).exists()
return is_exists
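# Minimal usage sketch for the manager above (the number is illustrative):
#
#   phone = Phone.objects.get_phone('+8613812345678')  # creates on first use
#   Phone.objects.check_phone_exist('+8613812345678')  # True once bound to a user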
class Phone(BasicModel, BIDModel):
""" 手机号绑定,用户可绑定1~5个 """
class Meta:
verbose_name = 'Phone'
verbose_name_plural = verbose_name
index_together = ['carrier', 'nation', 'region', 'is_verified']
db_table = 'k_ac_phone'
ordering = ('-pk',)
limit = 3 # maximum number of phone numbers a user may bind
shahash = models.CharField('SHA1 hash', max_length=50, unique=True)
number = mg_fields.EncryptedCharField(verbose_name='phone number', max_length=50, unique=True) # E.164, encrypted
national = mg_fields.EncryptedCharField(verbose_name='national number', max_length=50, db_index=True, default='')
usrid = models.BigIntegerField('user', db_index=True, default=0)
carrier = models.CharField('carrier', max_length=50, default='')
nation = models.CharField('country', max_length=20, default='')
region = models.CharField('region', max_length=50, default='')
is_verified = models.BooleanField('verified', default=False)
verified_at = models.DateTimeField('verified at', null=True, default=None)
order = models.PositiveSmallIntegerField('order', default=0)
objects = PhoneManager()
@cached_property
def parse_info(self):
info = phonenumbers.parse(self.number, None)
return info
@property
def user(self):
if not self.usrid:
return None
info = self.get_user(self.usrid)
return info
@property
def is_main(self):
if not (self.usrid and self.is_verified):
return False
is_ok = self.order == 0
return is_ok
@property
def sibling_qs(self):
""" 该用户下其他手机号 """
objects = self.__class__.objects
if self.usrid and self.is_verified:
qs = objects.filter(
usrid=self.usrid, is_verified=True,
).exclude(pk=self.pk).order_by('order', 'pk')
else:
qs = objects.none()
return qs
@property
def tail(self):
""" 尾号 """
return f'**{self.number[-4:]}'
@property
def show(self):
""" 脱敏显示 """
n = len(self.national)
if self.country in COUNTRY_CODES and n == 11:
s = f"{self.national[:1]}**{self.national[3:4]}***{self.national[-4:]}"
elif n > 9:
cut = n - 6
s = f"{self.national[:2]}{'*' * cut}{self.national[-4:]}"
else:
cut = n - 3
s = f"{self.national[:1]}{'*' * cut}{self.national[-2:]}"
return s
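# Masking examples derived from the branches above: the 11-digit CN number
# 13812345678 renders as 1**1***5678; other numbers longer than 9 digits keep
# the first two and last four digits; 9 digits or fewer keep the first digit
# and the last two.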
@property
def summary(self):
desc = f'{self.pk} {self.usrid} {self.show}'
return desc
@property
def country(self):
code = self.parse_info.country_code
return code
@property
def fmt_natl(self):
""" 国内号码格式化 """
fmt = phonenumbers.format_number(self.parse_info, PNFormat.NATIONAL)
return fmt
@property
def fmt_intl(self):
""" 国际号码格式化 """
fmt = phonenumbers.format_number(self.parse_info, PNFormat.INTERNATIONAL)
return fmt
def check_context(self):
self.national = str(self.parse_info.national_number)
self.carrier = carrier.name_for_number(self.parse_info, 'en')
self.nation = geocoder.country_name_for_number(self.parse_info, 'en')
self.region = geocoder.description_for_number(self.parse_info, 'en')
up_fields = ['national', 'carrier', 'nation', 'region', 'updated_at']
self.save(update_fields=up_fields)
@transaction.atomic
def set_main(self):
""" 设为主手机号 """
self.refresh_from_db()
if not (self.usrid and self.is_verified):
logger.info(f'set_main__not_verified {self.summary}')
return False
self.order = 0
up_fields = ['order', 'updated_at']
self.save(update_fields=up_fields)
for index, phone in enumerate(self.sibling_qs):
phone.order = index + 1
phone.save(update_fields=up_fields)
logger.info(f'set_main__done {self.pk} {self.show}')
return True
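# Ordering note: order 0 marks the primary number; the remaining verified
# numbers are renumbered 1..n inside the same transaction.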
def user_phone_bind(self, usrid):
""" 关联用户 """
assert usrid > 0, f'user_phone_bind__no_user {self.pk}'
if self.usrid == usrid:
logger.warning(f'user_phone_bind__done {self.pk} {usrid}')
return True, 'Already bound'
if self.usrid:
self.extra_log('bind', usrid=self.usrid, new=usrid, type='repeat')
logger.warning(f'user_phone_bind__repeat {self.pk} {self.usrid}')
return False, 'Already bound to another user'
self.usrid = usrid
self.is_verified = True
self.verified_at = get_now()
self.save(update_fields=['usrid', 'is_verified', 'verified_at', 'updated_at'])
self.extra_log('usrid', usrid=usrid, type='create')
return True, 'Success'
def captcha_send_for_sign(self):
""" 验证码发送,仅登录 """
assert self.usrid > 0, f'captcha_send_for_sign__no_user {self.pk}'
ret = PNVerify.objects.pnvc_send(self.pk, mc.PNVScene.Sign)
logger.info(f'captcha_send_for_sign__done {self.summary} {ret}')
return ret
def captcha_verify_for_sign(self, code):
""" 验证码验证,仅登录 """
assert self.usrid > 0, f'captcha_send_for_sign__no_user {self.pk}'
is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.Sign)
if not is_ok:
return None
return self.user
def captcha_send_for_bind(self):
""" 验证码发送,用户绑定新手机号 """
if self.usrid > 0:
return False, '手机号已被绑定'
ret = PNVerify.objects.pnvc_send(self.pk, mc.PNVScene.Bind)
logger.info(f'captcha_send_for_bind__done {self.summary} {ret}')
return True, ret
def captcha_verify_for_bind(self, code, usrid):
""" 验证码验证,用户绑定新手机号 """
if self.usrid > 0:
return False, '手机号已被绑定'
assert isinstance(usrid, int) and usrid > 0, usrid
is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.Bind)
if not is_ok:
return False, 'Incorrect verification code'
if self.usrid > 0:
return False, 'Phone number already bound'
is_ok, reason = self.user_phone_bind(usrid)
return is_ok, reason
def captcha_send_for_unbind(self):
""" 验证码发送,解除绑定手机号 """
if not self.is_verified:
return False, '手机号未绑定'
if self.is_main:
return False, '主手机号无法解除绑定'
ret = PNVerify.objects.pnvc_send(self.pk, mc.PNVScene.Unbind)
logger.info(f'captcha_send_for_unbind__done {self.summary} {ret}')
return True, ret
def captcha_verify_for_unbind(self, code):
""" 验证码验证,解除绑定手机号 """
is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.Unbind)
if not is_ok:
return False, '验证码不正确'
if not self.is_verified:
return False, '手机号未绑定'
if self.is_main:
return False, '主手机号无法解除绑定'
self.usrid = 0
self.order = 0
self.verified_at = None
self.is_verified = False
up_fields = ['usrid', 'order', 'is_verified', 'verified_at', 'updated_at']
self.save(update_fields=up_fields)
self.extra_log('usrid', usrid=0, type='unbind')
return True, self.user
def captcha_send_for_symbol_strike(self):
""" 验证码发送,场景码删除 """
if not (self.usrid and self.is_verified):
return False, '手机号信息不正确'
ret = PNVerify.objects.pnvc_send(self.pk, scene=mc.PNVScene.UNSymbol)
        logger.info(f'captcha_send_for_symbol_strike__done {self.summary} {ret}')
return True, ret
def captcha_verify_for_symbol_strike(self, code):
""" 验证码验证,场景码删除 """
if not (self.usrid and self.is_verified):
return False
is_ok = PNVerify.objects.pnvc_verify(self.pk, code, mc.PNVScene.UNSymbol)
return is_ok
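# Bind-flow sketch (illustrative only; `phone` stands for a hypothetical
# unbound Phone instance, and `code`/`usrid` would come from the client):
# ok, ref = phone.captcha_send_for_bind()                   # creates a PNVerify row, sends the SMS
# ok, reason = phone.captcha_verify_for_bind(code, usrid)   # checks the code, then binds the user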
class PNVerifyManager(models.Manager):
""" PNVerify.objects """
def pnvc_send(self, phoneid, scene):
""" 短信验证码发送,50秒内有记录不重发 """
now = get_now()
seconds_ago = now.add(seconds=-50)
pnv_qs = self.filter(
phoneid=phoneid, scene=scene,
created_at__gt=seconds_ago,
is_verified=False,
)
if pnv_qs.exists():
send_dic = dict(
phoneid=phoneid, scene=scene,
seconds_ago=seconds_ago.isoformat(),
now=now.isoformat(),
pnv_count=pnv_qs.count(),
)
send_info = json.dumps(send_dic, sort_keys=True)
logger.info(f'pnvc_just_sent {send_info}')
return False, None
template = sms_constant.SMS_CODE_SCENE_MAP[scene]
inst = self.create(phoneid=phoneid, scene=scene, template=template)
inst.sms_code_send()
return True, inst.pk
def pnvc_verify(self, phoneid, code, scene):
""" 短信验证码验证,过去6分钟内未使用的验证码 """
now = get_now()
minutes_ago = now.add(minutes=-6)
pnv_qs = self.filter(
phoneid=phoneid, scene=scene,
created_at__gt=minutes_ago,
is_verified=False,
).order_by('-pk')
for pnv in pnv_qs:
is_ok, msg = pnv.sms_code_verify(code)
if is_ok:
return True
return False
def sms_code_report_receipt(self, dic):
""" 验证码短信发送回执MNS订阅 """
try:
assert isinstance(dic, dict)
bid = pk_hashid_decode(dic['tid'])
inst = self.get(pk=bid, bizid=dic['biz_id'])
is_ok = inst.report_receipt(dic)
        except (KeyError, IndexError, AssertionError, PNVerify.DoesNotExist) as exc:  # KeyError: dic may lack 'tid'/'biz_id'
logger.warning(f'sms_code_report_receipt__error {dic} {str(exc)}')
is_ok = True
return is_ok
class PNVerify(BasicModel, BIDModel):
""" 手机号短信验证 """
class Meta:
verbose_name = 'PNVerify'
verbose_name_plural = verbose_name
db_table = 'k_ac_pnverify'
ordering = ('-pk',)
phoneid = models.BigIntegerField('手机号ID', db_index=True)
captcha_hmac = models.CharField('验证码签名', max_length=50, default='')
captcha_at = models.DateTimeField('发送时间', null=True, default=None)
verified_at = models.DateTimeField('验证时间', null=True, default=None)
is_verified = models.BooleanField('是否已验证', default=False)
scene = models.PositiveSmallIntegerField(choices=mc.PNVScene.choices, default=0)
status = models.SmallIntegerField('发送状态', choices=mc.SMSStatus.choices, default=0)
bizid = models.CharField('回执', db_index=True, max_length=50, default='')
template = models.CharField('模板', max_length=50, default='')
sign = models.CharField('短信签名', max_length=25, default='')
objects = PNVerifyManager()
@property
def sms_outid(self):
""" 短信发送外部ID """
return f'code-{self.hid}'
@cached_property
def phone_info(self):
info = Phone.objects.get(pk=self.phoneid)
return info
@property
def number(self):
number = self.phone_info.number
return number
@property
def usrid(self):
        return self.phone_info.usrid
@property
def is_status_final(self):
""" 是否已终态 """
is_yes = self.status in [
mc.SMSStatus.Success,
mc.SMSStatus.Failure,
]
return is_yes
def sms_code_send(self):
""" 短信验证码发送 """
if self.is_verified:
            return 'is_verified'
self.captcha_at = get_now()
code = idshift.generate_captcha()
self.captcha_hmac = idshift.hmac_hash(self.pk, code)
self.save(update_fields=['captcha_hmac', 'captcha_at'])
try:
result = sms_action.sms_send__code(self, code)
self.extra['resp_send'] = result
self.bizid = result.get('BizId', '')
self.status = mc.SMSStatus.Waiting if self.bizid else mc.SMSStatus.Init
self.save(update_fields=['status', 'bizid', 'extra', 'updated_at'])
except Exception as exc:
self.extra['send_error'] = str(exc)
self.save(update_fields=['extra', 'updated_at'])
logger.warning(f'sms_code_send__error {str(exc)}')
logger.exception(exc)
return code
def sms_code_verify(self, code):
""" 短信验证码验证 """
if self.is_verified:
return None, 'is_verified'
        if self.captcha_hmac != idshift.hmac_hash(self.pk, code):
return False, 'failure'
self.is_verified = True
self.verified_at = get_now()
self.save(update_fields=['is_verified', 'verified_at', 'updated_at'])
return True, 'success'
def sms_code_query(self):
""" 主动查询回执状态 """
if self.is_status_final:
logger.info(f'sms_code_query__final {self.pk}')
return
if not self.bizid:
logger.warning(f'sms_code_query__no_bizid {self.pk}')
return
try:
result = sms_action.sms_query__code(self)
self.status = result['SendStatus']
self.extra['resp_query'] = result
self.save(update_fields=['status', 'extra', 'updated_at'])
except Exception as exc:
self.extra['query_error'] = str(exc)
self.save(update_fields=['extra', 'updated_at'])
logger.warning(f'sms_code_query__error {str(exc)}')
logger.exception(exc)
def report_receipt(self, result):
""" 短信发送回执MNS订阅 """
        if self.status in [mc.SMSStatus.Init, mc.SMSStatus.Waiting]:  # the receipt may arrive before the send response (callback ordering)
self.status = mc.SMSStatus.Success if result['success'] else mc.SMSStatus.Failure
self.save(update_fields=['status', 'updated_at'])
self.extra_log('report', result=result)
return True
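# Note: sms_code_send() never stores the plaintext code — only the digest from
# idshift.hmac_hash(pk, code) — and sms_code_verify() recomputes that digest to
# compare. A minimal sketch of the same idea (idshift's internals are not shown
# in this file, so the key and the function name below are assumptions):
# import hashlib, hmac
# def hmac_hash(pk, code, key=b'app-secret'):
#     return hmac.new(key, f'{pk}:{code}'.encode(), hashlib.sha256).hexdigest()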
| [
"[email protected]"
] | |
2e2b8705b460a63f5c112ef28e86945c639ebe7a | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/CISCO-DNS-CLIENT-MIB.py | 01fce15acb4eef7b472a8ae3f75dca628704efbc | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 8,248 | py | #
# PySNMP MIB module CISCO-DNS-CLIENT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DNS-CLIENT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:38:02 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
ModuleIdentity, Bits, Unsigned32, Counter32, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, iso, Integer32, Counter64, Gauge32, TimeTicks, ObjectIdentity, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Bits", "Unsigned32", "Counter32", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "iso", "Integer32", "Counter64", "Gauge32", "TimeTicks", "ObjectIdentity", "MibIdentifier")
DisplayString, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus")
ciscoDNSClientMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 436))
ciscoDNSClientMIB.setRevisions(('2004-09-09 00:00',))
if mibBuilder.loadTexts: ciscoDNSClientMIB.setLastUpdated('200409090000Z')
if mibBuilder.loadTexts: ciscoDNSClientMIB.setOrganization('Cisco Systems Inc. ')
ciscoDNSClientMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 0))
ciscoDNSClientMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 1))
ciscoDNSClientMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 2))
cdcConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1))
cdcDNSConfigEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdcDNSConfigEnable.setStatus('current')
cdcNoOfDNSServerConfig = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdcNoOfDNSServerConfig.setStatus('current')
cdcDNSServerNextAvailIndex = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdcDNSServerNextAvailIndex.setStatus('current')
cdcDNSServerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4), )
if mibBuilder.loadTexts: cdcDNSServerTable.setStatus('current')
cdcDNSServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1), ).setIndexNames((0, "CISCO-DNS-CLIENT-MIB", "cdcDNSServerIndex"))
if mibBuilder.loadTexts: cdcDNSServerEntry.setStatus('current')
cdcDNSServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: cdcDNSServerIndex.setStatus('current')
cdcDNSServerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSServerAddrType.setStatus('current')
cdcDNSServerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSServerAddr.setStatus('current')
cdcDNSServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSServerStatus.setStatus('current')
cdcDefaultDNSDomainName = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdcDefaultDNSDomainName.setStatus('current')
cdcDNSDomainNameTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6), )
if mibBuilder.loadTexts: cdcDNSDomainNameTable.setStatus('current')
cdcDNSDomainNameEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1), ).setIndexNames((0, "CISCO-DNS-CLIENT-MIB", "cdcDNSDomainNameIndex"))
if mibBuilder.loadTexts: cdcDNSDomainNameEntry.setStatus('current')
cdcDNSDomainNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 64)))
if mibBuilder.loadTexts: cdcDNSDomainNameIndex.setStatus('current')
cdcDNSDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSDomainName.setStatus('current')
cdcDNSDomainNameStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 436, 1, 1, 6, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdcDNSDomainNameStatus.setStatus('current')
ciscoDNSClientMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 1))
ciscoDNSClientMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 2))
ciscoDNSClientMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 1, 1)).setObjects(("CISCO-DNS-CLIENT-MIB", "ciscoDNSServerConfigGroup"), ("CISCO-DNS-CLIENT-MIB", "ciscoDNSDomainNameConfigGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDNSClientMIBCompliance = ciscoDNSClientMIBCompliance.setStatus('current')
ciscoDNSServerConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 2, 1)).setObjects(("CISCO-DNS-CLIENT-MIB", "cdcDNSConfigEnable"), ("CISCO-DNS-CLIENT-MIB", "cdcNoOfDNSServerConfig"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerNextAvailIndex"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerAddrType"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerAddr"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDNSServerConfigGroup = ciscoDNSServerConfigGroup.setStatus('current')
ciscoDNSDomainNameConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 436, 2, 2, 2)).setObjects(("CISCO-DNS-CLIENT-MIB", "cdcDefaultDNSDomainName"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSDomainName"), ("CISCO-DNS-CLIENT-MIB", "cdcDNSDomainNameStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDNSDomainNameConfigGroup = ciscoDNSDomainNameConfigGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-DNS-CLIENT-MIB", cdcDNSDomainName=cdcDNSDomainName, ciscoDNSClientMIBCompliances=ciscoDNSClientMIBCompliances, cdcDNSServerStatus=cdcDNSServerStatus, cdcConfigGroup=cdcConfigGroup, cdcDNSDomainNameIndex=cdcDNSDomainNameIndex, ciscoDNSClientMIBCompliance=ciscoDNSClientMIBCompliance, cdcDNSDomainNameStatus=cdcDNSDomainNameStatus, ciscoDNSClientMIBGroups=ciscoDNSClientMIBGroups, PYSNMP_MODULE_ID=ciscoDNSClientMIB, cdcDNSDomainNameEntry=cdcDNSDomainNameEntry, ciscoDNSClientMIBNotifs=ciscoDNSClientMIBNotifs, cdcDNSServerNextAvailIndex=cdcDNSServerNextAvailIndex, cdcDNSDomainNameTable=cdcDNSDomainNameTable, cdcDNSServerAddrType=cdcDNSServerAddrType, ciscoDNSDomainNameConfigGroup=ciscoDNSDomainNameConfigGroup, ciscoDNSServerConfigGroup=ciscoDNSServerConfigGroup, ciscoDNSClientMIB=ciscoDNSClientMIB, cdcDNSServerEntry=cdcDNSServerEntry, cdcDefaultDNSDomainName=cdcDefaultDNSDomainName, cdcDNSServerIndex=cdcDNSServerIndex, cdcNoOfDNSServerConfig=cdcNoOfDNSServerConfig, cdcDNSConfigEnable=cdcDNSConfigEnable, cdcDNSServerTable=cdcDNSServerTable, ciscoDNSClientMIBObjects=ciscoDNSClientMIBObjects, cdcDNSServerAddr=cdcDNSServerAddr, ciscoDNSClientMIBConformance=ciscoDNSClientMIBConformance)
| [
"[email protected]"
] | |
3b6a980ffb87af3580820c10aa1428a173c1618d | 9399d687b2e41245968ba0e9d413a6789d773b1d | /CI/erlang/erlang/libs/fake_ne/interface/FakeNeKeyword.py | d00bfa40b778bc57c8f8d8b44ee00d7e54648cad | [] | no_license | jiangliu888/DemoForSpeed | be41bdb85a1d1f5ca9350a3a1f681ced5ec9b929 | 11319bc19c074327d863ac2813a04cef3487f8d6 | refs/heads/main | 2023-08-23T14:16:21.686155 | 2021-10-17T12:01:34 | 2021-10-17T12:01:34 | 388,452,435 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,959 | py | import os
from client.device import DeviceClient
from erlang.libs.uranus.interface.EsInterface import EsInterface
from erlang.libs.uranus.interface.UranusInterface import UranusInterface
from erlang.libs.variables import MeasureResultVariables
class FakeNeKeyword(object):
DB_PORT = 3000
DB_REST_PORT = 3500
OFP_REST_PORT = 4000
fake_ne_list = {}
def __init__(self):
pass
@staticmethod
def get_fake_ne_measure_tunnels(neid):
return DeviceClient.get_device_config(int(neid), "TUNNEL")
@staticmethod
def get_fake_ne_measure_tasks(neid):
return DeviceClient.get_device_config(int(neid), "MEASURE")
@staticmethod
def get_fake_ne_measure_task_with_address(ne_id, local_ip, remote_ip):
tasks = FakeNeKeyword.get_fake_ne_measure_tasks(ne_id)
print tasks
return filter(lambda x: (x["remote-ipv4-address"] == remote_ip) and (x["local-ipv4-address"] == local_ip), tasks)
@staticmethod
def get_fake_ne_tunnels_with_dstNeId(local_NeId, dstNeId):
s_id = int(local_NeId) >> 4
tunnels = FakeNeKeyword.get_fake_ne_measure_tunnels(s_id)
print tunnels
return filter(lambda x: x["dst"] == dstNeId, tunnels)
@staticmethod
def get_fake_ne_measure_tasks_with_dstNeId(local_NeId, dstNeId):
s_id = int(local_NeId) >> 4
tasks = FakeNeKeyword.get_fake_ne_measure_tasks(s_id)
print tasks
return filter(lambda x: x["dstNeId"] == dstNeId, tasks)
@staticmethod
def get_fake_ne_flows_id(ne_id):
res = DeviceClient.get_routes(int(ne_id))
return map(int, res) if res else []
@staticmethod
def change_ne_link_measure_result(ne_id, jitter, loss, delay=[0, 0, 0, 0], loss_target=[]):
cmd = "ps -ef |grep create_measure|grep {} |awk {}".format(ne_id, r"'{print $10}'")
r = os.popen(cmd)
info = r.read().split('\n')[0]
print 'info is {}'.format(info)
cmd = "ps -ef |grep create_measure|grep {} |awk {}|xargs sudo kill -9".format(ne_id, r"'{print $2}'")
ret = os.system(cmd)
print 'cmd is {} and ret is {}'.format(cmd, ret)
cmd = "sh -c 'python erlang/libs/fake_ne/create_measure_result.py {} {} {} {} {} {} >> logs/{}measure.log &'".format(info, int(ne_id), ' '.join(jitter), ' '.join(loss), ' '.join(delay), ' '.join(loss_target), int(ne_id))
print cmd
ret = os.system(cmd)
assert ret == 0
@staticmethod
def export_data_to_es(topo_name):
for es_data in MeasureResultVariables.topo(topo_name):
EsInterface.bulk_insert_12_measure_results(es_data['netLink'], es_data['ttl'], es_data['jitter'], es_data['loss'])
@staticmethod
def get_fake_ne_type(ne_id):
rec, ne_info = UranusInterface.get_netcfg_ne_config_with_id(ne_id)
ne_type = ne_info["type"]
return ne_type
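# Usage sketch (NE id and addresses below are illustrative):
# tasks = FakeNeKeyword.get_fake_ne_measure_task_with_address(12, '10.0.0.1', '10.0.0.2')
# ne_type = FakeNeKeyword.get_fake_ne_type(12)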
| [
"[email protected]"
] | |
4d00ccd7e2aa83e59a80c5067dca230245fd07bc | 09f8a8bb1655cc76a29ac60896d1d42b0145f3c2 | /Utils.py | f617be1131d20d1c307cca7ba5b167e85ef6ea3f | [
"BSD-3-Clause"
] | permissive | FlatL1neAPT/PoshC2_Python | 4d1eb4d6a639395a32a2674ee49a17969a2b8a79 | 39f755f67bf4de15e93f56cd690e50924aa8bba0 | refs/heads/master | 2020-05-04T02:58:16.717780 | 2019-03-08T10:32:55 | 2019-03-08T10:32:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,015 | py | import os, base64, string, random, re
validate_sleep_regex = re.compile("^[0-9]*[smh]$")
def gen_key():
    # 256-bit random key, base64-encoded (Python 2 here, so 256/8 == 32)
    key = os.urandom(256/8)
    return base64.b64encode(key)
def formStrMacro(varstr, instr):
    # Chunk `instr` into VBA-style concatenations: a 54-char head, then
    # 48-char pieces appended to the variable line by line.
    holder = []
    str1 = varstr + ' = "' + instr[:54] + '"'
    for i in xrange(54, len(instr), 48):
        holder.append(varstr + ' = ' + varstr + ' + "' + instr[i:i+48])
    str2 = '"\r\n'.join(holder)
    str2 = str2 + "\""
    str1 = str1 + "\r\n" + str2
    return str1
def formStr(varstr, instr):
    # Same chunking for PowerShell-style strings: a 56-char head, quoted
    # 48-char continuation lines, and a trailing semicolon.
    holder = []
    str1 = varstr + ' = "' + instr[:56] + '"'
    for i in xrange(56, len(instr), 48):
        holder.append('"' + instr[i:i+48])
    str2 = '"\r\n'.join(holder)
    str2 = str2 + "\""
    str1 = str1 + "\r\n" + str2
    return "%s;" % str1
def randomuri(size = 15, chars=string.ascii_letters + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def validate_sleep_time(sleeptime):
sleeptime = sleeptime.strip()
return validate_sleep_regex.match(sleeptime) | [
"email"
] | email |
fbc34bce75ef0bcc33b60c5c56c4ee439012a1ba | 7e470dd54740ca6331d1341328e344a713329a77 | /src/DQD_counting_statistics/zero_freq_statistics.py | a710fb1843cd96bbb95ceec6215f5f71d5d12580 | [] | no_license | rstones/DQD_counting_statistics | 127eb2ad83c5c69bdfb168975077f541c09d4bbc | 3eb5ad9876b59c43c35150238c3af3396b3ad100 | refs/heads/master | 2020-04-07T03:10:59.294391 | 2017-10-22T10:58:06 | 2017-10-22T10:58:06 | 53,421,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | '''
Created on 8 Mar 2016
@author: rstones
'''
import numpy as np
import quant_mech.utils as utils
from DQD_counting_statistics.DQD_model import DQDModel
import matplotlib.pyplot as plt
bias_values = np.array([0, 1.5, 3., 4.5, 6.])
Gamma_R_range = np.logspace(-4, 3, 1000)
model = DQDModel(remove_elements=True)
current = np.zeros((bias_values.size, Gamma_R_range.size))
F2 = np.zeros((bias_values.size, Gamma_R_range.size))
coherence = np.zeros((bias_values.size, Gamma_R_range.size), dtype='complex')
for i,v in enumerate(bias_values):
model.bias = v
for j,Gamma_R in enumerate(Gamma_R_range):
model.Gamma_R = Gamma_R
ss = utils.stationary_state_svd(model.liouvillian(), model.density_vector_populations())
current[i,j] = model.mean(ss)
F2[i,j] = model.second_order_fano_factor(ss)
coherence[i,j] = ss[2]
np.savez('../../data/DQD_zero_freq_counting_statistics_data.npz', Gamma_R_range=Gamma_R_range, bias_values=bias_values, current=current, F2=F2, coherence=coherence)
fig,(ax1,ax2,ax3) = plt.subplots(1,3)
for i,v in enumerate(bias_values):
ax1.semilogx(Gamma_R_range, current[i], label=v)
ax2.semilogx(Gamma_R_range, F2[i], label=v)
ax3.semilogx(Gamma_R_range, np.real(coherence[i]), label=v)
ax1.legend().draggable()
ax2.legend().draggable()
ax3.legend().draggable()
plt.show()
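# The saved arrays can be reloaded later with (sketch):
# data = np.load('../../data/DQD_zero_freq_counting_statistics_data.npz')
# Gamma_R_range, current, F2 = data['Gamma_R_range'], data['current'], data['F2']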
| [
"[email protected]"
] | |
28459452020b3f9d921767c1fd75d3f868741f99 | 26f23588e80acc2b28d4cc70a8fbcf78c5b33a20 | /PythonModels/learnBasic/file_options.py | 4173a88638e76c5058927e4ba42da592ecbd3ca6 | [] | no_license | Timehsw/PythonCouldbeEverything | aa31b3e32bf68b49fe8e96b971637353a8ef644f | 85d4f1a2c93c7b1edc34ceb9e8bb3c8d7beb30e9 | refs/heads/master | 2021-01-01T15:38:25.253094 | 2018-01-22T06:49:05 | 2018-01-22T06:49:05 | 97,661,530 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # coding=utf8
__author__ = 'zenith'
#读文件
f=open("D:\data.txt","r")
#print(f.read())
#print(f.readline().strip())
#print(f.readline().strip())
for line in f.readlines():
print(line.strip())
f.close()
#文件追加内容
f=open("D:\data.txt","a")
f.write("\n超人学院")
f.close()
#文件覆盖内容
f=open("D:\data.txt","w")
f.write("\n超人学院")
f.close()
| [
"[email protected]"
] | |
4ec82c4d69562c103864beb83bc5eac587470077 | 1af49694004c6fbc31deada5618dae37255ce978 | /third_party/blink/renderer/bindings/scripts/bind_gen/__init__.py | 44c068af8ca05cd83d23acbbb3e0bc2dfd11be14 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause"
] | permissive | sadrulhc/chromium | 59682b173a00269ed036eee5ebfa317ba3a770cc | a4b950c23db47a0fdd63549cccf9ac8acd8e2c41 | refs/heads/master | 2023-02-02T07:59:20.295144 | 2020-12-01T21:32:32 | 2020-12-01T21:32:32 | 317,678,056 | 3 | 0 | BSD-3-Clause | 2020-12-01T21:56:26 | 2020-12-01T21:56:25 | null | UTF-8 | Python | false | false | 2,353 | py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import sys
# Set up |sys.path| so that this module works without user-side setup of
# PYTHONPATH assuming Chromium's directory tree structure.
def _setup_sys_path():
expected_path = 'third_party/blink/renderer/bindings/scripts/bind_gen/'
this_dir = os.path.dirname(__file__)
root_dir = os.path.abspath(
os.path.join(this_dir, *(['..'] * expected_path.count('/'))))
module_dirs = (
# //third_party/blink/renderer/bindings/scripts/web_idl
os.path.join(root_dir, 'third_party', 'blink', 'renderer', 'bindings',
'scripts'),
# //third_party/blink/renderer/build/scripts/blinkbuild
os.path.join(root_dir, 'third_party', 'blink', 'renderer', 'build',
'scripts'),
# //third_party/mako/mako
os.path.join(root_dir, 'third_party', 'mako'),
)
for module_dir in reversed(module_dirs):
# Preserve sys.path[0] as is.
# https://docs.python.org/3/library/sys.html?highlight=path[0]#sys.path
sys.path.insert(1, module_dir)
_setup_sys_path()
from .callback_function import generate_callback_functions
from .callback_interface import generate_callback_interfaces
from .dictionary import generate_dictionaries
from .enumeration import generate_enumerations
from .interface import generate_interfaces
from .namespace import generate_namespaces
from .task_queue import TaskQueue
from .union import generate_unions
def init(web_idl_database_path, root_src_dir, root_gen_dir, component_reldirs):
"""
Args:
web_idl_database_path: File path to the web_idl.Database.
root_src_dir: Project's root directory, which corresponds to "//" in GN.
root_gen_dir: Root directory of generated files, which corresponds to
"//out/Default/gen" in GN.
component_reldirs: Pairs of component and output directory.
"""
from . import package_initializer
package_initializer.init(web_idl_database_path=web_idl_database_path,
root_src_dir=root_src_dir,
root_gen_dir=root_gen_dir,
component_reldirs=component_reldirs)
| [
"[email protected]"
] | |
50d41bc04b35250d86a4adb67e67092dd7f34b51 | 34339da2c834d79c9d3142afb8c498c62fb8917d | /thenewboston_node/blockchain/tasks/debug_task.py | 5af50cb9f0a73b2cb20d0323ab22fd1023029219 | [
"MIT"
] | permissive | olegtropinin/thenewboston-node | 5abfcbe02404f7c5347af724fb06c7f6420226ba | 2de4e14ef6855646121840224a82fcfc505b213c | refs/heads/master | 2023-08-23T09:33:25.286098 | 2021-10-14T22:53:15 | 2021-10-14T22:53:15 | 417,582,617 | 0 | 0 | MIT | 2021-10-15T17:27:52 | 2021-10-15T17:27:51 | null | UTF-8 | Python | false | false | 190 | py | # TODO(dmu) HIGH: Remove this example task once real tasks are created
from celery import shared_task
@shared_task(bind=True)
def debug_task(self):
print(f'Request: {self.request!r}')
| [
"[email protected]"
] | |
9755b139b741d0c2700bb0413e958eed81d94419 | f2668e062d0c72c7e96a007f555459fecfd02ebe | /wagtail_review/__init__.py | 2a422415622321b2cc310a82fd0013f1e2c4c900 | [
"BSD-3-Clause"
] | permissive | BackBayRider/wagtail-review | 170e1f48d421ed46f56c8607756b25d495e35c6c | 45841611921d3cf67be94370e2ab6c332b0f838c | refs/heads/master | 2023-04-19T11:26:15.577124 | 2021-03-01T16:22:32 | 2021-03-01T16:23:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | default_app_config = 'wagtail_review.apps.WagtailReviewAppConfig' | [
"[email protected]"
] | |
8a2ab4c6d3a5e094042ddf4c2df9dbb5ffce65ca | 29091a32fbcbfc5c5db0b1e2a8aa344835a82f68 | /ctrlengine/sensors/__init__.py | 9e8adab2a9274c69dc8209f319d7f4752f1ae404 | [
"MIT"
] | permissive | 0xJeremy/ctrl.engine | 52b0244f42e9a7a92486ba1fcfcf2fe2fedc5631 | 19abba70df149a05edc5722cc95ceacc538448e6 | refs/heads/master | 2022-11-17T23:48:40.547073 | 2020-07-06T22:31:37 | 2020-07-06T22:31:37 | 241,662,968 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | from .camera import camera
from .realsense import realsense_camera
| [
"[email protected]"
] | |
088a093e36d31ff4a4fc4890cd0ea0a3f98a32e7 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/artificial/transf_BoxCox/trend_MovingMedian/cycle_30/ar_/test_artificial_32_BoxCox_MovingMedian_30__100.py | eaa3d461b45d44c18f2e1bbaffa799b7393f51fd | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 265 | py | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "BoxCox", sigma = 0.0, exog_count = 100, ar_order = 0); | [
"[email protected]"
] | |
f9a3518f256b925c3a31d214b721e8d53706123e | f0b741f24ccf8bfe9bd1950425d83b6291d21b10 | /backend/api/v2beta1/python_http_client/test/test_v2beta1_runtime_config.py | ea104015c08bff480428f747f9b1fe16d1dd0715 | [
"Apache-2.0"
] | permissive | kubeflow/pipelines | e678342b8a325559dec0a6e1e484c525fdcc8ce8 | 3fb199658f68e7debf4906d9ce32a9a307e39243 | refs/heads/master | 2023-09-04T11:54:56.449867 | 2023-09-01T19:07:33 | 2023-09-01T19:12:27 | 133,100,880 | 3,434 | 1,675 | Apache-2.0 | 2023-09-14T20:19:06 | 2018-05-12T00:31:47 | Python | UTF-8 | Python | false | false | 1,580 | py | # coding: utf-8
"""
Kubeflow Pipelines API
This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kfp_server_api
from kfp_server_api.models.v2beta1_runtime_config import V2beta1RuntimeConfig # noqa: E501
from kfp_server_api.rest import ApiException
class TestV2beta1RuntimeConfig(unittest.TestCase):
"""V2beta1RuntimeConfig unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test V2beta1RuntimeConfig
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = kfp_server_api.models.v2beta1_runtime_config.V2beta1RuntimeConfig() # noqa: E501
if include_optional :
return V2beta1RuntimeConfig(
parameters = {
'key' : None
},
pipeline_root = '0'
)
else :
return V2beta1RuntimeConfig(
)
def testV2beta1RuntimeConfig(self):
"""Test V2beta1RuntimeConfig"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
355ea3438068cb566e9bb686ad11c0e9bbcbe658 | 20f86ba7febb3233516f0e2161bc98604c302cc4 | /tests/acceptance/page_model/base_page.py | 165ad2e4da698093c6351465a36d33bb5fb659c4 | [] | no_license | LukaszMalucha/Machine-Learning-Dashboard | 5369270ff39710f2c6545ea0828f01265d7e797f | 3ee29498d7d74365a2cd33547795ddfe9573dac4 | refs/heads/master | 2022-12-10T05:08:52.693425 | 2019-03-14T11:15:54 | 2019-03-14T11:15:54 | 126,514,014 | 8 | 3 | null | 2022-12-08T01:33:30 | 2018-03-23T16:52:05 | Python | UTF-8 | Python | false | false | 1,023 | py | from tests.acceptance.locators.base_page import BasePageLocators
class BasePage:
def __init__(self, driver):
self.driver = driver
@property
def url(self):
return 'http://127.0.0.1:5000'
@property
def title(self):
return self.driver.find_element(*BasePageLocators.TITLE)
@property
def navigation(self):
return self.driver.find_elements(*BasePageLocators.NAV_LINKS)
@property
def dropdown(self):
return self.driver.find_element(*BasePageLocators.DROPDOWN)
@property
def dropdown_links(self):
return self.driver.find_elements(*BasePageLocators.DROPDOWN_LINKS)
@property
def table(self):
return self.driver.find_element(*BasePageLocators.TABLE)
@property
def github_user(self):
return self.driver.find_element(*BasePageLocators.GITHUB_USER)
@property
def github_repos(self):
return self.driver.find_element(*BasePageLocators.GITHUB_REPOS)
| [
"[email protected]"
] | |
fc4dd2aeebba0b006b2c867b0c71b235f777c216 | 4737df4162bee6abc7b78d1e8b4930d2cb542d6b | /graphgallery/nn/layers/pytorch/conv/dagnn.py | 24f060cea3fb5cf39695a42498b8ea286a211594 | [
"MIT"
] | permissive | freebird3366/GraphGallery | d1aa4ff291753ccf0ac4a8e024d18c59d2db8aa8 | f3294dad35ca0e14a525ed48f18feae2e9af661f | refs/heads/master | 2023-02-23T20:04:30.316450 | 2021-02-01T16:06:03 | 2021-02-01T16:06:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,228 | py | import torch
import torch.nn as nn
from graphgallery.nn.init.pytorch import uniform, zeros
from ..get_activation import get_activation
class PropConvolution(nn.Module):
def __init__(self,
in_channels,
out_channels=1,
K=10,
use_bias=False,
activation=None):
super().__init__()
assert out_channels == 1, "'out_channels' must be 1"
self.in_channels = in_channels
self.out_channels = out_channels
self.activation = get_activation(activation)
self.w = nn.Linear(in_channels, out_channels, bias=use_bias)
self.K = K
def reset_parameters(self):
self.w.reset_parameters()
def forward(self, x, adj):
propagations = [x]
for _ in range(self.K):
x = torch.spmm(adj, x)
propagations.append(x)
        h = torch.stack(propagations, dim=1)  # (N, K + 1, C)
retrain_score = self.w(h)
retrain_score = self.activation(retrain_score).permute(0, 2, 1).contiguous()
out = (retrain_score @ h).squeeze(1)
return out
def __repr__(self):
return f"{self.__class__.__name__}({self.in_channels} -> {self.out_channels})"
| [
"[email protected]"
] | |
0180fb50fcc9a71e70b3ccce51b1092d8db51019 | 09ecd5f17ff36896c141db58563de3887d3f627d | /src/accounts/forms.py | ce6f9e63345af563ce7d020d907191aa2146429a | [] | no_license | samirthapa20/tweetme | df9b43bc8be4975343a54cceebba0f259ab6a6dd | 23d77575b85f8f6ff5d8993d3bbbf3898c1e6671 | refs/heads/master | 2021-05-23T15:32:18.136662 | 2020-09-09T14:18:37 | 2020-09-09T14:18:37 | 253,362,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 953 | py | from django import forms
from django.contrib.auth import get_user_model
User = get_user_model()
class UserRegisterForm(forms.Form):
username = forms.CharField()
email = forms.EmailField()
password = forms.CharField(widget = forms.PasswordInput)
password2 = forms.CharField(widget= forms.PasswordInput)
def clean_password2(self):
password = self.cleaned_data.get('password')
password2 = self.cleaned_data.get('password2')
if password != password2:
raise forms.ValidationError('Password must match')
return password2
def clean_username(self):
username = self.cleaned_data.get('username')
        # exact, case-insensitive match — `icontains` would also reject any
        # username that merely contains this one as a substring
        if User.objects.filter(username__iexact=username).exists():
raise forms.ValidationError("This username is taken")
return username
def clean_email(self):
email = self.cleaned_data.get('email')
        # exact, case-insensitive match, for the same reason as above
        if User.objects.filter(email__iexact=email).exists():
raise forms.ValidationError("This email is already taken.")
return email
| [
"[email protected]"
] | |
7fb4f71a9ccc64dc5c65d6bf095c6e49af56ef7a | 6820e74ec72ed67f6b84a071cef9cfbc9830ad74 | /plans/migrations/0008_auto_20150401_2155.py | dbca0bd39f59f421e402b58652c15b1cbd599a57 | [
"MIT"
] | permissive | AppforallHQ/f5 | 96c15eaac3d7acc64e48d6741f26d78c9ef0d8cd | 0a85a5516e15d278ce30d1f7f339398831974154 | refs/heads/master | 2020-06-30T17:00:46.646867 | 2016-11-21T11:41:59 | 2016-11-21T11:41:59 | 74,357,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,808 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import plans.models
class Migration(migrations.Migration):
dependencies = [
('plans', '0007_auto_20150330_0046'),
]
operations = [
migrations.CreateModel(
name='ItemInvoice',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('amount', models.IntegerField(validators=[plans.models.neg_validator])),
('plan_amount', models.IntegerField(validators=[plans.models.neg_validator])),
('created_at', models.DateTimeField(auto_now_add=True)),
('paid', models.BooleanField(default=False)),
('pay_time', models.DateTimeField(null=True, blank=True)),
('invalid', models.BooleanField(default=False)),
('metadata', jsonfield.fields.JSONField()),
('generated_promo_code', models.ForeignKey(related_name='+', to='plans.PromoCode')),
('plan', models.ForeignKey(to='plans.Plan', null=True)),
('promo_code', models.ForeignKey(default=None, blank=True, to='plans.PromoCode', null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.RemoveField(
model_name='giftinvoice',
name='generated_promo_code',
),
migrations.RemoveField(
model_name='giftinvoice',
name='plan',
),
migrations.RemoveField(
model_name='giftinvoice',
name='promo_code',
),
migrations.DeleteModel(
name='GiftInvoice',
),
]
| [
"[email protected]"
] | |
04bb85ca1bdd439c34b6adc124e725772daf1bad | d22a2fbb9adb82644c5665242661bad172550552 | /venv/Scripts/easy_install-3.7-script.py | e05bf4e5e8b133bdefcaf94f129dcc7bb1a6c3db | [] | no_license | felipemanfrin/Python-Zero-ao-Zeno | e98ba3e4b974e88801b8bc947f461b125bc665b8 | d6d08aa17071f77170bbd105452b0d05586131c8 | refs/heads/master | 2022-07-29T19:38:41.729178 | 2020-05-25T01:02:18 | 2020-05-25T01:02:18 | 265,356,280 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | #!C:\Users\Felipe\PycharmProjects\666\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"[email protected]"
] | |
16093e48e8ea6d2c734900b39e59e698fffa2edc | 29bec83fc600720533ad2bcf17fc90cd9ca385b7 | /0x06-python-classes/prueba_OOP.py | 19e79b242fb1d7fd13ef39f02007e7cc9e743a28 | [] | no_license | VictorZ94/holbertonschool-higher_level_programming | 73a7f504cde583f43f641e18e692e062610870a4 | ad512a1c76dc9b4c999a0ba2922c79f56206dd98 | refs/heads/master | 2023-03-25T04:38:12.708766 | 2021-03-24T01:08:47 | 2021-03-24T01:08:47 | 291,826,914 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | #!/usr/bin/python3
class coche():
largochasis = 250
anchochasis = 120
ruedas = 4
enmarcha = False
def arrancar(self):
self.enmarcha=True
def estado(self):
if (self.enmarcha):
return "El coche está en marcha"
else:
return "El coche está parado"
micoche=coche()
print(micoche.largochasis)
print(micoche.ruedas)
# micoche.arrancar()
print(micoche.estado()) | [
"[email protected]"
] | |
bb5630d590dfe6c1e987d8698e11bff0633d156d | 7a803cd0c16ff676e3d7ecc33ec5e7af2c42d026 | /hello.py | f3123c66e088b6ec25c1b96a658855cae387ee88 | [] | no_license | emetowinner/VGG-Internship-Assignments- | ddc798da4c91572455d4f69b0a0524def13be268 | 67fa5b345b0981dd43694b72d5fc61f45d431c19 | refs/heads/master | 2020-12-15T03:40:37.321894 | 2020-03-05T00:46:39 | 2020-03-05T00:46:39 | 234,981,995 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | #Learning Integer Literals
birth_month = input('Enter birth month: ')   # input() always returns a str
if type(birth_month) == str:
    print('Not an integer')
if type(birth_month) != int:
    print('Converting to int now.........')
    birth_month = int(birth_month)   # int() returns a new value; reassign it
    print('....................')
    print('Now is of int type') | [
"[email protected]"
] | |
68d80aabd964ecc0c03b3c58dbb4409ea535aea0 | 9381d2a25adac95fab9fc4b8015aadd6c7bed6ca | /ITP1/8_A.py | cf8098d7b4993f62b0cc1f7fe90d16e436e1b142 | [] | no_license | kazuma104/AOJ | e3ca14bd31167656bcd203d4f92a43fd4045434c | d91cc3313cbfa575928787677e5ed6be63aa8acf | refs/heads/master | 2023-03-20T22:16:22.764351 | 2021-03-18T10:38:08 | 2021-03-18T10:38:08 | 262,047,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | def solve():
S = input()
T = ""
for i in range(len(S)):
if S[i] >= 'a':
T += S[i].upper()
else:
T += S[i].lower()
print(T)
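# For alphabetic input, the standard library gives the same transform in one
# call: print(S.swapcase())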
if __name__ == '__main__':
solve() | [
"[email protected]"
] | |
755c8410856fd9a634ed73e87e50ec135313c22b | 1f3bed0bb480a7d163dab73f1d315741ecbc1072 | /vtkplotter_examples/pyplot/plot7_stream.py | 04cb003af2b8799fd539ccebed6d1317312814c5 | [
"MIT"
] | permissive | ismarou/vtkplotter-examples | 1ce78197182da7496b016b27f1d5eb524c49cac6 | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | refs/heads/master | 2021-03-11T18:43:22.313457 | 2020-03-03T22:11:25 | 2020-03-03T22:11:25 | 246,551,341 | 4 | 0 | null | 2020-03-11T11:18:48 | 2020-03-11T11:18:47 | null | UTF-8 | Python | false | false | 672 | py | """Plot streamlines of the 2D field:
u(x,y) = -1 - x^2 + y
v(x,y) = 1 + x - y^2
"""
from vtkplotter import *
import numpy as np
# a grid with a vector field (U,V):
X, Y = np.mgrid[-5:5 :15j, -4:4 :15j]
U = -1 - X**2 + Y
V = 1 + X - Y**2
# optionally, pick some random points as seeds:
prob_pts = np.random.rand(200, 2)*8 - [4,4]
sp = streamplot(X,Y, U,V,
lw=0.001, # line width in abs. units
direction='forward', # 'both' or 'backward'
probes=prob_pts, # try comment out this
)
pts = Points(prob_pts, r=5, c='white')
show(sp, pts,
Text2D(__doc__, c='w'),
axes=1, bg='bb')
| [
"[email protected]"
] | |
7c945592d39eb2f6680b846f93d8f8921188613c | 0ed9a8eef1d12587d596ec53842540063b58a7ec | /cloudrail/knowledge/rules/rules_loader.py | 443b78c547b07c45631f401fffc28e2ebc664574 | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | cbc506/cloudrail-knowledge | 8611faa10a3bf195f277b81622e2590dbcc60da4 | 7b5c9030575f512b9c230eed1a93f568d8663708 | refs/heads/main | 2023-08-02T08:36:22.051695 | 2021-09-13T15:23:33 | 2021-09-13T15:24:26 | 390,127,361 | 0 | 0 | MIT | 2021-07-27T21:08:06 | 2021-07-27T21:08:06 | null | UTF-8 | Python | false | false | 1,710 | py | import functools
from typing import Dict, Optional
from cloudrail.knowledge.context.cloud_provider import CloudProvider
from cloudrail.knowledge.exceptions import UnsupportedCloudProviderException
from cloudrail.knowledge.rules.base_rule import BaseRule
from cloudrail.knowledge.rules.aws_rules_loader import AwsRulesLoader
from cloudrail.knowledge.rules.azure_rules_loader import AzureRulesLoader
from cloudrail.knowledge.rules.gcp_rules_loader import GcpRulesLoader
class RulesLoader:
@classmethod
def load(cls, cloud_provider: Optional[CloudProvider] = None) -> Dict[str, BaseRule]:
if not cloud_provider:
return {**AwsRulesLoader().load(), **AzureRulesLoader().load(), **GcpRulesLoader().load()}
if cloud_provider == CloudProvider.AMAZON_WEB_SERVICES:
return AwsRulesLoader().load()
if cloud_provider == CloudProvider.AZURE:
return AzureRulesLoader().load()
if cloud_provider == CloudProvider.GCP:
return GcpRulesLoader().load()
raise UnsupportedCloudProviderException(cloud_provider)
@classmethod
@functools.lru_cache(maxsize=None)
def get_rules_source_control_links(cls) -> Dict[str, str]:
rules = cls.load()
source_control_links = {}
for rule_id, rule in rules.items():
rule_module = rule.__module__
if not rule_module.startswith('cloudrail.knowledge'):
continue
rule_path = rule_module.replace('.', '/')
source_control_link = f'https://github.com/indeni/cloudrail-knowledge/blob/main/{rule_path}.py'
source_control_links[rule_id] = source_control_link
return source_control_links
| [
"[email protected]"
] | |
f6b845b799f3e15e52f10efd5b2ba60a4d5e1fb8 | da687718aa8ce62974090af63d25e057262e9dfe | /cap12-dicionarios/10_fromkeys_method.py | 59b1594ed08737b3f91bb025905c1d9639f0eab5 | [] | no_license | frclasso/revisao_Python_modulo1 | 77928fa4409c97d49cc7deccdf291f44c337d290 | 1e83d0ef9657440db46a8e84b136ac5f9a7c556e | refs/heads/master | 2020-06-25T05:37:28.768343 | 2019-07-27T22:23:58 | 2019-07-27T22:23:58 | 199,217,969 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | #!/usr/bin/env python3
"""retorna um novo dicionário cujas chaves são os elementos de uma sequencia e cujos
valores são todos iguais ao argumento valor.
Sintaxe: dict.fromkeys(seq[, value])
"""
seq = ['name', 'age', 'sex']
dict = dict.fromkeys(seq)
print('Novo dicionario: {}'.format(str(dict))) # nenhum valor foi definido para 'value'
# definido o valor 10 para o argumento value
# dict = dict.fromkeys(seq, 10)
# print('Novo dicionario: {}'.format(str(dict)))
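# Pitfall worth knowing: a mutable `value` argument is shared by every key,
# not copied per key:
# shared = dict.fromkeys(seq, [])
# shared['name'].append(1)   # now shared['age'] and shared['sex'] are [1] too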
| [
"[email protected]"
] | |
b2f6ba810f56fe21e915805b75b08d7c0443d9fc | 8fb7a7b4fb09ce457ad413d19191235cf4805851 | /notes code/detection of fail/object_only/scan_mark1/find_thing_on_print_bed.py | 1a6610a59ee5df617f1a2396d94b2e6a3a5120ce | [] | no_license | clambering-goat/honner-progect | df8ab2e22c223cf0f8cb59b93b132eea3d9030f2 | ea996ea34ac13867dea6d4935f9760c6915b206f | refs/heads/master | 2020-04-15T19:32:57.303438 | 2019-05-13T17:51:56 | 2019-05-13T17:51:56 | 164,954,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py |
import numpy as np
import cv2
data=np.load("data.npy")
iamge = data.astype(np.uint8)
for Ycount,y in enumerate(iamge):
for x_count,x in enumerate(y):
if iamge[Ycount][x_count]==255:
iamge[Ycount][x_count] =0
cv2.imshow("frame",iamge)
cv2.waitKey(20000)
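# Vectorized equivalent of the nested loop above (same result, no Python loop):
# iamge[iamge == 255] = 0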
| [
"[email protected]"
] | |
9179210109a8fa035ce59bb29a6582ddd74d25fd | d9d9a203a27bd28fe9afc72ecc613b186b33d673 | /06_MultipleForm/mainform.py | fe80a611cdf948f4f13e439c5959ffe08d143681 | [] | no_license | wildenali/Belajar-GUI-dengan-pyQT | 378951fcf0e172f48bf71ec46d887599cf5e09ed | 06ebbcbf57bec8a6a63fbb6d5397a7e2ab7c9ef9 | refs/heads/master | 2020-04-06T10:51:58.582049 | 2018-12-31T10:37:56 | 2018-12-31T10:37:56 | 157,395,034 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | from PyQt5.QtWidgets import QWidget, QPushButton
from otherform import *
class MainForm(QWidget):
def __init__(self):
super(MainForm, self).__init__()
self.setupUI()
def setupUI(self):
self.resize(300, 500)
self.move(400, 200)
self.setWindowTitle('Form nya ada DUA')
self.button = QPushButton('Muncuuuul kan')
self.button.move(50,50)
self.button.setParent(self)
self.button.clicked.connect(self.buttonClick)
def buttonClick(self):
self.form = OtherForm()
self.form.show()
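# Launch sketch (illustrative; this module only defines the form, so the
# QApplication bootstrap would normally live in the entry script):
# import sys
# from PyQt5.QtWidgets import QApplication
# app = QApplication(sys.argv)
# form = MainForm()
# form.show()
# sys.exit(app.exec_())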
| [
"[email protected]"
] | |
28598f36e66c74da10b429d228ad8e96cb136f00 | aaf9df2f15ec9bbfb7d98c2239db940117bc6762 | /Algorithmic-Toolbox/covering_segments/covering_segments.py | e33a1707cc450ddd489122ddb82f546ec7713987 | [
"MIT"
] | permissive | ugaliguy/Data-Structures-and-Algorithms | db50a0f4b39908d17fa125ca70c0616f52d895d2 | 4bcbd1b0cff66f442a03d06393f654f8e3a61ded | refs/heads/master | 2021-01-21T14:08:42.127708 | 2016-07-04T00:43:38 | 2016-07-04T00:43:38 | 56,821,728 | 0 | 1 | null | 2016-07-04T00:43:39 | 2016-04-22T02:54:23 | Python | UTF-8 | Python | false | false | 854 | py | # Uses python3
import sys
from collections import namedtuple
from operator import attrgetter
Segment = namedtuple('Segment', 'start end')
def optimal_points(segments):
    # Greedy strategy: scan segments sorted by right endpoint; whenever a
    # segment is not yet covered, its right endpoint is the best point to add.
    start_sort = sorted(segments, key=attrgetter('start'))
    end_sort = sorted(segments, key=attrgetter('end'))
    points = []
    minimum = start_sort[0].start - 1   # sentinel below every segment start
    for i in range(len(segments)):
        begin = end_sort[i].start
        end = end_sort[i].end
        if begin > minimum:             # not covered by the last chosen point
            points.append(end)
            minimum = end
    return points
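# Worked example (illustrative): for segments (1,3), (2,5), (3,6) the end-sorted
# pass picks point 3, which lies inside all three segments:
# assert optimal_points([Segment(1, 3), Segment(2, 5), Segment(3, 6)]) == [3]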
if __name__ == '__main__':
input = sys.stdin.read()
n, *data = map(int, input.split())
segments = list(map(lambda x: Segment(x[0], x[1]), zip(data[::2], data[1::2])))
points = optimal_points(segments)
print(len(points))
for p in points:
print(p, end=' ')
| [
"[email protected]"
] | |
4370545f8a75330aec51c5b699aada3f8df69d5c | 4e4c22dfabb1a0fa89f0f51f58737273412a30e0 | /fort_machine/wsgi.py | 640e612427bbf2c0356ea849505b08617eed3925 | [] | no_license | shaoqianliang/fort_machine | 4cb271d5ef29c924c09172ff397e2af8562ee4ba | cf7e3d4c6682831ce04bcde478930ab7e85abb01 | refs/heads/master | 2020-04-28T15:24:02.056674 | 2019-04-12T23:50:35 | 2019-04-12T23:50:35 | 175,372,042 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for fort_machine project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fort_machine.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
fe7d0f99e3ae6e1f339a1cd8e4642a724e9016f7 | 1b1e8e73649ad1eed89556a5d479b0a549354fd5 | /opennem/db/migrations/versions/4bf86ff5c8ff_update_indicies_that_aren_t_used.py | a7a46b84637e88e727f1fe594938c21feb0ebb3f | [
"MIT"
] | permissive | zalihat/opennem | 3ea8db7246f350fb0eacf8c6078dbffa4fe9aea2 | 0f82e4fc3fd2bcfbf56a2741d89e4228d017dcf3 | refs/heads/master | 2023-02-27T15:37:47.206336 | 2021-02-08T07:28:57 | 2021-02-08T07:28:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,488 | py | # pylint: disable=no-member
"""
update indicies that aren't used
Revision ID: 4bf86ff5c8ff
Revises: 64987ea01b57
Create Date: 2020-11-23 02:54:29.564574
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "4bf86ff5c8ff"
down_revision = "64987ea01b57"
branch_labels = None
depends_on = None
def upgrade():
op.drop_index("idx_facility_scada_trading_interval_perth_year")
op.drop_index("idx_facility_scada_trading_interval_perth_month")
op.drop_index("idx_facility_scada_trading_interval_perth_day")
op.drop_index("idx_facility_scada_trading_interval_perth_hour")
op.drop_index("idx_balancing_summary_trading_interval_perth_year")
op.drop_index("idx_balancing_summary_trading_interval_perth_month")
op.drop_index("idx_balancing_summary_trading_interval_perth_day")
op.drop_index("idx_balancing_summary_trading_interval_perth_hour")
op.drop_index("idx_facility_scada_trading_interval_sydney_year")
op.drop_index("idx_facility_scada_trading_interval_sydney_month")
op.drop_index("idx_facility_scada_trading_interval_sydney_day")
op.drop_index("idx_facility_scada_trading_interval_sydney_hour")
op.drop_index("idx_balancing_summary_trading_interval_sydney_year")
op.drop_index("idx_balancing_summary_trading_interval_sydney_month")
op.drop_index("idx_balancing_summary_trading_interval_sydney_day")
op.drop_index("idx_balancing_summary_trading_interval_sydney_hour")
def downgrade():
pass
| [
"[email protected]"
] | |
a99c4d3cb68c551d8ecf9d307608d40a13d95cd8 | 7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0 | /0701-0800/0716-Max Stack/0716-Max Stack.py | 698d5125b3a7f7ace31abc4055e1c827e468fd5e | [
"MIT"
] | permissive | jiadaizhao/LeetCode | be31bd0db50cc6835d9c9eff8e0175747098afc6 | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | refs/heads/master | 2021-11-05T04:38:47.252590 | 2021-10-31T09:54:53 | 2021-10-31T09:54:53 | 99,655,604 | 52 | 28 | MIT | 2020-10-02T12:47:47 | 2017-08-08T05:57:26 | C++ | UTF-8 | Python | false | false | 967 | py | class MaxStack:
def __init__(self):
"""
initialize your data structure here.
"""
self.St = []
self.maxSt = []
def push(self, x: int) -> None:
self.St.append(x)
maxX = x if not self.maxSt or x > self.maxSt[-1] else self.maxSt[-1]
self.maxSt.append(maxX)
def pop(self) -> int:
self.maxSt.pop()
return self.St.pop()
def top(self) -> int:
return self.St[-1]
def peekMax(self) -> int:
return self.maxSt[-1]
def popMax(self) -> int:
maxX = self.maxSt[-1]
buffer = []
while self.St[-1] != maxX:
buffer.append(self.pop())
self.pop()
while buffer:
self.push(buffer.pop())
return maxX
# Your MaxStack object will be instantiated and called as such:
# obj = MaxStack()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.top()
# param_4 = obj.peekMax()
# param_5 = obj.popMax()
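# Complexity: push/pop/top/peekMax are O(1); popMax is O(n) because it unwinds
# the stack into `buffer` until it reaches the max. Quick self-check
# (hypothetical values):
# st = MaxStack()
# for v in (5, 1, 5): st.push(v)
# assert st.popMax() == 5 and st.top() == 1 and st.peekMax() == 5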
| [
"[email protected]"
] | |
ffaa35bbff6e5594111a59aeed63bc26897a2692 | 0b12e31cafa598c163d2cc53706df193a73e31e3 | /people/models.py | 65c21cc6090d290ebf5ac91ed163dedd5de88207 | [] | no_license | getopen/pro | 6a4dba774558e1de0419a4c6daf030ee360d68fd | 97e939d26d9fdaf54f05f3cd4a9b32a6722d0ac3 | refs/heads/master | 2021-07-06T09:35:18.077577 | 2017-09-30T16:07:06 | 2017-09-30T16:07:06 | 100,471,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,517 | py | from django.db import models
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser
from django.utils import timezone
import hashlib
import random
import string
from django.conf import settings
SALT = getattr(settings, "EMAIL_TOKEN_SALT")
class MyUserManager(BaseUserManager):
def create_user(self, username, email, password=None):
        if not email:
            raise ValueError('Users must have an email address')
        if not username:
            raise ValueError('Users must have a username')
        # both an email and a username are required
        now = timezone.now()
        # current time via Django's timezone utilities
user = self.model(
username=username,
email=self.normalize_email(email),
date_joined=now,
last_login=now,
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, username, email, password):
user = self.create_user(username,
email,
password=password,
)
user.is_admin = True
user.save(using=self._db)
return user
# new-style user model
class Member(AbstractBaseUser):
    # AbstractBaseUser itself only provides three fields: password, last_login and is_active.
email = models.EmailField(verbose_name='邮箱',max_length=255,unique=True,)
username = models.CharField(verbose_name="用户名", max_length=16, unique=True)
weibo_id = models.CharField(verbose_name="新浪微博", max_length=30, blank=True)
blog = models.CharField(verbose_name="个人网站", max_length=200, blank=True)
location = models.CharField(verbose_name="城市", max_length=10, blank=True)
profile = models.CharField(verbose_name="个人简介", max_length=140, blank=True)
avatar = models.CharField(verbose_name="头像", max_length=128, blank=True)
au = models.IntegerField(verbose_name="用户活跃度", default=0)
last_ip = models.GenericIPAddressField(verbose_name="上次访问IP", default="0.0.0.0")
email_verified = models.BooleanField(verbose_name="邮箱是否验证", default=False)
date_joined = models.DateTimeField(verbose_name="用户注册时间", default=timezone.now)
topic_num = models.IntegerField(verbose_name="帖子数", default=0)
comment_num = models.IntegerField(verbose_name="评论数", default=0)
is_active = models.BooleanField(default=True)
is_admin = models.BooleanField(default=False)
objects = MyUserManager()
    # `objects` is the manager we have been using all along;
    # managers implement create/read/update/delete operations
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
def __str__(self):
return self.username
    # instance helpers callable from template tags
def is_email_verified(self):
return self.email_verified
    # can be called on the instance inside a template to check whether the email is verified
def get_weibo(self):
return self.weibo_id
def get_username(self):
return self.username
    # parentheses must be omitted when calling methods in template tags!
def get_email(self):
return self.email
    # parentheses must be omitted when calling methods in template tags!
def get_full_name(self):
# The user is identified by their email address
return self.email
    # get_full_name normally returns first_name plus last_name,
    # but since we redefined the table structure it must be customized here
    # parentheses must be omitted when calling methods in template tags!
def get_short_name(self):
# The user is identified by their email address
return self.username
    # get_short_name normally returns first_name,
    # but since we redefined the table structure it must be customized here
    # parentheses must be omitted when calling methods in template tags!
def has_perm(self, perm, obj=None):
"Does the user have a specific permission?"
return True
def has_module_perms(self, app_label):
"Does the user have permissions to view the app `app_label`?"
return True
def calculate_au(self):
"""
计算活跃度
公式:Topic * 5 + Comment * 1
"""
self.au = self.topic_num * 5 + self.comment_num * 1
return self.au
@property
    # @property exposes this method as a read-only attribute
def is_staff(self):
"Is the user a member of staff?"
# Simplest possible answer: All admins are staff
return self.is_admin
class Follower(models.Model):
"""
用户的关系表
B is the follower of A
B 是 A 的关注者
A 被 B 关注
"""
user_a = models.ForeignKey(Member, related_name="user_a",verbose_name='偶像')
user_b = models.ForeignKey(Member, related_name="user_b",verbose_name='粉丝')
date_followed = models.DateTimeField(default=timezone.now,verbose_name='关注时间')
class Meta:
unique_together = ('user_a', 'user_b')
def __str__(self):
return "%s following %s" % (self.user_b, self.user_a)
class EmailVerified(models.Model):
user = models.OneToOneField(Member, related_name="user")
token = models.CharField("Email 验证 token", max_length=32, default=None)
timestamp = models.DateTimeField(default=timezone.now)
def __str__(self):
return "%s@%s" % (self.user, self.token)
def generate_token(self):
year = self.timestamp.year
month = self.timestamp.month
day = self.timestamp.day
date = "%s-%s-%s" % (year, month, day)
token = hashlib.md5((self.ran_str()+date).encode('utf-8')).hexdigest()
return token
def ran_str(self):
salt = ''.join(random.sample(string.ascii_letters + string.digits, 8))
return salt + SALT
class FindPass(models.Model):
user = models.OneToOneField(Member, verbose_name="用户")
token = models.CharField(max_length=32, blank=True)
timestamp = models.DateTimeField(default=timezone.now)
def __str__(self):
return "%s@%s" % (self.user, self.token)
def generate_token(self):
year = self.timestamp.year
month = self.timestamp.month
day = self.timestamp.day
date = "%s-%s-%s" % (year, month, day)
token = hashlib.md5((self.ran_str()+date).encode('utf-8')).hexdigest()
return token
def ran_str(self):
salt = ''.join(random.sample(string.ascii_letters + string.digits, 8))
return salt + SALT
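# Token-flow sketch (illustrative; SALT comes from settings.EMAIL_TOKEN_SALT):
# ev = EmailVerified(user=some_member)   # `some_member` is a hypothetical Member
# ev.token = ev.generate_token()         # md5(random salt + timestamp date)
# ev.save()
# # the emailed link carries ev.token; the view compares it on click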
| [
"[email protected]"
] | |
954e1a81cae9daf62bf9cb9cf0f83299c3e8a038 | 8b942cbd6a0da0a61f68c468956ba318c7f1603d | /dynamic_programming/0053_maximum_subarray.py | 4ed3786dd1ebedf430bdbe2dfaceed01c1a79c9e | [
"MIT"
] | permissive | MartinMa28/Algorithms_review | 080bd608b0e0c6f39c45f28402e5181791af4766 | 3f2297038c00f5a560941360ca702e6868530f34 | refs/heads/master | 2022-04-13T03:56:56.932788 | 2020-04-06T03:41:33 | 2020-04-06T03:41:33 | 203,349,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | class Solution:
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
# Each slot means the sum of the max-subarray that ends at this index
dp = [float('-inf')] * len(nums)
"""
dp[i] = max((nums[i], dp[i - 1] + nums[i]))
"""
dp[0] = nums[0]
for i in range(1, len(nums)):
dp[i] = max((nums[i], dp[i - 1] + nums[i]))
return max(dp) | [
"[email protected]"
] | |
a399301c523887d5bcc02002c2d2c1ac09e638a1 | 07cf86733b110a13224ef91e94ea5862a8f5d0d5 | /permutations/permutations.py | 9adc376b32f889d512681c06e31fc88b05902f97 | [] | no_license | karsevar/Code_Challenge_Practice | 2d96964ed2601b3beb324d08dd3692c3d566b223 | 88d4587041a76cfd539c0698771420974ffaf60b | refs/heads/master | 2023-01-23T17:20:33.967020 | 2020-12-14T18:29:49 | 2020-12-14T18:29:49 | 261,813,079 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,179 | py | # First attempt: most test cases didn't pass. Perhaps I misread the
# permutation requirements for this problem.
class Solution:
def permute(self, nums):
# create a permutations array that will hold all the possible
# permutations
# create a recursive function that will have a start argument, nums argument,
# and a permutation argument
# if permutation is equal to the length len(nums) and not none
# add permutation to the permutations array
        # if permutation is less than the length of len(nums)
# have a for loop that will start at range(start, len(nums) + 1)
# recursively call the recursive function
permutations = []
nums_length = len(nums)
def permutation_helper(nums, nums_length, permutation=None, variable_exclude=None):
if permutation != None and len(permutation) == nums_length:
permutations.append(permutation)
elif permutation == None or len(permutation) < nums_length:
for number in nums:
if permutation == None:
new_permutation = []
variable_exclude = number
new_permutation.append(number)
permutation_helper(nums, nums_length, new_permutation, variable_exclude)
elif permutation != None and variable_exclude != number and number != permutation[-1]:
new_permutation = permutation[:]
new_permutation.append(number)
permutation_helper(nums, nums_length, new_permutation, variable_exclude)
permutation_helper(nums, nums_length)
return permutations
class OfficialSolution:
def permute(self, nums):
        # Swap-based backtracking: fix one index at a time. For each index,
        # swap every remaining candidate into that slot, recurse on the next
        # index, then swap back to undo. When index reaches the end of the
        # list, the current ordering is one complete permutation.
permutations = []
nums_length = len(nums)
def permutation_helper(index, perm, nums_length):
if index == len(perm):
permutations.append(list(perm))
for i in range(index, len(perm)):
perm[index], perm[i] = perm[i], perm[index]
permutation_helper(index+1, perm, nums_length)
perm[index], perm[i] = perm[i], perm[index]
permutation_helper(0, nums, nums_length)
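        # Hedged example of the result (the driver line is hypothetical; the
        # ordering follows the swap order above):
        #   OfficialSolution().permute([1, 2, 3]) ->
        #   [[1, 2, 3], [1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 2, 1], [3, 1, 2]]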
return permutations | [
"[email protected]"
] | |
257aa8d1f68e6c7580b34aa3188372ce47c07185 | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/nos/v7_2_0/rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/__init__.py | d3f779d9b38079c6a063f8e88fe16bb0211a6131 | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,029 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class maximum_paths(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-rbridge - based on the path /rbridge-id/router/router-bgp/address-family/ipv4/ipv4-unicast/default-vrf/af-common-cmds-holder/maximum-paths. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__load_sharing_value','__ebgp','__ibgp','__use_load_sharing',)
_yang_name = 'maximum-paths'
_rest_name = 'maximum-paths'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ibgp = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ibgp", rest_name="ibgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of IBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ibgp-paths', is_config=True)
self.__load_sharing_value = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="load-sharing-value", rest_name="load-sharing-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)
self.__ebgp = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ebgp", rest_name="ebgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of EBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ebgp-paths', is_config=True)
self.__use_load_sharing = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="use-load-sharing", rest_name="use-load-sharing", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Number of load-sharing paths: using load-sharing value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'rbridge-id', u'router', u'router-bgp', u'address-family', u'ipv4', u'ipv4-unicast', u'default-vrf', u'af-common-cmds-holder', u'maximum-paths']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'rbridge-id', u'router', u'bgp', u'address-family', u'ipv4', u'unicast', u'maximum-paths']
def _get_load_sharing_value(self):
"""
Getter method for load_sharing_value, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/load_sharing_value (uint32)
"""
return self.__load_sharing_value
def _set_load_sharing_value(self, v, load=False):
"""
Setter method for load_sharing_value, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/load_sharing_value (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_load_sharing_value is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_load_sharing_value() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="load-sharing-value", rest_name="load-sharing-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """load_sharing_value must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="load-sharing-value", rest_name="load-sharing-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)""",
})
self.__load_sharing_value = t
if hasattr(self, '_set'):
self._set()
def _unset_load_sharing_value(self):
self.__load_sharing_value = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="load-sharing-value", rest_name="load-sharing-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='uint32', is_config=True)
def _get_ebgp(self):
"""
Getter method for ebgp, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/ebgp (ebgp-paths)
"""
return self.__ebgp
def _set_ebgp(self, v, load=False):
"""
Setter method for ebgp, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/ebgp (ebgp-paths)
If this variable is read-only (config: false) in the
source YANG file, then _set_ebgp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ebgp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ebgp", rest_name="ebgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of EBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ebgp-paths', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ebgp must be of a type compatible with ebgp-paths""",
'defined-type': "brocade-bgp:ebgp-paths",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ebgp", rest_name="ebgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of EBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ebgp-paths', is_config=True)""",
})
self.__ebgp = t
if hasattr(self, '_set'):
self._set()
def _unset_ebgp(self):
self.__ebgp = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ebgp", rest_name="ebgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of EBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ebgp-paths', is_config=True)
def _get_ibgp(self):
"""
Getter method for ibgp, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/ibgp (ibgp-paths)
"""
return self.__ibgp
def _set_ibgp(self, v, load=False):
"""
Setter method for ibgp, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/ibgp (ibgp-paths)
If this variable is read-only (config: false) in the
source YANG file, then _set_ibgp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ibgp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ibgp", rest_name="ibgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of IBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ibgp-paths', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ibgp must be of a type compatible with ibgp-paths""",
'defined-type': "brocade-bgp:ibgp-paths",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ibgp", rest_name="ibgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of IBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ibgp-paths', is_config=True)""",
})
self.__ibgp = t
if hasattr(self, '_set'):
self._set()
def _unset_ibgp(self):
self.__ibgp = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..32']}), is_leaf=True, yang_name="ibgp", rest_name="ibgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Number of IBGP paths for load sharing', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='ibgp-paths', is_config=True)
def _get_use_load_sharing(self):
"""
Getter method for use_load_sharing, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/use_load_sharing (empty)
"""
return self.__use_load_sharing
def _set_use_load_sharing(self, v, load=False):
"""
Setter method for use_load_sharing, mapped from YANG variable /rbridge_id/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths/use_load_sharing (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_use_load_sharing is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_use_load_sharing() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="use-load-sharing", rest_name="use-load-sharing", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Number of load-sharing paths: using load-sharing value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """use_load_sharing must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="use-load-sharing", rest_name="use-load-sharing", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Number of load-sharing paths: using load-sharing value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__use_load_sharing = t
if hasattr(self, '_set'):
self._set()
def _unset_use_load_sharing(self):
self.__use_load_sharing = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="use-load-sharing", rest_name="use-load-sharing", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Number of load-sharing paths: using load-sharing value'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
load_sharing_value = __builtin__.property(_get_load_sharing_value, _set_load_sharing_value)
ebgp = __builtin__.property(_get_ebgp, _set_ebgp)
ibgp = __builtin__.property(_get_ibgp, _set_ibgp)
use_load_sharing = __builtin__.property(_get_use_load_sharing, _set_use_load_sharing)
_pyangbind_elements = {'load_sharing_value': load_sharing_value, 'ebgp': ebgp, 'ibgp': ibgp, 'use_load_sharing': use_load_sharing, }
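# Hedged usage sketch for the generated class above (pyangbind-style API; the
# numeric values are hypothetical, the properties are the ones defined here):
#   mp = maximum_paths()
#   mp.ebgp = 8    # ebgp-paths, restricted to the range 1..32
#   mp.ibgp = 4    # ibgp-paths, restricted to the range 1..32
#   mp.ebgp = 64   # raises ValueError: outside the declared 1..32 range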
| [
"[email protected]"
] | |
588ff9f9d1fd2b83d89b92f998ad98b57b5b6142 | ec513ac551fc0bbb6c8af5b30330445bf52c6c7f | /location_monitor/src/location_monitor_node.py | e907ab747f1eb280bbd66076673f3279e2518249 | [] | no_license | ChuChuIgbokwe/me495_tutorials | b88c4833f35e50b51a4ccaa1a4bae5a1916e12bf | b03e74605cf469d818c4533f3d563622e7d14552 | refs/heads/master | 2020-04-06T07:06:08.360123 | 2016-09-18T08:46:01 | 2016-09-18T08:46:01 | 64,951,342 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,896 | py | #!/usr/bin/env python
# # -*- coding: utf-8 -*-
# #Created by Chukwunyere Igbokwe on July 27, 2016 by 2:23 PM
# import rospy
# import math
# from nav_msgs.msg import Odometry
# from location_monitor.msg import LandmarkDistance
# def distance(x1, y1, x2, y2):
# xd = x1 - x2
# yd = y1 - y2
# return math.sqrt(xd*xd + yd*yd)
# class LandmarkMonitor(object):
# def __init__(self,landmark_pub, landmarks):
# self._landmark_pub = landmark_pub
# self._landmarks = landmarks
# def callback(self,msg):
# x = msg.pose.pose.position.x
# y = msg.pose.pose.position.y
# # rospy.loginfo("x: {}, y: {}".format(x,y))
# closest_name = None
# closest_distance = None
# for l_name,l_x, l_y in self._landmarks:
# dist = distance(x, y, l_x, l_y)
# if closest_distance is None or dist < closest_distance:
# closest_name = l_name
# closest_distance = dist
# ld = LandmarkDistance()
# ld.name = closest_name
# ld.distance = closest_distance
# self._landmark_pub.publish(ld)
# if closest_distance < 0.5:
# rospy.loginfo("I'm near the {}".format(closest_name))
# # rospy.loginfo("closest : {}".format(closest_name))
# def main():
# rospy.init_node('location_monitor_node')
# landmarks = []
# landmarks.append(("Cube", 0.31, -0.99));
# landmarks.append(("Dumpster", 0.11, -2.42));
# landmarks.append(("Cylinder", -1.14, -2.88));
# landmarks.append(("Barrier", -2.59, -0.83));
# landmarks.append(("Bookshelf", -0.09, 0.53));
# landmark_pub = rospy.Publisher("closest_landmark", LandmarkDistance, queue_size=10)
# monitor = LandmarkMonitor(landmark_pub,landmarks)
# rospy.Subscriber("/odom", Odometry, monitor.callback)
# try:
# rospy.spin()
# except KeyboardInterrupt:
# print("Shutting down")
# if __name__ == '__main__':
# main()
# Your Python node and your package/message should always have different names.
import rospy
from nav_msgs.msg import Odometry
import math
landmarks = []
landmarks.append(("Cube",0.31,-0.99));
landmarks.append(("Dumpster", 0.11,-2.42));
landmarks.append(("Cylinder", -1.14,-2.88));
landmarks.append(("Barrier", -2.59,-0.83));
landmarks.append(("Bookshelf", -0.09, 0.53));
def distance(x1, y1, x2, y2):
xd = x1 - x2
yd = y1 - y2
return math.sqrt(xd*xd + yd*yd)
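# Hedged check of the helper above (the coordinates are hypothetical):
#   distance(0.0, 0.0, 3.0, -4.0)  # -> 5.0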
def callback(msg):
x = msg.pose.pose.position.x
y = msg.pose.pose.position.y
# rospy.loginfo("x: {}, y: {}".format(x,y))
closest_name = None
closest_distance = None
for l_name,l_x, l_y in landmarks:
dist = distance(x, y, l_x, l_y)
if closest_distance is None or dist < closest_distance:
closest_name = l_name
closest_distance = dist
rospy.loginfo("Landmark: {} || Distance: {}".format(closest_name,closest_distance))
def main():
rospy.init_node('location_monitor')
rospy.Subscriber("/odom", Odometry, callback)
rospy.spin()
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
6ae54d9ccc133969c04088f132d6ef7883c2e260 | 3d9825900eb1546de8ad5d13cae893eb0d6a9b14 | /AutoWorkup/SEMTools/utilities/brains.py | a9f06b8bfcc070f02a886a1a7dbbda143a65d219 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | rtkarcher/BRAINSTools | 20d69f96e6d5ca92adaeb06aa4fe6556b5e7b268 | 961135366450400409cece431423ed480855d34c | refs/heads/master | 2021-01-15T08:53:48.961607 | 2013-06-26T19:09:34 | 2013-06-26T19:09:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39,809 | py | # -*- coding: utf8 -*-
"""Autogenerated file - DO NOT EDIT
If you spot a bug, please report it on the mailing list and/or change the generator."""
from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
import os
class BRAINSConstellationModelerInputSpec(CommandLineInputSpec):
verbose = traits.Bool(desc=", Show more verbose output, ", argstr="--verbose ")
inputTrainingList = File(desc=", Setup file, giving all parameters for training up a template model for each landmark., ", exists=True, argstr="--inputTrainingList %s")
outputModel = traits.Either(traits.Bool, File(), hash_files=False, desc=", The full filename of the output model file., ", argstr="--outputModel %s")
saveOptimizedLandmarks = traits.Bool(desc=", Flag to make a new subject-specific landmark definition file in the same format produced by Slicer3 with the optimized landmark (the detected RP, AC, and PC) in it. Useful to tighten the variances in the ConstellationModeler., ", argstr="--saveOptimizedLandmarks ")
optimizedLandmarksFilenameExtender = traits.Str(desc=", If the trainingList is (indexFullPathName) and contains landmark data filenames [path]/[filename].fcsv , make the optimized landmarks filenames out of [path]/[filename](thisExtender) and the optimized version of the input trainingList out of (indexFullPathName)(thisExtender) , when you rewrite all the landmarks according to the saveOptimizedLandmarks flag., ", argstr="--optimizedLandmarksFilenameExtender %s")
resultsDir = traits.Either(traits.Bool, Directory(), hash_files=False, desc=", The directory for the results to be written., ", argstr="--resultsDir %s")
    mspQualityLevel = traits.Int(desc=", Flag controls how aggressively the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", argstr="--mspQualityLevel %d")
rescaleIntensities = traits.Bool(desc=", Flag to turn on rescaling image intensities on input., ", argstr="--rescaleIntensities ")
trimRescaledIntensities = traits.Float(desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", argstr="--trimRescaledIntensities %f")
rescaleIntensitiesOutputRange = InputMultiPath(
traits.Int, desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", argstr="--rescaleIntensitiesOutputRange %s")
BackgroundFillValue = traits.Str(desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s")
writedebuggingImagesLevel = traits.Int(desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", argstr="--writedebuggingImagesLevel %d")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSConstellationModelerOutputSpec(TraitedSpec):
outputModel = File(desc=", The full filename of the output model file., ", exists=True)
resultsDir = Directory(desc=", The directory for the results to be written., ", exists=True)
class BRAINSConstellationModeler(SEMLikeCommandLine):
"""title: Generate Landmarks Model (BRAINS)
category: Utilities.BRAINS
description: Train up a model for BRAINSConstellationDetector
"""
input_spec = BRAINSConstellationModelerInputSpec
output_spec = BRAINSConstellationModelerOutputSpec
_cmd = " BRAINSConstellationModeler "
_outputs_filenames = {'outputModel': 'outputModel.mdl', 'resultsDir': 'resultsDir'}
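# Hedged usage sketch for the SEM-style interfaces in this module (standard
# nipype convention; the file name is hypothetical, the trait names come from
# the spec above). The same pattern applies to every class defined below:
#   modeler = BRAINSConstellationModeler()
#   modeler.inputs.inputTrainingList = 'training_list.txt'
#   modeler.inputs.outputModel = True   # auto-named 'outputModel.mdl'
#   result = modeler.run()              # shells out to the CLI tool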
class landmarksConstellationWeightsInputSpec(CommandLineInputSpec):
inputTrainingList = File(desc=", Setup file, giving all parameters for training up a Weight list for landmark., ", exists=True, argstr="--inputTrainingList %s")
inputTemplateModel = File(desc="User-specified template model., ", exists=True, argstr="--inputTemplateModel %s")
    LLSModel = File(desc="Linear least squares model filename in HDF5 format", exists=True, argstr="--LLSModel %s")
outputWeightsList = traits.Either(traits.Bool, File(), hash_files=False, desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", argstr="--outputWeightsList %s")
class landmarksConstellationWeightsOutputSpec(TraitedSpec):
outputWeightsList = File(desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", exists=True)
class landmarksConstellationWeights(SEMLikeCommandLine):
"""title: Generate Landmarks Weights (BRAINS)
category: Utilities.BRAINS
description: Train up a list of Weights for the Landmarks in BRAINSConstellationDetector
"""
input_spec = landmarksConstellationWeightsInputSpec
output_spec = landmarksConstellationWeightsOutputSpec
_cmd = " landmarksConstellationWeights "
_outputs_filenames = {'outputWeightsList': 'outputWeightsList.wts'}
class BRAINSTrimForegroundInDirectionInputSpec(CommandLineInputSpec):
inputVolume = File(desc="Input image to trim off the neck (and also air-filling noise.)", exists=True, argstr="--inputVolume %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", argstr="--outputVolume %s")
directionCode = traits.Int(desc=", This flag chooses which dimension to compare. The sign lets you flip direction., ", argstr="--directionCode %d")
otsuPercentileThreshold = traits.Float(desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed to trim off air-filling noise., ", argstr="--otsuPercentileThreshold %f")
closingSize = traits.Int(desc=", This is a parameter to FindLargestForegroundFilledMask, ", argstr="--closingSize %d")
headSizeLimit = traits.Float(desc=", Use this to vary from the command line our search for how much upper tissue is head for the center-of-mass calculation. Units are CCs, not cubic millimeters., ", argstr="--headSizeLimit %f")
BackgroundFillValue = traits.Str(desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSTrimForegroundInDirectionOutputSpec(TraitedSpec):
outputVolume = File(desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", exists=True)
class BRAINSTrimForegroundInDirection(SEMLikeCommandLine):
"""title: Trim Foreground In Direction (BRAINS)
category: Utilities.BRAINS
description: This program will trim off the neck and also air-filling noise from the inputImage.
version: 0.1
documentation-url: http://www.nitrc.org/projects/art/
"""
input_spec = BRAINSTrimForegroundInDirectionInputSpec
output_spec = BRAINSTrimForegroundInDirectionOutputSpec
_cmd = " BRAINSTrimForegroundInDirection "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
class BRAINSLmkTransformInputSpec(CommandLineInputSpec):
inputMovingLandmarks = File(desc="Input Moving Landmark list file in fcsv, ", exists=True, argstr="--inputMovingLandmarks %s")
inputFixedLandmarks = File(desc="Input Fixed Landmark list file in fcsv, ", exists=True, argstr="--inputFixedLandmarks %s")
outputAffineTransform = traits.Either(traits.Bool, File(), hash_files=False, desc="The filename for the estimated affine transform, ", argstr="--outputAffineTransform %s")
inputMovingVolume = File(desc="The filename of input moving volume", exists=True, argstr="--inputMovingVolume %s")
inputReferenceVolume = File(desc="The filename of the reference volume", exists=True, argstr="--inputReferenceVolume %s")
outputResampledVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="The filename of the output resampled volume", argstr="--outputResampledVolume %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSLmkTransformOutputSpec(TraitedSpec):
outputAffineTransform = File(desc="The filename for the estimated affine transform, ", exists=True)
outputResampledVolume = File(desc="The filename of the output resampled volume", exists=True)
class BRAINSLmkTransform(SEMLikeCommandLine):
"""title: Landmark Transform (BRAINS)
category: Utilities.BRAINS
description:
This utility program estimates the affine transform to align the fixed landmarks to the moving landmarks, and then generates the resampled moving image in the same physical space as that of the reference image.
version: 1.0
documentation-url: http://www.nitrc.org/projects/brainscdetector/
"""
input_spec = BRAINSLmkTransformInputSpec
output_spec = BRAINSLmkTransformOutputSpec
_cmd = " BRAINSLmkTransform "
_outputs_filenames = {'outputResampledVolume': 'outputResampledVolume.nii', 'outputAffineTransform': 'outputAffineTransform.h5'}
class BRAINSMushInputSpec(CommandLineInputSpec):
inputFirstVolume = File(desc="Input image (1) for mixture optimization", exists=True, argstr="--inputFirstVolume %s")
inputSecondVolume = File(desc="Input image (2) for mixture optimization", exists=True, argstr="--inputSecondVolume %s")
inputMaskVolume = File(desc="Input label image for mixture optimization", exists=True, argstr="--inputMaskVolume %s")
outputWeightsFile = traits.Either(traits.Bool, File(), hash_files=False, desc="Output Weights File", argstr="--outputWeightsFile %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="The MUSH image produced from the T1 and T2 weighted images", argstr="--outputVolume %s")
outputMask = traits.Either(traits.Bool, File(), hash_files=False, desc="The brain volume mask generated from the MUSH image", argstr="--outputMask %s")
seed = InputMultiPath(traits.Int, desc="Seed Point for Brain Region Filling", sep=",", argstr="--seed %s")
desiredMean = traits.Float(desc="Desired mean within the mask for weighted sum of both images.", argstr="--desiredMean %f")
desiredVariance = traits.Float(desc="Desired variance within the mask for weighted sum of both images.", argstr="--desiredVariance %f")
lowerThresholdFactorPre = traits.Float(desc="Lower threshold factor for finding an initial brain mask", argstr="--lowerThresholdFactorPre %f")
upperThresholdFactorPre = traits.Float(desc="Upper threshold factor for finding an initial brain mask", argstr="--upperThresholdFactorPre %f")
lowerThresholdFactor = traits.Float(desc="Lower threshold factor for defining the brain mask", argstr="--lowerThresholdFactor %f")
upperThresholdFactor = traits.Float(desc="Upper threshold factor for defining the brain mask", argstr="--upperThresholdFactor %f")
boundingBoxSize = InputMultiPath(traits.Int, desc="Size of the cubic bounding box mask used when no brain mask is present", sep=",", argstr="--boundingBoxSize %s")
boundingBoxStart = InputMultiPath(traits.Int, desc="XYZ point-coordinate for the start of the cubic bounding box mask used when no brain mask is present", sep=",", argstr="--boundingBoxStart %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSMushOutputSpec(TraitedSpec):
outputWeightsFile = File(desc="Output Weights File", exists=True)
outputVolume = File(desc="The MUSH image produced from the T1 and T2 weighted images", exists=True)
outputMask = File(desc="The brain volume mask generated from the MUSH image", exists=True)
class BRAINSMush(SEMLikeCommandLine):
"""title: Brain Extraction from T1/T2 image (BRAINS)
category: Utilities.BRAINS
description:
This program: 1) generates a weighted mixture image optimizing the mean and variance and 2) produces a mask of the brain volume
version: 0.1.0.$Revision: 1.4 $(alpha)
documentation-url: http://mri.radiology.uiowa.edu
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor:
This tool is a modification by Steven Dunn of a program developed by Greg Harris and Ron Pierson.
acknowledgements:
This work was developed by the University of Iowa Departments of Radiology and Psychiatry. This software was supported in part by NIH/NINDS award NS050568.
"""
input_spec = BRAINSMushInputSpec
output_spec = BRAINSMushOutputSpec
_cmd = " BRAINSMush "
_outputs_filenames = {'outputMask': 'outputMask.nii.gz', 'outputWeightsFile': 'outputWeightsFile.txt', 'outputVolume': 'outputVolume.nii.gz'}
class BRAINSAlignMSPInputSpec(CommandLineInputSpec):
inputVolume = File(desc=", The Image to be resampled, ", exists=True, argstr="--inputVolume %s")
OutputresampleMSP = traits.Either(traits.Bool, File(), hash_files=False, desc=", The image to be output., ", argstr="--OutputresampleMSP %s")
verbose = traits.Bool(desc=", Show more verbose output, ", argstr="--verbose ")
resultsDir = traits.Either(traits.Bool, Directory(), hash_files=False, desc=", The directory for the results to be written., ", argstr="--resultsDir %s")
writedebuggingImagesLevel = traits.Int(desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", argstr="--writedebuggingImagesLevel %d")
    mspQualityLevel = traits.Int(desc=", Flag controls how aggressively the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", argstr="--mspQualityLevel %d")
rescaleIntensities = traits.Bool(desc=", Flag to turn on rescaling image intensities on input., ", argstr="--rescaleIntensities ")
trimRescaledIntensities = traits.Float(desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", argstr="--trimRescaledIntensities %f")
rescaleIntensitiesOutputRange = InputMultiPath(traits.Int, desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", argstr="--rescaleIntensitiesOutputRange %s")
BackgroundFillValue = traits.Str(desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s")
interpolationMode = traits.Enum("NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman",
desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSAlignMSPOutputSpec(TraitedSpec):
OutputresampleMSP = File(desc=", The image to be output., ", exists=True)
resultsDir = Directory(desc=", The directory for the results to be written., ", exists=True)
class BRAINSAlignMSP(SEMLikeCommandLine):
"""title: Align Mid Saggital Brain (BRAINS)
category: Utilities.BRAINS
description: Resample an image into ACPC alignment (ACPCDetect).
"""
input_spec = BRAINSAlignMSPInputSpec
output_spec = BRAINSAlignMSPOutputSpec
_cmd = " BRAINSAlignMSP "
_outputs_filenames = {'OutputresampleMSP': 'OutputresampleMSP.nii', 'resultsDir': 'resultsDir'}
class BRAINSTransformConvertInputSpec(CommandLineInputSpec):
inputTransform = File(exists=True, argstr="--inputTransform %s")
referenceVolume = File(exists=True, argstr="--referenceVolume %s")
outputTransformType = traits.Enum("Affine", "VersorRigid", "ScaleVersor", "ScaleSkewVersor", "DisplacementField", "Same", desc="The target transformation type. Must be conversion-compatible with the input transform type", argstr="--outputTransformType %s")
displacementVolume = traits.Either(traits.Bool, File(), hash_files=False, argstr="--displacementVolume %s")
outputTransform = traits.Either(traits.Bool, File(), hash_files=False, argstr="--outputTransform %s")
class BRAINSTransformConvertOutputSpec(TraitedSpec):
displacementVolume = File(exists=True)
outputTransform = File(exists=True)
class BRAINSTransformConvert(SEMLikeCommandLine):
"""title: BRAINS Transform Convert
category: Utilities.BRAINS
description: Convert ITK transforms to higher order transforms
version: 1.0
documentation-url: A utility to convert between transform file formats.
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor: Hans J. Johnson,Kent Williams
acknowledgements:
"""
input_spec = BRAINSTransformConvertInputSpec
output_spec = BRAINSTransformConvertOutputSpec
_cmd = " BRAINSTransformConvert "
_outputs_filenames = {'displacementVolume': 'displacementVolume.nii', 'outputTransform': 'outputTransform.mat'}
class landmarksConstellationAlignerInputSpec(CommandLineInputSpec):
inputLandmarksPaired = File(desc="Input landmark file (.fcsv)", exists=True, argstr="--inputLandmarksPaired %s")
outputLandmarksPaired = traits.Either(traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", argstr="--outputLandmarksPaired %s")
class landmarksConstellationAlignerOutputSpec(TraitedSpec):
outputLandmarksPaired = File(desc="Output landmark file (.fcsv)", exists=True)
class landmarksConstellationAligner(SEMLikeCommandLine):
"""title: MidACPC Landmark Insertion
category: Utilities.BRAINS
description:
This program converts the original landmark files to the AC-PC-aligned landmark files.
version:
documentation-url:
license:
contributor: Ali Ghayoor
acknowledgements:
"""
input_spec = landmarksConstellationAlignerInputSpec
output_spec = landmarksConstellationAlignerOutputSpec
_cmd = " landmarksConstellationAligner "
_outputs_filenames = {'outputLandmarksPaired': 'outputLandmarksPaired'}
class BRAINSEyeDetectorInputSpec(CommandLineInputSpec):
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
inputVolume = File(desc="The input volume", exists=True, argstr="--inputVolume %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="The output volume", argstr="--outputVolume %s")
debugDir = traits.Str(desc="A place for debug information", argstr="--debugDir %s")
class BRAINSEyeDetectorOutputSpec(TraitedSpec):
outputVolume = File(desc="The output volume", exists=True)
class BRAINSEyeDetector(SEMLikeCommandLine):
"""title: Eye Detector (BRAINS)
category: Utilities.BRAINS
description:
version: 1.0
documentation-url: http://www.nitrc.org/projects/brainscdetector/
"""
input_spec = BRAINSEyeDetectorInputSpec
output_spec = BRAINSEyeDetectorOutputSpec
_cmd = " BRAINSEyeDetector "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
class BRAINSLinearModelerEPCAInputSpec(CommandLineInputSpec):
inputTrainingList = File(desc="Input Training Landmark List Filename, ", exists=True, argstr="--inputTrainingList %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSLinearModelerEPCAOutputSpec(TraitedSpec):
pass
class BRAINSLinearModelerEPCA(SEMLikeCommandLine):
"""title: Landmark Linear Modeler (BRAINS)
category: Utilities.BRAINS
description:
Train a linear model using EPCA. Implementation is based on my MS thesis, "A METHOD FOR AUTOMATED LANDMARK CONSTELLATION DETECTION USING EVOLUTIONARY PRINCIPAL COMPONENTS AND STATISTICAL SHAPE MODELS"
version: 1.0
documentation-url: http://www.nitrc.org/projects/brainscdetector/
"""
input_spec = BRAINSLinearModelerEPCAInputSpec
output_spec = BRAINSLinearModelerEPCAOutputSpec
_cmd = " BRAINSLinearModelerEPCA "
_outputs_filenames = {}
class BRAINSInitializedControlPointsInputSpec(CommandLineInputSpec):
inputVolume = File(desc="Input Volume", exists=True, argstr="--inputVolume %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Output Volume", argstr="--outputVolume %s")
splineGridSize = InputMultiPath(traits.Int, desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", argstr="--splineGridSize %s")
permuteOrder = InputMultiPath(traits.Int, desc="The permutation order for the images. The default is 0,1,2 (i.e. no permutation)", sep=",", argstr="--permuteOrder %s")
outputLandmarksFile = traits.Str(desc="Output filename", argstr="--outputLandmarksFile %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSInitializedControlPointsOutputSpec(TraitedSpec):
outputVolume = File(desc="Output Volume", exists=True)
class BRAINSInitializedControlPoints(SEMLikeCommandLine):
"""title: Initialized Control Points (BRAINS)
category: Utilities.BRAINS
description:
Outputs bspline control points as landmarks
version: 0.1.0.$Revision: 916 $(alpha)
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor: Mark Scully
acknowledgements:
This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for Mark Scully and Hans Johnson at the University of Iowa.
"""
input_spec = BRAINSInitializedControlPointsInputSpec
output_spec = BRAINSInitializedControlPointsOutputSpec
_cmd = " BRAINSInitializedControlPoints "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
class CleanUpOverlapLabelsInputSpec(CommandLineInputSpec):
    inputBinaryVolumes = InputMultiPath(File(exists=True), desc="The list of binary images to be checked and cleaned up. Order is important. The binary volume given first always wins out. ", argstr="--inputBinaryVolumes %s...")
outputBinaryVolumes = traits.Either(traits.Bool, InputMultiPath(File(), ), hash_files=False, desc="The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", argstr="--outputBinaryVolumes %s...")
class CleanUpOverlapLabelsOutputSpec(TraitedSpec):
outputBinaryVolumes = OutputMultiPath(File(exists=True), desc="The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", exists=True)
class CleanUpOverlapLabels(SEMLikeCommandLine):
"""title: Clean Up Overla Labels
category: Utilities.BRAINS
description: Take a series of input binary images and clean up the overlapped areas. The binary volume given first always wins out.
version: 0.1.0
contributor: Eun Young Kim
"""
input_spec = CleanUpOverlapLabelsInputSpec
output_spec = CleanUpOverlapLabelsOutputSpec
_cmd = " CleanUpOverlapLabels "
_outputs_filenames = {'outputBinaryVolumes': 'outputBinaryVolumes.nii'}
class BRAINSClipInferiorInputSpec(CommandLineInputSpec):
inputVolume = File(desc="Input image to make a clipped short int copy from.", exists=True, argstr="--inputVolume %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", argstr="--outputVolume %s")
    acLowerBound = traits.Float(desc=", When copying the input image to the output image, replace voxels with the BackgroundFillValue everywhere below the plane this far, in physical units (millimeters), below (inferior to) the AC point (assumed to be the voxel field middle). The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", argstr="--acLowerBound %f")
BackgroundFillValue = traits.Str(desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSClipInferiorOutputSpec(TraitedSpec):
outputVolume = File(desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", exists=True)
class BRAINSClipInferior(SEMLikeCommandLine):
"""title: Clip Inferior of Center of Brain (BRAINS)
category: Utilities.BRAINS
description: This program will read the inputVolume as a short int image, write the BackgroundFillValue everywhere inferior to the lower bound, and write the resulting clipped short int image in the outputVolume.
version: 1.0
"""
input_spec = BRAINSClipInferiorInputSpec
output_spec = BRAINSClipInferiorOutputSpec
_cmd = " BRAINSClipInferior "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
class GenerateLabelMapFromProbabilityMapInputSpec(CommandLineInputSpec):
    inputVolumes = InputMultiPath(File(exists=True), desc="The input probability images from which the label map is computed", argstr="--inputVolumes %s...")
    outputLabelVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="The output label map volume", argstr="--outputLabelVolume %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class GenerateLabelMapFromProbabilityMapOutputSpec(TraitedSpec):
    outputLabelVolume = File(desc="The output label map volume", exists=True)
class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine):
"""title: Label Map from Probability Images
category: Utilities.BRAINS
description:
Given a list of probability maps for labels, create a discrete label map where only the highest probability region is used for the labeling.
version: 0.1
contributor: University of Iowa Department of Psychiatry, http://www.psychiatry.uiowa.edu
"""
input_spec = GenerateLabelMapFromProbabilityMapInputSpec
output_spec = GenerateLabelMapFromProbabilityMapOutputSpec
_cmd = " GenerateLabelMapFromProbabilityMap "
_outputs_filenames = {'outputLabelVolume': 'outputLabelVolume.nii.gz'}
class BRAINSLandmarkInitializerInputSpec(CommandLineInputSpec):
inputFixedLandmarkFilename = File(desc="input fixed landmark. *.fcsv", exists=True, argstr="--inputFixedLandmarkFilename %s")
inputMovingLandmarkFilename = File(desc="input moving landmark. *.fcsv", exists=True, argstr="--inputMovingLandmarkFilename %s")
    inputWeightFilename = File(desc="Input weight file name for landmarks. A landmark with a higher weight is considered more heavily. Weights are proportional, i.e. the magnitudes of the weights are normalized by their minimum and maximum values. ", exists=True, argstr="--inputWeightFilename %s")
outputTransformFilename = traits.Either(traits.Bool, File(), hash_files=False, desc="output transform file name (ex: ./outputTransform.mat) ", argstr="--outputTransformFilename %s")
class BRAINSLandmarkInitializerOutputSpec(TraitedSpec):
outputTransformFilename = File(desc="output transform file name (ex: ./outputTransform.mat) ", exists=True)
class BRAINSLandmarkInitializer(SEMLikeCommandLine):
"""title: BRAINSLandmarkInitializer
category: Utilities.BRAINS
description: Create transformation file (*mat) from a pair of landmarks (*fcsv) files.
version: 1.0
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor: Eunyoung Regina Kim
"""
input_spec = BRAINSLandmarkInitializerInputSpec
output_spec = BRAINSLandmarkInitializerOutputSpec
_cmd = " BRAINSLandmarkInitializer "
_outputs_filenames = {'outputTransformFilename': 'outputTransformFilename'}
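# Hedged usage sketch (the .fcsv file names are hypothetical; the trait names
# come from the input spec above):
#   init = BRAINSLandmarkInitializer()
#   init.inputs.inputFixedLandmarkFilename = 'fixed.fcsv'
#   init.inputs.inputMovingLandmarkFilename = 'moving.fcsv'
#   init.inputs.outputTransformFilename = 'outputTransform.mat'
#   init.run()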
class BRAINSMultiModeSegmentInputSpec(CommandLineInputSpec):
inputVolumes = InputMultiPath(File(exists=True), desc="The input image volumes for finding the largest region filled mask.", argstr="--inputVolumes %s...")
inputMaskVolume = File(desc="The ROI for region to compute histogram levels.", exists=True, argstr="--inputMaskVolume %s")
outputROIMaskVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", argstr="--outputROIMaskVolume %s")
outputClippedVolumeROI = traits.Either(traits.Bool, File(), hash_files=False, desc="The inputVolume clipped to the region of the brain mask.", argstr="--outputClippedVolumeROI %s")
lowerThreshold = InputMultiPath(traits.Float, desc="Lower thresholds on the valid histogram regions for each modality", sep=",", argstr="--lowerThreshold %s")
upperThreshold = InputMultiPath(traits.Float, desc="Upper thresholds on the valid histogram regions for each modality", sep=",", argstr="--upperThreshold %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class BRAINSMultiModeSegmentOutputSpec(TraitedSpec):
outputROIMaskVolume = File(desc="The ROI automatically found from the input image.", exists=True)
outputClippedVolumeROI = File(desc="The inputVolume clipped to the region of the brain mask.", exists=True)
class BRAINSMultiModeSegment(SEMLikeCommandLine):
"""title: Segment based on rectangular region of joint histogram (BRAINS)
category: Utilities.BRAINS
description: This tool creates binary regions based on segmenting multiple image modalities at once.
version: 2.4.1
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu
acknowledgements: Hans Johnson(1,3,4); Gregory Harris(1), Vincent Magnotta(1,2,3); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering)
"""
input_spec = BRAINSMultiModeSegmentInputSpec
output_spec = BRAINSMultiModeSegmentOutputSpec
_cmd = " BRAINSMultiModeSegment "
_outputs_filenames = {'outputROIMaskVolume': 'outputROIMaskVolume.nii', 'outputClippedVolumeROI': 'outputClippedVolumeROI.nii'}
class insertMidACPCpointInputSpec(CommandLineInputSpec):
inputLandmarkFile = File(desc="Input landmark file (.fcsv)", exists=True, argstr="--inputLandmarkFile %s")
outputLandmarkFile = traits.Either(traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", argstr="--outputLandmarkFile %s")
class insertMidACPCpointOutputSpec(TraitedSpec):
outputLandmarkFile = File(desc="Output landmark file (.fcsv)", exists=True)
class insertMidACPCpoint(SEMLikeCommandLine):
"""title: MidACPC Landmark Insertion
category: Utilities.BRAINS
description:
This program takes a landmark fcsv file and adds a new landmark, the midpoint between the AC and PC points, to the output landmark fcsv file.
version:
documentation-url:
license:
contributor: Ali Ghayoor
acknowledgements:
"""
input_spec = insertMidACPCpointInputSpec
output_spec = insertMidACPCpointOutputSpec
_cmd = " insertMidACPCpoint "
_outputs_filenames = {'outputLandmarkFile': 'outputLandmarkFile'}
class BRAINSSnapShotWriterInputSpec(CommandLineInputSpec):
    inputVolumes = InputMultiPath(File(exists=True), desc="Input image volume list to be extracted as 2D images. Multiple inputs are possible. At least one input is required.", argstr="--inputVolumes %s...")
    inputBinaryVolumes = InputMultiPath(File(exists=True), desc="Input mask (binary) volume list to be extracted as 2D images. Multiple inputs are possible.", argstr="--inputBinaryVolumes %s...")
inputSliceToExtractInPhysicalPoint = InputMultiPath(traits.Float, desc="2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.", sep=",", argstr="--inputSliceToExtractInPhysicalPoint %s")
inputSliceToExtractInIndex = InputMultiPath(traits.Int, desc="2D slice number of input images. For size of 256*256*256 image, 128 is usually used.", sep=",", argstr="--inputSliceToExtractInIndex %s")
inputSliceToExtractInPercent = InputMultiPath(traits.Int, desc="2D slice number of input images. Percentage input from 0%-100%. (ex. --inputSliceToExtractInPercent 50,50,50", sep=",", argstr="--inputSliceToExtractInPercent %s")
    inputPlaneDirection = InputMultiPath(traits.Int, desc="Plane to display. In general, 0=sagittal, 1=coronal, and 2=axial plane.", sep=",", argstr="--inputPlaneDirection %s")
outputFilename = traits.Either(traits.Bool, File(), hash_files=False, desc="2D file name of input images. Required.", argstr="--outputFilename %s")
class BRAINSSnapShotWriterOutputSpec(TraitedSpec):
outputFilename = File(desc="2D file name of input images. Required.", exists=True)
class BRAINSSnapShotWriter(SEMLikeCommandLine):
"""title: BRAINSSnapShotWriter
category: Utilities.BRAINS
description: Create 2D snapshot of input images. Mask images are color-coded
version: 1.0
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor: Eunyoung Regina Kim
"""
input_spec = BRAINSSnapShotWriterInputSpec
output_spec = BRAINSSnapShotWriterOutputSpec
_cmd = " BRAINSSnapShotWriter "
_outputs_filenames = {'outputFilename': 'outputFilename'}
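# Hedged sketch of a multi-input call (hypothetical file names; the
# list-valued traits map to repeated CLI arguments via InputMultiPath):
#   snap = BRAINSSnapShotWriter()
#   snap.inputs.inputVolumes = ['t1.nii.gz', 't2.nii.gz']
#   snap.inputs.inputPlaneDirection = [0, 1, 2]
#   snap.inputs.inputSliceToExtractInPercent = [50, 50, 50]
#   snap.inputs.outputFilename = 'snapshot.png'
#   snap.run()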
class JointHistogramInputSpec(CommandLineInputSpec):
    inputVolumeInXAxis = File(desc="The input image for which statistics are computed", exists=True, argstr="--inputVolumeInXAxis %s")
    inputVolumeInYAxis = File(desc="The input image for which statistics are computed", exists=True, argstr="--inputVolumeInYAxis %s")
inputMaskVolumeInXAxis = File(desc="Input mask volume for inputVolumeInXAxis. Histogram will be computed just for the masked region", exists=True, argstr="--inputMaskVolumeInXAxis %s")
inputMaskVolumeInYAxis = File(desc="Input mask volume for inputVolumeInYAxis. Histogram will be computed just for the masked region", exists=True, argstr="--inputMaskVolumeInYAxis %s")
    outputJointHistogramImage = traits.Str(desc=" The output joint histogram image file name. The histogram is usually a 2D image. ", argstr="--outputJointHistogramImage %s")
verbose = traits.Bool(desc=" print debugging information, ", argstr="--verbose ")
class JointHistogramOutputSpec(TraitedSpec):
pass
class JointHistogram(SEMLikeCommandLine):
"""title: Write Out Image Intensities
category: Utilities.BRAINS
description:
For Analysis
version: 0.1
contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu
"""
input_spec = JointHistogramInputSpec
output_spec = JointHistogramOutputSpec
_cmd = " JointHistogram "
_outputs_filenames = {}
class ShuffleVectorsModuleInputSpec(CommandLineInputSpec):
    inputVectorFileBaseName = File(desc="input vector file name prefix. Usually ends with .txt, and the header file has the postfix .txt.hdr", exists=True, argstr="--inputVectorFileBaseName %s")
    outputVectorFileBaseName = traits.Either(traits.Bool, File(), hash_files=False, desc="output vector file name prefix. Usually ends with .txt, and the header file has the postfix .txt.hdr", argstr="--outputVectorFileBaseName %s")
    resampleProportion = traits.Float(desc="downsample size of 1 will be the same size as the input images, downsample size of 3 will throw 2/3 of the vectors away.", argstr="--resampleProportion %f")


class ShuffleVectorsModuleOutputSpec(TraitedSpec):
    outputVectorFileBaseName = File(desc="output vector file name prefix. Usually ends with .txt, and the header file has the postfix .txt.hdr", exists=True)


class ShuffleVectorsModule(SEMLikeCommandLine):
    """title: ShuffleVectors

    category: Utilities.BRAINS

    description: Automatic Segmentation using neural networks

    version: 1.0

    license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt

    contributor: Hans Johnson
    """

    input_spec = ShuffleVectorsModuleInputSpec
    output_spec = ShuffleVectorsModuleOutputSpec
    _cmd = " ShuffleVectorsModule "
    _outputs_filenames = {'outputVectorFileBaseName': 'outputVectorFileBaseName'}
class ImageRegionPlotterInputSpec(CommandLineInputSpec):
    inputVolume1 = File(desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolume1 %s")
    inputVolume2 = File(desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolume2 %s")
    inputBinaryROIVolume = File(desc="The Input binary image for region of interest", exists=True, argstr="--inputBinaryROIVolume %s")
    inputLabelVolume = File(desc="The Label Image", exists=True, argstr="--inputLabelVolume %s")
    numberOfHistogramBins = traits.Int(desc="the number of histogram levels", argstr="--numberOfHistogramBins %d")
    outputJointHistogramData = traits.Str(desc="output data file name", argstr="--outputJointHistogramData %s")
    useROIAUTO = traits.Bool(desc="Use ROIAUTO to compute region of interest. This cannot be used with inputLabelVolume", argstr="--useROIAUTO ")
    useIntensityForHistogram = traits.Bool(desc="Create Intensity Joint Histogram instead of Quantile Joint Histogram", argstr="--useIntensityForHistogram ")
    verbose = traits.Bool(desc="print debugging information", argstr="--verbose ")


class ImageRegionPlotterOutputSpec(TraitedSpec):
    pass


class ImageRegionPlotter(SEMLikeCommandLine):
    """title: Write Out Image Intensities

    category: Utilities.BRAINS

    description: For Analysis

    version: 0.1

    contributor: University of Iowa Department of Psychiatry, http://www.psychiatry.uiowa.edu
    """

    input_spec = ImageRegionPlotterInputSpec
    output_spec = ImageRegionPlotterOutputSpec
    _cmd = " ImageRegionPlotter "
    _outputs_filenames = {}
| [
"[email protected]"
] | |
202384744bc82b1b11a8752e20a41b61b8c14117 | 30ab9750e6ca334941934d1727c85ad59e6b9c8a | /zentral/contrib/monolith/management/commands/rebuild_manifest_enrollment_packages.py | 4311863e71bf807a69d5cdb8a2dda5713092f8ef | [
"Apache-2.0"
] | permissive | ankurvaishley/zentral | 57e7961db65278a0e614975e484927f0391eeadd | a54769f18305c3fc71bae678ed823524aaa8bb06 | refs/heads/main | 2023-05-31T02:56:40.309854 | 2021-07-01T07:51:31 | 2021-07-01T14:15:34 | 382,346,360 | 1 | 0 | Apache-2.0 | 2021-07-02T12:55:47 | 2021-07-02T12:55:47 | null | UTF-8 | Python | false | false | 484 | py | from django.core.management.base import BaseCommand
from zentral.contrib.monolith.models import ManifestEnrollmentPackage
from zentral.contrib.monolith.utils import build_manifest_enrollment_package
class Command(BaseCommand):
    help = 'Rebuild monolith manifest enrollment packages.'

    def handle(self, *args, **kwargs):
        for mep in ManifestEnrollmentPackage.objects.all():
            build_manifest_enrollment_package(mep)
            print(mep.file.path, "rebuilt")
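
# Django names management commands after their module, so a typical (assumed)
# invocation would be:
#   ./manage.py rebuild_manifest_enrollment_packages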
| [
"[email protected]"
] | |
bdbf224d07f9a5aeceb878a2ff696537cb9fd117 | 3633bab8066f576c8bf9e7908afe30bb070d0b70 | /Hack-tenth-week/cinema/website/management/commands/populate_db.py | f9afe0316b2236021528fb773fea671a5c9bdfe8 | [] | no_license | 6desislava6/Hack-Bulgaria | 099c195e45a443cf4a3342eff6612ac2aa66565b | de4bf7baae35e21d6a7b27d4bde68247bb85b67a | refs/heads/master | 2021-01-20T11:57:29.027595 | 2015-06-02T17:36:59 | 2015-06-02T17:36:59 | 32,828,816 | 4 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,035 | py | from django.core.management.base import BaseCommand
from website.models import Movie, Projection, Reservation
class Command(BaseCommand):
def _add_movies(self):
Movie.add_movie(name='The Green Mile', rating=9.0)
Movie.add_movie(name='Stay Alive', rating=6.0)
Movie.add_movie(name='Twenty-Seven Dresses', rating=5.0)
Movie.add_movie(name='Inception', rating=9.0)
Movie.add_movie(name='The Hunger Games: Catching Fire', rating=7.9)
Movie.add_movie(name='Wreck-It Ralph', rating=7.8)
Movie.add_movie(name='Her', rating=8.3)
def _delete_movies(self):
Movie.objects.all().delete()
def _delete_projections(self):
Projection.objects.all().delete()
def _add_projections(self):
Projection.add_projection(movie=Movie.objects.get(name='The Green Mile'), type_projection='3D', date='2015-05-19', time='18:00')
Projection.add_projection(movie=Movie.objects.get(name='Stay Alive'), type_projection='3D', date='2015-05-19', time='18:00')
Projection.add_projection(movie=Movie.objects.get(name='Twenty-Seven Dresses'), type_projection='3D', date='2015-05-19', time='18:00')
Projection.add_projection(movie=Movie.objects.get(name='Inception'), type_projection='3D', date='2015-05-19', time='18:00')
Projection.add_projection(movie=Movie.objects.get(name='The Hunger Games: Catching Fire'), type_projection='3D', date='2015-05-19', time='18:00')
Projection.add_projection(movie=Movie.objects.get(name='Wreck-It Ralph'), type_projection='3D', date='2015-05-19', time='18:00')
def _add_reservations(self):
Reservation.add_reservation(username='desi', row='1', col='1', projection=Projection.objects.get(movie__name='The Green Mile'))
Reservation.add_reservation(username='marmot', row='1', col='1', projection=Projection.objects.get(movie__name='Inception'))
def handle(self, *args, **options):
self._add_movies()
self._add_projections()
self._add_reservations()
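        # The _delete_movies()/_delete_projections() helpers above are never
        # called here; running them first would make repopulation idempotent.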
| [
"[email protected]"
] | |
c084bf927837edbff9f1738b44a08d195446fec2 | 35fa8925e63f2b0f62ef6bfc1ff4e03cf42bd923 | /tests/models/output/definitions/test_output_definition.py | dc3f2047a98052437876efa3ed6a308349469e6b | [
"Apache-2.0"
] | permissive | TheLabbingProject/django_analyses | 9e6f8b9bd2a84e8efe6dda6a15de6a3ecdf48ec1 | 5642579660fd09dde4a23bf02ec98a7ec264bceb | refs/heads/master | 2023-02-26T07:53:53.142552 | 2023-02-17T08:12:17 | 2023-02-17T08:12:17 | 225,623,958 | 1 | 2 | Apache-2.0 | 2023-02-17T08:12:18 | 2019-12-03T13:15:29 | Python | UTF-8 | Python | false | false | 5,861 | py | from django.core.exceptions import ValidationError
from django.test import TestCase
from django_analyses.models.input.definitions.file_input_definition import \
FileInputDefinition
from django_analyses.models.managers.output_definition import \
OutputDefinitionManager
from django_analyses.models.output.definitions.output_definition import \
OutputDefinition
from django_analyses.models.output.types.file_output import FileOutput
from tests.factories.output.definitions.output_definition import \
OutputDefinitionFactory
class OutputDefinitionTestCase(TestCase):
"""
Tests for the
:class:`~django_analyses.models.output.definitions.output_definition.OutputDefinition`
model.
"""
def setUp(self):
"""
Adds the created instances to the tests' contexts.
For more information see unittest's :meth:`~unittest.TestCase.setUp` method.
"""
self.output_definition = OutputDefinitionFactory()
##########
# Meta #
##########
def test_ordering(self):
"""
Test the `ordering`.
"""
self.assertTupleEqual(OutputDefinition._meta.ordering, ("key",))
def test_output_class_is_none(self):
"""
Tests that the *output_class* class attribute is set to None. This is
        meant to be overridden by a
:class:`~django_analyses.models.output.output.Output` instance.
"""
self.assertIsNone(OutputDefinition.output_class)
def test_custom_manager_is_assigned(self):
"""
Tests that the manager is assigned to be the custom
:class:`~django_analyses.models.managers.output_definition.OutputDefinitionManager`
class.
"""
self.assertIsInstance(OutputDefinition.objects, OutputDefinitionManager)
##########
# Fields #
##########
# key
def test_key_max_length(self):
"""
Test the max_length of the *key* field.
"""
field = self.output_definition._meta.get_field("key")
self.assertEqual(field.max_length, 50)
def test_key_is_not_unique(self):
"""
Tests that the *key* field is not unique.
"""
field = self.output_definition._meta.get_field("key")
self.assertFalse(field.unique)
def test_key_blank_and_null(self):
"""
Tests that the *key* field may not be blank or null.
"""
field = self.output_definition._meta.get_field("key")
self.assertFalse(field.blank)
self.assertFalse(field.null)
# description
def test_description_blank_and_null(self):
"""
Tests that the *description* field may be blank or null.
"""
field = self.output_definition._meta.get_field("description")
self.assertTrue(field.blank)
self.assertTrue(field.null)
###########
# Methods #
###########
def test_string(self):
"""
Test the string output.
"""
value = str(self.output_definition)
expected = self.output_definition.key
self.assertEqual(value, expected)
def test_create_output_instance_raises_type_error(self):
"""
Tests that calling the
:meth:`~django_analyses.models.output.definitions.output_definition.OutputDefinition.create_output_instance`
raises a ValidationError. This is the expected behavior as long as the
        output_class attribute is not defined (or is ill-defined).
"""
with self.assertRaises(ValidationError):
self.output_definition.create_output_instance()
def test_create_output_instance_with_non_model_value_raises_type_error(self):
"""
Tests that calling the
:meth:`~django_analyses.models.output.definitions.output_definition.OutputDefinition.create_output_instance`
with a non-model value raises a ValidationError.
"""
self.output_definition.output_class = str
with self.assertRaises(ValidationError):
self.output_definition.create_output_instance()
def test_create_output_instance_with_non_output_subclass_value_raises_type_error(
self,
):
"""
Tests that calling the
:meth:`~django_analyses.models.output.definitions.output_definition.OutputDefinition.create_output_instance`
with a non-:class:`~django_analyses.models.output.output.Output`
model subclass value raises a ValidationError.
"""
self.output_definition.output_class = FileInputDefinition
with self.assertRaises(ValidationError):
self.output_definition.check_output_class_definition()
def test_resetting_output_class_to_valid_output_subclass(self):
"""
Tests that the
:meth:`~django_analyses.models.output.definitions.output_definition.OutputDefinition.check_output_class_definition`
method does not raise a ValidationError when setting *output_class* to
some valid Output model subclass.
"""
self.output_definition.output_class = FileOutput
try:
self.output_definition.check_output_class_definition()
except ValidationError:
self.fail(
"Failed to set output_definition output_class to a valid Output subclass!"
)
def test_create_output_instance_reraises_uncaught_exception(self):
"""
Tests that calling the
:meth:`~django_analyses.models.output.definitions.output_definition.OutputDefinition.create_output_instance`
method when *output_class* is properly set but invalid kwargs still
raises an exception.
"""
self.output_definition.output_class = FileOutput
with self.assertRaises(ValueError):
self.output_definition.create_output_instance()
| [
"[email protected]"
] | |
af9bf4858b5793e1641a6963e2f7e683b1de3f12 | 1adc548f1865c0e4fcb3b3ff1049789fa0c72b12 | /tests/observes/test_column_property.py | 058383a5651f5433d39e0d4606bda3d52d6f5663 | [] | no_license | wujuguang/sqlalchemy-utils | ca826a81acdc70168e0b85820aaf8fe1604d6b0a | b6871980a412f2ebd16ec08be3127814b42ba64e | refs/heads/master | 2021-01-12T20:59:48.692539 | 2016-01-15T08:06:48 | 2016-01-18T18:52:12 | 48,418,840 | 0 | 0 | null | 2015-12-22T08:05:48 | 2015-12-22T08:05:47 | null | UTF-8 | Python | false | false | 1,582 | py | import sqlalchemy as sa
from pytest import raises
from sqlalchemy_utils.observer import observes
from tests import TestCase
class TestObservesForColumn(TestCase):
dns = 'postgres://postgres@localhost/sqlalchemy_utils_test'
def create_models(self):
class Product(self.Base):
__tablename__ = 'product'
id = sa.Column(sa.Integer, primary_key=True)
price = sa.Column(sa.Integer)
@observes('price')
def product_price_observer(self, price):
self.price = price * 2
self.Product = Product
def test_simple_insert(self):
product = self.Product(price=100)
self.session.add(product)
self.session.flush()
assert product.price == 200
class TestObservesForColumnWithoutActualChanges(TestCase):
dns = 'postgres://postgres@localhost/sqlalchemy_utils_test'
def create_models(self):
class Product(self.Base):
__tablename__ = 'product'
id = sa.Column(sa.Integer, primary_key=True)
price = sa.Column(sa.Integer)
@observes('price')
def product_price_observer(self, price):
raise Exception('Trying to change price')
self.Product = Product
def test_only_notifies_observer_on_actual_changes(self):
product = self.Product()
self.session.add(product)
self.session.flush()
with raises(Exception) as e:
product.price = 500
self.session.commit()
assert str(e.value) == 'Trying to change price'
| [
"[email protected]"
] | |
c2e9ac93f8629983cb977f8a65caf9dee5bfceaa | 80760d4c8a6b2c45b4b529bdd98d33c9c5509438 | /Practice/atcoder/ABC/054/src/c2.py | 007ef8332e20a41b61b8c14117 | [] | no_license | prrn-pg/Shojin | f1f46f8df932df0be90082b475ec02b52ddd882e | 3a20f1122d8bf7d95d9ecd205a62fc36168953d2 | refs/heads/master | 2022-12-30T22:26:41.020473 | 2020-10-17T13:53:52 | 2020-10-17T13:53:52 | 93,830,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | # Spanning tree? Is that what it's called? But cost doesn't matter here anyway
# Just hold the graph as an adjacency list and DFS over it
N, M = map(int, input().split())
Neighbor_list = [[] for _ in range(N)]
for _ in range(M):
s, t = map(int, input().split())
Neighbor_list[s-1].append(t-1)
Neighbor_list[t-1].append(s-1)
def dfs(cur, path):
if len(path) == N:
return 1
else:
ret = 0
for neighbor in Neighbor_list[cur]:
if neighbor not in path:
next_list = path[:]
next_list.append(neighbor)
ret += dfs(neighbor, next_list)
return ret
print(dfs(0, [0]))
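
# dfs() counts the simple paths that start at vertex 0 and visit all N
# vertices exactly once (Hamiltonian paths from vertex 0); `path` is copied
# at each branch, so the factorial-time search is fine for the small N this
# problem allows.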
| [
"[email protected]"
] | |
cd53fdab752cc6628b086d089002c796748479b8 | e09bbdc53af6be9281795189f26f6e59997abf68 | /tests/test_forex.py | eeb783520060d238446a4a97fba67b6f1d7c96a9 | [
"Apache-2.0"
] | permissive | jag787/ppQuanTrade | 620ce72c7875bb730708c48ae0481376b43e501b | 9a6da7522d281da130a2c459e2e614a75daa543d | refs/heads/master | 2021-01-11T13:53:40.583710 | 2013-12-20T10:43:58 | 2013-12-20T10:43:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,100 | py | #
# Copyright 2013 Xavier Bruhiere
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Tests for the forex datasource
'''
from unittest import TestCase
from nose.tools import timed
from neuronquant.data.forex import ConnectTrueFX
#from neuronquant.utils.datautils import FX_PAIRS
DEFAULT_TIMEOUT = 15
EXTENDED_TIMEOUT = 90
class TestForex(TestCase):
'''
Forex access through TrueFX provider
    !! Beware that the TrueFX server will return an empty array
    if currencies have not been updated since the last call
'''
def setUp(self):
pass
def tearDown(self):
pass
def test_connection_credentials(self):
'''
        Use an explicit TrueFX username and password account for
        authentication
'''
client = ConnectTrueFX(user='Gusabi', password='quantrade')
        # If it succeeded, the TrueFX server returned an authentication code
        # for further use
assert client
assert client._code
assert client._code.find('Gusabi') == 0
def test_connection_default_auth_file(self):
'''
        If no credentials are given, the constructor tries to find them
        by reading config/default.json
'''
        # It's the default behavior, nothing to specify
client = ConnectTrueFX()
assert client
assert client._code
assert client._code.find('Gusabi') == 0
def test_connection_custom_auth_file(self):
'''
        If no credentials are given, the constructor tries to find them
        by reading the given JSON file
'''
client = ConnectTrueFX(auth_file='plugins.json')
assert client
assert client._code
assert client._code.find('Gusabi') == 0
def test_connection_without_auth(self):
''' TrueFX API can be used without credentials in a limited mode '''
#FIXME Fails to retrieve limited values
client = ConnectTrueFX(user=None, password=None, auth_file='fake.json')
assert client._code == 'not authorized'
def test_connection_with_pairs(self):
pairs = ['EUR/USD', 'USD/JPY']
client = ConnectTrueFX(pairs=pairs)
        ### The default call uses the pairs given at connection time
dataframe = client.QueryTrueFX()
for p in pairs:
assert p in dataframe.columns
@timed(DEFAULT_TIMEOUT)
def test_query_default(self):
pass
def test_query_format(self):
pass
def test_query_pairs(self):
pass
def test_response_formating(self):
pass
def test_detect_active(self):
pass
def test_standalone_request(self):
pass
| [
"[email protected]"
] | |
838a1a224339fe920c49a50a2b316a3903af131c | fca7958875d4650c6daeec7049fef02139db9eb1 | /mi/dataset/parser/test/test_parad_k_stc_imodem.py | e220343f8e3a8ebed36ef5cb7da6e5a3da97baf2 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | oceanobservatories/mi-dataset | 36f08a076b24c40f91abd0a97e47a72ec85fc5e6 | 93aa7289f5f4788727f3b32f11d62f30ad88fd2f | refs/heads/master | 2020-04-04T04:25:22.372472 | 2017-02-24T17:06:23 | 2017-02-24T17:06:23 | 24,067,634 | 1 | 9 | null | 2016-06-29T23:24:46 | 2014-09-15T18:15:01 | Python | UTF-8 | Python | false | false | 26,149 | py | #!/usr/bin/env python
"""
@package mi.dataset.parser.test.test_parad_k_stc_imodem
@file marine-integrations/mi/dataset/parser/test/test_parad_k_stc_imodem.py
@author Mike Nicoletti, Steve Myerson (recovered)
@brief Test code for a Parad_k_stc_imodem data parser
"""
import struct, ntplib
from StringIO import StringIO
from nose.plugins.attrib import attr
from mi.core.log import get_logger ; log = get_logger()
from mi.core.exceptions import SampleException
from mi.dataset.test.test_parser import ParserUnitTestCase
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.parad_k_stc_imodem import \
Parad_k_stc_imodemParser,\
Parad_k_stc_imodemRecoveredParser, \
Parad_k_stc_imodemDataParticle, \
Parad_k_stc_imodemRecoveredDataParticle
from mi.dataset.parser.WFP_E_file_common import StateKey
@attr('UNIT', group='mi')
class Parad_k_stc_imodemParserUnitTestCase(ParserUnitTestCase):
"""
Parad_k_stc_imodem Parser unit test suite
"""
TEST_DATA_SHORT = "\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00R\x9d\xab\xa2R\x9d\xac\x19R\x9d\xac" \
"\x1d\x00\x00\x00\x00A:6\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00h\x00NR\x9d\xac!C\t\xf2\xf7A9A!\x00\x00\x00" \
"\x00\x00\x00\x00\x00\x00\xf2\x00c\x00OR\x9d\xac&C\xbc\x9f\xa7A7'\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x00^" \
"\x00OR\x9d\xac*C\xc5\xad\x08A6\xd5\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x00n\x00O"
TEST_DATA = "\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00R\x9d\xab\xa2R\x9d\xac\x19R\x9d\xac\x1d\x00" \
"\x00\x00\x00A:6\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00h\x00NR\x9d\xac!C\t\xf2\xf7A9A!\x00\x00\x00\x00" \
"\x00\x00\x00\x00\x00\xf2\x00c\x00OR\x9d\xac&C\xbc\x9f\xa7A7'\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x00^" \
"\x00OR\x9d\xac*C\xc5\xad\x08A6\xd5\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x00n\x00OR\x9d\xac/C\xb8COA6\xde" \
"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9d\x00p\x00QR\x9d\xac3C\x98\xe5TA733\x00\x00\x00\x00\x00\x00\x00\x00" \
"\x00\xa4\x00u\x00OR\x9d\xac8C\x9566A7!-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9a\x00o\x00OR\x9d\xac?C\xa1\xd7\xc3" \
"A6\xa6LB\x8bG\xae\x00\x00\x00\x00\x00\xb6\x00v\x00PR\x9d\xacECsS\xfeA7e\xfeB\x88\x00\x00\x00\x00\x00\x00\x00" \
"\x98\x00s\x00QR\x9d\xacKC\x89\x17\x8cA6\xe2\xecB\x84\x99\x9a\x00\x00\x00\x00\x00\xa4\x00\x81\x00PR\x9d\xacQC}\n" \
"\xbfA7\x00hB\x81G\xae\x00\x00\x00\x00\x00\xa2\x00|\x00NR\x9d\xacWCyW\xc7A6\x97\x8dB{\xe1H\x00\x00\x00\x00\x00\x9a" \
"\x00m\x00NR\x9d\xac]C\x8c!#A6\x9f\xbeBuQ\xec\x00\x00\x00\x00\x00\x97\x00s\x00QR\x9d\xaccC\x84!9A6h\nBn\x8f\\\x00" \
"\x00\x00\x00\x00\x9f\x00v\x00NR\x9d\xaciCE\xa5UA6a|Bh=q\x00\x00\x00\x00\x00\x97\x00l\x00PR\x9d\xacoC\xa5\xa5\xad" \
"A5\x94\xafBa\\)\x00\x00\x00\x00\x00\x9b\x00n\x00RR\x9d\xacuC\\\r\x08A6\x14{B[\n=\x00\x00\x00\x00\x00\x9a\x00s\x00" \
"OR\x9d\xac{C\xa3\x0b\xb8A5F\nBT33\x00\x00\x00\x00\x00\x98\x00q\x00NR\x9d\xac\x81CO\xc0+A5\xd7\xdcBM\xd7\n\x00\x00" \
"\x00\x00\x00\x97\x00n\x00PR\x9d\xac\x87Cxp\xd0A5#\xa3BGG\xae\x00\x00\x00\x00\x00\x9b\x00n\x00PR\x9d\xac\x8dC\x84" \
"\xdd\xd9A5X\x10B@\xae\x14\x00\x00\x00\x00\x00\xa5\x00v\x00OR\x9d\xac\x93C\xa0\x85\x01A4j\x7fB:\x14{\x00\x00\x00\x00" \
"\x00\x9c\x00t\x00QR\x9d\xac\x99Cq\xa4\xdbA5:\x92B3\xc2\x8f\x00\x00\x00\x00\x00\x9c\x00x\x00PR\x9d\xac\x9fCg\x07#A5" \
"\x18+B-\x00\x00\x00\x00\x00\x00\x00\x9e\x00m\x00QR\x9d\xac\xa5C\x9bw\x96A4FtB&z\xe1\x00\x00\x00\x00\x00\xd7\x00s" \
"\x00OR\x9d\xac\xabCmP5A4\x9dJB\x1f\xd7\n\x00\x00\x00\x00\x00\x99\x00s\x00PR\x9d\xac\xb1C\xad\x960A3\x8a\tB\x19" \
"(\xf6\x00\x00\x00\x00\x00\x95\x00n\x00OR\x9d\xac\xb7C\x0c\xce]A5\x0f\xfaB\x12\xe1H\x00\x00\x00\x00\x00\x9c\x00u" \
"\x00PR\x9d\xac\xbdC\xa1\xeb\x02A3Z\x85B\x0c=q\x00\x00\x00\x00\x00\x95\x00u\x00OR\x9d\xac\xc3C$\xafOA4\xa23B\x05" \
"\xe1H\x00\x00\x00\x00\x00\x99\x00r\x00PR\x9d\xac\xc9C\xae\xddeA3\x0f(A\xfe(\xf6\x00\x00\x00\x00\x00\x9a\x00o\x00O" \
"R\x9d\xac\xcfA\xfa\xb2:A5\x0b\x0fA\xf2\x8f\\\x00\x00\x00\x00\x00\xaf\x00m\x00P\xff\xff\xff\xff\x00\x00\x00\rR\x9d" \
"\xac\xd4R\x9d\xadQ"
# all flags set to zero
TEST_DATA_BAD_FLAGS = "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00R\x9d\xab\xa2R\x9d\xac\x19R\x9d\xac\x1d" \
"\x00\x00\x00\x00A:6\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00h\x00NR\x9d\xac!C\t\xf2\xf7A9A!\x00\x00\x00\x00\x00" \
"\x00\x00\x00\x00\xf2\x00c\x00OR\x9d\xac&C\xbc\x9f\xa7A7'\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x00^\x00OR\x9d\xac" \
"*C\xc5\xad\x08A6\xd5\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x00n\x00O"
# took 5 bytes out of second engineering sample
TEST_DATA_BAD_ENG = "\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00R\x9d\xab\xa2R\x9d\xac\x19R\x9d\xac\x1d" \
"\x00\x00\x00\x00A:6\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00h\x00NR\x9d\xac!C\t!\x00\x00\x00\x00\x00" \
"\x00\x00\x00\x00\xf2\x00c\x00OR\x9d\xac&C\xbc\x9f\xa7A7'\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x00^\x00OR\x9d\xac" \
"*C\xc5\xad\x08A6\xd5\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x00n\x00O"
# Has a NaN for par_value
TEST_DATA_NAN = \
'\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00' \
'\x52\x9D\xAB\xA2\x52\x9D\xAC\x19' \
'\x52\x9D\xAC\x1D' \
'\x00\x00\x00\x00\x41\x3A\x36\xE3\x00\x00\x00\x00' \
'\xFF\xC0\x00\x00' \
'\x01\x03\x00\x68\x00\x4E'
def create_rec_parser(self, new_state, file_handle):
"""
This function creates a Parad_k_stc parser for recovered data.
"""
if new_state is None:
new_state = self.state
parser = Parad_k_stc_imodemRecoveredParser(self.rec_config, new_state, file_handle,
self.state_callback, self.pub_callback)
return parser
def state_callback(self, state, file_ingested):
""" Call back method to watch what comes in via the position callback """
self.file_ingested = file_ingested
self.state_callback_value = state
def pub_callback(self, pub):
""" Call back method to watch what comes in via the publish callback """
self.publish_callback_value = pub
def setUp(self):
ParserUnitTestCase.setUp(self)
self.config = {
DataSetDriverConfigKeys.PARTICLE_MODULE:
'mi.dataset.parser.parad_k_stc_imodem',
DataSetDriverConfigKeys.PARTICLE_CLASS:
['Parad_k_stc_imodem_statusParserDataParticle',
'Parad_k_stc_imodem_startParserDataParticle',
'Parad_k_stc_imodem_engineeringParserDataParticle']
}
self.rec_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE:
'mi.dataset.parser.parad_k_stc_imodem',
DataSetDriverConfigKeys.PARTICLE_CLASS:
['Parad_k_stc_imodemRecoveredDataParticle']
}
self.start_state = {StateKey.POSITION: 0}
# Define test data particles and their associated timestamps which will be
# compared with returned results
self.timestamp1_eng = self.timestamp_to_ntp('R\x9d\xac\x1d')
log.debug("Converted timestamp #1: %s",self.timestamp1_eng)
self.particle_a_eng = Parad_k_stc_imodemDataParticle(b'R\x9d\xac\x1d' \
'\x00\x00\x00\x00A:6\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00h\x00N',
internal_timestamp=self.timestamp1_eng)
self.timestamp2_eng = self.timestamp_to_ntp('R\x9d\xac!')
self.particle_b_eng = Parad_k_stc_imodemDataParticle(b'R\x9d\xac!C\t' \
'\xf2\xf7A9A!\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf2\x00c\x00O',
internal_timestamp=self.timestamp2_eng)
self.timestamp3_eng = self.timestamp_to_ntp('R\x9d\xac&')
self.particle_c_eng = Parad_k_stc_imodemDataParticle(b"R\x9d\xac&C\xbc" \
"\x9f\xa7A7'\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x00^\x00O",
internal_timestamp=self.timestamp3_eng)
self.timestamp4_eng = self.timestamp_to_ntp('R\x9d\xac*')
self.particle_d_eng = Parad_k_stc_imodemDataParticle(b'R\x9d\xac' \
'*C\xc5\xad\x08A6\xd5\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x00n\x00O',
internal_timestamp=self.timestamp4_eng)
self.timestamp_last_eng = self.timestamp_to_ntp('R\x9d\xac\xcf')
self.particle_last_eng = Parad_k_stc_imodemDataParticle(b'R\x9d\xac\xcfA' \
'\xfa\xb2:A5\x0b\x0fA\xf2\x8f\\\x00\x00\x00\x00\x00\xaf\x00m\x00P',
internal_timestamp=self.timestamp_last_eng)
# Recovered expected particles
self.particle_a_eng_rec = Parad_k_stc_imodemRecoveredDataParticle(b'R\x9d\xac\x1d' \
'\x00\x00\x00\x00A:6\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00h\x00N',
internal_timestamp=self.timestamp1_eng)
self.particle_b_eng_rec = Parad_k_stc_imodemRecoveredDataParticle(b'R\x9d\xac!C\t' \
'\xf2\xf7A9A!\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf2\x00c\x00O',
internal_timestamp=self.timestamp2_eng)
self.particle_c_eng_rec = Parad_k_stc_imodemRecoveredDataParticle(b"R\x9d\xac&C\xbc" \
"\x9f\xa7A7'\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x00^\x00O",
internal_timestamp=self.timestamp3_eng)
self.particle_d_eng_rec = Parad_k_stc_imodemRecoveredDataParticle(b'R\x9d\xac' \
'*C\xc5\xad\x08A6\xd5\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x00n\x00O',
internal_timestamp=self.timestamp4_eng)
self.particle_last_eng_rec = Parad_k_stc_imodemRecoveredDataParticle(b'R\x9d\xac\xcfA' \
'\xfa\xb2:A5\x0b\x0fA\xf2\x8f\\\x00\x00\x00\x00\x00\xaf\x00m\x00P',
internal_timestamp=self.timestamp_last_eng)
# uncomment the following to generate particles in yml format for driver testing results files
#self.particle_to_yml(self.particle_a_eng)
#self.particle_to_yml(self.particle_b_eng)
#self.particle_to_yml(self.particle_c_eng)
#self.particle_to_yml(self.particle_d_eng)
self.file_ingested = False
self.state_callback_value = None
self.publish_callback_value = None
self.state = None
def particle_to_yml(self, particle):
"""
        This is added as a testing helper, not actually as part of the parser tests. Since the same
        particles will be used for the driver tests, it is helpful to write them to .yml in the same
        form they are needed in the results.yml files.
"""
particle_dict = particle.generate_dict()
        # opened for write-append; if you want to start from scratch, manually delete this file
fid = open('particle.yml', 'a')
fid.write(' - _index: 0\n')
fid.write(' internal_timestamp: %f\n' % particle_dict.get('internal_timestamp'))
fid.write(' particle_object: %s\n' % particle.__class__.__name__)
fid.write(' particle_type: %s\n' % particle_dict.get('stream_name'))
for val in particle_dict.get('values'):
if isinstance(val.get('value'), float):
fid.write(' %s: %16.16f\n' % (val.get('value_id'), val.get('value')))
else:
fid.write(' %s: %s\n' % (val.get('value_id'), val.get('value')))
fid.close()
def test_simple(self):
"""
Read test data and pull out data particles one at a time.
Assert that the results are those we expected.
"""
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT) #turn into a data stream to look like file ingestion
self.parser = Parad_k_stc_imodemParser(self.config, self.start_state, self.stream_handle,
self.state_callback, self.pub_callback) # last one is the link to the data source
# next get engineering records
result = self.parser.get_records(1)
self.assert_result(result, 50, self.particle_a_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 76, self.particle_b_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng, True)
        # no data left, don't move the position
result = self.parser.get_records(1)
self.assertEqual(result, [])
self.assertEqual(self.parser._state[StateKey.POSITION], 128)
self.assertEqual(self.state_callback_value[StateKey.POSITION], 128)
self.assert_(isinstance(self.publish_callback_value, list))
self.assertEqual(self.publish_callback_value[0], self.particle_d_eng)
def test_simple_recovered(self):
"""
Read recovered test data and pull out data particles one at a time.
Assert that the results are those we expected.
"""
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT) #turn into a data stream to look like file ingestion
self.parser = self.create_rec_parser(None, stream_handle)
# next get engineering records
result = self.parser.get_records(1)
self.assert_result(result, 50, self.particle_a_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 76, self.particle_b_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng_rec, True)
# no data left, don't move the position
result = self.parser.get_records(1)
self.assertEqual(result, [])
self.assertEqual(self.parser._state[StateKey.POSITION], 128)
self.assertEqual(self.state_callback_value[StateKey.POSITION], 128)
self.assert_(isinstance(self.publish_callback_value, list))
self.assertEqual(self.publish_callback_value[0], self.particle_d_eng_rec)
def timestamp_to_ntp(self, hex_timestamp):
fields = struct.unpack('>I', hex_timestamp)
timestamp = int(fields[0])
return ntplib.system_to_ntp_time(timestamp)
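        # e.g. '\x52\x9d\xac\x1d' unpacks (big-endian uint32) to 1386064925
        # seconds since the Unix epoch, which is then shifted to the NTP era.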
def assert_result(self, result, position, particle, ingested):
self.assertEqual(result, [particle])
self.assertEqual(self.file_ingested, ingested)
self.assertEqual(self.parser._state[StateKey.POSITION], position)
self.assertEqual(self.state_callback_value[StateKey.POSITION], position)
self.assert_(isinstance(self.publish_callback_value, list))
self.assertEqual(self.publish_callback_value[0], particle)
def test_get_many(self):
"""
Read test data and pull out multiple data particles at one time.
Assert that the results are those we expected.
"""
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = Parad_k_stc_imodemParser(self.config, self.start_state, self.stream_handle,
self.state_callback, self.pub_callback)
# start with the start time record
result = self.parser.get_records(4)
self.assertEqual(result, [self.particle_a_eng, self.particle_b_eng, self.particle_c_eng, self.particle_d_eng])
self.assertEqual(self.parser._state[StateKey.POSITION], 128)
self.assertEqual(self.state_callback_value[StateKey.POSITION], 128)
self.assertEqual(self.publish_callback_value[0], self.particle_a_eng)
self.assertEqual(self.publish_callback_value[1], self.particle_b_eng)
self.assertEqual(self.publish_callback_value[2], self.particle_c_eng)
self.assertEqual(self.publish_callback_value[3], self.particle_d_eng)
self.assertEqual(self.file_ingested, True)
def test_get_many_recovered(self):
"""
Read recovered test data and pull out multiple data particles at one time.
Assert that the results are those we expected.
"""
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = self.create_rec_parser(None, stream_handle)
# start with the start time record
result = self.parser.get_records(4)
self.assertEqual(result, [self.particle_a_eng_rec, self.particle_b_eng_rec,
self.particle_c_eng_rec, self.particle_d_eng_rec])
self.assertEqual(self.parser._state[StateKey.POSITION], 128)
self.assertEqual(self.state_callback_value[StateKey.POSITION], 128)
self.assertEqual(self.publish_callback_value[0], self.particle_a_eng_rec)
self.assertEqual(self.publish_callback_value[1], self.particle_b_eng_rec)
self.assertEqual(self.publish_callback_value[2], self.particle_c_eng_rec)
self.assertEqual(self.publish_callback_value[3], self.particle_d_eng_rec)
self.assertEqual(self.file_ingested, True)
def test_long_stream(self):
"""
Test a long stream of data
"""
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA)
self.parser = Parad_k_stc_imodemParser(self.config, self.start_state, self.stream_handle,
self.state_callback, self.pub_callback)
result = self.parser.get_records(32)
self.assertEqual(result[0], self.particle_a_eng)
self.assertEqual(result[-1], self.particle_last_eng)
self.assertEqual(self.parser._state[StateKey.POSITION], 856)
self.assertEqual(self.state_callback_value[StateKey.POSITION], 856)
self.assertEqual(self.publish_callback_value[-1], self.particle_last_eng)
def test_long_stream_recovered(self):
"""
Test a long stream of recovered data
"""
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA)
self.parser = self.create_rec_parser(None, stream_handle)
result = self.parser.get_records(32)
self.assertEqual(result[0], self.particle_a_eng_rec)
self.assertEqual(result[-1], self.particle_last_eng_rec)
self.assertEqual(self.parser._state[StateKey.POSITION], 856)
self.assertEqual(self.state_callback_value[StateKey.POSITION], 856)
self.assertEqual(self.publish_callback_value[-1], self.particle_last_eng_rec)
def test_after_header(self):
"""
Test starting the parser in a state in the middle of processing
"""
new_state = {StateKey.POSITION:24}
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = Parad_k_stc_imodemParser(self.config, new_state, self.stream_handle,
self.state_callback, self.pub_callback)
# get engineering records
result = self.parser.get_records(1)
self.assert_result(result, 50, self.particle_a_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 76, self.particle_b_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng, True)
def test_after_header_recovered(self):
"""
Test starting the parser in a state in the middle of processing
"""
new_state = {StateKey.POSITION:24}
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = self.create_rec_parser(new_state, stream_handle)
# get engineering records
result = self.parser.get_records(1)
self.assert_result(result, 50, self.particle_a_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 76, self.particle_b_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng_rec, True)
def test_mid_state_start(self):
"""
Test starting the parser in a state in the middle of processing
"""
new_state = {StateKey.POSITION:76}
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = Parad_k_stc_imodemParser(self.config, new_state, self.stream_handle,
self.state_callback, self.pub_callback)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng, True)
def test_mid_state_start_recovered(self):
"""
Test starting the parser in a state in the middle of processing
"""
new_state = {StateKey.POSITION:76}
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = self.create_rec_parser(new_state, stream_handle)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng_rec, True)
def test_set_state(self):
"""
Test changing to a new state after initializing the parser and
reading data, as if new data has been found and the state has
changed
"""
new_state = {StateKey.POSITION:76}
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = Parad_k_stc_imodemParser(self.config, self.start_state, self.stream_handle,
self.state_callback, self.pub_callback)
        # set the new state; this essentially skips engineering a and b
self.parser.set_state(new_state)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng, True)
def test_set_state_recovered(self):
"""
Test changing to a new state after initializing the parser and
reading data, as if new data has been found and the state has
changed
"""
new_state = {StateKey.POSITION:76}
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_SHORT)
self.parser = self.create_rec_parser(None, stream_handle)
        # set the new state; this essentially skips engineering a and b
self.parser.set_state(new_state)
result = self.parser.get_records(1)
self.assert_result(result, 102, self.particle_c_eng_rec, False)
result = self.parser.get_records(1)
self.assert_result(result, 128, self.particle_d_eng_rec, True)
def test_bad_flags(self):
"""
test that we don't parse any records when the flags are not what we expect
"""
with self.assertRaises(SampleException):
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_BAD_FLAGS)
self.parser = Parad_k_stc_imodemParser(self.config, self.start_state, self.stream_handle,
self.state_callback, self.pub_callback)
def test_bad_flags_recovered(self):
"""
test that we don't parse any records when the flags are not what we expect
"""
with self.assertRaises(SampleException):
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_BAD_FLAGS)
self.parser = self.create_rec_parser(None, stream_handle)
def test_bad_data(self):
"""
Ensure that missing data causes us to miss records
TODO: This test should be improved if we come up with a more accurate regex for the data sample
"""
self.stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_BAD_ENG)
self.parser = Parad_k_stc_imodemParser(self.config, self.start_state, self.stream_handle,
self.state_callback, self.pub_callback)
# next get engineering records
result = self.parser.get_records(4)
if len(result) == 4:
self.fail("We got 4 records, the bad data should only make 3")
def test_bad_data_recovered(self):
"""
Ensure that missing data causes us to miss records
"""
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_BAD_ENG)
self.parser = self.create_rec_parser(None, stream_handle)
# next get engineering records
result = self.parser.get_records(4)
if len(result) == 4:
self.fail("We got 4 records, the bad data should only make 3")
def test_nan(self):
"""
Verify that an exception occurs when the par_value has a value of NaN.
"""
stream_handle = StringIO(Parad_k_stc_imodemParserUnitTestCase.TEST_DATA_NAN)
self.parser = self.create_rec_parser(None, stream_handle)
with self.assertRaises(SampleException):
self.parser.get_records(1)
| [
"[email protected]"
] | |
e32fadc710671ee0d561a5192a3e0c6875072673 | ac7e039a70ba627f6d9a7a02c9a8849ed5e18a89 | /unep.project-database/tags/0.2/content/Project.py | d13c620db2288f39c6b8598a0df372dc144dd473 | [] | no_license | jean/project-database | 65a2559844175350351ba87e820d25c3037b5fb2 | e818d322ec11d950f2770cd5324fbcd1acaa734d | refs/heads/master | 2021-01-01T06:27:24.528764 | 2014-01-31T11:11:45 | 2014-01-31T11:11:45 | 32,125,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,359 | py | # -*- coding: utf-8 -*-
#
# File: Project.py
#
# Copyright (c) 2008 by []
# Generator: ArchGenXML Version 2.0
# http://plone.org/products/archgenxml
#
# GNU General Public License (GPL)
#
__author__ = """Jean Jordaan <[email protected]>, Jurgen Blignaut
<[email protected]>"""
__docformat__ = 'plaintext'
from AccessControl import ClassSecurityInfo
from Products.Archetypes.atapi import *
from zope.interface import implements
import interfaces
from Products.CMFDynamicViewFTI.browserdefault import BrowserDefaultMixin
from Products.ATVocabularyManager.namedvocabulary import NamedVocabulary
from Products.ProjectDatabase.config import *
# additional imports from tagged value 'import'
from Products.ProjectDatabase.widgets.SelectedLinesField import SelectedLinesField
from Products.CMFCore.utils import getToolByName
from Products.FinanceFields.MoneyField import MoneyField
from Products.FinanceFields.MoneyWidget import MoneyWidget
from Products.DataGridField import DataGridField, DataGridWidget, Column, SelectColumn, CalendarColumn
from Products.ATReferenceBrowserWidget.ATReferenceBrowserWidget import ReferenceBrowserWidget
import Project
import Financials
from Products.CMFCore.utils import getToolByName
from Products.FinanceFields.Money import Money
##code-section module-header #fill in your manual code here
del Project
from Products.ProjectDatabase.content.FMIFolder import FMIFolder
from Products.ProjectDatabase.content.MonitoringAndEvaluation import MonitoringAndEvaluation
from Products.ProjectDatabase.content.ProjectGeneralInformation import ProjectGeneralInformation
from Products.ProjectDatabase.content.MilestoneFolder import MilestoneFolder
import permissions
##/code-section module-header
schema = Schema((
),
)
##code-section after-local-schema #fill in your manual code here
##/code-section after-local-schema
Project_schema = BaseFolderSchema.copy() + \
schema.copy()
##code-section after-schema #fill in your manual code here
##/code-section after-schema
class Project(BaseFolder, BrowserDefaultMixin):
"""
"""
security = ClassSecurityInfo()
implements(interfaces.IProject)
meta_type = 'Project'
_at_rename_after_creation = True
schema = Project_schema
##code-section class-header #fill in your manual code here
##/code-section class-header
# Methods
security.declarePublic('getLeadAgencies')
def getLeadAgencies(self):
"""
"""
catalog = getToolByName(self, 'portal_catalog')
proxies = catalog(portal_type='Agency')
pl = [p.getObject().Title() for p in proxies]
return ','.join(pl)
security.declarePublic('getVocabulary')
def getVocabulary(self, vocabName):
"""
"""
pv_tool = getToolByName(self, 'portal_vocabularies')
vocab = pv_tool.getVocabularyByName(vocabName)
return vocab.getDisplayList(vocab)
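        # e.g. self.getVocabulary('countries') would fetch the
        # ATVocabularyManager vocabulary registered under that (hypothetical)
        # name and return it as a DisplayList.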
security.declarePublic('getProjectGeneralInformation')
def getProjectGeneralInformation(self):
"""
"""
return self['project_general_info']
security.declarePublic('getAProject')
def getAProject(self):
"""
"""
return self
registerType(Project, PROJECTNAME)
# end of class Project
##code-section module-footer #fill in your manual code here
##/code-section module-footer
| [
"jurgen.blignaut@61ed036f-b72b-0410-9ea5-b9ec1d72d98d"
] | jurgen.blignaut@61ed036f-b72b-0410-9ea5-b9ec1d72d98d |
4f599b8dfbd69a5f176a51a7c15b40ac767c1900 | caaf1b0754db1e676c37a6f1e58f19183754e654 | /sdk/network/azure-mgmt-network/generated_samples/virtual_network_peering_delete.py | 532bf47b34961b57f4acac61960084e27f172f18 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | rdomenzain/azure-sdk-for-python | 45dfb39121a0abda048c22e7309733a56259f525 | 58984255aeb904346b6958c5ba742749a2cc7d1b | refs/heads/master | 2023-07-07T06:53:12.967120 | 2023-07-04T16:27:37 | 2023-07-04T16:27:37 | 258,050,134 | 0 | 0 | MIT | 2020-04-23T00:12:14 | 2020-04-23T00:12:13 | null | UTF-8 | Python | false | false | 1,583 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-network
# USAGE
python virtual_network_peering_delete.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = NetworkManagementClient(
credential=DefaultAzureCredential(),
subscription_id="subid",
)
client.virtual_network_peerings.begin_delete(
resource_group_name="peerTest",
virtual_network_name="vnet1",
virtual_network_peering_name="peer",
).result()
# x-ms-original-file: specification/network/resource-manager/Microsoft.Network/stable/2022-11-01/examples/VirtualNetworkPeeringDelete.json
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
24606d612bfe57df9133c52158fa43cb8df4b0fd | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02554/s686857894.py | 2b0827ac913366bf60ab8e65803058621790719e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | N = int(input())
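# Inclusion-exclusion: of the 10**N length-N digit strings, drop the 9**N with
# no 0 and the 9**N with no 9, then add back the 8**N missing both, leaving
# the strings that contain at least one 0 and at least one 9.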
print(((10**N) - 2*(9**N) + (8**N)) % ((10**9)+7))
| [
"[email protected]"
] | |
66fa92e9025251b90129308bd92a3f521649690c | 753a70bc416e8dced2853f278b08ef60cdb3c768 | /models/research/domain_adaptation/domain_separation/dsn_test.py | 3d687398a9b9356455f739417bc96ddb2ca5ad40 | [
"MIT",
"Apache-2.0"
] | permissive | finnickniu/tensorflow_object_detection_tflite | ef94158e5350613590641880cb3c1062f7dd0efb | a115d918f6894a69586174653172be0b5d1de952 | refs/heads/master | 2023-04-06T04:59:24.985923 | 2022-09-20T16:29:08 | 2022-09-20T16:29:08 | 230,891,552 | 60 | 19 | MIT | 2023-03-25T00:31:18 | 2019-12-30T09:58:41 | C++ | UTF-8 | Python | false | false | 6,027 | py | # Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for DSN model assembly functions."""
import numpy as np
import tensorflow as tf
import dsn
class HelperFunctionsTest(tf.test.TestCase):
def testBasicDomainSeparationStartPoint(self):
with self.test_session() as sess:
# Test for when global_step < domain_separation_startpoint
step = tf.contrib.slim.get_or_create_global_step()
sess.run(tf.global_variables_initializer()) # global_step = 0
params = {'domain_separation_startpoint': 2}
weight = dsn.dsn_loss_coefficient(params)
weight_np = sess.run(weight)
self.assertAlmostEqual(weight_np, 1e-10)
step_op = tf.assign_add(step, 1)
step_np = sess.run(step_op) # global_step = 1
weight = dsn.dsn_loss_coefficient(params)
weight_np = sess.run(weight)
self.assertAlmostEqual(weight_np, 1e-10)
# Test for when global_step >= domain_separation_startpoint
step_np = sess.run(step_op) # global_step = 2
tf.logging.info(step_np)
weight = dsn.dsn_loss_coefficient(params)
weight_np = sess.run(weight)
self.assertAlmostEqual(weight_np, 1.0)
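      # Net effect verified above: dsn_loss_coefficient() returns ~1e-10 until
      # global_step reaches params['domain_separation_startpoint'], then 1.0.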
class DsnModelAssemblyTest(tf.test.TestCase):
def _testBuildDefaultModel(self):
images = tf.to_float(np.random.rand(32, 28, 28, 1))
labels = {}
labels['classes'] = tf.one_hot(
tf.to_int32(np.random.randint(0, 9, (32))), 10)
params = {
'use_separation': True,
'layers_to_regularize': 'fc3',
'weight_decay': 0.0,
'ps_tasks': 1,
'domain_separation_startpoint': 1,
'alpha_weight': 1,
'beta_weight': 1,
'gamma_weight': 1,
'recon_loss_name': 'sum_of_squares',
'decoder_name': 'small_decoder',
'encoder_name': 'default_encoder',
}
return images, labels, params
def testBuildModelDann(self):
images, labels, params = self._testBuildDefaultModel()
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels,
'dann_loss', params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 6)
def testBuildModelDannSumOfPairwiseSquares(self):
images, labels, params = self._testBuildDefaultModel()
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels,
'dann_loss', params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 6)
def testBuildModelDannMultiPSTasks(self):
images, labels, params = self._testBuildDefaultModel()
params['ps_tasks'] = 10
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels,
'dann_loss', params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 6)
def testBuildModelMmd(self):
images, labels, params = self._testBuildDefaultModel()
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels,
'mmd_loss', params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 6)
def testBuildModelCorr(self):
images, labels, params = self._testBuildDefaultModel()
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels,
'correlation_loss', params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 6)
def testBuildModelNoDomainAdaptation(self):
images, labels, params = self._testBuildDefaultModel()
params['use_separation'] = False
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels, 'none',
params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 1)
self.assertEqual(len(tf.contrib.losses.get_regularization_losses()), 0)
def testBuildModelNoAdaptationWeightDecay(self):
images, labels, params = self._testBuildDefaultModel()
params['use_separation'] = False
params['weight_decay'] = 1e-5
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels, 'none',
params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 1)
self.assertTrue(len(tf.contrib.losses.get_regularization_losses()) >= 1)
def testBuildModelNoSeparation(self):
images, labels, params = self._testBuildDefaultModel()
params['use_separation'] = False
with self.test_session():
dsn.create_model(images, labels,
tf.cast(tf.ones([32,]), tf.bool), images, labels,
'dann_loss', params, 'dann_mnist')
loss_tensors = tf.contrib.losses.get_losses()
self.assertEqual(len(loss_tensors), 2)
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
abde9955cdf401538f6a48140cc38c426eea896a | 8e29c21c631d2b3a21f18a210a2c0bbab0d1f347 | /python/pfs/drp/stella/datamodel/pfsTargetSpectra.py | 37983859c2a5a41486dce2a196d966979720b153 | [] | no_license | Subaru-PFS/drp_stella | 630d25118dcc074cf14629f2f1389fad21a023a8 | 85602eea2485ac24e0831046dc74f1b2d1a3d89f | refs/heads/master | 2023-09-01T06:23:57.661286 | 2023-08-23T21:22:25 | 2023-08-23T21:22:25 | 53,125,359 | 3 | 1 | null | 2023-09-07T05:52:04 | 2016-03-04T09:51:39 | Python | UTF-8 | Python | false | false | 13,208 | py | from collections.abc import Mapping
from typing import Dict, Iterator, Iterable, List, Type
import astropy.io.fits
import numpy as np
import yaml
from astropy.io.fits import BinTableHDU, Column, HDUList, ImageHDU
from pfs.datamodel.drp import PfsSingleNotes, PfsSingle, PfsObjectNotes, PfsObject
from pfs.datamodel.masks import MaskHelper
from pfs.datamodel.observations import Observations
from pfs.datamodel.pfsConfig import TargetType
from pfs.datamodel.pfsTable import PfsTable
from pfs.datamodel.target import Target
from pfs.drp.stella.datamodel.fluxTable import FluxTable
from .pfsFiberArray import PfsFiberArray
__all__ = ["PfsTargetSpectra", "PfsCalibratedSpectra", "PfsObjectSpectra"]
class PfsTargetSpectra(Mapping):
"""A collection of `PfsFiberArray` indexed by target"""
PfsFiberArrayClass: Type[PfsFiberArray] # Subclasses must override
NotesClass: Type[PfsTable] # Subclasses must override
def __init__(self, spectra: Iterable[PfsFiberArray]):
super().__init__()
self.spectra: Dict[Target, PfsFiberArray] = {spectrum.target: spectrum for spectrum in spectra}
def __getitem__(self, target: Target) -> PfsFiberArray:
"""Retrieve spectrum for target"""
return self.spectra[target]
def __iter__(self) -> Iterator[Target]:
"""Return iterator over targets in container"""
return iter(self.spectra)
def __len__(self) -> int:
"""Return length of container"""
return len(self.spectra)
def __contains__(self, target: Target) -> bool:
"""Return whether target is in container"""
return target in self.spectra
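    # Minimal sketch of the mapping interface (hypothetical filename;
    # PfsCalibratedSpectra is one of the concrete subclasses named in __all__):
    #
    #   spectra = PfsCalibratedSpectra.readFits("pfsCalibrated-example.fits")
    #   for target in spectra:
    #       flux = spectra[target].flux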
@classmethod
def readFits(cls, filename: str) -> "PfsTargetSpectra":
"""Read from FITS file
Parameters
----------
filename : `str`
Filename of FITS file.
Returns
-------
self : ``cls``
Constructed instance, from FITS file.
"""
spectra = []
with astropy.io.fits.open(filename) as fits:
targetHdu = fits["TARGET"].data
targetFluxHdu = fits["TARGETFLUX"].data
observationsHdu = fits["OBSERVATIONS"].data
wavelengthHdu = fits["WAVELENGTH"].data
fluxHdu = fits["FLUX"].data
maskHdu = fits["MASK"].data
skyHdu = fits["SKY"].data
covarHdu = fits["COVAR"].data
covar2Hdu = fits["COVAR2"].data if "COVAR2" in fits else None
metadataHdu = fits["METADATA"].data
fluxTableHdu = fits["FLUXTABLE"].data
notesTable = cls.NotesClass.readHdu(fits)
for ii, row in enumerate(targetHdu):
targetId = row["targetId"]
select = targetFluxHdu.targetId == targetId
fiberFlux = dict(
zip(
("".join(np.char.decode(ss.astype("S"))) for ss in targetFluxHdu.filterName[select]),
targetFluxHdu.fiberFlux[select],
)
)
target = Target(
row["catId"],
row["tract"],
"".join(row["patch"]),
row["objId"],
row["ra"],
row["dec"],
TargetType(row["targetType"]),
fiberFlux=fiberFlux,
)
select = observationsHdu.targetId == targetId
observations = Observations(
observationsHdu.visit[select],
["".join(np.char.decode(ss.astype("S"))) for ss in observationsHdu.arm[select]],
observationsHdu.spectrograph[select],
observationsHdu.pfsDesignId[select],
observationsHdu.fiberId[select],
observationsHdu.pfiNominal[select],
observationsHdu.pfiCenter[select],
)
metadataRow = metadataHdu[ii]
assert metadataRow["targetId"] == targetId
metadata = yaml.load(
# This complicated conversion is required in order to preserve the newlines
"".join(np.char.decode(metadataRow["metadata"].astype("S"))),
Loader=yaml.SafeLoader,
)
flags = MaskHelper.fromFitsHeader(metadata, strip=True)
fluxTableRow = fluxTableHdu[ii]
assert fluxTableRow["targetId"] == targetId
fluxTable = FluxTable(
fluxTableRow["wavelength"],
fluxTableRow["flux"],
fluxTableRow["error"],
fluxTableRow["mask"],
flags,
)
notes = cls.PfsFiberArrayClass.NotesClass(
**{col.name: notesTable[col.name][ii] for col in notesTable.schema}
)
spectrum = cls.PfsFiberArrayClass(
target,
observations,
wavelengthHdu[ii],
fluxHdu[ii],
maskHdu[ii],
skyHdu[ii],
covarHdu[ii],
covar2Hdu[ii] if covar2Hdu is not None else [],
flags,
metadata,
fluxTable,
notes,
)
spectra.append(spectrum)
return cls(spectra)
def writeFits(self, filename: str):
"""Write to FITS file
This API is intended for use by the LSST data butler, which handles
translating the desired identity into a filename.
Parameters
----------
filename : `str`
Filename of FITS file.
"""
fits = HDUList()
targetId = np.arange(len(self), dtype=np.int16)
fits.append(
BinTableHDU.from_columns(
[
Column("targetId", "I", array=targetId),
Column("catId", "J", array=[target.catId for target in self]),
Column("tract", "J", array=[target.tract for target in self]),
Column("patch", "PA()", array=[target.patch for target in self]),
Column("objId", "K", array=[target.objId for target in self]),
Column("ra", "D", array=[target.ra for target in self]),
Column("dec", "D", array=[target.dec for target in self]),
Column("targetType", "I", array=[int(target.targetType) for target in self]),
],
name="TARGET",
)
)
numFluxes = sum(len(target.fiberFlux) for target in self)
targetFluxIndex = np.empty(numFluxes, dtype=np.int16)
filterName: List[str] = []
fiberFlux = np.empty(numFluxes, dtype=np.float32)
start = 0
for tt, target in zip(targetId, self):
num = len(target.fiberFlux)
stop = start + num
targetFluxIndex[start:stop] = tt
filterName += list(target.fiberFlux.keys())
fiberFlux[start:stop] = np.array(list(target.fiberFlux.values()))
start = stop
fits.append(
BinTableHDU.from_columns(
[
Column("targetId", "I", array=targetFluxIndex),
Column("filterName", "PA()", array=filterName),
Column("fiberFlux", "E", array=fiberFlux),
],
name="TARGETFLUX",
)
)
numObservations = sum(len(ss.observations) for ss in self.values())
observationsIndex = np.empty(numObservations, dtype=np.int16)
visit = np.empty(numObservations, dtype=np.int32)
arm: List[str] = []
spectrograph = np.empty(numObservations, dtype=np.int16)
pfsDesignId = np.empty(numObservations, dtype=np.int64)
fiberId = np.empty(numObservations, dtype=np.int32)
pfiNominal = np.empty((numObservations, 2), dtype=float)
pfiCenter = np.empty((numObservations, 2), dtype=float)
start = 0
for tt, spectrum in zip(targetId, self.values()):
observations = spectrum.observations
num = len(observations)
stop = start + num
observationsIndex[start:stop] = tt
visit[start:stop] = observations.visit
arm += list(observations.arm)
spectrograph[start:stop] = observations.spectrograph
pfsDesignId[start:stop] = observations.pfsDesignId
fiberId[start:stop] = observations.fiberId
pfiNominal[start:stop] = observations.pfiNominal
pfiCenter[start:stop] = observations.pfiCenter
start = stop
fits.append(
BinTableHDU.from_columns(
[
Column("targetId", "I", array=observationsIndex),
Column("visit", "J", array=visit),
Column("arm", "PA()", array=arm),
Column("spectrograph", "I", array=spectrograph),
Column("pfsDesignId", "K", array=pfsDesignId),
Column("fiberId", "J", array=fiberId),
Column("pfiNominal", "2D", array=pfiNominal),
Column("pfiCenter", "2D", array=pfiCenter),
],
name="OBSERVATIONS",
)
)
fits.append(ImageHDU(data=[spectrum.wavelength for spectrum in self.values()], name="WAVELENGTH"))
fits.append(ImageHDU(data=[spectrum.flux for spectrum in self.values()], name="FLUX"))
fits.append(ImageHDU(data=[spectrum.mask for spectrum in self.values()], name="MASK"))
fits.append(ImageHDU(data=[spectrum.sky for spectrum in self.values()], name="SKY"))
fits.append(ImageHDU(data=[spectrum.covar for spectrum in self.values()], name="COVAR"))
haveCovar2 = [spectrum.covar2 is not None for spectrum in self.values()]
if len(set(haveCovar2)) == 2:
raise RuntimeError("covar2 must be uniformly populated")
if any(haveCovar2):
fits.append(ImageHDU(data=[spectrum.covar2 for spectrum in self.values()], name="COVAR2"))
# Metadata table
metadata: List[str] = []
for spectrum in self.values():
md = spectrum.metadata.copy()
md.update(spectrum.flags.toFitsHeader())
metadata.append(yaml.dump(md))
fits.append(
BinTableHDU.from_columns(
[
Column("targetId", "I", array=targetId),
Column("metadata", "PA()", array=metadata),
],
name="METADATA",
)
)
fits.append(
BinTableHDU.from_columns(
[
Column("targetId", "I", array=targetId),
Column(
"wavelength",
"PD()",
array=[
spectrum.fluxTable.wavelength if spectrum.fluxTable else []
for spectrum in self.values()
],
),
Column(
"flux",
"PD()",
array=[
spectrum.fluxTable.flux if spectrum.fluxTable else []
for spectrum in self.values()
],
),
Column(
"error",
"PD()",
array=[
spectrum.fluxTable.error if spectrum.fluxTable else []
for spectrum in self.values()
],
),
Column(
"mask",
"PJ()",
array=[
spectrum.fluxTable.mask if spectrum.fluxTable else []
for spectrum in self.values()
],
),
],
name="FLUXTABLE",
)
)
notes = self.NotesClass.empty(len(self))
for ii, spectrum in enumerate(self.values()):
notes.setRow(ii, **spectrum.notes.getDict())
notes.writeHdu(fits)
with open(filename, "wb") as fd:
fits.writeto(fd)
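# Round-trip sketch (the filename is a placeholder; assumes `spectra` is one of
# the concrete containers defined below):
#
#     spectra.writeFits("/tmp/pfsCalibrated.fits")
#     recovered = PfsCalibratedSpectra.readFits("/tmp/pfsCalibrated.fits")
#     assert set(recovered) == set(spectra)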
class PfsCalibratedNotesTable(PfsTable):
"""Table of notes for PfsCalibratedSpectra"""
schema = PfsSingleNotes.schema
fitsExtName = "NOTES"
class PfsCalibratedSpectra(PfsTargetSpectra):
"""A collection of PfsSingle indexed by target"""
PfsFiberArrayClass = PfsSingle
NotesClass = PfsCalibratedNotesTable
class PfsObjectNotesTable(PfsTable):
"""Table of notes for PfsObjectSpectra"""
schema = PfsObjectNotes.schema
fitsExtName = "NOTES"
class PfsObjectSpectra(PfsTargetSpectra):
"""A collection of PfsObject indexed by target"""
PfsFiberArrayClass = PfsObject
NotesClass = PfsObjectNotesTable
| [ "[email protected]" ] | |
310a2ff7d5c25b08fd026424c91c406d6dce04a7 | 8e4a5e0a81fc9401fc0b6e55dd55e8d6e29c3ed6 | /PycharmProjects/licamb/licamb/db.py | 56e07023c14dd0a9ab4cc3e86d345f33321735e3 | [] | no_license | rogeriodelphi/portifolio | 1fb16c8c723b97f20cdd305224b660a1657f3913 | 5c704305ce26576afb4efd1e410f691971f06fac | refs/heads/master | 2023-08-11T05:33:37.539047 | 2021-09-26T01:57:02 | 2021-09-26T01:57:02 | 284,164,866 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SQLITE = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
POSTGRESQL = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'db',
'USER': 'postgres',
'PASSWORD': '123456',
'HOST': 'localhost',
'PORT': '5432',
}
}
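# A sketch of how these dicts might be consumed from settings.py (the import
# path and the environment switch are assumptions, not part of this module):
#
#     from licamb.db import SQLITE, POSTGRESQL
#     DATABASES = POSTGRESQL if os.environ.get('USE_POSTGRES') else SQLITE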
| [ "[email protected]" ] | |
e610e2ff68b9264be3b2f2e6659c8a516cad7e27 | eb136fec7f6dfcb11834cc0cd4d3daec1d7a4dc6 | /fiasco_api/expenses/migrations/0001_initial.py | 40ad410a30d5561dfacbc245e35bd26e587ef388 | [ "MIT" ] | permissive | xelnod/fiasco_backend | 4635cff2fd220585c4433010e64208dfebbf2441 | edeca8cac8c7b1a1cc53051d4443cc2996eba37c | refs/heads/master | 2020-09-21T13:37:37.971952 | 2020-09-15T19:38:37 | 2020-09-15T19:38:37 | 224,804,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,396 | py | # Generated by Django 3.1.1 on 2020-09-13 21:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('categories', '0001_initial'),
('channels', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ExpenseProto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('comment', models.TextField(blank=True, null=True)),
('amount', models.IntegerField(default=0)),
('channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='channels.channel')),
('kit', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='categories.kit')),
],
),
migrations.CreateModel(
name='Expense',
fields=[
('expenseproto_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='expenses.expenseproto')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('is_fulfilled', models.BooleanField(default=True)),
('money_stored', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
bases=('expenses.expenseproto', models.Model),
),
migrations.CreateModel(
name='OngoingExpense',
fields=[
('expenseproto_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='expenses.expenseproto')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('scope', models.IntegerField(choices=[(0, 'Month'), (1, 'Year')], default=0)),
],
options={
'abstract': False,
},
bases=('expenses.expenseproto', models.Model),
),
]
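# Once the app is registered in INSTALLED_APPS, this migration is applied with
# Django's standard command:
#
#     python manage.py migrate expenses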
| [ "[email protected]" ] | |
895a6ff291a61e66f00fd311bf599cf8fdb80ba1 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-dataartsstudio/huaweicloudsdkdataartsstudio/v1/model/list_workspaceusers_request.py | 03fe5cb9797831a7a33f080679f078e6c5bedd22 | [ "Apache-2.0" ] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,633 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListWorkspaceusersRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'workspace_id': 'str',
'limit': 'str',
'offset': 'str'
}
attribute_map = {
'workspace_id': 'workspace_id',
'limit': 'limit',
'offset': 'offset'
}
def __init__(self, workspace_id=None, limit=None, offset=None):
"""ListWorkspaceusersRequest
The model defined in huaweicloud sdk
        :param workspace_id: Workspace ID
        :type workspace_id: str
        :param limit: Maximum number of records to return
        :type limit: str
        :param offset: Offset into the result set
        :type offset: str
"""
self._workspace_id = None
self._limit = None
self._offset = None
self.discriminator = None
self.workspace_id = workspace_id
if limit is not None:
self.limit = limit
if offset is not None:
self.offset = offset
@property
def workspace_id(self):
"""Gets the workspace_id of this ListWorkspaceusersRequest.
        Workspace ID
:return: The workspace_id of this ListWorkspaceusersRequest.
:rtype: str
"""
return self._workspace_id
@workspace_id.setter
def workspace_id(self, workspace_id):
"""Sets the workspace_id of this ListWorkspaceusersRequest.
        Workspace ID
:param workspace_id: The workspace_id of this ListWorkspaceusersRequest.
:type workspace_id: str
"""
self._workspace_id = workspace_id
@property
def limit(self):
"""Gets the limit of this ListWorkspaceusersRequest.
        Maximum number of records to return
:return: The limit of this ListWorkspaceusersRequest.
:rtype: str
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this ListWorkspaceusersRequest.
        Maximum number of records to return
:param limit: The limit of this ListWorkspaceusersRequest.
:type limit: str
"""
self._limit = limit
@property
def offset(self):
"""Gets the offset of this ListWorkspaceusersRequest.
        Offset into the result set
:return: The offset of this ListWorkspaceusersRequest.
:rtype: str
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this ListWorkspaceusersRequest.
        Offset into the result set
:param offset: The offset of this ListWorkspaceusersRequest.
:type offset: str
"""
self._offset = offset
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListWorkspaceusersRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
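# A minimal construction sketch (the argument values are placeholders):
#
#     request = ListWorkspaceusersRequest(workspace_id="<workspace-id>",
#                                         limit="10", offset="0")
#     print(request.to_str())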
| [ "[email protected]" ] |