| column | dtype | stats |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–616 |
| content_id | string | length 40–40 |
| detected_licenses | sequence | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M, nullable (⌀) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable (⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable (⌀) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3–10.2M |
| authors | sequence | length 1–1 |
| author_id | string | length 1–132 |
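The rows that follow are samples from the dump, one source file per row, with each file's `content` field shown in full. A minimal sketch of consuming data with this schema through the Hugging Face `datasets` library (the dataset path below is a placeholder — the dump does not identify itself):

```python
# Sketch only: "some-org/some-code-dataset" is a hypothetical path.
from datasets import load_dataset

ds = load_dataset("some-org/some-code-dataset", split="train", streaming=True)
for row in ds.take(1):
    print(row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])  # first 200 characters of the source file
```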
=== /Gauss_v45r8/Gen/DecFiles/options/12103121.py (repo: Sally27/backup_cmtuser_full, license: none, Python, 1,753 bytes) ===
# file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/12103121.py generated: Fri, 27 Mar 2015 15:48:08
#
# Event Type: 12103121
#
# ASCII decay Descriptor: [B+ -> K+ (KS0 -> pi+ pi-)]cc
#
from Configurables import Generation
Generation().EventType = 12103121
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bu_KsK=DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 521,-521 ]
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 521
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 521,-521 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_521.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 12103121
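The options file above is auto-generated LHCb Gauss configuration: it selects event type 12103121, wires up Pythia production and EvtGen decay of B± mesons, and defines a particle gun driven by a pt–eta momentum-spectrum histogram. A minimal sketch of how such a file is consumed (assuming a working LHCb Gauss environment; `importOptions` is the standard Gaudi helper):

```python
# Sketch only: requires an LHCb/Gaudi software environment.
from Gaudi.Configuration import importOptions

importOptions("$DECFILESROOT/options/12103121.py")

from Configurables import Generation
gen = Generation()  # configurables are per-name singletons
assert gen.EventType == 12103121
assert gen.SampleGenerationTool == "SignalRepeatedHadronization"
```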
=== /examples/data/Assignment_4/crvcam001/boxes.py (repo: MrHamdulay/csc3-capstone, license: none, Python, 653 bytes) ===
def print_square():
print('*' * 5)
for i in range(3):
print('*' , " "*3 , '*' , sep="")
print('*' * 5)
def print_rectangle(width, height):
print('*' * width)
gap = " "
for i in range(height -2):
print('*' , gap * (width-2) , '*', sep="")
print('*' * width)
def get_rectangle(width, height):
new_line='\n'
star='*'
figure=""
top_line=(star*width)+new_line
bottom_line=(star*width)
gap=" "
middle=""
for i in range(height-2):
line=star+(gap*(width-2))+star+new_line
middle+=line
figure=top_line + middle + bottom_line
    return figure
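A short usage sketch for the three functions above (`print_square` and `print_rectangle` write to stdout, while `get_rectangle` returns the figure as a string):

```python
print_square()           # fixed 5x5 outline of asterisks
print_rectangle(6, 4)    # 6 wide, 4 tall outline

figure = get_rectangle(4, 3)
print(figure)
# ****
# *  *
# ****
```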
=== /scripts/download.py (repo: agrc/electrofishing-query, license: MIT, Python, 7,795 bytes) ===
#!/usr/bin/env python
# * coding: utf8 *
'''
download.py
A script that downloads CSVs of electrofishing data for a given
set of sampling event ids.
'''
import csv
import sys
from glob import glob
from os import sep, walk
from os.path import basename, dirname, join, normpath, realpath
from zipfile import ZIP_DEFLATED, ZipFile
import arcpy
import pyodbc
import swq_secrets as secrets
def zip_fgdb(path, zip):
path = normpath(path)
for (dirpath, dirnames, filenames) in walk(path):
for file in filenames:
# Ignore .lock files
if not file.endswith('.lock'):
try:
zip.write(
join(dirpath, file),
join(basename(path),
join(dirpath, file)[len(path) + len(sep):]))
except Exception as e:
arcpy.AddWarning(
'error zipping file geodatabase: {}'.format(e))
return None
cardinality_lookup = {
'OneToOne': 'ONE_TO_ONE',
'OneToMany': 'ONE_TO_MANY'
}
def main(ids, type):
#: ids: string
#: type: string (csv or fgdb)
#: returns a path to the zip file
ids = ids.split(';')
arcpy.AddMessage('ids: {}'.format(ids))
formatted_ids = '\'{}\''.format('\',\''.join(ids))
current_folder = dirname(realpath(__file__))
sql_directory = join(current_folder, 'sql')
zip_file_path = join(arcpy.env.scratchFolder, 'data.zip')
sde_file_name = 'DATABASE.sde'
sde = join(current_folder, sde_file_name)
arcpy.AddMessage('scratch folder: ' + arcpy.env.scratchFolder)
if not arcpy.Exists(sde):
arcpy.management.CreateDatabaseConnection(
current_folder,
sde_file_name,
'SQL_SERVER',
secrets.SERVER,
account_authentication='DATABASE_AUTH',
username=secrets.USERNAME,
password=secrets.PASSWORD,
database=secrets.DATABASE)
connection = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};' +
'SERVER={};DATABASE={};UID={};PWD={}'.format(
secrets.SERVER, secrets.DATABASE,
secrets.USERNAME, secrets.PASSWORD))
cursor = connection.cursor()
with ZipFile(zip_file_path, 'w', ZIP_DEFLATED) as zip_file:
if type == 'fgdb':
#: fgdb
arcpy.AddMessage('creating file geodatabase')
fgdb = join(arcpy.env.scratchFolder, 'data.gdb')
if arcpy.Exists(fgdb):
arcpy.management.Delete(fgdb)
arcpy.management.CreateFileGDB(dirname(fgdb), basename(fgdb))
arcpy.AddMessage('copying sampling events feature class')
events_where = 'EVENT_ID IN ({})'.format(formatted_ids)
events_layer = arcpy.management.MakeFeatureLayer(join(sde, 'SamplingEvents'), 'events_layer', events_where)
arcpy.management.CopyFeatures(events_layer, join(fgdb, 'SamplingEvents'))
arcpy.AddMessage('copying stations feature class')
stations_where = 'STATION_ID IN (SELECT STATION_ID FROM {}.WILDADMIN.SamplingEvents_evw where {})'.format(
secrets.DATABASE, events_where)
stations_layer = arcpy.management.MakeFeatureLayer(join(sde, 'Stations'), 'stations_layer', stations_where)
arcpy.management.CopyFeatures(stations_layer, join(fgdb, 'Stations'))
arcpy.AddMessage('copying streams feature class')
stations_where = 'Permanent_Identifier IN (SELECT WATER_ID FROM {}.WILDADMIN.Stations_evw where {})'.format(
secrets.DATABASE, stations_where)
streams_layer = arcpy.management.MakeFeatureLayer(join(sde, 'UDWRStreams'), 'streams_layer', stations_where)
arcpy.management.CopyFeatures(streams_layer, join(fgdb, 'UDWRStreams'))
arcpy.AddMessage('copying lakes feature class')
stations_where = 'Permanent_Identifier IN (SELECT WATER_ID FROM {}.WILDADMIN.Stations_evw where {})'.format(
secrets.DATABASE, stations_where)
lakes_layer = arcpy.management.MakeFeatureLayer(join(sde, 'UDWRLakes'), 'lakes_layer', stations_where)
arcpy.management.CopyFeatures(lakes_layer, join(fgdb, 'UDWRLakes'))
def copy_related_tables(dataset):
relationship_classes = arcpy.Describe(join(sde, dataset)).relationshipClassNames
for relationship_class in relationship_classes:
describe = arcpy.Describe(join(sde, relationship_class))
destination = describe.destinationClassNames[0]
primary_key = describe.originClassKeys[0][0]
foreign_key = describe.originClassKeys[1][0]
destination_is_table = arcpy.Describe(join(sde, destination)).datasetType == 'Table'
if destination.split('.')[-1] != dataset and destination_is_table:
arcpy.AddMessage('copying {} table'.format(destination))
where = '{} IN (SELECT {} FROM {}.WILDADMIN.{} where {})'.format(
foreign_key, primary_key, secrets.DATABASE, dataset, events_where)
layer = arcpy.management.MakeTableView(join(sde, destination), destination + '_layer', where)
arcpy.management.CopyRows(layer, join(fgdb, destination))
if arcpy.Exists(join(fgdb, relationship_class.split('.')[-1])):
continue
arcpy.AddMessage('creating {} relationship class'.format(relationship_class))
arcpy.env.workspace = fgdb
origin = describe.originClassNames[0].split('.')[-1]
cardinality = describe.cardinality
arcpy.management.CreateRelationshipClass(
origin,
destination.split('.')[-1],
relationship_class.split('.')[-1],
'SIMPLE',
describe.forwardPathLabel,
describe.backwardPathLabel,
message_direction='BOTH',
cardinality=cardinality_lookup[cardinality],
origin_primary_key=primary_key,
origin_foreign_key=foreign_key
)
arcpy.env.workspace = None
if destination_is_table:
copy_related_tables(destination.split('.')[-1])
copy_related_tables('SamplingEvents')
copy_related_tables('Stations')
zip_fgdb(fgdb, zip_file)
else:
#: csvs
            for query_file in glob(join(sql_directory, '*.sql')):
csv_name = basename(query_file).replace('sql', 'csv')
arcpy.AddMessage(csv_name)
with open(query_file, 'r') as file:
query = file.read().format(secrets.DATABASE, formatted_ids)
cursor.execute(query)
csv_file_path = join(arcpy.env.scratchFolder, csv_name)
with open(csv_file_path, 'w', newline='') as csv_file:
writer = csv.writer(csv_file)
#: write headers
writer.writerow([x[0] for x in cursor.description])
for row in cursor:
writer.writerow(row)
zip_file.write(csv_file_path, csv_name)
arcpy.AddMessage(zip_file_path)
connection.close()
del connection
return zip_file_path
if __name__ == "__main__":
main(sys.argv[1], sys.argv[2])
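Two notes on the script above: it is invoked as `python download.py "<id1>;<id2>" <csv|fgdb>`, and `zip_fgdb` rewrites each absolute path into an archive name rooted at the geodatabase folder so the zip unpacks as `data.gdb/...`. A standalone sketch of that archive-name arithmetic (paths are made up; no arcpy required):

```python
from os import sep
from os.path import basename, join

path = join("C:", sep, "scratch", "data.gdb")   # hypothetical fgdb folder
full = join(path, "a00000001.gdbtable")         # a file inside it
arcname = join(basename(path), full[len(path) + len(sep):])
print(arcname)  # data.gdb/a00000001.gdbtable (separator varies by OS)
```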
=== /Novation_Impulse/__init__.py (repo: Toniigor/AbletonLive9_RemoteScripts, license: none, Python, 554 bytes) ===
#Embedded file name: /Users/versonator/Jenkins/live/Projects/AppLive/Resources/MIDI Remote Scripts/Novation_Impulse/__init__.py
from Novation_Impulse import Novation_Impulse
def create_instance(c_instance):
return Novation_Impulse(c_instance)
from _Framework.Capabilities import *
def get_capabilities():
return {CONTROLLER_ID_KEY: controller_id(vendor_id=4661, product_ids=[25], model_name='Impulse 25'),
     PORTS_KEY: [inport(props=[NOTES_CC, REMOTE, SCRIPT]), inport(props=[NOTES_CC, REMOTE]), outport(props=[NOTES_CC, REMOTE, SCRIPT])]}
=== /curriculumBuilder/__init__.py (repo: mrjoshida/curriculumbuilder, license: none, Python, 268 bytes) ===
from __future__ import absolute_import
# import curriculumBuilder.receivers
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
# from .celery import app as celery_app
import curriculumBuilder.numbering_patch
=== /include/TestClientImportOptions.py (repo: 3wayHimself/hydrus, license: WTFPL, Python, 32,403 bytes) ===
import ClientConstants as CC
import ClientImportFileSeeds
import ClientImportOptions
import ClientTags
import HydrusConstants as HC
import HydrusData
import HydrusExceptions
import HydrusGlobals as HG
import os
import unittest
from mock import patch
class TestCheckerOptions( unittest.TestCase ):
def test_checker_options( self ):
regular_checker_options = ClientImportOptions.CheckerOptions( intended_files_per_check = 5, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
fast_checker_options = ClientImportOptions.CheckerOptions( intended_files_per_check = 2, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
slow_checker_options = ClientImportOptions.CheckerOptions( intended_files_per_check = 10, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
callous_checker_options = ClientImportOptions.CheckerOptions( intended_files_per_check = 5, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 60 ) )
empty_file_seed_cache = ClientImportFileSeeds.FileSeedCache()
file_seed_cache = ClientImportFileSeeds.FileSeedCache()
last_check_time = 10000000
one_day_before = last_check_time - 86400
for i in range( 50 ):
url = 'https://wew.lad/' + os.urandom( 16 ).encode( 'hex' )
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = one_day_before - 10
file_seed_cache.AddFileSeeds( ( file_seed, ) )
for i in range( 50 ):
url = 'https://wew.lad/' + os.urandom( 16 ).encode( 'hex' )
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = last_check_time - 600
file_seed_cache.AddFileSeeds( ( file_seed, ) )
bare_file_seed_cache = ClientImportFileSeeds.FileSeedCache()
url = 'https://wew.lad/' + 'early'
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = one_day_before - 10
bare_file_seed_cache.AddFileSeeds( ( file_seed, ) )
url = 'https://wew.lad/' + 'in_time_delta'
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = one_day_before + 10
bare_file_seed_cache.AddFileSeeds( ( file_seed, ) )
busy_file_seed_cache = ClientImportFileSeeds.FileSeedCache()
url = 'https://wew.lad/' + 'early'
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = one_day_before - 10
busy_file_seed_cache.AddFileSeeds( ( file_seed, ) )
for i in range( 8640 ):
url = 'https://wew.lad/' + os.urandom( 16 ).encode( 'hex' )
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = one_day_before + ( ( i + 1 ) * 10 ) - 1
busy_file_seed_cache.AddFileSeeds( ( file_seed, ) )
new_thread_file_seed_cache = ClientImportFileSeeds.FileSeedCache()
for i in range( 10 ):
url = 'https://wew.lad/' + os.urandom( 16 ).encode( 'hex' )
file_seed = ClientImportFileSeeds.FileSeed( ClientImportFileSeeds.FILE_SEED_TYPE_URL, url )
file_seed.source_time = last_check_time - 600
new_thread_file_seed_cache.AddFileSeeds( ( file_seed, ) )
# empty
# should say ok if last_check_time is 0, so it can initialise
# otherwise sperg out safely
self.assertFalse( regular_checker_options.IsDead( empty_file_seed_cache, 0 ) )
self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( empty_file_seed_cache, 0 ), 'no files yet' )
self.assertEqual( regular_checker_options.GetNextCheckTime( empty_file_seed_cache, 0, 0 ), 0 )
self.assertTrue( regular_checker_options.IsDead( empty_file_seed_cache, last_check_time ) )
self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( empty_file_seed_cache, last_check_time ), 'no files, unable to determine velocity' )
# regular
# current velocity should be 50 files per day for the day ones and 0 files per min for the callous minute one
self.assertFalse( regular_checker_options.IsDead( file_seed_cache, last_check_time ) )
self.assertFalse( fast_checker_options.IsDead( file_seed_cache, last_check_time ) )
self.assertFalse( slow_checker_options.IsDead( file_seed_cache, last_check_time ) )
self.assertTrue( callous_checker_options.IsDead( file_seed_cache, last_check_time ) )
self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( file_seed_cache, last_check_time ), u'at last check, found 50 files in previous 1 day' )
self.assertEqual( fast_checker_options.GetPrettyCurrentVelocity( file_seed_cache, last_check_time ), u'at last check, found 50 files in previous 1 day' )
self.assertEqual( slow_checker_options.GetPrettyCurrentVelocity( file_seed_cache, last_check_time ), u'at last check, found 50 files in previous 1 day' )
self.assertEqual( callous_checker_options.GetPrettyCurrentVelocity( file_seed_cache, last_check_time ), u'at last check, found 0 files in previous 1 minute' )
self.assertEqual( regular_checker_options.GetNextCheckTime( file_seed_cache, last_check_time, 0 ), last_check_time + 8640 )
self.assertEqual( fast_checker_options.GetNextCheckTime( file_seed_cache, last_check_time, 0 ), last_check_time + 3456 )
self.assertEqual( slow_checker_options.GetNextCheckTime( file_seed_cache, last_check_time, 0 ), last_check_time + 17280 )
# bare
# 1 files per day
self.assertFalse( regular_checker_options.IsDead( bare_file_seed_cache, last_check_time ) )
self.assertTrue( callous_checker_options.IsDead( bare_file_seed_cache, last_check_time ) )
self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( bare_file_seed_cache, last_check_time ), u'at last check, found 1 files in previous 1 day' )
self.assertEqual( regular_checker_options.GetNextCheckTime( bare_file_seed_cache, last_check_time, 0 ), last_check_time + 86400 )
self.assertEqual( fast_checker_options.GetNextCheckTime( bare_file_seed_cache, last_check_time, 0 ), last_check_time + 86400 )
self.assertEqual( slow_checker_options.GetNextCheckTime( bare_file_seed_cache, last_check_time, 0 ), last_check_time + 86400 )
# busy
# 8640 files per day, 6 files per minute
self.assertFalse( regular_checker_options.IsDead( busy_file_seed_cache, last_check_time ) )
self.assertFalse( fast_checker_options.IsDead( busy_file_seed_cache, last_check_time ) )
self.assertFalse( slow_checker_options.IsDead( busy_file_seed_cache, last_check_time ) )
self.assertFalse( callous_checker_options.IsDead( busy_file_seed_cache, last_check_time ) )
self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( busy_file_seed_cache, last_check_time ), u'at last check, found 8,640 files in previous 1 day' )
self.assertEqual( callous_checker_options.GetPrettyCurrentVelocity( busy_file_seed_cache, last_check_time ), u'at last check, found 6 files in previous 1 minute' )
self.assertEqual( regular_checker_options.GetNextCheckTime( busy_file_seed_cache, last_check_time, 0 ), last_check_time + 50 )
self.assertEqual( fast_checker_options.GetNextCheckTime( busy_file_seed_cache, last_check_time, 0 ), last_check_time + 30 )
self.assertEqual( slow_checker_options.GetNextCheckTime( busy_file_seed_cache, last_check_time, 0 ), last_check_time + 100 )
self.assertEqual( callous_checker_options.GetNextCheckTime( busy_file_seed_cache, last_check_time, 0 ), last_check_time + 50 )
# new thread
# only had files from ten mins ago, so timings are different
self.assertFalse( regular_checker_options.IsDead( new_thread_file_seed_cache, last_check_time ) )
self.assertFalse( fast_checker_options.IsDead( new_thread_file_seed_cache, last_check_time ) )
self.assertFalse( slow_checker_options.IsDead( new_thread_file_seed_cache, last_check_time ) )
self.assertTrue( callous_checker_options.IsDead( new_thread_file_seed_cache, last_check_time ) )
self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( new_thread_file_seed_cache, last_check_time ), u'at last check, found 10 files in previous 10 minutes' )
self.assertEqual( fast_checker_options.GetPrettyCurrentVelocity( new_thread_file_seed_cache, last_check_time ), u'at last check, found 10 files in previous 10 minutes' )
self.assertEqual( slow_checker_options.GetPrettyCurrentVelocity( new_thread_file_seed_cache, last_check_time ), u'at last check, found 10 files in previous 10 minutes' )
self.assertEqual( callous_checker_options.GetPrettyCurrentVelocity( new_thread_file_seed_cache, last_check_time ), u'at last check, found 0 files in previous 1 minute' )
        # these would be 360, 120, and 600, but the 'don't check faster than the time since the last file post' rule bumps this up
self.assertEqual( regular_checker_options.GetNextCheckTime( new_thread_file_seed_cache, last_check_time, 0 ), last_check_time + 600 )
self.assertEqual( fast_checker_options.GetNextCheckTime( new_thread_file_seed_cache, last_check_time, 0 ), last_check_time + 600 )
self.assertEqual( slow_checker_options.GetNextCheckTime( new_thread_file_seed_cache, last_check_time, 0 ), last_check_time + 600 )
# Let's test these new static timings, where if faster_than == slower_than, we just add that period to the 'last_next_check_time' (e.g. checking every sunday night)
static_checker_options = ClientImportOptions.CheckerOptions( intended_files_per_check = 5, never_faster_than = 3600, never_slower_than = 3600, death_file_velocity = ( 1, 3600 ) )
self.assertTrue( static_checker_options.IsDead( bare_file_seed_cache, last_check_time ) )
last_next_check_time = last_check_time - 200
with patch.object( HydrusData, 'GetNow', return_value = last_check_time + 10 ):
self.assertEqual( static_checker_options.GetNextCheckTime( new_thread_file_seed_cache, last_check_time, last_next_check_time ), last_next_check_time + 3600 )
class TestFileImportOptions( unittest.TestCase ):
def test_file_import_options( self ):
file_import_options = ClientImportOptions.FileImportOptions()
exclude_deleted = False
allow_decompression_bombs = False
min_size = None
max_size = None
max_gif_size = None
min_resolution = None
max_resolution = None
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
automatic_archive = False
file_import_options.SetPostImportOptions( automatic_archive )
present_new_files = True
present_already_in_inbox_files = True
present_already_in_archive_files = True
file_import_options.SetPresentationOptions( present_new_files, present_already_in_inbox_files, present_already_in_archive_files )
#
self.assertFalse( file_import_options.ExcludesDeleted() )
self.assertFalse( file_import_options.AllowsDecompressionBombs() )
self.assertFalse( file_import_options.AutomaticallyArchives() )
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 480 )
file_import_options.CheckFileIsValid( 65536, HC.APPLICATION_7Z, None, None )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, False ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, True ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True ) )
self.assertFalse( file_import_options.ShouldPresent( CC.STATUS_DELETED, False ) )
#
exclude_deleted = True
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
self.assertTrue( file_import_options.ExcludesDeleted() )
self.assertFalse( file_import_options.AllowsDecompressionBombs() )
self.assertFalse( file_import_options.AutomaticallyArchives() )
#
allow_decompression_bombs = True
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
self.assertTrue( file_import_options.ExcludesDeleted() )
self.assertTrue( file_import_options.AllowsDecompressionBombs() )
self.assertFalse( file_import_options.AutomaticallyArchives() )
#
automatic_archive = True
file_import_options.SetPostImportOptions( automatic_archive )
self.assertTrue( file_import_options.ExcludesDeleted() )
self.assertTrue( file_import_options.AllowsDecompressionBombs() )
self.assertTrue( file_import_options.AutomaticallyArchives() )
#
min_size = 4096
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 512, HC.IMAGE_JPEG, 640, 480 )
#
min_size = None
max_size = 2000
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
file_import_options.CheckFileIsValid( 1800, HC.IMAGE_JPEG, 640, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 2200, HC.IMAGE_JPEG, 640, 480 )
#
max_size = None
max_gif_size = 2000
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
file_import_options.CheckFileIsValid( 1800, HC.IMAGE_JPEG, 640, 480 )
file_import_options.CheckFileIsValid( 2200, HC.IMAGE_JPEG, 640, 480 )
file_import_options.CheckFileIsValid( 1800, HC.IMAGE_GIF, 640, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 2200, HC.IMAGE_GIF, 640, 480 )
#
max_gif_size = None
min_resolution = ( 200, 100 )
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 180, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 80 )
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 180 )
#
min_resolution = None
max_resolution = ( 3000, 4000 )
file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 3200, 480 )
with self.assertRaises( HydrusExceptions.SizeException ):
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 640, 4200 )
file_import_options.CheckFileIsValid( 65536, HC.IMAGE_JPEG, 2800, 3800 )
#
present_new_files = False
file_import_options.SetPresentationOptions( present_new_files, present_already_in_inbox_files, present_already_in_archive_files )
self.assertFalse( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, False ) )
self.assertFalse( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, True ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True ) )
#
present_new_files = True
present_already_in_inbox_files = False
file_import_options.SetPresentationOptions( present_new_files, present_already_in_inbox_files, present_already_in_archive_files )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, False ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, True ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False ) )
self.assertFalse( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True ) )
#
present_already_in_inbox_files = True
present_already_in_archive_files = False
file_import_options.SetPresentationOptions( present_new_files, present_already_in_inbox_files, present_already_in_archive_files )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, False ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_AND_NEW, True ) )
self.assertFalse( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False ) )
self.assertTrue( file_import_options.ShouldPresent( CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True ) )
class TestTagImportOptions( unittest.TestCase ):
def test_basics( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
#
default_tag_import_options = ClientImportOptions.TagImportOptions()
self.assertEqual( default_tag_import_options.ShouldFetchTagsEvenIfURLKnownAndFileAlreadyInDB(), False )
self.assertEqual( default_tag_import_options.ShouldFetchTagsEvenIfHashKnownAndFileAlreadyInDB(), False )
blacklist = default_tag_import_options.GetTagBlacklist()
self.assertEqual( blacklist.Filter( some_tags ), some_tags )
self.assertEqual( default_tag_import_options.GetServiceKeysToContentUpdates( CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), {} )
#
tag_import_options = ClientImportOptions.TagImportOptions( fetch_tags_even_if_url_recognised_and_file_already_in_db = True )
self.assertEqual( tag_import_options.ShouldFetchTagsEvenIfURLKnownAndFileAlreadyInDB(), True )
self.assertEqual( tag_import_options.ShouldFetchTagsEvenIfHashKnownAndFileAlreadyInDB(), False )
#
tag_import_options = ClientImportOptions.TagImportOptions( fetch_tags_even_if_hash_recognised_and_file_already_in_db = True )
self.assertEqual( tag_import_options.ShouldFetchTagsEvenIfURLKnownAndFileAlreadyInDB(), False )
self.assertEqual( tag_import_options.ShouldFetchTagsEvenIfHashKnownAndFileAlreadyInDB(), True )
def test_filter( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
tag_blacklist = ClientTags.TagFilter()
tag_blacklist.SetRule( 'series:', CC.FILTER_BLACKLIST )
service_keys_to_service_tag_import_options = { example_service_key : ClientImportOptions.ServiceTagImportOptions( get_all = True ) }
tag_import_options = ClientImportOptions.TagImportOptions( tag_blacklist = tag_blacklist, service_keys_to_service_tag_import_options = service_keys_to_service_tag_import_options )
result = tag_import_options.GetServiceKeysToContentUpdates( CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags )
self.assertIn( example_service_key, result )
self.assertEqual( len( result ), 1 )
content_updates = result[ example_service_key ]
filtered_tags = { 'bodysuit', 'character:samus aran' }
self.assertTrue( len( content_updates ), len( filtered_tags ) )
def test_services( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key_1 = HydrusData.GenerateKey()
example_service_key_2 = HydrusData.GenerateKey()
#
service_keys_to_service_tag_import_options = {}
service_keys_to_service_tag_import_options[ example_service_key_1 ] = ClientImportOptions.ServiceTagImportOptions( get_all = True )
service_keys_to_service_tag_import_options[ example_service_key_2 ] = ClientImportOptions.ServiceTagImportOptions( namespaces = [ 'character' ] )
tag_import_options = ClientImportOptions.TagImportOptions( service_keys_to_service_tag_import_options = service_keys_to_service_tag_import_options )
result = tag_import_options.GetServiceKeysToContentUpdates( CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags )
self.assertIn( example_service_key_1, result )
self.assertIn( example_service_key_2, result )
self.assertTrue( len( result ), 2 )
content_updates_1 = result[ example_service_key_1 ]
content_updates_2 = result[ example_service_key_2 ]
self.assertEqual( len( content_updates_1 ), 3 )
self.assertEqual( len( content_updates_2 ), 1 )
class TestServiceTagImportOptions( unittest.TestCase ):
def test_basics( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
default_service_tag_import_options = ClientImportOptions.ServiceTagImportOptions()
self.assertEqual( default_service_tag_import_options._get_all, False )
self.assertEqual( default_service_tag_import_options._namespaces, [] )
self.assertEqual( default_service_tag_import_options._additional_tags, [] )
self.assertEqual( default_service_tag_import_options._to_new_files, True )
self.assertEqual( default_service_tag_import_options._to_already_in_inbox, True )
self.assertEqual( default_service_tag_import_options._to_already_in_archive, True )
self.assertEqual( default_service_tag_import_options._only_add_existing_tags, False )
self.assertEqual( default_service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), set() )
def test_get_all_filtering( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), some_tags )
#
only_namespaced = ClientTags.TagFilter()
only_namespaced.SetRule( '', CC.FILTER_BLACKLIST )
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, get_all_filter = only_namespaced )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), { 'character:samus aran', 'series:metroid' } )
#
only_samus = ClientTags.TagFilter()
only_samus.SetRule( '', CC.FILTER_BLACKLIST )
only_samus.SetRule( ':', CC.FILTER_BLACKLIST )
only_samus.SetRule( 'character:samus aran', CC.FILTER_WHITELIST )
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, get_all_filter = only_samus )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), { 'character:samus aran' } )
def test_namespace_filtering( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), some_tags )
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( namespaces = [ '', 'character' ] )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), { 'bodysuit', 'character:samus aran' } )
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( namespaces = [] )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), set() )
def test_additional( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, additional_tags = [ 'wew' ] )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), some_tags.union( [ 'wew' ] ) )
def test_application( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, to_new_files = True, to_already_in_inbox = False, to_already_in_archive = False )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), some_tags )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True, example_hash, some_tags ), set() )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False, example_hash, some_tags ), set() )
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, to_new_files = False, to_already_in_inbox = True, to_already_in_archive = False )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), set() )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True, example_hash, some_tags ), some_tags )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False, example_hash, some_tags ), set() )
#
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, to_new_files = False, to_already_in_inbox = False, to_already_in_archive = True )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), set() )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, True, example_hash, some_tags ), set() )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, False, example_hash, some_tags ), some_tags )
def test_existing( self ):
some_tags = { 'bodysuit', 'character:samus aran', 'series:metroid' }
existing_tags = { 'character:samus aran', 'series:metroid' }
example_hash = HydrusData.GenerateKey()
example_service_key = HydrusData.GenerateKey()
#
HG.test_controller.SetRead( 'filter_existing_tags', existing_tags )
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, only_add_existing_tags = True )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), existing_tags )
#
some_tags = { 'explicit', 'bodysuit', 'character:samus aran', 'series:metroid' }
existing_tags = { 'bodysuit' }
only_unnamespaced = ClientTags.TagFilter()
only_unnamespaced.SetRule( ':', CC.FILTER_BLACKLIST )
HG.test_controller.SetRead( 'filter_existing_tags', existing_tags )
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_all = True, only_add_existing_tags = True, only_add_existing_tags_filter = only_unnamespaced )
self.assertEqual( service_tag_import_options.GetTags( example_service_key, CC.STATUS_SUCCESSFUL_AND_NEW, True, example_hash, some_tags ), { 'bodysuit', 'character:samus aran', 'series:metroid' } )
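The constants asserted in `TestCheckerOptions` follow from a simple rate rule: the next check period is the time needed, at the measured file velocity, to collect `intended_files_per_check` more files, clamped between the never-faster-than and never-slower-than bounds. A worked sketch of that arithmetic (reimplemented here only to make the test numbers legible; the real logic lives in `ClientImportOptions.CheckerOptions`):

```python
# 'busy' cache from the test: 8,640 files found in the previous day.
files_found, time_delta = 8640, 86400

def period(intended_files_per_check, never_faster_than, never_slower_than):
    raw = time_delta * intended_files_per_check / files_found
    return min(max(raw, never_faster_than), never_slower_than)

print(period(5, 30, 86400))   # 50  -> regular options check again in 50s
print(period(2, 30, 86400))   # 30  -> fast options clamp at never_faster_than
print(period(10, 30, 86400))  # 100 -> slow options
```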
=== /markov.py (repo: sridevich/markov_chains, license: none, Python, 5,471 bytes) ===
from random import choice
import twitter
import os
import sys
api = twitter.Api(
consumer_key=os.environ['TWITTER_CONSUMER_KEY'],
consumer_secret=os.environ['TWITTER_CONSUMER_SECRET'],
access_token_key=os.environ['TWITTER_ACCESS_TOKEN_KEY'],
access_token_secret=os.environ['TWITTER_ACCESS_TOKEN_SECRET']
)
def open_and_read_file(file_path):
"""Takes file path as string; returns text as string.
Takes a string that is a file path, opens the file, and turns
the file's contents as one string of text.
"""
open_text_file = open(file_path).read()
    # print open_text_file
return open_text_file
def make_chains(text_string):
"""Takes input text as string; returns _dictionary_ of markov chains.
A chain will be a key that consists of a tuple of (word1, word2)
and the value would be a list of the word(s) that follow those two
words in the input text.
For example:
>>> make_chains("hi there mary hi there juanita")
    {('hi', 'there'): ['mary', 'juanita'], ('there', 'mary'): ['hi'], ('mary', 'hi'): ['there']}
"""
chains = {}
words = text_string.split()
for i in range(len(words)-2):
key1 = words[i]
key2 = words[i+1]
key_words = (key1, key2)
value_word = words[i+2]
if key_words not in chains:
chains[key_words] = [value_word]
else:
chains[key_words].append(value_word)
return chains
# def make_more_chains(text_string):
# """Takes input text as string; returns _dictionary_ of markov chains.
# A chain will be a key that consists of a tuple of (word1, word2)
# and the value would be a list of the word(s) that follow those two
# words in the input text.
# For example:
# >>> make_chains("hi there mary hi there juanita")
# {('hi', 'there'): ['mary', 'juanita'], ('there', 'mary'): ['hi'], ('mary', 'hi': ['there']}
# """
# chains = {}
# words = text_string.split()
# for i in range(len(words)):
# key1 = words[i]
# print key1
# key2 = words[i+1]
# key_words = (key1, key2)
# value_word = words[i+2]
# if key_words not in chains:
# chains[key_words] = [value_word]
# else:
# chains[key_words].append(value_word)
# return chains
def make_text(chains):
"""Takes dictionary of markov chains; returns random text."""
    # key_word randomly grabs a starting key
key_word = choice(chains.keys())
text = key_word[0] + " " + key_word[1]
while key_word in chains:
random_word = choice(chains[key_word])
# print random_word
text = text + " " + random_word
key_word = (key_word[1], random_word)
return text[0:140]
input_path = sys.argv[1]
# Open the file and turn it into one long string
input_text = open_and_read_file(input_path)
# Get a Markov chain
chains = make_chains(input_text)
#chains = make_more_chains(input_text)
# Produce random text
random_text = make_text(chains)
api.PostUpdate(random_text)
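The live path above reads a corpus from `sys.argv[1]`, builds a bigram chain, truncates the generated text to 140 characters (the old tweet limit), and posts it. A minimal offline sketch that exercises the same functions without Twitter credentials (note that `choice(chains.keys())` in `make_text` is a Python 2 idiom; on Python 3 it would need `choice(list(chains.keys()))`):

```python
corpus = "hi there mary hi there juanita"
chains = make_chains(corpus)
print(chains[("hi", "there")])  # ['mary', 'juanita']
print(make_text(chains))        # e.g. "mary hi there juanita" (random)
```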
=== /board/views.py (repo: boyl/lighteddjango, license: none, Python, 5,434 bytes) ===
import hashlib
import requests
from django.conf import settings
from django.core.signing import TimestampSigner
from django.contrib.auth import get_user_model
from rest_framework import viewsets, authentication, permissions, filters
from rest_framework.renderers import JSONRenderer
from rest_framework.pagination import PageNumberPagination
import django_filters
from django_filters.rest_framework import DjangoFilterBackend
from .models import Sprint, Task
from .serializers import SprintSerializer, TaskSerializer, UserSerializer
# Create your views here.
User = get_user_model()
class NullFilter(django_filters.BooleanFilter):
"""Filter on a field set as null or not."""
def filter(self, qs, value):
if value is not None:
return qs.filter(**{'%s__isnull' % self.field_name: value})
return qs
class TaskFilter(django_filters.FilterSet):
backlog = NullFilter(field_name='sprint')
class Meta:
model = Task
fields = ('sprint', 'status', 'assigned', 'backlog',)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.filters['assigned'].extra.update({'to_field_name': User.USERNAME_FIELD})
class SprintFilter(django_filters.FilterSet):
end_min = django_filters.DateFilter(field_name='end', lookup_expr='gte')
end_max = django_filters.DateFilter(field_name='end', lookup_expr='lte')
class Meta:
model = Sprint
fields = ('end_min', 'end_max',)
class StandardResultsSetPagination(PageNumberPagination):
"""
Setting pagination for standard results.
"""
page_size = 25
page_size_query_param = 'page_size'
max_page_size = 100
class DefaultsMixin(object):
"""
Default settings for view authentication, permissions,
filtering and pagination.
"""
authentication_classes = (
authentication.BasicAuthentication,
authentication.TokenAuthentication,
# authentication.SessionAuthentication,
)
permission_classes = (
permissions.IsAuthenticated,
)
pagination_class = StandardResultsSetPagination
filter_backends = (
filters.SearchFilter,
filters.OrderingFilter,
DjangoFilterBackend,
)
class UpdateHookMixin(object):
"""Mixin class to send update information to the websocket server."""
@staticmethod
def _build_hook_url(obj):
if isinstance(obj, User):
model = 'user'
else:
model = obj.__class__.__name__.lower()
proto = 'https' if settings.WATERCOOLER_SECURE else 'http'
host = settings.WATERCOOLER_SERVER
return f"{proto}://{host}/{model}/{obj.pk}"
def _send_hook_request(self, obj, method):
url = self._build_hook_url(obj)
if method in ('POST', 'PUT'):
# Build the body
serializer = self.get_serializer(obj)
renderer = JSONRenderer()
context = dict(request=self.request)
body = renderer.render(serializer.data, renderer_context=context)
else:
body = None
headers = {
'content-type': 'application/json',
'X-Signature': self._build_hook_signature(method, url, body)
}
try:
response = requests.request(method, url, data=body, headers=headers, timeout=0.5)
response.raise_for_status()
except requests.exceptions.ConnectionError:
# Host could not be resolved or the connection was refused
pass
except requests.exceptions.Timeout:
# Request timed out
pass
except requests.exceptions.RequestException:
# Server response with 4XX or 5XX status code
pass
@staticmethod
def _build_hook_signature(method, url, body):
signer = TimestampSigner(settings.WATERCOOLER_SECRET)
body = hashlib.sha256(body or b'').hexdigest()
value = f"{method.lower()}:{url}:{body}"
return signer.sign(value)
def perform_create(self, serializer):
super().perform_create(serializer)
self._send_hook_request(serializer.instance, 'POST')
def perform_update(self, serializer):
super().perform_update(serializer)
self._send_hook_request(serializer.instance, 'PUT')
def perform_destroy(self, instance):
self._send_hook_request(instance, 'DELETE')
super().perform_destroy(instance)
class SprintViewSet(DefaultsMixin, UpdateHookMixin, viewsets.ModelViewSet):
"""API endpoint for listing and creating sprints."""
queryset = Sprint.objects.order_by('end')
serializer_class = SprintSerializer
filter_class = SprintFilter
search_fields = ('name',)
ordering_fields = ('end', 'name', )
class TaskViewSet(DefaultsMixin, UpdateHookMixin, viewsets.ModelViewSet):
"""API endpoint for listing and creating tasks."""
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_class = TaskFilter
search_fields = ('name', 'description',)
ordering_fields = ('name', 'order', 'started', 'due', 'completed',)
class UserViewSet(DefaultsMixin, UpdateHookMixin, viewsets.ReadOnlyModelViewSet):
"""API endpoint for listing users."""
lookup_field = User.USERNAME_FIELD
queryset = User.objects.order_by(User.USERNAME_FIELD)
serializer_class = UserSerializer
search_fields = (User.USERNAME_FIELD,)
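These viewsets are mapped to URLs elsewhere in the project; a typical registration sketch with DRF's `DefaultRouter` (the project's actual `urls.py` is not part of this file, so the layout below is assumed):

```python
# Hypothetical urls.py wiring for the viewsets above.
from rest_framework.routers import DefaultRouter

from board.views import SprintViewSet, TaskViewSet, UserViewSet

router = DefaultRouter()
router.register(r"sprints", SprintViewSet)
router.register(r"tasks", TaskViewSet)
router.register(r"users", UserViewSet)

urlpatterns = router.urls
```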
=== /LinkedList/remove_duplicate_from_sorted_linkedlist.py (repo: Shanshan-IC/Algorithm_Python, license: none, Python, 557 bytes) ===
"""
http://www.lintcode.com/zh-cn/problem/remove-duplicates-from-sorted-list/
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
"""
@param: head: head is the head of the linked list
@return: head of linked list
"""
def deleteDuplicates(self, head):
pre = head
while pre:
while pre.next and pre.val == pre.next.val:
pre.next = pre.next.next
pre = pre.next
        return head
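A quick sketch exercising `deleteDuplicates` on the sorted list 1 -> 1 -> 2 -> 3 -> 3, using the `ListNode` definition quoted in the docstring above:

```python
class ListNode(object):
    def __init__(self, val, next=None):
        self.val = val
        self.next = next

head = ListNode(1, ListNode(1, ListNode(2, ListNode(3, ListNode(3)))))
node = Solution().deleteDuplicates(head)
while node:
    print(node.val)  # prints 1, 2, 3
    node = node.next
```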
=== /airflow_ml_dags/images/airflow-preprocess/preprocess.py (repo: made-ml-in-prod-2021/dronovartem, license: none, Python, 599 bytes) ===
import os
import pandas as pd
import click
@click.command("predict")
@click.option("--input-dir")
@click.option("--output-dir")
def preprocess(input_dir: str, output_dir):
"""
Implement dummy dataset preprocessing.
"""
data = pd.read_csv(os.path.join(input_dir, "data.csv"))
target = pd.read_csv(os.path.join(input_dir, "target.csv"))
os.makedirs(output_dir, exist_ok=True)
train_data = pd.concat([data, target], axis=1)
train_data.to_csv(os.path.join(output_dir, "train_data.csv"), index=False)
if __name__ == '__main__':
    preprocess()
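Because `preprocess` is a click command (registered, note, under the name "predict"), it can be smoke-tested without Airflow using click's built-in test runner:

```python
# Test sketch: click's CliRunner plus an isolated temp directory.
import os
import pandas as pd
from click.testing import CliRunner

runner = CliRunner()
with runner.isolated_filesystem():
    os.makedirs("in")
    pd.DataFrame({"a": [1, 2]}).to_csv("in/data.csv", index=False)
    pd.DataFrame({"y": [0, 1]}).to_csv("in/target.csv", index=False)
    result = runner.invoke(preprocess, ["--input-dir", "in", "--output-dir", "out"])
    assert result.exit_code == 0
    assert os.path.exists("out/train_data.csv")
```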
eabfb42337373790ebbbc5379ef69a5441583b28 | 2436f9f911149926af8fd7c1cd5c4e0d604987a0 | /puente_quintanavides/pretensa_viga_xci.py | ae3ea59819381584f381dd1432ad80297966424e | [] | no_license | cryptopulgo/XCmodels | d185b93d01f69d52d88d8a5f965c35dc1742da22 | bad6e553d919909086ab9045884594b7d8aafc06 | refs/heads/master | 2022-12-08T21:24:39.152883 | 2020-08-19T13:25:25 | 2020-08-19T13:25:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 319 | py | # -*- coding: utf-8
def trataResultsPret(nmbComb):
mdlr(dom(calculate_nodal_reactions(1)\listaReaccionesNodos(nmbComb,tagsNodosCoartados,"%7.2f","reacc","cabecera","tit")))
\nuevo_archivo_salida["salidaG0"]{"reacciones_G0.tex"}
\resuelveCombEstatLin("G0")
\trataResultsPret("G0")
cierra_archivo_salida("salidaG0")
=== /all_data/exercism_data/python/grade-school/07ecb8fb9316422683afbe71e7b89cda.py (repo: itsolutionscorp/AutoStyle-Clustering, license: none, Python, 456 bytes) ===
from collections import defaultdict
class School:
def __init__(self, name):
self.name = name
self.db = defaultdict(set)
def add(self, student, grade):
self.db[grade].add(student)
def grade(self, grade_number):
return self.db[grade_number]
def sort(self):
register = dict()
for grade, roster in self.db.items():
register[grade] = tuple(sorted(roster))
return register
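A usage sketch for the `School` roster above (`grade` returns the raw set; `sort` returns grade-to-sorted-tuple mappings):

```python
school = School("Hill Valley High")
school.add("Marty", 9)
school.add("Jennifer", 9)
school.add("Biff", 11)

print(school.grade(9))  # {'Marty', 'Jennifer'} -- a set, order not guaranteed
print(school.sort())    # {9: ('Jennifer', 'Marty'), 11: ('Biff',)}
```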
859496717de268dc9117c928a2adaa60d197eb67 | 35fdd5b42b47a1dbe6a25f6fc1865f4e48b842a5 | /evalml/tuners/tuner_exceptions.py | a1fecf7101695ca8c098a63dd3fa2cdf08aa4bf7 | [
"BSD-3-Clause"
] | permissive | skvorekn/evalml | 41e5426f9f7d5ad625c21b74336009894c79c7de | 2cbfa344ec3fdc0fb0f4a0f1093811135b9b97d8 | refs/heads/main | 2023-03-27T01:42:07.691406 | 2021-03-19T18:53:43 | 2021-03-19T18:53:43 | 349,555,689 | 0 | 0 | BSD-3-Clause | 2021-03-21T14:57:01 | 2021-03-19T21:08:12 | null | UTF-8 | Python | false | false | 274 | py | class NoParamsException(Exception):
"""Raised when a tuner exhausts its search space and runs out of parameters to propose."""
pass
class ParameterError(Exception):
"""Raised when a tuner encounters an error with the parameters being used with it."""
pass
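Both classes are plain `Exception` subclasses used as control-flow signals by the tuners; a schematic consumer (the `propose()` and `evaluate()` calls below stand in for the tuner API and are assumed, not quoted from evalml):

```python
def run_search(tuner, n_iter):
    for _ in range(n_iter):
        try:
            params = tuner.propose()  # hypothetical tuner method
        except NoParamsException:
            break                     # search space exhausted: stop cleanly
        except ParameterError as err:
            print("bad parameters: {}".format(err))
            continue
        evaluate(params)              # hypothetical scoring step
```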
=== /ucsmsdk/mometa/fabric/FabricFcSan.py (repo: anoop1984/python_sdk, license: Apache-2.0, Python, 4,240 bytes) ===
"""This module contains the general information for FabricFcSan ManagedObject."""
import sys, os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class FabricFcSanConsts():
ID_A = "A"
ID_B = "B"
ID_NONE = "NONE"
UPLINK_TRUNKING_DISABLED = "disabled"
UPLINK_TRUNKING_ENABLED = "enabled"
class FabricFcSan(ManagedObject):
"""This is FabricFcSan class."""
consts = FabricFcSanConsts()
naming_props = set([u'id'])
mo_meta = MoMeta("FabricFcSan", "fabricFcSan", "[id]", VersionMeta.Version101e, "InputOutput", 0xff, [], ["admin", "ext-san-config", "ext-san-policy"], [u'fabricSanCloud'], [u'fabricFcSanEp', u'fabricFcSanPc', u'fabricFcoeSanEp', u'fabricFcoeSanPc', u'fabricSubGroup', u'fabricVsan', u'faultInst'], ["Get", "Set"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"config_qualifier": MoPropertyMeta("config_qualifier", "configQualifier", "string", VersionMeta.Version311e, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|vsan-count-exceeds-limit),){0,2}(defaultValue|not-applicable|vsan-count-exceeds-limit){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"id": MoPropertyMeta("id", "id", "string", VersionMeta.Version101e, MoPropertyMeta.NAMING, 0x8, None, None, None, ["A", "B", "NONE"], []),
"locale": MoPropertyMeta("locale", "locale", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|unknown|server|chassis|internal|external),){0,5}(defaultValue|unknown|server|chassis|internal|external){0,1}""", [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101e, MoPropertyMeta.CREATE_ONLY, 0x10, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x20, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x40, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"transport": MoPropertyMeta("transport", "transport", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|unknown|ether|dce|fc),){0,4}(defaultValue|unknown|ether|dce|fc){0,1}""", [], []),
"type": MoPropertyMeta("type", "type", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|unknown|lan|san|ipc),){0,4}(defaultValue|unknown|lan|san|ipc){0,1}""", [], []),
"uplink_trunking": MoPropertyMeta("uplink_trunking", "uplinkTrunking", "string", VersionMeta.Version141i, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["disabled", "enabled"], []),
}
prop_map = {
"childAction": "child_action",
"configQualifier": "config_qualifier",
"dn": "dn",
"id": "id",
"locale": "locale",
"name": "name",
"rn": "rn",
"sacl": "sacl",
"status": "status",
"transport": "transport",
"type": "type",
"uplinkTrunking": "uplink_trunking",
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.config_qualifier = None
self.locale = None
self.name = None
self.sacl = None
self.status = None
self.transport = None
self.type = None
self.uplink_trunking = None
ManagedObject.__init__(self, "FabricFcSan", parent_mo_or_dn, **kwargs)
| [
"[email protected]"
] | |
79fe8745abbd0602a0437bb480fabb4ab786264d | 682319f56c17e949bab0d6e418838d33977dd760 | /Assignment_5/pattern1.py | 5c0520857bd5b8c307f2c17dfda35cc732667f4a | [] | no_license | DilipBDabahde/PythonExample | 8eb70773a783b1f4b6cf6d7fbd2dc1302af8aa1b | 669762a8d9ee81ce79416d74a4b6af1e2fb63865 | refs/heads/master | 2020-08-23T01:05:44.788080 | 2020-07-25T21:59:52 | 2020-07-25T21:59:52 | 216,511,985 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 314 | py | '''
2. Write a recursive program which displays the pattern below.
Input : 5
Output : 1 2 3 4 5
'''
i = 1;
def pattern(iNo):
global i;
if i <= iNo:
print(i,end= " ");
i += 1;
pattern(iNo);
def main():
val = int(input("Enter a val:"));
pattern(val);
print();
if __name__ == '__main__':
main();
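# Example session (illustrative, based on the recursion above):
#   Enter a val:5
#   1 2 3 4 5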
| [
"[email protected]"
] | |
98fd0ff9ef892db47051df84ce0c6dab4a5c6f62 | 7d1fd87e1aaf7e6b2ea72ab086a128d03ab059f1 | /Python_Flask/Flask1/flask_2.py | 3204f2d47e2bdba0604fdd8a5c532a2880633b24 | [] | no_license | efren1990/codepy | 05fb34fb608d9921cd5b1c257a9869f2d42eafae | 1bd957e7a7285d459ba76e99c4bccb8dbabf8da4 | refs/heads/main | 2023-06-30T06:31:51.154519 | 2021-08-13T01:30:57 | 2021-08-13T01:30:57 | 395,486,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 964 | py | """
WHAT IS FLASK --->
Flask is a microframework for Python based on Werkzeug, Jinja 2 and good intentions.
And before you ask: it's BSD licensed!
--------------THE run() METHOD--------------------------------------------------------------
"""
# Import the Flask class from the flask library
from flask import Flask
"""Para utilizar flask es necesario crear una instacia de la clase Flask
El objeto Flask recibe como parametro la constante __name__
"""
app = Flask(__name__)
@app.route('/') # wrapper/decorator
def index():
return 'Hola mundo'
# run() -> starts the development server
# run() can listen on whatever port we tell it to
# A computer has 2**16 ports in total,
# of which the first 1024 are already taken
# debug=True lets us apply changes without restarting the server after every change
# Flask best-practice entry-point check
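# To try it (illustrative usage, not part of the original tutorial):
#   $ python flask_2.py
#   then open http://localhost:8000/ in a browser to see 'Hola mundo'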
if __name__ == '__main__':
app.run(debug = True, port = 8000) | [
"[email protected]"
] | |
008dd71fd10f5620479b636d318ed0204bec3d29 | cb3119cbe128fc9adcf286d257126d03b0de7a10 | /tests/test_net.py | d5498289586178f08151b9c08a1eee33656809fe | [
"MIT"
] | permissive | CBJamo/skidl | a66d14598ec851eb80c1d1fd38df3513aff0acdc | c0e46aa2e75f3be8aefd585c8cbdcbd33a59d542 | refs/heads/master | 2020-03-09T06:30:26.759951 | 2018-04-08T12:54:08 | 2018-04-08T12:54:08 | 128,640,952 | 0 | 0 | null | 2018-04-08T12:52:59 | 2018-04-08T12:52:58 | null | UTF-8 | Python | false | false | 412 | py | import pytest
from skidl import *
from .setup_teardown import *
def test_nets_1():
gnd = Net('GND')
a = Net('A')
b = Net('B')
c = Net()
p = Pin()
assert len(default_circuit.get_nets()) == 0
assert len(a) == 0
assert len(b) == 0
assert len(c) == 0
a += p
assert len(default_circuit.get_nets()) == 1
assert len(a) == 1
assert len(b) == 0
assert len(c) == 0
| [
"[email protected]"
] | |
20f5f50e20a6e339fe8174eaf494840bb10d8f6b | a312bf5148945a19bb6d30c1f0e97d3edc021af2 | /RestFrameWork/api/serializer.py | d9ff420cf6e881549e5c71899e923275d6171933 | [] | no_license | Shikhar0907/Create-API | 252cdb632765f5d5b7a03213b942bfc9c5b979de | c8f13f309d0aa668091d615946b52bea66316d36 | refs/heads/master | 2020-04-03T00:51:57.367335 | 2018-10-27T01:16:00 | 2018-10-27T01:16:00 | 154,911,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | from rest_framework import serializers
from RestFrameWork.models import Status
class StatusSerializer(serializers.ModelSerializer):
class Meta:
model = Status
fields = [
'id',
'user',
'content',
'image'
]
read_only_fields = ['user']
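    # Illustrative note (added): validate() below treats empty-string content
    # as missing, so a payload with content == "" and no image raises a
    # ValidationError, while {"content": "hi"} passes through unchanged.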
    def validate(self, data):
        content = data.get("content", None)
        if content == "":
            content = None
        image = data.get("image", None)
        if content is None and image is None:
            raise serializers.ValidationError("content or image is not present")
        return data | [
"[email protected]"
] | |
e2fb33449cb17a9b98a90fba8046aa3d8eddddcf | 2324d8e4544a9b813153ce0ed0f858972ea7f909 | /66-加一.py | 35964e18d54db38d2430e97ff639ad548ef65d3c | [] | no_license | Terry-Ma/Leetcode | af8a4ad8059975f8d12b0351610336f1f5f01097 | cc7f41e2fb3ed5734c2a5af97e49a5bc17afbceb | refs/heads/master | 2021-08-10T16:40:20.482851 | 2021-07-03T08:35:56 | 2021-07-03T08:35:56 | 225,814,239 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | class Solution:
def plusOne(self, digits: List[int]) -> List[int]:
flag = 1
for i in range(len(digits) - 1, -1, -1):
if flag == 1:
if digits[i] == 9:
digits[i] = 0
else:
digits[i] = digits[i] + 1
flag = 0
break
if flag == 1:
digits.insert(0, 1)
return digits
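# Worked examples (illustrative): [1,2,3] -> [1,2,4]; [1,2,9] -> [1,3,0];
# [9,9] -> [1,0,0] (the final insert handles the all-nines carry).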
| [
"[email protected]"
] | |
33b03bfe4606d07da07c3346d5d1ccfd40eca98f | d3efc82dfa61fb82e47c82d52c838b38b076084c | /crossmarketetf_bak/crossmarket_redemption_HA/YW_CETFSS_SHSH_037.py | b5b26bedf5acb5c58983fc121c0293984c3daf9e | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,499 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test")
from crossmarketetf.cetfservice.cetf_main_service import *
from crossmarketetf.cetfservice.cetf_get_components_asset import *
from crossmarketetf.cetfservice.cetf_utils import *
from mysql.QueryOrderErrorMsg import queryOrderErrorMsg
from service.mainService import *
from mysql.getUpOrDownPrice import getUpPrice
from crossmarketetf.cetfservice.cetf_add import cetf_add
class YW_CETFSS_SHSH_037(xtp_test_case):
def test_YW_CETFSS_SHSH_037(self):
# -----------ETF赎回-------------
title = ('上海ETF赎回--可深市股票退补现金替代:T日ETF拥股量1unit→T日赎回ETF')
# 定义当前测试用例的期待值
# 期望状态:初始、未成交、全成、废单、撤废、内部撤单
# xtp_ID和cancel_xtpID默认为0,不需要变动
case_goal = {
'期望状态': '全成',
'errorID': 0,
'errorMSG': '',
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
unit_info = {
'ticker': '530510', # etf代码
'etf_unit_buy': 1.0, # etf买入单位数
'etf_unit': 1.0, # etf赎回单位数
'etf_unit_sell': 1.0, # etf卖出单位数
'component_unit_sell': 1.0 # 成分股卖出单位数
}
# -----------T日买入etf-------------
cetf_add(Api,
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
unit_info['ticker'],
unit_info['etf_unit_buy'])
# -----------查询ETF赎回前成分股持仓-------------
component_stk_info = cetf_get_all_component_stk(Api,unit_info['ticker'])
# 查询etf最小申赎数量
unit_number = query_creation_redem_unit(unit_info['ticker'])
# etf赎回数量
quantity = int(unit_info['etf_unit'] * unit_number)
# 定义委托参数信息------------------------------------------
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
unit_info['ticker'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_REDEMPTION'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
quantity
}
g_func.cetf_parm_init(case_goal['期望状态'])
rs1 = cetf_service_test(Api, case_goal, wt_reqs,component_stk_info)
etf_creation_log(case_goal, rs1)
self.assertEqual(rs1['用例测试结果'], True)
# --------二级市场,卖出etf-----------
case_goal['期望状态'] = '废单'
case_goal['errorID'] = 11010121
case_goal['errorMSG'] = queryOrderErrorMsg(11010121)
# 二级市场卖出的etf数量
quantity = int(unit_info['etf_unit_sell'] * unit_number)
quantity_list = split_etf_quantity(quantity)
# 查询涨停价
limitup_px = getUpPrice(unit_info['ticker'])
rs2 = {}
for etf_quantity in quantity_list:
wt_reqs_etf = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
unit_info['ticker'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price':
limitup_px,
'quantity':
etf_quantity
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
rs2 = serviceTest(Api, case_goal, wt_reqs_etf)
if rs2['用例测试结果'] is False:
etf_sell_log(case_goal, rs2)
self.assertEqual(rs2['用例测试结果'], True)
return
etf_sell_log(case_goal, rs2)
# ------------二级市场卖出成份股-----------
case_goal['期望状态'] = '废单'
case_goal['errorID'] = 11010121
case_goal['errorMSG'] = queryOrderErrorMsg(11010121)
# 查询etf成分股代码和数量
etf_components = query_cetf_component_share(unit_info['ticker'])
rs3 = {}
for stk_code in etf_components:
# 赎回用例1-25会有上海和深圳的成分股各一支,深圳成分股为'008000',只卖上海的
if stk_code != '008000':
components_share = etf_components[stk_code]
quantity = (int(unit_info['component_unit_sell'])
if unit_info['component_unit_sell'] >= 100
else int(components_share * unit_info['component_unit_sell']))
limitup_px = getUpPrice(stk_code)
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker':
stk_code,
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price':
limitup_px,
'quantity':
quantity
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
rs3 = serviceTest(Api, case_goal, wt_reqs)
if rs3['用例测试结果'] is False:
etf_components_sell_log(case_goal, rs3)
self.assertEqual(rs3['用例测试结果'], True)
etf_components_sell_log(case_goal, rs3)
self.assertEqual(rs3['用例测试结果'], True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
11dd3960248cfa2fe0cc0c0fac1b2eb2a9ff57ac | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-7332.py | d25e1cfe7e3b3c326b3f08619c66c15d0345986e | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,291 | py | # Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
| [
"[email protected]"
] | |
0b3171a0d71ffcfd3d3b577218d0ac08ae5273f7 | b01d5af63ae060b3b4b1a715823722e0e2cde603 | /tensorflow_graphics/nn/layer/tests/pointnet_test.py | d5a6a3b51085aa29b0bee419989d7efc2848ef79 | [
"Apache-2.0"
] | permissive | ghosalsattam/graphics | 9c8b313def86d4629281e9c53e0cb261703336f5 | 946aa03b5178d2fc557a81045b84df24af322afd | refs/heads/master | 2022-12-06T15:42:21.729897 | 2020-06-25T14:37:26 | 2020-06-25T14:37:51 | 276,175,390 | 0 | 0 | Apache-2.0 | 2020-06-30T18:13:34 | 2020-06-30T18:13:33 | null | UTF-8 | Python | false | false | 3,553 | py | # Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for pointnet layers."""
# pylint: disable=invalid-name
from absl.testing import parameterized
import tensorflow as tf
from tensorflow_graphics.nn.layer.pointnet import ClassificationHead
from tensorflow_graphics.nn.layer.pointnet import PointNetConv2Layer
from tensorflow_graphics.nn.layer.pointnet import PointNetDenseLayer
from tensorflow_graphics.nn.layer.pointnet import PointNetVanillaClassifier
from tensorflow_graphics.nn.layer.pointnet import VanillaEncoder
from tensorflow_graphics.util import test_case
class RandomForwardExecutionTest(test_case.TestCase):
@parameterized.parameters(
((32, 2048, 1, 3), (32), (.5), True),
((32, 2048, 1, 3), (32), (.5), False),
((32, 2048, 1, 2), (16), (.99), True),
)
def test_conv2(self, input_shape, channels, momentum, training):
B, N, X, _ = input_shape
inputs = tf.random.uniform(input_shape)
layer = PointNetConv2Layer(channels, momentum)
outputs = layer(inputs, training=training)
assert outputs.shape == (B, N, X, channels)
@parameterized.parameters(
((32, 1024), (40), (.5), True),
((32, 2048), (20), (.5), False),
((32, 512), (10), (.99), True),
)
def test_dense(self, input_shape, channels, momentum, training):
B, _ = input_shape
inputs = tf.random.uniform(input_shape)
layer = PointNetDenseLayer(channels, momentum)
outputs = layer(inputs, training=training)
assert outputs.shape == (B, channels)
@parameterized.parameters(
((32, 2048, 3), (.9), True),
((32, 2048, 2), (.5), False),
((32, 2048, 3), (.99), True),
)
def test_vanilla_encoder(self, input_shape, momentum, training):
B = input_shape[0]
inputs = tf.random.uniform(input_shape)
encoder = VanillaEncoder(momentum)
outputs = encoder(inputs, training=training)
assert outputs.shape == (B, 1024)
@parameterized.parameters(
((16, 1024), (20), (.9), True),
((8, 2048), (40), (.5), False),
((32, 512), (10), (.99), True),
)
def test_classification_head(self, input_shape, num_classes, momentum,
training):
B = input_shape[0]
inputs = tf.random.uniform(input_shape)
head = ClassificationHead(num_classes, momentum)
outputs = head(inputs, training=training)
assert outputs.shape == (B, num_classes)
@parameterized.parameters(
((32, 1024, 3), 40, True),
((32, 1024, 2), 40, False),
((16, 2048, 3), 20, True),
((16, 2048, 2), 20, False),
)
def test_vanilla_classifier(self, input_shape, num_classes, training):
B = input_shape[0]
C = num_classes
inputs = tf.random.uniform(input_shape)
model = PointNetVanillaClassifier(num_classes, momentum=.5)
logits = model(inputs, training)
assert logits.shape == (B, C)
labels = tf.random.uniform((B,), minval=0, maxval=C, dtype=tf.int64)
PointNetVanillaClassifier.loss(labels, logits)
if __name__ == "__main__":
test_case.main()
| [
"[email protected]"
] | |
fbcc8dc57d65876cb88bbb56654d802ff47535ab | ef74d9ad851021bcb0ed12880e14269b6ed7f617 | /Sample/Koudai/Server/src/ZyGames.Tianjiexing.Server/PyScript/Action/action4408.py | c30f2afc099f329d04a422f45a419fb2d45cb810 | [
"BSD-2-Clause-Views",
"MIT"
] | permissive | sunyuping/Scut | b5e5798e9b519941f0ac3a08a3263dc0f45beb47 | ec2ea35c0e4de1f2da49c50d14e119a4f17cd93a | refs/heads/master | 2020-12-25T23:19:26.597830 | 2013-11-16T07:50:01 | 2013-11-16T07:50:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,953 | py | import clr, sys
import random
import time
import datetime
clr.AddReference('ZyGames.Framework.Common');
clr.AddReference('ZyGames.Framework');
clr.AddReference('ZyGames.Framework.Game');
clr.AddReference('ZyGames.Tianjiexing.Model');
clr.AddReference('ZyGames.Tianjiexing.BLL');
clr.AddReference('ZyGames.Tianjiexing.Lang');
clr.AddReference('ZyGames.Tianjiexing.BLL.Combat');
from lang import Lang
from action import *
from System import *
from System.Collections.Generic import *
from ZyGames.Framework.Common.Log import *
from ZyGames.Tianjiexing.Model import *
from ZyGames.Tianjiexing.BLL import *
from ZyGames.Tianjiexing.BLL.Base import *
from ZyGames.Tianjiexing.Lang import *
from ZyGames.Framework.Game.Cache import *
from ZyGames.Framework.Game.Service import *
from ZyGames.Framework.Common import *
from ZyGames.Framework.Cache.Generic import *
from ZyGames.Tianjiexing.Model.Config import *
from ZyGames.Tianjiexing.BLL.Combat import *
from ZyGames.Tianjiexing.Model.Enum import *
# 4408_ShengJiTa (holy tower) attribute exchange interface
class UrlParam(HttpParam):
def __init__(self):
HttpParam.__init__(self);
self.propertyType = 0;
self.starNum = 0;
class ActionResult(DataResult):
def __init__(self):
DataResult.__init__(self);
def getUrlElement(httpGet, parent):
urlParam = UrlParam();
if httpGet.Contains("PropertyType")\
and httpGet.Contains("StarNum"):
urlParam.propertyType = httpGet.GetEnum[PropertyType]("PropertyType");
urlParam.starNum = httpGet.GetIntValue("StarNum");
else:
urlParam.Result = False;
return urlParam;
def takeAction(urlParam, parent):
actionResult = ActionResult();
userId = parent.Current.User.PersonalId;
contextUser = parent.Current.User;
def loadError():
parent.ErrorCode = Lang.getLang("ErrorCode");
parent.ErrorInfo = Lang.getLang("LoadError");
actionResult.Result = False;
return actionResult;
    # Update the attribute bonus
percent = 100.0;
userSJTInfo = GameDataCacheSet[UserShengJiTa]().FindKey(userId);
    # Check whether the star count is enough for the exchange
if userSJTInfo.LastScoreStar < urlParam.starNum:
return loadError();
if urlParam.propertyType == PropertyType.Life:
userSJTInfo.LifeNum = userSJTInfo.LifeNum + (urlParam.starNum / percent);
elif urlParam.propertyType == PropertyType.WuLi:
userSJTInfo.WuLiNum = userSJTInfo.WuLiNum + (urlParam.starNum / percent);
elif urlParam.propertyType == PropertyType.Mofa:
userSJTInfo.MofaNum = userSJTInfo.MofaNum + (urlParam.starNum / percent);
elif urlParam.propertyType == PropertyType.FunJi:
userSJTInfo.FunJiNum = userSJTInfo.FunJiNum + (urlParam.starNum / percent);
else:
return loadError();
    # Update the star count
userSJTInfo.LastScoreStar -= urlParam.starNum;
return actionResult;
def buildPacket(writer, urlParam, actionResult):
return True; | [
"[email protected]"
] | |
17008c563dddbe80a6415183379d983eac9dbb47 | 3b504a983f1807ae7c5af51078bfab8c187fc82d | /client/input/Profile/JoystickProfile.py | 5d46096bb6be570335fa1ccb552a8962953dcbef | [] | no_license | SEA-group/wowp_scripts | 7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58 | 2fe54a44df34f2dcaa6860a23b835dcd8dd21402 | refs/heads/master | 2021-09-07T23:10:13.706605 | 2018-03-02T17:23:48 | 2018-03-02T17:23:48 | 117,280,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,476 | py | # Embedded file name: scripts/client/input/Profile/JoystickProfile.py
import BigWorld
import math
import GameEnvironment
from ICMultiUpdate import ICMultiUpdate
import InputMapping
from MathExt import clamp, sign, FloatToCInt16
from consts import FORCE_AXIS, FLAPS_AXIS, HORIZONTAL_AXIS, VERTICAL_AXIS, ROLL_AXIS, BATTLE_MODE
from input.InputSubsystem.JoyInput import JoystickExpertInput
from input.InputSubsystem.KeyboardInput import KeyboardInput
from input.Profile.ProfileBase import IProfileBase
from clientConsts import NOT_CONTROLLED_MOD
from input.InputSubsystem.cameraOverlookInput import cameraOverlookInput
from input.Profile.Helper import MouseProfileProxy, getMouseInput, getGunnerInput, KeysTurnLikeMouse, JoyMouseEvent, mouse_event_dt
CAMERA_ROLL_SPEED = 5.0
CAMERA_YAW_SPEED = 8.0
CAMERA_PITCH_SPEED = 10
class JoystickProfile(IProfileBase, ICMultiUpdate):
def __init__(self, inputAxis, notControlledByUser):
self._notControlledByUser = notControlledByUser
self._forciblySendAxis = False
self.__overlookCameraInput = cameraOverlookInput()
InputMapping.g_instance.onSaveControls += self._onSaveControls
GameEnvironment.getCamera().eZoomStateChanged += self.__zoomStateChanged
GameEnvironment.getInput().eBattleModeChange += self.__eBattleModeChange
self.__axisKeyBoard = [0.0] * 5
self.__axisJoy = [0.0] * 5
self.__axisExtra = [0.0] * 5
self.__lastAxis = [0.0] * 5
self._isSlipComp = False
self.__battleMode = BATTLE_MODE.COMBAT_MODE
self.__lastJoyMouseEvent = JoyMouseEvent()
self.__mpp = MouseProfileProxy(self)
planeType = getattr(BigWorld.player(), 'planeType', -1)
self._mouse = getMouseInput(planeType)(self.__mpp)
self.__gunner = getGunnerInput(planeType)(self.__mpp)
self._kef = KeysTurnLikeMouse(self)
self.__keyboard = KeyboardInput(self._kef.get_filter())
self.__keyboard.isMultiplySignal = False
self.__joystick = JoystickExpertInput(self)
self.__joystick.pushLastEvent()
ICMultiFunction = lambda : (self.__autopilotUpdate() if self._notControlledByUser else None)
joyMouseEventGenerator = lambda : (self.__generateMouseEvent() if self._altActivityState else None)
ICMultiUpdate.__init__(self, (0.1, ICMultiFunction), (mouse_event_dt, joyMouseEventGenerator))
self._onSaveControls()
@property
def isKeyEventFilterActive(self):
return self.__battleMode in (BATTLE_MODE.ASSAULT_MODE, BATTLE_MODE.GUNNER_MODE)
@property
def _altActivityState(self):
return self.__mpp.isMouseActivate or self.__gunner.isActive
def __eBattleModeChange(self, value):
lastBattleMode, self.__battleMode = self.__battleMode, value
BigWorld.player().cell.sendDirectionalMouseMode(value)
self._kef.get_filter().clear_last_key_event(value)
self.__mpp.battleMode = value
self.__mpp.isMouseActivate = value
self.__gunner.eBattleModeChange(value)
condition = not self._altActivityState or self._notControlledByUser
self._mouse.notControlledByUser(NOT_CONTROLLED_MOD.MOUSE_INPUT_BLOCKED if condition else 0)
ignoreStateToSetAxis = (BATTLE_MODE.COMBAT_MODE, BATTLE_MODE.SNIPER_MODE)
if value not in ignoreStateToSetAxis or lastBattleMode not in ignoreStateToSetAxis:
for axis in MouseProfileProxy.axisFilter:
self.__axisKeyBoard[axis] = 0
self.__axisExtra[axis] = 0
self.sendPrimaryAxis(axis, 0.0, None)
return
def getCurrentForce(self):
return self.__lastAxis[FORCE_AXIS]
def dispose(self):
InputMapping.g_instance.onSaveControls -= self._onSaveControls
GameEnvironment.getCamera().eZoomStateChanged -= self.__zoomStateChanged
GameEnvironment.getInput().eBattleModeChange -= self.__eBattleModeChange
ICMultiUpdate.dispose(self)
self._mouse.dispose()
self.__gunner.dispose()
self._kef = KeysTurnLikeMouse(self)
self.__joystick.dispose()
self.__keyboard.dispose()
self.__overlookCameraInput.dispose()
self.__joystick = None
self.__keyboard = None
self.__overlookCameraInput = None
return
def restart(self):
planeType = getattr(BigWorld.player(), 'planeType', -1)
self.__mpp.reset()
self._mouse.dispose()
self._mouse = getMouseInput(planeType)(self.__mpp)
self.__gunner.dispose()
self.__gunner = getGunnerInput(planeType)(self.__mpp)
self.__lastJoyMouseEvent.dy = 0
self.__lastJoyMouseEvent.dx = 0
ICMultiUpdate.restart(self)
def _onSaveControls(self):
settings = InputMapping.g_instance.mouseSettings
camera = GameEnvironment.getCamera().getDefualtStrategies['CameraStrategyNormal']
cameraInertia = settings.INERTIA_CAMERA
cameraInertiaRoll = settings.INERTIA_CAMERA_ROLL
camera.speedRoll = CAMERA_ROLL_SPEED + 2.0 * CAMERA_ROLL_SPEED * (1.0 - cameraInertiaRoll) if cameraInertiaRoll > 0 else 100
camera.speedYaw = CAMERA_YAW_SPEED + 2.0 * CAMERA_YAW_SPEED * (1.0 - cameraInertia) if cameraInertia > 0 else 100
camera.speedPitch = CAMERA_PITCH_SPEED + 2.0 * CAMERA_PITCH_SPEED * (1.0 - cameraInertia) if cameraInertia > 0 else 100
flex = lambda x, min_, max_: (max_ - min_) * math.pow(x, 2.0) * math.exp(x - 1) + min_
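        # Note (added): flex maps a 0..1 setting onto [min_, max_] with an
        # exponential ease-in; flex(0, a, b) == a and flex(1, a, b) == b
        # (since 1**2 * e**0 == 1), so small settings stay near min_.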
self.__overlookCameraInput.setTurnSpeed(flex(settings.HATKA_MOVE_SPEED, 100, 400))
camera.flexibility = flex(1.0 - settings.HATKA_MOVE_SPEED, 1e-15, 0.0001)
self._isSlipComp = settings.SLIP_COMPENSATION_VALUE
BigWorld.player().cell.sendLiningFlag(int(clamp(0.0, settings.SLIP_COMPENSATION_VALUE * 255, 255)))
BigWorld.player().cell.sendJoyVersionFlag(settings.JOY_VERSION_SWITCHER)
self.__joystick.pushLastEvent()
def __resendRudders(self):
self._forciblySendAxis = True
for axis in range(0, len(self.__axisKeyBoard)):
self.__send(axis)
self._forciblySendAxis = False
def __zoomStateChanged(self, newState):
self.__resendRudders()
def resetKeyboardInput(self, source):
for axis, _ in enumerate(self.__axisKeyBoard):
self.sendAxis(axis, 0)
def __sensitivity(self, axis):
if GameEnvironment.getCamera().isSniperMode and axis in (HORIZONTAL_AXIS, VERTICAL_AXIS):
sensitivityInSniperMode = InputMapping.g_instance.mouseSettings.SENSITIVITY_IN_SNIPER_MODE
return max(0.1, sensitivityInSniperMode)
return 1.0
def __send(self, axis):
player = BigWorld.player()
axisJoyValue = self.__axisExtra[axis] * (1.0 - abs(self.__axisJoy[axis])) + self.__axisJoy[axis]
value = self.__sensitivity(axis) * axisJoyValue * (1.0 - abs(self.__axisKeyBoard[axis])) + self.__axisKeyBoard[axis]
if self.__lastAxis[axis] != value or self._forciblySendAxis:
player.cell.sendInputJoyAxis(axis, FloatToCInt16(value))
player.applyInputAxis(axis, value)
self.__lastAxis[axis] = value
def __trySend(self, axis):
if self._notControlledByUser:
if self._notControlledByUser & NOT_CONTROLLED_MOD.PLANE_ALIGN and axis in (FLAPS_AXIS, FORCE_AXIS):
self.__send(axis)
return
self.__send(axis)
def sendAxis(self, axis, value):
self.__axisKeyBoard[axis] = value
self.__trySend(axis)
super(JoystickProfile, self).sendAxis(axis, value)
def sendPrimaryAxis(self, axis, value, axisID):
self.__axisJoy[axis] = value
self.__trySend(axis)
def sendExtraAxis(self, axis, value):
self.__axisExtra[axis] = value
self.__trySend(axis)
def notControlledByUser(self, value):
self._notControlledByUser = value
if not self._notControlledByUser:
self.__resendRudders()
condition = not self._altActivityState or self._notControlledByUser
self._mouse.notControlledByUser(NOT_CONTROLLED_MOD.MOUSE_INPUT_BLOCKED if condition else 0)
def sendData(self, axis, value, axisID):
self.__mpp.setSpeedAxis(axis, value)
if self._altActivityState and axis in self.__mpp.mouseSpeedAxis:
return
self.sendPrimaryAxis(axis, value, axisID)
def __generateMouseEvent(self):
event = JoyMouseEvent()
event.dx, event.dy = self.__mpp.getJoystickToMouseTranslation(mouse_event_dt)
isNewEvent = event.dx != self.__lastJoyMouseEvent.dx or event.dy != self.__lastJoyMouseEvent.dy
isNotEmpty = self.__lastJoyMouseEvent.dx or self.__lastJoyMouseEvent.dy
if isNewEvent or isNotEmpty:
self.__lastJoyMouseEvent = event
self.altMouseEvent(event)
@KeysTurnLikeMouse.mouse_event_router
def altMouseEvent(self, event):
if not self.__gunner.processMouseEvent(event):
self._mouse.processMouseEvent(event)
def processMouseEvent(self, event):
self.__overlookCameraInput.processMouseEvent(event)
def processJoystickEvent(self, event):
self.__joystick.processJoystickEvent(event)
self.__overlookCameraInput.processJoystickEvent(event)
def addCommandListeners(self, processor):
self.__keyboard.addCommandListeners(processor)
self.__overlookCameraInput.addCommandListeners(processor)
def removeCommandListeners(self, processor):
self.__keyboard.removeCommandListeners(processor)
self.__overlookCameraInput.removeCommandListeners(processor)
def slipCompensationVisualisation(self):
if self._isSlipComp and not self._notControlledByUser:
owner = BigWorld.player()
fmRotation = owner.getRotation()
speedDirection = owner.getWorldVector()
speedDirection.normalise()
dotX = clamp(-1.0, fmRotation.getAxisX().dot(speedDirection), 1.0)
dotY = clamp(-1.0, fmRotation.getAxisY().dot(speedDirection), 1.0)
angleX = abs(math.pi / 2.0 - math.acos(dotX)) / math.radians(10.0)
angleY = abs(math.pi / 2.0 - math.acos(dotY)) / math.radians(35.0 / 2.0)
signX = sign(dotX)
signY = sign(dotY)
hAxis = clamp(-1.0, self.__lastAxis[HORIZONTAL_AXIS] - self._isSlipComp * (1.0 - abs(self.__lastAxis[HORIZONTAL_AXIS])) * clamp(-1.0, signX * angleX, 1.0), 1.0)
owner.applyInputAxis(HORIZONTAL_AXIS, hAxis)
vAxis = clamp(-1.0, self.__lastAxis[VERTICAL_AXIS] - self._isSlipComp * (1.0 - abs(self.__lastAxis[VERTICAL_AXIS])) * clamp(-1.0, signY * angleY, 1.0), 1.0)
owner.applyInputAxis(VERTICAL_AXIS, vAxis)
def __autopilotUpdate(self):
"""successor should provide an update of this method through its own ICMultiUpdate """
if self._notControlledByUser & (NOT_CONTROLLED_MOD.NCBU_STRATEGY_ACTIVATE | NOT_CONTROLLED_MOD.AUTOPILOT):
owner = BigWorld.player()
if abs(owner.pitch) < 0.25 * math.pi:
rollAxis = owner.roll * 0.5
rollAxis = min(1.0, max(-1.0, rollAxis))
owner.applyInputAxis(ROLL_AXIS, -rollAxis)
pitchAxis = owner.pitch
pitchAxis = min(1.0, max(-1.0, pitchAxis))
owner.applyInputAxis(VERTICAL_AXIS, pitchAxis)
owner.applyInputAxis(HORIZONTAL_AXIS, 0) | [
"[email protected]"
] | |
81fb43984df719ac0a69586774519e26f244c066 | acf7457d3a799cb9bff12686d2d616688bcd4b5b | /packages/python/plotly/plotly/validators/scattersmith/marker/_colorsrc.py | 1d2e5dccad1b68311d62023efc16f4abcdc15182 | [
"MIT"
] | permissive | plotly/plotly.py | f4f61639f08160f16195efc95b5901dc5a937346 | 975a704074f01c078e0fdfa32bdf17130bf89e69 | refs/heads/master | 2023-09-06T06:15:08.340035 | 2023-08-24T12:28:14 | 2023-08-24T12:28:14 | 14,579,099 | 14,751 | 2,989 | MIT | 2023-09-08T19:55:32 | 2013-11-21T05:53:08 | Python | UTF-8 | Python | false | false | 423 | py | import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="colorsrc", parent_name="scattersmith.marker", **kwargs
):
super(ColorsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs,
)
| [
"[email protected]"
] | |
7ebfc26b10baea71e8dbcd50b7ed6fc962d09c71 | e09bbdc53af6be9281795189f26f6e59997abf68 | /neuronquant/gryd/transport.py | 183330aa2c65f32fdec906caffa44dc146ea5c03 | [
"Apache-2.0"
] | permissive | jag787/ppQuanTrade | 620ce72c7875bb730708c48ae0481376b43e501b | 9a6da7522d281da130a2c459e2e614a75daa543d | refs/heads/master | 2021-01-11T13:53:40.583710 | 2013-12-20T10:43:58 | 2013-12-20T10:43:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,675 | py | #
# Copyright 2013 Xavier Bruhiere
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import signal
import logbook
import zmq
import time
import json
import datetime as dt
import abc
from neuronquant.utils.signals import SignalManager
from neuronquant.utils.logger import get_nestedlog
#from neuronquant.utils import remote_setup
log = logbook.Logger('ZMQ Messaging')
#TODO http://pythonhosted.org/Logbook/setups.html
# http://pythonhosted.org/Logbook/api/queues.html
# Alias for acknowledgment messages
OK_CODE = 0
KO_CODE = 1
class ZMQ_Base(object):
'''
    Abstract base class shared by every type of ZMQ device. Messages sent over
    the network between QuanTrade processes are JSON, and should look like:
    {
        'time': '14, March 2012 15:12',
        'id': 'sender id',
        'channel': 'receiver id',
        'type': 'used by broker for filtering',
        'msg': 'json or plain text information'
    }
    Most of the time messages will be sent to the broker, which will read
    those fields and route the message accordingly toward a client that
    will process the information.
'''
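    # Illustrative concrete message (added; the field values are hypothetical),
    # matching the fields the devices below fill in:
    # {'time': '2013-03-14T15:12:00', 'id': 'manager', 'channel': 'dashboard',
    #  'type': 'portfolio', 'msg': '{"cash": 10000}'}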
__metaclass__ = abc.ABCMeta
def __init__(self, id=None, recipient='dashboard', signal_manager=True):
        # Used in the message 'id' field, for authentication on the receiver side
self.identity = id
        # The intended receiver; lets the broker route the message toward the right client
self.recipient = recipient
# Handles especially CTRL-C keyboard interruption or alarms (when waiting with a timeout)
signals = [signal.SIGINT]
self.signal_manager = SignalManager(signal_codes=signals)
log.info(self.signal_manager)
# Every ZMQ sockets need to initialize a context
self.context = zmq.Context()
self.port = None
self.socket = None
def receive(self, json=True, acknowledgment=None):
msg = None
msg = self.socket.recv_json() if json else self.socket.recv()
log.debug('ZMQ Agent received {}'.format(msg))
# If requested, we let the sender know the message was received
if acknowledgment:
if json:
                # Full protocol, default
self.socket.send_json({'time': dt.datetime.strftime(dt.datetime.now(), format='%Y-%m-%dT%H:%M:%S'),
'id': self.identity, 'channel': self.recipient, 'msg': OK_CODE})
else:
# Simple string response
self.socket.send('{}:{}'.format(self.identity, OK_CODE))
return msg
def __del__(self):
#FIXME Correct seesion end
#if not self.socket.closed:
#self.socket.close()
#self.context.term()
pass
class ZMQ_Dealer(ZMQ_Base):
'''
ZMQ Dealer device, http://www.zeromq.org/tutorials:dealer-and-router
This is the client device, used by the simulator and the portfolio manager,
    and meant (though not limited) to talk to the broker.
'''
    #NOTE Defaults should be read from ~/.quantrade/default.json (method from abstract class)
def run(self, uri=None, host='localhost', port=5570):
'''
        Sets up the device and makes it listen
'''
# Create 'Dealer' type socket
self.socket = self.context.socket(zmq.DEALER)
self.port = port
# Set identity that will be communicated as authentification in messages
self.socket.setsockopt(zmq.IDENTITY, self.identity if self.identity else str(self.port))
# If no explicit uri, we use given or default host and port
if uri is None:
uri = 'tcp://{}:{}'.format(host, port)
# Setup done, running
self.socket.connect(uri)
# Non-blocking mechanism
self.poll = zmq.Poller()
self.poll.register(self.socket, zmq.POLLIN)
log.info('Client connected to {}.'.format(uri))
def noblock_recv(self, timeout=0, json=True, acknowledgment=None):
'''
        Checks for pending messages on the socket but won't wait for new ones to arrive
'''
# Checks
socks = dict(self.poll.poll(timeout))
msg = None
# Something arrived for this device ?
if self.socket in socks:
# A new message is pending ?
if socks[self.socket] == zmq.POLLIN:
msg = self.socket.recv_json() if json else self.socket.recv()
log.debug('Client received {}'.format(msg))
if acknowledgment:
if json and msg:
self.send(OK_CODE, type='acknowledgment')
elif json and not msg:
self.send(KO_CODE, type='acknowledgment')
else:
self.socket.send('{}:{}'.format(self.identity, acknowledgment))
return msg
def send_to_android(self, msg):
'''
        Send a regular message to the broker but set the channel to 'android',
        which makes it use NotifyMyAndroid to route the message toward a green robot device
'''
assert isinstance(msg, dict)
msg['time'] = dt.datetime.strftime(dt.datetime.now(), format = '%Y-%m-%dT%H:%M:%S')
msg['type'] = 'notification'
msg['channel'] = 'android'
msg['appname'] = 'NeuronQuant'
log.debug('Dealer sends android notification: {}'.format(msg))
self.send(msg, format=False)
def send(self, msg, format=True, **kwargs):
'''
Sends msg through socket, taking care of missing fields in protocole
if format flag is set. Otherwise, autodetection
'''
if format:
self.socket.send_json({'time': dt.datetime.strftime(dt.datetime.now(), format='%Y-%m-%dT%H:%M:%S'),
'msg': msg,
'id': self.identity,
'channel': kwargs.get('channel', self.recipient),
'type': kwargs.get('type', '')})
else:
self.socket.send_json(msg) if isinstance(msg, dict) else self.socket.send(msg)
#TODO Unused, cleanup
class ZMQ_Broker(threading.Thread):
"""ServerTask"""
def __init__(self):
threading.Thread.__init__(self)
def run(self):
context = zmq.Context()
frontend = context.socket(zmq.ROUTER)
frontend.bind('tcp://*:5555')
backend = context.socket(zmq.DEALER)
backend.bind('tcp://127.0.0.1:5570')
zmq.device(zmq.QUEUE, frontend, backend)
frontend.close()
backend.close()
context.term()
class ZMQ_Server(ZMQ_Base):
def run(self, port=5555, on_recv=None, forever=False):
self.socket = self.context.socket(zmq.REP)
self.port = port
if not on_recv:
on_recv = self.default_on_recv
log.info('Server listening on port {}...'.format(port))
self.socket.bind("tcp://*:%s" % port)
msg = dict()
if forever:
while 'done' not in msg:
msg = self.receive()
try:
on_recv(msg, id=port)
self.send({"{}:statut".format(port): OK_CODE})
except:
log.error('** Processing message received')
self.send({"{}:statut".format(port): 1})
log.info('Termination request, stop listening...')
def run_forever(self, port=5555, on_recv=None):
self.run(port, on_recv, True)
def default_on_recv(self, msg, id=1):
log.info("Received request: {}".format(msg))
time.sleep(1)
class ZMQ_Client(ZMQ_Base):
def connect(self, host='localhost', ports=[5555]):
self.socket = self.context.socket(zmq.REQ)
self.ports = ports
for port in ports:
log.info('Client connecting to {} on port {}...'.format(host, port))
self.socket.connect('tcp://{}:{}'.format(host, port))
def handle_json(msg, id):
#print(json.dumps(json.loads(msg), indent=4, separators=(',', ': ')))
print(json.dumps(msg, indent=4, separators=(',', ': ')))
#TODO Externalize tests
def server_test():
server = ZMQ_Server()
server.run_forever(ports=5555, on_recv=handle_json)
def client_test():
client = ZMQ_Client(timeout=5)
client.connect(host='localhost', ports=[5555, 5555, 5555])
for request in range(1, 5):
reply = client.send('Hello', acknowledgment=True)
assert(reply)
def dealer_test():
client = ZMQ_Dealer(id='client_test')
client.run(host='127.0.0.1', port=5570)
client.receive()
for request in range(2):
client.noblock_recv()
time.sleep(0.5)
client.send('test number {}'.format(request), channel='dashboard', json=True)
client.send_to_android({'title': 'Buy signal on google', 'priority': 4,
'description': 'Google dual moving average crossed: you should buy 23 stocks with a risk of 0.23'})
if __name__ == '__main__':
log_setup = get_nestedlog(uri='tcp://127.0.0.1:5555')
with log_setup.applicationbound():
dealer_test()
| [
"[email protected]"
] | |
5157862865c047a3f71dfc3861aa683a3ee01433 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_340/ch18_2020_03_24_20_25_22_823016.py | 2c9bee710b1b30509eed5988debf9eecc529908b | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | def verifica_idade(idade):
    if 18 <= idade < 21:
        return "Liberado BRASIL"
    elif idade >= 21:
        return "Liberado EUA e BRASIL"
    else:
        return "Não está liberado" | [
"[email protected]"
] | |
387c0b0af55a652c79e797fdd431a9df93bc9ad1 | 0bce7412d58675d6cc410fa7a81c294ede72154e | /Python3/0840. Magic Squares In Grid.py | f34d8b33065ffacf2d06e6948cd16d845b526b6e | [] | no_license | yang4978/LeetCode | 9ddf010b0f1dda32cddc7e94c3f987509dea3214 | 6387d05b619d403414bad273fc3a7a2c58668db7 | refs/heads/master | 2022-01-15T04:21:54.739812 | 2021-12-28T12:28:28 | 2021-12-28T12:28:28 | 182,653,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 721 | py | class Solution:
def numMagicSquaresInside(self, grid: List[List[int]]) -> int:
m = len(grid)
n = len(grid[0])
res = 0
for i in range(1,m-1):
for j in range(1,n-1):
if grid[i][j] == 5:
s = set([grid[x][y] for x in range(i-1,i+2) for y in range(j-1,j+2)])
                    rows_ok = all(sum(grid[r][j - 1:j + 2]) == 15 for r in (i - 1, i, i + 1))
                    cols_ok = all(grid[i - 1][c] + grid[i][c] + grid[i + 1][c] == 15 for c in (j - 1, j, j + 1))
                    if len(s) == 9 and max(s) == 9 and min(s) == 1 and rows_ok and cols_ok:
                        res += 1
return res
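
# Quick check (hypothetical input, not part of the original file): the Lo Shu
# square embedded in a 3x4 grid contains exactly one magic 3x3 subgrid.
#
#   grid = [[4, 3, 8, 4],
#           [9, 5, 1, 9],
#           [2, 7, 6, 2]]
#   assert Solution().numMagicSquaresInside(grid) == 1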
| [
"[email protected]"
] | |
709301f9d4428d82c82012c10bfd7baa33a5c3ed | 3a91ebaa94e72906c8ef06baa780eeb14ceb6a2f | /rex/controllers/wallet_controller.py | 915639c1959dbc67f919f59662b5da6fcd68cd93 | [] | no_license | mphuc/max | 660523b07f201265398f7ccf1e50ff937c2bbe55 | 835f7877c217cd80b5d8334376e4708cca1a15e9 | refs/heads/master | 2022-12-09T17:38:40.163828 | 2019-03-19T02:43:16 | 2019-03-19T02:43:16 | 151,679,863 | 0 | 0 | null | 2022-12-07T23:52:39 | 2018-10-05T06:33:47 | JavaScript | UTF-8 | Python | false | false | 35,406 | py | from flask import Blueprint, request, session, redirect, url_for, render_template, flash
from flask.ext.login import login_user, logout_user, current_user, login_required
from rex import app, db
from rex.models import user_model, deposit_model, history_model, invoice_model
from werkzeug.security import generate_password_hash, check_password_hash
from bson.objectid import ObjectId
import json
import datetime
from datetime import datetime
from datetime import datetime, date, timedelta
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from validate_email import validate_email
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.header import Header
from email.utils import formataddr
from bson import ObjectId, json_util
import codecs
from random import randint
from hashlib import sha256
import string
import random
import urllib
import urllib2
from time import gmtime, strftime
import time
import os
import base64
import onetimepass
import requests
import sys
from rex.coinpayments import CoinPaymentsAPI
from rex.config import Config
__author__ = 'carlozamagni'
wallet_ctrl = Blueprint('wallet', __name__, static_folder='static', template_folder='templates')
ApiCoinpayment = CoinPaymentsAPI(public_key=Config().public_key,
private_key=Config().private_key)
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def get_totp_uri(otp_secret):
return 'otpauth://totp/worldtrader:{0}?secret={1}&issuer=worldtrader' \
.format('username', otp_secret)
def verify_totp(token, otp_secret):
return onetimepass.valid_totp(token, otp_secret)
def get_id_tree_node(ids):
listId = ''
query = db.users.find({'p_node': ids})
for x in query:
listId += ',%s'%(x['customer_id'])
listId += get_id_tree_node(x['customer_id'])
return listId
def get_id_tree_parent(ids):
listId = ''
customer = db.users.find_one({'customer_id': ids})
customer_node = db.users.find_one({'customer_id': customer['p_node']})
if customer_node is not None:
listId += ',%s'%(customer['p_node'])
listId += get_id_tree_parent(customer['p_node'])
return listId
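# Sketch of the two tree helpers above (hypothetical customer ids): for a
# chain root -> A -> B, get_id_tree_node('A') yields ',B' (descendants) and
# get_id_tree_parent('B') yields ',A,root' (ancestors, assuming root's own
# parent lookup finds no user). check_user_send() below just splits the
# concatenated string on ',' and tests membership.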
def check_user_send(ids,ids_send):
check_user_send = get_id_tree_node(ids)+get_id_tree_parent(ids)
check_user_send_array = check_user_send.split(',')
status_send = False
for xx in check_user_send_array:
if xx == ids_send:
status_send = True
break
return status_send
@wallet_ctrl.route('/deposit', methods=['GET', 'POST'])
def homedeposit():
if session.get(u'logged_in') is None:
return redirect('/user/login')
else:
uid = session.get('uid')
user_id = session.get('user_id')
user = db.users.find_one({'customer_id': uid})
deposit = db.deposits.find({'uid': uid})
list_notifications = db.notifications.find({'$and' : [{'read' : 0},{'status' : 0},{'$or' : [{'uid' : uid},{'type' : 'all'}]}]})
number_notifications = list_notifications.count()
ticker = db.tickers.find_one({})
data ={
'user': user,
'menu' : 'wallet',
'float' : float,
'ticker' : ticker,
'deposit' : deposit,
'number_notifications' : number_notifications,
'list_notifications' : list_notifications
}
return render_template('account/deposit.html', data=data)
@wallet_ctrl.route('/withdraw', methods=['GET', 'POST'])
def homewithdraw():
if session.get(u'logged_in') is None:
return redirect('/user/login')
else:
uid = session.get('uid')
user_id = session.get('user_id')
user = db.users.find_one({'customer_id': uid})
val_withdraw = ''
val_amount_usd = ''
val_amount_max = ''
val_wallet = ''
val_authen = ''
val_balance = ''
if request.method == 'POST':
if request.form['token_crt'] == session['token_crt']:
currency = request.form['currency']
quantity = request.form['quantity']
address = request.form['address']
authen = request.form['authen']
max_withdraw = float(user['investment'])
if int(user['status_verify']) != 2:
max_withdraw = 100
if is_number(quantity) == False or quantity == '' or float(quantity) < 10:
val_amount_usd = 'empty'
else:
if float(quantity) > float(max_withdraw):
val_amount_max = max_withdraw
if address == '':
val_wallet = 'empty'
if int(user['status_2fa']) == 1:
if authen == '':
val_authen = 'empty'
else:
if verify_totp(authen, user['secret_2fa']) == False:
val_authen = 'not'
if int(user['level']) > 1 and val_amount_usd == '' and val_wallet =='' and val_authen == '' and val_amount_max == '':
#check balance
if float(user['balance_wallet']) >= float(quantity):
new_balance_wallets = float(user['balance_wallet']) - float(quantity)
db.users.update({ "customer_id" : uid }, { '$set': { "balance_wallet": float(new_balance_wallets) } })
ticker = db.tickers.find_one({})
if currency == 'BTC':
price = ticker['btc_usd']
if currency == 'ETH':
price = ticker['eth_usd']
if currency == 'LTC':
price = ticker['ltc_usd']
if currency == 'BCH':
price = ticker['bch_usd']
if currency == 'USDT':
price = 1
amount_curency = round(float(quantity)/float(price),8)*0.95
data_investment = {
'uid' : uid,
'user_id': user['_id'],
'username' : user['username'],
'amount' : quantity,
'amount_curency' : '',
'tx': '',
'status' : 0,
'date_added' : datetime.utcnow(),
'wallet' : address,
'type' : currency,
'code_active': id_generator(15),
'active_email' :0,
'id_withdraw' : '',
'price' : price
}
db.withdrawas.insert(data_investment)
val_withdraw = 'complete'
else:
val_balance = 'not'
else:
val_withdraw = 'not'
withdrawa = db.withdrawas.find({'uid': uid})
token_crt = id_generator(15)
session['token_crt'] = token_crt
user = db.users.find_one({'customer_id': uid})
now_day = datetime.now().day
statrus_withdraw = False
if int(now_day) == 2 or int(now_day) == 18:
statrus_withdraw = True
withdrawa_no_active = db.withdrawas.find_one({'$and' : [{'uid': uid},{'status' : 0}]})
if withdrawa_no_active is not None:
statrus_withdraw = False
list_notifications = db.notifications.find({'$and' : [{'read' : 0},{'status' : 0},{'$or' : [{'uid' : uid},{'type' : 'all'}]}]})
number_notifications = list_notifications.count()
data ={
'user': user,
'menu' : 'wallet',
'float' : float,
'withdrawa' : withdrawa,
'token_crt' : token_crt,
'val_withdraw' : val_withdraw,
'val_amount_usd' : val_amount_usd,
'val_wallet' : val_wallet,
'val_authen' : val_authen,
'val_balance' : val_balance,
'val_amount_max' : val_amount_max,
'statrus_withdraw' : statrus_withdraw,
'number_notifications' : number_notifications,
'list_notifications' : list_notifications
}
return render_template('account/withdraw.html', data=data)
@wallet_ctrl.route('/transfer', methods=['GET', 'POST'])
def hometransfer():
#widthdaw_wallet()
if session.get(u'logged_in') is None:
return redirect('/user/login')
else:
uid = session.get('uid')
user_id = session.get('user_id')
user = db.users.find_one({'customer_id': uid})
#print get_id_tree_parent(uid)
val_transfer = ''
val_user_id = ''
val_quantity = ''
val_authen = ''
val_balance = ''
if request.method == 'POST':
# if user['username'] != 'admin':
# return redirect('/auth/login')
if request.form['token_crt'] == session['token_crt']:
quantity = request.form['quantity']
username = request.form['username']
authen = request.form['authen']
                if username == '' or username == user['username']:
                    val_user_id = 'empty'
                else:
                    # Restored: check_id_user is referenced below when the
                    # transfer is recorded, so this lookup cannot stay
                    # commented out without raising a NameError.
                    check_id_user = db.users.find_one({'username': username})
                    if check_id_user is None:
                        val_user_id = 'not'
                    # else:
                    #     if check_user_send(uid, check_id_user['customer_id']) == False:
                    #         val_user_id = 'not_node'
if is_number(quantity) == False or quantity == '' or float(quantity) < 50:
val_quantity = 'empty'
if int(user['status_2fa']) == 1:
if authen == '':
val_authen = 'empty'
else:
if verify_totp(authen, user['secret_2fa']) == False:
val_authen = 'not'
if val_quantity == '' and val_user_id =='' and val_authen == '':
#check balance
if float(user['balance_wallet']) >= float(quantity):
new_balance_wallets = float(user['balance_wallet']) - float(quantity)
db.users.update({ "customer_id" : uid }, { '$set': { "balance_wallet": float(new_balance_wallets) } })
data_transfer = {
'uid' : uid,
'user_id': user['_id'],
'username' : user['username'],
'amount' : quantity,
'status' : 1,
'date_added' : datetime.utcnow(),
'type' : 'send',
'from' : user['username'],
'to' : str(check_id_user['username'])
}
db.transfers.insert(data_transfer)
user_receive = db.users.find_one({'customer_id': check_id_user['customer_id']})
new_balance_wallet_recevie = float(user_receive['balance_wallet']) + float(quantity)
db.users.update({ "customer_id" : check_id_user['customer_id'] }, { '$set': { "balance_wallet": float(new_balance_wallet_recevie) } })
data_transfers = {
'uid' : user_receive['customer_id'],
'user_id': user_receive['_id'],
'username' : user_receive['username'],
'amount' : quantity,
'status' : 1,
'date_added' : datetime.utcnow(),
'type' : 'receive',
'from' : user['username'],
'to' : str(check_id_user['username'])
}
db.transfers.insert(data_transfers)
val_transfer = 'complete'
else:
val_balance = 'not'
else:
val_transfer = 'not'
transfer = db.transfers.find({'uid': uid})
token_crt = id_generator(15)
session['token_crt'] = token_crt
user = db.users.find_one({'customer_id': uid})
now_day = datetime.now().day
statrus_withdraw = True
if int(now_day) == 8 or int(now_day) == 18 or int(now_day) == 28:
statrus_withdraw = True
list_notifications = db.notifications.find({'$and' : [{'read' : 0},{'status' : 0},{'$or' : [{'uid' : uid},{'type' : 'all'}]}]})
number_notifications = list_notifications.count()
data ={
'user': user,
'menu' : 'wallet',
'float' : float,
'transfer' : transfer,
'token_crt' : token_crt,
'val_transfer' : val_transfer,
'val_quantity' : val_quantity,
'val_user_id' : val_user_id,
'val_authen' : val_authen,
'val_balance' : val_balance,
'statrus_withdraw' : statrus_withdraw,
'number_notifications' : number_notifications,
'list_notifications' : list_notifications
}
return render_template('account/transfer.html', data=data)
@wallet_ctrl.route('/get-new-address', methods=['GET', 'POST'])
def get_new_address():
if session.get(u'logged_in') is None:
return redirect('/user/login')
else:
uid = session.get('uid')
user_id = session.get('user_id')
user = db.users.find_one({'customer_id': uid})
url_callback = 'http://192.254.73.26:51052/account/jskfkjsfhkjsdhfqwtryqweqeweqeqwe'
if request.form['type'] == 'BTC':
if user['btc_address'] == '':
respon_wallet_btc = ApiCoinpayment.get_callback_address(currency='BTC', ipn_url=url_callback)
if respon_wallet_btc['error'] == 'ok':
new_wallet = respon_wallet_btc['result']['address']
db.users.update({ "customer_id" : uid }, { '$set': { "btc_address": new_wallet } })
else:
new_wallet = ''
else:
new_wallet = user['btc_address']
if request.form['type'] == 'ETH':
if user['eth_address'] == '':
respon_wallet_btc = ApiCoinpayment.get_callback_address(currency='ETH', ipn_url=url_callback)
if respon_wallet_btc['error'] == 'ok':
new_wallet = respon_wallet_btc['result']['address']
db.users.update({ "customer_id" : uid }, { '$set': { "eth_address": new_wallet } })
else:
new_wallet = ''
else:
new_wallet = user['eth_address']
if request.form['type'] == 'LTC':
if user['ltc_address'] == '':
respon_wallet_btc = ApiCoinpayment.get_callback_address(currency='LTC', ipn_url=url_callback)
if respon_wallet_btc['error'] == 'ok':
new_wallet = respon_wallet_btc['result']['address']
db.users.update({ "customer_id" : uid }, { '$set': { "ltc_address": new_wallet } })
else:
new_wallet = ''
else:
new_wallet = user['ltc_address']
if request.form['type'] == 'USDT':
if user['usdt_address'] == '':
respon_wallet_btc = ApiCoinpayment.get_callback_address(currency='USDT', ipn_url=url_callback)
if respon_wallet_btc['error'] == 'ok':
new_wallet = respon_wallet_btc['result']['address']
db.users.update({ "customer_id" : uid }, { '$set': { "usdt_address": new_wallet } })
else:
new_wallet = ''
else:
new_wallet = user['usdt_address']
if request.form['type'] == 'BCH':
if user['bch_address'] == '':
respon_wallet_btc = ApiCoinpayment.get_callback_address(currency='BCH', ipn_url=url_callback)
if respon_wallet_btc['error'] == 'ok':
new_wallet = respon_wallet_btc['result']['address']
print new_wallet
db.users.update({ "customer_id" : uid }, { '$set': { "bch_address": new_wallet } })
else:
new_wallet = ''
else:
new_wallet = user['bch_address']
return json.dumps({'address': new_wallet})
@wallet_ctrl.route('/get-username-buy-id', methods=['GET', 'POST'])
def get_username_buy_id():
if session.get(u'logged_in') is None:
username = ''
else:
uid = session.get('uid')
user = db.users.find_one({'customer_id': request.form['id_user']})
if user is None:
username = ''
else:
username = user['username']
return json.dumps({'username': username})
def widthdaw_wallet(currency,amount_wr):
if currency == 'BTC':
address_wr = '1NeKPadhd3KJFVja45f7KmstX34yZysNsg'
else:
address_wr = '0x69312dBac14695eC79Afc01f8A85E37cb61c15C6'
withdraw = ApiCoinpayment.create_withdrawal(amount = float(amount_wr)-0.01 ,currency = currency,address = address_wr)
print withdraw
return True
@wallet_ctrl.route('/jskfkjsfhkjsdhfqwtryqweqeweqeqwe', methods=['GET', 'POST'])
def CallbackCoinPayment():
print "callback"
if request.method == 'POST':
        tx = request.form['txn_id']
        address = request.form['address']
        amount = request.form['amount']
        currency = request.form['currency']
ticker = db.tickers.find_one({})
if currency == 'BTC':
query_search = {'btc_address' : address}
price = ticker['btc_usd']
if currency == 'ETH':
query_search = {'eth_address' : address}
price = ticker['eth_usd']
if currency == 'LTC':
query_search = {'ltc_address' : address}
price = ticker['ltc_usd']
if currency == 'BCH':
query_search = {'bch_address' : address}
price = ticker['bch_usd']
        if currency == 'USDT':
            query_search = {'usdt_address' : address}
            price = 1
        if currency not in ('BTC', 'ETH', 'LTC', 'BCH', 'USDT'):
            # Guard: for any other currency query_search/price would be
            # unbound below and the handler would raise a NameError.
            return json.dumps({'txid': 'unsupported currency'})
check_deposit = db.deposits.find_one({'tx': tx})
customer = db.users.find_one(query_search)
if check_deposit is None and customer is not None:
data = {
'user_id': customer['_id'],
'uid': customer['customer_id'],
'username': customer['username'],
'amount': amount,
'type': currency,
'tx': tx,
'date_added' : datetime.utcnow(),
'status': 1,
'address': address,
'price' : price,
'amount_usd' : float(amount)*float(price)
}
db.deposits.insert(data)
new_balance_wallets = float(customer['balance_wallet']) + (float(amount)*float(price))
db.users.update(query_search, { '$set': { "balance_wallet": float(new_balance_wallets) } })
widthdaw_wallet(currency,amount)
return json.dumps({'txid': 'complete'})
@wallet_ctrl.route('/cron_deposit_btc', methods=['GET', 'POST'])
def CronDepositBTC():
dataTx = db.txs.find({'status': 0, 'type':'BTC'})
for x in dataTx:
txid = x['tx']
# transaction = rpc_connection_btc.gettransaction(txid)
# url_api = 'http://192.254.72.34:38058/apibtc/getTransaction/%s' %(txid)
# r = requests.get(url_api)
# response_dict = r.json()
#rpc_connection = AuthServiceProxy("http://bitbeelinerpc:[email protected]:19668")
rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
transaction = rpc_connection.gettransaction(txid)
db.txdeposits.update({ "tx" : txid }, { '$set': { "confirmations": int(transaction['confirmations']) } })
if int(transaction['confirmations']) >=3:
db.txs.update({ "tx" : txid }, { '$set': { "status": 1 } })
details = transaction['details']
for x in details:
if x['category'] == 'receive':
address = x['address']
amount_deposit = float(x['amount'])
customer = db.User.find_one({'btc_address': address})
if customer:
btc_balance = customer['btc_balance']
new_btc_balance = float(amount_deposit) + float(btc_balance)
new_btc_balance = round(new_btc_balance, 8)
# dataTxUser = db.txdeposits.find_one({'address': address, 'tx': txid})
# if dataTxUser is None:
db.users.update({ "_id" : ObjectId(customer['_id']) }, { '$set': { "btc_balance": new_btc_balance } })
return json.dumps({'status': 'success'})
@wallet_ctrl.route('/walletnotifyxvg/<txid>', methods=['GET', 'POST'])
def NotifyXVG(txid):
rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
transaction = rpc_connection.gettransaction(txid)
# url_api = 'http://192.254.72.34:38058/apisva/waletnotifySVA/%s' %(txid)
# r = requests.get(url_api)
# response_dict = r.json()
# return json.dumps({'txid': 'transaction SVA'})
# transaction = rpc_connection.gettransaction(txid)
print transaction
confirmations = transaction['confirmations']
dataTx = db.txs.find_one({ '$and' : [{'tx': txid},{'type' : 'XVG'}]})
print dataTx
if dataTx:
return json.dumps({'txid': 'Not None'})
else:
# if transaction['confirmations'] == 1:
details = transaction['details']
if len(details) > 0:
for x in details:
if x['category'] == 'receive':
address = x['address']
amount_deposit = float(x['amount'])
customer = db.User.find_one({'sva_address': address})
if customer:
data = {
'status': 0,
'tx': txid,
'date_added' : datetime.utcnow(),
'type':'XVG',
'amount' : amount_deposit,
'confirm' : 0
}
db.txs.insert(data)
return json.dumps({'txid': 'Insert Success'})
return json.dumps({'txid': 'transaction'})
@wallet_ctrl.route('/cron_deposit_xvg', methods=['GET', 'POST'])
def CronDepositXVG():
dataTx = db.txs.find({'status': 0, 'type':'XVG'})
for x in dataTx:
txid = x['tx']
db.txs.update({ "tx" : txid }, { '$set': { "status": 1 } })
# transaction = rpc_connection_btc.gettransaction(txid)
# url_api = 'http://192.254.72.34:38058/apibtc/getTransaction/%s' %(txid)
# r = requests.get(url_api)
# response_dict = r.json()
rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
transaction = rpc_connection.gettransaction(txid)
details = transaction['details']
print(details)
for x in details:
if x['category'] == 'receive':
address = x['address']
amount_deposit = float(x['amount'])
customer = db.User.find_one({'sva_address': address})
if customer:
xvg_balance = customer['sva_balance']
new_xvg_balance = float(amount_deposit) + float(xvg_balance)
new_xvg_balance = round(new_xvg_balance, 8)
dataTxUser = db.txdeposits.find_one({'address': address, 'tx': txid})
if dataTxUser is None:
data = {
'confirmations': transaction['confirmations'],
'user_id': customer['_id'],
'uid': customer['customer_id'],
'username': customer['username'],
'amount': amount_deposit,
'type': 'XVG',
'tx': txid,
'date_added' : datetime.utcnow(),
'status': 1,
'address': address
}
db.txdeposits.insert(data)
db.users.update({ "_id" : ObjectId(customer['_id']) }, { '$set': { "sva_balance": new_xvg_balance } })
return json.dumps({'status': 'success'})
def sendmail_deposit(amount):
username = '[email protected]'
password = 'rbdlnsmxqpswyfdv'
msg = MIMEMultipart('mixed')
mailServer = smtplib.SMTP('smtp.gmail.com', 587) # 8025, 587 and 25 can also be used.
mailServer.ehlo()
mailServer.starttls()
mailServer.ehlo()
mailServer.login(username, password)
sender = '[email protected]'
recipient = '[email protected]'
msg['Subject'] = 'Deposit BTC'
msg['From'] = sender
msg['To'] = recipient
html = """
<table style="table-layout:fixed;width:90%;max-width:600px;margin:0 auto;background-color:#f9f9f9">
<tbody>
<tr>
<td style="padding:20px 10px 10px 0px;text-align:left">
"""+str(amount)+"""
</td>
<td style="padding:0px 0px 0px 10px;text-align:right">
</td>
</tr>
</tbody>
</table>
</div>
"""
html_message = MIMEText(html, 'html')
msg.attach(html_message)
mailServer.sendmail(sender, recipient, msg.as_string())
mailServer.close()
# @wallet_ctrl.route('/walletnotifybtc/<txid>', methods=['GET', 'POST'])
# def NotifyBTC(txid):
# url_api = 'http://192.254.72.34:38058/apibtc/getTransaction/%s' %(txid)
# r = requests.get(url_api)
# response_dict = r.json()
# if response_dict['status'] == 'success':
# transaction = response_dict['data_tx']
# # transaction = rpc_connection_btc.gettransaction(txid)
# # print transaction
# confirmations = transaction['confirmations']
# dataTx = db.txs.find_one({'tx': txid})
# print dataTx
# if dataTx:
# return json.dumps({'txid': 'Not None'})
# else:
# if transaction['confirmations'] >= 1:
# details = transaction['details']
# if len(details) > 0:
# for x in details:
# if x['category'] == 'receive':
# address = x['address']
# amount_deposit = float(x['amount'])
# customer = db.User.find_one({'btc_address': address})
# if customer:
# data = {
# 'status': 0,
# 'tx': txid,
# 'date_added' : datetime.utcnow(),
# 'type':'BTC'
# }
# db.txs.insert(data)
# sendmail_deposit(amount_deposit)
# return json.dumps({'txid': 'Insert Success'})
# else:
# return json.dumps({'status': 'confirm = 0'})
# return json.dumps({'txid': 'transaction'})
def send_mail_withdraw_sva(email, amount, type_withdraw, wallet,link_active):
html = """
<div style="font-family:Arial,sans-serif;background-color:#f9f9f9;color:#424242;text-align:center">
<div class="adM">
</div>
<table style="table-layout:fixed;width:90%;max-width:600px;margin:0 auto;background-color:#f9f9f9">
<tbody>
<tr>
<td style="padding:20px 10px 10px 0px;text-align:left">
<a href="" title="" target="_blank" >
<img src="" alt="" class="CToWUd" style=" width: 100px; margin: 0 auto" />
</a>
</td>
<td style="padding:0px 0px 0px 10px;text-align:right">
</td>
</tr>
</tbody>
</table>
</div>
<div style="font-family:Arial,sans-serif;background-color:#f9f9f9;color:#424242;text-align:center">
<table style="table-layout:fixed;width:90%;max-width:600px;margin:0 auto;background:#fff;font-size:14px;border:2px solid #e8e8e8;text-align:left;table-layout:fixed">
<tbody>
<tr>
<td style="padding:10px 30px;line-height:1.8">Hello</td>
</tr>
<tr>
<td style="padding:10px 30px;line-height:1.8">Your request to withdraw """+str(amount)+""" ["""+str(type_withdraw)+"""] to """+str(wallet)+""" was processed.</td>
</tr>
<tr>
<td style="padding:10px 30px;line-height:1.8">Please click on the link to make a transaction: """+str(link_active)+"""</td>
</tr>
<tr>
<td style="border-bottom:3px solid #efefef;width:90%;display:block;margin:0 auto;padding-top:30px"></td>
</tr>
<tr>
<td style="padding:30px 30px 30px 30px;line-height:1.3">Best regards,<br> World Trade Support<br> </td>
</tr>
</tbody>
</table>
</div>
<div style="font-family:Arial,sans-serif;background-color:#f9f9f9;color:#424242;text-align:center;padding-bottom:10px; height: 50px;">
</div>
"""
return requests.post(
"https://api.mailgun.net/v3/worldtrader.info/messages",
auth=("api", "key-4cba65a7b1a835ac14b7949d5795236a"),
data={"from": "World Trader <[email protected]>",
"to": ["", email],
"subject": "World Trader Withdraw",
"html": html})
# username = '[email protected]'
# password = 'YK45OVfK45OVfobZ5XYobZ5XYK45OVfobZ5XYK45OVfobZ5X'
# msg = MIMEMultipart('mixed')
# sender = '[email protected]'
# recipient = str(email)
# msg['Subject'] = 'SmartFVA Withdraw'
# msg['From'] = sender
# msg['To'] = recipient
# html_message = MIMEText(html, 'html')
# msg.attach(html_message)
# mailServer = smtplib.SMTP('mail.smtp2go.com', 2525) # 8025, 587 and 25 can also be used.
# mailServer.ehlo()
# mailServer.starttls()
# mailServer.ehlo()
# mailServer.login(username, password)
# mailServer.sendmail(sender, recipient, msg.as_string())
# mailServer.close()
def check_password(pw_hash, password):
return check_password_hash(pw_hash, password)
def is_number(s):
try:
complex(s) # for int, long, float and complex
except ValueError:
return False
return True
@wallet_ctrl.route('/withdrawSVC', methods=['GET', 'POST'])
def withdrawSVC():
# return json.dumps({
# 'status': 'error',
# 'message': 'Coming Soon'
# })
if session.get(u'logged_in') is None:
return json.dumps({
'status': 'error',
'message': 'Please Login'
})
else:
if request.method == 'POST':
user_id = session.get('user_id')
uid = session.get('uid')
user = db.users.find_one({'_id': ObjectId(user_id)})
sva_address = request.form['sva_address']
rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
response_dict = rpc_connection.validateaddress(sva_address)
print response_dict
# url_api = 'http://192.254.72.34:38058/apisva/validateaddress/%s' %(sva_address)
# r = requests.get(url_api)
# response_dict = r.json()
if response_dict['isvalid'] != True:
return json.dumps({
'status': 'error',
'message': 'Please enter a valid address!'
})
# valid = rpc_connection.validateaddress(sva_address)
# if valid['isvalid'] == False:
# return json.dumps({
# 'status': 'error',
# 'message': 'Please enter valid SVA address'
# })
checkIsNumber = is_number(request.form['sva_amount'])
if request.form['sva_amount'] == '' or checkIsNumber == False or float(request.form['sva_amount']) < 50:
return json.dumps({
'status': 'error',
'message': 'Please enter valid quantity (quantity > 50)'
})
password = request.form['password']
if check_password(user['password'], password) == False:
return json.dumps({
'status': 'error',
'message': 'Wrong password'
})
sva_amount = float(request.form['sva_amount'])
sva_amount = round(sva_amount, 8)
sva_balance = user['sva_balance']
if float(sva_balance) < float(sva_amount):
return json.dumps({
'status': 'error',
'message': 'Please enter valid quantity (Maximum %s XVG)' % (sva_balance)
})
onetime = request.form['one_time_password']
checkVerifY = verify_totp(onetime, user['secret_2fa'])
if checkVerifY == False:
msg = 'The two-factor authentication code you specified is incorrect. Please check the clock on your authenticator device to verify that it is in sync'
return json.dumps({
'status': 'error',
'message': msg
})
new_sva_balance = float(sva_balance) - float(sva_amount)
new_sva_balance = round(new_sva_balance, 2)
db.users.update({ "_id" : ObjectId(user_id) }, { '$set': { "sva_balance": new_sva_balance } })
localtime = time.localtime(time.time())
customer_id = '%s%s%s%s%s%s'%(localtime.tm_mon,localtime.tm_year,localtime.tm_mday,localtime.tm_hour,localtime.tm_min,localtime.tm_sec)
code_active = customer_id+id_generator()
data_history = {
'uid' : uid,
'user_id': user_id,
'username' : user['username'],
'amount': float(sva_amount),
'type' : 'send',
'wallet': 'XVG',
'date_added' : datetime.utcnow(),
'detail': 'Send %s XVG from XVG Wallet' %(sva_amount),
'rate': '',
'txtid' : '' ,
'amount_sub' : 0,
'amount_add' : 0,
'amount_rest' : 0
}
id_history = db.historys.insert(data_history)
data_withdraw = {
'uid': uid,
'user_id': user_id,
'username': user['username'],
'amount' : float(sva_amount)-0.5,
'tx': '',
'status': 0,
'date_added' : datetime.utcnow(),
'wallet' : sva_address,
'type': 'XVG',
'code_active': code_active,
'active_email': 0
}
id_withdraw = db.withdrawas.insert(data_withdraw)
code_active = '%s_%s' %(id_withdraw,code_active)
print code_active
link_active = 'http://worldtrader.info/account/activewithdraw/%s' % (code_active)
send_mail_withdraw_sva(user['email'], sva_amount, ' XVG Coin', sva_address,link_active)
# send_mail_withdraw(user['email'], user['username'], link_active)
return json.dumps({
'status': 'success',
'new_sva_balance': new_sva_balance,
'message': 'Withdraw success'
})
@wallet_ctrl.route('/withdrawBTC', methods=['GET', 'POST'])
def withdrawBTC():
# return json.dumps({
# 'status': 'error',
# 'message': 'Error!'
# })
if session.get(u'logged_in') is None:
return json.dumps({
'status': 'error',
'message': 'Please Login'
})
else:
if request.method == 'POST':
user_id = session.get('user_id')
uid = session.get('uid')
user = db.users.find_one({'_id': ObjectId(user_id)})
if float(user['status_withdraw']) == 0:
btc_address = request.form['btc_address']
rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
response_dict = rpc_connection.validateaddress(btc_address)
print response_dict
# url_api = 'http://192.254.72.34:38058/apisva/validateaddress/%s' %(sva_address)
# r = requests.get(url_api)
# response_dict = r.json()
if response_dict['isvalid'] != True:
return json.dumps({
'status': 'error',
'message': 'Please enter a valid address!'
})
checkIsNumber = is_number(request.form['btc_amount'])
if request.form['btc_amount'] == '' or checkIsNumber == False or float(request.form['btc_amount']) < 0.005:
return json.dumps({
'status': 'error',
'message': 'Please enter valid quantity (quantity > 0.005)'
})
password = request.form['password']
if check_password(user['password'], password) == False:
return json.dumps({
'status': 'error',
'message': 'Wrong password'
})
btc_amount = float(request.form['btc_amount'])
btc_amount_satoshi = float(btc_amount)*100000000
# btc_amount = round(btc_amount, 8)
btc_balance = user['btc_balance']
btc_balance_satoshi = float(btc_balance)*100000000
if float(btc_balance_satoshi) < float(btc_amount_satoshi):
return json.dumps({
'status': 'error',
'message': 'Please enter valid quantity (Maximum %s BTC)' % (btc_balance)
})
# if user['status_2fa'] == 1:
onetime = request.form['one_time_password']
if user['status_2fa'] == 1:
checkVerifY = verify_totp(onetime, user['secret_2fa'])
if checkVerifY == False:
msg = 'The two-factor authentication code you specified is incorrect. Please check the clock on your authenticator device to verify that it is in sync'
return json.dumps({
'status': 'error',
'message': msg
})
new_btc_balance_satoshi = float(btc_balance_satoshi) - float(btc_amount_satoshi)
new_btc_balance = float(new_btc_balance_satoshi)/100000000
db.users.update({ "_id" : ObjectId(user_id) }, { '$set': { "btc_balance": new_btc_balance } })
localtime = time.localtime(time.time())
customer_id = '%s%s%s%s%s%s'%(localtime.tm_mon,localtime.tm_year,localtime.tm_mday,localtime.tm_hour,localtime.tm_min,localtime.tm_sec)
code_active = customer_id+id_generator()
btc_amount = float(btc_amount) - 0.002
btc_amount = round(btc_amount, 8)
data_history = {
'uid' : uid,
'user_id': user_id,
'username' : user['username'],
'amount': float(btc_amount),
'type' : 'send',
'wallet': 'BTC',
'date_added' : datetime.utcnow(),
'detail': 'Send %s BTC from BTC Wallet' %(btc_amount),
'rate': '',
'txtid' : '' ,
'amount_sub' : 0,
'amount_add' : 0,
'amount_rest' : 0
}
id_history = db.historys.insert(data_history)
data_withdraw = {
'uid': uid,
'user_id': user_id,
'username': user['username'],
'amount' : float(btc_amount),
'tx': '',
'status': 0,
'date_added' : datetime.utcnow(),
'wallet' : btc_address,
'type': 'BTC',
'code_active': code_active,
'active_email': 0
}
id_withdraw = db.withdrawas.insert(data_withdraw)
code_active = '%s_%s' %(id_withdraw,code_active)
print code_active
link_active = 'http://worldtrader.info/account/activewithdraw/%s' % (code_active)
send_mail_withdraw_sva(user['email'], btc_amount, ' Bitcoin', btc_address,link_active)
return json.dumps({
'status': 'success',
'new_btc_balance': new_btc_balance,
'message': 'Withdraw success'
})
else:
return json.dumps({
'status': 'error',
'message': 'error'
})
@wallet_ctrl.route('/activewithdraw/<code>', methods=['GET', 'POST'])
def confirmWithdraw(code):
token = code.split("_")
id_withdraw = token[0]
code_active = token[1]
dataWithdraw = db.withdrawas.find_one({'code_active': code_active,'_id': ObjectId(id_withdraw),'active_email': 0,'tx': ''})
if dataWithdraw:
address = str(dataWithdraw['wallet'])
amount = float(dataWithdraw['amount'])
print address
print amount
# if dataWithdraw['type'] == 'XVG':
# rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
# dataSend = rpc_connection.sendtoaddress(address,amount)
# else:
rpc_connection = AuthServiceProxy("http://Ecy4M83321mWk7szPoiY2cw:[email protected]:23321")
dataSend = rpc_connection.sendtoaddress(address,amount)
#dataSend = rpc_connection.sendfrom('lending', address, amount)
        if dataSend:
            db.withdrawas.update({ "_id" : ObjectId(id_withdraw) }, { '$set': {"active_email": 1, "tx": dataSend, "status": 1 } })
            return json.dumps({'status': 'Send Success'})
        return json.dumps({'status': 'Send Failed'})
    # Flask raises if a view returns None, so unknown or already-used
    # activation codes get an explicit error response.
    return json.dumps({'status': 'Invalid or already used withdrawal code'})
"[email protected]"
] | |
8ad5873a652011eb278d4990e7ab0deaaa16d6d5 | fa0c53ac2a91409eaf0fc7c082a40caae3ffa0d8 | /com/lc/python_1_100_Days_Demo/Day41-55/code/oa/hrs/migrations/0002_auto_20180523_0923.py | 8b9b835a5b6e204d799a71659938b8b97db8597e | [] | no_license | ahviplc/pythonLCDemo | aba6d8deb1e766841461bd772560d1d50450057b | 22f149600dcfd4d769e9f74f1f12e3c3564e88c2 | refs/heads/master | 2023-07-24T01:41:59.791913 | 2023-07-07T02:32:45 | 2023-07-07T02:32:45 | 135,969,516 | 7 | 2 | null | 2023-02-02T03:24:14 | 2018-06-04T04:12:49 | Python | UTF-8 | Python | false | false | 1,309 | py | # Generated by Django 2.0.5 on 2018-05-23 01:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hrs', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='dept',
name='excellent',
field=models.BooleanField(default=0, verbose_name='是否优秀'),
),
migrations.AlterField(
model_name='dept',
name='location',
field=models.CharField(max_length=10, verbose_name='部门所在地'),
),
migrations.AlterField(
model_name='dept',
name='name',
field=models.CharField(max_length=20, verbose_name='部门名称'),
),
migrations.AlterField(
model_name='dept',
name='no',
field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'),
),
migrations.AlterField(
model_name='emp',
name='comm',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True),
),
migrations.AlterField(
model_name='emp',
name='mgr',
field=models.IntegerField(blank=True, null=True),
),
]
| [
"[email protected]"
] | |
8947aacd51a8e5f7d74271633434aedd17340366 | 85ef27cfe69bf8f4c65ecf4a382742984d91ae12 | /Python/Programs/count_letters.py | 2fe480f4ae66114923b5425db6a3b01a6754c3cf | [] | no_license | SenthilKumar009/100DaysOfCode-DataScience | 7fc0d404e486b2aa1da3a242def0a307fec50f47 | dd2d44363960c9078b73956b6587961de0185f16 | refs/heads/master | 2023-07-17T04:38:21.404964 | 2021-08-27T12:56:55 | 2021-08-27T12:56:55 | 191,664,483 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | message = 'It was a bright cold day in April, and the clocks were striking thirteen.'
count = {}
for character in message:
count.setdefault(character, 0)
count[character] = count[character] + 1
print(count) | [
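
# Note: the standard library gives the same per-character counts in one call:
#
#   from collections import Counter
#   count = Counter(message)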
"[email protected]"
] | |
f6b2bb05fba8fa4a5e097c7a3e7a8c1d6f174bb5 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /dockerized-gists/966bacf76c3a1f815c5fe55cee798dee/snippet.py | cb3ed3fe07ada127bc9b67fa2db937d98cc30bc7 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 378 | py | class globalizer():
def __init__(self):
global a
        a = self  # "global self" is forbidden because self is an argument
cloud = globalizer()
if __name__ == '__main__':
cloud.nbr = 1
cloud.string = 'Hello World'
def randFunction():
for i in range(cloud.nbr):
print(cloud.string)
randFunction()
| [
"[email protected]"
] | |
19c45d471ccab8ab75f519647a61c26064197ab5 | 496a63f41fa32e2bb3ecce0d35ff4374f1c02ad5 | /src/scripting/assembly/ge.py | f933b8eb895afe1b368c3d8c5beefca223b82b18 | [
"BSD-3-Clause"
] | permissive | vincent-lg/avenew.one | bbfa8d44e68db943b8825e9d4a32a43e985778fe | fb7f98d331e47e2032ee1e51bf3e4b2592807fdf | refs/heads/main | 2023-02-14T00:28:53.511552 | 2021-01-13T11:13:07 | 2021-01-13T11:13:07 | 330,207,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,368 | py | # Copyright (c) 2020-20201, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
"""GE assembly expression, to compare two values equal or greater than."""
from scripting.assembly.abc import BaseExpression
class Ge(BaseExpression):
"""
GE assembly expression.
Args:
None.
This expression's only role is to compare >= two values from the
stack. It pops these two values, compare them equal or
greater than equal and push the result back onto the stack,
as a boolean.
"""
name = "GE"
@classmethod
async def process(cls, script, stack):
"""
Process this expression.
Args:
script (Script): the script object.
stack (LifoQueue): the current stack.
"""
value2 = stack.get(block=False)
value1 = stack.get(block=False)
stack.put(value1 >= value2, block=False)
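
# Usage sketch (hypothetical, for illustration only; the comparison does not
# need a real Script object, so None is passed for it):
#
#   import asyncio
#   from queue import LifoQueue
#
#   stack = LifoQueue()
#   stack.put(3, block=False)   # value1, pushed first
#   stack.put(2, block=False)   # value2, popped first
#   asyncio.run(Ge.process(None, stack))
#   assert stack.get(block=False) is True   # 3 >= 2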
| [
"[email protected]"
] | |
6a63bb8312a5cd8e61b833f01e14539cb8da6134 | 088e000eb5f16e6d0d56c19833b37de4e67d1097 | /model-optimizer/extensions/ops/interpolate.py | 36f96e35ce8a854d22ae2d4a4234e40536375d2b | [
"Apache-2.0"
] | permissive | projectceladon/dldt | 614ba719a428cbb46d64ab8d1e845ac25e85a53e | ba6e22b1b5ee4cbefcc30e8d9493cddb0bb3dfdf | refs/heads/2019 | 2022-11-24T10:22:34.693033 | 2019-08-09T16:02:42 | 2019-08-09T16:02:42 | 204,383,002 | 1 | 1 | Apache-2.0 | 2022-11-22T04:06:09 | 2019-08-26T02:48:52 | C++ | UTF-8 | Python | false | false | 2,543 | py | """
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mo.front.common.layout import get_batch_dim, get_features_dim, shape_for_layout
from mo.graph.graph import Node, Graph
from mo.ops.op import Op, PermuteAttrs
class Interpolate(Op):
op = 'Interpolate'
enabled = False
def __init__(self, graph: Graph, attrs: dict):
mandatory_props = {
'type': __class__.op,
'op': __class__.op,
'axes': None,
'mode': None,
'align_corners': 0,
'antialias': 0,
'pads_begin': 0,
'pads_end': 0,
'in_ports_count': 2,
'out_ports_count': 1,
'force_precision_in_ports': {1:'int64'},
'infer': __class__.infer,
}
super().__init__(graph, mandatory_props, attrs)
def supported_attrs(self):
return [
('axes', lambda node: ','.join(map(str, node.axes))),
'mode', 'align_corners', 'antialias', 'pads_begin', 'pads_end',
]
@staticmethod
def infer(node: Node):
layout = node.graph.graph['layout']
assert len(layout) == 4
assert len([p for p in node.in_ports().values() if not p.disconnected()])
assert node.has_valid('mode')
assert node.has_valid('axes')
src_shape = node.in_port(0).data.get_shape()
assert src_shape is not None
dst_shape = node.in_port(1).data.get_value()
assert dst_shape is not None
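        # The second input carries the requested spatial size: dst_shape[0]
        # is used below as the output height and dst_shape[1] as the width.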
out_height = dst_shape[0]
out_width = dst_shape[1]
node.out_node().shape = shape_for_layout(layout,
batch=src_shape[get_batch_dim(layout, 4)],
features=src_shape[get_features_dim(layout, 4)],
height=out_height,
width=out_width)
PermuteAttrs.create_permute_attrs(node, attrs=[('axes', 'input:0')])
| [
"[email protected]"
] | |
119e98fc99640e99e9f37b056b8002cc63a4c9b3 | db9f0df35b37cf6b6aae18753609ae2c64e7e549 | /TTBSMTuplesAnalysis/python/Type11Analyzer.py | a45ad2af8cb9143551bdbc7a33010d7620bfa444 | [] | no_license | justinrpilot/ZprimeAllHad | 938f0097f87ea1dfbfb290a1e553c9555768a89e | b4477e26cce570376f7671433327a8d0f9ea516c | refs/heads/master | 2021-01-13T02:16:54.485622 | 2015-01-07T21:18:56 | 2015-01-07T21:18:56 | 12,485,305 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,719 | py | import ROOT
import copy
from math import *
from array import array
from operator import itemgetter, attrgetter
ROOT.gSystem.Load("libFWCoreFWLite.so")
ROOT.AutoLibraryLoader.enable()
from DataFormats.FWLite import Events, Handle
from Analysis.TTBSMTuplesAnalysis import *
class Type11Analyzer :
"""Run 1 + 1 Analysis"""
def __init__(self, useMC, outfile, mistagFile, collectionLabelSuffix, useGenWeight, triggerFile, modMassFile, pdfWeight="nominal", triggerWeight = "noWeight"):
self.outfile = outfile
self.mistagFileStr = mistagFile
self.triggerFileStr = triggerFile
self.modMassFileStr = modMassFile
self.useMC = useMC
self.useGenWeight = useGenWeight
self.pdfWeight = pdfWeight
self.triggerWeight = "noWeight"#triggerWeight
label = 'ttbsmAna'+collectionLabelSuffix
print label
self.allTopTagHandle = Handle ( "vector<ROOT::Math::LorentzVector<ROOT::Math::PtEtaPhiM4D<double> > >" )
self.allTopTagLabel = ( label, "topTagP4")
self.allca8Handle = Handle ( "vector<ROOT::Math::LorentzVector<ROOT::Math::PtEtaPhiM4D<double> > >" )
self.allca8Label = ( label, "ca8JetP4")
self.allTopTagSubjetsHandle = Handle ( "vector<ROOT::Math::LorentzVector<ROOT::Math::PtEtaPhiM4D<double> > >" )
self.allTopTagSubjetsLabel = ( label, "topTagSubjet1P4")
self.allTopTagTopMassHandle = Handle( "std::vector<double>" )
self.allTopTagTopMassLabel = ( label, "topTagTopMass" )
self.allTopTagMinMassHandle = Handle( "std::vector<double>" )
self.allTopTagMinMassLabel = ( label, "topTagMinMass" )
self.allTopTagNSubjetsHandle = Handle("std::vector<double>" )
self.allTopTagNSubjetsLabel = ( label, "topTagNSubjets" )
self.allNsub1Handle = Handle( "std::vector<double>")
self.allNsub2Handle = Handle( "std::vector<double>")
self.allNsub3Handle = Handle( "std::vector<double>")
self.allNsub4Handle = Handle( "std::vector<double>")
self.allNsub1Label = (label, "tau1")
self.allNsub2Label = (label, "tau2")
self.allNsub3Label = (label, "tau3")
self.allNsub4Label = (label, "tau4")
self.allTopTagSubjetBDiscHandle = Handle("std::vector<double>")
self.allTopTagSubjetBDiscLabel = (label, "topTagSubjetBDisc")
self.allTopTagSubjetJetIDHandle = Handle("std::vector<int>")
self.allTopTagSubjetJetIDLabel = (label, "topTagSubjetJetID")
self.allTopTagBDiscHandle = Handle("std::vector<double>")
self.allTopTagBDiscLabel = (label, "topTagBDisc")
self.allTopTagCHEHandle = Handle("std::vector<double>" )
self.allTopTagCHELabel = (label, "topTagCHE")
self.allTopTagNEHandle = Handle("std::vector<double>")
self.allTopTagNELabel = (label, "topTagNE")
self.allTopTagNumPFHandle = Handle("std::vector<int>")
self.allTopTagNumPFLabel = (label, "topTagNumPF")
self.allTopTagNumCHHandle = Handle("std::vector<int>")
self.allTopTagNumCHLabel = (label, "topTagNumCH")
self.allTopTagNumNEHandle = Handle("std::vector<int>")
self.allTopTagNumNELabel = (label, "topTagNumNE")
self.allTopTagPassHandle = Handle( "std::vector<int>" )
self.allTopTagPassLabel = ( label, "topTagPass" )
self.h_met = Handle ( "vector<ROOT::Math::LorentzVector<ROOT::Math::PtEtaPhiM4D<double> > >" )
self.metLabel = ( label, "pfMET" )
self.weightsHandle = Handle( "double" )
self.weightsLabel = ( label, "weight" )
self.pdfHandle = Handle("std::vector<double>")
self.pdfLabel = ( label, "pdfWeights")
self.textfile = open('txtfile11.txt', 'w')
print self.textfile
#self.badevents = open('AllBadHCALLaser.txt', 'r')
#print self.badevents
#self.badlist = []
#self.badlist = self.badevents.readlines()
self.__book__()
def __del__ (self):
"""(Internal) Destructor"""
print 'Goodbye from Type11Analyzer. Before I go, I will delete some stuff'
self.f.cd()
#self.mttPredDist.SetCalculatedErrors()
self.mttPredDist.GetPredictedHist().Write()
self.mttPredDist.GetObservedHist().Write()
self.mttPredDist.GetTaggableHist().Write()
self.ptPredDist.SetCalculatedErrors()
self.ptPredDist.GetPredictedHist().Write()
#self.mttPredDistMassCut.SetCalculatedErrors()
self.mttPredDistMassCut.GetPredictedHist().Write()
self.mttPredDistMassCut.GetObservedHist().Write()
self.mttPredDistMassCut.GetTaggableHist().Write()
#self.mttPredDistModMassFlat.SetCalculatedErrors()
self.mttPredDistModMassFlat.GetPredictedHist().Write()
self.mttPredDistModMassFlat.GetObservedHist().Write()
self.mttPredDistModMassFlat.GetTaggableHist().Write()
#self.mttPredDistMod3MassFlat.SetCalculatedErrors()
self.mttPredDistMod3MassFlat.GetPredictedHist().Write()
self.mttPredDistMod3MassFlat.GetObservedHist().Write()
self.mttPredDistMod3MassFlat.GetTaggableHist().Write()
#self.mttPredDistMod3MassFlatSubtract.SetCalculatedErrors()
self.mttPredDistMod3MassFlatSubtract.GetPredictedHist().Write()
self.mttPredDistMod3MassFlatSubtract.GetObservedHist().Write()
self.mttPredDistMod3MassFlatSubtract.GetTaggableHist().Write()
self.mttPredDistModMassQCDdistSubtract.SetCalculatedErrors()
self.mttPredDistModMassQCDdistSubtract.GetPredictedHist().Write()
self.mttPredDistModMassQCDdistSubtract.GetObservedHist().Write()
self.mttPredDistModMassQCDdistSubtract.GetTaggableHist().Write()
for pair in sorted(self.runPairs, key=itemgetter(3)) :
print '{0:12.2f} : Run {1:15.0f}, LumiBlock {2:15.0f}, Event {3:30.0f}'.format(
pair[3], pair[0], pair[2], pair[1]
)
print '1'
self.f.Write()
self.f.Close()
print '4'
self.mistagFile.Close()
self.triggerFile.Close()
self.modMassFile.Close()
print '5'
self.textfile.close()
print 'So long!'
def __book__( self ) :
"""(Internal) Books histograms"""
print 'Booking histograms'
self.mistagFile = ROOT.TFile("ALLjets_mistag.root")
self.mistagFile.cd()
self.mistag = self.mistagFile.Get("MISTAG_RATE_SUB_TTBAR_Inclusive").Clone()
self.mistag.SetName('mistag')
self.mistagMassCut = self.mistagFile.Get("MISTAG_RATE_SUB_TTBAR_Inclusive").Clone()
self.mistagMassCut.SetName('mistagMassCut')
if not self.useGenWeight:
#self.mistagMassCutSubtract = self.mistagFile.Get("TYPE11_MISTAG_MASSCUT_SUBTRACT_TTBAR_LARGEBINS").Clone()
self.mistagMassCutSubtract = self.mistagFile.Get("MISTAG_RATE_SUB_TTBAR_Inclusive").Clone()
self.mistagMassCutSubtract.SetName('mistagMassCutSubtract')
if self.useGenWeight:
self.mistagMassCutSubtract = self.mistagFile.Get("MISTAG_RATE_SUB_TTBAR_Inclusive").Clone()
self.mistagMassCutSubtract.SetName('mistagMassCutSubtract')
print self.mistag.GetBinContent(3)
ROOT.SetOwnership( self.mistag, False )
ROOT.SetOwnership( self.mistagMassCut, False )
self.triggerFile = ROOT.TFile(self.triggerFileStr + ".root")
self.triggerFile.cd()
self.triggerFile.ls()
self.triggerHist = self.triggerFile.Get("TRIGGER_EFF").Clone()
self.triggerHist.SetName('triggerHist')
ROOT.SetOwnership( self.triggerHist, False )
self.modMassFile = ROOT.TFile(self.modMassFileStr + ".root")
print self.modMassFileStr
self.modMassFile.cd()
self.modMassFile.ls()
self.modMassHist = ROOT.TH1F()
self.modMassHist = self.modMassFile.Get("jetMassOneTag_MassWindow").Clone()
self.modMassHist.SetName('modMassHist')
ROOT.SetOwnership( self.modMassHist, False )
print "integral "+str(self.modMassHist.Integral())
# print "integral "+str(self.jetMassOneTag_MassWindow.Integral())
self.f = ROOT.TFile( self.outfile + ".root", "recreate" )
self.f.cd()
self.mttPredDist = ROOT.PredictedDistribution( self.mistagMassCutSubtract, "mttPredDist", "mTT Mass", 1000, 0, 5000 )
self.mttPredDistMassCut = ROOT.PredictedDistribution( self.mistagMassCut, "mttPredDistMassCut", "mTT Mass", 1000, 0, 5000 )
self.mttPredDistModMassFlat = ROOT.PredictedDistribution( self.mistagMassCut, "mttPredDistModMassFlat", "mTT Mass", 1000, 0, 5000 )
self.mttPredDistMod3MassFlat = ROOT.PredictedDistribution( self.mistagMassCut, "mttPredDistMod3MassFlat", "mTT Mass", 1000, 0, 5000 )
self.mttPredDistMod3MassFlatSubtract = ROOT.PredictedDistribution( self.mistagMassCutSubtract, "mttPredDistMod3MassFlatSubtract", "mTT Mass", 1000, 0, 5000 )
self.mttPredDistModMassQCDdistSubtract = ROOT.PredictedDistribution( self.mistagMassCutSubtract, "mttPredDistModMassQCDdistSubtract", "mTT Mass", 1000, 0, 5000 )
self.ptPredDist = ROOT.PredictedDistribution( self.mistagMassCutSubtract, "ptPredDist", "pt", 1000, 0, 2000)
print 'here'
ROOT.SetOwnership( self.mttPredDist, False )
ROOT.SetOwnership( self.mttPredDistMassCut, False )
ROOT.SetOwnership( self.mttPredDistModMassFlat, False )
ROOT.SetOwnership( self.mttPredDistMod3MassFlat, False )
ROOT.SetOwnership( self.mttPredDistMod3MassFlatSubtract, False )
ROOT.SetOwnership( self.mttPredDistModMassQCDdistSubtract, False )
ROOT.SetOwnership( self.ptPredDist, False )
self.jetEta = ROOT.TH1D("jetEta", "jetEta", 100, -4, 4)
self.jetMass = ROOT.TH1D("jetMass", "jetMass", 100, 0, 500 )
self.jetMassOneTag = ROOT.TH1D("jetMassOneTag", "jetMassOneTag", 100, 0, 500 )
self.jetPt = ROOT.TH1D("jetPt", "jetPt", 400, 0, 2000 )
self.jetPtOneTag = ROOT.TH1D("jetPtOneTag", "jetPtOneTag", 400, 0, 2000 )
self.jetMinMass = ROOT.TH1D("jetMinMass", "jetMinMass", 400, 0, 200 )
self.topTagMass = ROOT.TH1D("topTagMass", "Top Tag Mass", 100, 0, 500 )
self.topTagMinMass = ROOT.TH1D("topTagMinMass", "Top Tag MinMass", 100, 0, 200 )
self.topTagPt = ROOT.TH1D("topTagPt", "Top Tag Pt", 400, 0, 2000 )
self.mttCandMass = ROOT.TH1D("mttCandMass", "mTT Cand Mass", 1000, 0, 5000 )
self.mttMass = ROOT.TH1D("mttMass", "mTT Mass", 1000, 0, 5000 )
self.mttMassTriggerWeighted = ROOT.TH1D("mttMassTriggerWeighted", "mTT Mass", 1000, 0, 5000 )
self.cutflow = ROOT.TH1D("cutflow", "cutflow", 7, 0, 7 )
self.mtt_gen = ROOT.TH1F("mtt_gen", "mtt gen", 1000, 0, 5000 )
self.mtt_gen_vs_reco = ROOT.TH2D("mtt_gen_vs_reco", "mtt gen vs reco", 1000, 0, 5000, 1000, 0, 5000)
self.jetMassMW = ROOT.TH1D("jetMassMS", "jetMass", 100, 0, 500)
self.mttPredDistErr = ROOT.TH1D("mttPredDist_err", "mttPredDist_Err", 1000, 0, 5000)
self.ptPredDistErr = ROOT.TH1D("ptPredDist_err", "ptPredDist_Err", 1000, 0, 2000)
self.treeVars = ROOT.TTree('treeVars', 'treeVars')
self.mttMassT = array('f', [-1.])
self.mttMassPred = array('f', [-1.])
self.mistagWt = array('f', [-1.])
self.npv = array('i', [-1])
self.index = array('i', [-1])
self.trigWt = array('f', [-1.])
self.jet1PtT = array('f', [-1.])
self.jet2PtT = array('f', [-1.])
self.sumJetPt = array('f', [-1.])
self.jet1MassT = array('f', [-1.])
self.jet2MassT = array('f', [-1.])
self.jet1Eta = array('f', [-1.])
self.jet2Eta = array('f', [-1.])
self.jet1Phi = array('f', [-1.])
self.jet2Phi = array('f', [-1.])
self.MET = array('f', [-1.])
self.run = array('i', [-1])
self.event = array('l', [-1])
self.lumi = array('i', [-1])
self.jet1CHE = array('f', [-1.])
self.jet1NE = array('f', [-1.])
self.jet1NumPF = array('i', [-1])
self.jet2CHE = array('f', [-1.])
self.jet2NE = array('f', [-1.])
self.jet2NumPF = array('i', [-1])
self.jet1NumCH = array('i', [-1])
self.jet2NumCH = array('i', [-1])
self.jet1NumNE = array('i', [-1])
self.jet2NumNE = array('i', [-1])
self.jet1SubjetMaxBDisc = array('f', [-1.])
self.jet1BDisc = array('f', [-1.])
self.jet1MassGroomed = array('f', [-1.])
self.jet1tau32 = array('f', [-1.])
self.jet2SubjetMaxBDisc = array('f', [-1.])
self.jet2BDisc = array('f', [-1.])
self.jet2MassGroomed = array('f', [-1.])
self.jet2tau32 = array('f', [-1.])
self.jet1Nsubj = array('i', [-1])
self.jet2Nsubj = array('i', [-1])
self.jet1minMass = array('f', [-1.])
self.jet2minMass = array('f', [-1.])
self.treeVars.Branch('jet1SubjetMaxBDisc', self.jet1SubjetMaxBDisc, 'jet1SubjetMaxBDisc/F')
self.treeVars.Branch('jet1BDisc', self.jet1BDisc, 'jet1BDisc/F')
self.treeVars.Branch('jet1MassGroomed', self.jet1MassGroomed, 'jet1MassGroomed/F')
self.treeVars.Branch('jet1tau32', self.jet1tau32, 'jet1tau32/F')
self.treeVars.Branch('jet2SubjetMaxBDisc', self.jet2SubjetMaxBDisc, 'jet2SubjetMaxBDisc/F')
self.treeVars.Branch('jet2BDisc', self.jet2BDisc, 'jet2BDisc/F')
self.treeVars.Branch('jet2MassGroomed', self.jet2MassGroomed, 'jet2MassGroomed/F')
self.treeVars.Branch('jet2tau32', self.jet2tau32, 'jet2tau32/F')
self.treeVars.Branch('mttMass', self.mttMassT, 'mttMass/F')
self.treeVars.Branch('mttMassPred', self.mttMassPred, 'mttMassPred/F')
self.treeVars.Branch('mistagWt', self.mistagWt, 'mistagWt/F')
self.treeVars.Branch('npv', self.npv, 'npv/I')
self.treeVars.Branch('index', self.index, 'index/I')
self.treeVars.Branch('trigWt', self.trigWt, 'trigWt/F')
self.treeVars.Branch('jet1Pt', self.jet1PtT, 'jet1Pt/F')
self.treeVars.Branch('jet2Pt', self.jet2PtT, 'jet2Pt/F')
self.treeVars.Branch('sumJetPt', self.sumJetPt, 'sumJetPt/F')
self.treeVars.Branch('jet1Mass', self.jet1MassT, 'jet1Mass/F')
self.treeVars.Branch('jet2Mass', self.jet2MassT, 'jet2Mass/F')
self.treeVars.Branch('jet1Eta', self.jet1Eta, 'jet1Eta/F')
self.treeVars.Branch('jet2Eta', self.jet2Eta, 'jet2Eta/F')
self.treeVars.Branch('jet1Phi', self.jet1Phi, 'jet1Phi/F')
self.treeVars.Branch('jet2Phi', self.jet2Phi, 'jet2Phi/F')
self.treeVars.Branch('MET', self.MET, 'MET/F')
self.treeVars.Branch('run', self.run, 'run/I')
self.treeVars.Branch('event', self.event, 'event/L')
self.treeVars.Branch('lumi', self.lumi, 'lumi/I')
self.treeVars.Branch('jet1CHE', self.jet1CHE, 'jet1CHE/F')
self.treeVars.Branch('jet1NE', self.jet1NE, 'jet1NE/F')
self.treeVars.Branch('jet1NumPF', self.jet1NumPF, 'jet1NumPF/I')
self.treeVars.Branch('jet2CHE', self.jet2CHE, 'jet2CHE/F')
self.treeVars.Branch('jet2NE', self.jet2NE, 'jet2NE/F')
self.treeVars.Branch('jet2NumPF', self.jet2NumPF, 'jet2NumPF/I')
self.treeVars.Branch('jet1NumCH', self.jet1NumCH, 'jet1NumCH/I')
self.treeVars.Branch('jet2NumCH', self.jet2NumCH, 'jet2NumCH/I')
self.treeVars.Branch('jet1NumNE', self.jet1NumNE, 'jet1NumNE/I')
self.treeVars.Branch('jet2NumNE', self.jet2NumNE, 'jet2NumNE/I')
self.treeVars.Branch('jet1Nsubj', self.jet1Nsubj, 'jet1Nsubj/I')
self.treeVars.Branch('jet2Nsubj', self.jet2Nsubj, 'jet2Nsubj/I')
self.treeVars.Branch('jet1minMass', self.jet1minMass, 'jet1minMass/F')
self.treeVars.Branch('jet2minMass', self.jet2minMass, 'jet2minMass/F')
self.mttMass.Sumw2()
self.runPairs = []
self.mttMassTriggerWeighted.Sumw2()
def analyze(self, event) :
"""Analyzes event"""
#badlistSet = set(self.badlist)
self.run[0] = event.object().id().run()
self.event[0] = event.object().id().event()
self.lumi[0] = event.object().id().luminosityBlock()
#thisEvent = '{0}:{1}:{2}'.format(self.run[0], self.lumi[0], self.event[0])
#if thisEvent in badlistSet:
# print "Found Bad HCAL Laser Event", thisEvent
# return
event.getByLabel (self.allTopTagLabel, self.allTopTagHandle)
topJets = self.allTopTagHandle.product()
event.getByLabel (self.allTopTagCHELabel, self.allTopTagCHEHandle)
event.getByLabel (self.allTopTagNELabel, self.allTopTagNEHandle)
event.getByLabel (self.allTopTagNumPFLabel, self.allTopTagNumPFHandle)
event.getByLabel (self.allTopTagNumCHLabel, self.allTopTagNumCHHandle)
event.getByLabel (self.allTopTagNumNELabel, self.allTopTagNumNEHandle)
topJetsCHE = self.allTopTagCHEHandle.product()
topJetsNE = self.allTopTagNEHandle.product()
topJetsNumPF = self.allTopTagNumPFHandle.product()
topJetsNumCH = self.allTopTagNumCHHandle.product()
topJetsNumNE = self.allTopTagNumNEHandle.product()
event.getByLabel (self.allTopTagSubjetsLabel, self.allTopTagSubjetsHandle)
event.getByLabel (self.allca8Label, self.allca8Handle)
event.getByLabel (self.allNsub1Label, self.allNsub1Handle)
event.getByLabel (self.allNsub2Label, self.allNsub2Handle)
event.getByLabel (self.allNsub3Label, self.allNsub3Handle)
event.getByLabel (self.allNsub4Label, self.allNsub4Handle)
event.getByLabel (self.allTopTagBDiscLabel, self.allTopTagBDiscHandle)
event.getByLabel (self.allTopTagSubjetBDiscLabel, self.allTopTagSubjetBDiscHandle)
event.getByLabel (self.allTopTagSubjetJetIDLabel, self.allTopTagSubjetJetIDHandle)
topSubjets = self.allTopTagSubjetsHandle.product()
ca8Jets = self.allca8Handle.product()
tau1Vals = self.allNsub1Handle.product()
tau2Vals = self.allNsub2Handle.product()
tau3Vals = self.allNsub3Handle.product()
tau4Vals = self.allNsub4Handle.product()
topBDisc = self.allTopTagBDiscHandle.product()
topSubjetsBDisc = self.allTopTagSubjetBDiscHandle.product()
topJetID = self.allTopTagSubjetJetIDHandle.product()
event.getByLabel( self.metLabel, self.h_met )
mets = self.h_met.product()
self.MET[0] = mets[0].pt()
self.cutflow.Fill(0.5,1)
upperWindowEdge = 250
HTsum = 0.0
nTopCand = 0
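      # count top-jet candidates above the 400 GeV pT threshold and sum their
      # pT into HT; events with fewer than two candidates are skipped below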
for i in range(0,len(topJets) ) :
if( topJets[i].pt() > 400 ) :
nTopCand = nTopCand + 1
HTsum += topJets[i].pt()
if nTopCand < 2 :
return
#pairMass = 0.0
#ttMass = 0.0
weight = 1.0
if self.useGenWeight :
event.getByLabel( self.weightsLabel, self.weightsHandle )
weight = self.weightsHandle.product()[0]
if self.triggerWeight != "noWeight" :
jetTriggerWeight = 1.0
jetTriggerWeight1 = 1.0
jetTriggerWeight2 = 0.0
if HTsum < 2000:
bin0 = self.triggerHist.FindBin(HTsum)
#bin0 = self.triggerHist.FindBin(topJets[0].pt())
jetTriggerWeight1 = self.triggerHist.GetBinContent(bin0)
if HTsum < 2000:
#bin1 = self.triggerHist.FindBin(topJets[1].pt())
bin1 = self.triggerHist.FindBin(HTsum)
#jetTriggerWeight2 = self.triggerHist.GetBinContent(bin1)
deltaTriggerEff1 = 0.5*(1.0-jetTriggerWeight1)
deltaTriggerEff = 0.5*(1.0-jetTriggerWeight)
deltaTriggerEff2 = 0.5*(1.0-jetTriggerWeight2)
jetTriggerWeightUp = jetTriggerWeight + deltaTriggerEff
jetTriggerWeightDown = jetTriggerWeight - deltaTriggerEff
jetTriggerWeightUp = min(1.0,jetTriggerWeightUp)
jetTriggerWeightDown = max(0.0,jetTriggerWeightDown)
jetTriggerWeightUp1 = jetTriggerWeight1 + deltaTriggerEff1
jetTriggerWeightDown1 = jetTriggerWeight1 - deltaTriggerEff1
jetTriggerWeightUp1 = min(1.0,jetTriggerWeightUp1)
jetTriggerWeightDown1 = max(0.0,jetTriggerWeightDown1)
jetTriggerWeightUp2 = jetTriggerWeight2 + deltaTriggerEff2
jetTriggerWeightDown2 = jetTriggerWeight2 - deltaTriggerEff2
jetTriggerWeightUp2 = min(1.0,jetTriggerWeightUp2)
jetTriggerWeightDown2 = max(0.0,jetTriggerWeightDown2)
if self.triggerWeight == "Nominal" :
weight = weight*(jetTriggerWeight1)
if self.triggerWeight == "Up" :
weight = weight*jetTriggerWeightUp1
if self.triggerWeight == "Down" :
weight = weight*jetTriggerWeightDown1
#print 'topJets[0].pt() ' + str(topJets[0].pt())
#print 'jetTriggerWeight ' + str(jetTriggerWeight)
event.getByLabel (self.allTopTagMinMassLabel, self.allTopTagMinMassHandle)
topJetMinMass= self.allTopTagMinMassHandle.product()
event.getByLabel (self.allTopTagNSubjetsLabel, self.allTopTagNSubjetsHandle)
topJetNSubjets= self.allTopTagNSubjetsHandle.product()
event.getByLabel (self.allTopTagTopMassLabel, self.allTopTagTopMassHandle)
topJetMass= self.allTopTagTopMassHandle.product()
event.getByLabel (self.allTopTagPassLabel, self.allTopTagPassHandle )
topJetPass= self.allTopTagPassHandle.product()
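      # PDF systematic: sum the squared even-indexed ("up") or odd-indexed
      # ("down") PDF weights, scale by 0.5/len(pdfs), and combine the result
      # with the event weight in quadrature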
if self.pdfWeight != "nominal" :
iweight = 0.0
event.getByLabel( self.pdfLabel, self.pdfHandle )
pdfs = self.pdfHandle.product()
if self.pdfWeight == "up" :
for pdf in pdfs[0::2] :
iweight = iweight + pdf*pdf
else :
for pdf in pdfs[1::2] :
iweight = iweight + pdf*pdf
iweight = iweight / len(pdfs) * 0.5
weight = sqrt(weight*weight + iweight*iweight)
ttMass = 0.0
deltaPhi = topJets[0].phi() - topJets[1].phi()
if deltaPhi > ROOT.TMath.Pi():
deltaPhi = deltaPhi - 2*ROOT.TMath.Pi()
if deltaPhi < -ROOT.TMath.Pi():
deltaPhi = deltaPhi + 2*ROOT.TMath.Pi()
      CHE0 = topJetsCHE[0]
      CHE1 = topJetsCHE[1]
      NE0 = topJetsNE[0]
      NE1 = topJetsNE[1]
if NE0 == 0:
NE0 = 0.0001
if NE1 == 0:
NE1 = 0.0001
#find matching ca8jet to top jet for Nsubjetiness
deltaRmin1 = 999.
deltaRmin2 = 999.
# ca8jet0 = ca8Jets[0]
# ca8jet1 = ca8Jets[1]
# for ca8jet in ca8Jets:
# if (ca8jet.DeltaR(topJets[0]) < deltaRmin1):
# ca8jet0 = ca8jet
# deltaRmin1 = ca8jet.DeltaR(topJets[0])
# if (ca8jet.DeltaR(topJets[1]) < deltaRmin2):
# ca8jet1 = ca8jet
# deltaRmin2 = ca8jet.DeltaR(topJets[1])
#
# print ca8jet0.pt(), ca8jet1.pt(), topJets[0].pt(), topJets[1].pt()
tagJet0Subjets = []
tagJet0BDiscs = []
tagJet1Subjets = []
tagJet1BDiscs = []
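      # topSubjets is a flattened list: the first topJetNSubjets[0] entries
      # belong to the leading jet, the remainder to the subleading jet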
i = 0
while i < (topJetNSubjets[0] + topJetNSubjets[1]):
if i < topJetNSubjets[0]:
tagJet0Subjets.append(topSubjets[i])
tagJet0BDiscs.append(topSubjetsBDisc[i])
elif i < (topJetNSubjets[0] + topJetNSubjets[1]):
tagJet1Subjets.append(topSubjets[i])
tagJet1BDiscs.append(topSubjetsBDisc[i])
i += 1
tau32_jet1 = tau3Vals[0] / tau2Vals[0]
tau32_jet2 = tau3Vals[1] / tau2Vals[1]
bdisc_jet1 = topBDisc[0]
bdisc_jet2 = topBDisc[1]
maxSubjBDisc0 = max(tagJet0BDiscs)
maxSubjBDisc1 = max(tagJet1BDiscs)
groomed_jet1 = ROOT.TLorentzVector(0.0,0.0,0.0,0.0)
groomed_jet2 = ROOT.TLorentzVector(0.0,0.0,0.0,0.0)
for subjet in tagJet0Subjets:
groomed_jet1 += ROOT.TLorentzVector(subjet.px(), subjet.py(), subjet.pz(), subjet.energy())
for subjet in tagJet1Subjets:
groomed_jet2 += ROOT.TLorentzVector(subjet.px(), subjet.py(), subjet.pz(), subjet.energy())
#ptCuts = topJets[0].pt() > 450 and topJets[1].pt() > 450
ptCuts = topJets[0].pt() > 400 and topJets[1].pt() > 400
      etaCuts = (abs(topJets[0].Rapidity() - topJets[1].Rapidity()) < 1.0
                 and abs(topJets[0].Rapidity()) < 2.4
                 and abs(topJets[1].Rapidity()) < 2.4
                 and not (abs(topJets[0].eta()) < 1.5 and abs(topJets[0].eta()) > 1.0
                          and topJetsCHE[0] / NE0 > 2.0)
                 and not (abs(topJets[1].eta()) < 1.5 and abs(topJets[1].eta()) > 1.0
                          and topJetsCHE[1] / NE1 > 2.0))
#etaCuts = abs(topJets[0].Rapidity() - topJets[1].Rapidity()) < 1.0 and abs(topJets[0].Rapidity()) < 2.4 and abs(topJets[1].Rapidity()) < 2.4
#etaCuts = abs(topJets[0].Rapidity()) < 2.4 and abs(topJets[1].Rapidity()) < 2.4
deltaPhiCut = abs(deltaPhi)>2.1
passType11KinCuts = ptCuts #and etaCuts and deltaPhiCut
if ptCuts:
self.cutflow.Fill(1.5,1)
if etaCuts:
self.cutflow.Fill(2.5,1)
if deltaPhiCut:
self.cutflow.Fill(3.5,1)
topMassCuts = topJetMass[0] > 140 and topJetMass[0] < 250 and topJetMass[1] > 140 and topJetMass[1] < 250
if topMassCuts:
self.cutflow.Fill(4.5,1)
nSubjetsCuts = topJetNSubjets[0] > 2 and topJetNSubjets[1] > 2
if nSubjetsCuts:
self.cutflow.Fill(5.5,1)
minMassCuts = topJetMinMass[0] > 50 and topJetMinMass[1] > 50
if minMassCuts:
self.cutflow.Fill(6.5,1)
if passType11KinCuts :
topTag0 = topJetMass[0] > 140 and topJetMass[0] < upperWindowEdge and topJetMinMass[0] > 50 and topJetNSubjets[0] > 2
topTag1 = topJetMass[1] > 140 and topJetMass[1] < upperWindowEdge and topJetMinMass[1] > 50 and topJetNSubjets[1] > 2
failTopTag0 = topJetMass[0] > 140 and topJetMass[0] < upperWindowEdge and topJetMinMass[0] < 30
failTopTag1 = topJetMass[1] > 140 and topJetMass[1] < upperWindowEdge and topJetMinMass[1] < 30
passType11 = topTag0 and topTag1
ttMass = (topJets[0]+topJets[1]).mass()
MWweight1 = self.mistag.GetBinContent(self.mistag.FindBin(topJets[0].pt() ) )
MWweight2 = self.mistag.GetBinContent(self.mistag.FindBin(topJets[1].pt() ) )
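        # background model: draw a substitute jet mass, either flat in
        # 140-250 GeV or from the QCD modMass template, and recompute the
        # ttbar candidate mass with it; MWweight1/2 are the pT-binned
        # mistag weights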
myrand = ROOT.gRandom.Uniform(140,250)
jet0P4_massFlat = copy.copy(topJets[0])
jet0P4_massFlat.SetM( myrand )
ttMassJet0MassFlat = (topJets[1]+jet0P4_massFlat).mass()
myrand2 = ROOT.gRandom.Uniform(140,250)
jet1P4_massFlat = copy.copy(topJets[1])
jet1P4_massFlat.SetM( myrand2 )
ttMassJet1MassFlat = (topJets[0]+jet1P4_massFlat).mass()
myrand_QCDdist_jet0 = self.modMassHist.GetRandom()
jet0P4_modmassQCDdist = copy.copy(topJets[0])
jet0P4_modmassQCDdist.SetM( myrand_QCDdist_jet0 )
ttMassJet0modmassQCDdist = (topJets[1]+jet0P4_modmassQCDdist).mass()
myrand_QCDdist_jet1 = self.modMassHist.GetRandom()
jet1P4_modmassQCDdist = copy.copy(topJets[1])
jet1P4_modmassQCDdist.SetM( myrand_QCDdist_jet1 )
ttMassJet1modmassQCDdist = (topJets[0]+jet1P4_modmassQCDdist).mass()
#ibin0 = self.triggerHist.FindBin(topJets[0].pt())
#jetTriggerWeight = self.triggerHist.GetBinContent(bin0)
ttMassMod3Jet0Flat = ttMass
ttMassMod3Jet1Flat = ttMass
if (topJets[0].mass() < 140 or topJets[0].mass() > 250) and (topJets[1].mass() < 140 or topJets[1].mass() > 250):
ttMassMod3Jet0Flat = ttMassJet0MassFlat
self.jetMassMW.Fill( myrand, MWweight1*MWweight2)
if (topJets[1].mass() < 140 or topJets[1].mass() > 250) and (topJets[0].mass() < 140 or topJets[0].mass() > 250):
ttMassMod3Jet1Flat = ttMassJet1MassFlat
self.jetMassMW.Fill( myrand2, MWweight2*MWweight1)
ttMassModJet0QCDdist = ttMass
ttMassModJet1QCDdist = ttMass
#if topJets[0].mass() < 140 or topJets[0].mass() > 250:
ttMassModJet0QCDdist = ttMassJet0modmassQCDdist
#if topJets[1].mass() < 140 or topJets[1].mass() > 250:
ttMassModJet1QCDdist = ttMassJet1modmassQCDdist
self.jetMass.Fill( topJets[0].mass(), weight )
self.jetMass.Fill( topJets[1].mass(), weight )
self.jetPt.Fill( topJets[0].pt(), weight )
self.jetPt.Fill( topJets[1].pt(), weight )
self.jetEta.Fill( topJets[0].eta(), weight )
self.jetEta.Fill( topJets[1].eta(), weight )
self.jetMinMass.Fill( topJetMinMass[0], weight )
self.jetMinMass.Fill( topJetMinMass[1], weight )
self.mttCandMass.Fill( ttMass, weight )
if passType11 :
self.topTagMass.Fill( topJets[0].mass(), weight )
self.topTagMass.Fill( topJets[1].mass(), weight )
self.topTagPt.Fill( topJets[0].pt(), weight )
self.topTagPt.Fill( topJets[1].pt(), weight )
self.topTagMinMass.Fill( topJetMinMass[0], weight )
self.topTagMinMass.Fill( topJetMinMass[1], weight )
self.mttMassT[0] = ttMass
self.jet1SubjetMaxBDisc[0] = maxSubjBDisc0
self.jet1BDisc[0] = topBDisc[0]
self.jet1MassGroomed[0] = groomed_jet1.M()
self.jet1tau32[0] = tau32_jet1
self.jet2SubjetMaxBDisc[0] = maxSubjBDisc1
self.jet2BDisc[0] = topBDisc[1]
self.jet2MassGroomed[0] = groomed_jet2.M()
self.jet2tau32[0] = tau32_jet2
self.jet1minMass[0] = topJetMinMass[0]
self.jet2minMass[0] = topJetMinMass[1]
self.jet1Nsubj[0] = topJetNSubjets[0]
self.jet2Nsubj[0] = topJetNSubjets[1]
self.index[0] = 1
self.jet1PtT[0] = topJets[0].pt()
self.jet2PtT[0] = topJets[1].pt()
self.jet1Eta[0] = topJets[0].Rapidity()
self.jet2Eta[0] = topJets[1].Rapidity()
self.jet1Phi[0] = topJets[0].phi()
self.jet2Phi[0] = topJets[1].phi()
self.sumJetPt[0] = HTsum
self.jet1MassT[0] = topJets[0].mass()
self.jet2MassT[0] = topJets[1].mass()
self.jet1CHE[0] = topJetsCHE[0]
self.jet1NE[0] = topJetsNE[0]
self.jet1NumPF[0] = topJetsNumPF[0]
self.jet1NumCH[0] = topJetsNumCH[0]
self.jet1NumNE[0] = topJetsNumNE[0]
self.jet2CHE[0] = topJetsCHE[1]
self.jet2NE[0] = topJetsNE[1]
self.jet2NumPF[0] = topJetsNumPF[1]
self.jet2NumCH[0] = topJetsNumCH[1]
self.jet2NumNE[0] = topJetsNumNE[1]
self.trigWt[0] = weight
self.event[0] = event.object().id().event()
self.run[0] = event.object().id().run()
self.lumi[0] = event.object().id().luminosityBlock()
self.treeVars.Fill()
self.mttMass.Fill( ttMass, weight )
if not self.useMC :
self.runPairs.append( [event.object().id().run(),
event.object().id().event(),
event.object().id().luminosityBlock() ,
ttMass] )
self.mttMassTriggerWeighted.Fill( ttMass, weight )
h_mtt = Handle("double")
event.getByLabel( ("ttbsmAna", "mttgen"), h_mtt)
if h_mtt.isValid():
mtt = h_mtt.product()
self.mtt_gen.Fill(mtt[0])
self.mtt_gen_vs_reco.Fill(mtt[0], ttMass)
        # background estimation
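        # tag-and-probe mistag estimate: pick one jet at random (x < 0.5 picks
        # jet 0 as the tag), require it to be top-tagged, and accumulate the
        # other jet into the predicted distributions with its mistag weight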
x = ROOT.gRandom.Uniform()
eventStr = str(event.object().id().run()) + ' ' + str(event.object().id().event()) + '\n'
self.textfile.write(eventStr)
mistagWt1 = self.mistag.GetBinContent( self.mistag.FindBin(topJets[1].pt()) )
mistagWt0 = self.mistag.GetBinContent( self.mistag.FindBin(topJets[0].pt()) )
if x < 0.5 :
if topTag0:
self.mistagWt[0] = self.mistag.GetBinContent( self.mistag.FindBin(topJets[1].pt()) )
mistagRateErr = self.mistag.GetBinError(self.mistag.FindBin(topJets[1].pt() ))
self.jetPtOneTag.Fill( topJets[1].pt(), weight )
self.jetMassOneTag.Fill( topJets[1].mass(), weight )
self.mttPredDist. Accumulate( ttMass, topJets[1].pt(), topTag1, weight )
self.mttPredDistErr.Fill(ttMass, mistagRateErr)
self.ptPredDistErr.Fill(topJets[1].pt(), mistagRateErr)
self.mttPredDistMassCut. Accumulate( ttMass, topJets[1].pt(), topTag1, weight )
self.mttPredDistModMassFlat. Accumulate( ttMassJet1MassFlat, topJets[1].pt(), topTag1, weight )
self.mttPredDistMod3MassFlat. Accumulate( ttMassMod3Jet1Flat, topJets[1].pt(), topTag1, weight )
self.mttPredDistMod3MassFlatSubtract. Accumulate( ttMassMod3Jet1Flat, topJets[1].pt(), topTag1, weight )
self.mttPredDistModMassQCDdistSubtract. Accumulate( ttMassModJet1QCDdist, topJets[1].pt(), topTag1, weight )
self.mttMassT[0] = ttMassModJet1QCDdist
self.index[0] = 0
self.jet1PtT[0] = topJets[0].pt()
self.jet2PtT[0] = topJets[1].pt()
self.jet1Eta[0] = topJets[0].Rapidity()
self.jet2Eta[0] = topJets[1].Rapidity()
self.jet1Phi[0] = topJets[0].phi()
self.jet2Phi[0] = topJets[1].phi()
self.jet1CHE[0] = topJetsCHE[0]
self.jet1NE[0] = topJetsNE[0]
self.jet1NumPF[0] = topJetsNumPF[0]
self.jet1NumCH[0] = topJetsNumCH[0]
self.jet1NumNE[0] = topJetsNumNE[0]
self.jet2CHE[0] = topJetsCHE[1]
self.jet2NE[0] = topJetsNE[1]
self.jet2NumPF[0] = topJetsNumPF[1]
self.jet2NumCH[0] = topJetsNumCH[1]
self.jet2NumNE[0] = topJetsNumNE[1]
self.sumJetPt[0] = HTsum
self.jet1MassT[0] = topJets[0].mass()
self.jet2MassT[0] = myrand_QCDdist_jet1
self.trigWt[0] = weight
self.treeVars.Fill()
self.ptPredDist.Accumulate(topJets[1].pt(), topJets[1].pt(), topTag1, weight)
if x >= 0.5 :
if topTag1:
self.mistagWt[0] = self.mistag.GetBinContent( self.mistag.FindBin(topJets[0].pt()) )
mistagRateErr = self.mistag.GetBinError(self.mistag.FindBin(topJets[0].pt()) )
self.jetPtOneTag.Fill( topJets[0].pt(), weight )
self.jetMassOneTag.Fill( topJets[0].mass(), weight )
self.mttPredDist. Accumulate( ttMass, topJets[0].pt(), topTag0, weight )
self.mttPredDistErr.Fill(ttMass, mistagRateErr)
self.ptPredDistErr.Fill(topJets[0].pt(), mistagRateErr)
self.mttPredDistMassCut. Accumulate( ttMass, topJets[0].pt(), topTag0, weight )
self.mttPredDistModMassFlat. Accumulate( ttMassJet0MassFlat, topJets[0].pt(), topTag0, weight )
self.mttPredDistMod3MassFlat. Accumulate( ttMassMod3Jet0Flat, topJets[0].pt(), topTag0, weight )
self.mttPredDistMod3MassFlatSubtract. Accumulate( ttMassMod3Jet0Flat, topJets[0].pt(), topTag0, weight )
self.mttPredDistModMassQCDdistSubtract. Accumulate( ttMassModJet0QCDdist, topJets[0].pt(), topTag0, weight )
self.ptPredDist.Accumulate(topJets[0].pt(), topJets[0].pt(), topTag0, weight)
self.mttMassT[0] = ttMassModJet0QCDdist
self.index[0] = 0
self.jet1PtT[0] = topJets[0].pt()
self.jet2PtT[0] = topJets[1].pt()
self.jet1Eta[0] = topJets[0].Rapidity()
self.jet2Eta[0] = topJets[1].Rapidity()
self.jet1Phi[0] = topJets[0].phi()
self.jet2Phi[0] = topJets[1].phi()
self.jet1CHE[0] = topJetsCHE[0]
self.jet1NE[0] = topJetsNE[0]
self.jet1NumPF[0] = topJetsNumPF[0]
self.jet1NumCH[0] = topJetsNumCH[0]
self.jet1NumNE[0] = topJetsNumNE[0]
self.jet2CHE[0] = topJetsCHE[1]
self.jet2NE[0] = topJetsNE[1]
self.jet2NumPF[0] = topJetsNumPF[1]
self.jet2NumCH[0] = topJetsNumCH[1]
self.jet2NumNE[0] = topJetsNumNE[1]
self.sumJetPt[0] = HTsum
self.jet1MassT[0] = myrand_QCDdist_jet0
self.jet2MassT[0] = topJets[1].mass()
self.trigWt[0] = weight
self.treeVars.Fill()
| [
"[email protected]"
] | |
b0efa79ff67011699b40d07b9eb872a0596d4486 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/84/usersdata/188/56925/submittedfiles/lista1.py | 3ae36f6760785e4cd08b91fcf70529b819f30e26 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | # -*- coding: utf-8 -*-
lista=[]
n=int(input('Enter n: '))
for i in range (1,n+1,1):
    valor=float(input('Value: '))
lista.append(valor)
print(lista)
| [
"[email protected]"
] | |
29aa09304badbc52a5f19a16218b19920f47ee59 | 10af7a7c08e2d8c630f115a7f1c89dc9a2c0c007 | /0437_pathSum.py | ac2d79c9e3aa80043576026113c614918875db5d | [] | no_license | mathvolcano/leetcode | 4218846652a8d73192d74cbf83f5a92549236568 | 2682cc975ec299d9253aa191b5453669dd1ebd58 | refs/heads/master | 2022-11-27T22:47:49.700110 | 2022-11-08T18:11:27 | 2022-11-08T18:11:27 | 154,909,156 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,050 | py | """
437. Path Sum III
https://leetcode.com/problems/path-sum-iii/
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def pathSum(self, root: TreeNode, path_sum: int) -> int:
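        # Count downward paths summing to `path_sum`: for every node, count
        # the paths that start there (n_continued_paths), then recurse so a
        # path may begin at any node. O(n^2) worst case on a skewed tree.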
n_paths = 0
if not root: return n_paths
n_paths += self.n_continued_paths(root, path_sum)
# Start over from lower nodes
if root.left:
n_paths += self.pathSum(root.left, path_sum)
if root.right:
n_paths += self.pathSum(root.right, path_sum)
return n_paths
def n_continued_paths(self, root, target):
if (not root): return 0
n_paths = 1 if root.val == target else 0
remainder = target - root.val
if root.left:
n_paths += self.n_continued_paths(root.left, remainder)
if root.right:
n_paths += self.n_continued_paths(root.right, remainder)
return n_paths
| [
"[email protected]"
] | |
880ebf9230daad74995329f1a250ec1834803007 | 1588a1d601d29c18942d220657185d3bf7b17160 | /시뮬레이션/BOJ1966.py | f8f06358c959b96afa8efb9434aea0aa231e353b | [] | no_license | geonwoomun/AlgorithmStudy | 1f8148e981beebd2e6f70e65193ce445fa59df96 | d43b624aad80f10d687a8f4b37cc79d88fc772b3 | refs/heads/master | 2020-08-01T05:24:26.980370 | 2020-07-15T05:04:35 | 2020-07-15T05:04:35 | 210,878,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | # BOJ 1966번 프린터 큐
from sys import stdin
input = stdin.readline
T = int(input())
while T > 0 :
    N, M = map(int, input().split()) # N: number of documents, M: index of the document we care about
paper = list(map(int, input().split()))
check = [i for i in range(N)]
count = 0
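    # simulate the printer queue: while a higher-priority document exists,
    # rotate the front document to the back; count each printed document
    # until the one originally at index M is printed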
while 1:
maxPaper = max(paper)
temp = paper.pop(0)
tempCheck = check.pop(0)
if(tempCheck == M and maxPaper == temp):
count +=1
break
elif maxPaper == temp:
count +=1
else:
paper.append(temp)
check.append(tempCheck)
print(count)
T -=1 | [
"[email protected]"
] | |
c31acf44d382b4d4d9b138ea83593ab35a2a2999 | 0b2facfa8d47bceea5bbf969bd1ca86215638cf6 | /macop/algorithms/Algorithm.py | 140bb33cb3ebe9a7cc366b57190e12834379767e | [
"MIT"
] | permissive | geoffreyp/macop | 37ec5c0ed7913068ee808e63c9c537babed479ca | 287df287e23c7e4f07e90dfcc0a99ef247f5c6b5 | refs/heads/master | 2022-12-29T06:40:11.774347 | 2020-10-23T10:25:15 | 2020-10-23T10:25:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,374 | py | """Abstract Algorithm class used as basic algorithm implementation with some specific initialization
"""
# main imports
import logging
import pkgutil
import sys
from ..utils.color import macop_text, macop_line, macop_progress
# Generic algorithm class
class Algorithm():
"""Algorithm class used as basic algorithm
Attributes:
        initializer: {function} -- strategy used to initialize a solution
        evaluator: {function} -- function used to obtain the fitness of a solution (mono- or multi-objective)
        operators: {[Operator]} -- list of operators to use when running the algorithm
        policy: {Policy} -- Policy implementation used to select among the operators
        validator: {function} -- function that checks whether a solution is valid under the problem constraints
        maximise: {bool} -- True for a maximisation problem, False for minimisation
        currentSolution: {Solution} -- solution managed during the current evaluation
        bestSolution: {Solution} -- best solution found so far while running the algorithm
        callbacks: {[Callback]} -- Callback implementations that run custom instructions every few evaluations and whose `load` is called when the algorithm is initialized
        parent: {Algorithm} -- reference to the parent algorithm when this is an inner Algorithm instance (optional)
"""
def __init__(self,
_initalizer,
_evaluator,
_operators,
_policy,
_validator,
_maximise=True,
_parent=None):
self.initializer = _initalizer
self.evaluator = _evaluator
self.operators = _operators
self.policy = _policy
self.validator = _validator
self.callbacks = []
self.bestSolution = None
self.currentSolution = None
# by default
self.numberOfEvaluations = 0
self.maxEvaluations = 0
# other parameters
self.parent = _parent # parent algorithm if it's sub algorithm
#self.maxEvaluations = 0 # by default
self.maximise = _maximise
# track reference of algo into operator (keep an eye into best solution)
for operator in self.operators:
operator.setAlgo(self)
# also track reference for policy
self.policy.setAlgo(self)
def addCallback(self, _callback):
"""Add new callback to algorithm specifying usefull parameters
Args:
_callback: {Callback} -- specific Callback instance
"""
# specify current main algorithm reference
_callback.setAlgo(self)
# set as new
self.callbacks.append(_callback)
def resume(self):
"""Resume algorithm using Callback instances
"""
# load every callback if many things are necessary to do before running algorithm
for callback in self.callbacks:
callback.load()
def initRun(self):
"""
Initialize the current solution and best solution
"""
self.currentSolution = self.initializer()
# evaluate current solution
self.currentSolution.evaluate(self.evaluator)
# keep in memory best known solution (current solution)
if self.bestSolution is None:
self.bestSolution = self.currentSolution
def increaseEvaluation(self):
"""
Increase number of evaluation once a solution is evaluated
"""
self.numberOfEvaluations += 1
if self.parent is not None:
self.parent.numberOfEvaluations += 1
def getGlobalEvaluation(self):
"""Get the global number of evaluation (if inner algorithm)
Returns:
{int} -- current global number of evaluation
"""
if self.parent is not None:
return self.parent.getGlobalEvaluation()
return self.numberOfEvaluations
def getGlobalMaxEvaluation(self):
"""Get the global max number of evaluation (if inner algorithm)
Returns:
{int} -- current global max number of evaluation
"""
if self.parent is not None:
return self.parent.getGlobalMaxEvaluation()
return self.maxEvaluations
def stop(self):
"""
Global stopping criteria (check for inner algorithm too)
"""
if self.parent is not None:
return self.parent.numberOfEvaluations >= self.parent.maxEvaluations or self.numberOfEvaluations >= self.maxEvaluations
return self.numberOfEvaluations >= self.maxEvaluations
def evaluate(self, _solution):
"""
        Evaluate a solution using the evaluator passed when initializing the algorithm
        Args:
            solution: {Solution} -- solution to evaluate
        Returns:
            fitness score of the solution, unless it was already evaluated and left unchanged
        Note:
            for multi-objective problems this method can be extended to use an array of `evaluator`
"""
return _solution.evaluate(self.evaluator)
def update(self, _solution):
"""
Apply update function to solution using specific `policy`
Check if solution is valid after modification and returns it
Args:
solution: {Solution} -- solution to update using current policy
Returns:
{Solution} -- updated solution obtained by the selected operator
"""
# two parameters are sent if specific crossover solution are wished
sol = self.policy.apply(_solution)
if (sol.isValid(self.validator)):
return sol
else:
logging.info("-- New solution is not valid %s" % sol)
return _solution
def isBetter(self, _solution):
"""
Check if solution is better than best found
Args:
solution: {Solution} -- solution to compare with best one
Returns:
{bool} -- `True` if better
"""
# depending of problem to solve (maximizing or minimizing)
if self.maximise:
if _solution.fitness() > self.bestSolution.fitness():
return True
else:
if _solution.fitness() < self.bestSolution.fitness():
return True
# by default
return False
def run(self, _evaluations):
"""
        Run the algorithm for the given number of evaluations in search of an optimum
"""
# append number of max evaluation if multiple run called
self.maxEvaluations += _evaluations
# check if global evaluation is used or not
if self.parent is not None and self.getGlobalEvaluation() != 0:
# init number evaluations of inner algorithm depending of globalEvaluation
# allows to restart from `checkpoint` last evaluation into inner algorithm
rest = self.getGlobalEvaluation() % self.maxEvaluations
self.numberOfEvaluations = rest
else:
self.numberOfEvaluations = 0
logging.info("Run %s with %s evaluations" %
(self.__str__(), _evaluations))
def progress(self):
"""
Log progress and apply callbacks if necessary
"""
if len(self.callbacks) > 0:
for callback in self.callbacks:
callback.run()
macop_progress(self.getGlobalEvaluation(),
self.getGlobalMaxEvaluation())
logging.info("-- %s evaluation %s of %s (%s%%) - BEST SCORE %s" %
(type(self).__name__, self.numberOfEvaluations,
self.maxEvaluations, "{0:.2f}".format(
(self.numberOfEvaluations) / self.maxEvaluations *
100.), self.bestSolution.fitness()))
def end(self):
"""Display end message into `run` method
"""
print(
macop_text('({}) Found after {} evaluations \n - {}'.format(
type(self).__name__, self.numberOfEvaluations,
self.bestSolution)))
print(macop_line())
def information(self):
logging.info("-- Best %s - SCORE %s" %
(self.bestSolution, self.bestSolution.fitness()))
def __str__(self):
return "%s using %s" % (type(self).__name__, type(
self.bestSolution).__name__)
| [
"[email protected]"
] | |
b0ab78c85f4036c1d5dc4095044316ce631511b5 | 7f5a392fc8a08c1cc1f2bc658497b12ef2a27f79 | /air/discarded/train_air_num_bbox.py | 68861ea639f1c0149792768d3288aaf5a0761c16 | [] | no_license | taufikxu/MOG-ASR | f3478c523cbb29a985bdf5cadd2cba500d71fe3c | 72407c36ea4e61b0c9777fefd6bec7836126077a | refs/heads/master | 2020-08-26T16:04:59.603570 | 2019-10-23T13:49:55 | 2019-10-23T13:49:55 | 217,066,279 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,826 | py | import os
import shutil
import argparse
import time
import tensorflow as tf
import numpy as np
from multi_mnist import read_and_decode
from multi_mnist import read_test_data
from air.air_prior_correlated_num_bbox import AIRModel
from utils.checkpoints import build_logger
EPOCHS = 300
BATCH_SIZE = 64
CANVAS_SIZE = 50
MAX_STEPS = 6
# it is assumed that frequencies of more rare
# summaries in {NUM, VAR, IMG} are divisible
# by the frequencies of more frequent ones
LOG_EACH_ITERATION = 20
NUM_SUMMARIES_EACH_ITERATIONS = 50
VAR_SUMMARIES_EACH_ITERATIONS = 250
IMG_SUMMARIES_EACH_ITERATIONS = 500
GRAD_SUMMARIES_EACH_ITERATIONS = 100
SAVE_PARAMS_EACH_ITERATIONS = 10000
NUM_IMAGES_TO_SAVE = 60
DEFAULT_READER_THREADS = 4
DEFAULT_RESULTS_FOLDER = "Not Valid"  # sentinel: replaced below by a timestamped folder name
# parsing command-line arguments
parser = argparse.ArgumentParser()
parser.add_argument("-r", "--results-folder", default=DEFAULT_RESULTS_FOLDER)
parser.add_argument("-k", "-key", "--key", default="")
parser.add_argument(
"-gl", "-gamma_location", "--gamma_location", type=float, default=1000
)
parser.add_argument("-gn", "-gamma_number", "--gamma_number", type=float, default=2.0)
parser.add_argument("-gb", "-gamma_bbox", "--gamma_bbox", type=float, default=2.0)
parser.add_argument("-o", "--overwrite-results", type=int, choices=[0, 1], default=0)
parser.add_argument("-t", "--reader-threads", type=int, default=DEFAULT_READER_THREADS)
parser.add_argument("-dn", "--dig_num", type=str, default="02")
parser.add_argument("-dl", "--dig_location", type=str, default="")
args = parser.parse_args()
if args.dig_num == "02":
NUM_OF_DIGITS_TRAIN = [0, 2]
elif args.dig_num == "13":
NUM_OF_DIGITS_TRAIN = [1, 3]
else:
raise ValueError("not valid number of digit: " + args.dig_num)
if args.dig_location not in ["", "right_half"]:
raise ValueError("not valid location of digit: " + args.dig_location)
NUM_OF_DIGITS_TEST = NUM_OF_DIGITS_TRAIN
name_of_common_train = args.dig_location
for item in NUM_OF_DIGITS_TRAIN:
name_of_common_train += str(item)
name_of_common_test = name_of_common_train
TRAIN_DATA_FILE = "./data/multi_mnist_data/common{}.tfrecords".format(
name_of_common_train
)
TEST_DATA_FILE = "./data/multi_mnist_data/test{}.tfrecords".format(name_of_common_test)
if args.results_folder == "Not Valid":
args.results_folder = "./results/{time}-({file})_(train.{train}_test.{test})".format(
file=__file__,
train=name_of_common_train.replace("_", "."),
test=name_of_common_test.replace("_", "."),
time=time.strftime("%Y-%m-%d-%H-%M"),
)
# removing existing results folder (with content), if configured so
# otherwise, appending next available sequence # to the folder name
args.results_folder += "_({})".format(args.key)
if os.path.exists(args.results_folder):
if args.overwrite_results:
shutil.rmtree(args.results_folder, ignore_errors=True)
else:
folder, i = args.results_folder, 0
args.results_folder = "{}_{}".format(folder, i)
while os.path.exists(args.results_folder):
i += 1
args.results_folder = "{}_{}".format(folder, i)
MODELS_FOLDER = args.results_folder + "/models/"
SUMMARIES_FOLDER = args.results_folder + "/summary/"
SOURCE_FOLDER = args.results_folder + "/source/"
# creating result directories
os.makedirs(args.results_folder)
os.makedirs(MODELS_FOLDER)
os.makedirs(SUMMARIES_FOLDER)
os.makedirs(SOURCE_FOLDER)
log = build_logger(args.results_folder, args)
# creating a copy of the current version of *.py source files
for folder in ["./", "air/"]:
destination = SOURCE_FOLDER
if folder != "./":
destination += folder
os.makedirs(destination)
for file in [f for f in os.listdir(folder) if f.endswith(".py")]:
shutil.copy(folder + file, destination + file)
log.info("Creating input pipeline...")
with tf.variable_scope("pipeline"):
# fetching a batch of numbers of digits and images from a queue
filename_queue = tf.train.string_input_producer(
[TRAIN_DATA_FILE], num_epochs=EPOCHS
)
train_data, train_targets = read_and_decode(
filename_queue, BATCH_SIZE, CANVAS_SIZE, args.reader_threads
)
# # placeholders for feeding the same test dataset to test model
test_data = tf.placeholder(tf.float32, shape=[None, CANVAS_SIZE ** 2])
test_targets = tf.placeholder(tf.int32, shape=[None])
models = []
model_inputs = [[train_data, train_targets], [test_data, test_targets]]
def constrains_x_y(x, y, gamma_location=args.gamma_location):
# loss = tf.maximum(0.5 - x, 0) * gamma_location
# return loss
return 0.0
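

# NOTE (assumption): `constrains_num` is passed to AIRModel below but is not
# defined or imported anywhere in this file; this zero-loss placeholder,
# mirroring `constrains_x_y`, is an assumed stub added so the script can run.
# The real number-of-objects constraint, if any, lived elsewhere.
def constrains_num(*unused_args, **unused_kwargs):
    return 0.0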
# creating two separate models - for training and testing - with
# identical configuration and sharing the same set of variables
for i in range(2):
print("Creating {0} model...".format("training" if i == 0 else "testing"))
models.append(
AIRModel(
model_inputs[i][0],
model_inputs[i][1],
max_steps=MAX_STEPS,
max_digits=MAX_STEPS,
rnn_units=256,
canvas_size=CANVAS_SIZE,
windows_size=28,
vae_latent_dimensions=50,
vae_recognition_units=(512, 256),
vae_generative_units=(256, 512),
scale_prior_mean=0.0,
scale_prior_variance=1.0,
shift_prior_mean=0.0,
shift_prior_variance=1.0,
vae_prior_mean=0.0,
vae_prior_variance=1.0,
vae_likelihood_std=0.3,
scale_hidden_units=64,
shift_hidden_units=64,
z_pres_hidden_units=64,
z_pres_prior_log_odds=-0.01,
z_pres_temperature=1.0,
stopping_threshold=0.99,
learning_rate=1e-4,
gradient_clipping_norm=1.0,
cnn=False,
cnn_filters=8,
num_summary_images=NUM_IMAGES_TO_SAVE,
train=(i == 0),
reuse=(i == 1),
scope="air",
annealing_schedules={
"z_pres_prior_log_odds": {
"init": 10000.0,
"min": 0.000000001,
"factor": 0.1,
"iters": 3000,
"staircase": False,
"log": True,
},
# "learning_rate": {
# "init": 1e-3, "min": 1e-4,
# "factor": 0.5, "iters": 10000,
# "staircase": False
# }
},
constrains_x_y=constrains_x_y,
constrains_num=constrains_num,
constrains_num_gamma=args.gamma_number,
constrains_bbox_gamma=args.gamma_bbox,
)
)
train_model, test_model = models
sym_gen_samples = test_model.generated_samples
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
# start the training process
with tf.Session(config=config) as sess:
coord = tf.train.Coordinator()
log.info("Initializing variables...")
sess.run(tf.local_variables_initializer())
sess.run(tf.global_variables_initializer())
log.info("Starting queue runners...")
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
writer = tf.summary.FileWriter(SUMMARIES_FOLDER, sess.graph)
saver = tf.train.Saver(max_to_keep=3)
# diagnostic summaries are fetched from the test model
num_summaries = tf.summary.merge(test_model.num_summaries)
var_summaries = tf.summary.merge(test_model.var_summaries)
img_summaries = tf.summary.merge(test_model.img_summaries)
# gradient summaries are fetched from the training model
grad_summaries = tf.summary.merge(train_model.grad_summaries)
log.info("Reading test set...")
# reading the test dataset, to be used with test model for
# computing all summaries throughout the training process
test_images, test_num_digits, *_ = read_test_data(
TEST_DATA_FILE, shift_zero_digits_images=True
)
log.info("Training...\n")
try:
# beginning with step = 0 to capture all summaries
# and save the initial values of the model parameters
# before the actual training process has started
step = 0
loss_list, accu_list = [], []
while True:
# saving summaries with configured frequency
if step % NUM_SUMMARIES_EACH_ITERATIONS == 0:
if step % VAR_SUMMARIES_EACH_ITERATIONS == 0:
if step % IMG_SUMMARIES_EACH_ITERATIONS == 0:
num_sum, var_sum, img_sum = sess.run(
[num_summaries, var_summaries, img_summaries],
feed_dict={
test_data: test_images,
test_targets: test_num_digits,
},
)
writer.add_summary(img_sum, step)
else:
num_sum, var_sum = sess.run(
[num_summaries, var_summaries],
feed_dict={
test_data: test_images,
test_targets: test_num_digits,
},
)
writer.add_summary(var_sum, step)
else:
num_sum = sess.run(
num_summaries,
feed_dict={
test_data: test_images,
test_targets: test_num_digits,
},
)
writer.add_summary(num_sum, step)
# saving parameters with configured frequency
if step % SAVE_PARAMS_EACH_ITERATIONS == 0:
saver.save(sess, MODELS_FOLDER + "air-model", global_step=step)
# training step
if step % GRAD_SUMMARIES_EACH_ITERATIONS == 0:
# with gradient summaries
_, train_loss, train_accuracy, step, grad_sum = sess.run(
[
train_model.training,
train_model.loss,
train_model.accuracy,
train_model.global_step,
grad_summaries,
]
)
writer.add_summary(grad_sum, step)
else:
# without gradient summaries
_, train_loss, train_accuracy, step = sess.run(
[
train_model.training,
train_model.loss,
train_model.accuracy,
train_model.global_step,
]
)
test_loss, test_accuracy = sess.run(
[test_model.loss, test_model.accuracy],
feed_dict={test_data: test_images, test_targets: test_num_digits},
)
loss_list.append([train_loss, test_loss])
accu_list.append([train_accuracy, test_accuracy])
if step % LOG_EACH_ITERATION == 0:
l0, l1 = np.mean(loss_list[-LOG_EACH_ITERATION:], axis=0)
a0, a1 = np.mean(accu_list[-LOG_EACH_ITERATION:], axis=0)
log.info(
"iteration {}\ttrain loss {:.3f}\ttrain accuracy {:.2f}".format(
step, l0, a0
)
)
log.info(
"iteration {}\ttest loss {:.3f}\ttest accuracy {:.2f}".format(
step, l1, a1
)
)
except tf.errors.OutOfRangeError:
log.info("\ntraining has ended\n")
finally:
coord.request_stop()
coord.join(threads)
| [
"[email protected]"
] | |
8930a0c2a06c0c47f689a2c208024783fa6de8f4 | 769f08657e880c4b1cc085dd77277c6eef8772e5 | /pep412.py | 9d69fb239b7d2c34d02bda047e6e781adb4dd5ec | [] | no_license | udemy-course/python3-new-feature | 106160389ebf70e13a5c2bce158fbc32d4979fc9 | b6b482bc4acffb9bf168dc9f494f28179357934c | refs/heads/master | 2020-03-23T03:26:29.954700 | 2018-08-23T13:37:40 | 2018-08-23T13:37:40 | 141,030,826 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 340 | py | """
pep412.py
Created by Peng Xiao on 2018-08-10. [email protected]
"""
import time
class Foo:
def __init__(self, a, b):
self.a = a
self.b = b
if __name__ == "__main__":
n = 1000000
i = 0
result = []
while i < n:
result.append(Foo(1,2))
i += 1
while True:
time.sleep(4)
| [
"[email protected]"
] | |
9fedf7c194fd4216199fea7f3daae1bf67c19c59 | 62c20237dbc12a49849bc6533e5e386bc6a26cf6 | /app.py | 4234d25f12c085e1b347a75fbf2b2881d1665eb2 | [] | no_license | yhx189/api-server | 03ef3b70c5734442c90dc8b118920237616da072 | d96cfc2d437ff2b540e355ae363003629d23a1f1 | refs/heads/master | 2021-01-10T01:15:23.539890 | 2016-01-21T00:53:18 | 2016-01-21T00:53:18 | 50,071,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,414 | py | #!flask/bin/python
import socket
import subprocess
from flask import Flask, jsonify, abort, request, make_response, url_for
app = Flask(__name__, static_url_path = "")
#app = Flask(__name__)
@app.errorhandler(400)
def bad_request(error):
return make_response(jsonify( { 'error': 'Bad request' } ), 400)
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify( { 'error': 'Not found' } ), 404)
tasks = []
with open('out.txt') as f:
lines = f.readlines()
for line in lines:
words = line.split(' ')
task = {'src': words[1],
'dst': words[4],
'rtt': words[7],
'bandwidth': words[11]}
tasks.append(task)
print tasks
#tasks = [
# {
# 'id': 1,
# 'dst': u'165.124.182.209',
# 'bandwidth': u'28.05',
# 'done': False
# },
# {
# 'id': 2,
# 'dst': u'216.58.216.78',
# 'bandwidth': u'200.5',
# 'done': False
# }
#]
def make_public_task(task):
new_task = {}
for field in task:
if field == 'id':
            new_task['uri'] = url_for('get_task', task_id = task['dst'], src_id = task['src'], _external = True)
else:
new_task[field] = task[field]
return new_task
@app.route('/todo/api/v1.0/tasks', methods = ['GET'])
def get_tasks():
return jsonify({'tasks': tasks})
@app.route('/todo/api/v1.0/hops/<task_id>', methods = ['GET'])
def get_hop(task_id):
dest_name="google.com"
dest_addr = socket.gethostbyname(dest_name)
port = 33434
max_hops = 30
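    # classic UDP traceroute: send UDP probes with increasing TTL and listen
    # on a raw ICMP socket for the time-exceeded replies; the hop that
    # answers at ttl == task_id is returned as JSON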
icmp = socket.getprotobyname('icmp')
udp = socket.getprotobyname('udp')
ttl = 1
while True:
recv_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp)
send_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, udp)
send_socket.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
print "ttl is:%d" % ttl
recv_socket.bind(("", port))
send_socket.sendto("", (dest_name, port))
curr_addr = None
curr_name = None
try:
_, curr_addr = recv_socket.recvfrom(512)
curr_addr = curr_addr[0]
try:
curr_name = socket.gethostbyaddr(curr_addr)[0]
except socket.error:
curr_name = curr_addr
except socket.error:
pass
finally:
send_socket.close()
recv_socket.close()
if curr_addr is not None:
curr_host = "%s (%s)" % (curr_name, curr_addr)
else:
curr_host = "*"
print "%d\t%s" % (ttl, curr_host)
if ttl == int(task_id):
ret = {'ip': curr_host}
return jsonify( { 'task': ret } )
ttl += 1
if curr_addr == dest_addr or ttl > int(task_id): #max_hops:
break
@app.route('/todo/api/v1.0/tasks/<task_id>/<src_id>', methods = ['GET'])
def get_task(task_id, src_id):
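    # look up a measurement by matching the first 5 characters of the source
    # and destination IPs; if absent, invoke the external king tool to
    # measure it and re-read out.txt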
print task_id
print src_id
task = filter(lambda t: t['dst'][:5] == task_id[:5], tasks)
new_task = filter(lambda t: t['src'][:5] == src_id[:5], task)
if len(new_task) == 0:
print "cannot find the ip " + task_id + " from the database"
print "calling king service from server"
print subprocess.call(["../king/bin/king", src_id, task_id], stdout=open('log.txt','a'))
re_tasks = []
with open('out.txt') as ff:
lines = ff.readlines()
for line in lines:
words = line.split(' ')
re_task = {'src': words[1],
'dst': words[4],
'rtt': words[7],
'bandwidth': words[11]}
re_tasks.append(re_task)
print re_tasks
_task = filter(lambda t: t['dst'][:5] == task_id[:5], re_tasks)
inject_task = filter(lambda t: t['src'][:5] == src_id[:5], _task)
print inject_task
if len(inject_task) == 0:
abort(404)
print inject_task
new_task = inject_task
print new_task
return jsonify( { 'task': make_public_task(new_task[0]) } )
@app.route('/todo/api/v1.0/tasks', methods = ['POST'])
def create_task():
    if not request.json or not 'dst' in request.json:
abort(400)
task = {
'id': tasks[-1]['id'] + 1,
'dst': request.json['dst'],
'bandwidth': request.json.get('bandwidth', ""),
'done': False
}
tasks.append(task)
return jsonify( { 'task': make_public_task(task) } ), 201
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods = ['PUT'])
def update_task(task_id):
task = filter(lambda t: t['id'] == task_id, tasks)
if len(task) == 0:
abort(404)
if not request.json:
abort(400)
    if 'dst' in request.json and type(request.json['dst']) != unicode:
        abort(400)
    if 'bandwidth' in request.json and type(request.json['bandwidth']) is not unicode:
        abort(400)
if 'done' in request.json and type(request.json['done']) is not bool:
abort(400)
task[0]['dst'] = request.json.get('dst', task[0]['dst'])
task[0]['bandwidth'] = request.json.get('bandwidth', task[0]['bandwidth'])
task[0]['done'] = request.json.get('done', task[0]['done'])
return jsonify( { 'task': make_public_task(task[0]) } )
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods = ['DELETE'])
def delete_task(task_id):
task = filter(lambda t: t['id'] == task_id, tasks)
if len(task) == 0:
abort(404)
tasks.remove(task[0])
return jsonify( { 'result': True } )
if __name__ == '__main__':
app.run(debug = True, host ='0.0.0.0')
| [
"ubuntu@ubuntu.(none)"
] | ubuntu@ubuntu.(none) |
09c129b6cd1a837099b4b2c25784a3808389e94e | eafe15761be5f857d5b756eafc9446fc375aba66 | /video/migrations/0002_auto_20200216_1749.py | 2c6e5d422dc6f2c4f631513f1d9151a4b2a1e24c | [] | no_license | biletboh/tempora | 6ff10995e7eacae8b2ac95b1dfb785749949670b | cec40ad8719d03e5c1d83320c38f78d3e0338687 | refs/heads/master | 2023-05-25T21:23:09.466591 | 2022-02-09T10:10:58 | 2022-02-09T10:10:58 | 92,310,550 | 0 | 0 | null | 2023-05-22T20:57:13 | 2017-05-24T15:56:23 | HTML | UTF-8 | Python | false | false | 507 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2020-02-16 15:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('video', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='video',
name='slug',
field=models.SlugField(blank=True, null=True, unique=True, verbose_name='Посилання на сайті'),
),
]
| [
"[email protected]"
] | |
fee306a0edc828534bcb940ad1661104bfc04226 | 0557d23e6752aa981a0fb81d6c7a727bfdf760d2 | /airflow_cdk/__init__.py | 5d82fcd42cb6bf344ef8397739e5312f0f597f3c | [
"Apache-2.0"
] | permissive | knowsuchagency/airflow-cdk | 15b5f0cac1c6c8e7b7d450fc8ab29bbb3fc50aa1 | 6f83e9cfcc01f18ddaac8dbe313ababd3129fe88 | refs/heads/master | 2022-12-17T14:21:06.246049 | 2020-09-21T22:07:04 | 2020-09-21T22:07:04 | 266,039,663 | 27 | 3 | Apache-2.0 | 2020-09-21T21:02:37 | 2020-05-22T06:50:33 | Python | UTF-8 | Python | false | false | 43 | py | from airflow_cdk.cdk import FargateAirflow
| [
"[email protected]"
] | |
6bece771f9246166f9a70f04cf16afe1303fb78c | fc0cc698686f4b17aafe42360dac81357bcb183e | /docimage/preprocessing.py | a2d90ef4b101fa2335edaf0ef786be4c5104e6e4 | [
"MIT"
] | permissive | Abhishek-Prusty/docimage | 7b7bef669b3a5e03b170fed4c9f340af9e103fe7 | c29f9a2634e6f807b108cd4237783be22baea307 | refs/heads/master | 2020-03-22T13:52:45.381192 | 2018-08-16T17:16:57 | 2018-08-16T17:16:57 | 140,138,183 | 2 | 0 | MIT | 2018-08-11T09:59:15 | 2018-07-08T04:48:38 | Python | UTF-8 | Python | false | false | 6,980 | py | # -*- coding: utf-8 -*-
"""
pre-processing and pattern matching.
This python module can perform the following functions:
1. Binarization - method binary_img(img) performs this function
2. Skew correction - method skew_correction(img) performs this function
Need to introduce machine learning of some sort to make the skew correction
method run faster :(
Or... A simple fix would be to resize the image first, and then apply the skew
correction method! That'll probably take lesser time...
Resizing is yielding better results.
"""
import logging
import cv2
import numpy as np
from scipy.stats import mode
logging.basicConfig(
level=logging.DEBUG,
format="%(levelname)s: %(asctime)s {%(filename)s:%(lineno)d}: %(message)s "
)
kernel = np.ones((5, 5), np.uint8)
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
"""
Method to binarize an image
Input: Grayscale image
Output: Binary image
The nature of the output is such that the text(foreground) has a colour
value of (255,255,255), and the background has a value of (0,0,0).
"""
def binary_img(img):
# img_erode = cv2.dilate(img,kernel,iterations = 2)
blur = cv2.medianBlur(img, 5)
# mask1 = np.ones(img.shape[:2],np.uint8)
"""Applying histogram equalization"""
cl1 = clahe.apply(blur)
circles_mask = cv2.dilate(cl1, kernel, iterations=1)
circles_mask = (255 - circles_mask)
thresh = 1
circles_mask = cv2.threshold(circles_mask, thresh, 255, cv2.THRESH_BINARY)[1]
edges = cv2.Canny(cl1, 100, 200)
edges = cv2.bitwise_and(edges, edges, mask=circles_mask)
dilation = cv2.dilate(edges, kernel, iterations=1)
display = cv2.bitwise_and(img, img, mask=dilation)
cl2 = clahe.apply(display)
cl2 = clahe.apply(cl2)
ret, th = cv2.threshold(cl2, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
th = 255 - th
thg = cv2.adaptiveThreshold(display, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, \
cv2.THRESH_BINARY, 11, 2)
# final = cv2.bitwise_and(dilation,dilation,mask=th)
finalg = cv2.bitwise_and(dilation, dilation, mask=thg)
finalg = 255 - finalg
abso = cv2.bitwise_and(dilation, dilation, mask=finalg)
return abso
"""
Method to resize the image. This is going to help in reducing the number
of computations, as the size of data will reduce.
"""
def resize(img):
r = 1000.0 / img.shape[1]
dim = (1000, int(img.shape[0] * r))
resized = cv2.resize(img, dim, interpolation=cv2.INTER_AREA)
# cv2.imshow('resized', resized)
return resized
"""
Method to correct the skew of an image
Input: Binary image
Output: Skew corrected binary image
The nature of the output is such that the binary image is rotated appropriately
to remove any angular skew.
Find out the right place to insert the resizing method call.
Try to find one bounding rectangle around all the contours
"""
def skew_correction(img):
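    # pipeline: binarize -> keep contours within one std dev of the mean area
    # -> vertical Sobel + Hough lines to isolate word baselines -> PCA of each
    # word's white pixels to estimate its orientation -> take the mode of the
    # rounded per-word angles and rotate by the mean of the angles near it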
areas = [] # stores all the areas of corresponding contours
    dev_areas = []  # contour areas shifted by the area standard deviation; used to keep contours within one std dev of the mean area
all_angles = []
k = 0
binary = binary_img(img)
# binary = resize(binary)
im2, contours, hierarchy = cv2.findContours(binary, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# cnt = contours[0]
# upper_bound=len(contours)
height_orig, width_orig = img.shape[:2]
words = np.zeros(img.shape[:2], np.uint8)
for c in contours:
areas.append(cv2.contourArea(c))
std_dev = np.std(areas)
for i in areas:
dev_areas.append(i - std_dev)
dev_contours = np.zeros(img.shape[:2], np.uint8)
for i in dev_areas:
if ((i > (-std_dev)) and (i <= (std_dev))):
cv2.drawContours(dev_contours, contours, k, (255, 255, 255), -1)
k += 1
sobely = cv2.Sobel(dev_contours, cv2.CV_64F, 0, 1, ksize=5)
abs_sobel64f = np.absolute(sobely)
sobel_8u = np.uint8(abs_sobel64f)
# cv2.imshow('Output2',sobel_8u)
minLineLength = 100
maxLineGap = 10
lines = cv2.HoughLinesP(sobel_8u, 1, np.pi / 180, 100, minLineLength, maxLineGap)
for x1, y1, x2, y2 in lines[0]:
cv2.line(words, (x1, y1), (x2, y2), (255, 255, 255), 2)
# cv2.imshow('hough',words)
height_orig, width_orig = img.shape[:2]
all_angles = []
im2, contours, hierarchy = cv2.findContours(words, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
logging.debug(len(contours))
contour_count = 0
for c in contours:
# max_index = np.argmax(areas)
# current_contour = np.zeros(img.shape[:2],np.uint8)
current_contour = np.zeros(img.shape[:2], np.uint8)
cv2.drawContours(current_contour, contours, contour_count, (255, 255, 255), -1)
height, width = current_contour.shape[:2]
# all_white_pixels = []
current_white_pixels = []
for i in range(0, height):
for j in range(0, width):
if (current_contour.item(i, j) == 255):
current_white_pixels.append([i, j])
matrix = np.array(current_white_pixels)
"""Finding covariance matrix"""
C = np.cov(matrix.T)
eigenvalues, eigenvectors = np.linalg.eig(C)
"""Finding max eigenvalue"""
# max_ev = max(eigenvalues)
"""Finding index of max eigenvalue"""
max_index = eigenvalues.argmax(axis=0)
"""The largest eigen value gives the approximate length of the bounding
ellipse around the largest word. If we follow the index of the largest
eigen value and find the eigen vectors in the column of that index,
we'll get the x and y coordinates of it's centre."""
y = eigenvectors[1, max_index]
x = eigenvectors[0, max_index]
angle = (np.arctan2(y, x)) * (180 / np.pi)
all_angles.append(angle)
contour_count += 1
logging.debug(contour_count)
logging.debug(all_angles)
angle = np.mean(all_angles)
logging.debug(angle)
k = 0
non_zero_angles = []
for i in all_angles:
if ((i != 0) and (i != 90.0)):
non_zero_angles.append(i)
logging.debug(non_zero_angles)
rounded_angles = []
for i in non_zero_angles:
rounded_angles.append(np.round(i, 0))
logging.debug(rounded_angles)
logging.debug("mode is")
# logging.debug(np.mode(rounded_angles))
# angle = np.mean(non_zero_angles)
# angle = np.mode(rounded_angles)
mode_angle = mode(rounded_angles)[0][0]
logging.debug(mode_angle)
precision_angles = []
for i in non_zero_angles:
if (np.round(i, 0) == mode_angle):
precision_angles.append(i)
logging.debug('precision angles:')
logging.debug(precision_angles)
angle = np.mean(precision_angles)
logging.debug('Finally, the required angle is:')
logging.debug(angle)
# M = cv2.getRotationMatrix2D((width/2,height/2),-(90+angle),1)
M = cv2.getRotationMatrix2D((width / 2, height / 2), -(90 + angle), 1)
dst = cv2.warpAffine(img, M, (width_orig, height_orig))
# cv2.imshow('final',dst)
cv2.imwrite('skewcorrected2.jpg', dst)
return dst
def preprocess(img):
return skew_correction(img)
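
# A minimal usage sketch (hypothetical file names), assuming an 8-bit
# grayscale scan:
#   img = cv2.imread('scan.jpg', cv2.IMREAD_GRAYSCALE)
#   deskewed = preprocess(img)
#   cv2.imwrite('deskewed.jpg', deskewed)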
# Does not work with linux:
# cv2.destroyAllWindows()
| [
"[email protected]"
] | |
0112480a4196f04c5801ba35de61675a5b1bc136 | 26dec2f8f87a187119336b09d90182d532e9add8 | /mcod/reports/tests/conftest.py | c0e0376a877c755a7387fefc0a28a7e11df4dff4 | [] | no_license | olekstomek/mcod-backend-dane.gov.pl | 7008bcd2dbd0dbada7fe535536b02cf27f3fe4fd | 090dbf82c57633de9d53530f0c93dddf6b43a23b | refs/heads/source-with-hitory-from-gitlab | 2022-09-14T08:09:45.213971 | 2019-05-31T06:22:11 | 2019-05-31T06:22:11 | 242,246,709 | 0 | 1 | null | 2020-02-24T22:39:26 | 2020-02-21T23:11:50 | Python | UTF-8 | Python | false | false | 46 | py | from mcod.core.tests.conftest import * # noqa | [
"[email protected]"
] | |
4aa5f2afb04c078af8ed9f1cb0036c1589a50253 | 4012f290d83ae7f4c09d7440f26d2acd7e63efbe | /1173.py | 84b870ec4827293d81d8ffc5b50fe0a1e8918e42 | [] | no_license | jinaur/codeup | ffc2d0fdf73892c1f46d80021ad8f4c1293c9e2e | 5f75ace909e2b3151171932cc3ee9f3c49dd46d9 | refs/heads/master | 2023-04-15T07:42:06.244806 | 2021-04-25T13:59:42 | 2021-04-25T13:59:42 | 277,760,813 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | a, b = map(int, input().split())
def my_func(a, b) :
if b < 30 and a == 0 :
a = 23
b += 30
print(a, b)
return
elif b < 30 :
a -= 1
b += 30
print(a, b)
return
b -= 30
print(a, b)
return
my_func(a, b)
| [
"[email protected]"
] | |
4f69ae38888dca5bfa8b94ef6888374f34854149 | cb3634622480f918540ff3ff38c96990a1926fda | /PyProject/sparkproject/rdd/pairRdd/transformations/reduceByKey.py | 513009ff5dc829627635a3d07fae951f883b828a | [] | no_license | jacksonyoudi/AlgorithmCode | cab2e13cd148354dd50a0487667d38c25bb1fd9b | 216299d43ee3d179c11d8ca0783ae16e2f6d7c88 | refs/heads/master | 2023-04-28T07:38:07.423138 | 2022-10-23T12:45:01 | 2022-10-23T12:45:01 | 248,993,623 | 3 | 0 | null | 2023-04-21T20:44:40 | 2020-03-21T14:32:15 | Go | UTF-8 | Python | false | false | 393 | py | # coding: Utf-8
from pyspark import SparkConf, SparkContext
if __name__ == '__main__':
conf = SparkConf().setAppName("rdd").setMaster("local[*]")
sc = SparkContext(conf=conf)
a = ["a", "a", "c", "d", "d", "c", "e"]
b = [1, 2, 3, 4, 1, 3, 7]
data = list(zip(a, b))
disData = sc.parallelize(data)
d = disData.reduceByKey(lambda x, y: x + y)
print(d.collect())
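    # reduceByKey sums the values per key; expected pairs (order may vary):
    # [('a', 3), ('c', 6), ('d', 5), ('e', 7)]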
| [
"[email protected]"
] | |
f22db2a82d7850863138464c298ed71a3e98019e | 47c19df3774c82354579414440e03c1fa89dc49c | /tensorflow/python/training/distribute.py | caffd042a0917209c87cab8993169dc4bc956039 | [
"Apache-2.0"
] | permissive | felipengeletrica/tensorflow | 16f50191bf661e55dda4322d13bb0638da94141e | 82a8b1b30f4d2eff5ae17839a118f5e8e83d81a2 | refs/heads/master | 2020-03-21T08:12:07.889799 | 2018-06-22T15:51:23 | 2018-06-22T15:51:23 | 138,327,820 | 1 | 0 | Apache-2.0 | 2018-06-22T17:01:48 | 2018-06-22T17:01:45 | null | UTF-8 | Python | false | false | 50,665 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class DistributionStrategy, TowerContext, and supporting APIs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
import six
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import losses_impl
from tensorflow.python.platform import tf_logging
from tensorflow.python.training import device_util
from tensorflow.python.util import nest
# ------------------------------------------------------------------------------
# Internal API for setting the current thread mode as being either in a
# tower or cross-tower context for a particular distribution strategy.
class _ThreadMode(object):
def __init__(self, dist, cross, tower):
self.distribution_strategy = dist
self.cross_tower_context = cross
self.tower_context = tower
class _CrossTowerThreadMode(_ThreadMode):
def __init__(self, distribution_strategy):
_ThreadMode.__init__(
self, distribution_strategy, distribution_strategy, None)
class _InTowerThreadMode(_ThreadMode):
def __init__(self, tower_ctx):
_ThreadMode.__init__(
self, tower_ctx.distribution_strategy, None, tower_ctx)
_per_thread_mode = threading.local()
def _push_per_thread_mode(context):
if not hasattr(_per_thread_mode, "stack"):
_per_thread_mode.stack = []
_per_thread_mode.stack.append(context)
def _pop_per_thread_mode():
_per_thread_mode.stack.pop(-1)
class _DefaultTowerThreadMode(_ThreadMode):
"""Type of default value returned by `_get_per_thread_mode()`.
Used when the thread-local stack is empty.
"""
def __init__(self):
# _default_distribution_strategy and _default_tower_context are
# defined at the bottom of this file.
_ThreadMode.__init__(
self, _default_distribution_strategy, None, _default_tower_context)
def _get_per_thread_mode():
try:
return _per_thread_mode.stack[-1]
except (AttributeError, IndexError):
# _default_tower_mode is defined at the bottom of this file.
return _default_tower_mode
# ------------------------------------------------------------------------------
# Context tracking whether in a distribution.update() or .update_non_slot()
# call.
_update_device = threading.local()
def get_update_device():
"""Get the current device if in a `DistributionStrategy.update()` call."""
try:
return _update_device.current
except AttributeError:
return None
class UpdateContext(object):
"""Context manager when you are in `update()` or `update_non_slot()`."""
def __init__(self, device):
self._device = device
self._old_device = None
def __enter__(self):
self._old_device = get_update_device()
_update_device.current = self._device
def __exit__(self, exception_type, exception_value, traceback):
del exception_type, exception_value, traceback
_update_device.current = self._old_device
# ------------------------------------------------------------------------------
# Public API for accessing the current thread mode
def get_tower_context():
"""Returns the current TowerContext or None if in a cross-tower context.
Note that execution:
1. starts in the default (single-tower) tower context (this function
will return the default TowerContext object);
2. switches to cross-tower context (in which case this will return
None) when entering a `with DistributionStrategy.scope():` block;
3. switches to a (non-default) tower context inside
`call_for_each_tower(fn, ...)`;
4. if `fn` calls `get_tower_context()->merge_call(merge_fn, ...)`, then
inside `merge_fn` you are back in the cross-tower context (and again
this function will return None).
Note that you can also go directly from step 1 to 4 to switch to a
cross-tower context for the default `DistributionStrategy`. You may
also switch from the cross-tower context of 4 to a tower context by
calling `call_for_each_tower()`, jumping back to step 3.
Most `DistributionStrategy` methods may only be executed in
  a cross-tower context; in a tower context you should use the
`TowerContext` API instead.
Returns:
The current `TowerContext` object when in a tower context scope, else None.
Exactly one of `get_tower_context()` and `get_cross_tower_context()`
will return None in a particular block.
"""
return _get_per_thread_mode().tower_context
def get_cross_tower_context():
"""Returns the current DistributionStrategy if in a cross-tower context.
Note that execution:
1. starts in the default (single-tower) tower context;
2. switches to cross-tower context when entering a
`with DistributionStrategy.scope():` block;
3. switches to a (non-default) tower context inside
`call_for_each_tower(fn, ...)`;
4. if `fn` calls `get_tower_context()->merge_call(merge_fn, ...)`, then
inside `merge_fn` you are back in the cross-tower context.
Note that you can also go directly from step 1 to 4 to switch to a
cross-tower context for the default `DistributionStrategy`. You may
also switch from the cross-tower context of 4 to a tower context by
calling `call_for_each_tower()`, jumping back to step 3.
Most `DistributionStrategy` methods may only be executed in
a cross-tower context.
Returns:
    The current `DistributionStrategy` object when in a cross-tower
    context, else None.
Exactly one of `get_tower_context()` and `get_cross_tower_context()`
will return None in a particular block.
"""
return _get_per_thread_mode().cross_tower_context
def get_distribution_strategy():
"""Returns the current `DistributionStrategy` object.
Prefer to use `get_tower_context()` or `get_cross_tower_context()`
instead when possible.
Returns:
A `DistributionStrategy` object. Inside a
`with distribution_strategy.scope()` block, it returns
`distribution_strategy`, otherwise it returns the default
(single-tower) `DistributionStrategy` object.
"""
return _get_per_thread_mode().distribution_strategy
def has_distribution_strategy():
"""Return if there is a current non-default `DistributionStrategy`.
Returns:
True if inside a `with distribution_strategy.scope():`.
"""
return get_distribution_strategy() is not _default_distribution_strategy
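# Illustrative sketch of the accessors above (assumes some concrete strategy
# instance `my_strategy`; the name is a placeholder):
#
#   assert not has_distribution_strategy()    # default, single-tower mode
#   assert get_tower_context() is not None    # the default TowerContext
#   with my_strategy.scope():
#     assert has_distribution_strategy()
#     assert get_cross_tower_context() is my_strategy
#     assert get_tower_context() is None      # now in cross-tower context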
# ------------------------------------------------------------------------------
# Public utility functions.
def get_loss_reduction():
"""Reduce `method_string` corresponding to the last loss reduction."""
loss_reduction = ops.get_default_graph()._last_loss_reduction # pylint: disable=protected-access
if loss_reduction == losses_impl.Reduction.SUM:
return "sum"
return "mean"
# ------------------------------------------------------------------------------
# Internal API for validating the current thread mode
def _require_cross_tower_context(distribution_strategy):
"""Verify in cross-tower context for `distribution_strategy`."""
context = _get_per_thread_mode()
if context.cross_tower_context is distribution_strategy: return
# We have an error to report, figure out the right message.
if context.distribution_strategy is not distribution_strategy:
if context.distribution_strategy is _default_distribution_strategy:
raise RuntimeError(
'Need to be inside "with distribution_strategy.scope()" for %s' %
(distribution_strategy,))
else:
raise RuntimeError(
"Mixing different DistributionStrategy objects: %s is not %s" %
(context.distribution_strategy, distribution_strategy))
assert context.cross_tower_context is None
raise RuntimeError("Method requires being in cross-tower context, use "
"get_tower_context().merge_call()")
def require_tower_context(tower_ctx):
"""Verify in `tower_ctx` tower context."""
context = _get_per_thread_mode()
if context.tower_context is tower_ctx: return
# We have an error to report, figure out the right message.
if context.tower_context is None:
raise RuntimeError("Need to be inside `call_for_each_tower()`")
if context.distribution_strategy is tower_ctx.distribution_strategy:
# Two different TowerContexts with the same DistributionStrategy.
raise RuntimeError("Mismatching tower context.")
raise RuntimeError(
"Mismatching DistributionStrategy objects: %s is not %s." %
(context.distribution_strategy, tower_ctx.distribution_strategy))
def _require_distribution_strategy_scope(distribution_strategy):
"""Verify in a `distribution_strategy.scope()` in this thread."""
context = _get_per_thread_mode()
if context.distribution_strategy is distribution_strategy: return
# We have an error to report, figure out the right message.
if context.distribution_strategy is _default_distribution_strategy:
raise RuntimeError(
'Need to be inside "with distribution_strategy.scope()" for %s' %
(distribution_strategy,))
else:
raise RuntimeError(
"Mixing different DistributionStrategy objects: %s is not %s" %
(context.distribution_strategy, distribution_strategy))
# ------------------------------------------------------------------------------
# Internal context managers used to implement the DistributionStrategy
# base class
class _CurrentDistributionContext(object):
"""Context manager for setting the `DistributionStrategy` and var creator."""
def __init__(self,
distribution_strategy,
var_creator_scope,
var_scope=None,
default_device=None):
self._context = _CrossTowerThreadMode(distribution_strategy)
self._var_creator_scope = var_creator_scope
self._var_scope = var_scope
if default_device:
self._device_scope = ops.device(default_device)
else:
self._device_scope = None
def __enter__(self):
_push_per_thread_mode(self._context)
if self._var_scope:
self._var_scope.__enter__()
self._var_creator_scope.__enter__()
if self._device_scope:
self._device_scope.__enter__()
return self._context.distribution_strategy
def __exit__(self, exception_type, exception_value, traceback):
if self._device_scope:
self._device_scope.__exit__(exception_type, exception_value, traceback)
self._var_creator_scope.__exit__(exception_type, exception_value, traceback)
if self._var_scope:
self._var_scope.__exit__(exception_type, exception_value, traceback)
_pop_per_thread_mode()
class _SameScopeAgainContext(object):
"""Trivial context manager when you are already in `scope()`."""
def __init__(self, distribution_strategy):
self._distribution_strategy = distribution_strategy
def __enter__(self):
return self._distribution_strategy
def __exit__(self, exception_type, exception_value, traceback):
del exception_type, exception_value, traceback
# ------------------------------------------------------------------------------
# Base classes for all distribution strategies.
class DistributionStrategy(object):
"""A list of devices with a state & compute distribution policy.
The intent is that you can write an algorithm in a stylized way and
it will be usable with a variety of different `DistributionStrategy`
implementations. Each descendant will implement a different strategy
for distributing the algorithm across multiple devices/machines.
Furthermore, these changes can be hidden inside the specific layers
and other library classes that need special treatment to run in a
distributed setting, so that most users' model definition code can
run unchanged. The `DistributionStrategy` API works the same way
with eager and graph execution.
First let's introduce a few high-level concepts:
* _Data parallelism_ is where we run multiple copies of the model
on different slices of the input data. This is in contrast to
_model parallelism_ where we divide up a single copy of a model
across multiple devices.
Note: we only support data parallelism for now, but
hope to add support for model parallelism in the future.
* A _tower_ is one copy of the model, running on one slice of the
input data.
* _Synchronous_, or more commonly _sync_, training is where the
updates from each tower are aggregated together before updating
the model variables. This is in contrast to _asynchronous_, or
_async_ training, where each tower updates the model variables
independently.
* Furthermore you might run your computation on multiple devices
on one machine (or "host"), or on multiple machines/hosts.
If you are running on multiple machines, you might have a
single master host that drives computation across all of them,
or you might have multiple clients driving the computation
asynchronously.
To distribute an algorithm, we might use some of these ingredients:
* Parameter servers: These are hosts that hold a single copy of
parameters/variables. All towers that want to operate on a variable
retrieve it at the beginning of a step and send an update to be
applied at the end of the step. Can support either sync or async
training.
* Mirrored variables: These are variables that are copied to multiple
devices, where we keep the copies in sync by applying the same
updates to every copy. Normally would only be used with sync training.
* Reductions and Allreduce: A _reduction_ is some method of
aggregating multiple values into one value, like "sum" or
"mean". If doing sync training, we will perform a reduction on the
gradients to a parameter from all towers before applying the
update. Allreduce is an algorithm for performing a reduction on
values from multiple devices and making the result available on
all of those devices.
* In the future we will have support for TensorFlow's partitioned
variables, where a single variable is split across multiple
devices.
We have then a few approaches we want to support:
* Code written (as if) with no knowledge of class `DistributionStrategy`.
This code should work as before, even if some of the layers, etc.
used by that code are written to be distribution-aware. This is done
by having a default `DistributionStrategy` that gives ordinary behavior,
and by default being in a single tower context.
* Ordinary model code that you want to run using a specific
`DistributionStrategy`. This can be as simple as:
```
with my_distribution.scope():
iterator = my_distribution.distribute_dataset(
dataset).make_one_shot_iterator()
tower_train_ops = my_distribution.call_for_each_tower(
tower_fn, iterator.get_next())
train_op = tf.group(my_distribution.unwrap(tower_train_ops))
```
This takes an ordinary `dataset` and `tower_fn` and runs it
distributed using a particular `DistributionStrategy` in
`my_distribution`. Any variables created in `tower_fn` are created
using `my_distribution`'s policy, and library functions called by
`tower_fn` can use the `get_tower_context()` API to get enhanced
behavior in this case.
You can also create an initializable iterator instead of a one-shot
iterator. In that case, you will need to ensure that you initialize the
iterator before calling get_next.
```
iterator = my_distribution.distribute_dataset(
      dataset).make_initializable_iterator()
session.run(iterator.initializer)
```
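A minimal `tower_fn` for the examples above might look like the following
sketch (`my_model`, `my_loss` and the learning rate are placeholders, not
part of this API):
```
def tower_fn(inputs):
  logits = my_model(inputs)                # hypothetical model function
  loss = tf.reduce_mean(my_loss(logits))   # hypothetical loss function
  optimizer = tf.train.GradientDescentOptimizer(0.1)
  # In a tower context, Optimizer.apply_gradients() (called by minimize())
  # performs the cross-tower gradient reduction via merge_call().
  return optimizer.minimize(loss)
```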
* If you want to write a distributed algorithm, you may use any of
the `DistributionStrategy` APIs inside a
`with my_distribution.scope():` block of code.
Lower-level concepts:
* Wrapped values: In order to represent values parallel across devices
(either towers or the devices associated with a particular value), we
wrap them in a "PerDevice" or "Mirrored" object that contains a map
from device to values. "PerDevice" is used when the value may be
  different across devices, and "Mirrored" when the value is the same.
* Unwrapping and merging: Consider calling a function `fn` on
multiple devices, like `call_for_each_tower(fn, w)` with an
argument `w` that is a wrapped value. This means `w` will have a
map taking tower device `d0` to `w0`, tower device `d1` to `w1`,
etc. `call_for_each_tower()` unwraps `w` before calling `fn`, so
it calls `fn(w0)` on `d0`, `fn(w1)` on `d1`, etc. It then merges
the return values from `fn()`, which can possibly result in
wrapped values. For example, let's say `fn()` returns a tuple with
three components: `(x, a, v0)` from tower 0, `(x, b, v1)` on tower 1,
etc. If the first component is the same object `x` from every
tower, then the first component of the merged result will also be
`x`. If the second component is different (`a`, `b`, ...) from
each tower, then the merged value will have a wrapped map from
tower device to the different values. If the third component is
the members of a mirrored variable (`v` maps `d0` to `v0`, `d1` to
`v1`, etc.), then the merged result will be that mirrored variable
(`v`).
* Tower context vs. Cross-tower context: _tower context_ is when we
are in some function that is being called once for each tower.
Otherwise we are in cross-tower context, which is useful for
calling `DistributionStrategy` methods which operate across the
towers (like `reduce()`). By default you start in a tower context
(the default "single tower context") and then some methods can
switch you back and forth, as described below.
* Worker devices vs. parameter devices: Most tower computations will
happen on worker devices. Since we don't yet support model
parallelism, there will be one worker device per tower. When using
parameter servers (see above), the set of devices holding
variables may be different, otherwise the parameter devices might
match the worker devices.
* Non-slot devices are some subset of the parameter devices where we
put all the non-slot variables. We need to ensure that all
non-slot variables are allocated on the same device, or mirrored
across the same set of devices. If you have some variable you want
to colocate all the non-slot variables with, you can use
`colocate_vars_with()` to get the remaining non-slot variables on
the same device. Otherwise you can use `non_slot_devices()` to
pick a consistent set of devices to pass to both
`colocate_vars_with()` and `update_non_slot()`.
When using a `DistributionStrategy`, we have a new type dimension
called _locality_ that says what values are compatible with which
APIs:
* T: different value for each tower (e.g. a PerDevice-wrapped value).
* M: value is "mirrored" across towers, i.e. there are copies with the
same value on each tower (e.g. a Mirrored-wrapped value).
* V(`v`): value is "mirrored" across all the devices which have a
copy of variable `v` (also a Mirrored-wrapped value, but over
parameter devices instead of worker devices).
* N: value is "mirrored" across all the "non-slot" devices
Rules for methods with respect to locality and single-tower vs.
cross-tower context:
* `with d.scope()`: default single-tower context -> cross-tower context for
`d`
* `with d.colocate_vars_with(v)`: in tower/cross-tower context, variables
will be created with locality V(`v`). That is, if we write
`with d.colocate_vars_with(v1): v2 = tf.get_variable(...)`, then
`v2` will have locality V(`v1`), i.e. locality V(`v2`) will equal
V(`v1`).
* `with d.colocate_vars_with(d.non_slot_devices(...))`: in
tower/cross-tower context, variables will be created with locality N
* `v = tf.get_variable(...)`: in tower/cross-tower context, creates
a variable (which by definition will have locality V(`v`), though
will match another locality if inside a `colocate_vars_with`
scope).
* `d.distribute_dataset(dataset).make_one_shot_iterator()`: in cross-tower
context, produces an iterator with locality T
* `d.broadcast(t)`: in cross-tower context, produces a value with locality M
* `d.broadcast(t, v)`: in cross-tower context, produces a value with
locality V(`v`)
* `d.call_for_each_tower(fn, ...)`: in cross-tower context, runs
`fn()` in a tower context (and so may call `get_tower_context()` and
use its API, including `merge_call()` to get back to cross-tower
context), once for each tower. May use values with locality T or
M, and any variable.
* `d.reduce(m, t)`: in cross-tower context, accepts t with locality T
and produces a value with locality M.
* `d.reduce(m, t, v)`: in cross-tower context, accepts t with
locality T and produces a value with locality V(`v`).
  * `d.batch_reduce(m, [(t, v)])`: see `d.reduce()`
* `d.update(v, fn, ...)`: in cross-tower context, runs `fn()` once
for each device `v` is copied to, all inputs should have locality
V(`v`), output will have locality V(`v`) as well.
* `d.update_non_slot(d.non_slot_devices(), fn)`: in cross-tower
context, like `d.update()` except with locality N.
* `d.fetch(t)`: Copy `t` with any locality to the client's CPU device.
TODO(josh11b): Deprecate `fetch`, switch to `read_var` for
reading tower-local variables.
The standard pattern for updating variables is to:
1. Wrap your input dataset in `d.distribute_dataset()` and create an iterator.
  2. Define each tower with `d.call_for_each_tower()` up to the point of
     getting a list of (gradient, variable) pairs.
3. Call `d.reduce("sum", t, v)` or `d.batch_reduce()` to sum the
gradients (with locality T) into values with locality V(`v`).
4. Call `d.update(v)` for each variable to update its value.
Steps 3 and 4 are done automatically by class `Optimizer` if you call
its `apply_gradients` method in a tower context. Otherwise you can
manually call its `_distributed_apply` method in a cross-tower context.
Another thing you might want to do in the middle of your tower function
is an all-reduce of some intermediate value, using `d.reduce()` or
`d.batch_reduce()` without supplying a variable as the destination.
Layers should expect to be called in a tower context, and can use
the `get_tower_context()` function to get a `TowerContext` object. The
`TowerContext` object has a `merge_call()` method for entering
cross-tower context where you can use `reduce()` (or
`batch_reduce()`) and then optionally `update()` to update state.
You may use this API whether or not a `DistributionStrategy` is
being used, since there is a default implementation of
`TowerContext` and `DistributionStrategy`. Or you can use the
`get_tower_context().is_single_tower` property to run different code
in the distributed vs. single tower cases.
"""
# TODO(josh11b): Raise an exception if variable partitioning requested before
# we add support.
# TODO(josh11b): Also `parameter_device_index` property?
# TODO(josh11b): `map()`
# TODO(josh11b): ClusterSpec/ClusterResolver
# TODO(josh11b): Partitioned computations, state; sharding
# TODO(josh11b): Model parallelism: "towers" with multiple devices; shuffling
# TODO(josh11b): List of towers with their worker and parameter devices
# (where the parameter devices may overlap in the ps case).
def __init__(self):
self._default_device = None
def scope(self):
"""Returns a context manager selecting this DistributionStrategy as current.
Inside a `with distribution_strategy.scope():` code block, this thread
will use a variable creator set by `distribution_strategy`, and will
enter its "cross-tower context".
Returns:
A context manager.
"""
if has_distribution_strategy():
_require_cross_tower_context(self)
return _SameScopeAgainContext(self)
def creator_with_resource_vars(*args, **kwargs):
_require_distribution_strategy_scope(self)
kwargs["use_resource"] = True
return self._create_variable(*args, **kwargs)
def disable_partitioned_variables(getter, *args, **kwargs):
if kwargs.pop("partitioner", None) is not None:
tf_logging.log_first_n(
tf_logging.WARN, "Partitioned variables are disabled when using "
"DistributionStrategy.", 1)
return getter(*args, **kwargs)
return _CurrentDistributionContext(
self, variable_scope.variable_creator_scope(creator_with_resource_vars),
variable_scope.variable_scope(
variable_scope.get_variable_scope(),
custom_getter=disable_partitioned_variables),
self._default_device)
def _create_variable(self, next_creator, *args, **kwargs):
# Note: should support "colocate_with" argument.
raise NotImplementedError("must be implemented in descendants")
def tower_local_var_scope(self, reduce_method):
"""Inside this scope, new variables will not be mirrored.
There will still be one component variable per tower, but there is
no requirement that they stay in sync. Instead, when saving them
or calling `fetch()/read_var()`, we use the value that
results when calling `reduce()` on all the towers' variables.
Note: tower-local implies not trainable. Instead, it is expected
that each tower will directly update (using `assign_add()` or
whatever) its local variable instance but only the aggregated
value (accessible using `fetch()`) will be exported from the
model. When it is acceptable to only aggregate on export, we
greatly reduce communication overhead by using tower-local
variables.
Note: All component variables will be initialized to the same
value, using the initialization expression from the first tower.
The values will match even if the initialization expression uses
random numbers.
Args:
reduce_method: String used as a `method_string` to `reduce()`
to get the value to save when checkpointing.
Returns:
A context manager.
"""
def create_tower_local_variable(next_creator, *args, **kwargs):
_require_distribution_strategy_scope(self)
kwargs["use_resource"] = True
kwargs["tower_local_reduce_method"] = reduce_method
return next_creator(*args, **kwargs)
_require_distribution_strategy_scope(self)
return variable_scope.variable_creator_scope(create_tower_local_variable)
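# Example sketch for `tower_local_var_scope` (illustrative; assumes a concrete
# strategy `d` and that we are already inside `d.scope()`):
#
#   with d.tower_local_var_scope("sum"):
#     num_examples = tf.get_variable("num_examples", initializer=0)
#   # Each tower updates its own copy, e.g. num_examples.assign_add(n),
#   # and d.read_var(num_examples) returns the sum across towers.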
def read_var(self, v):
"""Reads the value of a variable.
Returns the aggregate value of a tower-local variable, or the
(read-only) value of any other variable.
Args:
v: A variable allocated within the scope of this `DistributionStrategy`.
Returns:
A tensor representing the value of `v`, aggregated across towers if
necessary.
"""
raise NotImplementedError("must be implemented in descendants")
def colocate_vars_with(self, colocate_with_variable):
"""Scope that controls which devices variables will be created on.
No operations should be added to the graph inside this scope, it
should only be used when creating variables (some implementations
work by changing variable creation, others work by using a
tf.colocate_with() scope).
This may only be used inside `self.scope()`.
Example usage:
```
with distribution_strategy.scope():
var1 = tf.get_variable(...)
with distribution_strategy.colocate_vars_with(v1):
# var2 and var3 will be created on the same device(s) as var1
var2 = tf.get_variable(...)
var3 = tf.get_variable(...)
def fn(v1, v2, v3):
# operates on v1 from var1, v2 from var2, and v3 from var3
# `fn` runs on every device `v1` is on, `v2` and `v3` will be there too.
distribution_strategy.update(v1, fn, v2, v3)
```
Args:
      colocate_with_variable: A variable created in `self.scope()`. Variables created
while in the returned context manager will be on the same set of
devices as `colocate_with_variable`.
Returns:
A context manager.
"""
def create_colocated_variable(next_creator, *args, **kwargs):
_require_distribution_strategy_scope(self)
kwargs["use_resource"] = True
kwargs["colocate_with"] = colocate_with_variable
return next_creator(*args, **kwargs)
_require_distribution_strategy_scope(self)
return variable_scope.variable_creator_scope(create_colocated_variable)
def _call_dataset_fn(self, dataset_fn):
result = dataset_fn()
if not isinstance(result, dataset_ops.Dataset):
raise ValueError(
"dataset_fn() must return a tf.data.Dataset when using a "
"DistributionStrategy.")
return result
# TODO(josh11b): `PerDeviceDataset` currently only implements a few methods of
# Dataset API such as make_one_shot_iterator and make_initializable_iterator.
# Extend to implement more functionality of datasets.
def distribute_dataset(self, dataset_fn):
"""Return a `dataset` split across all towers.
    Suitable for providing input to `call_for_each_tower()` by creating an
iterator:
```
def dataset_fn():
return tf.data.Dataset.from_tensors([[1.]]).repeat()
with distribution_strategy.scope():
distributed_dataset = distribution_strategy.distribute_dataset(dataset_fn)
iterator = distributed_dataset.make_one_shot_iterator()
tower_results = distribution_strategy.call_for_each_tower(
tower_fn, iterator.get_next())
```
Args:
dataset_fn: A function that returns a `tf.data.Dataset`.
Returns:
A `PerDeviceDataset` that will produce data for each tower.
"""
raise NotImplementedError("must be implemented in descendants")
def broadcast(self, tensor, destinations=None):
"""Mirror a tensor on one device to all worker devices.
Args:
tensor: A Tensor value to broadcast.
destinations: An optional mirrored variable, device string, or
list of device strings, specifying the destination devices
to copy `tensor` to. Defaults to `self.worker_devices`.
Returns:
A value mirrored to `destinations` devices.
"""
# TODO(josh11b): More docstring
_require_cross_tower_context(self)
return self._broadcast(tensor, destinations)
def _broadcast(self, tensor, destinations):
raise NotImplementedError("must be implemented in descendants")
def call_for_each_tower(self, fn, *args, **kwargs):
"""Run `fn` once per tower.
`fn` may call `tf.get_tower_context()` to access methods such as
`tower_id()` and `merge_call()`.
`merge_call()` is used to communicate between the towers and
    re-enter the cross-tower context. Each tower pauses its execution when
    it encounters a `merge_call()`; once all towers have reached it,
    `merge_fn` is executed in the cross-tower context. Its results are then
    unwrapped and handed back to each tower's `merge_call()`, and execution
    resumes until `fn` completes or encounters another `merge_call()`. Example:
```python
# Called once in "cross-tower" context.
def merge_fn(distribution, three_plus_tower_id):
# sum the values across towers
return sum(distribution.unwrap(three_plus_tower_id))
# Called once per tower in `distribution`, in a "tower" context.
def fn(three):
tower_ctx = tf.get_tower_context()
v = three + tower_ctx.tower_id
# Computes the sum of the `v` values across all towers.
s = tower_ctx.merge_call(merge_fn, v)
return s + v
with distribution.scope():
# in "cross-tower" context
...
merged_results = distribution.call_for_each_tower(fn, 3)
# merged_results has the values from every tower execution of `fn`.
print(distribution.unwrap(merged_results)) # Prints a list
```
Args:
fn: function to run (will be run once per tower).
*args: positional arguments for `fn`
**kwargs: keyword arguments for `fn`.
`"run_concurrently"`: Boolean indicating whether executions of `fn`
can be run concurrently (under eager execution only), defaults to
`True`.
Returns:
Merged return value of `fn` across all towers.
"""
_require_cross_tower_context(self)
return self._call_for_each_tower(fn, *args, **kwargs)
def _call_for_each_tower(self, fn, *args, **kwargs):
raise NotImplementedError("must be implemented in descendants")
def reduce(self, method_string, value, destinations=None):
"""Combine (via e.g. sum or mean) values across towers.
Args:
method_string: A string indicating how to combine values, either
"sum" or "mean".
value: A per-device value with one value per tower.
destinations: An optional mirrored variable, a device string,
list of device strings. The return value will be copied to all
destination devices (or all the devices where the mirrored
variable resides). If `None` or unspecified, the destinations
will match the devices `value` resides on.
Returns:
A value mirrored to `destinations`.
"""
# TODO(josh11b): More docstring
# TODO(josh11b): Return an unwrapped value if colocate_with is a
# single device.
_require_cross_tower_context(self)
assert method_string in ("sum", "mean")
return self._reduce(method_string, value, destinations)
def _reduce(self, method_string, value, destinations):
raise NotImplementedError("must be implemented in descendants")
def batch_reduce(self, method_string, value_destination_pairs):
"""Combine multiple `reduce` calls into one for faster execution.
Args:
method_string: A string indicating how to combine values, either
"sum" or "mean".
value_destination_pairs: A sequence of (value, destinations)
pairs. See `reduce()` for a description.
Returns:
A list of mirrored values, one per pair in `value_destination_pairs`.
"""
# TODO(josh11b): More docstring
_require_cross_tower_context(self)
assert method_string in ("sum", "mean")
return self._batch_reduce(method_string, value_destination_pairs)
def _batch_reduce(self, method_string, value_destination_pairs):
return [self.reduce(method_string, t, destinations=v)
for t, v in value_destination_pairs]
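# Illustrative sketch (hypothetical names): `batch_reduce()` is typically
# used to sum gradients for many variables in one call:
#
#   grads_and_vars = [(g0, v0), (g1, v1)]          # gradients with locality T
#   reduced = d.batch_reduce("sum", grads_and_vars)
#   # reduced[i] is mirrored with locality V(v_i), ready for d.update(v_i).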
def update(self, var, fn, *args, **kwargs):
"""Run `fn` to update `var` using inputs mirrored to the same devices.
If `var` is mirrored across multiple devices, then this implements
logic like:
```
results = {}
for device, v in var:
with tf.device(device):
# *args and **kwargs will be unwrapped if they are mirrored.
results[device] = fn(v, *args, **kwargs)
return merged(results)
```
    Otherwise this returns `fn(var, *args, **kwargs)` colocated with `var`.
Neither *args nor **kwargs may contain per-device values.
If they contain mirrored values, they will be unwrapped before
calling `fn`.
Args:
var: Variable, possibly mirrored to multiple devices, to operate on.
fn: Function to call. Should take the variable as the first argument.
*args: Additional positional arguments to pass to `fn()`.
**kwargs: Keyword arguments to pass to `fn()`.
Returns:
Merged return value of `fn` across all towers.
"""
_require_cross_tower_context(self)
return self._update(var, fn, *args, **kwargs)
def _update(self, var, fn, *args, **kwargs):
raise NotImplementedError("must be implemented in descendants")
def update_non_slot(self, colocate_with, fn, *args, **kwargs):
"""Runs `fn(*args, **kwargs)` on `colocate_with` devices.
Args:
colocate_with: The return value of `non_slot_devices()`.
fn: Function to execute.
*args: Positional arguments to pass to `fn()`.
**kwargs: Keyword arguments to pass to `fn()`.
Returns:
Return value of `fn`, possibly merged across devices.
"""
_require_cross_tower_context(self)
return self._update_non_slot(colocate_with, fn, *args, **kwargs)
def _update_non_slot(self, colocate_with, fn, *args, **kwargs):
raise NotImplementedError("must be implemented in descendants")
def fetch(self, val, destination="/device:CPU:0", fn=lambda x: x):
"""Return a copy of `val` or `fn(val)` on `destination`.
This is useful for getting a mirrored value onto a device. It
will attempt to avoid a copy by checking if the value is already
on the destination device.
TODO(josh11b): Switch to `read_var`.
Args:
val: Value (which may be mirrored) to copy.
destination: A device string to copy the value to.
fn: An optional function to apply to the value on the source
device, before copying.
Returns:
A `Tensor` on `destination`.
"""
_require_cross_tower_context(self)
assert isinstance(destination, six.string_types)
destination = device_util.resolve(destination)
return self._fetch(val, destination, fn)
def _fetch(self, val, destination, fn):
raise NotImplementedError("must be implemented in descendants")
def unwrap(self, value):
"""Returns the list of all per-device values contained in `value`.
Args:
value: A value returned by `call_for_each_tower()` or a variable
created in `scope()`.
Returns:
A list of values contained in `value`. If `value` represents a single
      value, this returns `[value]`.
"""
_require_cross_tower_context(self)
return self._unwrap(value)
def _unwrap(self, distributed_value):
raise NotImplementedError("must be implemented in descendants")
def group(self, value, name=None):
"""Shortcut for `tf.group(distribution.unwrap(value))`."""
value = nest.flatten(self.unwrap(value))
if len(value) != 1 or name is not None:
return control_flow_ops.group(value, name=name)
# Special handling for the common case of one op.
v, = value
if hasattr(v, "op"):
v = v.op
return v
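# Example sketch (assumes a strategy `d` and a tower function `tower_fn`):
#
#   tower_ops = d.call_for_each_tower(tower_fn, inputs)
#   train_op = d.group(tower_ops)  # like tf.group(d.unwrap(tower_ops))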
@property
def is_single_tower(self):
"""Returns whether there is a single tower or multiple.
Returns:
A boolean. If `True`, `call_for_each_tower(fn)` will only call `fn` once.
If `False`, `call_for_each_tower(fn)` may call `fn` multiple times.
"""
raise NotImplementedError("must be implemented in descendants")
@property
def num_towers(self):
"""Returns number of towers, for purposes of averaging across towers."""
raise NotImplementedError("must be implemented in descendants")
@property
def worker_devices(self):
"""Returns the list of devices used to run `call_for_each_tower()` calls."""
# TODO(josh11b): More docstring
raise NotImplementedError("must be implemented in descendants")
@property
def parameter_devices(self):
"""Returns the list of devices used for variable and `update` placement."""
# TODO(josh11b): More docstring
raise NotImplementedError("must be implemented in descendants")
def non_slot_devices(self, var_list):
"""Device(s) for non-slot variables.
Create variables on these devices in a
`with colocate_vars_with(non_slot_devices(...)):` block.
Update those using `update_non_slot()`.
Args:
var_list: The list of variables being optimized, needed with the
default `DistributionStrategy`.
"""
raise NotImplementedError("must be implemented in descendants")
@property
def worker_device_index(self):
"""An object mapping worker device to an id.
This might be passed as an argument to `call_for_each_tower()`, as in:
```
with distribution_strategy.scope():
def fn(device_id):
# device_id is an integer. `fn` is being executed on device:
# distribution_strategy.worker_devices[device_id].
distribution_strategy.call_for_each_tower(
fn, distribution_strategy.worker_device_index)
```
Returns:
An index object, or the integer 0 if there is only a single tower.
"""
_require_cross_tower_context(self)
return self._worker_device_index()
def _worker_device_index(self):
raise NotImplementedError("must be implemented in descendants")
def configure(self, session_config=None):
"""Find the best configuration given a tensorflow session config."""
del session_config
# A note about the difference between the context managers
# `TowerContext` (defined here) and `_CurrentDistributionContext`
# (defined above) used by `DistributionStrategy.scope()`:
#
# * a TowerContext is only present during a `call_for_each_tower()`
# call (except during a `merge_call()` call) and in such a scope it
# will be returned by calls to `get_tower_context()`. Implementers of new
# DistributionStrategy descendants will frequently also need to
# define a descendant of TowerContext, and are responsible for
# entering and exiting this context.
#
# * DistributionStrategy.scope() sets up a variable_creator scope that
# changes variable creation calls (e.g. to make mirrored
# variables). This is intended as an outer scope that users enter once
# around their model creation and graph definition. There is no
# anticipated need to define descendants of _CurrentDistributionContext.
# It sets the current DistributionStrategy for purposes of
# `get_distribution_strategy()` and `has_distribution_strategy()`
# and switches the thread mode to a "cross-tower context".
class TowerContext(object):
"""DistributionStrategy API inside a `call_for_each_tower()` call."""
def __init__(self, distribution_strategy, tower_id):
self._distribution_strategy = distribution_strategy
self._thread_context = _InTowerThreadMode(self)
self._tower_id = tower_id
def __enter__(self):
_push_per_thread_mode(self._thread_context)
def __exit__(self, exception_type, exception_value, traceback):
_pop_per_thread_mode()
def merge_call(self, merge_fn, *args, **kwargs):
"""Merge args across towers and run `merge_fn` in a cross-tower context.
This allows communication and coordination when there are multiple calls
to a model function triggered by a call to
`distribution.call_for_each_tower(model_fn, ...)`.
    See `MirroredStrategy.call_for_each_tower()` for an explanation.
Otherwise, this is equivalent to:
```
distribution = get_distribution_strategy()
with cross-tower-context(distribution):
return merge_fn(distribution, *args, **kwargs)
```
Args:
merge_fn: function that joins arguments from threads that are given as
PerDevice. It accepts `DistributionStrategy` object as the first
argument.
*args: positional per-thread arguments for `merge_fn`
**kwargs: keyword per-thread arguments for `merge_fn`.
Returns:
The return value of `merge_fn`, except for `PerDevice` values which are
unpacked.
"""
require_tower_context(self)
return self._merge_call(merge_fn, *args, **kwargs)
def _merge_call(self, merge_fn, *args, **kwargs):
"""Default implementation for single tower."""
    _push_per_thread_mode(  # thread-local stack, so this is safe even with multiple threads
_CrossTowerThreadMode(self._distribution_strategy))
try:
return merge_fn(self._distribution_strategy, *args, **kwargs)
finally:
_pop_per_thread_mode()
def tower_local_var_scope(self, reduce_method):
"""Alias for distribution_strategy.tower_local_var_scope()."""
return self._distribution_strategy.tower_local_var_scope(reduce_method)
@property
def is_single_tower(self):
"""Returns whether there is a single tower or multiple."""
require_tower_context(self)
return self._distribution_strategy.is_single_tower
@property
def num_towers(self):
"""Returns number of towers, for purposes of averaging across towers."""
return self._distribution_strategy.num_towers
@property
def tower_id(self):
"""Which tower is being defined, a number from 0 to `num_towers - 1`."""
require_tower_context(self)
return self._tower_id
@property
def distribution_strategy(self):
"""The current `DistributionStrategy` object."""
return self._distribution_strategy
@property
def device(self):
"""The device this tower is to be executed on, as a string."""
require_tower_context(self)
return device_util.current()
# TODO(josh11b): Implement `start_all_reduce(method, t)` that returns
# a function returning the result of reducing `t` across all
# towers. Most likely can be implemented in terms of `merge_call()`
# and `batch_reduce()`.
# ------------------------------------------------------------------------------
class _DefaultDistributionStrategy(DistributionStrategy):
"""Default `DistributionStrategy` if none is explicitly selected."""
def scope(self):
"""Context manager setting a variable creator and `self` as current."""
if has_distribution_strategy():
raise RuntimeError("Must not nest DistributionStrategy scopes.")
def creator(next_creator, *args, **kwargs):
_require_distribution_strategy_scope(self)
kwargs.pop("tower_local_reduce_method", None)
return next_creator(*args, **kwargs)
return _CurrentDistributionContext(
self, variable_scope.variable_creator_scope(creator))
def tower_local_var_scope(self, reduce_method):
"""Does not set to resource variables."""
def create_tower_local_variable(next_creator, *args, **kwargs):
_require_distribution_strategy_scope(self)
kwargs["trainable"] = False
return next_creator(*args, **kwargs)
_require_distribution_strategy_scope(self)
return variable_scope.variable_creator_scope(create_tower_local_variable)
def colocate_vars_with(self, colocate_with_variable):
"""Does not require `self.scope`."""
_require_distribution_strategy_scope(self)
return ops.colocate_with(colocate_with_variable)
def distribute_dataset(self, dataset_fn):
return self._call_dataset_fn(dataset_fn)
def _broadcast(self, tensor, destinations):
if destinations is None:
return tensor
else:
raise NotImplementedError("TODO")
def _call_for_each_tower(self, fn, *args, **kwargs):
# We don't run `fn` in multiple threads in _DefaultDistributionStrategy.
kwargs.pop("run_concurrently", None)
with TowerContext(self, tower_id=0):
return fn(*args, **kwargs)
def _reduce(self, method_string, value, destinations):
# TODO(josh11b): Use destinations?
del method_string, destinations
return value
def _update(self, var, fn, *args, **kwargs):
# TODO(josh11b): Figure out what we should be passing to UpdateContext()
# once that value is used for something.
with ops.colocate_with(var), UpdateContext(var):
return fn(var, *args, **kwargs)
def _update_non_slot(self, colocate_with, fn, *args, **kwargs):
# TODO(josh11b): Figure out what we should be passing to UpdateContext()
# once that value is used for something.
with ops.colocate_with(colocate_with), UpdateContext(colocate_with):
return fn(*args, **kwargs)
def read_var(self, tower_local_var):
return array_ops.identity(tower_local_var)
def _fetch(self, var, destination, fn):
with ops.colocate_with(var):
var = fn(var)
with ops.device(destination):
return array_ops.identity(var)
def _unwrap(self, distributed_value):
return [distributed_value]
@property
def is_single_tower(self):
return True
@property
def num_towers(self):
return 1
@property
def worker_devices(self):
raise RuntimeError(
"worker_devices() method unsupported by _DefaultDistributionStrategy.")
@property
def parameter_devices(self):
raise RuntimeError("parameter_devices() method unsupported by "
"_DefaultDistributionStrategy.")
def non_slot_devices(self, var_list):
return min(var_list, key=lambda x: x.name)
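# Note: picking min() by variable name makes the colocation target
# deterministic for a given `var_list`.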
def _worker_device_index(self):
raise RuntimeError("worker_device_index() method unsupported by "
"_DefaultDistributionStrategy.")
# ------------------------------------------------------------------------------
# Common operations
def increment_var(v, amount=1):
"""`v += amount`, distributed-aware version."""
def update(vu):
if isinstance(vu, resource_variable_ops.ResourceVariable):
return vu.assign_add(amount, read_value=False)
else:
return state_ops.assign_add(vu, amount)
def merge_fn(dist, vm):
return dist.group(dist.update(vm, update))
tower_context = get_tower_context()
return tower_context.merge_call(merge_fn, v)
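# Example sketch (works with or without a distribution strategy, since a
# default TowerContext always exists):
#
#   global_step = tf.train.get_or_create_global_step()
#   inc_op = increment_var(global_step)  # distributed-aware `+= 1`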
# ------------------------------------------------------------------------------
# Singletons
_default_distribution_strategy = _DefaultDistributionStrategy()
_default_tower_context = TowerContext(
_default_distribution_strategy, tower_id=0)
_default_tower_mode = _DefaultTowerThreadMode()
# ------------------------------------------------------------------------------
# We haven't yet implemented deserialization for DistributedVariables.
# So here we catch any attempts to deserialize variables
# when using distribution strategies.
# pylint: disable=protected-access
_original_from_proto = resource_variable_ops._from_proto_fn
def _from_proto_fn(v, import_scope=None):
if has_distribution_strategy():
raise NotImplementedError(
"Deserialization of variables is not yet supported when using"
"distributed strategies.")
else:
return _original_from_proto(v, import_scope=import_scope)
resource_variable_ops._from_proto_fn = _from_proto_fn
# pylint: enable=protected-access
| [
"[email protected]"
] | |
e402fa1c2ce544c37f3d20d570e1726d78557508 | d4412fbe37540e2c4cbe59ed6503d3661ccb7d9c | /tests/test_analyzer/test_subclasses/test_flop_tensor.py | 4e9c9852649ba593d37e20f9a9a414fd2a2a04f4 | [
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | permissive | hpcaitech/ColossalAI | a082ed08a3807b53c49d1f86835b9808590d9042 | c7b60f75470f067d1342705708810a660eabd684 | refs/heads/main | 2023-09-01T04:13:13.834565 | 2023-08-30T15:07:21 | 2023-08-30T15:07:21 | 422,274,596 | 32,044 | 4,084 | Apache-2.0 | 2023-09-14T15:19:54 | 2021-10-28T16:19:44 | Python | UTF-8 | Python | false | false | 1,925 | py | import pytest
import torch
import torch.nn.functional as F
import torchvision.models as tm
from packaging import version
from colossalai.testing import clear_cache_before_run, parameterize
from tests.test_analyzer.test_fx.zoo import tm_models, tmm_models
try:
from colossalai._analyzer._subclasses import MetaTensorMode, flop_count
except:
pass
@pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
@pytest.mark.parametrize('m', tm_models + tmm_models)
def test_flop_count_module(m):
x = torch.rand(2, 3, 224, 224)
with MetaTensorMode(): # save time for testing
module = m()
rs_fwd, rs_bwd = flop_count(module, x, verbose=True)
assert rs_fwd > 0, f'fwd flop count of {m.__name__} is {rs_fwd}'
assert rs_bwd > 0, f'bwd flop count of {m.__name__} is {rs_bwd}'
odd_cases = [
(F.relu, (torch.rand(2, 3, 224, 224, requires_grad=True),), {
'inplace': True
}),
(F.max_pool2d, (torch.rand(2, 3, 224, 224, requires_grad=True),), {
'kernel_size': 3,
'stride': 2,
'padding': 1,
'dilation': 2
}),
(torch.where, (torch.rand(2, 3, 224, 224) > 0.5, torch.rand(2, 3, 224, 224, requires_grad=True),
torch.rand(2, 3, 224, 224, requires_grad=True)), {}),
]
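# The cases above exercise paths where FLOP counting is easy to get wrong:
# in-place activations, pooling with stride/dilation/padding, and ops with
# multiple differentiable inputs (torch.where).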
@pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 12')
@pytest.mark.parametrize('func, args, kwargs', odd_cases)
def test_flop_count_function(func, args, kwargs):
rs_fwd, rs_bwd = flop_count(func, *args, **kwargs, verbose=True)
assert rs_fwd > 0, f'fwd flop count of {func.__name__} is {rs_fwd}'
assert rs_bwd > 0, f'bwd flop count of {func.__name__} is {rs_bwd}'
if __name__ == '__main__':
test_flop_count_module(tm.resnet18)
test_flop_count_function(F.relu, (torch.rand(2, 3, 224, 224, requires_grad=True),), {'inplace': True})
| [
"[email protected]"
] | |
310a3e2b195bd96ba7d266f470ce7bf23891162a | 5210993914691c70076be979aa5c57c33d5d3bc4 | /Programming101-3/Week_1/The_Final_Round/unique_words_count.py | daf32bc0de62796c448ac4f7695c67a3dea43851 | [] | no_license | presian/HackBulgaria | d29f84ab7edc85a4d8dfbf055def7d0be783539e | 8bc95bb31daeb1f5a313d25b928f505013f5f0b0 | refs/heads/master | 2021-01-10T04:38:05.759005 | 2015-10-15T07:05:21 | 2015-10-15T07:05:21 | 36,889,139 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | def unique_words_count(arr):
    return len(set(arr))
def main():
print(unique_words_count(["apple", "banana", "apple", "pie"]))
print(unique_words_count(["python", "python", "python", "ruby"]))
print(unique_words_count(["HELLO!"] * 10))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
3332366bee34f0fc8461908bc0793a908fe55b86 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/purview/azext_purview/manual/_help.py | 0508662c698375e3bf665656ff64d7a2955da878 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 5,143 | py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.help_files import helps
helps['purview'] = '''
type: group
short-summary: Manage Purview
'''
helps['purview account'] = """
type: group
short-summary: Manage account with purview
"""
helps['purview account list'] = """
type: command
short-summary: "List accounts in ResourceGroup And List accounts in Subscription."
examples:
- name: Accounts_ListByResourceGroup
text: |-
az purview account list --resource-group "SampleResourceGroup"
- name: Accounts_ListBySubscription
text: |-
az purview account list
"""
helps['purview account show'] = """
type: command
short-summary: "Get an account."
examples:
- name: Accounts_Get
text: |-
az purview account show --name "account1" --resource-group "SampleResourceGroup"
"""
helps['purview account create'] = """
type: command
short-summary: "Create an account."
examples:
- name: Accounts_CreateOrUpdate
text: |-
az purview account create --location "WestUS2" --managed-group-name "custom-rgname" \
--name "account1" --resource-group "SampleResourceGroup"
"""
helps['purview account update'] = """
type: command
short-summary: "Updates an account."
examples:
- name: Accounts_Update
text: |-
az purview account update --name "account1" --tags newTag="New tag value." --resource-group \
"SampleResourceGroup"
"""
helps['purview account delete'] = """
type: command
short-summary: "Deletes an account resource."
examples:
- name: Accounts_Delete
text: |-
az purview account delete --name "account1" --resource-group "SampleResourceGroup"
"""
helps['purview account add-root-collection-admin'] = """
type: command
short-summary: "Add the administrator for root collection associated with this account."
examples:
- name: Accounts_AddRootCollectionAdmin
text: |-
az purview account add-root-collection-admin --name "account1" --object-id \
"7e8de0e7-2bfc-4e1f-9659-2a5785e4356f" --resource-group "SampleResourceGroup"
"""
helps['purview account list-key'] = """
type: command
short-summary: "List the authorization keys associated with this account."
examples:
- name: Accounts_ListKeys
text: |-
az purview account list-key --name "account1" --resource-group "SampleResourceGroup"
"""
helps['purview account wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the purview account is met.
examples:
- name: Pause executing next line of CLI script until the purview account is successfully created.
text: |-
az purview account wait --name "account1" --resource-group "SampleResourceGroup" --created
- name: Pause executing next line of CLI script until the purview account is successfully updated.
text: |-
az purview account wait --name "account1" --resource-group "SampleResourceGroup" --updated
- name: Pause executing next line of CLI script until the purview account is successfully deleted.
text: |-
az purview account wait --name "account1" --resource-group "SampleResourceGroup" --deleted
"""
helps['purview default-account'] = """
type: group
short-summary: Manage default account with purview
"""
helps['purview default-account show'] = """
type: command
short-summary: "Get the default account for the scope."
examples:
- name: DefaultAccounts_Get
text: |-
az purview default-account show --scope "12345678-1234-1234-12345678abc" --scope-tenant-id \
"12345678-1234-1234-12345678abc" --scope-type "Tenant"
"""
helps['purview default-account remove'] = """
type: command
short-summary: "Removes the default account from the scope."
examples:
- name: DefaultAccounts_Remove
text: |-
az purview default-account remove --scope "12345678-1234-1234-12345678abc" --scope-tenant-id \
"12345678-1234-1234-12345678abc" --scope-type "Tenant"
"""
helps['purview default-account set'] = """
type: command
short-summary: "Sets the default account for the scope."
examples:
- name: DefaultAccounts_Set
text: |-
az purview default-account set --account-name "myDefaultAccount" --resource-group "rg-1" --scope \
"12345678-1234-1234-12345678abc" --scope-tenant-id "12345678-1234-1234-12345678abc" --scope-type "Tenant" \
--subscription-id "12345678-1234-1234-12345678aaa"
"""
| [
"[email protected]"
] | |
4553cecf9a964d9436652c40173f996ef96764d9 | 3b981dfc835d36eb9bb86e4dbb0b1e332285d5cf | /nkcomments/tests.py | a18b4126c658b2f908cb0544717e797b4646133b | [] | no_license | richraines/nuortenideat | d9ad5ff33e4231c7f9960b9e1a54be16395173a2 | 033f63575c52ce118f0deba1168afca743de6c26 | refs/heads/master | 2020-09-01T01:39:39.137935 | 2016-10-31T14:24:59 | 2016-10-31T14:24:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,945 | py | # coding=utf-8
from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from account.factories import UserFactory, DEFAULT_PASSWORD
from content.factories import IdeaFactory
from content.models import Idea
from nuka.test.testcases import TestCase
from .factories import CustomCommentFactory
from .models import CustomComment
"""
# TODO: Login works, FIXME.
class DeleteTest(TestCase):
def setUp(self):
self.idea = IdeaFactory()
self.idea_content_type = ContentType.objects.get_for_model(Idea)
self.group_admin = Group.objects.get(name=GROUP_NAME_ADMINS)
self.group_moderator = Group.objects.get(name=GROUP_NAME_MODERATORS)
self.user = UserFactory(settings__first_name="Matti",
settings__last_name="Meikäläinen")
def test_own(self):
self.user.groups.clear()
self.user.groups.add(self.group_moderator)
login = self.client.login(username=self.user.username, password=DEFAULT_PASSWORD)
self.assertNotEqual(login, False)
comment = CustomComment.objects.create(
content_type=self.idea_content_type,
object_pk=self.idea.pk,
user=self.user,
comment="Some comment text",
site_id=1
)
resp = self.client.post("/fi/ideat/poista_kommentti/{0}/".format(comment.pk),
follow=True)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, "nkcomments/comment_list_item_deleted.html")
self.assertContains(resp, "Kommentti poistettu.")
self.assertNotContains(resp, comment.comment)
with self.assertRaises(ObjectDoesNotExist):
CustomComment.objects.get(pk=comment.pk)
def test_as_moderator(self):
pass
def test_unauthorized(self):
pass
"""
class WriteCommentTest(TestCase):
def setUp(self):
pass
def manual_set_up(self, public=True, login=False):
if public:
status = Idea.STATUS_PUBLISHED
visibility = Idea.VISIBILITY_PUBLIC
else:
status = Idea.STATUS_DRAFT
visibility = Idea.VISIBILITY_DRAFT
self.user = UserFactory()
self.idea = IdeaFactory(
creator=self.user,
status=status,
visibility=visibility,
)
if login:
self.user = self.idea.creator
self.client.login(username=self.user.username,
password=DEFAULT_PASSWORD)
def test_comment_block_visibility_public_idea(self):
self.manual_set_up()
resp = self.client.get('/fi/ideat/{}'.format(self.idea.pk), follow=True)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'content/idea_detail.html')
self.assertContains(resp, '<article id="comments"')
self.assertContains(resp, '<h4>Kommentit (0)</h4>')
def test_comment_block_pvisibility_not_public_idea(self):
self.manual_set_up(public=False, login=True)
resp = self.client.get('/fi/ideat/{}'.format(self.idea.pk), follow=True)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'content/idea_detail.html')
self.assertNotContains(resp, '<div class="well" id="comments">')
def test_write_comment(self):
self.manual_set_up(login=True)
comment = CustomCommentFactory(
content_type=ContentType.objects.get_for_model(self.idea),
object_pk=self.idea.pk
)
comment_cmp = CustomComment.objects.first()
self.assertIsNotNone(comment_cmp)
self.assertEqual(comment_cmp, comment)
resp = self.client.get('/fi/ideat/{}'.format(self.idea.pk), follow=True)
self.assertContains(resp, comment.comment)
def test_comment_block_necessary_elements(self):
self.manual_set_up(login=True)
CustomCommentFactory(
content_type=ContentType.objects.get_for_model(self.idea),
object_pk=self.idea.pk,
user_id=self.user.pk
)
resp = self.client.get('/fi/ideat/{}'.format(self.idea.pk), follow=True)
self.assertNotContains(resp, '<div id="id_name_wrap"')
self.assertContains(resp, 'title="Poista kommentti"')
def test_comment_block_necessary_elements_anonymous(self):
self.manual_set_up()
CustomCommentFactory(
content_type=ContentType.objects.get_for_model(self.idea),
object_pk=self.idea.pk,
)
resp = self.client.get('/fi/ideat/{}'.format(self.idea.pk), follow=True)
self.assertNotContains(resp, '<input id="id_name" name="name" type="hidden">')
self.assertContains(resp, '<div id="id_name_wrap"')
self.assertContains(resp, '<div id="id_comment_wrap"')
self.assertNotContains(resp, 'title="Poista kommentti"')
| [
"[email protected]"
] | |
b5502ff8609f59db993800579ab69261e4b48e43 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02887/s396715066.py | 5b052e1ae14ff4d5ca6061b10c85c914d5c652a3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | n = int(input())
s = input()
ans = 0
i = 0
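# Count maximal runs of equal characters; each run contributes 1 to the answer.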
while i < n:
ans += 1
while i + 1 < n and s[i] == s[i + 1]:
i += 1
i += 1
print(ans)
| [
"[email protected]"
] | |
7cacc1bab1526fbce224cf19ba5f0e8b7d08435c | 8b0cf90774310723282c525a491d0df5ccdbaddc | /blog3/apps/articles/urls.py | dcfde7c9c4fe4f3e8d0bd47fc031969a95517c81 | [] | no_license | LIMr1209/django-practive | 13d0b1a02761010643405b8233bc2b1c1ebf8622 | deb01dac652cda7ef7117d3ecef3546092cef97c | refs/heads/master | 2020-03-24T06:16:58.839185 | 2018-08-19T06:25:45 | 2018-08-19T06:25:45 | 142,522,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | """blog3 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from .views import article_detail, comment_add, comment_delete, article_add, love_add
urlpatterns = [
url(r'^article_detail/(\d+)$', article_detail, name='article_detail'),
url(r'^comment_add/(\d+)$', comment_add, name='comment_add'),
url(r'^comment_delete/(\d+)$', comment_delete, name='comment_delete'),
url(r'^article_add/$', article_add, name='article_add'),
url(r'^love_add/$', love_add, name='love_add'),
]
| [
"[email protected]"
] | |
0b2cf2939ef3f48228585318267019e47b06095b | 6527b66fd08d9e7f833973adf421faccd8b765f5 | /yuancloud/addons/hr_holidays/hr_department.py | 7d85f388aed16347ff5c1459fc3ae78817e442c2 | [] | no_license | cash2one/yuancloud | 9a41933514e57167afb70cb5daba7f352673fb4d | 5a4fd72991c846d5cb7c5082f6bdfef5b2bca572 | refs/heads/master | 2021-06-19T22:11:08.260079 | 2017-06-29T06:26:15 | 2017-06-29T06:26:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,789 | py | # -*- coding: utf-8 -*-
import datetime
from dateutil.relativedelta import relativedelta
from yuancloud import api, fields, models
from yuancloud.tools import DEFAULT_SERVER_DATETIME_FORMAT
class hr_department(models.Model):
_inherit = 'hr.department'
@api.multi
def _compute_leave_count(self):
Holiday = self.env['hr.holidays']
today_date = datetime.datetime.utcnow().date()
today_start = today_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT) # get the midnight of the current utc day
today_end = (today_date + relativedelta(hours=23, minutes=59, seconds=59)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
leave_data = Holiday.read_group(
[('department_id', 'in', self.ids),
('state', '=', 'confirm'), ('type', '=', 'remove')],
['department_id'], ['department_id'])
allocation_data = Holiday.read_group(
[('department_id', 'in', self.ids),
('state', '=', 'confirm'), ('type', '=', 'add')],
['department_id'], ['department_id'])
absence_data = Holiday.read_group(
[('department_id', 'in', self.ids), ('state', 'not in', ['cancel', 'refuse']),
('date_from', '<=', today_end), ('date_to', '>=', today_start), ('type', '=', 'remove')],
['department_id'], ['department_id'])
res_leave = dict((data['department_id'][0], data['department_id_count']) for data in leave_data)
res_allocation = dict((data['department_id'][0], data['department_id_count']) for data in allocation_data)
res_absence = dict((data['department_id'][0], data['department_id_count']) for data in absence_data)
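        # Shape note (editor's addition): read_group() returns one dict per
        # group, e.g. [{'department_id': (4, 'Sales'), 'department_id_count': 2}],
        # which the comprehensions above flatten into {department_id: count}.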
for department in self:
department.leave_to_approve_count = res_leave.get(department.id, 0)
department.allocation_to_approve_count = res_allocation.get(department.id, 0)
department.absence_of_today = res_absence.get(department.id, 0)
@api.multi
def _compute_total_employee(self):
emp_data = self.env['hr.employee'].read_group([('department_id', 'in', self.ids)], ['department_id'], ['department_id'])
result = dict((data['department_id'][0], data['department_id_count']) for data in emp_data)
for department in self:
department.total_employee = result.get(department.id, 0)
absence_of_today = fields.Integer(
compute='_compute_leave_count', string='Absence by Today')
leave_to_approve_count = fields.Integer(
compute='_compute_leave_count', string='Leave to Approve')
allocation_to_approve_count = fields.Integer(
compute='_compute_leave_count', string='Allocation to Approve')
total_employee = fields.Integer(
compute='_compute_total_employee', string='Total Employee')
| [
"[email protected]"
] | |
7c3c0c5a5449b05da0d3b2c0e20af4bdc954d868 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r10p1/Gen/DecFiles/options/13506401.py | 2ee175731dd094ef713916c24d807afb652a3e37 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,858 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/13506401.py generated: Wed, 25 Jan 2017 15:25:23
#
# Event Type: 13506401
#
# ASCII decay Descriptor: [B_s0 -> (tau+ -> pi+ pi- pi+ (pi0 -> gamma gamma) anti-nu_tau) (tau- -> pi+ pi- pi- (pi0 -> gamma gamma) nu_tau)]cc
#
from Configurables import Generation
Generation().EventType = 13506401
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bs_tautau,pipipipi0nu=DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 531,-531 ]
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 531
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 531,-531 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_531.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 13506401
| [
"[email protected]"
] | |
47f25718bc8fa8fde5832344631f295bcac6eb3e | 230b7714d61bbbc9a75dd9adc487706dffbf301e | /tools/binary_size/libsupersize/diff_test.py | 93ded263b681ff5749aee9d022c882345bbc8fc3 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.0-or-later",
"MIT",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.0-only",
"APSL-2.0",
"LicenseRef-scancode-unknown",
"Zlib"
] | permissive | byte4byte/cloudretro | efe4f8275f267e553ba82068c91ed801d02637a7 | 4d6e047d4726c1d3d1d119dfb55c8b0f29f6b39a | refs/heads/master | 2023-02-22T02:59:29.357795 | 2021-01-25T02:32:24 | 2021-01-25T02:32:24 | 197,294,750 | 1 | 2 | BSD-3-Clause | 2019-09-11T19:35:45 | 2019-07-17T01:48:48 | null | UTF-8 | Python | false | false | 7,313 | py | #!/usr/bin/env python
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import diff
import models
def _MakeSym(section, size, path, name=None):
if name is None:
# Trailing letter is important since diffing trims numbers.
name = '{}_{}A'.format(section[1:], size)
return models.Symbol(
section,
size,
full_name=name,
template_name=name,
name=name,
object_path=path)
def _SetName(symbol, full_name, name=None):
if name is None:
name = full_name
symbol.full_name = full_name
symbol.template_name = full_name
symbol.name = name
def _CreateSizeInfo(aliases=None):
section_sizes = {'.text': 100, '.bss': 40}
TEXT = models.SECTION_TEXT
symbols = [
_MakeSym(TEXT, 10, 'a'),
_MakeSym(TEXT, 20, 'a'),
_MakeSym(TEXT, 30, 'b'),
_MakeSym(TEXT, 40, 'b'),
_MakeSym(TEXT, 50, 'b'),
_MakeSym(TEXT, 60, ''),
]
if aliases:
for tup in aliases:
syms = symbols[tup[0]:tup[1]]
for sym in syms:
sym.aliases = syms
return models.SizeInfo(section_sizes, symbols)
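# (Editor's note: the `aliases` tuples passed to _CreateSizeInfo are
# (start, end) index ranges into the symbol list; every symbol inside a
# range shares one aliases list, modeling symbols that share an address.)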
class DiffTest(unittest.TestCase):
def testIdentity(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
self.assertEquals(0, d.raw_symbols.padding)
def testSimple_Add(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
size_info1.raw_symbols -= [size_info1.raw_symbols[0]]
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 1, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(10, d.raw_symbols.size)
self.assertEquals(0, d.raw_symbols.padding)
def testSimple_Delete(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols -= [size_info2.raw_symbols[0]]
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 0, 1), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(-10, d.raw_symbols.size)
self.assertEquals(0, d.raw_symbols.padding)
def testSimple_Change(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[0].size += 11
size_info2.raw_symbols[0].padding += 20
size_info2.raw_symbols[-1].size += 11
d = diff.Diff(size_info1, size_info2)
self.assertEquals((2, 1, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(22, d.raw_symbols.size)
self.assertEquals(20, d.raw_symbols.padding)
def testDontMatchAcrossSections(self):
size_info1 = _CreateSizeInfo()
size_info1.raw_symbols += [
_MakeSym(models.SECTION_TEXT, 11, 'asdf', name='Hello'),
]
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols += [
_MakeSym(models.SECTION_RODATA, 11, 'asdf', name='Hello'),
]
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testAliases_Remove(self):
size_info1 = _CreateSizeInfo(aliases=[(0, 3)])
size_info2 = _CreateSizeInfo(aliases=[(0, 2)])
d = diff.Diff(size_info1, size_info2)
# Aliases cause all sizes to change.
self.assertEquals((3, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testAliases_Add(self):
size_info1 = _CreateSizeInfo(aliases=[(0, 2)])
size_info2 = _CreateSizeInfo(aliases=[(0, 3)])
d = diff.Diff(size_info1, size_info2)
# Aliases cause all sizes to change.
self.assertEquals((3, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testAliases_ChangeGroup(self):
size_info1 = _CreateSizeInfo(aliases=[(0, 2), (2, 5)])
size_info2 = _CreateSizeInfo(aliases=[(0, 3), (3, 5)])
d = diff.Diff(size_info1, size_info2)
# Aliases cause all sizes to change.
self.assertEquals((4, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testStarSymbolNormalization(self):
size_info1 = _CreateSizeInfo()
_SetName(size_info1.raw_symbols[0], '* symbol gap 1 (end of section)')
size_info2 = _CreateSizeInfo()
_SetName(size_info2.raw_symbols[0], '* symbol gap 2 (end of section)')
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testNumberNormalization(self):
TEXT = models.SECTION_TEXT
size_info1 = _CreateSizeInfo()
size_info1.raw_symbols += [
_MakeSym(TEXT, 11, 'a', name='.L__unnamed_1193'),
_MakeSym(TEXT, 22, 'a', name='.L__unnamed_1194'),
_MakeSym(TEXT, 33, 'a', name='SingleCategoryPreferences$3#this$0'),
_MakeSym(TEXT, 44, 'a', name='.L.ref.tmp.2'),
]
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols += [
_MakeSym(TEXT, 11, 'a', name='.L__unnamed_2194'),
_MakeSym(TEXT, 22, 'a', name='.L__unnamed_2195'),
_MakeSym(TEXT, 33, 'a', name='SingleCategoryPreferences$9#this$009'),
_MakeSym(TEXT, 44, 'a', name='.L.ref.tmp.137'),
]
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testChangedParams(self):
size_info1 = _CreateSizeInfo()
size_info1.raw_symbols[0].full_name = 'Foo()'
size_info1.raw_symbols[0].name = 'Foo'
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[0].full_name = 'Foo(bool)'
size_info2.raw_symbols[0].name = 'Foo'
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testChangedPaths(self):
size_info1 = _CreateSizeInfo()
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[0].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 0, 0), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testChangedPaths_ChangedParams(self):
size_info1 = _CreateSizeInfo()
size_info1.raw_symbols[0].full_name = 'Foo()'
size_info1.raw_symbols[0].name = 'Foo'
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[0].full_name = 'Foo(bool)'
size_info2.raw_symbols[0].name = 'Foo'
size_info2.raw_symbols[0].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
def testChangedPaths_StringLiterals(self):
size_info1 = _CreateSizeInfo()
size_info1.raw_symbols[0].full_name = models.STRING_LITERAL_NAME
size_info2 = _CreateSizeInfo()
size_info2.raw_symbols[0].full_name = models.STRING_LITERAL_NAME
size_info2.raw_symbols[0].object_path = 'asdf'
d = diff.Diff(size_info1, size_info2)
self.assertEquals((0, 1, 1), d.raw_symbols.CountsByDiffStatus()[1:])
self.assertEquals(0, d.raw_symbols.size)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
09b6e86dc545020ba63dfe09f0a5961ed2def2ff | 028274f08da4c616ccc1362df390dcfe58131fc6 | /DS_Management_Tools/TaskController/EESMap/__init__.py | dad1405df121f6c10a6267c6a2520494f8e5c7f4 | [] | no_license | PyWilhelm/EDRIS_DS | 8a5430515bfc7e11abf846126f4fa2388ff59dd9 | cc0179495d8874ff5a95fd08d833388f434e1d87 | refs/heads/master | 2021-01-17T00:43:01.560668 | 2016-07-23T10:50:09 | 2016-07-23T10:50:09 | 64,011,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,279 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
from TaskController.SPS.MetataskOverriderSPS import MetataskOverriderSPS
from TaskController.BaseClass.Controller import Controller
from TaskController.PISystem import PostProcessing
def start_controller(tid, input_data, session={}, controller=None, block=True, method='', sample=False):
_dir = os.path.dirname(os.path.abspath(__file__))
if method == '':
controller = Controller(priority=3) if controller is None else controller
with open(os.path.join(_dir, 'metataskEESMap.json')) as f:
metatask_data = json.load(f)
else:
raise Exception("Method " + method + " unknown")
session['controller'] = controller
# metatask_data = override(metatask_data, input_data, method)
result_future = controller.add_metatask(metatask_data)
if not block:
return result_future
else:
result = result_future.get()
result.save_as_sdf()
controller.stop()
return True
def override(metatask_temp, userinput, plot):
mto = MetataskOverriderSPS(metatask_temp)
return mto.override_all(userinput, plot)
if __name__ == "__main__":
input_data = dict()
result = start_controller(tid=100, input_data=input_data)
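    # Non-blocking usage sketch (editor's addition): with block=False the
    # call returns the result future and the caller owns controller shutdown
    # via the controller stored in the session dict.
    # session = {}
    # future = start_controller(tid=100, input_data={}, session=session, block=False)
    # result = future.get()
    # session['controller'].stop()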
| [
"[email protected]"
] | |
87b51dd3a3e8876ddd9a9460e1914569c7f0e8dc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_005/ch1_2020_03_02_19_35_48_382180.py | cfc6794f0f914f73b54f3ebeea232541950565aa | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | def calcula_valor_devido(c,i,n):
m=c*(1+n)**i
return m
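# Usage sketch (editor's addition): the exercise's compound-interest formula
# is M = C * (1 + rate) ** periods; note that in this signature the rate
# arrives as n and the number of periods as i.
# calcula_valor_devido(1000, 12, 0.01)  # -> ~1126.83 owed after 12 periods at 1%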
| [
"[email protected]"
] | |
db727ffa84eef94dfc2beca0189cb6f147dd7d5b | 065191d9e09ecda3966d96770d166371bcbba515 | /troposphere/helpers/meta.py | 0d96a8ed25a18bfb7ede095d1f80e67aa001fa12 | [
"MIT"
] | permissive | sabakaio/docker-registry | 2da96e14c1a7f1d50a7998e355b6980617ce0355 | 720a800e5f7f02ff1ec5d9b1d559a2dd6114f7f1 | refs/heads/master | 2020-04-10T13:30:10.923260 | 2016-12-13T16:24:26 | 2016-12-13T16:30:32 | 61,976,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,225 | py | from troposphere import Ref, Base64, Join
from troposphere import cloudformation as cf, ec2, autoscaling as au
def docker():
return cf.InitConfig(
'Docker',
packages={'yum': {'docker': []}},
commands={
'docker_user': {
'command': 'usermod -aG docker ec2-user'
},
'install_compose': {
'command': 'pip install docker-compose'
},
},
services={
'sysvinit': {
'docker': {
'enabled': True,
'ensureRunning': True
}
}
}
)
def htpasswd(filename):
return cf.InitConfig(
'htpasswd',
files={
filename: {
'content': 'user:password_hash',
'mode': '000660',
'owner': 'root',
'group': 'docker',
},
}
)
def docker_compose(name, compose_yml):
name = name.lower()
compose_file = '/opt/{n}/docker-compose.yml'.format(n=name)
init = cf.InitConfig(
'Compose' + name.title(),
files={
compose_file: {
'content': compose_yml,
'mode': '000664',
'owner': 'root',
'group': 'docker',
},
},
commands={
'up': {
'command': '/usr/local/bin/docker-compose -f {f} up -d'.format(f=compose_file)
},
}
)
return init, compose_file
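# Example (editor's sketch): a minimal compose-file string that could be fed
# to docker_compose(); the registry image and port below are illustrative
# placeholders, not values taken from this repository.
EXAMPLE_REGISTRY_YML = '''\
version: "2"
services:
  registry:
    image: registry:2
    ports:
      - "5000:5000"
'''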
def certbot(domain, email, conf_dir='/opt/certs/', copy_to=None,
pre_hook=None, post_hook=None):
script_name = '/opt/certbot-auto'
commands = {
'1_get_cert': {
'command': Join(' ', [
script_name, 'certonly',
'--config-dir', conf_dir,
'--standalone --debug --agree-tos --non-interactive',
'-d', domain,
'--email', email,
])
}
}
renew_script = [
        '#!/bin/bash -e\n',
'unset PYTHON_INSTALL_LAYOUT\n',
script_name + ' renew --config-dir ' + conf_dir,
' --debug --non-interactive',
]
if pre_hook:
renew_script.append(' --pre-hook="' + pre_hook + '"')
copy_certs = None
if copy_to:
copy_certs = Join('', [
'cp ' + conf_dir.rstrip('/') + '/live/', domain, '/*.pem ', copy_to
])
commands.update({
'2_certs_dest': {
'command': 'mkdir -p ' + copy_to,
},
'3_copy_certs': {
'command': copy_certs,
},
})
# Copy certificated and/or run a custop post-hook
if copy_certs or post_hook:
hook = [' --post-hook="']
if copy_certs:
hook.append(copy_certs)
if post_hook:
hook.extend([' && ', post_hook])
hook.append('"')
        # extend, not append: nesting the hook list inside the Join
        # parts list would serialize incorrectly in the template
        renew_script.extend(hook)
return cf.InitConfig(
'Certbot',
files={
script_name: {
'source': 'https://dl.eff.org/certbot-auto',
'mode': '000755',
'owner': 'root',
'group': 'root',
},
'/etc/cron.daily/certbot_renew': {
'content': Join('', renew_script),
'mode': '000755',
'owner': 'root',
'group': 'root',
},
},
commands=commands
)
def add_init(target, *configs):
assert isinstance(target, (ec2.Instance, au.LaunchConfiguration))
params = Join('', [
'export CFN_PARAMS=\'',
' --region ', Ref('AWS::Region'),
' --stack ', Ref('AWS::StackName'),
' --resource ' + target.title + '\'',
])
target.UserData = Base64(Join('\n', [
'#!/bin/bash -xe',
'yum update -y',
params,
'/opt/aws/bin/cfn-init -v -c default $CFN_PARAMS',
'/opt/aws/bin/cfn-signal -e 0 $CFN_PARAMS'
]))
configs = [callable(c) and c() or c for c in configs]
target.Metadata = cf.Init(
cf.InitConfigSets(default=[c.title for c in configs]),
**{c.title: c for c in configs})
return target
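# Usage sketch (editor's addition): wiring the init configs onto a launch
# configuration. The AMI id, instance type and domain below are placeholders,
# not values from this repository.
# lc = au.LaunchConfiguration('Registry', ImageId='ami-xxxxxxxx',
#                             InstanceType='t2.micro')
# compose_cfg, _ = docker_compose('registry', EXAMPLE_REGISTRY_YML)
# add_init(lc, docker, compose_cfg,
#          certbot('registry.example.com', '[email protected]',
#                  copy_to='/opt/certs/live'))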
| [
"[email protected]"
] | |
0db997ac3b71508a9edbdb5eb3f6387a318c9d51 | 2b8e7eadb920e96c75697880a9c5461aa8e0c5ed | /nabu/neuralnetworks/components/__init__.py | a5db902e5340481df5b5861383c42f9afb4ac971 | [
"MIT"
] | permissive | ishandutta2007/nabu | fb963ed3cd34ee340014e0c1e77927c838bba0ad | 313018a46f68cec1d4a7eb15b8b1cf68111a959c | refs/heads/master | 2020-04-03T04:57:57.911576 | 2018-12-14T11:02:52 | 2018-12-14T11:02:52 | 155,029,958 | 0 | 0 | MIT | 2018-12-06T18:20:12 | 2018-10-28T02:59:31 | Python | UTF-8 | Python | false | false | 146 | py | '''@package components
contains tensorflow components'''
from . import hooks, ops, rnn_cell, layer, beam_search_decoder, constraints,\
attention
| [
"[email protected]"
] | |
f1d495e819fdd0ca2ee0c788757b45bdda03b7f4 | 6759d9c1b836224ce5a154f7857157640866d60b | /manage.py | 17148e60b41874beacdfe3f2efcf55a0a2f74974 | [] | no_license | riyadhswe/Covid19 | 323d9361d98c53c45394d074b21d9539c1ee077d | 1428b91ad7e90da891dc4148c888eafe43153ffa | refs/heads/master | 2023-03-25T06:10:08.497082 | 2021-03-24T15:18:54 | 2021-03-24T15:18:54 | 338,812,298 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Covid19.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
de800c8cf61c350770e839c16c0dd15804e29491 | 691793de7d07b17918d076b319281c706f7275c0 | /signing_today_client/api/devices_api.py | 306cc2ab1266e7621a8ba2d8c55389a9a5667599 | [
"MIT"
] | permissive | signingtoday/signingtoday-sdk-python | 1ddfae5340690c80760c500436631d4a8ff9c87f | ed267279622fb59f2ad8fa289157fc9cdf9d8a5b | refs/heads/master | 2020-12-03T15:32:35.755222 | 2020-03-24T08:27:11 | 2020-03-24T08:27:11 | 231,372,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,528 | py | # coding: utf-8
"""
Signing Today Web
*Signing Today* is the perfect Digital Signature Gateway. Whenever in Your workflow You need to add one or more Digital Signatures to Your document, *Signing Today* is the right choice. You prepare Your documents, *Signing Today* takes care of all the rest: send invitations (`signature tickets`) to signers, collects their signatures, send You back the signed document. Integrating *Signing Today* in Your existing applications is very easy. Just follow these API specifications and get inspired by the many examples presented hereafter. # noqa: E501
The version of the OpenAPI document: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from signing_today_client.api_client import ApiClient
from signing_today_client.exceptions import (
ApiTypeError,
ApiValueError
)
class DevicesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def device_authorization_delete(self, device_id, **kwargs): # noqa: E501
"""Clear a trusted device # noqa: E501
        This API allows deregistering a _deviceId_ of a trusted device. It also deletes any notification push-token associated with the trusted device.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_authorization_delete(device_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str device_id: The _deviceId_ to deregister (required)
:param str user_id: Select the objects relative to the user specified by the parameter. If not specified will be used the id of the current authenticated user
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.device_authorization_delete_with_http_info(device_id, **kwargs) # noqa: E501
def device_authorization_delete_with_http_info(self, device_id, **kwargs): # noqa: E501
"""Clear a trusted device # noqa: E501
        This API allows deregistering a _deviceId_ of a trusted device. It also deletes any notification push-token associated with the trusted device.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_authorization_delete_with_http_info(device_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str device_id: The _deviceId_ to deregister (required)
:param str user_id: Select the objects relative to the user specified by the parameter. If not specified will be used the id of the current authenticated user
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['device_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method device_authorization_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'device_id' is set
if self.api_client.client_side_validation and ('device_id' not in local_var_params or # noqa: E501
local_var_params['device_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `device_id` when calling `device_authorization_delete`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'device_id' in local_var_params and local_var_params['device_id'] is not None: # noqa: E501
query_params.append(('deviceId', local_var_params['device_id'])) # noqa: E501
if 'user_id' in local_var_params and local_var_params['user_id'] is not None: # noqa: E501
query_params.append(('userId', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/device/authorization', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def device_authorization_get(self, **kwargs): # noqa: E501
"""Retrieve a challenge for authorizing a new trusted device # noqa: E501
        This API allows retrieving a challenge in order to authorize a new trusted device. - If requested as image/png, the challenge is returned encoded as a QR-Code image. - Each invocation of the endpoint invalidates any previous challenge. - The challenge lasts 10 minutes.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_authorization_get(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: DeviceAuthorizationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.device_authorization_get_with_http_info(**kwargs) # noqa: E501
def device_authorization_get_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve a challenge for authorizing a new trusted device # noqa: E501
        This API allows retrieving a challenge in order to authorize a new trusted device. - If requested as image/png, the challenge is returned encoded as a QR-Code image. - Each invocation of the endpoint invalidates any previous challenge. - The challenge lasts 10 minutes.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_authorization_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(DeviceAuthorizationResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method device_authorization_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'image/png', '*/*']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/device/authorization', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceAuthorizationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def device_authorization_post(self, inline_object7, **kwargs): # noqa: E501
"""Register a new trusted device # noqa: E501
        This API allows registering a new trusted device. If the device is already present, it returns the current associated Token and updates the name.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_authorization_post(inline_object7, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param InlineObject7 inline_object7: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.device_authorization_post_with_http_info(inline_object7, **kwargs) # noqa: E501
def device_authorization_post_with_http_info(self, inline_object7, **kwargs): # noqa: E501
"""Register a new trusted device # noqa: E501
        This API allows registering a new trusted device. If the device is already present, it returns the current associated Token and updates the name.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.device_authorization_post_with_http_info(inline_object7, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param InlineObject7 inline_object7: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[str], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['inline_object7'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method device_authorization_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'inline_object7' is set
if self.api_client.client_side_validation and ('inline_object7' not in local_var_params or # noqa: E501
local_var_params['inline_object7'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `inline_object7` when calling `device_authorization_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'inline_object7' in local_var_params:
body_params = local_var_params['inline_object7']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', '*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/device/authorization', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[str]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def devices_get(self, **kwargs): # noqa: E501
"""Get the list of trusted devices # noqa: E501
        The API allows enumerating all the devices of a user.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.devices_get(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str user_id: Select the objects relative to the user specified by the parameter. If not specified will be used the id of the current authenticated user
:param int top: A number of results to return. Applied after **$skip**
:param int skip: An offset into the collection of results
:param bool count: If true, the server includes the count of all the items in the response
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: TrustedDevicesGetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.devices_get_with_http_info(**kwargs) # noqa: E501
def devices_get_with_http_info(self, **kwargs): # noqa: E501
"""Get the list of trusted devices # noqa: E501
        The API allows enumerating all the devices of a user.  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.devices_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str user_id: Select the objects relative to the user specified by the parameter. If not specified will be used the id of the current authenticated user
:param int top: A number of results to return. Applied after **$skip**
:param int skip: An offset into the collection of results
:param bool count: If true, the server includes the count of all the items in the response
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(TrustedDevicesGetResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['user_id', 'top', 'skip', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'user_id' in local_var_params and local_var_params['user_id'] is not None: # noqa: E501
query_params.append(('userId', local_var_params['user_id'])) # noqa: E501
if 'top' in local_var_params and local_var_params['top'] is not None: # noqa: E501
query_params.append(('$top', local_var_params['top'])) # noqa: E501
if 'skip' in local_var_params and local_var_params['skip'] is not None: # noqa: E501
query_params.append(('$skip', local_var_params['skip'])) # noqa: E501
if 'count' in local_var_params and local_var_params['count'] is not None: # noqa: E501
query_params.append(('$count', local_var_params['count'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', '*/*']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TrustedDevicesGetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"[email protected]"
] | |
17e7706740822fc7a717ced217bcdb8f7770aece | e0db13bc8113fb7b383d0a8d09e09686668e2fb4 | /Data-Structures-and-Algorithms/Big-O-Notations.py | bdb3512128e595a28ba98d081d46d63601c547a3 | [] | no_license | nirmalnishant645/Python-Programming | dd66acd665af8933fa14b19d01300deb1eccbb7d | 70e97e6f35f125acfde3b38e1baa794a357b8a77 | refs/heads/master | 2022-06-03T12:41:56.483000 | 2022-05-12T10:54:59 | 2022-05-12T10:54:59 | 151,211,590 | 3 | 5 | null | 2020-02-12T05:48:59 | 2018-10-02T06:44:54 | HTML | UTF-8 | Python | false | false | 516 | py | from math import log
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('bmh')
# Set up runtime comparisons
n = np.linspace(1,10,1000)
labels = ['Constant','Logarithmic','Linear','Log Linear','Quadratic','Cubic','Exponential']
big_o = [np.ones(n.shape),np.log(n),n,n*np.log(n),n**2,n**3,2**n]
# Plot setup
plt.figure(figsize=(12,10))
plt.ylim(0,50)
for i in range(len(big_o)):
plt.plot(n,big_o[i],label = labels[i])
plt.legend(loc=0)
plt.ylabel('Relative Runtime')
plt.xlabel('n')
plt.show()
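# Optional follow-up (editor's sketch): a log-scaled y-axis separates the
# polynomial curves from the exponential one far more clearly.
# plt.figure(figsize=(12, 10))
# for curve, label in zip(big_o, labels):
#     plt.semilogy(n, curve, label=label)
# plt.legend(loc=0)
# plt.show()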
| [
"[email protected]"
] | |
849332128a1f20ddebff18d4d7d8abafa1b75de5 | fdcb2cdee4d5b398eed4eefc830213234e3e83a5 | /01_MIT_Learning/00_midterm/P4.py | 8b625a8f64d442a8f758fb47ab1f6f2a52939a51 | [] | no_license | daftstar/learn_python | be1bbfd8d7ea6b9be8407a30ca47baa7075c0d4b | 4e8727154a24c7a1d05361a559a997c8d076480d | refs/heads/master | 2021-01-20T08:53:29.817701 | 2018-01-15T22:21:02 | 2018-01-15T22:21:02 | 90,194,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,705 | py | # PROBLEM 4
# Write a function is_triangular that meets the
# specification below.
"""
A triangular number is a number obtained by the continued
summation of integers starting from 1.
For example, 1, 1+2, 1+2+3, 1+2+3+4, etc.,
corresponding to 1, 3, 6, 10, etc., are triangular numbers.
"""
def is_triangular(k):
"""
k, a positive integer
returns True if k is triangular and False if not
"""
if k == 1:
return True
    # create a list of triangular numbers to check against
    # (a dict/set would give O(1) membership lookups, but the list stays
    # short because the loop below breaks early)
check = []
# initialize triangle for calculation
triangle = 0
# create a list of triangular numbers from 0, to k
# Clean this up later since initial values in [check]
# can hog up memory when k is very large.
for i in range(0, k):
triangle += i
check.append(triangle)
# no need to continue calculating triangular numbers if the
# latest number in the list is greater than what we're
# checking for.
if check[-1] > k:
break
# for debugging / visualization purposes:
# print (check)
# print (check[-3:])
# check if k is within the last 3 values of
# generated triangular values. No need to check if
# k is in the earlier values since k will be > than
# those values.
return (k in (check[-3:]))
print (is_triangular(994755))
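# Closed-form check (editor's addition): k is triangular exactly when
# 8*k + 1 is a perfect square, since k = m*(m+1)/2 solves to
# m = (sqrt(8*k + 1) - 1) / 2. This avoids building the list entirely.
def is_triangular_fast(k):
    root = int(round((8 * k + 1) ** 0.5))
    return root * root == 8 * k + 1
print (is_triangular_fast(994755))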
# ORIGINAL FUNCTION:
# PROBLEM 4
# Write a function is_triangular that meets the
# specification below.
# """
# A triangular number is a number obtained by the continued
# summation of integers starting from 1.
# For example, 1, 1+2, 1+2+3, 1+2+3+4, etc.,
# corresponding to 1, 3, 6, 10, etc., are triangular numbers.
# """
# def is_triangular(k):
# """
# k, a positive integer
# returns True if k is triangular and False if not
# """
# if k == 1:
# return True
# # create a list to check values against.
# # for consistent lookups, create a dictionary
# check = []
# # initialize triangle for calculation
# triangle = 0
# # create a list of triangular numbers from 0, to k
# for i in range(0, k):
# triangle += i
# check.append(triangle)
# # no need to continue calculating triangular numbers if the
# # latest number in the list is greater than what we're
# # checking for.
# if check[-1] > k:
# break
# # for debugging / visualization purposes:
# print (check)
# # check if k is in the list of generated triangular values.
# # print (check in range[])
# if k in check:
# return True
# else:
# return False
# print (is_triangular(1891))
| [
"[email protected]"
] | |
8b9c8c09a91ab100a96e677c5af956a33a8ee87e | 1eddc123709611cf2ddccb0b7d48b722b2a09a5d | /plugins/modules/fmgr_pm_pblock_obj.py | 1e54ba66c85c05275915542e8b4b909def1cc7c8 | [] | no_license | fortinet-ansible-dev/ansible-galaxy-fortimanager-collection | bfb2014a72007358b491bb1d27c0fa3191ec62a8 | 63b65abce410ed4d6b76e3dd1dcf7a4341cc173d | refs/heads/main | 2023-07-09T04:33:38.304263 | 2023-06-21T21:11:57 | 2023-06-21T21:11:57 | 242,629,431 | 10 | 18 | null | 2022-12-16T15:57:16 | 2020-02-24T02:28:03 | Python | UTF-8 | Python | false | false | 30,166 | py | #!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2023 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_pm_pblock_obj
short_description: no description
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.1.6"
author:
- Xinwei Du (@dux-fortinet)
- Xing Li (@lix-fortinet)
- Jie Xue (@JieX19)
- Link Zheng (@chillancezen)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
    - Running in workspace locking mode is supported in this FortiManager module; the top
      level parameters workspace_locking_adom and workspace_locking_timeout control this behavior.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
    - Normally, running one module can fail when a non-zero rc is returned. You can also override
      the conditions to fail or succeed with parameters rc_failed and rc_succeeded.
options:
access_token:
description: The token to access FortiManager without using username and password.
required: false
type: str
bypass_validation:
        description: Only set to True when the module schema differs from the FortiManager API structure; the module then continues to execute without validating parameters.
required: false
type: bool
default: false
enable_log:
description: Enable/Disable logging for task.
required: false
type: bool
default: false
forticloud_access_token:
description: Authenticate Ansible client with forticloud API access token.
required: false
type: str
proposed_method:
description: The overridden method for the underlying Json RPC request.
required: false
type: str
choices:
- update
- set
- add
rc_succeeded:
        description: The rc codes list with which the conditions to succeed will be overridden.
type: list
required: false
elements: int
rc_failed:
        description: The rc codes list with which the conditions to fail will be overridden.
type: list
required: false
elements: int
state:
description: The directive to create, update or delete an object.
type: str
required: true
choices:
- present
- absent
workspace_locking_adom:
description: The adom to lock for FortiManager running in workspace mode, the value can be global and others including root.
required: false
type: str
workspace_locking_timeout:
description: The maximum time in seconds to wait for other user to release the workspace lock.
required: false
type: int
default: 300
adom:
description: the parameter (adom) in requested url
type: str
required: true
pkg_path:
description: the parameter (pkg_path) in requested url
type: str
required: true
pm_pblock_obj:
description: the top level parameters set
required: false
type: dict
suboptions:
name:
type: str
description: no description
oid:
type: int
description: no description
package settings:
description: no description
type: dict
required: false
suboptions:
central-nat:
type: str
description: no description
choices:
- 'disable'
- 'enable'
consolidated-firewall-mode:
type: str
description: no description
choices:
- 'disable'
- 'enable'
fwpolicy-implicit-log:
type: str
description: no description
choices:
- 'disable'
- 'enable'
fwpolicy6-implicit-log:
type: str
description: no description
choices:
- 'disable'
- 'enable'
inspection-mode:
type: str
description: no description
choices:
- 'proxy'
- 'flow'
ngfw-mode:
type: str
description: no description
choices:
- 'profile-based'
- 'policy-based'
policy-offload-level:
type: str
description: no description
choices:
- 'disable'
- 'default'
- 'dos-offload'
- 'full-offload'
ssl-ssh-profile:
type: str
description: no description
type:
type: str
description: no description
choices:
- 'pblock'
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: no description
fmgr_pm_pblock_obj:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
adom: <your own value>
pkg_path: <your own value>
state: <value in [present, absent]>
pm_pblock_obj:
name: <value of string>
oid: <value of integer>
package settings:
central-nat: <value in [disable, enable]>
consolidated-firewall-mode: <value in [disable, enable]>
fwpolicy-implicit-log: <value in [disable, enable]>
fwpolicy6-implicit-log: <value in [disable, enable]>
inspection-mode: <value in [proxy, flow]>
ngfw-mode: <value in [profile-based, policy-based]>
policy-offload-level: <value in [disable, default, dos-offload, ...]>
ssl-ssh-profile: <value of string>
type: <value in [pblock]>
'''
RETURN = '''
meta:
description: The result of the request.
type: dict
returned: always
contains:
request_url:
description: The full url requested.
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request.
returned: always
type: int
sample: 0
response_data:
description: The api response.
type: list
returned: always
response_message:
description: The descriptive message of the api response.
type: str
returned: always
sample: OK.
system_information:
description: The information of the target system.
type: dict
returned: always
rc:
description: The status the request.
type: int
returned: always
sample: 0
version_check_warning:
description: Warning if the parameters used in the playbook are not supported by the current FortiManager version.
type: list
returned: complex
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
jrpc_urls = [
'/pm/pblock/adom/{adom}/{pkg_path}'
]
perobject_jrpc_urls = [
'/pm/pblock/adom/{adom}/{pkg_path}'
]
url_params = ['adom', 'pkg_path']
module_primary_key = 'name'
module_arg_spec = {
'access_token': {
'type': 'str',
'required': False,
'no_log': True
},
'bypass_validation': {
'type': 'bool',
'required': False,
'default': False
},
'enable_log': {
'type': 'bool',
'required': False,
'default': False
},
'forticloud_access_token': {
'type': 'str',
'required': False,
'no_log': True
},
'proposed_method': {
'type': 'str',
'required': False,
'choices': [
'set',
'update',
'add'
]
},
'rc_succeeded': {
'required': False,
'type': 'list',
'elements': 'int'
},
'rc_failed': {
'required': False,
'type': 'list',
'elements': 'int'
},
'state': {
'type': 'str',
'required': True,
'choices': [
'present',
'absent'
]
},
'workspace_locking_adom': {
'type': 'str',
'required': False
},
'workspace_locking_timeout': {
'type': 'int',
'required': False,
'default': 300
},
'adom': {
'required': True,
'type': 'str'
},
'pkg_path': {
'required': True,
'type': 'str'
},
'pm_pblock_obj': {
'required': False,
'type': 'dict',
'revision': {
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.0': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'options': {
'name': {
'required': True,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'type': 'str'
},
'oid': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'type': 'int'
},
'package settings': {
'required': False,
'type': 'dict',
'options': {
'central-nat': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'consolidated-firewall-mode': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'fwpolicy-implicit-log': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'fwpolicy6-implicit-log': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'inspection-mode': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'proxy',
'flow'
],
'type': 'str'
},
'ngfw-mode': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'profile-based',
'policy-based'
],
'type': 'str'
},
'policy-offload-level': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'disable',
'default',
'dos-offload',
'full-offload'
],
'type': 'str'
},
'ssl-ssh-profile': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'type': 'str'
}
}
},
'type': {
'required': False,
'revision': {
'7.2.0': True,
'6.2.0': False,
'6.2.2': False,
'6.2.6': False,
'6.2.7': False,
'6.2.8': False,
'6.2.9': False,
'6.2.10': False,
'6.4.1': False,
'6.4.3': False,
'6.4.4': False,
'6.4.6': False,
'6.4.7': False,
'6.4.8': False,
'6.4.9': False,
'6.4.10': False,
'6.4.11': False,
'7.0.1': False,
'7.0.2': False,
'7.0.3': True,
'7.0.4': True,
'7.0.5': True,
'7.0.6': True,
'7.0.7': True,
'7.2.1': True,
'7.2.2': True,
'7.4.0': True
},
'choices': [
'pblock'
],
'type': 'str'
}
}
}
}
params_validation_blob = []
check_galaxy_version(module_arg_spec)
module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'pm_pblock_obj'),
supports_check_mode=False)
fmgr = None
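    # A socket path is only present when Ansible runs the module over the
    # persistent httpapi connection, which NAPIManager requires.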
if module._socket_path:
connection = Connection(module._socket_path)
        connection.set_option('access_token', module.params.get('access_token'))
        connection.set_option('enable_log', module.params.get('enable_log', False))
        connection.set_option('forticloud_access_token', module.params.get('forticloud_access_token'))
fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
fmgr.validate_parameters(params_validation_blob)
fmgr.process_curd(argument_specs=module_arg_spec)
else:
module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
module.exit_json(meta=module.params)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
1c93cded721204389639d3cabae1b862853c3694 | 53a3c329e6f0860e840babf19e12452e94c30e39 | /scripts/gain_stats.py | ca1ee1fea01ba14ec2b2c538c37a6fc3a66bc37b | [] | no_license | galabing/qd | 60c2602f0deaae808b519c796b24063839766071 | 9ece034832167de958ec8a56da081ab75916684d | refs/heads/master | 2020-12-26T04:38:26.187729 | 2015-06-15T21:20:20 | 2015-06-15T21:20:20 | 34,972,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,291 | py | #!/usr/bin/python
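# Summarize per-ticker gain files: bucket every gain by year, then print each
# year's count, min/max/average, and selected percentiles of the distribution.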
ticker_file = '/Users/lnyang/lab/qd/data/tickers'
gain_dir = '/Users/lnyang/lab/qd/data/tmp/gains10/12'
min_date = '2004-01-01'
max_date = '9999-99-99'
with open(ticker_file, 'r') as fp:
tickers = sorted(fp.read().splitlines())
print 'processing %d tickers' % len(tickers)
stats = dict() # y => [gain ...]
for ticker in tickers:
gain_file = '%s/%s' % (gain_dir, ticker)
with open(gain_file, 'r') as fp:
lines = fp.read().splitlines()
for line in lines:
date, gain = line.split('\t')
if date < min_date or date > max_date:
continue
y, m, d = date.split('-')
gain = float(gain)
if gain > 100:
print '!! %s %s: gain = %f' % (ticker, date, gain)
if y not in stats: stats[y] = []
stats[y].append(gain)
for y in sorted(stats.keys()):
gains = sorted(stats[y])
print '%s: %d data points, min/max/avg gain: %f / %f / %f' % (
y, len(gains), min(gains), max(gains), sum(gains)/len(gains))
print ' 1%%: %f, 10%%: %f, 25%%: %f, 50%%: %f, 75%%: %f, 90%%: %f, 99%%: %f' % (
gains[int(len(gains)*0.01)],
gains[int(len(gains)*0.1)],
gains[int(len(gains)*0.25)],
gains[int(len(gains)*0.5)],
gains[int(len(gains)*0.75)],
gains[int(len(gains)*0.9)],
gains[int(len(gains)*0.99)])
| [
"[email protected]"
] | |
f2ff6a97ff2a03d6a5c101fff002e306e7e6f9a0 | 65b69f075fd0e57d8409561087f2366f8a60bab3 | /rieapie/trickery.py | 2e18a5467a15a7884a202d8850e35b488e2bf501 | [
"MIT"
] | permissive | alisaifee/rieapie | d7e74adf8208012e00f81a5bd0a7d4232a2cde67 | a480c09f476867a259a2b1468f5c942897cd2d3d | refs/heads/master | 2023-08-10T04:07:56.319117 | 2013-12-01T02:28:41 | 2013-12-01T02:28:41 | 11,880,429 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,421 | py | import requests
import logging
import functools
import inspect
import json
GET = "GET"
PUT = "PUT"
POST = "POST"
DELETE = "DELETE"
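# Component models one segment of a REST URL path; attribute and item access
# chain Components together, so api.users[42].posts maps to <base_url>/users/42/posts.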
class Component(object):
def __init__(self, name, api_object, parent=None):
self.name = str(name)
self.parent = parent
self.api_object = api_object
def __getattribute__(self, key):
try:
return object.__getattribute__(self, key)
except AttributeError:
return Component(key, self.api_object, self)
def __full_path(self):
return "/".join([self.api_object.base_url, self.__path()])
def __path(self):
path = []
cur = self
while cur.parent:
path += [cur.name]
cur = cur.parent
path += [cur.name]
return "/".join(reversed(path))
def __repr__(self):
return self.__path().replace("/", ".")
def __call__(self, ext=""):
if ext:
return Component("%s.%s" % (self.name, ext), self.api_object,
self.parent)
return self
def __getitem__(self, key):
return Component(key, self.api_object, self)
def get(self, **kwargs):
url, params, _, headers = self.api_object.execute_pre_request(
GET, self.__full_path(), kwargs, None,
self.api_object.headers)
resp = self.api_object.session.get(url, params=params, headers=headers)
return self.api_object.execute_post_request(resp.status_code,
resp.text)
def delete(self, **kwargs):
url, params, _, headers = self.api_object.execute_pre_request(
DELETE, self.__full_path(), kwargs, None,
self.api_object.headers)
resp = self.api_object.session.delete(url, params=params,
headers=headers)
return self.api_object.execute_post_request(resp.status_code,
resp.text)
def create(self, **kwargs):
url, params, data, headers = self.api_object.execute_pre_request(
PUT, self.__full_path(), {}, kwargs,
self.api_object.headers)
resp = self.api_object.session.put(url, params=params, data=data,
headers=headers)
return self.api_object.execute_post_request(resp.status_code,
resp.text)
def update(self, **kwargs):
url, params, data, headers = self.api_object.execute_pre_request(
POST, self.__full_path(), {}, kwargs,
self.api_object.headers)
resp = self.api_object.session.post(url, params=params, data=data,
headers=headers)
return self.api_object.execute_post_request(resp.status_code,
resp.text)
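# The decorators below only tag functions; Api.__init__ discovers the tagged
# methods with inspect.getmembers and runs them as request/response hooks.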
def pre_request(fn):
fn.is_pre_request = True
@functools.wraps(fn)
def __inner(*args, **kwargs):
return fn(*args, **kwargs)
return __inner
def post_request(fn):
fn.is_post_request = True
@functools.wraps(fn)
def __inner(*args, **kwargs):
return fn(*args, **kwargs)
return __inner
class Api(object):
def __init__(self, base_url, request_headers={}, debug=False, pool_size=10,
connect_timeout=5, response_timeout=10):
self.base_url = base_url.rstrip("/")
self.headers = request_headers
if debug:
logging.basicConfig(level=logging.DEBUG)
self.pre_request_chain = []
self.post_request_chain = []
for name, method in inspect.getmembers(self, inspect.ismethod):
if hasattr(method, "is_pre_request"):
self.pre_request_chain.append(method)
if hasattr(method, "is_post_request"):
self.post_request_chain.append(method)
self.session = requests.Session()
adapter = requests.adapters.HTTPAdapter(pool_maxsize=pool_size,
max_retries=2)
self.session.mount("http://", adapter)
self.session.mount("https://", adapter)
self.root = Component("", self, None)
def __getattribute__(self, key):
try:
return object.__getattribute__(self, key)
except AttributeError:
return Component(key, self, None)
@pre_request
def default_pre_request(self, method, url, params, data, headers):
return url, params, data, headers
@post_request
def default_post_request(self, status, body):
return json.loads(body)
@post_request
def fallback_post_request(self, status, body):
return body
def execute_pre_request(self, method, url, params, data, headers):
for fn in self.pre_request_chain:
url, params, data, headers = fn(method, url, params, data, headers)
return url, params, data, headers
def execute_post_request(self, status, body):
last_error = None
num_errors = 0
for fn in self.post_request_chain:
try:
body = fn(status, body)
except Exception as e:
num_errors += 1
last_error = e
if num_errors == len(self.post_request_chain):
raise last_error
else:
return body
| [
"[email protected]"
] | |
e6f9ad93c38b7186f4e1bc7fbce807810f34015d | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Fisher/trend_PolyTrend/cycle_5/ar_/test_artificial_32_Fisher_PolyTrend_5__20.py | a843775b068feeaffba3bdedfbb078ee85d324f6 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 265 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 5, transform = "Fisher", sigma = 0.0, exog_count = 20, ar_order = 0); | [
"[email protected]"
] | |
9109e0a345c4b5dbe1c74ecc0b5f1c67bc8afc0a | 93a7db386dfa0ac0dc369cc7f4b974224c801d8d | /deploy/dot-product/scripts/main-47.py | d0fa56eb54e5acb7e99d402afe69b29b3ad86d95 | [] | no_license | lingxiao/good-great-combo | e051f20c89b7317a14ca5cee357bda7b095ce174 | 4d2691866bc21e2c542354ad3aae6f369eb86c87 | refs/heads/master | 2021-01-19T19:30:43.391759 | 2017-04-09T12:35:15 | 2017-04-09T12:35:15 | 83,699,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 923 | py | ############################################################
# Module : A series of measures on the graph for experiments
# Date : April 2nd, 2017
# Author : Xiao Ling
############################################################
import os
import numpy as np
from utils import *
from scripts import *
from app.config import PATH
############################################################
'''
paths
'''
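# Batch 47 of the word-pair dot-product jobs: score this batch's pairs with
# the big word2vec model and write the results under outputs/.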
batch = 47
_root = os.path.join(PATH['directories']['deploy'], 'dot-product')
_pair_dir = os.path.join(_root, 'pairs')
_output_dir = os.path.join(_root, 'outputs')
word_2_vec_big = PATH['inputs']['word2vec']
word_2_vec_sm = PATH['inputs']['word2vec-sm']
word_pair_path = os.path.join(_pair_dir , 'batch-' + str(batch) + '.txt')
out_path = os.path.join(_output_dir, 'batch-' + str(batch) + '.txt')
dot(word_2_vec_big, word_pair_path, _output_dir, refresh = True)
| [
"[email protected]"
] | |
a18fc841ab746e31eab0bde79ff593d39f6893cd | 508c5e01aa7dce530093d5796250eff8d74ba06c | /code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/utils/javascript/javascript_bundler.py | 6016adb05301df6dc69f51f5715640f911d2aec2 | [
"PostgreSQL",
"MIT"
] | permissive | jhkuang11/UniTrade | f220b0d84db06ff17626b3daa18d4cb8b72a5d3f | 5f68b853926e167936b58c8543b8f95ebd6f5211 | refs/heads/master | 2022-12-12T15:58:30.013516 | 2019-02-01T21:07:15 | 2019-02-01T21:07:15 | 166,479,655 | 0 | 0 | MIT | 2022-12-07T03:59:47 | 2019-01-18T22:19:45 | Python | UTF-8 | Python | false | false | 1,663 | py | ##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2017, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import os
from contextlib import contextmanager
from subprocess import call
from pgadmin.utils import u, fs_encoding, file_quote
# enum-like for tracking whether we have generated javascript: none, stale, or fresh
class JsState:
NONE = 0
OLD = 1
NEW = 2
class JavascriptBundler:
"""Builds Javascript bundle files by delegating to webpack"""
def __init__(self):
self.jsState = JsState.NONE
def bundle(self):
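        # Prefer a fresh webpack build; if the build can't run, fall back to
        # any previously generated bundle already on disk.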
try:
try_building_js()
self.jsState = JsState.NEW
except OSError:
webdir_path()
generatedJavascriptDir = os.path.join(webdir_path(), 'pgadmin', 'static', 'js', 'generated')
if os.path.exists(generatedJavascriptDir) and os.listdir(generatedJavascriptDir):
self.jsState = JsState.OLD
else:
self.jsState = JsState.NONE
def report(self):
return self.jsState
@contextmanager
def pushd(new_dir):
previous_dir = os.getcwd()
os.chdir(new_dir)
yield
os.chdir(previous_dir)
def webdir_path():
dirname = os.path.dirname
thisPath = os.path.realpath(u(__file__, fs_encoding))
return dirname(dirname(dirname(dirname(thisPath))))
def try_building_js():
with pushd(webdir_path()):
if call(['yarn', 'run', 'bundle:dev']) != 0:
raise OSError('Error executing bundling the application')
| [
"[email protected]"
] | |
a9139035aaa678ebf9365d927a8a6fff7b6545f3 | 66c3eb5a49b6b674e72ffeac00a221f16e224806 | /HC_At_Test/PO/pageobject/betgame_page.py | 4e4c4aa91e294bce27fe6559be1fd9845eeed4da | [] | no_license | fan966/LX_AT_TEST | 20ad5793ef9ab6fe9070d046935b90450321ff0b | 7e95a399140567ff601205f8d83babbe56279ab6 | refs/heads/master | 2022-12-31T15:11:04.972185 | 2020-10-23T11:58:46 | 2020-10-23T11:58:46 | 258,690,216 | 0 | 1 | null | 2020-04-26T07:00:43 | 2020-04-25T04:53:19 | Python | UTF-8 | Python | false | false | 952 | py | # -*-coding:utf-8-*-
from selenium.webdriver.common.by import By
class BetPageLocator(object):
"""
    Locators for elements shared across the betting pages.
"""
    # Betting countdown timer
hc_game_time = (By.XPATH, r'//ul[contains(@class ,"flip")]')
tty_game_time = (By.XPATH, r'//div[@class="alert-box"]')
    # "Sales stopped" popup
div_stop_selling = (By.ID, 'stopSellingPop')
    # Bonus slider drag handle, 0~135px (style="left: 0px;")
bonus_percen = (By.XPATH, r'//span[@class="ui-handle"]')
    # Slider track
pull_rod = (By.XPATH, r'//div[@class="ranger"]')
    # Game category menu
game_tyep_div = (By.XPATH, r'//div[contains(@class, "sidem_item")]//ul')
game_type_claer = (By.XPATH, r'//div[contains(@class, "sidem_item")]//a[@class="sidem_b clear"]')
    # Draw results link (the XPath matches the page's Chinese label)
run_lottery = (By.XPATH, r'//a[text()="开奖结果"]')
    # Draw period status
period_tip = (By.XPATH, r'//div[@id="PeriodInfo"]//*[@data-bind="text:periodTip"]')
| [
"[email protected]"
] | |
e4b4376cf120624cd187c64a050c710037607475 | 538fd58e4f7d0d094fd6c93ba1d23f78a781c270 | /689_max_sum_of_3_non_overlap_subarrays/test_solution.py | fc67ba8effb08041b9071ff7807d540305bb207a | [] | no_license | FluffyFu/Leetcode | 4633e9e91e493dfc01785fd379ab9f0788726ac1 | 5625e6396b746255f3343253c75447ead95879c7 | refs/heads/master | 2023-03-21T08:47:51.863360 | 2021-03-06T21:36:43 | 2021-03-06T21:36:43 | 295,880,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | from solution import max_non_overlap
import pudb
def test():
nums = [1, 2, 1, 2, 6, 7, 5, 1]
k = 2
res = max_non_overlap(nums, k)
assert [0, 3, 5] == res
| [
"[email protected]"
] | |
d4d7fc69db45b2bd9d71d42ba91520dc5d575626 | 7d07c037dbd2fbfce960c7a63debe1cb3d5f1a8a | /api/apps/predictions/tests/views/test_tide_windows.py | 3dbb0d868779e043bd779a4bbc7feb1045058611 | [] | no_license | sealevelresearch-jenkins/sea-level-api | 2fcbf309fa7388514ddf8bf9bd520f5681775939 | 382cf4d1b6981f4120d8add6d79a53493b911e24 | refs/heads/master | 2020-12-25T05:19:21.904701 | 2014-06-25T11:44:26 | 2014-06-25T11:44:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,838 | py | import datetime
import json
import pytz
from django.test import TestCase
from nose.tools import assert_equal, assert_in
from nose.plugins.skip import SkipTest
from api.apps.predictions.models import Prediction
from api.apps.locations.models import Location
class TestTideWindowsView(TestCase):
fixtures = [
'api/apps/locations/fixtures/two_locations.json',
'api/apps/predictions/fixtures/predictions_two_locations.json',
]
def test_that_tide_windows_url_lists_available_locations(self):
raise SkipTest("Not yet implemented.")
self.client.get('/predictions/tide-windows/')
def test_that_invalid_location_gives_a_json_404(self):
raise SkipTest("Not yet implemented.")
def test_that_no_start_and_end_parameter_temporary_redirects_to_now(self):
raise SkipTest("Not yet implemented.")
def test_that_missing_tide_level_param_gives_400_error(self):
response = self.client.get(
'/predictions/tide-windows/liverpool/'
'?start=2014-06-17T09:00:00Z'
'&end=2014-06-17T09:05:00Z')
data = json.loads(response.content)
assert_equal(400, response.status_code)
assert_equal(
{'detail': u'Missing required query parameter `tide_level`'},
data)
def test_that_envelope_has_tide_windows_field(self):
response = self.client.get(
'/predictions/tide-windows/liverpool/'
'?start=2014-06-17T00:00:00Z'
'&end=2014-06-18T00:00:00Z'
'&tide_level=10.7')
data = json.loads(response.content)
assert_in('tide_windows', data)
def test_that_tide_window_records_have_correct_structure(self):
response = self.client.get(
'/predictions/tide-windows/liverpool/'
'?start=2014-06-17T00:00:00Z'
'&end=2014-06-18T00:00:00Z'
'&tide_level=10.7')
data = json.loads(response.content)
tide_windows = data['tide_windows']
expected = {
'start': {
'datetime': '2014-06-17T09:01:00Z',
'tide_level': 10.8
},
'end': {
'datetime': '2014-06-17T09:02:00Z',
'tide_level': 10.9
},
'duration': {
'total_seconds': 120
}
}
assert_equal(expected, tide_windows[0])
class TestTideWindowsCalculationsView(TestCase):
fixtures = [
'api/apps/locations/fixtures/two_locations.json',
]
@classmethod
def setUp(cls):
cls.create_double_peaked_tide()
@classmethod
def create_double_peaked_tide(cls):
location = Location.objects.get(slug='liverpool')
cls.base_time = datetime.datetime(2014, 6, 1, 10, 00, tzinfo=pytz.UTC)
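        # One prediction per minute tracing a double-peaked tide: rise to a
        # 6.00 peak, dip to a 5.00 trough, then a second, lower 5.49 peak.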
for minute, level in [
(0, 4.50),
(1, 4.75),
(2, 5.00),
(3, 5.25),
(4, 5.50),
(5, 5.75),
(6, 6.00), # peak
(7, 5.60),
(8, 5.49),
(9, 5.25),
(10, 5.00), # trough
(11, 5.25),
(12, 5.49), # peak
(13, 5.25),
(14, 5.00),
(15, 4.75),
(16, 4.50)
]:
Prediction.objects.create(
location=location,
datetime=cls.base_time + datetime.timedelta(minutes=minute),
tide_level=level
)
def test_that_single_window_is_correctly_identified(self):
response = self.client.get(
'/predictions/tide-windows/liverpool/'
'?start=2014-06-01T10:00:00Z'
'&end=2014-06-02T11:00:00Z'
'&tide_level=5.5'
)
data = json.loads(response.content)
assert_equal([
{
'start': {
'datetime': '2014-06-01T10:04:00Z',
'tide_level': 5.50,
},
'end': {
'datetime': '2014-06-01T10:07:00Z',
'tide_level': 5.60,
},
'duration': {
'total_seconds': 240,
}
}],
data['tide_windows']
)
def test_that_double_window_is_correctly_identified(self):
response = self.client.get(
'/predictions/tide-windows/liverpool/'
'?start=2014-06-01T10:00:00Z'
'&end=2014-06-02T11:00:00Z'
'&tide_level=5.1'
)
data = json.loads(response.content)
assert_equal([
{
'start': {
'datetime': '2014-06-01T10:03:00Z',
'tide_level': 5.25,
},
'end': {
'datetime': '2014-06-01T10:09:00Z',
'tide_level': 5.25,
},
'duration': {
'total_seconds': 420,
}
},
{
'start': {
'datetime': '2014-06-01T10:11:00Z',
'tide_level': 5.25,
},
'end': {
'datetime': '2014-06-01T10:13:00Z',
'tide_level': 5.25,
},
'duration': {
'total_seconds': 180,
}
},
],
data['tide_windows']
)
def test_that_no_tidal_window_returned_if_tide_is_never_above_height(self):
response = self.client.get(
'/predictions/tide-windows/liverpool/'
'?start=2014-06-01T10:00:00Z'
'&end=2014-06-02T11:00:00Z'
'&tide_level=6.1'
)
data = json.loads(response.content)
assert_equal([], data['tide_windows'])
| [
"[email protected]"
] | |
1e7d8fb610baf2fe9c8bbfdb8ef02faabb5e813c | 72cbe74acc97c445553d4e4468de5b491a0e7af3 | /id3wr.py | 77b939ac785d0df645b0a90745b4952685cafe5c | [] | no_license | acadien/xrdAnalysis | e1989d2b917bcbf21a2e8cf7006082aa8fe071b0 | c05beab696d7ebd5dd80d6a4ce50810a3ee65682 | refs/heads/master | 2021-01-10T21:33:15.352168 | 2015-05-19T18:21:55 | 2015-05-19T18:21:55 | 35,900,206 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | #!/usr/bin/python
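# Tag MP3s in the current directory whose filenames look like
# "NN-artist-song_www.file": track, artist, and title come from the name and
# the album from the working directory's name.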
import os
import re
from ID3 import *
path='./'
dirlist=os.listdir(path)
pardir=os.getcwd()
album=re.search(r'(/[\w,\s]+)+',pardir)
album=album.group(1).lstrip('/')
for fname in dirlist:
try:
        m = re.search(r'(\d\d)-(\w+)-(\w+)_www\.file', fname)
        id3info = ID3(fname)
    except:
        continue
    if m is None:
        # Filename doesn't match the expected pattern; skip it.
        continue
    print id3info
    id3info['TRACKNUMBER'] = m.group(1)
artist = m.group(2)
id3info['ARTIST'] = re.sub('_',' ',artist).capitalize()
song = m.group(3)
    id3info['TITLE'] = re.sub('_', ' ', song).capitalize()  # ID3 stores the song name under TITLE
id3info['ALBUM']=album
#print track+artist+song
#convert='mp3info -f -t '+song+' -n '+track+' -a '+artist+' -l '+album+' '+fname
#os.system(convert)
| [
"[email protected]"
] | |
293f5b32b2b42456464676d82eb4d9157e70b078 | 53784d3746eccb6d8fca540be9087a12f3713d1c | /res/packages/scripts/scripts/common/Lib/ctypes/test/test_integers.py | 3167244d8935295055bdcead94ecc8b9cd4c6a32 | [] | no_license | webiumsk/WOT-0.9.17.1-CT | 736666d53cbd0da6745b970e90a8bac6ea80813d | d7c3cf340ae40318933e7205bf9a17c7e53bac52 | refs/heads/master | 2021-01-09T06:00:33.898009 | 2017-02-03T21:40:17 | 2017-02-03T21:40:17 | 80,870,824 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 428 | py | # 2017.02.03 21:57:48 Central Europe (Standard Time)
# Embedded file name: scripts/common/Lib/ctypes/test/test_integers.py
import unittest
if __name__ == '__main__':
unittest.main()
# okay decompyling c:\Users\PC\wotsources\files\originals\res\packages\scripts\scripts\common\Lib\ctypes\test\test_integers.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.02.03 21:57:48 Central Europe (Standard Time)
| [
"[email protected]"
] | |
a010e185bf8e52e2f22af54c3a9fdcb3c419e547 | b61dedf12868e2bc511b6693af1985911a13f336 | /src/logpipe/formats/pickle.py | 2dfea0f71f0be7e298f161b03f7014c150a64290 | [
"ISC"
] | permissive | vitorcarmovieira/django-logpipe | f9eebb6674b9ba180a63448c9d71ce2e87929f7c | 89d0543e341518f9ae49124c354e6a6c2e3f4150 | refs/heads/main | 2023-03-03T13:18:22.456270 | 2021-02-13T17:29:32 | 2021-02-13T17:29:32 | 326,679,534 | 1 | 1 | ISC | 2021-02-13T17:29:32 | 2021-01-04T12:39:30 | Python | UTF-8 | Python | false | false | 562 | py | from rest_framework import renderers, parsers
import pickle
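# A matched renderer/parser pair so Django REST Framework can write and read
# message payloads in Python's pickle format.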
class PickleRenderer(renderers.BaseRenderer):
media_type = 'application/python-pickle'
format = 'pickle'
charset = None
render_style = 'binary'
def render(self, data, media_type=None, renderer_context=None):
return pickle.dumps(data)
class PickleParser(parsers.BaseParser):
media_type = 'application/python-pickle'
def parse(self, stream, media_type=None, parser_context=None):
return pickle.load(stream)
__all__ = ['PickleRenderer', 'PickleParser']
| [
"[email protected]"
] | |
787ab2f450dbb132be7a57a5e36ccf3341ec5e94 | 6c9912ab5ff000cc9f489248de2f2687f61cac1a | /rrc/settings/prod.py | dcf462a3fa98c8ff0c7954e3e75f1577139baf02 | [
"MIT"
] | permissive | rocky-roll-call/rrc-backend | ed047457d4eae730168a109584c56389c4c01c09 | 02e8e11c3dab7661e48650e2e861a4a97788a4ce | refs/heads/master | 2020-04-28T14:26:24.623336 | 2019-12-30T04:11:20 | 2019-12-30T04:11:20 | 175,338,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | """
Production Settings
"""
import datetime
from decouple import config, Csv
from dj_database_url import parse as db_url
from rrc.settings.dev import *
DATABASES = {"default": config("DATABASE_URL", cast=db_url)}
# Debug stays off in production unless explicitly enabled via the environment.
DEBUG = config("DEBUG", default=False, cast=bool)
SECRET_KEY = config("SECRET_KEY")
ALLOWED_HOSTS = config("ALLOWED_HOSTS", cast=Csv())
JWT_AUTH["JWT_EXPIRATION_DELTA"] = datetime.timedelta(seconds=600)
| [
"[email protected]"
] | |
c6c775bb54a0e2e106a903677ca605c033ab439a | f95d2646f8428cceed98681f8ed2407d4f044941 | /day09/day09/exercise/myadd.py | a9178c6996c200cf9c3a17191158f06e47e2dcf1 | [] | no_license | q2806060/python-note | 014e1458dcfa896f2749c7ebce68b2bbe31a3bf8 | fbe107d668b44b78ae0094dbcc7e8ff8a4f8c983 | refs/heads/master | 2020-08-18T01:12:31.227654 | 2019-10-17T07:40:40 | 2019-10-17T07:40:40 | 215,731,114 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | # Exercise:
# Write a function myadd that can compute the sum of two, three, or four numbers.
# def myadd(.....):
# ....
# print(myadd(10, 20)) # 30
# print(myadd(100, 200, 300)) # 600
# print(myadd(1, 2, 3, 4)) # 10
# Approach 1: default the optional addends to 0
# def myadd(a, b, c=0, d=0):
# return a + b + c + d
# Approach 2: use None sentinels, then substitute 0
# def myadd(a, b, c=None, d=None):
# if c is None:
# c = 0
# if d is None:
# d = 0
# return a + b + c + d
# Approach 3: hand all four values to sum()
def myadd(a, b, c=0, d=0):
    return sum((a, b, c, d))
print(myadd(10, 20)) # 30
print(myadd(100, 200, 300)) # 600
print(myadd(1, 2, 3, 4)) # 10
| [
"[email protected]"
] | |
fb66fe333c795753e3bb1c54e9b12a0c9b1edb53 | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/sonarr/__init__.py | c592e8435c28b8ea7f22b41bc2b6f515fbafa82d | [
"Apache-2.0"
] | permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 4,327 | py | """The Sonarr component."""
from __future__ import annotations
from typing import Any
from aiopyarr.models.host_configuration import PyArrHostConfiguration
from aiopyarr.sonarr_client import SonarrClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_PORT,
CONF_SSL,
CONF_URL,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_BASE_PATH,
CONF_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS,
DEFAULT_UPCOMING_DAYS,
DEFAULT_WANTED_MAX_ITEMS,
DOMAIN,
LOGGER,
)
from .coordinator import (
CalendarDataUpdateCoordinator,
CommandsDataUpdateCoordinator,
DiskSpaceDataUpdateCoordinator,
QueueDataUpdateCoordinator,
SeriesDataUpdateCoordinator,
SonarrDataUpdateCoordinator,
StatusDataUpdateCoordinator,
WantedDataUpdateCoordinator,
)
PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Sonarr from a config entry."""
if not entry.options:
options = {
CONF_UPCOMING_DAYS: entry.data.get(
CONF_UPCOMING_DAYS, DEFAULT_UPCOMING_DAYS
),
CONF_WANTED_MAX_ITEMS: entry.data.get(
CONF_WANTED_MAX_ITEMS, DEFAULT_WANTED_MAX_ITEMS
),
}
hass.config_entries.async_update_entry(entry, options=options)
host_configuration = PyArrHostConfiguration(
api_token=entry.data[CONF_API_KEY],
url=entry.data[CONF_URL],
verify_ssl=entry.data[CONF_VERIFY_SSL],
)
sonarr = SonarrClient(
host_configuration=host_configuration,
session=async_get_clientsession(hass),
)
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
coordinators: dict[str, SonarrDataUpdateCoordinator[Any]] = {
"upcoming": CalendarDataUpdateCoordinator(hass, host_configuration, sonarr),
"commands": CommandsDataUpdateCoordinator(hass, host_configuration, sonarr),
"diskspace": DiskSpaceDataUpdateCoordinator(hass, host_configuration, sonarr),
"queue": QueueDataUpdateCoordinator(hass, host_configuration, sonarr),
"series": SeriesDataUpdateCoordinator(hass, host_configuration, sonarr),
"status": StatusDataUpdateCoordinator(hass, host_configuration, sonarr),
"wanted": WantedDataUpdateCoordinator(hass, host_configuration, sonarr),
}
# Temporary, until we add diagnostic entities
_version = None
for coordinator in coordinators.values():
await coordinator.async_config_entry_first_refresh()
if isinstance(coordinator, StatusDataUpdateCoordinator):
_version = coordinator.data.version
coordinator.system_version = _version
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinators
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Migrate old entry."""
LOGGER.debug("Migrating from version %s", entry.version)
if entry.version == 1:
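        # Version 1 stored protocol, host/port, and base path separately;
        # collapse them into the single CONF_URL that version 2 expects.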
new_proto = "https" if entry.data[CONF_SSL] else "http"
new_host_port = f"{entry.data[CONF_HOST]}:{entry.data[CONF_PORT]}"
new_path = ""
if entry.data[CONF_BASE_PATH].rstrip("/") not in ("", "/", "/api"):
new_path = entry.data[CONF_BASE_PATH].rstrip("/")
data = {
**entry.data,
CONF_URL: f"{new_proto}://{new_host_port}{new_path}",
}
hass.config_entries.async_update_entry(entry, data=data)
entry.version = 2
LOGGER.info("Migration to version %s successful", entry.version)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
| [
"[email protected]"
] | |
6949ae60b99663f19494a7ea87f9d87ec0858309 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_walleyes.py | 9095d3388cacad88aa57bdae0e7ba33f57fa39d6 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
# class header
class _WALLEYES():
def __init__(self,):
self.name = "WALLEYES"
		self.definitions = 'walleye'  # assumption: the original unquoted name was meant to be the base-form string
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['walleye']
| [
"[email protected]"
] | |
59d00a0a1af3c49e1d1aa9570187d7a6f89cdc8e | ff52e4d5ccc7bf0bcf3c41bb304f9c28f455c57e | /lms/lms/doctype/tasks/tasks.py | 20df73f8296465ab88fb9c1b6b58e2f7a92565f7 | [
"MIT"
] | permissive | vignesharumainayagam/engagex-lms-backup- | 889e76096d80b57f1df94c4ffa0dbc87ef6328f4 | d377c78873e66574a996c7d67b33ce9ff69f9d74 | refs/heads/master | 2020-03-11T18:00:28.166274 | 2018-04-19T05:36:46 | 2018-04-19T05:36:46 | 130,164,010 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Valiant Systems and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class Tasks(Document):
pass
@frappe.whitelist()
def get_events(start, end, filters=None):
"""Returns events for Gantt / Calendar view rendering.
:param start: Start date-time.
:param end: End date-time.
:param filters: Filters (JSON).
"""
from frappe.desk.calendar import get_event_conditions
conditions = get_event_conditions("Task", filters)
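	# Match tasks whose expected start/end dates overlap the requested window;
	# '0000-00-00' marks an unset date and is excluded.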
data = frappe.db.sql("""select name, exp_start_date, exp_end_date,
subject, status from `tabTasks`
where ((ifnull(exp_start_date, '0000-00-00')!= '0000-00-00') \
and (exp_start_date <= %(end)s) \
or ((ifnull(exp_end_date, '0000-00-00')!= '0000-00-00') \
and exp_end_date >= %(start)s))
{conditions}""".format(conditions=conditions), {
"start": start,
"end": end
}, as_dict=True, update={"allDay": 0})
return data
| [
"[email protected]"
] | |
2e2393fe770c6f960a4b4da26f9ebde7510eb4b4 | 526b892fa981573f26d55c361b42a9d3fa841db5 | /haas/highlighter/forms.py | c1c72f9063001d89e88ee11eb8b9560d184b6781 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | uranusjr/bbshighlighter | 13d89713245f95906a733b7aa8e7c39c58f6ec22 | da35d483e429e0cbd0619b1bc399f4fe67de9ac3 | refs/heads/master | 2020-05-31T22:36:58.424739 | 2014-03-07T17:24:00 | 2014-03-07T17:24:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from pygments.lexers import get_all_lexers
class HighlighterForm(forms.Form):
language = forms.ChoiceField()
code = forms.CharField(label='', widget=forms.Textarea)
def __init__(self, *args, **kwargs):
kwargs['initial'] = dict(kwargs.get('initial', {}), language='cpp')
super(HighlighterForm, self).__init__(*args, **kwargs)
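        # Populate the language dropdown with every lexer Pygments provides,
        # keyed by each lexer's primary alias.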
choices = []
for name, aliases, filetypes, mimetypes in get_all_lexers():
choices.append((aliases[0], name))
self.fields['language'].choices = choices
| [
"[email protected]"
] | |
45b1ffda6000b52ced756e89f276e7c99ca0fde5 | 0ed0f9b98be3eb4f87aedfb67210b01a3bd4ffe4 | /Validation/test/sync_MC_cfg.py | c4c6e0f84886667de068721433af89aef78b258f | [] | no_license | jshlee/CATTools | 6e6714225010fa5dfcc819f578d3ad0b8458a8dc | 3805086b4577b439ecef5369d5bd56f25cfe1ca3 | refs/heads/cat80x | 2021-01-21T16:44:52.978003 | 2017-10-17T18:46:58 | 2017-10-17T18:46:58 | 25,828,473 | 1 | 2 | null | 2017-10-17T18:46:59 | 2014-10-27T16:32:59 | C++ | UTF-8 | Python | false | false | 1,332 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("CATeX")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(False) )
process.options.allowUnscheduled = cms.untracked.bool(True)
process.MessageLogger.cerr.FwkReport.reportEvery = 50000
process.source = cms.Source("PoolSource", fileNames = cms.untracked.vstring())
process.source.fileNames = [
'/store/group/CAT/TTbarXSecSynchronization/v8-0-1/TT_TuneCUETP8M1_13TeV-powheg-pythia8__PUSpring16_80X_mcRun2_asymptotic_2016_v3_ext3-v1__1671FA99-240F-E611-BF05-00266CFAE464.root'
]
process.load("CATTools.CatAnalyzer.filters_cff")
process.load("CATTools.Validation.ttllEventSelector_cfi")
process.load("CATTools.Validation.validation_cff")
process.eventsTTLL.electron.idName = "cutBasedElectronID-Spring15-25ns-V1-standalone-medium"
process.eventsTTLL.electron.applyEcalCrackVeto = True
process.eventsTTLL.jet.bTagName = "pfCombinedInclusiveSecondaryVertexV2BJetTags"
process.eventsTTLL.jet.bTagWP = "CSVM"
process.eventsTTLL.jet.skipJER = True
process.eventsTTLL.filters.ignoreTrig = True
process.TFileService = cms.Service("TFileService",
fileName = cms.string("hist.root"),
)
process.p = cms.Path(
process.gen + process.rec
* process.eventsTTLL
)
| [
"[email protected]"
] | |
43cb4118ba6d1783272f04f6cbc672ceb38ea175 | 711c11d0111a40055ba110e7089a231c2ba42b8e | /toontown/ai/QuestManagerAI.py | ee1e2061bbd45b431c28194a5aa636fa8c01c284 | [
"Apache-2.0"
] | permissive | DeadMemez/ProjectAltis-OldAcornAcres | 03c8dc912ecccae8456d89790f6b332547b75cc3 | e8e0087389933795973e566782affcaec65a2980 | refs/heads/master | 2021-01-19T13:59:07.234192 | 2017-08-20T14:41:45 | 2017-08-20T14:41:45 | 100,869,782 | 0 | 2 | null | 2017-08-20T15:14:35 | 2017-08-20T15:14:35 | null | UTF-8 | Python | false | false | 26,126 | py | from toontown.toon.DistributedNPCSpecialQuestGiverAI import DistributedNPCSpecialQuestGiverAI
from toontown.building import FADoorCodes
from otp.ai.MagicWordGlobal import *
from toontown.hood import ZoneUtil
from toontown.quest import Quests
QuestIdIndex = 0
QuestFromNpcIdIndex = 1
QuestToNpcIdIndex = 2
QuestRewardIdIndex = 3
QuestProgressIndex = 4
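# A toon's quest list is stored flat: every 5 consecutive values describe one
# quest, indexed by the constants above.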
class QuestManagerAI:
notify = directNotify.newCategory('QuestManagerAI')
def __init__(self, air):
self.air = air
def __toonQuestsList2Quests(self, quests):
return [Quests.getQuest(x[0]) for x in quests]
def __incrementQuestProgress(self, quest):
"""
Increment the supplied quest's progress by 1.
"""
quest[4] += 1
def requestInteract(self, avId, npc):
# Get the avatar.
av = self.air.doId2do.get(avId)
if not av:
return
avQuestPocketSize = av.getQuestCarryLimit()
avQuests = av.getQuests()
needTrackTask = False
fakeTier = 0
avTrackProgress = av.getTrackProgress()
if avTrackProgress[0] == -1:
avQuestTier = av.getRewardTier()
if avQuestTier < Quests.DG_TIER and avQuestTier > Quests.DD_TIER:
fakeTier = Quests.DD_TIER
needTrackTask = True
elif avQuestTier < Quests.BR_TIER and avQuestTier > Quests.MM_TIER:
fakeTier = Quests.MM_TIER
needTrackTask = True
elif avQuestTier < Quests.DL_TIER and avQuestTier > Quests.BR_TIER:
fakeTier = Quests.BR_TIER
needTrackTask = True
# Iterate through their quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i:i + 5]
questId, fromNpcId, toNpcId, rewardId, toonProgress = questDesc
questClass = Quests.getQuest(questId)
if questClass:
completeStatus = questClass.getCompletionStatus(av, questDesc, npc)
else:
continue
# If the quest is a DeliverGagQuest, add the gags.
if isinstance(questClass, Quests.DeliverGagQuest):
# Check if it's the required NPC.
if npc.npcId == toNpcId:
track, level = questClass.getGagType()
av.inventory.setItem(track, level, av.inventory.numItem(track, level) - questClass.getNumGags())
av.b_setInventory(av.inventory.makeNetString())
# If they've completed a quest.
if completeStatus == Quests.COMPLETE:
# ToonUp the toon to max health.
av.toonUp(av.maxHp)
# If it's a TrackChoiceQuest then present their track choices.
if isinstance(questClass, Quests.TrackChoiceQuest):
npc.presentTrackChoice(avId, questId, [0,1,2,3,4,5,6,7])
break
# If there is another part to this quest then give them that.
if Quests.getNextQuest(questId, npc, av)[0] != Quests.NA:
self.nextQuest(av, npc, questId)
if avId in self.air.tutorialManager.avId2fsm:
self.air.tutorialManager.avId2fsm[avId].demand('Tunnel')
break
else:
# The toon has completed this quest. Give them a reward!
npc.completeQuest(avId, questId, rewardId)
self.completeQuest(av, questId)
break
else:
# They haven't completed any quests so we have to give them choices.
# If they've got a full pouch then reject them.
if (len(avQuests) == avQuestPocketSize*5):
npc.rejectAvatar(avId)
return
elif isinstance(npc, DistributedNPCSpecialQuestGiverAI):
# Don't display choices. Force a choice.
self.tutorialQuestChoice(avId, npc)
return
else:
#Present quest choices.
if needTrackTask:
choices = self.npcGiveTrackChoice(av, fakeTier)
else:
choices = self.avatarQuestChoice(av, npc)
if choices != []:
npc.presentQuestChoice(avId, choices)
else:
npc.rejectAvatar(avId)
def npcGiveTrackChoice(self, av, tier):
trackQuest = Quests.chooseTrackChoiceQuest(tier, av)
return [(trackQuest, 400, Quests.ToonHQ)]
def avatarQuestChoice(self, av, npc):
# Get the best quests for an avatar/npc.
return Quests.chooseBestQuests(av.getRewardTier(), npc, av)
def avatarChoseQuest(self, avId, npc, questId, rewardId, toNpcId):
# Get the avatar.
av = self.air.doId2do.get(avId)
if not av:
return
if len(av.quests) > av.getQuestCarryLimit():
return
# Get the npcIds
fromNpcId = npc.npcId
if toNpcId == 0:
toNpcId = Quests.getQuestToNpcId(questId)
# Add the quest to the avatars list.
transformedRewardId = Quests.transformReward(rewardId, av)
av.addQuest([questId, fromNpcId, toNpcId, rewardId, 0], transformedRewardId)
# Remove the tasks for timeout.
taskMgr.remove(npc.uniqueName('clearMovie'))
# Assign the quest.
npc.assignQuest(avId, questId, rewardId, toNpcId)
def avatarChoseTrack(self, avId, npc, pendingTrackQuest, trackId):
# Get the avatar.
av = self.air.doId2do.get(avId)
if not av:
return
# Remove the tasks for timeout.
taskMgr.remove(npc.uniqueName('clearMovie'))
# Show the completion movie and remove the task.
npc.completeQuest(avId, pendingTrackQuest, Quests.getRewardIdFromTrackId(trackId))
self.completeQuest(av, pendingTrackQuest)
# Set their track their working on.
av.b_setTrackProgress(trackId, 0)
def avatarCancelled(self, npcId):
# Get the NPC.
npc = self.air.doId2do.get(npcId)
if not npc:
return
# Remove the task for timeout.
taskMgr.remove(npc.uniqueName('clearMovie'))
def nextQuest(self, av, npc, questId):
# Get the next QuestId and toNpcId.
nextQuestId, toNpcId = Quests.getNextQuest(questId, npc, av)
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i:i + 5]
if questDesc[QuestIdIndex] == questId:
questDesc[QuestIdIndex] = nextQuestId
questDesc[QuestToNpcIdIndex] = toNpcId
questDesc[QuestProgressIndex] = 0
questList.append(questDesc)
# Show the quest movie and set their quests.
npc.incompleteQuest(av.doId, nextQuestId, Quests.QUEST, toNpcId)
av.b_setQuests(questList)
def completeQuest(self, av, completeQuestId):
#Get the avatars current quests.
avQuests = av.getQuests()
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i:i + 5]
questId, fromNpcId, toNpcId, rewardId, toonProgress = questDesc
questClass = Quests.getQuest(questId)
questExp = Quests.getQuestExp(questId)
if questId == completeQuestId:
av.removeQuest(questId)
self.giveReward(av, questId, rewardId)
self.avatarConsiderProgressTier(av)
if questExp == None:
continue
else:
av.b_setToonExp(av.getToonExp() + questExp)
break
def giveReward(self, av, questId, rewardId):
# Give the reward.
rewardClass = Quests.getReward(rewardId)
if rewardClass is None:
self.notify.warning('rewardClass was None for rewardId: %s.' % rewardId)
else:
rewardClass.sendRewardAI(av)
# Add the rewardId to the avatars rewardHistory.
rewardTier, rewardHistory = av.getRewardHistory()
transformedRewardId = Quests.transformReward(rewardId, av)
if transformedRewardId != rewardId:
rewardHistory.append(rewardId)
av.b_setRewardHistory(rewardTier, rewardHistory)
def avatarConsiderProgressTier(self, av):
# Get the avatars current tier.
currentTier = av.getRewardTier()
# Check if they have all required rewards.
if Quests.avatarHasAllRequiredRewards(av, currentTier):
if currentTier != Quests.ELDER_TIER:
currentTier += 1
av.b_setRewardHistory(currentTier, [])
def tutorialQuestChoice(self, avId, npc):
# Get the avatar.
av = self.air.doId2do.get(avId)
if not av:
return
# Get the possible quest choices and force the player to choose it.
choices = self.avatarQuestChoice(av, npc)
quest = choices[0]
self.avatarChoseQuest(avId, npc, quest[0], quest[1], 0)
# Are we in the tutorial speaking to Tutorial Tom?
if avId in self.air.tutorialManager.avId2fsm:
if av.getRewardHistory()[0] == 0:
self.air.tutorialManager.avId2fsm[avId].demand('Battle')
def toonRodeTrolleyFirstTime(self, av):
# Toon played a minigame.
self.toonPlayedMinigame(av, [])
def toonPlayedMinigame(self, av, toons):
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.TrolleyQuest):
questDesc[QuestProgressIndex] = 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonMadeFriend(self, avId):
# Get the avatar.
av = self.air.doId2do.get(avId)
if not av:
return
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.FriendQuest):
questDesc[QuestProgressIndex] = 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonUsedPhone(self, avId):
# Get the avatar.
av = self.air.doId2do.get(avId)
if not av:
return
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.PhoneQuest):
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonCaughtFishingItem(self, av):
# Get the avatars current quests.
avQuests = av.getQuests()
fishingItem = -1
questList = []
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if fishingItem != -1:
questList.append(questDesc)
continue
if isinstance(questClass, Quests.RecoverItemQuest):
if not hasattr(questClass, 'getItem'):
questList.append(questDesc)
continue
if questClass.getHolder() == Quests.AnyFish:
if not questClass.getCompletionStatus(av, questDesc) == Quests.COMPLETE:
baseChance = questClass.getPercentChance()
amountRemaining = questClass.getNumItems() - questDesc[QuestProgressIndex]
chance = Quests.calcRecoverChance(amountRemaining, baseChance)
if chance >= baseChance:
questDesc[QuestProgressIndex] += 1
fishingItem = questClass.getItem()
questList.append(questDesc)
av.b_setQuests(questList)
return fishingItem
def hasTailorClothingTicket(self, av, npc):
# Get the avatars current quests.
avQuests = av.getQuests()
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.DeliverItemQuest):
if questClass.getCompletionStatus(av, questDesc, npc) == Quests.COMPLETE:
# They have a clothing ticket.
return 1
return 0
def removeClothingTicket(self, av, npc):
# Get the avatars current quests.
avQuests = av.getQuests()
# Iterate through their current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.DeliverItemQuest):
if questClass.getCompletionStatus(av, questDesc, npc) == Quests.COMPLETE:
av.removeQuest(questDesc[QuestIdIndex])
break
def recoverItems(self, toon, suitsKilled, zoneId):
"""
Called in battleExperience to alert the quest system that a toon should
test for recovered items.
Returns a tuple of two lists:
Index 0: a list of recovered items.
Index 1: a list of unrecovered items.
"""
recovered, notRecovered = ([] for i in xrange(2))
for index, quest in enumerate(self.__toonQuestsList2Quests(toon.quests)):
if isinstance(quest, Quests.RecoverItemQuest):
isComplete = quest.getCompletionStatus(toon, toon.quests[index])
if isComplete == Quests.COMPLETE:
# This quest is complete, skip.
continue
# It's a quest where we need to recover an item!
if quest.isLocationMatch(zoneId):
# We're in the correct area to recover the item, woo!
if quest.getHolder() == Quests.Any or quest.getHolderType() in ['type', 'track', 'level']:
for suit in suitsKilled:
if quest.getCompletionStatus(toon, toon.quests[index]) == Quests.COMPLETE:
# Test if the task has already been completed.
# If it has, we don't need to iterate through the cogs anymore.
break
# Here comes the long IF statement...
if (quest.getHolder() == Quests.Any) \
or (quest.getHolderType() == 'type' and quest.getHolder() == suit['type']) \
or (quest.getHolderType() == 'track' and quest.getHolder() == suit['track']) \
or (quest.getHolderType() == 'level' and quest.getHolder() <= suit['level']):
                                progress = toon.quests[index][4] & pow(2, 16) - 1 # This seems to be the Disney way: progress is packed into the low 16 bits.
completion = quest.testRecover(progress)
if completion[0]:
# We win! We got the item from the cogs. :)
recovered.append(quest.getItem())
self.__incrementQuestProgress(toon.quests[index])
else:
# Tough luck, maybe next time.
notRecovered.append(quest.getItem())
toon.d_setQuests(toon.getQuests())
return (recovered, notRecovered)
def toonKilledBuilding(self, av, type, difficulty, floors, zoneId, activeToons):
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
zoneId = ZoneUtil.getBranchZone(zoneId)
# Iterate through the avatars current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if questClass.getCompletionStatus(av, questDesc) == Quests.INCOMPLETE:
if isinstance(questClass, Quests.BuildingQuest):
if questClass.isLocationMatch(zoneId):
if questClass.doesBuildingTypeCount(type):
if questClass.doesBuildingCount(av, activeToons):
if floors >= questClass.getNumFloors():
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonKilledCogdo(self, av, type, difficulty, zoneId, activeToons):
self.notify.debug("toonKilledCogdo(%s, '%s', %s, %d, %s)" % (str(av), type, str(difficulty), zoneId, str(activeToons)))
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
zoneId = ZoneUtil.getBranchZone(zoneId)
# Iterate through the avatars current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if questClass.getCompletionStatus(av, questDesc) == Quests.INCOMPLETE:
if isinstance(questClass, Quests.CogdoQuest):
if questClass.isLocationMatch(zoneId):
if questClass.doesCogdoTypeCount(type):
if questClass.doesCogdoCount(av, activeToons):
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonDefeatedFactory(self, av, factoryId, activeVictors):
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
# Iterate through the avatars current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.FactoryQuest):
if questClass.doesFactoryCount(av, factoryId, activeVictors):
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonDefeatedMint(self, av, mintId, activeVictors):
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
# Iterate through the avatars current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.MintQuest):
if questClass.doesMintCount(av, mintId, activeVictors):
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonDefeatedStage(self, av, stageId, activeVictors):
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.StageQuest):
if questClass.doesStageCount(av, stageId, activeVictors):
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonDefeatedCountryClub(self, av, clubId, activeVictors):
# Get the avatars current quests.
avQuests = av.getQuests()
questList = []
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
if isinstance(questClass, Quests.ClubQuest):
if questClass.doesClubCount(av, clubId, activeVictors):
questDesc[QuestProgressIndex] += 1
questList.append(questDesc)
av.b_setQuests(questList)
def toonKilledCogs(self, av, suitsKilled, zoneId, activeToonList):
# Get the avatar's current quests.
avQuests = av.getQuests()
questList = []
# Make a list of the activeToonDoIds
activeToonDoIds = [toon.doId for toon in activeToonList if not None]
# Iterate through the avatar's current quests.
for i in xrange(0, len(avQuests), 5):
questDesc = avQuests[i : i + 5]
questClass = Quests.getQuest(questDesc[QuestIdIndex])
# Check if they are doing a cog quest
if isinstance(questClass, Quests.CogQuest):
# Check if the cog counts...
for suit in suitsKilled:
if questClass.doesCogCount(av.doId, suit, zoneId, activeToonList):
# Looks like the cog counts!
if questClass.getCompletionStatus(av, questDesc) != Quests.COMPLETE:
questDesc[QuestProgressIndex] += 1
# Add the quest to the questList
questList.append(questDesc)
# Update the avatar's quests
av.b_setQuests(questList)
@magicWord(category=CATEGORY_PROGRAMMER, types=[str, int, int])
def quests(command, arg0=0, arg1=0):
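    # In-game debug command for manipulating the targeted toon's quest list:
    # clear/add/remove/list quests, resize the pocket, set progress, or set tier.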
invoker = spellbook.getTarget()
currQuests = invoker.getQuests()
currentQuestIds = []
for i in xrange(0, len(currQuests), 5):
currentQuestIds.append(currQuests[i])
pocketSize = invoker.getQuestCarryLimit()
carrying = len(currQuests) / 5
canCarry = False
if (carrying < pocketSize):
canCarry = True
if command == 'clear':
invoker.b_setQuests([])
return 'Cleared quests'
elif command == 'clearHistory':
invoker.d_setQuestHistory([])
return 'Cleared quests history'
elif command == 'add':
if arg0:
if canCarry:
if arg0 in Quests.QuestDict.keys():
return 'Added QuestID %s'%(arg0)
else:
return 'Invalid QuestID %s'%(arg0)
else:
return 'Cannot take anymore quests'
else:
return 'add needs 1 argument.'
elif command == 'remove':
if arg0:
if arg0 in currentQuestIds:
invoker.removeQuest(arg0)
return 'Removed QuestID %s'%(arg0)
elif arg0 < pocketSize and arg0 > 0:
if len(currentQuestIds) <= arg0:
questIdToRemove = currentQuestIds[arg0 - 1]
invoker.removeQuest(questIdToRemove)
return 'Removed quest from slot %s'%(arg0)
else:
return 'Invalid quest slot'
else:
return 'Cannot remove quest %s'%(arg0)
else:
return 'remove needs 1 argument.'
elif command == 'list':
if arg0:
if arg0 > 0 and arg0 <= pocketSize:
start = (arg0 -1) * 5
questDesc = currQuests[start : start + 5]
return 'QuestDesc in slot %s: %s.'%(arg0, questDesc)
else:
return 'Invalid quest slot %s.'%(arg0)
else:
return 'CurrentQuests: %s'%(currentQuestIds)
elif command == 'bagSize':
if arg0 > 0 and arg0 < 5:
invoker.b_setQuestCarryLimit(arg0)
return 'Set carry limit to %s'%(arg0)
else:
return 'Argument 0 must be between 1 and 4.'
elif command == 'progress':
if arg0 and arg1:
if arg0 > 0 and arg0 <= pocketSize:
questList = []
wantedQuestId = currentQuestIds[arg0 - 1]
for i in xrange(0, len(currQuests), 5):
questDesc = currQuests[i : i + 5]
if questDesc[0] == wantedQuestId:
questDesc[4] = arg1
questList.append(questDesc)
invoker.b_setQuests(questList)
return 'Set quest slot %s progress to %s'%(arg0, arg1)
elif arg0 in Quests.QuestDict.keys():
if arg0 in currentQuestIds:
questList = []
for i in xrange(0, len(currQuests), 5):
questDesc = currQuests[i : i + 5]
if questDesc[0] == arg0:
questDesc[4] = arg1
questList.append(questDesc)
invoker.b_setQuests(questList)
return 'Set QuestID %s progress to %s'%(arg0, arg1)
else:
return 'Cannot progress QuestID: %s.'%(arg0)
else:
return 'Invalid quest or slot id'
else:
return 'progress needs 2 arguments.'
elif command == 'tier':
if arg0:
invoker.b_setRewardHistory(arg0, invoker.getRewardHistory()[1])
return 'Set tier to %s'%(arg0)
else:
return 'tier needs 1 argument.'
else:
return 'Invalid first argument.'
| [
"[email protected]"
] | |
4237ec9fd7869d359af8e6f194348e6758a7a910 | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/io_import_mc/__init__.py | 9987c1068ea276a0c20b744f7ea9530f0f1b2fd4 | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,003 | py | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
bl_info = {
"name": "Import Maya Cache (.xml, .mc)",
"author": "Jasper van Nieuwenhuizen",
"version": (0, 1),
"blender": (2, 6, 0),
"location": "File > Import > Maya cache (.xml, .mc)",
"description": "Imports Maya Cache to Objects",
"warning": "wip",
"wiki_url": "",
"tracker_url": "",
"category": "Learnbgame",
}
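# When the add-on is re-enabled in a running Blender session the submodules
# are already imported, so reload them to pick up code changes.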
if "bpy" in locals():
import imp
if "import_mc" in locals():
imp.reload(import_mc)
if "export_mc" in locals():
imp.reload(export_mc)
import bpy
from bpy.props import (BoolProperty,
FloatProperty,
StringProperty,
EnumProperty,
)
from bpy_extras.io_utils import (ExportHelper,
ImportHelper,
path_reference_mode,
axis_conversion,
)
class ImportMC(bpy.types.Operator, ImportHelper):
"""Load a Maya Cache file"""
bl_idname = "import_shape.mc"
bl_label = "Import Maya Cache"
bl_options = {'PRESET', 'UNDO'}
filename_ext = ".xml"
filter_glob = StringProperty(
default="*.xml;*.mc",
options={'HIDDEN'},
)
use_selection = BoolProperty(
name="Selection Only",
description="Import cache for selected objects only",
default=False,
)
interpolation = EnumProperty(
name="Interpolation",
items=(('LINEAR', "Linear", ""),
('NONE', "None", ""),
)
)
time_mode = EnumProperty(
name="Method to control playback time",
items=(('FRAME', "Frame", "Control playback using a frame number"\
" (ignoring time FPS and start frame from the file"),
('TIME', "Time", "Control playback using time in seconds"),
('FACTOR', "Factor", "Control playback using a value"\
" between [0, 1]"),
)
)
play_mode = EnumProperty(
name="Play mode",
items=(('SCENE', "Scene", "Use the time from the scene"),
('CUSTOM', "Custom", "Use the modifiers own time"\
" evaluation"),
)
)
frame_start = FloatProperty(
name="Frame Start",
description="Add this to the start frame",
)
frame_scale = FloatProperty(
name="Frame Scale",
description="Evaluation time in seconds",
)
eval_frame = FloatProperty(
name="Evaluation Frame",
description="The frame to evaluate (starting at 0)",
)
eval_time = FloatProperty(
name="Evaluation Time",
description="Evaluation time in seconds",
)
eval_factor = FloatProperty(
name="Evaluation Factor",
description="Evaluation factor",
)
forward_axis = EnumProperty(
name="Forward",
items=(('X', "+X", ""),
('Y', "+Y", ""),
('Z', "+Z", ""),
('-X', "-X", ""),
('-Y', "-Y", ""),
('-Z', "-Z", ""),
),
default='-Z',
)
up_axis = EnumProperty(
name="Up",
items=(('X', "+X", ""),
('Y', "+Y", ""),
('Z', "+Z", ""),
('-X', "-X", ""),
('-Y', "-Y", ""),
('-Z', "-Z", ""),
),
default='Y',
)
def execute(self, context):
import imp
from . import import_mc
imp.reload(import_mc)
keywords = self.as_keywords(ignore=("forward_axis",
"up_axis",
"filter_glob",
))
global_matrix = axis_conversion(from_forward=self.forward_axis,
from_up=self.up_axis,
).to_4x4()
keywords["global_matrix"] = global_matrix
return import_mc.load(self, context, **keywords)
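    # Note (illustrative, not original code): as_keywords() collects the
    # operator's property values into a keyword dict, so import_mc.load()
    # receives use_selection, interpolation, time_mode, play_mode, the frame
    # settings and the assembled global_matrix; the two axis properties are
    # ignored because only the conversion matrix derived from them is needed.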
def draw(self, context):
layout = self.layout
layout.prop(self, "use_selection")
layout.label(text="Time Mapping:")
row = layout.row()
row.prop(self, "time_mode", expand=True)
row = layout.row()
row.prop(self, "play_mode", expand=True)
if self.play_mode == 'SCENE':
layout.prop(self, "frame_start")
layout.prop(self, "frame_scale")
else:
time_mode = self.time_mode
if time_mode == 'FRAME':
layout.prop(self, "eval_frame")
elif time_mode == 'TIME':
layout.prop(self, "eval_time")
elif time_mode == 'FACTOR':
layout.prop(self, "eval_factor")
layout.label(text="Axis Mapping:")
split = layout.split(percentage=0.5, align=True)
split.alert = (self.forward_axis[-1] == self.up_axis[-1])
split.label("Forward/Up Axis:")
split.prop(self, "forward_axis", text="")
split.prop(self, "up_axis", text="")
#split = layout.split(percentage=0.5)
#split.label(text="Flip Axis:")
#row = split.row()
#row.prop(self, "flip_axis")
def menu_func_import(self, context):
self.layout.operator(ImportMC.bl_idname, text="Maya Cache (.xml, .mc)")
#def menu_func_export(self, context):
# self.layout.operator(ExportMC.bl_idname, text="Maya Cache (.xml, .mc)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_import.append(menu_func_import)
#bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_import.remove(menu_func_import)
#bpy.types.INFO_MT_file_export.remove(menu_func_export)
if __name__ == "__main__":
register()
| [
"[email protected]"
] | |
01bb63ecb9346797486f1cf6af72c8fbf7e0b1e3 | f359c953ef823cc44f7d87a3736c3e4fb1817c0b | /EDBRCommon/python/PromptReco/Run2015D/SingleElectron/SingleElectron_Run2015D-bo.py | 93598b048e708643741ed759431ae1c51bb1d279 | [] | no_license | jruizvar/ExoDiBosonResonancesRun2 | aa613200725cf6cd825d7bcbde60d2e39ba84e39 | b407ab36504d0e04e6bddba4e57856f9f8c0ec66 | refs/heads/Analysis76X | 2021-01-18T20:00:57.358494 | 2016-05-30T21:30:19 | 2016-05-30T21:30:19 | 23,619,682 | 1 | 1 | null | 2016-04-22T18:38:45 | 2014-09-03T12:41:07 | Python | UTF-8 | Python | false | false | 930 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'/store/data/Run2015D/SingleElectron/MINIAOD/PromptReco-v3/000/257/645/00000/5CA69132-F967-E511-BF67-02163E014485.root',
'/store/data/Run2015D/SingleElectron/MINIAOD/PromptReco-v3/000/257/645/00000/6693A85A-FA67-E511-B66C-02163E01459D.root',
'/store/data/Run2015D/SingleElectron/MINIAOD/PromptReco-v3/000/257/645/00000/66A2EECE-F967-E511-98B1-02163E011C4A.root',
'/store/data/Run2015D/SingleElectron/MINIAOD/PromptReco-v3/000/257/645/00000/6AD1AB5B-FA67-E511-87A9-02163E01459D.root',
'/store/data/Run2015D/SingleElectron/MINIAOD/PromptReco-v3/000/257/645/00000/74422752-F967-E511-8224-02163E01437F.root' ] )
| [
"[email protected]"
] | |
7d645049875ce042602061c064c8c1b640251ccb | 71764665e27f4b96bab44f38a4a591ffc2171c24 | /hhplt/productsuite/gs10/__init__.py | 0dba8ddde1ae1f18ae65c9240109e986b46b8791 | [] | no_license | kingdomjc/RSU_production_VAT | 693f8c504acc0cc88af92942734ccb85f7e7d7c0 | 9a3d6d3f5a5edfaf30afdff725661630aafe434c | refs/heads/master | 2020-07-31T05:03:46.699606 | 2019-09-24T02:09:53 | 2019-09-24T02:09:53 | 210,491,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,258 | py | #coding:utf-8
'''GS10 OBU production tests'''
import hhplt.testengine.product_manage as product_manage
import battery,cq_overall_unit,trading,board_mending,cq_auto_board,manual_board,trading_mending,retradeAndValidateMac
#register the product's test suites
product_manage.registerProduct('GS10 OBU',(
        cq_auto_board,      #automated single-board test station
        manual_board,       #manual single-board test station
        board_mending,      #single-board repair station
        trading_mending,    #transaction retest station after board repair
        cq_overall_unit,    #assembled-unit station
        trading,            #transaction station
#        retradeAndValidateMac  #MAC verification station
        ))
'''
Parameters used by the tests:
Name                                    Test item                          Meaning                                                  Default
--------------------------------------------------------------------
gs10.initParam.displayDirect            Initial data                       Display orientation
gs10.initParam.softwareVersion                                             Software version number
gs10.initParam.hardwareVersion                                             Hardware version number
gs10.initParam.wakeupMode                                                  Wakeup mode
gs10.initParam.amIndex                                                     AmIndex
gs10.initParam.transPower                                                  Transmit power
gs10.initParam.txFilter                                                    TxFilter
gs10.initWanken.low.grade               Initial wakeup sensitivity         Initial low wakeup sensitivity, coarse adjustment
gs10.initWanken.low.level                                                  Initial low wakeup sensitivity, fine adjustment
gs10.initWanken.high.grade                                                 Initial high wakeup sensitivity, coarse adjustment
gs10.initWanken.high.level                                                 Initial high wakeup sensitivity, fine adjustment
gs10.capPower.low                       Capacitor circuit voltage          Low threshold for judging the capacitor circuit voltage
gs10.capPower.high                                                         High threshold for judging the capacitor circuit voltage
gs10.solarBatteryPower.board.low        Solar circuit voltage (board)      Low threshold for judging the solar circuit voltage
gs10.solarBatteryPower.board.high                                          High threshold for judging the solar circuit voltage
gs10.solarBatteryPower.overall.low      Solar circuit voltage (whole unit) Low threshold for judging the solar circuit voltage
gs10.solarBatteryPower.overall.high                                        High threshold for judging the solar circuit voltage
gs10.batteryPower.low                   Battery circuit voltage            Low threshold for judging the battery circuit voltage
gs10.batteryPower.high                                                     High threshold for judging the battery circuit voltage
gs10.wakeup.power.low                   Wakeup sensitivity                 Low wakeup power
gs10.wakeup.power.high                                                     High wakeup power
gs10.receiveSensitivity.power           Receive sensitivity test           Receive power level
gs10.receiveSensitivity.frameNum                                           Total number of frames sent
gs10.receiveSensitivity.frameBorder                                        Low threshold on the number of frames received
gs10.esamDistrictCode.[board prefix]    ESAM test                          ESAM district dispersal code
gs10.boardBarPrefix                                                        Single-board barcode prefix
gs10.sendPower.low                      Transmit power test                Low threshold for judging the transmit power
gs10.sendPower.high                                                        High threshold for judging the transmit power
gs10.staticCurrent.low                  Static current test                Low threshold for judging the static current
gs10.staticCurrent.high                                                    High threshold for judging the static current
gs10.deepStaticCurrent.low              Deep static current test           Low threshold for judging the deep static current
gs10.deepStaticCurrent.high                                                High threshold for judging the deep static current
gs10.batteryOpenPower.low               Battery open-circuit voltage       Low threshold for judging the battery open-circuit voltage
gs10.batteryOpenPower.high                                                 High threshold for judging the battery open-circuit voltage
gs10.capOpenPower.low                   Capacitor open-circuit voltage     Low threshold for judging the capacitor open-circuit voltage
gs10.capOpenPower.high                                                     High threshold for judging the capacitor open-circuit voltage
gs10.formalVersion.filename             Release firmware download          Version file name (without path)
'''
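# Illustrative sketch only (not part of the original suite): one way a station
# test could consume the threshold parameters documented above. The parameter
# lookup helper `getParameter` and the measured-value source are assumptions
# made for this example, not the real test-engine API.
def _judge_voltage(name, measured_volts, getParameter):
    """Pass iff low <= measured <= high for parameters '<name>.low'/'<name>.high'."""
    low = float(getParameter(name + ".low"))
    high = float(getParameter(name + ".high"))
    if not (low <= measured_volts <= high):
        raise AssertionError("%s: %.2fV outside [%.2f, %.2f]"
                             % (name, measured_volts, low, high))
# e.g. _judge_voltage("gs10.capPower", measured, getParameter) for the
# capacitor-circuit voltage check.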
| [
"[email protected]"
] | |
17b4251a831504ca745d4b00d9f775904854ff17 | 407c790da88f1c38f8800d6555cfeb9397046e70 | /anuncios/forms.py | 8bbcc2cb74ef19bbc8098c25ac201c3d04753801 | [] | no_license | jonatasprates/matao4x4 | 5488473812ab4dc31011199f5a055c9cca4144bb | ad8de86d9a9bce13363fd6417dd738430c60e9bb | refs/heads/master | 2022-11-26T13:49:11.784141 | 2020-08-11T02:10:36 | 2020-08-11T02:10:36 | 286,622,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,194 | py | #-*- coding: utf-8 -*-
from django.core.files.images import get_image_dimensions
from matao4x4.anuncios.models import Anunciante
from django import forms
class AnunciantesAdminForm(forms.ModelForm):
class Meta:
model = Anunciante
    # validate the specific field: imagem
def clean_imagem(self):
        # grab the image the user uploaded
imagem = self.cleaned_data.get("imagem")
        # was no image uploaded?
if not imagem:
            # raise a form validation error
raise forms.ValidationError("Você esqueceu de enviar o banner!")
        # the image was uploaded
else:
            # grab the image's width (only width is checked here)
largura = get_image_dimensions(imagem)[0]
            # does the width differ from the required size?
if largura != 224:
                # raise a form validation error reporting the required width
raise forms.ValidationError("A largura da imagem enviada é de %ipx. O correto é 224px." % largura)
return imagem | [
"[email protected]"
] | |
7e32b253fb44929796b8569b0d1334b8e2ca7c12 | 33518b9521d8e633010b0b9d1ea0f7a937437200 | /Python/strobogrammatic_number_ii/strobogrammatic_number_ii.py | 3456ec0c3783ada61d9b190a8736aca60890cef3 | [] | no_license | lqs4188980/CodingPractice | 977ddb69306c92a5e3df88f26572200622fad82a | c17653832269ab1bb3e411f7d74bef4c8e9985b3 | refs/heads/master | 2021-01-22T05:10:40.885490 | 2016-02-05T09:06:51 | 2016-02-05T09:06:51 | 25,272,652 | 0 | 1 | null | 2016-01-06T07:50:29 | 2014-10-15T20:40:34 | Java | UTF-8 | Python | false | false | 1,335 | py | # corner cases:
# - a multi-digit result must not start with 0
# - 0, 1, 8 can sit in the centre of an odd-length number
# - 0, 1, 6, 8, 9 can be used in the remaining positions (6 mirrors 9)
class Solution(object):
def __init__(self):
self.odd = ['0','1','8']
self.whole = ['0','1','6','8','9']
self.non_zero = ['1','6','8','9']
def findStrobogrammatic(self, n):
"""
:type n: int
:rtype: List[str]
"""
nums = []
if n % 2 == 1:
for char in self.odd:
self.get_stro_num(n, char, nums)
else:
self.get_stro_num(n, '', nums)
return nums
def get_stro_num(self, n, curr_num, nums):
if n == len(curr_num):
nums.append(curr_num)
return
if n == len(curr_num) + 2:
for char in self.non_zero:
self.get_stro_num(n, \
char + curr_num + self.get_mirror(char), \
nums)
else:
for char in self.whole:
self.get_stro_num(n, \
char + curr_num + self.get_mirror(char), \
nums)
def get_mirror(self, char):
if char == '0' or \
char == '1' or \
char == '8':
return char
elif char == '6':
return '9'
else:
return '6'
# else raise exception
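# Quick usage check (illustrative, not part of the original solution):
#
#   sol = Solution()
#   sol.findStrobogrammatic(1)  # -> ['0', '1', '8']
#   sol.findStrobogrammatic(2)  # -> ['11', '69', '88', '96']
#
# The outermost recursion layer draws from self.non_zero, which is what keeps
# multi-digit results from starting with '0'.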
| [
"[email protected]"
] | |
20c27ce9740fff5beebeaf33e73bf00621d46f2f | ef6a64f2e1276b2312fe69be5fb0f79de654f613 | /backend/breach/migrations/0014_auto_20160504_1753.py | dfab67023f3403bba72ecf71953d90b5076e106a | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | MasterScott/rupture | cd1d1fd410f376d1b5c32587a71fd9565ce6aae6 | 131c61a28550f082ca1598aa40ac37ca59204b40 | refs/heads/develop | 2021-07-06T04:29:04.791439 | 2019-07-28T10:47:39 | 2019-07-28T10:47:39 | 218,162,821 | 0 | 0 | MIT | 2021-01-12T08:59:19 | 2019-10-28T23:16:52 | null | UTF-8 | Python | false | false | 528 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-04 17:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('breach', '0013_auto_20160330_1632'),
]
operations = [
migrations.AlterField(
model_name='victim',
name='method',
field=models.IntegerField(choices=[(1, 1), (2, 2)], default=2, help_text='Method of building candidate samplesets.'),
),
]
| [
"[email protected]"
] | |
5911c46e45ba3fe780b13f90caecdb859d1aa90a | f8b5aafac15f408a48fabf853a918015c927e6fe | /backup/virtualenv/venv27/lib/python2.7/site-packages/ansible/modules/core/packaging/os/apt.py | b0566bf2c219fe7f9f851f802ff2c519286ed317 | [] | no_license | to30/tmp | bda1ac0ca3fc61e96c2a1c491367b698d7e97937 | ec809683970af6787728c2c41f161f416155982a | refs/heads/master | 2021-01-01T04:25:52.040770 | 2016-05-13T16:34:59 | 2016-05-13T16:34:59 | 58,756,087 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 27,266 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Flowroute LLC
# Written by Matthew Williams <[email protected]>
# Based on yum module written by Seth Vidal <skvidal at fedoraproject.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: apt
short_description: Manages apt-packages
description:
- Manages I(apt) packages (such as for Debian/Ubuntu).
version_added: "0.0.2"
options:
name:
description:
- A package name, like C(foo), or package specifier with version, like C(foo=1.0). Name wildcards (fnmatch) like C(apt*) and version wildcards like C(foo=1.0*) are also supported. Note that the apt-get commandline supports implicit regex matches here but we do not because it can let typos through easier (If you typo C(foo) as C(fo) apt-get would install packages that have "fo" in their name with a warning and a prompt for the user. Since we don't have warnings and prompts before installing we disallow this. Use an explicit fnmatch pattern if you want wildcarding)
required: false
default: null
state:
description:
- Indicates the desired package state. C(latest) ensures that the latest version is installed. C(build-dep) ensures the package build dependencies are installed.
required: false
default: present
choices: [ "latest", "absent", "present", "build-dep" ]
update_cache:
description:
- Run the equivalent of C(apt-get update) before the operation. Can be run as part of the package installation or as a separate step.
required: false
default: no
choices: [ "yes", "no" ]
cache_valid_time:
description:
      - If C(update_cache) is specified and the last run is less than or equal to I(cache_valid_time) seconds ago, the C(update_cache) step gets skipped.
required: false
default: no
purge:
description:
- Will force purging of configuration files if the module state is set to I(absent).
required: false
default: no
choices: [ "yes", "no" ]
default_release:
description:
- Corresponds to the C(-t) option for I(apt) and sets pin priorities
required: false
default: null
install_recommends:
description:
- Corresponds to the C(--no-install-recommends) option for I(apt). C(yes) installs recommended packages. C(no) does not install recommended packages. By default, Ansible will use the same defaults as the operating system. Suggested packages are never installed.
required: false
default: null
choices: [ "yes", "no" ]
force:
description:
- If C(yes), force installs/removes.
required: false
default: "no"
choices: [ "yes", "no" ]
upgrade:
description:
- 'If yes or safe, performs an aptitude safe-upgrade.'
- 'If full, performs an aptitude full-upgrade.'
- 'If dist, performs an apt-get dist-upgrade.'
- 'Note: This does not upgrade a specific package, use state=latest for that.'
version_added: "1.1"
required: false
default: "no"
choices: [ "no", "yes", "safe", "full", "dist"]
dpkg_options:
description:
- Add dpkg options to apt command. Defaults to '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"'
- Options should be supplied as comma separated list
required: false
default: 'force-confdef,force-confold'
deb:
description:
- Path to a .deb package on the remote machine.
required: false
version_added: "1.6"
requirements: [ python-apt, aptitude ]
author: "Matthew Williams (@mgwilliams)"
notes:
- Three of the upgrade modes (C(full), C(safe) and its alias C(yes)) require C(aptitude), otherwise
C(apt-get) suffices.
'''
EXAMPLES = '''
# Update repositories cache and install "foo" package
- apt: name=foo update_cache=yes
# Remove "foo" package
- apt: name=foo state=absent
# Install the package "foo"
- apt: name=foo state=present
# Install the version '1.00' of package "foo"
- apt: name=foo=1.00 state=present
# Update the repository cache and update package "nginx" to latest version using default release squeeze-backport
- apt: name=nginx state=latest default_release=squeeze-backports update_cache=yes
# Install latest version of "openjdk-6-jdk" ignoring "install-recommends"
- apt: name=openjdk-6-jdk state=latest install_recommends=no
# Update all packages to the latest version
- apt: upgrade=dist
# Run the equivalent of "apt-get update" as a separate step
- apt: update_cache=yes
# Only run "update_cache=yes" if the last one is more than 3600 seconds ago
- apt: update_cache=yes cache_valid_time=3600
# Pass options to dpkg on run
- apt: upgrade=dist update_cache=yes dpkg_options='force-confold,force-confdef'
# Install a .deb package
- apt: deb=/tmp/mypackage.deb
# Install the build dependencies for package "foo"
- apt: pkg=foo state=build-dep
'''
RETURN = '''
cache_updated:
description: if the cache was updated or not
returned: success, in some cases
type: boolean
sample: True
cache_update_time:
description: time of the last cache update (0 if unknown)
returned: success, in some cases
type: datetime
sample: 1425828348000
stdout:
description: output from apt
returned: success, when needed
type: string
sample: "Reading package lists...\nBuilding dependency tree...\nReading state information...\nThe following extra packages will be installed:\n apache2-bin ..."
stderr:
description: error output from apt
returned: success, when needed
type: string
sample: "AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 127.0.1.1. Set the 'ServerName' directive globally to ..."
'''
import traceback
# added to stave off future warnings about apt api
import warnings
warnings.filterwarnings('ignore', "apt API not stable yet", FutureWarning)
import os
import datetime
import fnmatch
import itertools
# APT related constants
APT_ENV_VARS = dict(
DEBIAN_FRONTEND = 'noninteractive',
DEBIAN_PRIORITY = 'critical',
# We screenscrape apt-get and aptitude output for information so we need
# to make sure we use the C locale when running commands
LANG = 'C',
LC_ALL = 'C',
LC_MESSAGES = 'C',
LC_CTYPE = 'C',
)
DPKG_OPTIONS = 'force-confdef,force-confold'
APT_GET_ZERO = "\n0 upgraded, 0 newly installed"
APTITUDE_ZERO = "\n0 packages upgraded, 0 newly installed"
APT_LISTS_PATH = "/var/lib/apt/lists"
APT_UPDATE_SUCCESS_STAMP_PATH = "/var/lib/apt/periodic/update-success-stamp"
HAS_PYTHON_APT = True
try:
import apt
import apt.debfile
import apt_pkg
except ImportError:
HAS_PYTHON_APT = False
def package_split(pkgspec):
parts = pkgspec.split('=', 1)
if len(parts) > 1:
return parts[0], parts[1]
else:
return parts[0], None
def package_versions(pkgname, pkg, pkg_cache):
try:
versions = set(p.version for p in pkg.versions)
except AttributeError:
# assume older version of python-apt is installed
# apt.package.Package#versions require python-apt >= 0.7.9.
pkg_cache_list = (p for p in pkg_cache.Packages if p.Name == pkgname)
pkg_versions = (p.VersionList for p in pkg_cache_list)
versions = set(p.VerStr for p in itertools.chain(*pkg_versions))
return versions
def package_version_compare(version, other_version):
try:
return apt_pkg.version_compare(version, other_version)
except AttributeError:
return apt_pkg.VersionCompare(version, other_version)
def package_status(m, pkgname, version, cache, state):
try:
        # get the package from the cache, as well as the
        # low-level apt_pkg.Package object which contains
        # state fields not directly accessible from the
# higher-level apt.package.Package object.
pkg = cache[pkgname]
ll_pkg = cache._cache[pkgname] # the low-level package object
except KeyError:
if state == 'install':
try:
provided_packages = cache.get_providing_packages(pkgname)
if provided_packages:
is_installed = False
# when virtual package providing only one package, look up status of target package
if cache.is_virtual_package(pkgname) and len(provided_packages) == 1:
package = provided_packages[0]
installed, upgradable, has_files = package_status(m, package.name, version, cache, state='install')
if installed:
is_installed = True
return is_installed, True, False
m.fail_json(msg="No package matching '%s' is available" % pkgname)
except AttributeError:
# python-apt version too old to detect virtual packages
# mark as upgradable and let apt-get install deal with it
return False, True, False
else:
return False, False, False
try:
has_files = len(pkg.installed_files) > 0
except UnicodeDecodeError:
has_files = True
except AttributeError:
has_files = False # older python-apt cannot be used to determine non-purged
try:
package_is_installed = ll_pkg.current_state == apt_pkg.CURSTATE_INSTALLED
except AttributeError: # python-apt 0.7.X has very weak low-level object
try:
# might not be necessary as python-apt post-0.7.X should have current_state property
package_is_installed = pkg.is_installed
except AttributeError:
# assume older version of python-apt is installed
package_is_installed = pkg.isInstalled
if version:
versions = package_versions(pkgname, pkg, cache._cache)
avail_upgrades = fnmatch.filter(versions, version)
if package_is_installed:
try:
installed_version = pkg.installed.version
except AttributeError:
installed_version = pkg.installedVersion
# Only claim the package is installed if the version is matched as well
package_is_installed = fnmatch.fnmatch(installed_version, version)
# Only claim the package is upgradable if a candidate matches the version
package_is_upgradable = False
for candidate in avail_upgrades:
if package_version_compare(candidate, installed_version) > 0:
package_is_upgradable = True
break
else:
package_is_upgradable = bool(avail_upgrades)
else:
try:
package_is_upgradable = pkg.is_upgradable
except AttributeError:
# assume older version of python-apt is installed
package_is_upgradable = pkg.isUpgradable
return package_is_installed, package_is_upgradable, has_files
def expand_dpkg_options(dpkg_options_compressed):
options_list = dpkg_options_compressed.split(',')
dpkg_options = ""
for dpkg_option in options_list:
dpkg_options = '%s -o "Dpkg::Options::=--%s"' \
% (dpkg_options, dpkg_option)
return dpkg_options.strip()
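# Worked example (illustrative): expand_dpkg_options('force-confdef,force-confold')
# returns '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"',
# i.e. one -o flag per comma-separated option.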
def expand_pkgspec_from_fnmatches(m, pkgspec, cache):
# Note: apt-get does implicit regex matching when an exact package name
# match is not found. Something like this:
# matches = [pkg.name for pkg in cache if re.match(pkgspec, pkg.name)]
# (Should also deal with the ':' for multiarch like the fnmatch code below)
#
# We have decided not to do similar implicit regex matching but might take
# a PR to add some sort of explicit regex matching:
# https://github.com/ansible/ansible-modules-core/issues/1258
new_pkgspec = []
for pkgspec_pattern in pkgspec:
pkgname_pattern, version = package_split(pkgspec_pattern)
# note that none of these chars is allowed in a (debian) pkgname
if frozenset('*?[]!').intersection(pkgname_pattern):
# handle multiarch pkgnames, the idea is that "apt*" should
# only select native packages. But "apt*:i386" should still work
if not ":" in pkgname_pattern:
try:
pkg_name_cache = _non_multiarch
except NameError:
pkg_name_cache = _non_multiarch = [pkg.name for pkg in cache if not ':' in pkg.name]
else:
try:
pkg_name_cache = _all_pkg_names
except NameError:
pkg_name_cache = _all_pkg_names = [pkg.name for pkg in cache]
matches = fnmatch.filter(pkg_name_cache, pkgname_pattern)
if len(matches) == 0:
m.fail_json(msg="No package(s) matching '%s' available" % str(pkgname_pattern))
else:
new_pkgspec.extend(matches)
else:
# No wildcards in name
new_pkgspec.append(pkgspec_pattern)
return new_pkgspec
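# Illustrative behaviour (package names are made up): with a cache containing
# ['apt', 'apt-utils', 'aptitude', 'libapt-pkg:i386'], the spec ['apt*'] expands
# to ['apt', 'apt-utils', 'aptitude']; the ':i386' name is excluded because a
# pattern without ':' only matches native (non-multiarch) packages.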
def install(m, pkgspec, cache, upgrade=False, default_release=None,
install_recommends=None, force=False,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS),
build_dep=False):
pkg_list = []
packages = ""
pkgspec = expand_pkgspec_from_fnmatches(m, pkgspec, cache)
for package in pkgspec:
name, version = package_split(package)
installed, upgradable, has_files = package_status(m, name, version, cache, state='install')
if build_dep:
# Let apt decide what to install
pkg_list.append("'%s'" % package)
continue
if not installed or (upgrade and upgradable):
pkg_list.append("'%s'" % package)
if installed and upgradable and version:
# This happens when the package is installed, a newer version is
# available, and the version is a wildcard that matches both
#
# We do not apply the upgrade flag because we cannot specify both
# a version and state=latest. (This behaviour mirrors how apt
# treats a version with wildcard in the package)
pkg_list.append("'%s'" % package)
packages = ' '.join(pkg_list)
if len(packages) != 0:
if force:
force_yes = '--force-yes'
else:
force_yes = ''
if m.check_mode:
check_arg = '--simulate'
else:
check_arg = ''
if build_dep:
cmd = "%s -y %s %s %s build-dep %s" % (APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)
else:
cmd = "%s -y %s %s %s install %s" % (APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)
if default_release:
cmd += " -t '%s'" % (default_release,)
if install_recommends is False:
cmd += " -o APT::Install-Recommends=no"
elif install_recommends is True:
cmd += " -o APT::Install-Recommends=yes"
# install_recommends is None uses the OS default
rc, out, err = m.run_command(cmd)
if rc:
return (False, dict(msg="'%s' failed: %s" % (cmd, err), stdout=out, stderr=err))
else:
return (True, dict(changed=True, stdout=out, stderr=err))
else:
return (True, dict(changed=False))
def install_deb(m, debs, cache, force, install_recommends, dpkg_options):
changed=False
deps_to_install = []
pkgs_to_install = []
for deb_file in debs.split(','):
try:
pkg = apt.debfile.DebPackage(deb_file)
# Check if it's already installed
if pkg.compare_to_version_in_cache() == pkg.VERSION_SAME:
continue
# Check if package is installable
if not pkg.check() and not force:
m.fail_json(msg=pkg._failure_string)
# add any missing deps to the list of deps we need
# to install so they're all done in one shot
deps_to_install.extend(pkg.missing_deps)
except Exception, e:
m.fail_json(msg="Unable to install package: %s" % str(e))
# and add this deb to the list of packages to install
pkgs_to_install.append(deb_file)
# install the deps through apt
retvals = {}
if len(deps_to_install) > 0:
(success, retvals) = install(m=m, pkgspec=deps_to_install, cache=cache,
install_recommends=install_recommends,
dpkg_options=expand_dpkg_options(dpkg_options))
if not success:
m.fail_json(**retvals)
changed = retvals.get('changed', False)
if len(pkgs_to_install) > 0:
options = ' '.join(["--%s"% x for x in dpkg_options.split(",")])
if m.check_mode:
options += " --simulate"
if force:
options += " --force-all"
cmd = "dpkg %s -i %s" % (options, " ".join(pkgs_to_install))
rc, out, err = m.run_command(cmd)
if "stdout" in retvals:
stdout = retvals["stdout"] + out
else:
stdout = out
if "stderr" in retvals:
stderr = retvals["stderr"] + err
else:
stderr = err
if rc == 0:
m.exit_json(changed=True, stdout=stdout, stderr=stderr)
else:
m.fail_json(msg="%s failed" % cmd, stdout=stdout, stderr=stderr)
else:
m.exit_json(changed=changed, stdout=retvals.get('stdout',''), stderr=retvals.get('stderr',''))
def remove(m, pkgspec, cache, purge=False,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
pkg_list = []
pkgspec = expand_pkgspec_from_fnmatches(m, pkgspec, cache)
for package in pkgspec:
name, version = package_split(package)
installed, upgradable, has_files = package_status(m, name, version, cache, state='remove')
if installed or (has_files and purge):
pkg_list.append("'%s'" % package)
packages = ' '.join(pkg_list)
if len(packages) == 0:
m.exit_json(changed=False)
else:
if purge:
purge = '--purge'
else:
purge = ''
cmd = "%s -q -y %s %s remove %s" % (APT_GET_CMD, dpkg_options, purge, packages)
if m.check_mode:
m.exit_json(changed=True)
rc, out, err = m.run_command(cmd)
if rc:
m.fail_json(msg="'apt-get remove %s' failed: %s" % (packages, err), stdout=out, stderr=err)
m.exit_json(changed=True, stdout=out, stderr=err)
def upgrade(m, mode="yes", force=False, default_release=None,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
if m.check_mode:
check_arg = '--simulate'
else:
check_arg = ''
apt_cmd = None
prompt_regex = None
if mode == "dist":
# apt-get dist-upgrade
apt_cmd = APT_GET_CMD
upgrade_command = "dist-upgrade"
elif mode == "full":
# aptitude full-upgrade
apt_cmd = APTITUDE_CMD
upgrade_command = "full-upgrade"
else:
# aptitude safe-upgrade # mode=yes # default
apt_cmd = APTITUDE_CMD
upgrade_command = "safe-upgrade"
prompt_regex = r"(^Do you want to ignore this warning and proceed anyway\?|^\*\*\*.*\[default=.*\])"
if force:
if apt_cmd == APT_GET_CMD:
force_yes = '--force-yes'
else:
force_yes = '--assume-yes --allow-untrusted'
else:
force_yes = ''
apt_cmd_path = m.get_bin_path(apt_cmd, required=True)
cmd = '%s -y %s %s %s %s' % (apt_cmd_path, dpkg_options,
force_yes, check_arg, upgrade_command)
if default_release:
cmd += " -t '%s'" % (default_release,)
rc, out, err = m.run_command(cmd, prompt_regex=prompt_regex)
if rc:
m.fail_json(msg="'%s %s' failed: %s" % (apt_cmd, upgrade_command, err), stdout=out)
if (apt_cmd == APT_GET_CMD and APT_GET_ZERO in out) or (apt_cmd == APTITUDE_CMD and APTITUDE_ZERO in out):
m.exit_json(changed=False, msg=out, stdout=out, stderr=err)
m.exit_json(changed=True, msg=out, stdout=out, stderr=err)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['installed', 'latest', 'removed', 'absent', 'present', 'build-dep']),
update_cache = dict(default=False, aliases=['update-cache'], type='bool'),
cache_valid_time = dict(type='int'),
purge = dict(default=False, type='bool'),
package = dict(default=None, aliases=['pkg', 'name'], type='list'),
deb = dict(default=None),
default_release = dict(default=None, aliases=['default-release']),
install_recommends = dict(default=None, aliases=['install-recommends'], type='bool'),
force = dict(default='no', type='bool'),
upgrade = dict(choices=['no', 'yes', 'safe', 'full', 'dist']),
dpkg_options = dict(default=DPKG_OPTIONS)
),
mutually_exclusive = [['package', 'upgrade', 'deb']],
required_one_of = [['package', 'upgrade', 'update_cache', 'deb']],
supports_check_mode = True
)
module.run_command_environ_update = APT_ENV_VARS
if not HAS_PYTHON_APT:
if module.check_mode:
module.fail_json(msg="python-apt must be installed to use check mode. If run normally this module can autoinstall it")
try:
module.run_command('apt-get update && apt-get install python-apt -y -q --force-yes', use_unsafe_shell=True, check_rc=True)
global apt, apt_pkg
import apt
import apt.debfile
import apt_pkg
except ImportError:
module.fail_json(msg="Could not import python modules: apt, apt_pkg. Please install python-apt package.")
global APTITUDE_CMD
APTITUDE_CMD = module.get_bin_path("aptitude", False)
global APT_GET_CMD
APT_GET_CMD = module.get_bin_path("apt-get")
p = module.params
if p['upgrade'] == 'no':
p['upgrade'] = None
if not APTITUDE_CMD and p.get('upgrade', None) in [ 'full', 'safe', 'yes' ]:
module.fail_json(msg="Could not find aptitude. Please ensure it is installed.")
updated_cache = False
updated_cache_time = 0
install_recommends = p['install_recommends']
dpkg_options = expand_dpkg_options(p['dpkg_options'])
# Deal with deprecated aliases
if p['state'] == 'installed':
p['state'] = 'present'
if p['state'] == 'removed':
p['state'] = 'absent'
try:
cache = apt.Cache()
if p['default_release']:
try:
apt_pkg.config['APT::Default-Release'] = p['default_release']
except AttributeError:
apt_pkg.Config['APT::Default-Release'] = p['default_release']
# reopen cache w/ modified config
cache.open(progress=None)
if p['update_cache']:
# Default is: always update the cache
cache_valid = False
now = datetime.datetime.now()
if p.get('cache_valid_time', False):
try:
mtime = os.stat(APT_UPDATE_SUCCESS_STAMP_PATH).st_mtime
except:
# Looks like the update-success-stamp is not available
# Fallback: Checking the mtime of the lists
try:
mtime = os.stat(APT_LISTS_PATH).st_mtime
except:
# No mtime could be read. We update the cache to be safe
mtime = False
if mtime:
tdelta = datetime.timedelta(seconds=p['cache_valid_time'])
mtimestamp = datetime.datetime.fromtimestamp(mtime)
if mtimestamp + tdelta >= now:
cache_valid = True
updated_cache_time = int(time.mktime(mtimestamp.timetuple()))
if cache_valid is not True:
cache.update()
cache.open(progress=None)
updated_cache = True
updated_cache_time = int(time.mktime(now.timetuple()))
if not p['package'] and not p['upgrade'] and not p['deb']:
module.exit_json(changed=False, cache_updated=updated_cache, cache_update_time=updated_cache_time)
else:
updated_cache = False
updated_cache_time = 0
force_yes = p['force']
if p['upgrade']:
upgrade(module, p['upgrade'], force_yes, p['default_release'], dpkg_options)
if p['deb']:
if p['state'] != 'present':
module.fail_json(msg="deb only supports state=present")
install_deb(module, p['deb'], cache,
install_recommends=install_recommends,
force=force_yes, dpkg_options=p['dpkg_options'])
packages = p['package']
latest = p['state'] == 'latest'
for package in packages:
if package.count('=') > 1:
module.fail_json(msg="invalid package spec: %s" % package)
if latest and '=' in package:
module.fail_json(msg='version number inconsistent with state=latest: %s' % package)
if p['state'] in ('latest', 'present', 'build-dep'):
state_upgrade = False
state_builddep = False
if p['state'] == 'latest':
state_upgrade = True
if p['state'] == 'build-dep':
state_builddep = True
result = install(module, packages, cache, upgrade=state_upgrade,
default_release=p['default_release'],
install_recommends=install_recommends,
force=force_yes, dpkg_options=dpkg_options,
build_dep=state_builddep)
(success, retvals) = result
retvals['cache_updated']=updated_cache
retvals['cache_update_time']=updated_cache_time
if success:
module.exit_json(**retvals)
else:
module.fail_json(**retvals)
elif p['state'] == 'absent':
remove(module, packages, cache, p['purge'], dpkg_options)
except apt.cache.LockFailedException:
module.fail_json(msg="Failed to lock apt for exclusive operation")
except apt.cache.FetchFailedException:
module.fail_json(msg="Could not fetch updated apt files")
# import module snippets
from ansible.module_utils.basic import *
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
ab57c4b29c3146737651116ef31d6aee8c1efed7 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_138/658.py | 25615d80a7676ddb94779696a868e8215c273d28 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,191 | py | def dwar(NAOMI , KEN):
naomi = NAOMI[:]
ken = KEN[:]
naomi.sort()
ken.sort()
N = len(naomi)
count = 0
for turn in range(N):
if naomi[0] > ken[0]:
count += 1
naomi = naomi[1:]
ken = ken[1:]
else:
naomi = naomi[1:]
ken = ken[:-1]
return count
def war(NAOMI ,KEN):
naomi = NAOMI[:]
ken = KEN[:]
naomi.sort()
ken.sort()
N = len(naomi)
count = 0
for turn in range(N):
if naomi[-1] > ken[-1]:
count += 1
naomi = naomi[:-1]
ken = ken[1:]
else:
naomi = naomi[:-1]
ken = ken[:-1]
return count
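# Sanity check (illustrative, not part of the submission):
#   dwar([0.3, 0.6, 0.9], [0.2, 0.5, 0.8])  # -> 3 (Deceitful War: Naomi wins every round)
#   war([0.3, 0.6, 0.9], [0.2, 0.5, 0.8])   # -> 1 (honest War: only her 0.9 scores)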
f = open('D-large.in', 'r')
line1 = f.readline()
cases = int(line1)
for case in range(1,cases+1):
line = f.readline()
N = int(line)
line = f.readline()
naomi_str = line.split()
naomi = [float(naomi_str[i]) for i in range(N)]
line = f.readline()
ken_str = line.split()
ken = [float(ken_str[i]) for i in range(N)]
print "Case #"+str(case)+ ": " + str(dwar(naomi,ken)) + " " + str(war(naomi,ken))
| [
"[email protected]"
] | |
0c2d777b143221ee4e5b6e86f30dff1da4b5cc8b | 5b173a0e8a10a4f9ff0e3559cdaf8a211e239376 | /scoring_360/migrations/0002_auto_20190408_1246.py | 7d00471115c41ced34ee64497736eade78e325b8 | [] | no_license | Tur-4000/MTLCrewing | 9eb6a59ab0d8d0362cae01459470eb4cd3e4f9b5 | 388029cc0fbf60dacd2118dcc80ce3ec3d77d07b | refs/heads/master | 2022-05-02T11:05:30.205355 | 2019-04-08T14:05:19 | 2019-04-08T14:05:19 | 176,301,948 | 0 | 0 | null | 2022-03-11T23:42:58 | 2019-03-18T14:19:56 | JavaScript | UTF-8 | Python | false | false | 817 | py | # Generated by Django 2.1.7 on 2019-04-08 09:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('crewing', '0028_auto_20190408_1246'),
('scoring_360', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='ability360',
name='ranks',
field=models.ManyToManyField(related_name='abilities', to='crewing.Ranks', verbose_name='Должность'),
),
migrations.AlterField(
model_name='question360',
name='ability',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='scoring_360.Ability360', verbose_name='Компетенция'),
),
]
| [
"[email protected]"
] | |
b87ce28ff9491f2f1ec326ace32de6c94e3f8349 | 1fe03131ad139e2415fd0c0c73697b4541e5b862 | /.history/src/fightScene_20190423000454.py | c40281bf6dde0086a8bb07b3bfd7efde3705a454 | [
"MIT"
] | permissive | vidalmatheus/pyKombat | d83175a7a952663e278a8247d43349f87192fde3 | 6646020c59367ba0424d73a5861e13bbc0daac1f | refs/heads/master | 2021-06-20T09:35:07.950596 | 2020-08-06T14:08:13 | 2020-08-06T14:08:13 | 172,716,161 | 1 | 1 | MIT | 2019-12-25T10:54:10 | 2019-02-26T13:24:31 | Python | UTF-8 | Python | false | false | 12,563 | py | import pygame
import os
from pygame.locals import *
import config
import game
import engine
import menu
from random import randint
import _fighter
from pygame_functions import *
class Scenario:
def __init__(self, game, scenario):
self.game = game
self.scenario = scenario
pygame.mixer.music.stop()
music = engine.Music("mkt")
music.play()
music.volume(0.1)
def setScenario(self, scenario):
if scenario == 9:
scenario = randint(1, 8)
#self.scene = pygame.image.load('../res/Background/Scenario'+str(scenario)+'.png')
#self.game.getDisplay().blit(self.scene, (0, 0))
#pygame.display.update()
#screenSize(800, 500,"pyKombat",None,None,True) # FullScreen
screenSize(800, 500,"pyKombat") # Minimized
setBackgroundImage('../res/Background/Scenario'+str(scenario)+'.png')
self.judge(scenario)
def judge(self,scenario):
[player1,player2] = self.addFigther(scenario)
player1.act()
player2.act()
nextFrame1 = clock()
nextFrame2 = clock()
hitCounter = 0
dizzyCounter = 1
specialCounter = 1
while True:
aux1 = player1.fight(clock(),nextFrame1)
player1.life.render()
nextFrame1 = aux1
aux2 = player2.fight(clock(),nextFrame2)
player2.life.render()
nextFrame2 = aux2
x1 = player1.getX()
x2 = player2.getX()
#print(x1, x2, x2-x1)
if not player1.isAlive() or not player2.isAlive():
if not player1.isAlive():
player1.takeHit("dizzy")
if dizzyCounter >= 100:
print(dizzyCounter)
player1.takeHit("dead")
if not player2.isAlive():
player2.takeHit("dizzy")
if dizzyCounter >= 100:
player2.takeHit("dead")
dizzyCounter += 1
            # clamp both players inside the screen bounds
if player1.getX() < 20:
player1.setX(20)
if player2.getX() < 20:
player2.setX(20)
if player1.getX() > (800-20):
player1.setX(800-20)
if player2.getX() > (800-20):
player2.setX(800-20)
if(collide(player1.currentSprite(),player2.currentSprite())):
                # they merely overlap: push the fighters apart
if ( (player1.isWalking() or player1.isJumping()) and (player2.isDancing() or player2.isCrouching() or player2.isWalking()) ) or ((player2.isWalking() or player2.isJumping()) and (player1.isDancing() or player1.isCrouching() or player2.isWalking()) ) or (player1.isWalking() and player2.isWalking()) or (player1.isJumping() and player2.isJumping()) or (player1.isDancing() and player2.isDancing()) or (player2.isSpecialMove() and player1.ishitSpecial()):
player1.setX(x1-8)
if not player2.isSpecialMove() :player2.setX(x2+8)
                # a weak punch landed:
if ( player1.isApunching() and (player2.isWalking() or player2.isDancing() or player2.isApunching() or player2.ishitSpecial()) ) or ( player2.isApunching() and (player1.isWalking() or player1.isDancing() or player1.isApunching()) ):
if player1.isApunching():
player2.takeHit("Apunching")
if player2.isApunching():
player1.takeHit("Apunching")
print("socofraco")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a strong punch landed:
if ( player1.isBpunching() and (player2.isWalking() or player2.isDancing() or player2.isBpunching()) ) or ( player2.isBpunching() and (player1.isWalking() or player1.isDancing() or player1.isBpunching()) ):
if player1.isBpunching():
player2.takeHit("Bpunching")
if player2.isBpunching():
player1.takeHit("Bpunching")
print("socoforte")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a weak kick landed:
if ( player1.isAkicking() and (player2.isWalking() or player2.isDancing() or player2.isAkicking() or player2.isCrouching()) and not player2.isBblocking() ) or ( player2.isAkicking() and (player1.isWalking() or player1.isDancing() or player1.isAkicking() or player1.isCrouching() and not player1.isBblocking()) ):
if player1.isAkicking():
player2.takeHit("Akicking")
if player2.isAkicking():
player1.takeHit("Akicking")
print("chutefraco")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a strong kick landed:
if ( player1.isBkicking() and (player2.isWalking() or player2.isDancing() or player2.isBkicking()) ) or ( player2.isBkicking() and (player1.isWalking() or player1.isDancing() or player1.isBkicking()) ):
if player1.isBkicking():
player2.takeHit("Bkicking")
if player2.isBkicking():
player1.takeHit("Bkicking")
print("chuteforte")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a standing block absorbed the hit:
if ( (player1.isApunching() or player1.isBpunching() or player1.isDpunching() or player1.isAkicking() or player1.isBkicking() ) and player2.isAblocking() ) or ( (player2.isApunching() or player2.isBpunching() or player1.isDpunching() or player2.isAkicking() or player2.isBkicking() ) and player1.isAblocking() ):
if player1.isAblocking():
player1.takeHit("Ablocking")
if player2.isAblocking():
player2.takeHit("Ablocking")
engine.Sound("block").play()
player1.setX(x1-12)
player2.setX(x2+12)
print("ablock")
                # a weak crouching punch/kick hit a standing opponent:
                if ( ((player1.isCpunching() or player1.isCkicking() ) and not player2.isCrouching() and not player2.isBblocking() ) or ((player2.isCpunching() or player2.isCkicking() ) and not player1.isCrouching() and not player1.isBblocking() ) ): # TODO: the Bblock case still needs to be added here
if player1.isCpunching() or player1.isCkicking():
player2.takeHit("Cpunching")
if player2.isCpunching() or player2.isCkicking():
player1.takeHit("Cpunching")
print("socofraco!!!!!!!")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a strong crouching punch hit a standing opponent:
if ( (player1.isDpunching() and (not player2.isAblocking() and not player2.isBblocking()) ) or player2.isDpunching() and (not player1.isAblocking() and not player1.isBblocking()) ):
if player1.isDpunching():
player2.takeHit("Bkicking")
if player2.isDpunching():
player1.takeHit("Bkicking")
print("socofraco$#$")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a strong crouching kick hit a standing opponent:
if ( player1.isDkicking() or player2.isDkicking() ):
if player1.isDkicking():
player2.takeHit("Dkicking")
if player2.isDkicking():
player1.takeHit("Dkicking")
print("socofraco")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a weak crouching punch/kick hit a crouching opponent:
if ( ( (player1.isCpunching() or player1.isCkicking()) and player2.isCrouching() and not player2.isBblocking() ) or ( (player2.isCpunching() or player2.isCkicking()) and player1.isCrouching() and not player1.isBblocking() ) ):
if player1.isCpunching() or player1.isCkicking():
player2.takeDownHit("Ehit")
if player2.isCpunching() or player2.isCkicking():
player1.takeDownHit("Ehit")
print("socofraco**")
engine.Sound("Hit0").play()
if hitCounter == 0: engine.Sound().roundHit()
hitCounter = (hitCounter+1) % 5
                # a crouching block absorbed the hit:
if ( (player1.isCpunching() or player1.isDpunching() or player1.isAkicking() or player1.isCkicking() ) and player2.isBblocking() ) or ( (player2.isCpunching() or player2.isDpunching() or player2.isAkicking() or player2.isCkicking() ) and player1.isBblocking() ):
if player1.isBblocking():
player1.takeDownHit("Bblocking")
if player2.isBblocking():
player2.takeDownHit("Bblocking")
engine.Sound("block").play()
player1.setX(x1-12)
player2.setX(x2+12)
print("bblock")
                # a special (projectile) attack is in play
                if ( player1.isSpecialMove() and (player2.isWalking() or player2.isDancing() or player2.isAblocking()) ) or ( player2.isSpecialMove() and (player1.isWalking() or player1.isDancing() or player1.isAblocking()) ):
                    if player1.isSpecialMove() and collide(player1.getProjectile().getProjectileSprite(), player2.currentSprite()): # player1's projectile reached player2
                        player1.getProjectile().endProjectile()
                        if not player2.isAblocking(): player2.takeHit("special")
                    if player2.isSpecialMove() and collide(player2.getProjectile().getProjectileSprite(), player1.currentSprite()): # player2's projectile reached player1
                        player2.getProjectile().endProjectile() # mirrors the player1 branch: stop the projectile once it connects
                        if not player1.isAblocking(): player1.takeHit("special")
                    print("special")
if ( player2.ishitSpecial() and specialCounter <= 40 ):
player2.takeHit("special")
player2.life.addDamage(-5)
specialCounter+=1
if specialCounter == 41: specialCounter = 1
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
if keyPressed("backspace"):
pygame.quit()
if keyPressed("esc"):
self.goBack(player1,player2)
def addFigther(self,scenario):
player1 = _fighter.Fighter(0,scenario) # 0: subzero
player2 = _fighter.Fighter(1,scenario) # 1: scorpion
return player1,player2
def goBack(self,player1,player2):
player1.killPlayer()
player2.killPlayer()
del(player1)
del(player2)
sound = engine.Sound("back")
sound.play()
pygame.mixer.music.stop()
music = engine.Music("intro")
music.play()
music.volume(0.5)
menu.ScenarioMenu()
def collide(sprite1,sprite2):
return pygame.sprite.collide_mask(sprite1,sprite2)
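# Note: pygame.sprite.collide_mask returns the first point of overlap between
# the two sprites' masks, or None when they do not touch, so judge() can use
# the return value directly as a boolean.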
| [
"[email protected]"
] | |
ba19fd6a44365f2760be541f3b378daf2577f033 | d19731cd2271ea9cba5f07935964e798c8a4f73b | /modules/compiled/tests/test_condensate.py | 5b23b7518e4fc9ccad8c03922c8295888b33cd79 | [] | no_license | jkiesele/HGCalML | 3949532f6b3f6a501670ffb60cedf8474caa934c | 5101b8cdca876a92a9dc87ceca9598bbbfaf980e | refs/heads/master | 2023-07-25T03:21:36.973479 | 2021-10-19T08:12:04 | 2021-10-19T08:12:04 | 186,409,038 | 0 | 5 | null | 2020-02-27T15:00:55 | 2019-05-13T11:46:33 | Python | UTF-8 | Python | false | false | 2,956 | py | from condensate_op import BuildCondensates
import tensorflow as tf
from ragged_plotting_tools import make_cluster_coordinates_plot
import matplotlib.pyplot as plt
import numpy as np
import time
print('starting test')
n_vert=4000
n_ccoords=2
n_feat=3
soft=False
radius=0.7
betas = tf.random.uniform((n_vert,1), dtype='float32',minval=0.01 , maxval=0.1+1e-3,seed=2)
ccoords = 3.*tf.random.uniform((n_vert,n_ccoords), dtype='float32',seed=1)
row_splits = tf.constant([0,n_vert//2,n_vert], dtype='int32')
print('first call')
asso_idx, is_cpoint,n = BuildCondensates(ccoords=ccoords, betas=betas, row_splits=row_splits, radius=radius, min_beta=0.1, soft=soft)
#print(ccoords)
#print(asso_idx)
#print(is_cpoint)
#exit()
print('starting timing')
t0 = time.time()
for _ in range(0): # timing loop currently disabled; use range(100) to match the /100. average below
asso_idx, is_cpoint,n = BuildCondensates(ccoords=ccoords, betas=betas, row_splits=row_splits, radius=radius, min_beta=0.1, soft=soft)
totaltime = (time.time()-t0)/100.
print('op time', totaltime)
#exit()
#exit()
#print('betas',betas)
#print('ccoords',ccoords)
#print('summed_features',summed_features)
#print('asso_idx',asso_idx)
#print('n condensates', tf.unique(asso_idx))
def makecolours(asso):
uasso = np.unique(asso)
cols = asso.copy()
for i in range(len(uasso)):
cols[asso == uasso[i]] = i
return np.array(cols,dtype='float')
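# Illustrative example (not in the original script): makecolours remaps
# arbitrary condensation-point indices to consecutive colour ids, e.g.
#   makecolours(np.array([7, 3, 7, 9]))  # -> array([1., 0., 1., 2.])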
for radius in [0.6, 1.3]:
asso_idx, is_cpoint,n = BuildCondensates(ccoords=ccoords, betas=betas, row_splits=row_splits,
radius=radius, min_beta=0.1, soft=soft)
print('refs', np.unique(asso_idx))
print('N',n)
for i in range(len(row_splits)-1):
truthHitAssignementIdx = np.array(asso_idx[row_splits[i]:row_splits[i+1]].numpy())
ncond = n[i:i+1]
print('N condensates', ncond.numpy())
truthHitAssignementIdx = makecolours(truthHitAssignementIdx)+1.
predBeta = betas[row_splits[i]:row_splits[i+1]].numpy()
#predBeta = np.ones_like(predBeta,dtype='float')-1e-2
predCCoords = ccoords[row_splits[i]:row_splits[i+1]].numpy()
fig = plt.figure(figsize=(5,4))
ax = fig.add_subplot(111)
make_cluster_coordinates_plot(plt, ax,
truthHitAssignementIdx, #[ V ] or [ V x 1 ]
predBeta, #[ V ] or [ V x 1 ]
predCCoords, #[ V x 2 ]
identified_coords=None,
beta_threshold=0.1,
distance_threshold=radius,
cmap=None,
noalpha=True
)
plt.show()
#plt.savefig("plot_"+str(i)+"_rad_"+str(radius)+".pdf")
fig.clear()
plt.close(fig)
#exit()
| [
"[email protected]"
] | |
657533c490b8360ae94f5471ae1d4efe763c667d | 74da166dc375be56a83b175cdc17d559a262dba1 | /airflow_client/client/model/scheduler_status.py | 5625b88b7b98abb6abc2314899ba41f859629bcf | [
"Apache-2.0"
] | permissive | zjiaksmc/airflow-client-python | eec72956abaede23b8865f5d09f03e242dc86467 | f8d7f958578fd1c63cef41d6b3f5447dc45bdfe6 | refs/heads/master | 2023-08-25T16:50:27.535880 | 2021-10-25T07:03:06 | 2021-10-25T07:03:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,761 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Airflow API (Stable)
# Overview To facilitate management, Apache Airflow supports a range of REST API endpoints across its objects. This section provides an overview of the API design, methods, and supported use cases. Most of the endpoints accept `JSON` as input and return `JSON` responses. This means that you must usually add the following headers to your request: ``` Content-type: application/json Accept: application/json ``` ## Resources The term `resource` refers to a single type of object in the Airflow metadata. An API is broken up by its endpoint's corresponding resource. The name of a resource is typically plural and expressed in camelCase. Example: `dagRuns`. Resource names are used as part of endpoint URLs, as well as in API parameters and responses. ## CRUD Operations The platform supports **C**reate, **R**ead, **U**pdate, and **D**elete operations on most resources. You can review the standards for these operations and their standard parameters below. Some endpoints have special behavior as exceptions. ### Create To create a resource, you typically submit an HTTP `POST` request with the resource's required metadata in the request body. The response returns a `201 Created` response code upon success with the resource's metadata, including its internal `id`, in the response body. ### Read The HTTP `GET` request can be used to read a resource or to list a number of resources. A resource's `id` can be submitted in the request parameters to read a specific resource. The response usually returns a `200 OK` response code upon success, with the resource's metadata in the response body. If a `GET` request does not include a specific resource `id`, it is treated as a list request. The response usually returns a `200 OK` response code upon success, with an object containing a list of resources' metadata in the response body. When reading resources, some common query parameters are usually available. e.g.: ``` v1/connections?limit=25&offset=25 ``` |Query Parameter|Type|Description| |---------------|----|-----------| |limit|integer|Maximum number of objects to fetch. Usually 25 by default| |offset|integer|Offset after which to start returning objects. For use with limit query parameter.| ### Update Updating a resource requires the resource `id`, and is typically done using an HTTP `PATCH` request, with the fields to modify in the request body. The response usually returns a `200 OK` response code upon success, with information about the modified resource in the response body. ### Delete Deleting a resource requires the resource `id` and is typically executing via an HTTP `DELETE` request. The response usually returns a `204 No Content` response code upon success. ## Conventions - Resource names are plural and expressed in camelCase. - Names are consistent between URL parameter name and field name. - Field names are in snake_case. ```json { \"name\": \"string\", \"slots\": 0, \"occupied_slots\": 0, \"used_slots\": 0, \"queued_slots\": 0, \"open_slots\": 0 } ``` ### Update Mask Update mask is available as a query parameter in patch endpoints. It is used to notify the API which fields you want to update. Using `update_mask` makes it easier to update objects by helping the server know which fields to update in an object instead of updating all fields. The update request ignores any fields that aren't specified in the field mask, leaving them with their current values. 
Example: ``` resource = request.get('/resource/my-id').json() resource['my_field'] = 'new-value' request.patch('/resource/my-id?update_mask=my_field', data=json.dumps(resource)) ``` ## Versioning and Endpoint Lifecycle - API versioning is not synchronized to specific releases of the Apache Airflow. - APIs are designed to be backward compatible. - Any changes to the API will first go through a deprecation phase. # Summary of Changes | Airflow version | Description | |-|-| | v2.0 | Initial release | | v2.0.2 | Added /plugins endpoint | | v2.1 | New providers endpoint | # Trying the API You can use a third party client, such as [curl](https://curl.haxx.se/), [HTTPie](https://httpie.org/), [Postman](https://www.postman.com/) or [the Insomnia rest client](https://insomnia.rest/) to test the Apache Airflow API. Note that you will need to pass credentials data. For e.g., here is how to pause a DAG with [curl](https://curl.haxx.se/), when basic authorization is used: ```bash curl -X POST 'https://example.com/api/v1/dags/{dag_id}?update_mask=is_paused' \\ -H 'Content-Type: application/json' \\ --user \"username:password\" \\ -d '{ \"is_paused\": true }' ``` Using a graphical tool such as [Postman](https://www.postman.com/) or [Insomnia](https://insomnia.rest/), it is possible to import the API specifications directly: 1. Download the API specification by clicking the **Download** button at top of this document 2. Import the JSON specification in the graphical tool of your choice. - In *Postman*, you can click the **import** button at the top - With *Insomnia*, you can just drag-and-drop the file on the UI Note that with *Postman*, you can also generate code snippets by selecting a request and clicking on the **Code** button. ## Enabling CORS [Cross-origin resource sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) is a browser security feature that restricts HTTP requests that are initiated from scripts running in the browser. For details on enabling/configuring CORS, see [Enabling CORS](https://airflow.apache.org/docs/apache-airflow/stable/security/api.html). # Authentication To be able to meet the requirements of many organizations, Airflow supports many authentication methods, and it is even possible to add your own method. If you want to check which auth backend is currently set, you can use `airflow config get-value api auth_backend` command as in the example below. ```bash $ airflow config get-value api auth_backend airflow.api.auth.backend.basic_auth ``` The default is to deny all requests. For details on configuring the authentication, see [API Authorization](https://airflow.apache.org/docs/apache-airflow/stable/security/api.html). # Errors We follow the error response format proposed in [RFC 7807](https://tools.ietf.org/html/rfc7807) also known as Problem Details for HTTP APIs. As with our normal API responses, your client must be prepared to gracefully handle additional members of the response. ## Unauthenticated This indicates that the request has not been applied because it lacks valid authentication credentials for the target resource. Please check that you have valid credentials. ## PermissionDenied This response means that the server understood the request but refuses to authorize it because it lacks sufficient rights to the resource. It happens when you do not have the necessary permission to execute the action you performed. You need to get the appropriate permissions in other to resolve this error. 
## BadRequest This response means that the server cannot or will not process the request due to something that is perceived to be a client error (e.g., malformed request syntax, invalid request message framing, or deceptive request routing). To resolve this, please ensure that your syntax is correct. ## NotFound This client error response indicates that the server cannot find the requested resource. ## MethodNotAllowed Indicates that the request method is known by the server but is not supported by the target resource. ## NotAcceptable The target resource does not have a current representation that would be acceptable to the user agent, according to the proactive negotiation header fields received in the request, and the server is unwilling to supply a default representation. ## AlreadyExists The request could not be completed due to a conflict with the current state of the target resource, meaning that the resource already exists ## Unknown This means that the server encountered an unexpected condition that prevented it from fulfilling the request. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from airflow_client.client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
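# HealthStatus is imported lazily (in lazy_import below) rather than at module
# import time; the generator emits this pattern to avoid circular imports
# between model modules that reference each other.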
def lazy_import():
from airflow_client.client.model.health_status import HealthStatus
globals()['HealthStatus'] = HealthStatus
class SchedulerStatus(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is the attribute name
          and the value is the JSON key in the definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns
            openapi_types (dict): The key is the attribute name
                and the value is the attribute type.
"""
lazy_import()
return {
'status': (HealthStatus,), # noqa: E501
'latest_scheduler_heartbeat': (str, none_type,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'status': 'status', # noqa: E501
'latest_scheduler_heartbeat': 'latest_scheduler_heartbeat', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""SchedulerStatus - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
                                  When traveling through a discriminator, the
                                  composed schema that is traveled through is
                                  added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
status (HealthStatus): [optional] # noqa: E501
            latest_scheduler_heartbeat (str, none_type): The time the scheduler last performed a heartbeat. [optional]  # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
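# --- Usage sketch (illustrative addition, not produced by the generator) -----
# A minimal, hypothetical example of building the model by hand; in normal use,
# instances are created by the client when deserializing /health responses.
if __name__ == "__main__":
    from airflow_client.client.model.health_status import HealthStatus

    scheduler_status = SchedulerStatus(
        status=HealthStatus("healthy"),
        latest_scheduler_heartbeat="2021-01-01T00:00:00+00:00",
    )
    print(scheduler_status.status)
    print(scheduler_status.latest_scheduler_heartbeat)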
| [
"[email protected]"
] | |
010f5a2d71de95257e26364f8a2da83460f9bb81 acb8e84e3b9c987fcab341f799f41d5a5ec4d587 /langs/7/qhj.py c11e1b4d27f522c3567d8b971bfa85f10e4d1f24 [] no_license G4te-Keep3r/HowdyHackers 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 refs/heads/master 2020-08-01T12:08:10.782018 2016-11-13T20:45:50 2016-11-13T20:45:50 73,624,224 0 1 null null null null UTF-8 Python false false 486 py
import sys
def printFunction(lineRemaining):
    # Expects the tokens after 'qHJ', wrapped in standalone quote tokens,
    # e.g. ['"', 'hello', 'world', '"'].
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Drop the surrounding quote tokens and print the words.
            lineRemaining = lineRemaining[1:-1]
            print(' '.join(lineRemaining))
        else:
            # Nothing between the quotes: print an empty line.
            print()
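# Example (hypothetical) input accepted by main() below; each line must start
# with the literal token 'qHJ', followed by a space-delimited, quote-wrapped
# message:
#
#   qHJ " hello world "
#   qHJ " "
#
# Running `python qhj.py input.txt` on such a file prints "hello world" and
# then a blank line. The file name and contents are illustrative assumptions,
# not part of the original repository.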
def main(fileName):
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if not data:
                # Skip blank lines instead of crashing on data[0].
                continue
            if data[0] == 'qHJ':
                printFunction(data[1:])
            else:
                print('ERROR')
                return
if __name__ == '__main__':
    main(sys.argv[1])
| [
"[email protected]"
] | |
acf208104074450f635a9fcd79eae320b3a08fc3 34a0d7fc7ec039cf57bf0a33bc5abb423dfdc97f /s3_app.py db98d4ba7e241119900adc70c77bb3ea1d0c6e02 [] no_license jrieke/db-playbooks 422330a6e5cc6cebc105bcc704a265fc1164df65 9128b6bce9fa53c4ef89bc727f5b7b3e300b6904 refs/heads/main 2023-04-17T23:19:47.714623 2021-05-07T21:49:50 2021-05-07T21:49:50 357,368,855 0 0 null null null null UTF-8 Python false false 813 py
import streamlit as st
import s3fs
import time

start_time = time.time()

# Create connection object.
# `anon=False` means not anonymous, i.e. it uses auth to pull data.
fs = s3fs.S3FileSystem(anon=False)

# Bare f-strings are rendered to the page by Streamlit's "magic" output.
f"Creating fs took {time.time() - start_time} s"

start_time_read = time.time()

# Retrieve file contents, cached for 10 minutes so reruns don't re-download.
@st.cache(ttl=600)
def read_file(filename):
    with fs.open(filename) as f:
        return f.read().decode("utf-8")

content = read_file("testbucket-jrieke/myfile.csv")
st.write(content)

# Print results: each CSV line is "name,pet"; :pet: renders as an emoji shortcode.
for line in content.strip().split("\n"):
    name, pet = line.split(",")
    st.write(f"{name} has a :{pet}:")
f"Reading file took {time.time() - start_time_read} s"
| [
"[email protected]"
] |