blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
288de51aecb74984b26d4c45de7430cecdb35121 | 6c48ad953031fd6be870e8bd8775538b9ac7033e | /python/demo06/demo06_multi_inherit.py | 4d65f096931f3671905f96e664f5289bfe015bca | [] | no_license | yeswhos/Code-Practice | b080c9484f510d02c2d78e388fc03eedc397aa7b | 0fd8263a5c87dbd0e8b1dd5a38f32a188870308b | refs/heads/master | 2023-04-08T13:11:06.105039 | 2023-03-16T11:34:03 | 2023-03-16T11:34:03 | 247,809,031 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | class A:
def demo(self):
print("self方法")
class B:
def test(self):
print("test 方法")
class C(A, B):
pass
c = C()
c.demo()
c.test() | [
"[email protected]"
] | |
e6c752bba55bc005223a795821bd8aa1cb76ec92 | 41c9fde93aeb2afbbe10032a6a5b326573675220 | /notify/models.py | cd84534c00eb3f0ca55493ff698a79c8a5e6542a | [] | no_license | yindashan/nuri | 99dd6c2b944c9014391817018bf9406c40699cfd | a9bda6e88d1dd0db3517d068f540dba6c64bcc74 | refs/heads/master | 2021-01-22T11:46:54.758649 | 2014-10-20T10:36:19 | 2014-10-20T10:36:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,613 | py | # -*- coding:utf-8 -*-
# 所有主机和硬件报警都通过此模块发出
import json
from celery import task
from utils.sms import sms
from utils.mail import send_mail
from appitem.models import AppService, AppRelation
from datetime import datetime
from django.conf import settings
# 异步处理主机存活事件的通知
@task
def notify_host(criterion, host, state, now_time):
# 1. 产生通知信息
dd = {}
dd['time'] = now_time.strftime('%Y-%m-%d %H:%M:%S')
dd['host'] = host
if state == 'UP':
dd['type'] = 'RECOVERY'
dd['information'] = 'OK - The host is up now.'
else:
dd['type'] = 'PROBLEM'
dd['information'] = 'CRITICAL - The host may be down.'
dd['state'] = state
settings.REDIS_DB.rpush('host_event', json.dumps(dd))
# 2. 触发报警
host_ip = settings.REDIS_DB.hget('host_ip', host)
message = "%s Host Alert: IP: %s is %s" % (dd['type'], host_ip, state)
if criterion.email_list:
send_mail(u'技术支持中心--运维监控中心', criterion.email_list.split(','), message, message)
if criterion.mobile_list:
sms(criterion.mobile_list.split(','), message)
# 向应用的业务运维 发出 主机up/down 事件报警
def alert_host4app(appname, host_ip, ntype, state):
message = "%s Host Alert: IP: %s for %s is %s" % (ntype, host_ip, appname, state)
try:
app = AppService.objects.get(app_name = appname)
except BaseException:
return
email_list = change(app.email_list)
mobile_list = change(app.mobile_list)
# 当前逻辑, 子应用会继承父应用的报警人信息
# 获取自己的父应用--单继承
rel_list = AppRelation.objects.filter(child_app=app)
for item in rel_list:
email_list.extend(change(item.parent_app.email_list))
mobile_list.extend(change(item.parent_app.mobile_list))
if email_list:
send_mail(u"技术支持中心--运维监控中心", email_list, message, message)
if mobile_list:
sms(mobile_list, message)
# 如果item_list 是逗号分隔的字符串,就返回一个列表
# 否则返回一个空列表
def change(item_list):
item_list = item_list.strip()
if item_list:
return item_list.split(',')
return []
# 异步处理应用事件的通知
# 异步处理应用报警
@task
def notify_app(appname, host, ntype, state, info):
# 1. 产生应用通知信息
dd = {}
dd['time'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
dd['host'] = host
dd['appname'] = appname
dd['type'] = ntype
dd['state'] = state
dd['information'] = info
settings.REDIS_DB.rpush('notification', json.dumps(dd))
# 2. 触发报警
# 检查主机状态,如果主机状态down, 则将主机事件发给应用的报警人
# 其它情况正常发送应用报警短信
app = None
try:
app = AppService.objects.get(app_name=appname)
except BaseException:
# FIXME
return
if app.is_alarm == 1 :
status = settings.REDIS_DB.hget('host_alive_' + host, 'current_status')
host_ip = settings.REDIS_DB.hget('host_ip', host)
if status is None or status == 'UP':
alert_app(appname, host_ip, ntype, state, info)
else:
alert_host4app(appname, host_ip, 'PROBLEM', 'DOWN')
def alert_app(appname, host_ip, ntype, state, info):
app = None
try:
app = AppService.objects.get(app_name = appname)
except BaseException:
return
email_list = change(app.email_list)
mobile_list = change(app.mobile_list)
# 当前逻辑, 子应用会继承父应用的报警人信息
# 获取自己的父应用--单继承
rel_list = AppRelation.objects.filter(child_app=app)
for item in rel_list:
email_list.extend(change(item.parent_app.email_list))
mobile_list.extend(change(item.parent_app.mobile_list))
if email_list:
alert_app_mail(appname, host_ip, ntype, state, info, email_list)
# 当前只有 CRITICAL 才触发短信报警
if state == 'CRITICAL' and mobile_list:
alert_app_sms(appname, host_ip, ntype, state, mobile_list)
# 应用邮件通知
def alert_app_mail(appname, host_ip, notify_type, state, info, email_list):
subject = gen_subject(appname, host_ip, notify_type, state)
content = gen_mail_content(appname, host_ip, notify_type, state, info)
# 发送邮件
content = content.replace('\\n','\n')
if email_list:
send_mail(u"技术支持中心--运维监控中心", email_list, subject, content)
# 应用短信通知
def alert_app_sms(appname, host_ip, notify_type, state, mobile_list):
message = gen_subject(appname, host_ip, notify_type, state)
if mobile_list:
sms(mobile_list, message)
def gen_subject(appname, host_ip, notify_type, state):
subject = "***%s Service Alert: %s / %s is %s***" % (notify_type, host_ip, appname, state)
return subject
def gen_mail_content(appname, host_ip, notify_type, state, info):
ll = []
ll.append("Notification Type: %s\n" % notify_type)
ll.append("Service: %s\n" % appname)
ll.append("Host: %s\n" % host_ip)
ll.append("State: %s\n" % state)
curr_time = datetime.now()
ll.append("Date/Time: %s\n" % curr_time.strftime("%Y-%m-%d %H:%M:%S"))
ll.append("\n")
ll.append("Additional Info:\n")
if info:
ll.append(info)
else:
ll.append("null")
return ''.join(ll)
| [
"="
] | = |
11155037fa8a2a647ca6bb086624a0138afc34dc | 3d4fcc7cbfafc4aaebea8e08d3a084ed0f0d06a1 | /Programme_2/Creation_donnees/MIDI/grieg_berceusefMidiComplet.py | c3ece892e9ad8431e2d89671e0cefe3dd8e43248 | [] | no_license | XgLsuLzRMy/Composition-Musicale-par-Reseau-de-Neurones | 0421d540efe2d9dc522346810f6237c5f24fa3bf | 518a6485e2ad44e8c7fbae93c94a9dc767454a83 | refs/heads/master | 2021-09-03T20:43:01.218089 | 2018-01-11T20:02:00 | 2018-01-11T20:02:00 | 106,448,584 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 133,069 | py | midi.Pattern(format=1, resolution=480, tracks=\
[midi.Track(\
[midi.TrackNameEvent(tick=0, text='Grieg: Berceuse Opus 38 Nr. 1', data=[71, 114, 105, 101, 103, 58, 32, 66, 101, 114, 99, 101, 117, 115, 101, 32, 79, 112, 117, 115, 32, 51, 56, 32, 78, 114, 46, 32, 49]),
midi.CopyrightMetaEvent(tick=0, text='Copyright © 2010 by Bernd Krueger', data=[67, 111, 112, 121, 114, 105, 103, 104, 116, 32, 169, 32, 50, 48, 49, 48, 32, 98, 121, 32, 66, 101, 114, 110, 100, 32, 75, 114, 117, 101, 103, 101, 114]),
midi.TextMetaEvent(tick=0, text='Allegretto tranquillo', data=[65, 108, 108, 101, 103, 114, 101, 116, 116, 111, 32, 116, 114, 97, 110, 113, 117, 105, 108, 108, 111]),
midi.TextMetaEvent(tick=0, text='Erstellt: 12.9.2012\n', data=[69, 114, 115, 116, 101, 108, 108, 116, 58, 32, 49, 50, 46, 57, 46, 50, 48, 49, 50, 10]),
midi.TextMetaEvent(tick=0, text='Dauer: 2:38 Minuten\n', data=[68, 97, 117, 101, 114, 58, 32, 50, 58, 51, 56, 32, 77, 105, 110, 117, 116, 101, 110, 10]),
midi.TimeSignatureEvent(tick=0, data=[2, 2, 24, 8]),
midi.KeySignatureEvent(tick=0, data=[1, 0]),
midi.SetTempoEvent(tick=0, data=[14, 72, 101]),
midi.SetTempoEvent(tick=1920, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[14, 220, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=320, data=[16, 81, 207]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[16, 157, 161]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 88, 140]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 39, 117]),
midi.SetTempoEvent(tick=160, data=[13, 139, 22]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[16, 8, 164]),
midi.SetTempoEvent(tick=200, data=[14, 44, 25]),
midi.SetTempoEvent(tick=120, data=[14, 44, 25]),
midi.SetTempoEvent(tick=720, data=[16, 8, 164]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[16, 44, 231]),
midi.SetTempoEvent(tick=120, data=[13, 217, 196]),
midi.SetTempoEvent(tick=120, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[13, 244, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=240, data=[15, 27, 147]),
midi.SetTempoEvent(tick=120, data=[14, 251, 237]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[15, 229, 3]),
midi.SetTempoEvent(tick=120, data=[13, 244, 202]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[16, 8, 164]),
midi.SetTempoEvent(tick=120, data=[14, 160, 2]),
midi.SetTempoEvent(tick=120, data=[14, 190, 40]),
midi.SetTempoEvent(tick=720, data=[17, 20, 171]),
midi.SetTempoEvent(tick=120, data=[15, 92, 119]),
midi.SetTempoEvent(tick=440, data=[15, 193, 254]),
midi.SetTempoEvent(tick=160, data=[16, 90, 4]),
midi.SetTempoEvent(tick=240, data=[17, 156, 20]),
midi.SetTempoEvent(tick=80, data=[19, 165, 130]),
midi.SetTempoEvent(tick=40, data=[20, 148, 10]),
midi.SetTempoEvent(tick=120, data=[18, 34, 2]),
midi.SetTempoEvent(tick=480, data=[21, 140, 1]),
midi.SetTempoEvent(tick=60, data=[22, 16, 244]),
midi.SetTempoEvent(tick=420, data=[14, 72, 101]),
midi.SetTempoEvent(tick=1920, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[14, 220, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=320, data=[16, 81, 207]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[16, 157, 161]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 88, 140]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 39, 117]),
midi.SetTempoEvent(tick=160, data=[13, 139, 22]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[16, 8, 164]),
midi.SetTempoEvent(tick=200, data=[14, 44, 25]),
midi.SetTempoEvent(tick=120, data=[14, 44, 25]),
midi.SetTempoEvent(tick=720, data=[16, 8, 164]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[16, 44, 231]),
midi.SetTempoEvent(tick=120, data=[13, 217, 196]),
midi.SetTempoEvent(tick=120, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[13, 244, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=240, data=[15, 27, 147]),
midi.SetTempoEvent(tick=120, data=[14, 251, 237]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[15, 229, 3]),
midi.SetTempoEvent(tick=120, data=[13, 244, 202]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[16, 8, 164]),
midi.SetTempoEvent(tick=120, data=[14, 160, 2]),
midi.SetTempoEvent(tick=120, data=[14, 190, 40]),
midi.SetTempoEvent(tick=720, data=[18, 70, 37]),
midi.SetTempoEvent(tick=120, data=[15, 229, 3]),
midi.SetTempoEvent(tick=440, data=[15, 193, 254]),
midi.SetTempoEvent(tick=40, data=[16, 157, 161]),
midi.SetTempoEvent(tick=120, data=[17, 71, 1]),
midi.SetTempoEvent(tick=240, data=[17, 20, 171]),
midi.SetTempoEvent(tick=80, data=[19, 165, 130]),
midi.SetTempoEvent(tick=40, data=[22, 85, 220]),
midi.SetTempoEvent(tick=120, data=[18, 176, 36]),
midi.SetTempoEvent(tick=480, data=[24, 107, 171]),
midi.KeySignatureEvent(tick=480, data=[254, 0]),
midi.SetTempoEvent(tick=0, data=[9, 13, 144]),
midi.MarkerEvent(tick=0, text='Con moto', data=[67, 111, 110, 32, 109, 111, 116, 111]),
midi.SetTempoEvent(tick=240, data=[8, 152, 75]),
midi.SetTempoEvent(tick=120, data=[9, 125, 67]),
midi.SetTempoEvent(tick=120, data=[8, 218, 231]),
midi.SetTempoEvent(tick=240, data=[9, 93, 242]),
midi.SetTempoEvent(tick=240, data=[8, 246, 223]),
midi.SetTempoEvent(tick=240, data=[8, 126, 146]),
midi.SetTempoEvent(tick=120, data=[9, 109, 128]),
midi.SetTempoEvent(tick=120, data=[8, 218, 231]),
midi.SetTempoEvent(tick=240, data=[9, 78, 150]),
midi.SetTempoEvent(tick=120, data=[9, 109, 128]),
midi.SetTempoEvent(tick=120, data=[8, 152, 75]),
midi.SetTempoEvent(tick=240, data=[8, 53, 68]),
midi.SetTempoEvent(tick=120, data=[9, 93, 242]),
midi.SetTempoEvent(tick=120, data=[8, 152, 75]),
midi.SetTempoEvent(tick=240, data=[8, 29, 203]),
midi.SetTempoEvent(tick=120, data=[9, 19, 78]),
midi.SetTempoEvent(tick=120, data=[8, 165, 44]),
midi.SetTempoEvent(tick=240, data=[8, 41, 118]),
midi.SetTempoEvent(tick=120, data=[8, 246, 223]),
midi.SetTempoEvent(tick=120, data=[9, 78, 150]),
midi.SetTempoEvent(tick=240, data=[9, 157, 38]),
midi.SetTempoEvent(tick=120, data=[10, 93, 103]),
midi.SetTempoEvent(tick=120, data=[8, 250, 5]),
midi.SetTempoEvent(tick=720, data=[10, 15, 213]),
midi.SetTempoEvent(tick=240, data=[9, 63, 170]),
midi.SetTempoEvent(tick=480, data=[10, 197, 173]),
midi.SetTempoEvent(tick=240, data=[12, 252, 241]),
midi.SetTempoEvent(tick=240, data=[12, 12, 74]),
midi.SetTempoEvent(tick=1920, data=[9, 13, 144]),
midi.SetTempoEvent(tick=240, data=[8, 152, 75]),
midi.SetTempoEvent(tick=120, data=[9, 125, 67]),
midi.SetTempoEvent(tick=120, data=[8, 218, 231]),
midi.SetTempoEvent(tick=240, data=[9, 93, 242]),
midi.SetTempoEvent(tick=240, data=[8, 246, 223]),
midi.SetTempoEvent(tick=240, data=[8, 126, 146]),
midi.SetTempoEvent(tick=120, data=[9, 109, 128]),
midi.SetTempoEvent(tick=120, data=[8, 218, 231]),
midi.SetTempoEvent(tick=240, data=[9, 78, 150]),
midi.SetTempoEvent(tick=120, data=[9, 109, 128]),
midi.SetTempoEvent(tick=120, data=[8, 152, 75]),
midi.SetTempoEvent(tick=240, data=[8, 53, 68]),
midi.SetTempoEvent(tick=120, data=[9, 93, 242]),
midi.SetTempoEvent(tick=120, data=[8, 152, 75]),
midi.SetTempoEvent(tick=240, data=[8, 29, 203]),
midi.SetTempoEvent(tick=120, data=[9, 19, 78]),
midi.SetTempoEvent(tick=120, data=[8, 165, 44]),
midi.SetTempoEvent(tick=240, data=[8, 41, 118]),
midi.SetTempoEvent(tick=120, data=[8, 246, 223]),
midi.SetTempoEvent(tick=120, data=[9, 78, 150]),
midi.SetTempoEvent(tick=240, data=[9, 157, 38]),
midi.SetTempoEvent(tick=120, data=[10, 93, 103]),
midi.SetTempoEvent(tick=120, data=[9, 75, 175]),
midi.SetTempoEvent(tick=480, data=[9, 163, 93]),
midi.SetTempoEvent(tick=240, data=[10, 88, 155]),
midi.SetTempoEvent(tick=240, data=[9, 189, 158]),
midi.SetTempoEvent(tick=480, data=[11, 8, 35]),
midi.SetTempoEvent(tick=240, data=[13, 144, 189]),
midi.SetTempoEvent(tick=240, data=[12, 74, 101]),
midi.SetTempoEvent(tick=1916, data=[15, 34, 155]),
midi.TimeSignatureEvent(tick=4, data=[4, 2, 24, 8]),
midi.SetTempoEvent(tick=35, data=[15, 2, 216]),
midi.SetTempoEvent(tick=38, data=[14, 227, 152]),
midi.SetTempoEvent(tick=39, data=[14, 196, 218]),
midi.SetTempoEvent(tick=38, data=[14, 166, 154]),
midi.SetTempoEvent(tick=38, data=[14, 136, 212]),
midi.SetTempoEvent(tick=39, data=[14, 107, 135]),
midi.SetTempoEvent(tick=38, data=[14, 78, 175]),
midi.SetTempoEvent(tick=19, data=[14, 50, 73]),
midi.SetTempoEvent(tick=39, data=[14, 22, 83]),
midi.SetTempoEvent(tick=38, data=[13, 250, 202]),
midi.SetTempoEvent(tick=39, data=[13, 223, 173]),
midi.SetTempoEvent(tick=38, data=[13, 196, 247]),
midi.SetTempoEvent(tick=38, data=[13, 170, 168]),
midi.SetTempoEvent(tick=39, data=[13, 144, 189]),
midi.SetTempoEvent(tick=19, data=[13, 119, 51]),
midi.SetTempoEvent(tick=266, data=[15, 167, 24]),
midi.SetTempoEvent(tick=160, data=[13, 250, 202]),
midi.TimeSignatureEvent(tick=960, data=[2, 2, 24, 8]),
midi.SetTempoEvent(tick=960, data=[14, 72, 101]),
midi.SetTempoEvent(tick=1920, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[14, 220, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=320, data=[16, 81, 207]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[16, 157, 161]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 88, 140]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 39, 117]),
midi.SetTempoEvent(tick=160, data=[13, 139, 22]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[16, 8, 164]),
midi.SetTempoEvent(tick=200, data=[14, 44, 25]),
midi.SetTempoEvent(tick=120, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 88, 140]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 39, 117]),
midi.SetTempoEvent(tick=160, data=[13, 139, 22]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[16, 8, 164]),
midi.SetTempoEvent(tick=200, data=[14, 44, 25]),
midi.SetTempoEvent(tick=120, data=[13, 144, 189]),
midi.SetTempoEvent(tick=160, data=[12, 183, 169]),
midi.SetTempoEvent(tick=160, data=[13, 119, 51]),
midi.SetTempoEvent(tick=160, data=[12, 139, 14]),
midi.SetTempoEvent(tick=160, data=[12, 229, 133]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[13, 94, 8]),
midi.SetTempoEvent(tick=640, data=[14, 166, 154]),
midi.SetTempoEvent(tick=200, data=[14, 44, 25]),
midi.SetTempoEvent(tick=60, data=[14, 44, 25]),
midi.SetTempoEvent(tick=60, data=[12, 252, 241]),
midi.SetTempoEvent(tick=840, data=[14, 16, 59]),
midi.SetTempoEvent(tick=60, data=[14, 44, 25]),
midi.SetTempoEvent(tick=60, data=[13, 20, 177]),
midi.SetTempoEvent(tick=240, data=[12, 95, 168]),
midi.SetTempoEvent(tick=240, data=[13, 20, 177]),
midi.SetTempoEvent(tick=360, data=[14, 16, 59]),
midi.SetTempoEvent(tick=60, data=[14, 44, 25]),
midi.SetTempoEvent(tick=60, data=[12, 12, 74]),
midi.SetTempoEvent(tick=320, data=[13, 223, 173]),
midi.SetTempoEvent(tick=160, data=[11, 248, 32]),
midi.SetTempoEvent(tick=320, data=[13, 119, 51]),
midi.SetTempoEvent(tick=40, data=[14, 16, 59]),
midi.SetTempoEvent(tick=60, data=[14, 44, 25]),
midi.SetTempoEvent(tick=60, data=[11, 228, 58]),
midi.SetTempoEvent(tick=320, data=[13, 144, 189]),
midi.SetTempoEvent(tick=40, data=[14, 78, 175]),
midi.SetTempoEvent(tick=120, data=[11, 228, 58]),
midi.SetTempoEvent(tick=300, data=[19, 71, 64]),
midi.SetTempoEvent(tick=120, data=[16, 205, 63]),
midi.SetTempoEvent(tick=60, data=[14, 22, 83]),
midi.SetTempoEvent(tick=240, data=[14, 136, 212]),
midi.SetTempoEvent(tick=240, data=[13, 196, 247]),
midi.SetTempoEvent(tick=240, data=[14, 107, 135]),
midi.SetTempoEvent(tick=180, data=[16, 16, 144]),
midi.SetTempoEvent(tick=60, data=[14, 78, 175]),
midi.SetTempoEvent(tick=240, data=[14, 227, 152]),
midi.SetTempoEvent(tick=240, data=[14, 78, 175]),
midi.SetTempoEvent(tick=240, data=[15, 236, 204]),
midi.SetTempoEvent(tick=180, data=[16, 16, 144]),
midi.SetTempoEvent(tick=60, data=[14, 196, 218]),
midi.SetTempoEvent(tick=240, data=[15, 236, 204]),
midi.SetTempoEvent(tick=240, data=[15, 201, 165]),
midi.SetTempoEvent(tick=240, data=[15, 236, 204]),
midi.SetTempoEvent(tick=120, data=[17, 244, 125]),
midi.SetTempoEvent(tick=120, data=[15, 201, 165]),
midi.SetTempoEvent(tick=240, data=[17, 244, 125]),
midi.SetTempoEvent(tick=240, data=[19, 123, 195]),
midi.SetTempoEvent(tick=240, data=[29, 18, 215]),
midi.SetTempoEvent(tick=120, data=[16, 245, 21]),
midi.KeySignatureEvent(tick=120, data=[1, 0]),
midi.SetTempoEvent(tick=0, data=[14, 72, 101]),
midi.MarkerEvent(tick=0, text='a tempo', data=[97, 32, 116, 101, 109, 112, 111]),
midi.SetTempoEvent(tick=1920, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[14, 220, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=320, data=[16, 81, 207]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[16, 157, 161]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=800, data=[15, 229, 3]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 88, 140]),
midi.SetTempoEvent(tick=160, data=[14, 44, 25]),
midi.SetTempoEvent(tick=160, data=[13, 39, 117]),
midi.SetTempoEvent(tick=160, data=[13, 139, 22]),
midi.SetTempoEvent(tick=160, data=[15, 27, 147]),
midi.SetTempoEvent(tick=160, data=[14, 72, 101]),
midi.SetTempoEvent(tick=640, data=[16, 8, 164]),
midi.SetTempoEvent(tick=200, data=[14, 44, 25]),
midi.SetTempoEvent(tick=120, data=[14, 44, 25]),
midi.SetTempoEvent(tick=720, data=[16, 8, 164]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[16, 44, 231]),
midi.SetTempoEvent(tick=120, data=[13, 217, 196]),
midi.SetTempoEvent(tick=120, data=[13, 139, 22]),
midi.SetTempoEvent(tick=320, data=[13, 244, 202]),
midi.SetTempoEvent(tick=160, data=[13, 113, 161]),
midi.SetTempoEvent(tick=240, data=[15, 27, 147]),
midi.SetTempoEvent(tick=120, data=[14, 251, 237]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[15, 229, 3]),
midi.SetTempoEvent(tick=120, data=[13, 244, 202]),
midi.SetTempoEvent(tick=120, data=[14, 16, 59]),
midi.SetTempoEvent(tick=720, data=[16, 8, 164]),
midi.SetTempoEvent(tick=120, data=[14, 160, 2]),
midi.SetTempoEvent(tick=120, data=[14, 190, 40]),
midi.SetTempoEvent(tick=720, data=[18, 176, 36]),
midi.SetTempoEvent(tick=120, data=[15, 92, 119]),
midi.SetTempoEvent(tick=120, data=[17, 29, 169]),
midi.SetTempoEvent(tick=420, data=[18, 225, 121]),
midi.SetTempoEvent(tick=60, data=[17, 244, 125]),
midi.SetTempoEvent(tick=240, data=[17, 199, 219]),
midi.SetTempoEvent(tick=60, data=[21, 75, 217]),
midi.SetTempoEvent(tick=20, data=[19, 165, 130]),
midi.SetTempoEvent(tick=40, data=[22, 156, 119]),
midi.SetTempoEvent(tick=120, data=[18, 176, 36]),
midi.SetTempoEvent(tick=240, data=[23, 47, 12]),
midi.SetTempoEvent(tick=240, data=[20, 148, 10]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Piano right', data=[80, 105, 97, 110, 111, 32, 114, 105, 103, 104, 116]),
midi.ProgramChangeEvent(tick=0, channel=0, data=[0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 32]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 30]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=80, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=80, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 32]),
midi.NoteOnEvent(tick=80, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 23]),
midi.NoteOnEvent(tick=80, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 32]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 34]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=160, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 26]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 23]),
midi.NoteOnEvent(tick=160, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 36]),
midi.NoteOnEvent(tick=160, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 32]),
midi.NoteOnEvent(tick=40, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=40, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=420, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=420, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[79, 37]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 40]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 44]),
midi.NoteOnEvent(tick=160, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 39]),
midi.NoteOnEvent(tick=80, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 23]),
midi.NoteOnEvent(tick=80, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 38]),
midi.NoteOnEvent(tick=100, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[81, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[83, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[86, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[88, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[85, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[85, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[88, 42]),
midi.NoteOnEvent(tick=60, channel=0, data=[90, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[86, 44]),
midi.NoteOnEvent(tick=60, channel=0, data=[91, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 33]),
midi.NoteOnEvent(tick=420, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 27]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 33]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 29]),
midi.NoteOnEvent(tick=80, channel=0, data=[62, 25]),
midi.NoteOnEvent(tick=80, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 35]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 28]),
midi.NoteOnEvent(tick=80, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 20]),
midi.NoteOnEvent(tick=80, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 28]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 30]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 29]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 32]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 25]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=160, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 26]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 23]),
midi.NoteOnEvent(tick=160, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 36]),
midi.NoteOnEvent(tick=160, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 32]),
midi.NoteOnEvent(tick=40, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=40, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=420, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=420, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[79, 37]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 40]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 44]),
midi.NoteOnEvent(tick=160, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 39]),
midi.NoteOnEvent(tick=80, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 23]),
midi.NoteOnEvent(tick=80, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 38]),
midi.NoteOnEvent(tick=100, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[81, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[83, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[86, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[88, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[85, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[85, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[88, 42]),
midi.NoteOnEvent(tick=60, channel=0, data=[90, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[86, 44]),
midi.NoteOnEvent(tick=60, channel=0, data=[91, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 33]),
midi.NoteOnEvent(tick=420, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[66, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=360, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[66, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=360, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[66, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 36]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[66, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 44]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 44]),
midi.NoteOnEvent(tick=60, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[66, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 49]),
midi.NoteOnEvent(tick=60, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 61]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 45]),
midi.NoteOnEvent(tick=60, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 45]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 27]),
midi.NoteOnEvent(tick=960, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 35]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 39]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 35]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 39]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 36]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 44]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 44]),
midi.NoteOnEvent(tick=60, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 49]),
midi.NoteOnEvent(tick=60, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 61]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 57]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 45]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 45]),
midi.NoteOnEvent(tick=60, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=960, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=1440, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 27]),
midi.NoteOnEvent(tick=960, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 28]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 27]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 33]),
midi.NoteOnEvent(tick=160, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 29]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 35]),
midi.NoteOnEvent(tick=160, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 28]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 28]),
midi.NoteOnEvent(tick=160, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 30]),
midi.NoteOnEvent(tick=360, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 29]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 32]),
midi.NoteOnEvent(tick=60, channel=0, data=[81, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 25]),
midi.NoteOnEvent(tick=360, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 28]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 38]),
midi.NoteOnEvent(tick=160, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 26]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 23]),
midi.NoteOnEvent(tick=160, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 36]),
midi.NoteOnEvent(tick=160, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 32]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 33]),
midi.NoteOnEvent(tick=40, channel=0, data=[77, 36]),
midi.NoteOnEvent(tick=40, channel=0, data=[81, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 28]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 43]),
midi.NoteOnEvent(tick=160, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 35]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 26]),
midi.NoteOnEvent(tick=160, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 35]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 41]),
midi.NoteOnEvent(tick=160, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 36]),
midi.NoteOnEvent(tick=40, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 37]),
midi.NoteOnEvent(tick=80, channel=0, data=[82, 43]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 32]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 49]),
midi.NoteOnEvent(tick=160, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 40]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 29]),
midi.NoteOnEvent(tick=160, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 40]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[89, 47]),
midi.NoteOnEvent(tick=160, channel=0, data=[89, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 41]),
midi.NoteOnEvent(tick=40, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 42]),
midi.NoteOnEvent(tick=80, channel=0, data=[84, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 45]),
midi.NoteOnEvent(tick=420, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 51]),
midi.NoteOnEvent(tick=60, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 54]),
midi.NoteOnEvent(tick=420, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 51]),
midi.NoteOnEvent(tick=60, channel=0, data=[90, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 65]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 55]),
midi.NoteOnEvent(tick=420, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 51]),
midi.NoteOnEvent(tick=60, channel=0, data=[90, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 65]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 54]),
midi.NoteOnEvent(tick=240, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 73]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 70]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 76]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 64]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 80]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 68]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 61]),
midi.NoteOnEvent(tick=180, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[87, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 46]),
midi.NoteOnEvent(tick=60, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 60]),
midi.NoteOnEvent(tick=400, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 62]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 54]),
midi.NoteOnEvent(tick=400, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[74, 40]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 45]),
midi.NoteOnEvent(tick=400, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[67, 32]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 29]),
midi.NoteOnEvent(tick=400, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[62, 29]),
midi.NoteOnEvent(tick=60, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 27]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=80, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=80, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 32]),
midi.NoteOnEvent(tick=80, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 23]),
midi.NoteOnEvent(tick=80, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 32]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 34]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=60, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=160, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 26]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 23]),
midi.NoteOnEvent(tick=160, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 31]),
midi.NoteOnEvent(tick=160, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 36]),
midi.NoteOnEvent(tick=160, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 32]),
midi.NoteOnEvent(tick=40, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 33]),
midi.NoteOnEvent(tick=40, channel=0, data=[71, 36]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=420, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=420, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[79, 37]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=40, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 40]),
midi.NoteOnEvent(tick=160, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 44]),
midi.NoteOnEvent(tick=160, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 39]),
midi.NoteOnEvent(tick=80, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 23]),
midi.NoteOnEvent(tick=80, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 38]),
midi.NoteOnEvent(tick=100, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[81, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[83, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 36]),
midi.NoteOnEvent(tick=60, channel=0, data=[86, 41]),
midi.NoteOnEvent(tick=60, channel=0, data=[88, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[85, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=360, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[85, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 32]),
midi.NoteOnEvent(tick=60, channel=0, data=[88, 37]),
midi.NoteOnEvent(tick=60, channel=0, data=[90, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 25]),
midi.NoteOnEvent(tick=360, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[90, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 32]),
midi.NoteOnEvent(tick=60, channel=0, data=[86, 35]),
midi.NoteOnEvent(tick=60, channel=0, data=[91, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 26]),
midi.NoteOnEvent(tick=420, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[91, 0]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Piano left', data=[80, 105, 97, 110, 111, 32, 108, 101, 102, 116]),
midi.ProgramChangeEvent(tick=0, channel=0, data=[0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=180, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 23]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 17]),
midi.NoteOnEvent(tick=660, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 15]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 15]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 15]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 15]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=180, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 23]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 17]),
midi.NoteOnEvent(tick=660, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[50, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 27]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[43, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 27]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[43, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 27]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[43, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 30]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[43, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=60, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[43, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 41]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=960, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 45]),
midi.NoteOnEvent(tick=960, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[31, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 32]),
midi.NoteOnEvent(tick=1440, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[31, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 27]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[46, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 28]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 27]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[46, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 28]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 27]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[46, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 30]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 29]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[46, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[46, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 37]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=180, channel=0, data=[46, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 51]),
midi.NoteOnEvent(tick=960, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 45]),
midi.NoteOnEvent(tick=960, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[34, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=1440, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[34, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[37, 24]),
midi.NoteOnEvent(tick=160, channel=0, data=[44, 24]),
midi.NoteOnEvent(tick=160, channel=0, data=[49, 25]),
midi.NoteOnEvent(tick=160, channel=0, data=[52, 29]),
midi.NoteOnEvent(tick=160, channel=0, data=[56, 27]),
midi.NoteOnEvent(tick=160, channel=0, data=[61, 29]),
midi.NoteOnEvent(tick=1600, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[37, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 12]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 12]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 12]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 12]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 12]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 12]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 12]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 12]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 15]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 14]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 15]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 15]),
midi.NoteOnEvent(tick=120, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 15]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 14]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 14]),
midi.NoteOnEvent(tick=120, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 23]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 16]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 23]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 23]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 16]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 26]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 26]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 26]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=180, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 54]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 43]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[55, 54]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 43]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[55, 54]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 65]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 43]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 51]),
midi.NoteOnEvent(tick=180, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[48, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 43]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=180, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 43]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 33]),
midi.NoteOnEvent(tick=180, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 34]),
midi.NoteOnEvent(tick=480, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=180, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 21]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 18]),
midi.NoteOnEvent(tick=180, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 17]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=180, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 20]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 17]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 15]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 14]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 14]),
midi.NoteOnEvent(tick=120, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[43, 15]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 13]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 13]),
midi.NoteOnEvent(tick=660, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Pedale', data=[80, 101, 100, 97, 108, 101]),
midi.ProgramChangeEvent(tick=0, channel=0, data=[0]),
midi.ControlChangeEvent(tick=99, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2320, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=560, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=810, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=51, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=960, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=46, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=929, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=63, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=821, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=61, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=897, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=127, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2355, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=521, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=795, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=105, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=854, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=106, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=849, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=111, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=853, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=107, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=897, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=162, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2320, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=560, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=810, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=51, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=960, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=46, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=929, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=63, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=821, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=61, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=897, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=127, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2355, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=521, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=795, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=105, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=854, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=106, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=849, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=111, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=853, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=107, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=904, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=599, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=417, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=572, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=452, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=3910, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=1200, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=1129, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=417, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=572, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=452, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=3910, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=1200, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=638, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2165, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=762, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2320, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=560, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=810, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=51, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=969, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=161, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=805, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=63, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=821, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=61, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=975, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=63, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=821, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=61, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=975, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=63, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=821, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=38, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=990, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=64, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=719, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=177, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=64, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=719, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=177, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=64, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=719, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=177, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=64, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=719, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=213, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=917, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=57, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=930, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=19, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=922, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=28, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=885, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=138, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2320, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=560, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=810, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=51, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=960, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=46, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=929, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=63, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=821, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=61, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=897, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=127, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=2355, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=521, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=795, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=105, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=854, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=106, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=849, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=111, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=853, channel=0, data=[64, 0]),
midi.ControlChangeEvent(tick=107, channel=0, data=[64, 127]),
midi.ControlChangeEvent(tick=897, channel=0, data=[64, 0]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Grieg: Berceuse', data=[71, 114, 105, 101, 103, 58, 32, 66, 101, 114, 99, 101, 117, 115, 101]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Copyright © 2012 by Bernd Krueger', data=[67, 111, 112, 121, 114, 105, 103, 104, 116, 32, 169, 32, 50, 48, 49, 50, 32, 98, 121, 32, 66, 101, 114, 110, 100, 32, 75, 114, 117, 101, 103, 101, 114]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='http://www.piano-midi.de/', data=[104, 116, 116, 112, 58, 47, 47, 119, 119, 119, 46, 112, 105, 97, 110, 111, 45, 109, 105, 100, 105, 46, 100, 101, 47]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Edition: 2012-09-12', data=[69, 100, 105, 116, 105, 111, 110, 58, 32, 50, 48, 49, 50, 45, 48, 57, 45, 49, 50]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 8', data=[83, 112, 117, 114, 32, 56]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 9', data=[83, 112, 117, 114, 32, 57]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 10', data=[83, 112, 117, 114, 32, 49, 48]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 11', data=[83, 112, 117, 114, 32, 49, 49]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 12', data=[83, 112, 117, 114, 32, 49, 50]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 13', data=[83, 112, 117, 114, 32, 49, 51]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 14', data=[83, 112, 117, 114, 32, 49, 52]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 15', data=[83, 112, 117, 114, 32, 49, 53]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[midi.TrackNameEvent(tick=0, text='Spur 16', data=[83, 112, 117, 114, 32, 49, 54]),
midi.EndOfTrackEvent(tick=0, data=[])])])
| [
"[email protected]"
] | |
acdf4977672dbda2425b7f78c305dc1d7776bfe6 | 0c12d94714546d38266314f4858fa484136f02dc | /fluent_utils/django_compat/django14.py | 8e20d3f46f6f4a1ba1d1a2fc5e03abde5af259fc | [
"Apache-2.0"
] | permissive | benkonrath/django-fluent-utils | a63504dbccd3e21b3c88b04665f48f4721848d40 | 5e90dfe5f7b22ca2c2fe942c304e51981e170ba6 | refs/heads/master | 2021-01-01T04:04:01.452357 | 2017-05-22T11:10:44 | 2017-05-22T11:10:44 | 97,116,253 | 0 | 0 | null | 2017-07-13T11:43:06 | 2017-07-13T11:43:06 | null | UTF-8 | Python | false | false | 1,084 | py | # The timezone support was introduced in Django 1.4, fallback to standard library for 1.3.
# Timezone helpers: ``django.utils.timezone`` appeared in Django 1.4; fall
# back to the standard library on Django 1.3.
try:
    from django.utils.timezone import now, utc
except ImportError:
    # Django < 1.4
    from datetime import datetime
    now = datetime.now
    utc = None  # datetime(..., tzinfo=utc) creates naive datetime this way.

# URL helpers moved in Django 1.4; newer releases dropped ``patterns``
# entirely, so an equivalent shim is defined below when it is missing.
try:
    # Django 1.6 requires this
    from django.conf.urls import url, include
except ImportError:
    # Django 1.3 compatibility, kept in minor release
    from django.conf.urls.defaults import patterns, url, include
else:
    try:
        from django.conf.urls import patterns  # Django 1.9-
    except ImportError:
        # ``patterns`` is unavailable (newer Django): emulate it by applying
        # the common ``prefix`` to every entry and returning a plain list.
        from django.core.urlresolvers import RegexURLPattern

        def patterns(prefix, *args):
            # Re-implementation of the removed django.conf.urls.patterns().
            pattern_list = []
            for t in args:
                if isinstance(t, (list, tuple)):
                    # tuple/list entries are converted via url(), which
                    # receives the shared prefix.
                    t = url(prefix=prefix, *t)
                elif isinstance(t, RegexURLPattern):
                    t.add_prefix(prefix)
                pattern_list.append(t)
            return pattern_list
| [
"[email protected]"
] | |
def backtrack(numbers, index, cnt, m, result):
    # Depth-first enumeration of all sums of `m` elements chosen from
    # `numbers[index:]`.  Each completed sum is added to the module-level
    # set `answer`; `visited` is the module-level list set up by solution().
    global visited
    if cnt == m:
        answer.add(result)
        return
    for i in range(index, len(numbers)):
        # NOTE(review): this `visited` bookkeeping is effectively inert —
        # every mark set below is cleared again after the recursive call,
        # and `index` only moves forward, so this check never fires.
        if visited[i]:
            continue
        for j in range(i + 1):
            visited[j] = 1
        # Take numbers[i] and continue choosing from the elements after it.
        backtrack(numbers, i + 1, cnt + 1, m, result + numbers[i])
        for j in range(len(numbers)):
            visited[j] = 0
def solution(numbers):
    """Return the sorted list of distinct sums of every pair from `numbers`.

    Parameters
    ----------
    numbers : list of int
        Input numbers; each unordered pair (i < j) contributes numbers[i] +
        numbers[j].

    Returns
    -------
    list of int
        All distinct pair sums, in ascending order.
    """
    # The original global-state backtracking over-complicated a simple
    # pair enumeration; combinations() yields exactly the unordered pairs.
    from itertools import combinations
    return sorted({a + b for a, b in combinations(numbers, 2)})
print(solution(([2,1,3,4,1]))) | [
"[email protected]"
] | |
6f8c1430858a1d50773c27f7a66d2f78b7af6ed8 | a56252fda5c9e42eff04792c6e16e413ad51ba1a | /resources/usr/local/lib/python2.7/dist-packages/scipy/sparse/linalg/isolve/iterative.py | 900364c2141b13046fe9ed63ff04b06df9541251 | [
"Apache-2.0"
] | permissive | edawson/parliament2 | 4231e692565dbecf99d09148e75c00750e6797c4 | 2632aa3484ef64c9539c4885026b705b737f6d1e | refs/heads/master | 2021-06-21T23:13:29.482239 | 2020-12-07T21:10:08 | 2020-12-07T21:10:08 | 150,246,745 | 0 | 0 | Apache-2.0 | 2019-09-11T03:22:55 | 2018-09-25T10:21:03 | Python | UTF-8 | Python | false | false | 21,019 | py | """Iterative methods for solving linear systems"""
from __future__ import division, print_function, absolute_import
__all__ = ['bicg','bicgstab','cg','cgs','gmres','qmr']
from . import _iterative
import numpy as np
from scipy.sparse.linalg.interface import LinearOperator
from scipy.lib.decorator import decorator
from .utils import make_system
# Map a NumPy dtype character to the single-letter prefix of the matching
# Fortran routine in _iterative (e.g. 'd' -> 'dbicgrevcom').
_type_conv = {'f':'s', 'd':'d', 'F':'c', 'D':'z'}


# Part of the docstring common to all iterative solvers.
# NOTE(review): leading indentation inside these string constants may have
# been lost when this file was extracted — confirm against upstream scipy.
common_doc1 = \
"""
Parameters
----------
A : {sparse matrix, dense matrix, LinearOperator}"""

# Shared tail of the solver docstrings (everything after the A description).
common_doc2 = \
"""b : {array, matrix}
Right hand side of the linear system. Has shape (N,) or (N,1).
Returns
-------
x : {array, matrix}
The converged solution.
info : integer
Provides convergence information:
0 : successful exit
>0 : convergence to tolerance not achieved, number of iterations
<0 : illegal input or breakdown
Other Parameters
----------------
x0 : {array, matrix}
Starting guess for the solution.
tol : float
Tolerance to achieve. The algorithm terminates when either the relative
or the absolute residual is below `tol`.
maxiter : integer
Maximum number of iterations. Iteration will stop after maxiter
steps even if the specified tolerance has not been achieved.
M : {sparse matrix, dense matrix, LinearOperator}
Preconditioner for A. The preconditioner should approximate the
inverse of A. Effective preconditioning dramatically improves the
rate of convergence, which implies that fewer iterations are needed
to reach a given error tolerance.
callback : function
User-supplied function to call after each iteration. It is called
as callback(xk), where xk is the current solution vector.
xtype : {'f','d','F','D'}
This parameter is deprecated -- avoid using it.
The type of the result. If None, then it will be determined from
A.dtype.char and b. If A does not have a typecode method then it
will compute A.matvec(x0) to get a typecode. To save the extra
computation when A does not have a typecode attribute use xtype=0
for the same type as b or use xtype='f','d','F',or 'D'.
This parameter has been superceeded by LinearOperator.
"""
def set_docstring(header, Ainfo, footer=''):
    # Decorator factory: attaches a docstring assembled from the
    # solver-specific `header`, the shared parameter sections
    # (common_doc1/common_doc2), the solver-specific description of A
    # (`Ainfo`, re-indented line by line), and an optional `footer`.
    def combine(fn):
        fn.__doc__ = '\n'.join((header, common_doc1,
                                ' ' + Ainfo.replace('\n', '\n '),
                                common_doc2, footer))
        return fn
    return combine
@decorator
def non_reentrant(func, *a, **kw):
    """Decorator that raises RuntimeError if *func* is entered re-entrantly.

    A flag stored in the function's own ``__dict__`` marks an in-progress
    call; nested (or concurrent, same-interpreter) calls see the flag and
    fail fast instead of corrupting the Fortran work arrays.
    """
    state = func.__dict__
    if state.get('__entered'):
        raise RuntimeError("%s is not re-entrant" % func.__name__)
    try:
        state['__entered'] = True
        return func(*a, **kw)
    finally:
        # Always clear the flag, even when func raises.
        state['__entered'] = False
@set_docstring('Use BIConjugate Gradient iteration to solve A x = b',
               'The real or complex N-by-N matrix of the linear system\n'
               'It is required that the linear operator can produce\n'
               '``Ax`` and ``A^T x``.')
@non_reentrant
def bicg(A, b, x0=None, tol=1e-5, maxiter=None, xtype=None, M=None, callback=None):
    # Public docstring is attached by @set_docstring above.
    A,M,x,b,postprocess = make_system(A,M,x0,b,xtype)

    n = len(b)
    if maxiter is None:
        maxiter = n*10

    # BiCG needs both A*v and A^T*v, plus M*v and M^T*v.
    matvec, rmatvec = A.matvec, A.rmatvec
    psolve, rpsolve = M.matvec, M.rmatvec
    ltr = _type_conv[x.dtype.char]
    # Dtype-specific Fortran reverse-communication driver and stopping test.
    revcom = getattr(_iterative, ltr + 'bicgrevcom')
    stoptest = getattr(_iterative, ltr + 'stoptest2')

    resid = tol
    ndx1 = 1
    ndx2 = -1
    work = np.zeros(6*n,dtype=x.dtype)
    ijob = 1
    info = 0
    ftflag = True
    bnrm2 = -1.0
    iter_ = maxiter
    while True:
        olditer = iter_
        # The Fortran routine returns an `ijob` code requesting the next
        # operation on 1-based slices (ndx1, ndx2) of the work array.
        x, iter_, resid, info, ndx1, ndx2, sclr1, sclr2, ijob = \
           revcom(b, x, work, iter_, resid, info, ndx1, ndx2, ijob)
        if callback is not None and iter_ > olditer:
            callback(x)
        slice1 = slice(ndx1-1, ndx1-1+n)
        slice2 = slice(ndx2-1, ndx2-1+n)
        if (ijob == -1):
            # Driver finished (converged or breakdown); report final iterate.
            if callback is not None:
                callback(x)
            break
        elif (ijob == 1):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(work[slice1])
        elif (ijob == 2):
            work[slice2] *= sclr2
            work[slice2] += sclr1*rmatvec(work[slice1])
        elif (ijob == 3):
            work[slice1] = psolve(work[slice2])
        elif (ijob == 4):
            work[slice1] = rpsolve(work[slice2])
        elif (ijob == 5):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(x)
        elif (ijob == 6):
            if ftflag:
                info = -1
                ftflag = False
            bnrm2, resid, info = stoptest(work[slice1], b, bnrm2, tol, info)
        ijob = 2

    if info > 0 and iter_ == maxiter and resid > tol:
        # info isn't set appropriately otherwise
        info = iter_

    return postprocess(x), info
# NOTE(review): the A-description below looks copied from cg(); BiCGSTAB does
# not generally require a hermitian positive definite A — confirm against
# upstream scipy documentation before changing the runtime docstring text.
@set_docstring('Use BIConjugate Gradient STABilized iteration to solve A x = b',
               'The real or complex N-by-N matrix of the linear system\n'
               '``A`` must represent a hermitian, positive definite matrix')
@non_reentrant
def bicgstab(A, b, x0=None, tol=1e-5, maxiter=None, xtype=None, M=None, callback=None):
    # Public docstring is attached by @set_docstring above.
    A,M,x,b,postprocess = make_system(A,M,x0,b,xtype)

    n = len(b)
    if maxiter is None:
        maxiter = n*10

    matvec = A.matvec
    # make_system() always returns a LinearOperator for M (identity when no
    # preconditioner is given), so psolve is always callable.
    psolve = M.matvec
    ltr = _type_conv[x.dtype.char]
    # Dtype-specific Fortran reverse-communication driver and stopping test.
    revcom = getattr(_iterative, ltr + 'bicgstabrevcom')
    stoptest = getattr(_iterative, ltr + 'stoptest2')

    resid = tol
    ndx1 = 1
    ndx2 = -1
    work = np.zeros(7*n,dtype=x.dtype)
    ijob = 1
    info = 0
    ftflag = True
    bnrm2 = -1.0
    iter_ = maxiter
    while True:
        olditer = iter_
        # The Fortran routine returns an `ijob` code requesting the next
        # operation on 1-based slices (ndx1, ndx2) of the work array.
        x, iter_, resid, info, ndx1, ndx2, sclr1, sclr2, ijob = \
           revcom(b, x, work, iter_, resid, info, ndx1, ndx2, ijob)
        if callback is not None and iter_ > olditer:
            callback(x)
        slice1 = slice(ndx1-1, ndx1-1+n)
        slice2 = slice(ndx2-1, ndx2-1+n)
        if (ijob == -1):
            # Driver finished (converged or breakdown); report final iterate.
            if callback is not None:
                callback(x)
            break
        elif (ijob == 1):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(work[slice1])
        elif (ijob == 2):
            # Apply the preconditioner.  The former
            # `if psolve is None: psolve = get_psolve(A)` guard was dead code
            # referencing an undefined name (`get_psolve` exists nowhere in
            # this module) and has been removed: psolve is assigned
            # unconditionally above.
            work[slice1] = psolve(work[slice2])
        elif (ijob == 3):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(x)
        elif (ijob == 4):
            if ftflag:
                info = -1
                ftflag = False
            bnrm2, resid, info = stoptest(work[slice1], b, bnrm2, tol, info)
        ijob = 2

    if info > 0 and iter_ == maxiter and resid > tol:
        # info isn't set appropriately otherwise
        info = iter_

    return postprocess(x), info
@set_docstring('Use Conjugate Gradient iteration to solve A x = b',
               'The real or complex N-by-N matrix of the linear system\n'
               '``A`` must represent a hermitian, positive definite matrix')
@non_reentrant
def cg(A, b, x0=None, tol=1e-5, maxiter=None, xtype=None, M=None, callback=None):
    # Public docstring is attached by @set_docstring above.
    A,M,x,b,postprocess = make_system(A,M,x0,b,xtype)

    n = len(b)
    if maxiter is None:
        maxiter = n*10

    matvec = A.matvec
    psolve = M.matvec
    ltr = _type_conv[x.dtype.char]
    # Dtype-specific Fortran reverse-communication driver and stopping test.
    revcom = getattr(_iterative, ltr + 'cgrevcom')
    stoptest = getattr(_iterative, ltr + 'stoptest2')

    resid = tol
    ndx1 = 1
    ndx2 = -1
    work = np.zeros(4*n,dtype=x.dtype)
    ijob = 1
    info = 0
    ftflag = True
    bnrm2 = -1.0
    iter_ = maxiter
    while True:
        olditer = iter_
        # The Fortran routine returns an `ijob` code requesting the next
        # operation on 1-based slices (ndx1, ndx2) of the work array.
        x, iter_, resid, info, ndx1, ndx2, sclr1, sclr2, ijob = \
           revcom(b, x, work, iter_, resid, info, ndx1, ndx2, ijob)
        if callback is not None and iter_ > olditer:
            callback(x)
        slice1 = slice(ndx1-1, ndx1-1+n)
        slice2 = slice(ndx2-1, ndx2-1+n)
        if (ijob == -1):
            # Driver finished (converged or breakdown); report final iterate.
            if callback is not None:
                callback(x)
            break
        elif (ijob == 1):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(work[slice1])
        elif (ijob == 2):
            work[slice1] = psolve(work[slice2])
        elif (ijob == 3):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(x)
        elif (ijob == 4):
            if ftflag:
                info = -1
                ftflag = False
            bnrm2, resid, info = stoptest(work[slice1], b, bnrm2, tol, info)
        ijob = 2

    if info > 0 and iter_ == maxiter and resid > tol:
        # info isn't set appropriately otherwise
        info = iter_

    return postprocess(x), info
@set_docstring('Use Conjugate Gradient Squared iteration to solve A x = b',
               'The real-valued N-by-N matrix of the linear system')
@non_reentrant
def cgs(A, b, x0=None, tol=1e-5, maxiter=None, xtype=None, M=None, callback=None):
    # Public docstring is attached by @set_docstring above.
    A,M,x,b,postprocess = make_system(A,M,x0,b,xtype)

    n = len(b)
    if maxiter is None:
        maxiter = n*10

    matvec = A.matvec
    psolve = M.matvec
    ltr = _type_conv[x.dtype.char]
    # Dtype-specific Fortran reverse-communication driver and stopping test.
    revcom = getattr(_iterative, ltr + 'cgsrevcom')
    stoptest = getattr(_iterative, ltr + 'stoptest2')

    resid = tol
    ndx1 = 1
    ndx2 = -1
    work = np.zeros(7*n,dtype=x.dtype)
    ijob = 1
    info = 0
    ftflag = True
    bnrm2 = -1.0
    iter_ = maxiter
    while True:
        olditer = iter_
        # The Fortran routine returns an `ijob` code requesting the next
        # operation on 1-based slices (ndx1, ndx2) of the work array.
        x, iter_, resid, info, ndx1, ndx2, sclr1, sclr2, ijob = \
           revcom(b, x, work, iter_, resid, info, ndx1, ndx2, ijob)
        if callback is not None and iter_ > olditer:
            callback(x)
        slice1 = slice(ndx1-1, ndx1-1+n)
        slice2 = slice(ndx2-1, ndx2-1+n)
        if (ijob == -1):
            # Driver finished (converged or breakdown); report final iterate.
            if callback is not None:
                callback(x)
            break
        elif (ijob == 1):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(work[slice1])
        elif (ijob == 2):
            work[slice1] = psolve(work[slice2])
        elif (ijob == 3):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(x)
        elif (ijob == 4):
            if ftflag:
                info = -1
                ftflag = False
            bnrm2, resid, info = stoptest(work[slice1], b, bnrm2, tol, info)
        ijob = 2

    if info > 0 and iter_ == maxiter and resid > tol:
        # info isn't set appropriately otherwise
        info = iter_

    return postprocess(x), info
@non_reentrant
def gmres(A, b, x0=None, tol=1e-5, restart=None, maxiter=None, xtype=None, M=None, callback=None, restrt=None):
    """
    Use Generalized Minimal RESidual iteration to solve A x = b.

    Parameters
    ----------
    A : {sparse matrix, dense matrix, LinearOperator}
        The real or complex N-by-N matrix of the linear system.
    b : {array, matrix}
        Right hand side of the linear system. Has shape (N,) or (N,1).

    Returns
    -------
    x : {array, matrix}
        The converged solution.
    info : int
        Provides convergence information:
            * 0 : successful exit
            * >0 : convergence to tolerance not achieved, number of iterations
            * <0 : illegal input or breakdown

    Other parameters
    ----------------
    x0 : {array, matrix}
        Starting guess for the solution (a vector of zeros by default).
    tol : float
        Tolerance to achieve. The algorithm terminates when either the
        relative or the absolute residual is below `tol`.
    restart : int, optional
        Number of iterations between restarts. Larger values increase
        iteration cost, but may be necessary for convergence.
        Default is 20.
    maxiter : int, optional
        Maximum number of iterations. Iteration will stop after maxiter
        steps even if the specified tolerance has not been achieved.
    xtype : {'f','d','F','D'}
        This parameter is DEPRECATED --- avoid using it.
        The type of the result. If None, then it will be determined from
        A.dtype.char and b. If A does not have a typecode method then it
        will compute A.matvec(x0) to get a typecode. To save the extra
        computation when A does not have a typecode attribute use xtype=0
        for the same type as b or use xtype='f','d','F',or 'D'.
        This parameter has been superceeded by LinearOperator.
    M : {sparse matrix, dense matrix, LinearOperator}
        Inverse of the preconditioner of A. M should approximate the
        inverse of A and be easy to solve for (see Notes). Effective
        preconditioning dramatically improves the rate of convergence,
        which implies that fewer iterations are needed to reach a given
        error tolerance. By default, no preconditioner is used.
    callback : function
        User-supplied function to call after each iteration. It is called
        as callback(rk), where rk is the current residual vector.
    restrt : int, optional
        DEPRECATED - use `restart` instead.

    See Also
    --------
    LinearOperator

    Notes
    -----
    A preconditioner, P, is chosen such that P is close to A but easy to
    solve for. The preconditioner parameter required by this routine is
    ``M = P^-1``. The inverse should preferably not be calculated
    explicitly. Rather, use the following template to produce M::

        # Construct a linear operator that computes P^-1 * x.
        import scipy.sparse.linalg as spla
        M_x = lambda x: spla.spsolve(P, x)
        M = spla.LinearOperator((n, n), M_x)

    """

    # Change 'restrt' keyword to 'restart'
    if restrt is None:
        restrt = restart
    elif restart is not None:
        raise ValueError("Cannot specify both restart and restrt keywords. "
                         "Preferably use 'restart' only.")

    A,M,x,b,postprocess = make_system(A,M,x0,b,xtype)

    n = len(b)
    if maxiter is None:
        maxiter = n*10

    if restrt is None:
        restrt = 20
    # The restart length can never usefully exceed the problem size.
    restrt = min(restrt, n)

    matvec = A.matvec
    psolve = M.matvec
    ltr = _type_conv[x.dtype.char]
    # Dtype-specific Fortran reverse-communication driver and stopping test.
    revcom = getattr(_iterative, ltr + 'gmresrevcom')
    stoptest = getattr(_iterative, ltr + 'stoptest2')

    resid = tol
    ndx1 = 1
    ndx2 = -1
    # work holds the Krylov basis vectors; work2 the Hessenberg/Givens data.
    work = np.zeros((6+restrt)*n,dtype=x.dtype)
    work2 = np.zeros((restrt+1)*(2*restrt+2),dtype=x.dtype)
    ijob = 1
    info = 0
    ftflag = True
    bnrm2 = -1.0
    iter_ = maxiter
    old_ijob = ijob
    first_pass = True
    # resid_ready tracks when a freshly computed residual is available for
    # the user callback (gmres reports residuals, not iterates).
    resid_ready = False
    iter_num = 1
    while True:
        olditer = iter_
        x, iter_, resid, info, ndx1, ndx2, sclr1, sclr2, ijob = \
           revcom(b, x, restrt, work, work2, iter_, resid, info, ndx1, ndx2, ijob)
        # if callback is not None and iter_ > olditer:
        #    callback(x)
        slice1 = slice(ndx1-1, ndx1-1+n)
        slice2 = slice(ndx2-1, ndx2-1+n)
        if (ijob == -1):  # gmres success, update last residual
            if resid_ready and callback is not None:
                callback(resid)
                resid_ready = False
            break
        elif (ijob == 1):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(x)
        elif (ijob == 2):
            work[slice1] = psolve(work[slice2])
            if not first_pass and old_ijob == 3:
                resid_ready = True
            first_pass = False
        elif (ijob == 3):
            work[slice2] *= sclr2
            work[slice2] += sclr1*matvec(work[slice1])
            if resid_ready and callback is not None:
                callback(resid)
                resid_ready = False
            iter_num = iter_num+1
        elif (ijob == 4):
            if ftflag:
                info = -1
                ftflag = False
            bnrm2, resid, info = stoptest(work[slice1], b, bnrm2, tol, info)

        old_ijob = ijob
        ijob = 2

        if iter_num > maxiter:
            break

    if info >= 0 and resid > tol:
        # info isn't set appropriately otherwise
        info = maxiter

    return postprocess(x), info
@non_reentrant
def qmr(A, b, x0=None, tol=1e-5, maxiter=None, xtype=None, M1=None, M2=None, callback=None):
    """Use Quasi-Minimal Residual iteration to solve A x = b

    Parameters
    ----------
    A : {sparse matrix, dense matrix, LinearOperator}
        The real-valued N-by-N matrix of the linear system.
        It is required that the linear operator can produce
        ``Ax`` and ``A^T x``.
    b : {array, matrix}
        Right hand side of the linear system. Has shape (N,) or (N,1).

    Returns
    -------
    x : {array, matrix}
        The converged solution.
    info : integer
        Provides convergence information:
            0 : successful exit
            >0 : convergence to tolerance not achieved, number of iterations
            <0 : illegal input or breakdown

    Other Parameters
    ----------------
    x0 : {array, matrix}
        Starting guess for the solution.
    tol : float
        Tolerance to achieve. The algorithm terminates when either the
        relative or the absolute residual is below `tol`.
    maxiter : integer
        Maximum number of iterations. Iteration will stop after maxiter
        steps even if the specified tolerance has not been achieved.
    M1 : {sparse matrix, dense matrix, LinearOperator}
        Left preconditioner for A.
    M2 : {sparse matrix, dense matrix, LinearOperator}
        Right preconditioner for A. Used together with the left
        preconditioner M1. The matrix M1*A*M2 should have better
        conditioned than A alone.
    callback : function
        User-supplied function to call after each iteration. It is called
        as callback(xk), where xk is the current solution vector.
    xtype : {'f','d','F','D'}
        This parameter is DEPRECATED -- avoid using it.
        The type of the result. If None, then it will be determined from
        A.dtype.char and b. If A does not have a typecode method then it
        will compute A.matvec(x0) to get a typecode. To save the extra
        computation when A does not have a typecode attribute use xtype=0
        for the same type as b or use xtype='f','d','F',or 'D'.
        This parameter has been superceeded by LinearOperator.

    See Also
    --------
    LinearOperator
    """
    A_ = A
    # No global preconditioner M here: QMR uses the split M1/M2 pair below.
    A,M,x,b,postprocess = make_system(A,None,x0,b,xtype)

    if M1 is None and M2 is None:
        if hasattr(A_,'psolve'):
            # Legacy interface: the operator supplies its own split
            # preconditioner via psolve/rpsolve with a 'left'/'right' side.
            def left_psolve(b):
                return A_.psolve(b,'left')

            def right_psolve(b):
                return A_.psolve(b,'right')

            def left_rpsolve(b):
                return A_.rpsolve(b,'left')

            def right_rpsolve(b):
                return A_.rpsolve(b,'right')
            M1 = LinearOperator(A.shape, matvec=left_psolve, rmatvec=left_rpsolve)
            M2 = LinearOperator(A.shape, matvec=right_psolve, rmatvec=right_rpsolve)
        else:
            # No preconditioning: both sides default to the identity.
            def id(b):
                return b
            M1 = LinearOperator(A.shape, matvec=id, rmatvec=id)
            M2 = LinearOperator(A.shape, matvec=id, rmatvec=id)

    n = len(b)
    if maxiter is None:
        maxiter = n*10

    ltr = _type_conv[x.dtype.char]
    # Dtype-specific Fortran reverse-communication driver and stopping test.
    revcom = getattr(_iterative, ltr + 'qmrrevcom')
    stoptest = getattr(_iterative, ltr + 'stoptest2')

    resid = tol
    ndx1 = 1
    ndx2 = -1
    work = np.zeros(11*n,x.dtype)
    ijob = 1
    info = 0
    ftflag = True
    bnrm2 = -1.0
    iter_ = maxiter
    while True:
        olditer = iter_
        # The Fortran routine returns an `ijob` code requesting the next
        # operation on 1-based slices (ndx1, ndx2) of the work array.
        x, iter_, resid, info, ndx1, ndx2, sclr1, sclr2, ijob = \
           revcom(b, x, work, iter_, resid, info, ndx1, ndx2, ijob)
        if callback is not None and iter_ > olditer:
            callback(x)
        slice1 = slice(ndx1-1, ndx1-1+n)
        slice2 = slice(ndx2-1, ndx2-1+n)
        if (ijob == -1):
            # Driver finished (converged or breakdown); report final iterate.
            if callback is not None:
                callback(x)
            break
        elif (ijob == 1):
            work[slice2] *= sclr2
            work[slice2] += sclr1*A.matvec(work[slice1])
        elif (ijob == 2):
            work[slice2] *= sclr2
            work[slice2] += sclr1*A.rmatvec(work[slice1])
        elif (ijob == 3):
            work[slice1] = M1.matvec(work[slice2])
        elif (ijob == 4):
            work[slice1] = M2.matvec(work[slice2])
        elif (ijob == 5):
            work[slice1] = M1.rmatvec(work[slice2])
        elif (ijob == 6):
            work[slice1] = M2.rmatvec(work[slice2])
        elif (ijob == 7):
            work[slice2] *= sclr2
            work[slice2] += sclr1*A.matvec(x)
        elif (ijob == 8):
            if ftflag:
                info = -1
                ftflag = False
            bnrm2, resid, info = stoptest(work[slice1], b, bnrm2, tol, info)
        ijob = 2

    if info > 0 and iter_ == maxiter and resid > tol:
        # info isn't set appropriately otherwise
        info = iter_

    return postprocess(x), info
| [
"[email protected]"
] | |
536e71273f02e7028fe26d6242db9bada2af0dcc | 7a4da5ec2196bf975a9e6115846244788b36b952 | /3.7.0/lldb-3.7.0.src/test/macosx/safe-to-func-call/TestSafeFuncCalls.py | 73ae892dc7571294845b8570857cc0d81492d0aa | [
"NCSA",
"MIT"
] | permissive | androm3da/clang_sles | ca4ada2ec85d625c65818ca9b60dcf1bc27f0756 | 2ba6d0711546ad681883c42dfb8661b842806695 | refs/heads/master | 2021-01-10T13:50:25.353394 | 2016-03-31T21:38:29 | 2016-03-31T21:38:29 | 44,787,977 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,887 | py | """Test function call thread safety."""
import os, time
import unittest2
import lldb
import lldbutil
from lldbtest import *
class TestSafeFuncCalls(TestBase):
    # LLDB API test: verifies SBThread.SafeToCallFunctions() reports that
    # expression evaluation is safe on the main thread but not on a thread
    # blocked in select() (see the accompanying main.c test program).

    mydir = TestBase.compute_mydir(__file__)

    @skipUnlessDarwin
    @python_api_test
    @dsym_test
    def test_with_dsym_and_python_api(self):
        """Test function call thread safety."""
        # Same check, built with dSYM debug info.
        self.buildDsym()
        self.function_call_safety_check()

    @skipUnlessDarwin
    @python_api_test
    @dwarf_test
    def test_with_dwarf_and_python_api(self):
        """Test function call thread safety."""
        # Same check, built with DWARF debug info.
        self.buildDwarf()
        self.function_call_safety_check()

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Find the line numbers that we will step to in main:
        self.main_source = "main.c"

    def check_number_of_threads(self, process):
        # Expect exactly the two named threads created by the test program.
        self.assertTrue(process.GetNumThreads() == 2, "Check that the process has two threads when sitting at the stopper() breakpoint")

    def safe_to_call_func_on_main_thread(self, main_thread):
        self.assertTrue(main_thread.SafeToCallFunctions() == True, "It is safe to call functions on the main thread")

    def safe_to_call_func_on_select_thread(self, select_thread):
        self.assertTrue(select_thread.SafeToCallFunctions() == False, "It is not safe to call functions on the select thread")

    def function_call_safety_check(self):
        """Test function call safety checks"""
        exe = os.path.join(os.getcwd(), "a.out")

        target = self.dbg.CreateTarget(exe)
        self.assertTrue(target, VALID_TARGET)
        self.main_source_spec = lldb.SBFileSpec(self.main_source)
        # Stop inside stopper(), at which point both threads should exist.
        break1 = target.BreakpointCreateByName("stopper", 'a.out')
        self.assertTrue(break1, VALID_BREAKPOINT)
        process = target.LaunchSimple(None, None, self.get_process_working_directory())
        self.assertTrue(process, PROCESS_IS_VALID)
        threads = lldbutil.get_threads_stopped_at_breakpoint(process, break1)
        if len(threads) != 1:
            self.fail("Failed to stop at breakpoint 1.")

        self.check_number_of_threads(process)

        # Locate the two threads by the names the test program gives them.
        main_thread = lldb.SBThread()
        select_thread = lldb.SBThread()
        for idx in range(0, process.GetNumThreads()):
            t = process.GetThreadAtIndex(idx)
            if t.GetName() == "main thread":
                main_thread = t
            if t.GetName() == "select thread":
                select_thread = t

        self.assertTrue(main_thread.IsValid() and select_thread.IsValid(), "Got both expected threads")

        self.safe_to_call_func_on_main_thread(main_thread)
        self.safe_to_call_func_on_select_thread(select_thread)
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
| [
"[email protected]"
] | |
003b0869325795d0e20613fbcd858c7435600cdd | d5ad13232e3f1ced55f6956bc4cbda87925c8085 | /RNAseqMSMS/8-snv-virus-sv/1-integration.py | 28de0f65de0b94bbf61a878cf425349690bcfd78 | [] | no_license | arvin580/SIBS | c0ba9a8a41f59cb333517c286f7d80300b9501a2 | 0cc2378bf62359ec068336ea4de16d081d0f58a4 | refs/heads/master | 2021-01-23T21:57:35.658443 | 2015-04-09T23:11:34 | 2015-04-09T23:11:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,646 | py | ### [SNV, Virus, Deltion, Duplication, Inversion, Translocation]
# Per-gene evidence flags, in column order:
# [SNV, Virus, Deletion, Duplication, Inversion, Translocation]
D = {}

_N_CATEGORIES = 6


def _flag_genes(fname, column, slot):
    """Set flag `slot` for every gene listed in tab column `column` of `fname`."""
    inFile = open(fname)
    for line in inFile:
        line = line.strip()
        fields = line.split('\t')
        gene = fields[column]
        D.setdefault(gene, [0] * _N_CATEGORIES)
        D[gene][slot] = 1
    inFile.close()


# (filename, gene-column index, flag slot) — the SNV summary carries the gene
# name in column 1; every other evidence file carries it in column 0.
_SOURCES = [
    ('sum_snv.exome_summary.overall.filter', 1, 0),
    ('ERR0498-04-05.unmapped.unique.human-viruse-checked-human-gene3', 0, 1),
    ('split-mapped-deletion.gene', 0, 2),
    ('split-mapped-duplication.gene', 0, 3),
    ('split-mapped-inversion.gene', 0, 4),
    ('split-mapped-translocation.gene', 0, 5),
]
for _fname, _column, _slot in _SOURCES:
    _flag_genes(_fname, _column, _slot)

# Rank genes by how many evidence categories support them (descending).
# key= replaces the Python-2-only cmp= form and yields the same ordering
# (both sorts are stable, so ties keep their relative order).
d = list(D.items())
d.sort(key=lambda x: sum(x[1]), reverse=True)

ouFile = open('HeLa-Gene-SNV-Virus-Deletion-Duplication-Inversion-Translocation', 'w')
for item in d:
    ouFile.write(item[0] + '\t' + '\t'.join([str(x) for x in item[1]]) + '\n')
ouFile.close()  # the original never closed the output file
"[email protected]"
] | |
674ad7c2490eb113d1ff5457500c8598258b54df | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/ixnetwork_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/configelement/stack/fCoEGPFCP_template.py | e65f9365f8707673b7cd59d7b4a6b7714c358188 | [] | no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,422 | py | from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class FCoEGPFCP(Base):
    """Generated SDM class for the fCoE GPFCP traffic stack template.

    Each property exposes one header field of the PDU as a Multivalue bound
    to this stack's attribute map. Field formats, defaults and the full
    enumerated value sets come from the IxNetwork SDM template; the REST
    attribute path for every field is recorded in _SDM_ATT_MAP.

    Refactoring note: the original generated code repeated the same 4-line
    body (including a re-executed local import of Multivalue) in all 57
    properties; that boilerplate now lives in the single _mv() helper.
    """
    __slots__ = ()
    _SDM_NAME = 'fCoEGPFCP'
    _SDM_ATT_MAP = {
        'FcoeHeaderVersion': 'fCoEGPFCP.header.fcoeHeader.version-1',
        'FcoeHeaderReserved': 'fCoEGPFCP.header.fcoeHeader.reserved-2',
        'FcoeHeaderESOF': 'fCoEGPFCP.header.fcoeHeader.eSOF-3',
        'DeviceDataFramesDeviceDataInfo': 'fCoEGPFCP.header.fcHeader.rCTL.deviceDataFrames.deviceDataInfo-4',
        'RCTLReserved': 'fCoEGPFCP.header.fcHeader.rCTL.reserved-5',
        'ExtendedLinkServicesInfo': 'fCoEGPFCP.header.fcHeader.rCTL.extendedLinkServices.info-6',
        'Fc4LinkDataInfo': 'fCoEGPFCP.header.fcHeader.rCTL.fc4LinkData.info-7',
        'VideoDataInfo': 'fCoEGPFCP.header.fcHeader.rCTL.videoData.info-8',
        'ExtendedHeaderInfo': 'fCoEGPFCP.header.fcHeader.rCTL.extendedHeader.info-9',
        'BasicLinkServicesInfo': 'fCoEGPFCP.header.fcHeader.rCTL.basicLinkServices.info-10',
        'LinkControlFramesInfo': 'fCoEGPFCP.header.fcHeader.rCTL.linkControlFrames.info-11',
        'ExtendedRoutingInfo': 'fCoEGPFCP.header.fcHeader.rCTL.extendedRouting.info-12',
        'FcHeaderDstId': 'fCoEGPFCP.header.fcHeader.dstId-13',
        'FcHeaderCsCTLPriority': 'fCoEGPFCP.header.fcHeader.csCTLPriority-14',
        'FcHeaderSrcId': 'fCoEGPFCP.header.fcHeader.srcId-15',
        'FcHeaderType': 'fCoEGPFCP.header.fcHeader.type-16',
        'FCTLCustom': 'fCoEGPFCP.header.fcHeader.fCTL.custom-17',
        'BuildFCTLExchangeContext': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.exchangeContext-18',
        'BuildFCTLSequenceContext': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.sequenceContext-19',
        'BuildFCTLFirstSequence': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.firstSequence-20',
        'BuildFCTLLastSequence': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.lastSequence-21',
        'BuildFCTLEndSequence': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.endSequence-22',
        'BuildFCTLEndConnection': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.endConnection-23',
        'BuildFCTLCsCTLPriority': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.csCTLPriority-24',
        'BuildFCTLSequenceInitiative': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.sequenceInitiative-25',
        'BuildFCTLFcXIDReassigned': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.fcXIDReassigned-26',
        'BuildFCTLFcInvalidateXID': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.fcInvalidateXID-27',
        'BuildFCTLAckForm': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.ackForm-28',
        'BuildFCTLFcDataCompression': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.fcDataCompression-29',
        'BuildFCTLFcDataEncryption': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.fcDataEncryption-30',
        'BuildFCTLRetransmittedSequence': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.retransmittedSequence-31',
        'BuildFCTLUnidirectionalTransmit': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.unidirectionalTransmit-32',
        'BuildFCTLContinueSeqCondition': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.continueSeqCondition-33',
        'BuildFCTLAbortSeqCondition': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.abortSeqCondition-34',
        'BuildFCTLRelativeOffsetPresent': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.relativeOffsetPresent-35',
        'BuildFCTLExchangeReassembly': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.exchangeReassembly-36',
        'BuildFCTLFillBytes': 'fCoEGPFCP.header.fcHeader.fCTL.buildFCTL.fillBytes-37',
        'FcHeaderSeqID': 'fCoEGPFCP.header.fcHeader.seqID-38',
        'FcHeaderDfCTL': 'fCoEGPFCP.header.fcHeader.dfCTL-39',
        'FcHeaderSeqCNT': 'fCoEGPFCP.header.fcHeader.seqCNT-40',
        'FcHeaderOxID': 'fCoEGPFCP.header.fcHeader.oxID-41',
        'FcHeaderRxID': 'fCoEGPFCP.header.fcHeader.rxID-42',
        'FcHeaderParameter': 'fCoEGPFCP.header.fcHeader.parameter-43',
        'FcCTRevision': 'fCoEGPFCP.header.fcCT.revision-44',
        'FcCTInId': 'fCoEGPFCP.header.fcCT.inId-45',
        'FcCTGsType': 'fCoEGPFCP.header.fcCT.gsType-46',
        'FcCTGsSubtype': 'fCoEGPFCP.header.fcCT.gsSubtype-47',
        'FcCTOptions': 'fCoEGPFCP.header.fcCT.options-48',
        'FcCTReserved': 'fCoEGPFCP.header.fcCT.reserved-49',
        'FCSOpcode': 'fCoEGPFCP.header.FCS.opcode-50',
        'FCSMaxsize': 'fCoEGPFCP.header.FCS.maxsize-51',
        'FCSReserved': 'fCoEGPFCP.header.FCS.reserved-52',
        'FCSPlatformName': 'fCoEGPFCP.header.FCS.platformName-53',
        'FcCRCAutoCRC': 'fCoEGPFCP.header.fcCRC.autoCRC-54',
        'FcCRCGenerateBadCRC': 'fCoEGPFCP.header.fcCRC.generateBadCRC-55',
        'FcTrailerEEOF': 'fCoEGPFCP.header.fcTrailer.eEOF-56',
        'FcTrailerReserved': 'fCoEGPFCP.header.fcTrailer.reserved-57',
    }

    def __init__(self, parent, list_op=False):
        super(FCoEGPFCP, self).__init__(parent, list_op)

    def _mv(self, att_name):
        """Return the Multivalue bound to *att_name* (a _SDM_ATT_MAP key).

        The import stays local, as in the generated code, to keep the same
        lazy-import behavior (presumably avoiding an import cycle — the
        original deferred it in every property).
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP[att_name]))

    @property
    def FcoeHeaderVersion(self):
        """Version (decimal; default 0)."""
        return self._mv('FcoeHeaderVersion')

    @property
    def FcoeHeaderReserved(self):
        """Reserved (hex; default 0x00)."""
        return self._mv('FcoeHeaderReserved')

    @property
    def FcoeHeaderESOF(self):
        """E-SOF delimiter (decimal; default 54 = SOFn3). Enumerated SOF codes."""
        return self._mv('FcoeHeaderESOF')

    @property
    def DeviceDataFramesDeviceDataInfo(self):
        """Information (decimal; default 0). Enumerated R_CTL device-data codes 0-7."""
        return self._mv('DeviceDataFramesDeviceDataInfo')

    @property
    def RCTLReserved(self):
        """Reserved (hex; default 0)."""
        return self._mv('RCTLReserved')

    @property
    def ExtendedLinkServicesInfo(self):
        """Information (decimal; default 33 = Request). Enum: 32-34."""
        return self._mv('ExtendedLinkServicesInfo')

    @property
    def Fc4LinkDataInfo(self):
        """Information (decimal; default 48). Enum: 48-55."""
        return self._mv('Fc4LinkDataInfo')

    @property
    def VideoDataInfo(self):
        """Information (decimal; default 68 = Unsolicited Data)."""
        return self._mv('VideoDataInfo')

    @property
    def ExtendedHeaderInfo(self):
        """Information (decimal; default 80). Enum: 80-82."""
        return self._mv('ExtendedHeaderInfo')

    @property
    def BasicLinkServicesInfo(self):
        """Information (decimal; default 128 = No Operation). Enumerated."""
        return self._mv('BasicLinkServicesInfo')

    @property
    def LinkControlFramesInfo(self):
        """Information (decimal; default 192). Enumerated link-control codes."""
        return self._mv('LinkControlFramesInfo')

    @property
    def ExtendedRoutingInfo(self):
        """Information (decimal; default 240 = Vendor Unique)."""
        return self._mv('ExtendedRoutingInfo')

    @property
    def FcHeaderDstId(self):
        """Destination ID (FC_ID; default 0)."""
        return self._mv('FcHeaderDstId')

    @property
    def FcHeaderCsCTLPriority(self):
        """CS_CTL/Priority (hex; default 0)."""
        return self._mv('FcHeaderCsCTLPriority')

    @property
    def FcHeaderSrcId(self):
        """Source ID (FC_ID; default 0)."""
        return self._mv('FcHeaderSrcId')

    @property
    def FcHeaderType(self):
        """Type (hex; default 0)."""
        return self._mv('FcHeaderType')

    @property
    def FCTLCustom(self):
        """Custom F_CTL value (hex; default 0)."""
        return self._mv('FCTLCustom')

    @property
    def BuildFCTLExchangeContext(self):
        """Exchange Context (0 = Originator, 1 = Recipient)."""
        return self._mv('BuildFCTLExchangeContext')

    @property
    def BuildFCTLSequenceContext(self):
        """Sequence Context (0 = Initiator, 1 = Recipient)."""
        return self._mv('BuildFCTLSequenceContext')

    @property
    def BuildFCTLFirstSequence(self):
        """First Sequence flag (0 = Other, 1 = First)."""
        return self._mv('BuildFCTLFirstSequence')

    @property
    def BuildFCTLLastSequence(self):
        """Last Sequence flag (0 = Other, 1 = Last)."""
        return self._mv('BuildFCTLLastSequence')

    @property
    def BuildFCTLEndSequence(self):
        """End Sequence flag (0 = Other, 1 = Last)."""
        return self._mv('BuildFCTLEndSequence')

    @property
    def BuildFCTLEndConnection(self):
        """End Connection flag (0 = Alive, 1 = Pending)."""
        return self._mv('BuildFCTLEndConnection')

    @property
    def BuildFCTLCsCTLPriority(self):
        """CS_CTL/Priority selector (0 = CS_CTL, 1 = Priority)."""
        return self._mv('BuildFCTLCsCTLPriority')

    @property
    def BuildFCTLSequenceInitiative(self):
        """Sequence Initiative (0 = Hold, 1 = Transfer)."""
        return self._mv('BuildFCTLSequenceInitiative')

    @property
    def BuildFCTLFcXIDReassigned(self):
        """FC XID Reassigned (0 = No, 1 = Yes)."""
        return self._mv('BuildFCTLFcXIDReassigned')

    @property
    def BuildFCTLFcInvalidateXID(self):
        """FC Invalidate XID (0 = No, 1 = Yes)."""
        return self._mv('BuildFCTLFcInvalidateXID')

    @property
    def BuildFCTLAckForm(self):
        """ACK_Form (0-3; default 0 = no assistance provided)."""
        return self._mv('BuildFCTLAckForm')

    @property
    def BuildFCTLFcDataCompression(self):
        """FC Data Compression (0 = No, 1 = Yes)."""
        return self._mv('BuildFCTLFcDataCompression')

    @property
    def BuildFCTLFcDataEncryption(self):
        """FC Data Encryption (0 = No, 1 = Yes)."""
        return self._mv('BuildFCTLFcDataEncryption')

    @property
    def BuildFCTLRetransmittedSequence(self):
        """Retransmitted Sequence (0 = Original, 1 = Retransmission)."""
        return self._mv('BuildFCTLRetransmittedSequence')

    @property
    def BuildFCTLUnidirectionalTransmit(self):
        """Unidirectional Transmit (0 = Bi-directional, 1 = Unidirectional)."""
        return self._mv('BuildFCTLUnidirectionalTransmit')

    @property
    def BuildFCTLContinueSeqCondition(self):
        """Continue Sequence Condition (0-3; default 0 = no information)."""
        return self._mv('BuildFCTLContinueSeqCondition')

    @property
    def BuildFCTLAbortSeqCondition(self):
        """Abort Sequence Condition (0-3; default 0)."""
        return self._mv('BuildFCTLAbortSeqCondition')

    @property
    def BuildFCTLRelativeOffsetPresent(self):
        """Relative Offset Present (0 = parameter field defined, 1 = relative offset)."""
        return self._mv('BuildFCTLRelativeOffsetPresent')

    @property
    def BuildFCTLExchangeReassembly(self):
        """Exchange Reassembly (0 = off, 1 = on)."""
        return self._mv('BuildFCTLExchangeReassembly')

    @property
    def BuildFCTLFillBytes(self):
        """Fill Bytes count (0-3)."""
        return self._mv('BuildFCTLFillBytes')

    @property
    def FcHeaderSeqID(self):
        """SEQ_ID (hex; default 0)."""
        return self._mv('FcHeaderSeqID')

    @property
    def FcHeaderDfCTL(self):
        """DF_CTL (hex; default 0)."""
        return self._mv('FcHeaderDfCTL')

    @property
    def FcHeaderSeqCNT(self):
        """SEQ_CNT (hex; default 0)."""
        return self._mv('FcHeaderSeqCNT')

    @property
    def FcHeaderOxID(self):
        """OX_ID (hex; default 0)."""
        return self._mv('FcHeaderOxID')

    @property
    def FcHeaderRxID(self):
        """RX_ID (hex; default 0)."""
        return self._mv('FcHeaderRxID')

    @property
    def FcHeaderParameter(self):
        """Parameter (hex; default 0)."""
        return self._mv('FcHeaderParameter')

    @property
    def FcCTRevision(self):
        """Revision (hex; default 0x01)."""
        return self._mv('FcCTRevision')

    @property
    def FcCTInId(self):
        """IN_ID (FC_ID; default 0x000000)."""
        return self._mv('FcCTInId')

    @property
    def FcCTGsType(self):
        """GS_Type (decimal; default 250 = Management Service). Enumerated."""
        return self._mv('FcCTGsType')

    @property
    def FcCTGsSubtype(self):
        """GS_Subtype (hex; default 0x01 = Fabric Configuration Server). Enumerated."""
        return self._mv('FcCTGsSubtype')

    @property
    def FcCTOptions(self):
        """Options (hex; default 0x00)."""
        return self._mv('FcCTOptions')

    @property
    def FcCTReserved(self):
        """Reserved (hex; default 0x00)."""
        return self._mv('FcCTReserved')

    @property
    def FCSOpcode(self):
        """Command/Response Code (decimal; default 420 = GPFCP). Enumerated FCS opcodes."""
        return self._mv('FCSOpcode')

    @property
    def FCSMaxsize(self):
        """Maximum/Residual Size (hex; default 0x0000)."""
        return self._mv('FCSMaxsize')

    @property
    def FCSReserved(self):
        """Reserved (hex; default 0x00)."""
        return self._mv('FCSReserved')

    @property
    def FCSPlatformName(self):
        """Platform Name (hex; default 0x00)."""
        return self._mv('FCSPlatformName')

    @property
    def FcCRCAutoCRC(self):
        """Auto CRC (hex; default 0)."""
        return self._mv('FcCRCAutoCRC')

    @property
    def FcCRCGenerateBadCRC(self):
        """Bad CRC (hex; default 0x01)."""
        return self._mv('FcCRCGenerateBadCRC')

    @property
    def FcTrailerEEOF(self):
        """E-EOF delimiter (decimal; default 65 = EOFn). Enumerated EOF codes."""
        return self._mv('FcTrailerEEOF')

    @property
    def FcTrailerReserved(self):
        """Reserved (hex; default 0x000000)."""
        return self._mv('FcTrailerReserved')

    def add(self):
        """Create an instance of this template via Base._create.

        Delegates to the inherited _create with this template's attribute
        map and the caller's locals, exactly as in the generated code.
        """
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
| [
"[email protected]"
] | |
9c1ebff143583caed72eccb5483e60f02ed4113a | 93ca62f2fb727c20f1fc5c8d81b7134877e85c6e | /Refactoring/refactoring_3_abstraction.py | e0146c6f647d0d2e91a1e1af6a47ef2590df21a1 | [] | no_license | lily48/oop-python | f607e954d50236f6f8d7844f1c4e29b38c0dbfae | b009cd4477a5045de115e44aa326923a32ba7c1c | refs/heads/master | 2023-01-19T03:39:22.726179 | 2020-11-18T20:38:14 | 2020-11-18T20:38:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,016 | py | """
Simulate a simple board game.
There are two or more players (two by default).
Each player takes turn rolling a die and moving that number of spaces.
The first person to space 100 wins.
"""
import random
class Player:
    """A participant in the race-to-100 dice game."""

    def __init__(self, player_number):
        """Create a player with the given number and a starting score of 0."""
        self.score = 0
        self.player_number = player_number

    def roll_die(self):
        """Roll a fair six-sided die, announce the result, and return it."""
        pips = random.randint(1, 6)
        print(f'{self} rolled a {pips}')
        return pips

    def make_move(self):
        """Advance this player by one die roll and report the new score."""
        roll = self.roll_die()
        self.score = self.score + roll
        print(f'{self}: {self.score}')

    @property
    def has_won(self):
        """True once the player has reached space 100."""
        return not self.score < 100

    def __str__(self):
        return f'Player {self.player_number}'
def play_game(num_players=2):
    """Run one game: players alternate die rolls; the first to 100 wins.

    Args:
        num_players: how many players take part (numbered from 1).
    """
    roster = [Player(n + 1) for n in range(num_players)]
    while True:
        for contender in roster:
            contender.make_move()
            if not contender.has_won:
                continue
            print(f'{contender} wins!')
            return
if __name__ == '__main__':
    # Demo entry point: run a single game with three players.
    play_game(num_players=3)
| [
"[email protected]"
] | |
5c95a60905b7a7bda2099078018e15544b41824c | 473625e02c757fd9f9ba58624aa84551280611e3 | /store/migrations/0010_icat.py | d484e1b5289f8c78f96cc0fa8a5342cbe38026ee | [] | no_license | rohitrajput-42/PortalK | 1a15cd182b252de459acc950eb87d3837d7e6ff4 | a5647b560d850c650c9cefae30a43bc7424d188b | refs/heads/main | 2023-01-21T08:50:06.614440 | 2020-11-27T12:58:57 | 2020-11-27T12:58:57 | 314,314,573 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 505 | py | # Generated by Django 3.1.2 on 2020-11-15 12:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (3.1.2): creates the store.Icat model
    # with an auto primary key and a single free-text ``name`` column.

    dependencies = [
        ('store', '0009_joblist_lcat'),
    ]

    operations = [
        migrations.CreateModel(
            name='Icat',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=2000)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
13d9348edcc2d4475fd1cd08528d9a9ad8745c7a | cbd3ac62b75ac3dceb6ffb219eaa3fe9d2ef0c00 | /src/build/android/pylib/constants/__init__.py | 9209f32c73e5ecda6818cf4cd518e9a6dfee865a | [
"BSD-3-Clause"
] | permissive | crazypeace/naiveproxy | d403fa282bcf65cac3eacb519667d6767080d05d | 0a8242dca02b760272d4a0eb8f8a712f9d1093c4 | refs/heads/master | 2023-03-09T21:23:30.415305 | 2022-10-06T17:23:40 | 2022-10-06T17:23:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,079 | py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Defines a set of constants shared by test runners and other scripts."""
# TODO(jbudorick): Split these constants into coherent modules.
# pylint: disable=W0212
import collections
import glob
import logging
import os
import subprocess
import devil.android.sdk.keyevent
from devil.android.constants import chrome
from devil.android.sdk import version_codes
from devil.constants import exit_codes
keyevent = devil.android.sdk.keyevent
# Root of the Chromium checkout: four directory levels above this file (which
# lives in build/android/pylib/constants/), unless overridden via the
# CHECKOUT_SOURCE_ROOT environment variable.
DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
    os.path.abspath(os.path.join(os.path.dirname(__file__),
                                 os.pardir, os.pardir, os.pardir, os.pardir)))
# Per-package metadata (package name, main activity, command-line flags file,
# devtools socket) for APKs the test runners know how to drive. Starts from
# devil's chrome.PACKAGE_INFO and adds Chromium-specific test/shell packages.
PACKAGE_INFO = dict(chrome.PACKAGE_INFO)
PACKAGE_INFO.update({
    'legacy_browser':
        chrome.PackageInfo('com.google.android.browser',
                           'com.android.browser.BrowserActivity', None, None),
    'chromecast_shell':
        chrome.PackageInfo('com.google.android.apps.mediashell',
                           'com.google.android.apps.mediashell.MediaShellActivity',
                           'castshell-command-line', None),
    'android_webview_shell':
        chrome.PackageInfo('org.chromium.android_webview.shell',
                           'org.chromium.android_webview.shell.AwShellActivity',
                           'android-webview-command-line', None),
    'gtest':
        chrome.PackageInfo('org.chromium.native_test',
                           'org.chromium.native_test.NativeUnitTestActivity',
                           'chrome-native-tests-command-line', None),
    'android_browsertests':
        chrome.PackageInfo('org.chromium.android_browsertests_apk',
                           ('org.chromium.android_browsertests_apk' +
                            '.ChromeBrowserTestsActivity'),
                           'chrome-native-tests-command-line', None),
    'components_browsertests':
        chrome.PackageInfo('org.chromium.components_browsertests_apk',
                           ('org.chromium.components_browsertests_apk' +
                            '.ComponentsBrowserTestsActivity'),
                           'chrome-native-tests-command-line', None),
    'content_browsertests':
        chrome.PackageInfo(
            'org.chromium.content_browsertests_apk',
            'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
            'chrome-native-tests-command-line', None),
    'chromedriver_webview_shell':
        chrome.PackageInfo('org.chromium.chromedriver_webview_shell',
                           'org.chromium.chromedriver_webview_shell.Main', None,
                           None),
    'android_webview_cts':
        chrome.PackageInfo('com.android.webview',
                           'com.android.cts.webkit.WebViewStartupCtsActivity',
                           'webview-command-line', None),
    'android_google_webview_cts':
        chrome.PackageInfo('com.google.android.webview',
                           'com.android.cts.webkit.WebViewStartupCtsActivity',
                           'webview-command-line', None),
    'android_google_webview_cts_debug':
        chrome.PackageInfo('com.google.android.webview.debug',
                           'com.android.cts.webkit.WebViewStartupCtsActivity',
                           'webview-command-line', None),
    'android_webview_ui_test':
        chrome.PackageInfo('org.chromium.webview_ui_test',
                           'org.chromium.webview_ui_test.WebViewUiTestActivity',
                           'webview-command-line', None),
    'weblayer_browsertests':
        chrome.PackageInfo(
            'org.chromium.weblayer_browsertests_apk',
            'org.chromium.weblayer_browsertests_apk.WebLayerBrowserTestsActivity',
            'chrome-native-tests-command-line', None),
})
# Ports arrangement for various test servers used in Chrome for Android.
# Lighttpd server will attempt to use 9000 as default port, if unavailable it
# will find a free port from 8001 - 8999.
LIGHTTPD_DEFAULT_PORT = 9000
LIGHTTPD_RANDOM_PORT_FIRST = 8001
LIGHTTPD_RANDOM_PORT_LAST = 8999
TEST_SYNC_SERVER_PORT = 9031
TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
TEST_POLICY_SERVER_PORT = 9051
# On-device scratch directory where test executables are placed.
TEST_EXECUTABLE_DIR = '/data/local/tmp'
# Directories for common java libraries for SDK build.
# These constants are defined in build/android/ant/common.xml
SDK_BUILD_JAVALIB_DIR = 'lib.java'
SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
SDK_BUILD_APKS_DIR = 'apks'
# On-device file holding the authorized adb public keys.
ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
# Host-side directory where perf step results are written.
PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
# The directory on the device where perf test output gets saved to.
DEVICE_PERF_OUTPUT_DIR = (
    '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
ANDROID_SDK_BUILD_TOOLS_VERSION = '33.0.0'
ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk',
                                'public')
ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
                                 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
                                'third_party', 'android_ndk')
# Kept under the configurable output dir (CHROMIUM_OUT_DIR, default 'out').
BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
                                os.environ.get('CHROMIUM_OUT_DIR', 'out'),
                                'bad_devices.json')
UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
# TODO(jbudorick): Remove once unused.
DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
# Configure ubsan to print stack traces in the format understood by "stack" so
# that they will be symbolized, and disable signal handlers because they
# interfere with the breakpad and sandbox tests.
# This value is duplicated in
# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java
UBSAN_OPTIONS = (
    'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' '
    'handle_segv=0 handle_sigbus=0 handle_sigfpe=0')
# Host-side Python unit-test suites: maps suite name to the directory to run
# from and the test modules to load.
# TODO(jbudorick): Rework this into testing/buildbot/
PYTHON_UNIT_TEST_SUITES = {
    'pylib_py_unittests': {
        'path':
            os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
        'test_modules': [
            'devil.android.device_utils_test',
            'devil.android.md5sum_test',
            'devil.utils.cmd_helper_test',
            'pylib.results.json_results_test',
        ]
    },
    'gyp_py_unittests': {
        'path':
            os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
        'test_modules': [
            'create_unwind_table_tests',
            'java_cpp_enum_tests',
            'java_cpp_strings_tests',
            'java_google_api_keys_tests',
            'extract_unwind_tables_tests',
        ]
    },
}
# Test types that run on the host machine rather than on a device.
LOCAL_MACHINE_TESTS = ['junit', 'python']
# Accepted values for the test runner's environment / test-type / device-type
# arguments.
VALID_ENVIRONMENTS = ['local']
VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
                    'perf', 'python']
VALID_DEVICE_TYPES = ['Android', 'iOS']
def SetBuildType(build_type):
  """Set the BUILDTYPE environment variable.

  Deprecated in favor of SetOutputDirectory(); kept for scripts that expose
  --release/--debug options. When writing a new script, prefer an
  --output-dir / --chromium-output-dir option plus SetOutputDirectory().

  If CHROMIUM_OUTPUT_DIR is already defined (or SetOutputDirectory() was
  called earlier), the value recorded here is ignored by GetOutDirectory(),
  and a warning is logged to say so.
  """
  existing = os.environ.get('CHROMIUM_OUTPUT_DIR')
  if existing:
    logging.warning(
        'SetBuildType("%s") ignored since CHROMIUM_OUTPUT_DIR is already '
        'defined as (%s)', build_type, existing)
  os.environ['BUILDTYPE'] = build_type
def SetOutputDirectory(output_directory):
  """Record *output_directory* (absolutized) in CHROMIUM_OUTPUT_DIR.

  Must be called early by scripts that rely on GetOutDirectory() or
  CheckOutputDirectory(), typically when handling an --output-dir or
  --chromium-output-dir option.
  """
  absolute_path = os.path.abspath(output_directory)
  os.environ['CHROMIUM_OUTPUT_DIR'] = absolute_path
# Printed when the Chromium output directory cannot be found. CHROMIUM_OUT_DIR
# and BUILDTYPE are deliberately not mentioned, to steer users toward
# CHROMIUM_OUTPUT_DIR instead.
_MISSING_OUTPUT_DIR_MESSAGE = (
    'The Chromium output directory could not be found. Please use an option '
    'such as --output-directory to provide it (see --help for details). '
    'Otherwise, define the CHROMIUM_OUTPUT_DIR environment variable.')
def GetOutDirectory():
"""Returns the Chromium build output directory.
NOTE: This is determined in the following way:
- From a previous call to SetOutputDirectory()
- Otherwise, from the CHROMIUM_OUTPUT_DIR env variable, if it is defined.
- Otherwise, from the current Chromium source directory, and a previous
call to SetBuildType() or the BUILDTYPE env variable, in combination
with the optional CHROMIUM_OUT_DIR env variable.
"""
if 'CHROMIUM_OUTPUT_DIR' in os.environ:
return os.path.abspath(os.path.join(
DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
build_type = os.environ.get('BUILDTYPE')
if not build_type:
raise EnvironmentError(_MISSING_OUTPUT_DIR_MESSAGE)
return os.path.abspath(os.path.join(
DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
build_type))
def CheckOutputDirectory():
"""Checks that the Chromium output directory is set, or can be found.
If it is not already set, this will also perform a little auto-detection:
- If the current directory contains a build.ninja file, use it as
the output directory.
- If CHROME_HEADLESS is defined in the environment (e.g. on a bot),
look if there is a single output directory under DIR_SOURCE_ROOT/out/,
and if so, use it as the output directory.
Raises:
Exception: If no output directory is detected.
"""
output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
if output_dir:
return
build_type = os.environ.get('BUILDTYPE')
if build_type and len(build_type) > 1:
return
# If CWD is an output directory, then assume it's the desired one.
if os.path.exists('build.ninja'):
output_dir = os.getcwd()
SetOutputDirectory(output_dir)
return
# When running on bots, see if the output directory is obvious.
# TODO(http://crbug.com/833808): Get rid of this by ensuring bots always set
# CHROMIUM_OUTPUT_DIR correctly.
if os.environ.get('CHROME_HEADLESS'):
dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja'))
if len(dirs) == 1:
SetOutputDirectory(dirs[0])
return
raise Exception(
'Chromium output directory not set, and CHROME_HEADLESS detected. ' +
'However, multiple out dirs exist: %r' % dirs)
raise Exception(_MISSING_OUTPUT_DIR_MESSAGE)
# Exit codes
ERROR_EXIT_CODE = exit_codes.ERROR
INFRA_EXIT_CODE = exit_codes.INFRA
WARNING_EXIT_CODE = exit_codes.WARNING
| [
"[email protected]"
] | |
3395107d611736090f54619e51924ac5ed480fdc | fc357aba40672ce57fcbf96e2ad837caaef389d4 | /dashboard/dashboard/auto_bisect_test.py | 3299e395363cceb7762d3ef6115eaa707fb181ea | [
"BSD-3-Clause"
] | permissive | dinosk/catapult | e550a7028ff3836fa7ec974d1a85eae2ccb14513 | 6de275176224197282cfd6a5617f3775abad734b | refs/heads/master | 2021-06-05T16:37:44.163473 | 2016-02-11T19:16:02 | 2016-02-11T19:16:02 | 56,242,905 | 0 | 1 | BSD-3-Clause | 2020-07-24T05:02:58 | 2016-04-14T14:12:00 | HTML | UTF-8 | Python | false | false | 9,419 | py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import unittest
import mock
import webapp2
import webtest
from dashboard import auto_bisect
from dashboard import request_handler
from dashboard import start_try_job
from dashboard import stored_object
from dashboard import testing_common
from dashboard import utils
from dashboard.models import anomaly
from dashboard.models import try_job
@mock.patch.object(utils, 'TickMonitoringCustomMetric', mock.MagicMock())
class AutoBisectTest(testing_common.TestCase):
def setUp(self):
super(AutoBisectTest, self).setUp()
stored_object.Set(
start_try_job._TESTER_DIRECTOR_MAP_KEY,
{
'linux_perf_tester': 'linux_perf_bisector',
'win64_nv_tester': 'linux_perf_bisector',
})
app = webapp2.WSGIApplication(
[('/auto_bisect', auto_bisect.AutoBisectHandler)])
self.testapp = webtest.TestApp(app)
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
def testPost_FailedJobRunTwice_JobRestarted(self, mock_perform_bisect):
testing_common.AddTests(
['ChromiumPerf'], ['linux-release'], {'sunspider': {'score': {}}})
test_key = utils.TestKey('ChromiumPerf/linux-release/sunspider/score')
anomaly.Anomaly(
bug_id=111, test=test_key,
start_revision=300100, end_revision=300200,
median_before_anomaly=100, median_after_anomaly=200).put()
try_job.TryJob(
bug_id=111, status='failed',
last_ran_timestamp=datetime.datetime.now() - datetime.timedelta(days=8),
run_count=2).put()
self.testapp.post('/auto_bisect')
mock_perform_bisect.assert_called_once_with(
try_job.TryJob.query(try_job.TryJob.bug_id == 111).get())
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
def testPost_FailedJobRunOnce_JobRestarted(self, mock_perform_bisect):
try_job.TryJob(
bug_id=222, status='failed',
last_ran_timestamp=datetime.datetime.now(),
run_count=1).put()
self.testapp.post('/auto_bisect')
mock_perform_bisect.assert_called_once_with(
try_job.TryJob.query(try_job.TryJob.bug_id == 222).get())
@mock.patch.object(auto_bisect.start_try_job, 'LogBisectResult')
def testPost_JobRunTooManyTimes_LogsMessage(self, mock_log_result):
job_key = try_job.TryJob(
bug_id=333, status='failed',
last_ran_timestamp=datetime.datetime.now(),
run_count=len(auto_bisect._BISECT_RESTART_PERIOD_DAYS) + 1).put()
self.testapp.post('/auto_bisect')
self.assertIsNone(job_key.get())
mock_log_result.assert_called_once_with(333, mock.ANY)
def testGet_WithStatsParameter_ListsTryJobs(self):
now = datetime.datetime.now()
try_job.TryJob(
bug_id=222, status='failed',
last_ran_timestamp=now, run_count=2).put()
try_job.TryJob(
bug_id=444, status='started',
last_ran_timestamp=now, run_count=1).put()
try_job.TryJob(
bug_id=777, status='started',
last_ran_timestamp=now, use_buildbucket=True, run_count=1).put()
try_job.TryJob(
bug_id=555, status=None,
last_ran_timestamp=now, run_count=1).put()
response = self.testapp.get('/auto_bisect?stats')
self.assertIn('Failed jobs: 1', response.body)
self.assertIn('Started jobs: 2', response.body)
class StartNewBisectForBugTest(testing_common.TestCase):
def setUp(self):
super(StartNewBisectForBugTest, self).setUp()
stored_object.Set(
start_try_job._TESTER_DIRECTOR_MAP_KEY,
{
'linux_perf_tester': 'linux_perf_bisector',
'win64_nv_tester': 'linux_perf_bisector',
})
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
def testStartNewBisectForBug_StartsBisect(self, mock_perform_bisect):
testing_common.AddTests(
['ChromiumPerf'], ['linux-release'], {'sunspider': {'score': {}}})
test_key = utils.TestKey('ChromiumPerf/linux-release/sunspider/score')
anomaly.Anomaly(
bug_id=111, test=test_key,
start_revision=300100, end_revision=300200,
median_before_anomaly=100, median_after_anomaly=200).put()
auto_bisect.StartNewBisectForBug(111)
job = try_job.TryJob.query(try_job.TryJob.bug_id == 111).get()
mock_perform_bisect.assert_called_once_with(job)
def testStartNewBisectForBug_RevisionTooLow_ReturnsError(self):
testing_common.AddTests(
['ChromiumPerf'], ['linux-release'], {'sunspider': {'score': {}}})
test_key = utils.TestKey('ChromiumPerf/linux-release/sunspider/score')
anomaly.Anomaly(
bug_id=222, test=test_key,
start_revision=1200, end_revision=1250,
median_before_anomaly=100, median_after_anomaly=200).put()
result = auto_bisect.StartNewBisectForBug(222)
self.assertEqual({'error': 'Invalid "good" revision: 1199.'}, result)
@mock.patch.object(
auto_bisect.start_try_job, 'PerformBisect',
mock.MagicMock(side_effect=request_handler.InvalidInputError(
'Some reason')))
def testStartNewBisectForBug_InvalidInputErrorRaised_ReturnsError(self):
testing_common.AddTests(['Foo'], ['bar'], {'sunspider': {'score': {}}})
test_key = utils.TestKey('Foo/bar/sunspider/score')
anomaly.Anomaly(
bug_id=345, test=test_key,
start_revision=300100, end_revision=300200,
median_before_anomaly=100, median_after_anomaly=200).put()
result = auto_bisect.StartNewBisectForBug(345)
self.assertEqual({'error': 'Some reason'}, result)
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
def testStartNewBisectForBug_WithDefaultRevs_StartsBisect(
self, mock_perform_bisect):
testing_common.AddTests(
['ChromiumPerf'], ['linux-release'], {'sunspider': {'score': {}}})
test_key = utils.TestKey('ChromiumPerf/linux-release/sunspider/score')
testing_common.AddRows(
'ChromiumPerf/linux-release/sunspider/score',
{
1199: {
'a_default_rev': 'r_foo',
'r_foo': '9e29b5bcd08357155b2859f87227d50ed60cf857'
},
1250: {
'a_default_rev': 'r_foo',
'r_foo': 'fc34e5346446854637311ad7793a95d56e314042'
}
})
anomaly.Anomaly(
bug_id=333, test=test_key,
start_revision=1200, end_revision=1250,
median_before_anomaly=100, median_after_anomaly=200).put()
auto_bisect.StartNewBisectForBug(333)
job = try_job.TryJob.query(try_job.TryJob.bug_id == 333).get()
mock_perform_bisect.assert_called_once_with(job)
def testStartNewBisectForBug_UnbisectableTest_ReturnsError(self):
testing_common.AddTests(['V8'], ['x86'], {'v8': {'sunspider': {}}})
# The test suite "v8" is in the black-list of test suite names.
test_key = utils.TestKey('V8/x86/v8/sunspider')
anomaly.Anomaly(
bug_id=444, test=test_key,
start_revision=155000, end_revision=155100,
median_before_anomaly=100, median_after_anomaly=200).put()
result = auto_bisect.StartNewBisectForBug(444)
self.assertEqual({'error': 'Could not select a test.'}, result)
class TickMonitoringCustomMetricTest(testing_common.TestCase):
def setUp(self):
super(TickMonitoringCustomMetricTest, self).setUp()
app = webapp2.WSGIApplication(
[('/auto_bisect', auto_bisect.AutoBisectHandler)])
self.testapp = webtest.TestApp(app)
@mock.patch.object(utils, 'TickMonitoringCustomMetric')
def testPost_NoTryJobs_CustomMetricTicked(self, mock_tick):
self.testapp.post('/auto_bisect')
mock_tick.assert_called_once_with('RestartFailedBisectJobs')
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
@mock.patch.object(utils, 'TickMonitoringCustomMetric')
def testPost_RunCount1_ExceptionInPerformBisect_CustomMetricNotTicked(
self, mock_tick, mock_perform_bisect):
mock_perform_bisect.side_effect = request_handler.InvalidInputError()
try_job.TryJob(
bug_id=222, status='failed',
last_ran_timestamp=datetime.datetime.now(),
run_count=1).put()
self.testapp.post('/auto_bisect')
self.assertEqual(0, mock_tick.call_count)
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
@mock.patch.object(utils, 'TickMonitoringCustomMetric')
def testPost_RunCount2_ExceptionInPerformBisect_CustomMetricNotTicked(
self, mock_tick, mock_perform_bisect):
mock_perform_bisect.side_effect = request_handler.InvalidInputError()
try_job.TryJob(
bug_id=111, status='failed',
last_ran_timestamp=datetime.datetime.now() - datetime.timedelta(days=8),
run_count=2).put()
self.testapp.post('/auto_bisect')
self.assertEqual(0, mock_tick.call_count)
@mock.patch.object(auto_bisect.start_try_job, 'PerformBisect')
@mock.patch.object(utils, 'TickMonitoringCustomMetric')
def testPost_NoExceptionInPerformBisect_CustomMetricTicked(
self, mock_tick, mock_perform_bisect):
try_job.TryJob(
bug_id=222, status='failed',
last_ran_timestamp=datetime.datetime.now(),
run_count=1).put()
self.testapp.post('/auto_bisect')
self.assertEqual(1, mock_perform_bisect.call_count)
mock_tick.assert_called_once_with('RestartFailedBisectJobs')
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
d184179a81f11f57b1cdd2a8e64a7a8ee95a2bd2 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2667/60755/240482.py | d1424016496d8a183f3003a82a8271c426557281 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | NumOfEg = int(input())
result = []
for i in range(NumOfEg):
num = input().split(" ")
result.append(pow(2,int(num[1]))-int(num[0]))
for i in result:
print(i) | [
"[email protected]"
] | |
10f1e85617f62e861d852026b268840e60d81013 | e823bc36af457f229f6879d6e6a3ef6247c129aa | /virtualenv/Lib/site-packages/twisted/conch/test/test_connection.py | f81d90f719d6d096243abd991056097bb9343740 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | William-An/DFB_Final | e772fa979c41f2f83a4bf657cde499456215fb3b | 49a9244c98116574676992ebecd1d9435e1d5b1e | refs/heads/master | 2022-11-07T15:47:36.189057 | 2017-07-22T01:01:37 | 2017-07-22T01:01:43 | 97,426,562 | 1 | 1 | MIT | 2022-10-15T02:45:57 | 2017-07-17T02:21:42 | Python | UTF-8 | Python | false | false | 27,716 | py | # Copyright (c) 2007-2010 Twisted Matrix Laboratories.
# See LICENSE for details
"""
This module tests twisted.conch.ssh.connection.
"""
from __future__ import division, absolute_import
import struct
from twisted.conch import error
from twisted.conch.ssh import channel, common, connection
from twisted.python.compat import long
from twisted.trial import unittest
from twisted.conch.test import test_userauth
class TestChannel(channel.SSHChannel):
"""
A mocked-up version of twisted.conch.ssh.channel.SSHChannel.
@ivar gotOpen: True if channelOpen has been called.
@type gotOpen: L{bool}
@ivar specificData: the specific channel open data passed to channelOpen.
@type specificData: L{bytes}
@ivar openFailureReason: the reason passed to openFailed.
@type openFailed: C{error.ConchError}
@ivar inBuffer: a C{list} of strings received by the channel.
@type inBuffer: C{list}
@ivar extBuffer: a C{list} of 2-tuples (type, extended data) of received by
the channel.
@type extBuffer: C{list}
@ivar numberRequests: the number of requests that have been made to this
channel.
@type numberRequests: L{int}
@ivar gotEOF: True if the other side sent EOF.
@type gotEOF: L{bool}
@ivar gotOneClose: True if the other side closed the connection.
@type gotOneClose: L{bool}
@ivar gotClosed: True if the channel is closed.
@type gotClosed: L{bool}
"""
name = b"TestChannel"
gotOpen = False
def logPrefix(self):
return "TestChannel %i" % self.id
def channelOpen(self, specificData):
"""
The channel is open. Set up the instance variables.
"""
self.gotOpen = True
self.specificData = specificData
self.inBuffer = []
self.extBuffer = []
self.numberRequests = 0
self.gotEOF = False
self.gotOneClose = False
self.gotClosed = False
def openFailed(self, reason):
"""
Opening the channel failed. Store the reason why.
"""
self.openFailureReason = reason
def request_test(self, data):
"""
A test request. Return True if data is 'data'.
@type data: L{bytes}
"""
self.numberRequests += 1
return data == b'data'
def dataReceived(self, data):
"""
Data was received. Store it in the buffer.
"""
self.inBuffer.append(data)
def extReceived(self, code, data):
"""
Extended data was received. Store it in the buffer.
"""
self.extBuffer.append((code, data))
def eofReceived(self):
"""
EOF was received. Remember it.
"""
self.gotEOF = True
def closeReceived(self):
"""
Close was received. Remember it.
"""
self.gotOneClose = True
def closed(self):
"""
The channel is closed. Rembember it.
"""
self.gotClosed = True
class TestAvatar:
"""
A mocked-up version of twisted.conch.avatar.ConchUser
"""
_ARGS_ERROR_CODE = 123
def lookupChannel(self, channelType, windowSize, maxPacket, data):
"""
The server wants us to return a channel. If the requested channel is
our TestChannel, return it, otherwise return None.
"""
if channelType == TestChannel.name:
return TestChannel(remoteWindow=windowSize,
remoteMaxPacket=maxPacket,
data=data, avatar=self)
elif channelType == b"conch-error-args":
# Raise a ConchError with backwards arguments to make sure the
# connection fixes it for us. This case should be deprecated and
# deleted eventually, but only after all of Conch gets the argument
# order right.
raise error.ConchError(
self._ARGS_ERROR_CODE, "error args in wrong order")
def gotGlobalRequest(self, requestType, data):
"""
The client has made a global request. If the global request is
'TestGlobal', return True. If the global request is 'TestData',
return True and the request-specific data we received. Otherwise,
return False.
"""
if requestType == b'TestGlobal':
return True
elif requestType == b'TestData':
return True, data
else:
return False
class TestConnection(connection.SSHConnection):
"""
A subclass of SSHConnection for testing.
@ivar channel: the current channel.
@type channel. C{TestChannel}
"""
def logPrefix(self):
return "TestConnection"
def global_TestGlobal(self, data):
"""
The other side made the 'TestGlobal' global request. Return True.
"""
return True
def global_Test_Data(self, data):
"""
The other side made the 'Test-Data' global request. Return True and
the data we received.
"""
return True, data
def channel_TestChannel(self, windowSize, maxPacket, data):
"""
The other side is requesting the TestChannel. Create a C{TestChannel}
instance, store it, and return it.
"""
self.channel = TestChannel(remoteWindow=windowSize,
remoteMaxPacket=maxPacket, data=data)
return self.channel
def channel_ErrorChannel(self, windowSize, maxPacket, data):
"""
The other side is requesting the ErrorChannel. Raise an exception.
"""
raise AssertionError('no such thing')
class ConnectionTests(unittest.TestCase):
if test_userauth.transport is None:
skip = "Cannot run without both cryptography and pyasn1"
def setUp(self):
self.transport = test_userauth.FakeTransport(None)
self.transport.avatar = TestAvatar()
self.conn = TestConnection()
self.conn.transport = self.transport
self.conn.serviceStarted()
def _openChannel(self, channel):
"""
Open the channel with the default connection.
"""
self.conn.openChannel(channel)
self.transport.packets = self.transport.packets[:-1]
self.conn.ssh_CHANNEL_OPEN_CONFIRMATION(struct.pack('>2L',
channel.id, 255) + b'\x00\x02\x00\x00\x00\x00\x80\x00')
def tearDown(self):
self.conn.serviceStopped()
def test_linkAvatar(self):
"""
Test that the connection links itself to the avatar in the
transport.
"""
self.assertIs(self.transport.avatar.conn, self.conn)
def test_serviceStopped(self):
"""
Test that serviceStopped() closes any open channels.
"""
channel1 = TestChannel()
channel2 = TestChannel()
self.conn.openChannel(channel1)
self.conn.openChannel(channel2)
self.conn.ssh_CHANNEL_OPEN_CONFIRMATION(b'\x00\x00\x00\x00' * 4)
self.assertTrue(channel1.gotOpen)
self.assertFalse(channel2.gotOpen)
self.conn.serviceStopped()
self.assertTrue(channel1.gotClosed)
def test_GLOBAL_REQUEST(self):
"""
Test that global request packets are dispatched to the global_*
methods and the return values are translated into success or failure
messages.
"""
self.conn.ssh_GLOBAL_REQUEST(common.NS(b'TestGlobal') + b'\xff')
self.assertEqual(self.transport.packets,
[(connection.MSG_REQUEST_SUCCESS, b'')])
self.transport.packets = []
self.conn.ssh_GLOBAL_REQUEST(common.NS(b'TestData') + b'\xff' +
b'test data')
self.assertEqual(self.transport.packets,
[(connection.MSG_REQUEST_SUCCESS, b'test data')])
self.transport.packets = []
self.conn.ssh_GLOBAL_REQUEST(common.NS(b'TestBad') + b'\xff')
self.assertEqual(self.transport.packets,
[(connection.MSG_REQUEST_FAILURE, b'')])
self.transport.packets = []
self.conn.ssh_GLOBAL_REQUEST(common.NS(b'TestGlobal') + b'\x00')
self.assertEqual(self.transport.packets, [])
def test_REQUEST_SUCCESS(self):
"""
Test that global request success packets cause the Deferred to be
called back.
"""
d = self.conn.sendGlobalRequest(b'request', b'data', True)
self.conn.ssh_REQUEST_SUCCESS(b'data')
def check(data):
self.assertEqual(data, b'data')
d.addCallback(check)
d.addErrback(self.fail)
return d
def test_REQUEST_FAILURE(self):
"""
Test that global request failure packets cause the Deferred to be
erred back.
"""
d = self.conn.sendGlobalRequest(b'request', b'data', True)
self.conn.ssh_REQUEST_FAILURE(b'data')
def check(f):
self.assertEqual(f.value.data, b'data')
d.addCallback(self.fail)
d.addErrback(check)
return d
def test_CHANNEL_OPEN(self):
"""
Test that open channel packets cause a channel to be created and
opened or a failure message to be returned.
"""
del self.transport.avatar
self.conn.ssh_CHANNEL_OPEN(common.NS(b'TestChannel') +
b'\x00\x00\x00\x01' * 4)
self.assertTrue(self.conn.channel.gotOpen)
self.assertEqual(self.conn.channel.conn, self.conn)
self.assertEqual(self.conn.channel.data, b'\x00\x00\x00\x01')
self.assertEqual(self.conn.channel.specificData, b'\x00\x00\x00\x01')
self.assertEqual(self.conn.channel.remoteWindowLeft, 1)
self.assertEqual(self.conn.channel.remoteMaxPacket, 1)
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_OPEN_CONFIRMATION,
b'\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00'
b'\x00\x00\x80\x00')])
self.transport.packets = []
self.conn.ssh_CHANNEL_OPEN(common.NS(b'BadChannel') +
b'\x00\x00\x00\x02' * 4)
self.flushLoggedErrors()
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_OPEN_FAILURE,
b'\x00\x00\x00\x02\x00\x00\x00\x03' + common.NS(
b'unknown channel') + common.NS(b''))])
self.transport.packets = []
self.conn.ssh_CHANNEL_OPEN(common.NS(b'ErrorChannel') +
b'\x00\x00\x00\x02' * 4)
self.flushLoggedErrors()
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_OPEN_FAILURE,
b'\x00\x00\x00\x02\x00\x00\x00\x02' + common.NS(
b'unknown failure') + common.NS(b''))])
def _lookupChannelErrorTest(self, code):
"""
Deliver a request for a channel open which will result in an exception
being raised during channel lookup. Assert that an error response is
delivered as a result.
"""
self.transport.avatar._ARGS_ERROR_CODE = code
self.conn.ssh_CHANNEL_OPEN(
common.NS(b'conch-error-args') + b'\x00\x00\x00\x01' * 4)
errors = self.flushLoggedErrors(error.ConchError)
self.assertEqual(
len(errors), 1, "Expected one error, got: %r" % (errors,))
self.assertEqual(errors[0].value.args, (long(123), "error args in wrong order"))
self.assertEqual(
self.transport.packets,
[(connection.MSG_CHANNEL_OPEN_FAILURE,
# The response includes some bytes which identifying the
# associated request, as well as the error code (7b in hex) and
# the error message.
b'\x00\x00\x00\x01\x00\x00\x00\x7b' + common.NS(
b'error args in wrong order') + common.NS(b''))])
def test_lookupChannelError(self):
"""
If a C{lookupChannel} implementation raises L{error.ConchError} with the
arguments in the wrong order, a C{MSG_CHANNEL_OPEN} failure is still
sent in response to the message.
This is a temporary work-around until L{error.ConchError} is given
better attributes and all of the Conch code starts constructing
instances of it properly. Eventually this functionality should be
deprecated and then removed.
"""
self._lookupChannelErrorTest(123)
def test_lookupChannelErrorLongCode(self):
"""
Like L{test_lookupChannelError}, but for the case where the failure code
is represented as a L{long} instead of a L{int}.
"""
self._lookupChannelErrorTest(long(123))
def test_CHANNEL_OPEN_CONFIRMATION(self):
"""
Test that channel open confirmation packets cause the channel to be
notified that it's open.
"""
channel = TestChannel()
self.conn.openChannel(channel)
self.conn.ssh_CHANNEL_OPEN_CONFIRMATION(b'\x00\x00\x00\x00'*5)
self.assertEqual(channel.remoteWindowLeft, 0)
self.assertEqual(channel.remoteMaxPacket, 0)
self.assertEqual(channel.specificData, b'\x00\x00\x00\x00')
self.assertEqual(self.conn.channelsToRemoteChannel[channel],
0)
self.assertEqual(self.conn.localToRemoteChannel[0], 0)
def test_CHANNEL_OPEN_FAILURE(self):
"""
Test that channel open failure packets cause the channel to be
notified that its opening failed.
"""
channel = TestChannel()
self.conn.openChannel(channel)
self.conn.ssh_CHANNEL_OPEN_FAILURE(b'\x00\x00\x00\x00\x00\x00\x00'
b'\x01' + common.NS(b'failure!'))
self.assertEqual(channel.openFailureReason.args, (b'failure!', 1))
self.assertIsNone(self.conn.channels.get(channel))
def test_CHANNEL_WINDOW_ADJUST(self):
"""
Test that channel window adjust messages add bytes to the channel
window.
"""
channel = TestChannel()
self._openChannel(channel)
oldWindowSize = channel.remoteWindowLeft
self.conn.ssh_CHANNEL_WINDOW_ADJUST(b'\x00\x00\x00\x00\x00\x00\x00'
b'\x01')
self.assertEqual(channel.remoteWindowLeft, oldWindowSize + 1)
def test_CHANNEL_DATA(self):
"""
Test that channel data messages are passed up to the channel, or
cause the channel to be closed if the data is too large.
"""
channel = TestChannel(localWindow=6, localMaxPacket=5)
self._openChannel(channel)
self.conn.ssh_CHANNEL_DATA(b'\x00\x00\x00\x00' + common.NS(b'data'))
self.assertEqual(channel.inBuffer, [b'data'])
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_WINDOW_ADJUST, b'\x00\x00\x00\xff'
b'\x00\x00\x00\x04')])
self.transport.packets = []
longData = b'a' * (channel.localWindowLeft + 1)
self.conn.ssh_CHANNEL_DATA(b'\x00\x00\x00\x00' + common.NS(longData))
self.assertEqual(channel.inBuffer, [b'data'])
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_CLOSE, b'\x00\x00\x00\xff')])
channel = TestChannel()
self._openChannel(channel)
bigData = b'a' * (channel.localMaxPacket + 1)
self.transport.packets = []
self.conn.ssh_CHANNEL_DATA(b'\x00\x00\x00\x01' + common.NS(bigData))
self.assertEqual(channel.inBuffer, [])
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_CLOSE, b'\x00\x00\x00\xff')])
def test_CHANNEL_EXTENDED_DATA(self):
"""
Test that channel extended data messages are passed up to the channel,
or cause the channel to be closed if they're too big.
"""
channel = TestChannel(localWindow=6, localMaxPacket=5)
self._openChannel(channel)
self.conn.ssh_CHANNEL_EXTENDED_DATA(b'\x00\x00\x00\x00\x00\x00\x00'
b'\x00' + common.NS(b'data'))
self.assertEqual(channel.extBuffer, [(0, b'data')])
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_WINDOW_ADJUST, b'\x00\x00\x00\xff'
b'\x00\x00\x00\x04')])
self.transport.packets = []
longData = b'a' * (channel.localWindowLeft + 1)
self.conn.ssh_CHANNEL_EXTENDED_DATA(b'\x00\x00\x00\x00\x00\x00\x00'
b'\x00' + common.NS(longData))
self.assertEqual(channel.extBuffer, [(0, b'data')])
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_CLOSE, b'\x00\x00\x00\xff')])
channel = TestChannel()
self._openChannel(channel)
bigData = b'a' * (channel.localMaxPacket + 1)
self.transport.packets = []
self.conn.ssh_CHANNEL_EXTENDED_DATA(b'\x00\x00\x00\x01\x00\x00\x00'
b'\x00' + common.NS(bigData))
self.assertEqual(channel.extBuffer, [])
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_CLOSE, b'\x00\x00\x00\xff')])
def test_CHANNEL_EOF(self):
"""
Test that channel eof messages are passed up to the channel.
"""
channel = TestChannel()
self._openChannel(channel)
self.conn.ssh_CHANNEL_EOF(b'\x00\x00\x00\x00')
self.assertTrue(channel.gotEOF)
def test_CHANNEL_CLOSE(self):
"""
Test that channel close messages are passed up to the channel. Also,
test that channel.close() is called if both sides are closed when this
message is received.
"""
channel = TestChannel()
self._openChannel(channel)
self.conn.sendClose(channel)
self.conn.ssh_CHANNEL_CLOSE(b'\x00\x00\x00\x00')
self.assertTrue(channel.gotOneClose)
self.assertTrue(channel.gotClosed)
def test_CHANNEL_REQUEST_success(self):
"""
Test that channel requests that succeed send MSG_CHANNEL_SUCCESS.
"""
channel = TestChannel()
self._openChannel(channel)
self.conn.ssh_CHANNEL_REQUEST(b'\x00\x00\x00\x00' + common.NS(b'test')
+ b'\x00')
self.assertEqual(channel.numberRequests, 1)
d = self.conn.ssh_CHANNEL_REQUEST(b'\x00\x00\x00\x00' + common.NS(
b'test') + b'\xff' + b'data')
def check(result):
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_SUCCESS, b'\x00\x00\x00\xff')])
d.addCallback(check)
return d
def test_CHANNEL_REQUEST_failure(self):
"""
Test that channel requests that fail send MSG_CHANNEL_FAILURE.
"""
channel = TestChannel()
self._openChannel(channel)
d = self.conn.ssh_CHANNEL_REQUEST(b'\x00\x00\x00\x00' + common.NS(
b'test') + b'\xff')
def check(result):
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_FAILURE, b'\x00\x00\x00\xff'
)])
d.addCallback(self.fail)
d.addErrback(check)
return d
def test_CHANNEL_REQUEST_SUCCESS(self):
"""
Test that channel request success messages cause the Deferred to be
called back.
"""
channel = TestChannel()
self._openChannel(channel)
d = self.conn.sendRequest(channel, b'test', b'data', True)
self.conn.ssh_CHANNEL_SUCCESS(b'\x00\x00\x00\x00')
def check(result):
self.assertTrue(result)
return d
def test_CHANNEL_REQUEST_FAILURE(self):
"""
Test that channel request failure messages cause the Deferred to be
erred back.
"""
channel = TestChannel()
self._openChannel(channel)
d = self.conn.sendRequest(channel, b'test', b'', True)
self.conn.ssh_CHANNEL_FAILURE(b'\x00\x00\x00\x00')
def check(result):
self.assertEqual(result.value.value, 'channel request failed')
d.addCallback(self.fail)
d.addErrback(check)
return d
def test_sendGlobalRequest(self):
"""
Test that global request messages are sent in the right format.
"""
d = self.conn.sendGlobalRequest(b'wantReply', b'data', True)
# must be added to prevent errbacking during teardown
d.addErrback(lambda failure: None)
self.conn.sendGlobalRequest(b'noReply', b'', False)
self.assertEqual(self.transport.packets,
[(connection.MSG_GLOBAL_REQUEST, common.NS(b'wantReply') +
b'\xffdata'),
(connection.MSG_GLOBAL_REQUEST, common.NS(b'noReply') +
b'\x00')])
self.assertEqual(self.conn.deferreds, {'global':[d]})
def test_openChannel(self):
"""
Test that open channel messages are sent in the right format.
"""
channel = TestChannel()
self.conn.openChannel(channel, b'aaaa')
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_OPEN, common.NS(b'TestChannel') +
b'\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x80\x00aaaa')])
self.assertEqual(channel.id, 0)
self.assertEqual(self.conn.localChannelID, 1)
def test_sendRequest(self):
"""
Test that channel request messages are sent in the right format.
"""
channel = TestChannel()
self._openChannel(channel)
d = self.conn.sendRequest(channel, b'test', b'test', True)
# needed to prevent errbacks during teardown.
d.addErrback(lambda failure: None)
self.conn.sendRequest(channel, b'test2', b'', False)
channel.localClosed = True # emulate sending a close message
self.conn.sendRequest(channel, b'test3', b'', True)
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_REQUEST, b'\x00\x00\x00\xff' +
common.NS(b'test') + b'\x01test'),
(connection.MSG_CHANNEL_REQUEST, b'\x00\x00\x00\xff' +
common.NS(b'test2') + b'\x00')])
self.assertEqual(self.conn.deferreds[0], [d])
def test_adjustWindow(self):
"""
Test that channel window adjust messages cause bytes to be added
to the window.
"""
channel = TestChannel(localWindow=5)
self._openChannel(channel)
channel.localWindowLeft = 0
self.conn.adjustWindow(channel, 1)
self.assertEqual(channel.localWindowLeft, 1)
channel.localClosed = True
self.conn.adjustWindow(channel, 2)
self.assertEqual(channel.localWindowLeft, 1)
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_WINDOW_ADJUST, b'\x00\x00\x00\xff'
b'\x00\x00\x00\x01')])
def test_sendData(self):
"""
Test that channel data messages are sent in the right format.
"""
channel = TestChannel()
self._openChannel(channel)
self.conn.sendData(channel, b'a')
channel.localClosed = True
self.conn.sendData(channel, b'b')
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_DATA, b'\x00\x00\x00\xff' +
common.NS(b'a'))])
def test_sendExtendedData(self):
"""
Test that channel extended data messages are sent in the right format.
"""
channel = TestChannel()
self._openChannel(channel)
self.conn.sendExtendedData(channel, 1, b'test')
channel.localClosed = True
self.conn.sendExtendedData(channel, 2, b'test2')
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_EXTENDED_DATA, b'\x00\x00\x00\xff' +
b'\x00\x00\x00\x01' + common.NS(b'test'))])
def test_sendEOF(self):
"""
Test that channel EOF messages are sent in the right format.
"""
channel = TestChannel()
self._openChannel(channel)
self.conn.sendEOF(channel)
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_EOF, b'\x00\x00\x00\xff')])
channel.localClosed = True
self.conn.sendEOF(channel)
self.assertEqual(self.transport.packets,
[(connection.MSG_CHANNEL_EOF, b'\x00\x00\x00\xff')])
    def test_sendClose(self):
        """
        Test that channel close messages are sent in the right format.
        """
        channel = TestChannel()
        self._openChannel(channel)
        self.conn.sendClose(channel)
        self.assertTrue(channel.localClosed)
        self.assertEqual(self.transport.packets,
            [(connection.MSG_CHANNEL_CLOSE, b'\x00\x00\x00\xff')])
        # A second close on an already locally-closed channel is a no-op:
        # no additional packet appears on the transport.
        self.conn.sendClose(channel)
        self.assertEqual(self.transport.packets,
            [(connection.MSG_CHANNEL_CLOSE, b'\x00\x00\x00\xff')])
        # If the remote side closed first, our close completes the shutdown
        # and the channel reports itself fully closed.
        channel2 = TestChannel()
        self._openChannel(channel2)
        channel2.remoteClosed = True
        self.conn.sendClose(channel2)
        self.assertTrue(channel2.gotClosed)
    def test_getChannelWithAvatar(self):
        """
        Test that getChannel dispatches to the avatar when an avatar is
        present. Correct functioning without the avatar is verified in
        test_CHANNEL_OPEN.
        """
        # The avatar's lookupChannel is expected to build the channel and
        # forward the open arguments to it.
        channel = self.conn.getChannel(b'TestChannel', 50, 30, b'data')
        self.assertEqual(channel.data, b'data')
        self.assertEqual(channel.remoteWindowLeft, 50)
        self.assertEqual(channel.remoteMaxPacket, 30)
        # Unknown channel types must raise ConchError rather than return a
        # channel object.
        self.assertRaises(error.ConchError, self.conn.getChannel,
                b'BadChannel', 50, 30, b'data')
    def test_gotGlobalRequestWithoutAvatar(self):
        """
        Test that gotGlobalRequests dispatches to global_* without an avatar.
        """
        # Remove the avatar so the connection falls back to its own
        # global_* method lookup.
        del self.transport.avatar
        self.assertTrue(self.conn.gotGlobalRequest(b'TestGlobal', b'data'))
        # Request names containing '-' are normalized when resolving the
        # handler method; this handler echoes the payload back.
        self.assertEqual(self.conn.gotGlobalRequest(b'Test-Data', b'data'),
                (True, b'data'))
        # Unknown request names are refused.
        self.assertFalse(self.conn.gotGlobalRequest(b'BadGlobal', b'data'))
    def test_channelClosedCausesLeftoverChannelDeferredsToErrback(self):
        """
        Whenever an SSH channel gets closed any Deferred that was returned by a
        sendRequest() on its parent connection must be errbacked.
        """
        channel = TestChannel()
        self._openChannel(channel)
        # The reply never arrives; closing the channel below must fail this
        # Deferred with ConchError instead of leaving it pending forever.
        d = self.conn.sendRequest(
            channel, b"dummyrequest", b"dummydata", wantReply=1)
        d = self.assertFailure(d, error.ConchError)
        self.conn.channelClosed(channel)
        return d
class CleanConnectionShutdownTests(unittest.TestCase):
    """
    Check whether correct cleanup is performed on connection shutdown.
    """
    # Trial honours a class-level ``skip`` attribute; skip the whole case
    # when the crypto dependencies needed by test_userauth are missing.
    if test_userauth.transport is None:
        skip = "Cannot run without both cryptography and pyasn1"
    def setUp(self):
        self.transport = test_userauth.FakeTransport(None)
        self.transport.avatar = TestAvatar()
        self.conn = TestConnection()
        self.conn.transport = self.transport
    def test_serviceStoppedCausesLeftoverGlobalDeferredsToErrback(self):
        """
        Once the service is stopped any leftover global deferred returned by
        a sendGlobalRequest() call must be errbacked.
        """
        self.conn.serviceStarted()
        # The global request is never answered; serviceStopped() below must
        # fail the pending Deferred with ConchError.
        d = self.conn.sendGlobalRequest(
            b"dummyrequest", b"dummydata", wantReply=1)
        d = self.assertFailure(d, error.ConchError)
        self.conn.serviceStopped()
        return d
| [
"[email protected]"
] | |
3808964e73e804ea86f5c9f6bb724678bd097437 | 8163d8f03aea22cb4fa1e60d809781049fff4bb4 | /relationship/first/migrations/0001_initial.py | f5297a9e109a0d6e57feaf3aac287ec150b66734 | [] | no_license | shubham454/Django-Devlopment | 694b973d31a82d2ded11f95138bd766130d7d3c9 | 43a2c3b98dbe9f582f2394fcfb3beb133c37b145 | refs/heads/master | 2022-12-04T14:34:05.093402 | 2020-08-13T18:35:33 | 2020-08-13T18:35:33 | 287,353,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 921 | py | # Generated by Django 2.2.2 on 2019-12-30 05:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the app: creates Language and Framework.
    initial = True
    # First migration, so there is nothing to depend on.
    dependencies = [
    ]
    operations = [
        # A programming language; referenced by Framework below.
        migrations.CreateModel(
            name='Language',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('lname', models.CharField(max_length=50)),
            ],
        ),
        # A framework, linked many-to-one to its implementation Language;
        # deleting the Language cascades to its frameworks.
        migrations.CreateModel(
            name='Framework',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fname', models.CharField(max_length=50)),
                ('flanguage', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='first.Language')),
            ],
        ),
    ]
| [
"[email protected]"
] | |
a5745b3b97a4ed742e0b12d53ace7eda3fbebed1 | 5b25362295262504a56dcbac10a40226bdc18bba | /libapp/migrations/0012_user_notification.py | 8d2e484be18e0f6780273648fc8161dc05ae32f1 | [
"MIT"
] | permissive | Nyagah-Tech/library | 2fef2c990b7992bb3b311dfb1502b4d2124494ac | 2ae1df4e89257c9c7d7f2328ab6c5f6352867997 | refs/heads/master | 2022-12-14T14:31:54.440553 | 2020-05-29T14:18:59 | 2020-05-29T14:18:59 | 235,101,527 | 0 | 0 | MIT | 2022-12-08T03:43:09 | 2020-01-20T12:56:42 | Python | UTF-8 | Python | false | false | 881 | py | # Generated by Django 2.2.8 on 2020-01-24 05:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import tinymce.models
class Migration(migrations.Migration):
    dependencies = [
        # Needs the user model (for posted_by) and the previous libapp
        # migration to exist first.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('libapp', '0011_borrowing_notification'),
    ]
    operations = [
        # Notification addressed to users, authored by a user; deleting the
        # author cascades to the notification.
        migrations.CreateModel(
            name='User_notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Rich-text body (TinyMCE HTML field).
                ('notification', tinymce.models.HTMLField()),
                ('posted_on', models.DateTimeField(auto_now_add=True)),
                ('posted_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
13b12204cb376256fe6b328b751c09622f692292 | 49f98241bfd0848f0b9d06d062ef6ead01f2bb9c | /w3af/core/data/db/dbms.py | bc14b89c04021f56ee313741cccf4478bf140f77 | [] | no_license | s0i37/__w3af | 8f63f91879bc3790cc25c1dd7b0b5e983e5e24ab | ebaf108680bd88f0bc48d1a1d86ae80cb35d0213 | refs/heads/master | 2021-09-16T20:23:18.375077 | 2018-06-24T17:33:16 | 2018-06-24T17:33:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,956 | py | """
dbms.py
Copyright 2013 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
from __future__ import with_statement, print_function
import sys
import os
import sqlite3
from functools import wraps
from concurrent.futures import Future
from multiprocessing.dummy import Queue, Process
from w3af.core.data.misc.file_utils import replace_file_special_chars
from w3af.core.controllers.misc.temp_dir import get_temp_dir, create_temp_dir
from w3af.core.controllers.exceptions import (DBException,
NoSuchTableException,
MalformedDBException)
# Constants
# Opcodes understood by the SQLiteExecutor worker thread; requests queued
# to it are tagged with one of these strings.
SETUP = 'SETUP'
QUERY = 'QUERY'
SELECT = 'SELECT'
COMMIT = 'COMMIT'
POISON = 'POISON'
# User-facing message printed when sqlite reports a corrupt ("database disk
# image is malformed") file; see the linked issue for background.
DB_MALFORMED_ERROR = ('SQLite raised a database disk image is malformed'
                      ' exception. While we do have good understanding on the'
                      ' many reasons that'
                      ' might lead to this issue [0] and multiple bug reports'
                      ' by users [1] there is no clear indication on exactly'
                      ' what causes the issue in w3af.\n\n'
                      ''
                      'If you are able to reproduce this issue in your'
                      ' environment we would love to hear the OS and hardware'
                      ' details, steps to reproduce, and any other related'
                      ' information. Just send us a comment at #4905 [1].\n\n'
                      ''
                      '[0] https://www.sqlite.org/howtocorrupt.html\n'
                      '[1] https://github.com/andresriancho/w3af/issues/4905')
def verify_started(meth):
    """Decorator for SQLiteDBMS methods: refuse calls once the executor
    thread has terminated (i.e. after stop() was called)."""
    @wraps(meth)
    def _checked(self, *args, **kwds):
        assert self.sql_executor.is_alive(), \
            'No calls to SQLiteDBMS can be made after stop().'
        return meth(self, *args, **kwds)
    return _checked
class SQLiteDBMS(object):
    """
    Wrap sqlite connection in a way that allows concurrent requests from
    multiple threads.
    This is done by internally queuing the requests and processing them
    sequentially in a separate thread (in the same order they arrived).
    For all requests performed by the client, a Future [0] is returned, in
    other words, this is an asynchronous class.
    [0] http://www.python.org/dev/peps/pep-3148/
    """
    def __init__(self, filename, autocommit=False, journal_mode="OFF",
                 cache_size=2000):
        super(SQLiteDBMS, self).__init__()
        # Bounded queue: callers block when more than 100 requests are
        # pending, which throttles producers instead of growing unbounded.
        in_queue = Queue(100)
        self.sql_executor = SQLiteExecutor(in_queue)
        self.sql_executor.start()
        #
        # Performs sqlite database setup, this has the nice side-effect
        # that .result() will block until the thread is started and
        # processing tasks.
        #
        future = self.sql_executor.setup(filename, autocommit, journal_mode,
                                         cache_size)
        # Raises an exception if an error was found during setup
        future.result()
        self.filename = filename
        self.autocommit = autocommit
    @verify_started
    def execute(self, query, parameters=(), commit=False):
        """
        `execute` calls are non-blocking: just queue up the request and
        return a future.
        """
        fr = self.sql_executor.query(query, parameters)
        if self.autocommit or commit:
            # The commit is queued after the query, so it applies to it;
            # its own future is intentionally discarded (fire-and-forget).
            self.sql_executor.commit()
        return fr
    @verify_started
    def select(self, query, parameters=()):
        """
        I can't think about any non-blocking use of calling select()
        """
        # Blocks until the worker thread has produced the row list.
        future = self.sql_executor.select(query, parameters)
        return future.result()
    @verify_started
    def select_one(self, query, parameters=()):
        """
        :return: Only the first row of the SELECT, or None if there are no
                 matching rows.
        """
        try:
            return self.select(query, parameters)[0]
        except IndexError:
            return None
    @verify_started
    def commit(self):
        # Queue a commit; does not wait for it to complete.
        self.sql_executor.commit()
    @verify_started
    def close(self):
        # Flush pending writes, then poison the worker thread.
        self.commit()
        self.sql_executor.stop()
    def get_file_name(self):
        """Return DB filename."""
        return self.filename
    def drop_table(self, name):
        # NOTE: table name is interpolated directly; callers must pass
        # trusted identifiers only.
        query = 'DROP TABLE %s' % name
        return self.execute(query, commit=True)
    def clear_table(self, name):
        """
        Remove all rows from a table.
        """
        query = 'DELETE FROM %s WHERE 1=1' % name
        return self.execute(query, commit=True)
    def create_table(self, name, columns, pk_columns=()):
        """
        Create table in convenient way.
        """
        if not name:
            raise ValueError('create_table requires a table name')
        if not columns:
            raise ValueError('create_table requires column names and types')
        # Create the table
        query = 'CREATE TABLE %s (' % name
        all_columns = []
        for column_data in columns:
            column_name, column_type = column_data
            all_columns.append('%s %s' % (column_name, column_type))
        query += ', '.join(all_columns)
        # Finally the PK
        if pk_columns:
            query += ', PRIMARY KEY (%s)' % ','.join(pk_columns)
        query += ')'
        return self.execute(query, commit=True)
    def table_exists(self, name):
        # sqlite_master lists all schema objects; an empty result means the
        # table does not exist.
        query = "SELECT name FROM sqlite_master WHERE type='table' AND name=?"\
                " LIMIT 1"
        r = self.select(query, (name,))
        return bool(r)
    def create_index(self, table, columns):
        """
        Create index for speed and performance
        :param table: The table from which you want to create an index from
        :param columns: A list of column names.
        """
        query = 'CREATE INDEX %s_index ON %s( %s )' % (table, table,
                                                       ','.join(columns))
        return self.execute(query, commit=True)
class SQLiteExecutor(Process):
    """
    A very simple thread that takes work via submit() and processes it in a
    different thread.

    All sqlite objects (connection and cursor) are created and used only in
    run(), which keeps sqlite access single-threaded; clients talk to this
    worker exclusively through the request queue and the returned Futures.
    """
    DEBUG = False
    def __init__(self, in_queue):
        super(SQLiteExecutor, self).__init__(name='SQLiteExecutor')
        # Setting the thread to daemon mode so it dies with the rest of the
        # process, and a name so we can identify it during debugging sessions
        self.daemon = True
        self.name = 'SQLiteExecutor'
        self._in_queue = in_queue
    def query(self, query, parameters):
        """Queue a write/DDL statement; returns a Future with the cursor."""
        future = Future()
        request = (QUERY, (query, parameters), {}, future)
        self._in_queue.put(request)
        return future
    def _query_handler(self, query, parameters):
        # Runs in the worker thread only.
        return self.cursor.execute(query, parameters)
    def select(self, query, parameters):
        """Queue a SELECT; returns a Future resolving to a list of rows."""
        future = Future()
        request = (SELECT, (query, parameters), {}, future)
        self._in_queue.put(request)
        return future
    def _select_handler(self, query, parameters):
        # Materialize the whole result set so the cursor can be reused for
        # the next request before the caller consumes the rows.
        result = self.cursor.execute(query, parameters)
        result_lst = []
        for row in result:
            result_lst.append(row)
        return result_lst
    def commit(self):
        """Queue a COMMIT; returns a Future that resolves when it is done."""
        future = Future()
        request = (COMMIT, None, None, future)
        self._in_queue.put(request)
        return future
    def _commit_handler(self):
        return self.conn.commit()
    def stop(self):
        """Queue the poison pill which makes run() exit its loop."""
        future = Future()
        request = (POISON, None, None, future)
        self._in_queue.put(request)
        return future
    def setup(self, filename, autocommit=False, journal_mode="OFF",
              cache_size=2000):
        """
        Request the process to perform a setup.
        """
        future = Future()
        request = (SETUP,
                   (filename,),
                   {'autocommit': autocommit,
                    'journal_mode': journal_mode,
                    # BUGFIX: this used to send ``autocommit`` as the cache
                    # size, silently discarding the requested value.
                    'cache_size': cache_size},
                   future)
        self._in_queue.put(request)
        return future
    def _setup_handler(self, filename, autocommit=False, journal_mode="OFF",
                       cache_size=2000):
        # Convert the filename to UTF-8, this is needed for windows, and special
        # characters, see:
        # http://www.sqlite.org/c3ref/open.html
        unicode_filename = filename.decode(sys.getfilesystemencoding())
        filename = unicode_filename.encode("utf-8")
        self.filename = replace_file_special_chars(filename)
        self.autocommit = autocommit
        self.journal_mode = journal_mode
        self.cache_size = cache_size
        #
        # Setup phase
        #
        if self.autocommit:
            # isolation_level=None puts sqlite into autocommit mode.
            conn = sqlite3.connect(self.filename,
                                   isolation_level=None,
                                   check_same_thread=True)
        else:
            conn = sqlite3.connect(self.filename,
                                   check_same_thread=True)
        conn.execute('PRAGMA journal_mode = %s' % self.journal_mode)
        conn.execute('PRAGMA cache_size = %s' % self.cache_size)
        conn.text_factory = str
        self.conn = conn
        self.cursor = conn.cursor()
        # Commented line to be: Slower but (hopefully) without malformed
        # databases
        #
        # https://github.com/andresriancho/w3af/issues/4937
        #
        # It doesn't seem to help because I'm still getting malformed database
        # files, but I'll keep it anyways because I'm assuming that it's going
        # to reduce (not to zero, but reduce) these issues.
        #
        #self.cursor.execute('PRAGMA synchronous=OFF')
    def run(self):
        """
        This is the "main" method for this class, the one that
        consumes the commands which are sent to the Queue. The idea is to have
        the following architecture features:
            * Other parts of the framework which want to insert into the DB
              simply add an item to our input Queue and "forget about it" since
              it will be processed in another thread.
            * Only one thread accesses the sqlite3 object, which avoids many
              issues because of sqlite's non thread-safeness
        The Queue.get() will make sure we don't have 100% CPU usage in the loop
        """
        OP_CODES = {SETUP: self._setup_handler,
                    QUERY: self._query_handler,
                    SELECT: self._select_handler,
                    COMMIT: self._commit_handler,
                    POISON: POISON}
        while True:
            op_code, args, kwds, future = self._in_queue.get()
            args = args or ()
            kwds = kwds or {}
            if self.DEBUG:
                print('%s %s %s' % (op_code, args, kwds))
            handler = OP_CODES.get(op_code, None)
            if handler is None:
                # Invalid OPCODE
                continue
            elif handler == POISON:
                break
            else:
                if not future.set_running_or_notify_cancel():
                    # The caller cancelled the Future before we got to it.
                    return
                try:
                    result = handler(*args, **kwds)
                except sqlite3.OperationalError as e:
                    # I don't like this string match, but it seems that the
                    # exception doesn't have any error code to match
                    if 'no such table' in str(e):
                        dbe = NoSuchTableException(str(e))
                    elif 'malformed' in str(e):
                        print(DB_MALFORMED_ERROR)
                        dbe = MalformedDBException(DB_MALFORMED_ERROR)
                    else:
                        # More specific exceptions to be added here later...
                        dbe = DBException(str(e))
                    future.set_exception(dbe)
                except Exception as e:
                    dbe = DBException(str(e))
                    future.set_exception(dbe)
                else:
                    future.set_result(result)
# Module-level singleton holding the shared temporary database instance.
temp_default_db = None
def clear_default_temp_db_instance():
    """Close and forget the shared temp DB, removing its on-disk file."""
    global temp_default_db
    if temp_default_db is not None:
        temp_default_db.close()
        temp_default_db = None
        os.unlink('%s/main.db' % get_temp_dir())
def get_default_temp_db_instance():
    """Return the process-wide temporary SQLiteDBMS, creating it lazily."""
    global temp_default_db
    if temp_default_db is None:
        create_temp_dir()
        temp_default_db = SQLiteDBMS('%s/main.db' % get_temp_dir())
    return temp_default_db
def get_default_persistent_db_instance():
    """
    Placeholder for a future persistent database backend.

    Eventually w3af may want to persist the KB and other findings across
    different processes/runs; this function marks where that backend should
    be plugged in. For now it simply hands back the shared temporary
    database instance.
    """
    return get_default_temp_db_instance()
| [
"[email protected]"
] | |
e958a6f04aa7711889b4247d69ccf74e4f61fd27 | d042c1d3b3d62264bc93d9f8e0b9f57f85e24b62 | /Python3/04_RegularExpression/re.py | fd80656fb1ddefc5d0daa6abc0468d0b75463bb3 | [] | no_license | FadelBerakdar/OSTpythonCourse | f1867c1a329d4041914cbdeaa869a8db41ec4625 | 3f1740fd21f341490856a9e434154e049d19933a | refs/heads/master | 2016-08-12T23:24:22.765256 | 2016-02-24T13:39:58 | 2016-02-24T13:39:58 | 52,438,301 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,034 | py | """
______________________________________________________________________
| Regular Expression |
|______________________________________________________________________|
| . | any character except new line \n |
| \w | any unicode word character Aa1_ |
| \W | anything that isn't unicdoe character |
| \s | any white space, tabs, and newlines |
|  \S   | anything that isn't white space                                      |
| \d | any number from 0 to 9 |
| \D | anything that isn't number |
| \b | any word boundaries "edges of a word" |
| \B | anything that isn't word boundaries |
| \+ | to escape a character +)({}#@!%^&*- |
|______________________________________________________________________|
| {3} | something that occurs exactly three times |
| {,3} | something that occurs 0 to three times |
| {2,3} | something that occurs two to three times |
| ? | something that occurs 0 or one time |
| * | something that occurs at least one time |
| + | something that occurs at least once |
|_______|______________________________________________________________|
|[aple] | apple |
| [a-z] | any lowercase letters from a to z |
| [^2] | anythin that isn't 2 |
| [^\t] | ignore tap character |
|_______|______________________________________________________________|
| ^ | begining of new line |
| $ | the end of the line |
| () | group |
| (?P<name>...) | named capture group                                          |
| r"" | we have to use r"" to avoid using \\, so we use raw string r |
|_______|______________________________________________________________|
print(r"\tb")
print("\tb")
group()  | Returns the entire matched string.                  e.g. 555-555-5555
start()  | Returns the start index of the match.               e.g. 0
end()    | Returns the end index of the match.                 e.g. 12
span()   | Returns a tuple with the start and end indexes of the match.  e.g. (0, 12)
"""
def ccn_safety(string):
    """Mask a credit-card number, keeping only the last group of four digits.

    Every substring shaped like ``1234-5678-9012-3456`` is rewritten to
    ``XXXX-XXXX-XXXX-3456``; the rest of *string* is left untouched.
    """
    # \g<last4> back-references the named group. The previous code used a
    # bare "\g" (and an invalid "(?P\d{4})" group), which makes re.sub
    # raise "missing <" instead of masking anything.
    pattern = r"(\d{4}-){3}(?P<last4>\d{4})"
    return re.sub(pattern, r"XXXX-XXXX-XXXX-\g<last4>", string)
a = "4444"
b = " 4444"
c = "asda 4444"
d = "AAC"
e = "AC"
f = "1AC"
import re
pattern = r"^\d{4}$"
pattern = r"^[A-Z]{2}$"
for ob in (a, b, c, d, e, f):
print(bool(re.search(pattern,ob))) | [
"[email protected]"
] | |
928297c5bd91bcea7bfd13a8298215620b62700a | 5b3bf81b22f4eb78a1d9e801b2d1d6a48509a236 | /leetcode/1010.py | 840fc1be71784ae1cc6aede7f3ce2feac361d083 | [] | no_license | okoks9011/problem_solving | 42a0843cfdf58846090dff1a2762b6e02362d068 | e86d86bb5e3856fcaaa5e20fe19194871d3981ca | refs/heads/master | 2023-01-21T19:06:14.143000 | 2023-01-08T17:45:16 | 2023-01-08T17:45:16 | 141,427,667 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | from collections import defaultdict
class Solution:
    def numPairsDivisibleBy60(self, time: List[int]) -> int:
        """Count index pairs (i, j), i < j, where time[i] + time[j] is a
        multiple of 60.

        Bucket every duration by its remainder mod 60; remainders r and
        60-r pair up, while 0 and 30 pair only with themselves.
        """
        counts = defaultdict(int)
        for minutes in time:
            counts[minutes % 60] += 1
        # Complementary remainders 1..29 pair with 59..31.
        pairs = sum(counts[r] * counts[60 - r] for r in range(1, 30))
        # Remainders 0 and 30 combine within their own bucket: C(k, 2).
        pairs += counts[0] * (counts[0] - 1) // 2
        pairs += counts[30] * (counts[30] - 1) // 2
        return pairs
| [
"[email protected]"
] | |
a4bc70bf128abaaee85e0af536872bfe798071e7 | 07988c4c354fea7186962a38198b47af95ea40fe | /kubernetes/client/apis/apiregistration_v1_api.py | 76913891609fd738dd42ce965cb69cf9a5a3d8cb | [
"Apache-2.0"
] | permissive | GitObjects/python | cd9fb60798d1d6728c9773a6c71545bfcc83a43e | 2cbd4dfe14e6e0bcec72839bbfc9e5c3f6f6945d | refs/heads/master | 2020-03-22T17:29:41.474848 | 2018-08-01T03:02:03 | 2018-08-01T03:02:03 | 140,398,453 | 0 | 0 | Apache-2.0 | 2018-08-01T03:02:04 | 2018-07-10T08:04:27 | Python | UTF-8 | Python | false | false | 66,956 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.11.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class ApiregistrationV1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_api_service(self, body, **kwargs):
"""
create an APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_api_service(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1APIService body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_api_service_with_http_info(body, **kwargs)
else:
(data) = self.create_api_service_with_http_info(body, **kwargs)
return data
def create_api_service_with_http_info(self, body, **kwargs):
"""
create an APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_api_service_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1APIService body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_api_service" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_api_service`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIService',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_api_service(self, name, body, **kwargs):
"""
delete an APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_api_service(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_api_service_with_http_info(name, body, **kwargs)
else:
(data) = self.delete_api_service_with_http_info(name, body, **kwargs)
return data
def delete_api_service_with_http_info(self, name, body, **kwargs):
"""
delete an APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_api_service_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_api_service" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_api_service`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_api_service`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_api_service(self, **kwargs):
"""
delete collection of APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_api_service(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server the server will respond with a 410 ResourceExpired error indicating the client must restart their list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collection_api_service_with_http_info(**kwargs)
else:
(data) = self.delete_collection_api_service_with_http_info(**kwargs)
return data
def delete_collection_api_service_with_http_info(self, **kwargs):
"""
delete collection of APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_api_service_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server the server will respond with a 410 ResourceExpired error indicating the client must restart their list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_api_service" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_api_service(self, **kwargs):
"""
list or watch objects of kind APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_api_service(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server the server will respond with a 410 ResourceExpired error indicating the client must restart their list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1APIServiceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_api_service_with_http_info(**kwargs)
else:
(data) = self.list_api_service_with_http_info(**kwargs)
return data
def list_api_service_with_http_info(self, **kwargs):
"""
list or watch objects of kind APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_api_service_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server the server will respond with a 410 ResourceExpired error indicating the client must restart their list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1APIServiceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_api_service" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIServiceList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_api_service(self, name, body, **kwargs):
"""
partially update the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_api_service(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_api_service_with_http_info(name, body, **kwargs)
else:
(data) = self.patch_api_service_with_http_info(name, body, **kwargs)
return data
def patch_api_service_with_http_info(self, name, body, **kwargs):
"""
partially update the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_api_service_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_api_service" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_api_service`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_api_service`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIService',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_api_service_status(self, name, body, **kwargs):
"""
partially update status of the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_api_service_status(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_api_service_status_with_http_info(name, body, **kwargs)
else:
(data) = self.patch_api_service_status_with_http_info(name, body, **kwargs)
return data
def patch_api_service_status_with_http_info(self, name, body, **kwargs):
"""
partially update status of the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_api_service_status_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_api_service_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_api_service_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_api_service_status`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}/status', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIService',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_api_service(self, name, **kwargs):
"""
read the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_api_service(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_api_service_with_http_info(name, **kwargs)
else:
(data) = self.read_api_service_with_http_info(name, **kwargs)
return data
def read_api_service_with_http_info(self, name, **kwargs):
"""
read the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_api_service_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty', 'exact', 'export']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_api_service" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_api_service`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'exact' in params:
query_params.append(('exact', params['exact']))
if 'export' in params:
query_params.append(('export', params['export']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIService',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_api_service_status(self, name, **kwargs):
"""
read status of the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_api_service_status(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_api_service_status_with_http_info(name, **kwargs)
else:
(data) = self.read_api_service_status_with_http_info(name, **kwargs)
return data
def read_api_service_status_with_http_info(self, name, **kwargs):
"""
read status of the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_api_service_status_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_api_service_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_api_service_status`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}/status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIService',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_api_service(self, name, body, **kwargs):
"""
replace the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_api_service(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param V1APIService body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_api_service_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_api_service_with_http_info(name, body, **kwargs)
return data
    def replace_api_service_with_http_info(self, name, body, **kwargs):
        """
        replace the specified APIService
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_api_service_with_http_info(name, body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name: name of the APIService (required)
        :param V1APIService body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1APIService
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['name', 'body', 'pretty']
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots name/body/kwargs; the extra kwargs are merged in
        # after being validated against the whitelist above.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_api_service" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_api_service`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_api_service`")
        collection_formats = {}
        # Path parameter {name} is substituted into the URL template below.
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        query_params = []
        if 'pretty' in params:
            query_params.append(('pretty', params['pretty']))
        header_params = {}
        form_params = []
        local_var_files = {}
        # The V1APIService object is sent as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate the actual HTTP PUT (and optional async dispatch) to the
        # shared api_client.
        return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1APIService',
                                        auth_settings=auth_settings,
                                        async_req=params.get('async_req'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def replace_api_service_status(self, name, body, **kwargs):
"""
replace status of the specified APIService
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_api_service_status(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param V1APIService body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1APIService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_api_service_status_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_api_service_status_with_http_info(name, body, **kwargs)
return data
    def replace_api_service_status_with_http_info(self, name, body, **kwargs):
        """
        replace status of the specified APIService
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.replace_api_service_status_with_http_info(name, body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name: name of the APIService (required)
        :param V1APIService body: (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :return: V1APIService
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['name', 'body', 'pretty']
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots name/body/kwargs; the extra kwargs are merged in
        # after being validated against the whitelist above.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method replace_api_service_status" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'name' is set
        if ('name' not in params) or (params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `replace_api_service_status`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `replace_api_service_status`")
        collection_formats = {}
        # Path parameter {name} is substituted into the URL template below.
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']
        query_params = []
        if 'pretty' in params:
            query_params.append(('pretty', params['pretty']))
        header_params = {}
        form_params = []
        local_var_files = {}
        # The V1APIService object is sent as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting
        auth_settings = ['BearerToken']
        # Delegate the actual HTTP PUT against the /status subresource to the
        # shared api_client.
        return self.api_client.call_api('/apis/apiregistration.k8s.io/v1/apiservices/{name}/status', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1APIService',
                                        auth_settings=auth_settings,
                                        async_req=params.get('async_req'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| [
"[email protected]"
] | |
3f0ceb8350bfe0a17cf4877ecffca5c89455cb04 | f11600b9a256bf6a2b584d127faddc27a0f0b474 | /easy/1566.py | e1fd77a95b8a19432432804e090514e518bd9f8c | [] | no_license | longhao54/leetcode | 9c1f0ce4ca505ec33640dd9b334bae906acd2db5 | d156c6a13c89727f80ed6244cae40574395ecf34 | refs/heads/master | 2022-10-24T07:40:47.242861 | 2022-10-20T08:50:52 | 2022-10-20T08:50:52 | 196,952,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 992 | py | class Solution:
def containsPattern(self, arr: List[int], m: int, k: int) -> bool:
if m * k == len(arr):
return [arr[0]] * m * k == arr
for start in range(0, len(arr)-m*k):
tmp = arr[start:start+m]*k
index = 0
while index < len(arr)-m:
if tmp == arr[index:index+m*k]:
return True
index += 1
return False
# A faster approach
# There is no need to build a pattern list and compare slices as above (that
# is slow); comparing the elements position by position is enough.
class Solution:
    def containsPattern(self, arr: List[int], m: int, k: int) -> bool:
        """Return True if arr contains a block of length m repeated k
        consecutive times, checking element by element without building
        any intermediate lists."""
        needed = m * k
        length = len(arr)
        if length < needed:
            return False
        # Try every possible window start; a window matches when each element
        # equals the one a full period (m positions) before it.
        for begin in range(length - needed + 1):
            position = 0
            while position < needed and arr[begin + position] == arr[begin + position % m]:
                position += 1
            if position == needed:
                return True
        return False
| [
"[email protected]"
] | |
ee6f15e3182bc0c650262c869b4aa170fc6f416d | 40f8107fdd2afa1f9c41d4d02b32298258bd3ae7 | /src/app/cache.py | bdc0c3b9d8a36c32ac9b7ea12af8a10dacd4439c | [
"Apache-2.0"
] | permissive | ConvergeTP/von_tails | 91f62e722325e1a0845e766359dae94de13076d3 | 98ce984b001cd09005b6496ce10687588def53ef | refs/heads/master | 2020-05-30T23:18:30.532496 | 2019-08-21T14:30:58 | 2019-08-21T14:30:58 | 170,901,819 | 0 | 0 | Apache-2.0 | 2019-04-02T18:01:45 | 2019-02-15T17:18:29 | Python | UTF-8 | Python | false | false | 705 | py | """
Copyright 2017-2019 Government of Canada - Public Services and Procurement Canada - buyandsell.gc.ca
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from aiocache import SimpleMemoryCache
# Module-level in-memory cache instance shared by the service's handlers.
MEM_CACHE = SimpleMemoryCache()
| [
"[email protected]"
] | |
5cf1d0203417d378aac698a22fa0890bb4bffcae | ba0e07b34def26c37ee22b9dac1714867f001fa5 | /azure-mgmt-web/azure/mgmt/web/operations/recommendations_operations.py | c3e0db7d769a8e215a861a5b4665f4787afeeee7 | [
"MIT"
] | permissive | CharaD7/azure-sdk-for-python | b11a08ac7d24a22a808a18203072b4c7bd264dfa | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | refs/heads/master | 2023-05-12T12:34:26.172873 | 2016-10-26T21:35:20 | 2016-10-26T21:35:20 | 72,448,760 | 1 | 0 | MIT | 2023-05-04T17:15:01 | 2016-10-31T15:14:09 | Python | UTF-8 | Python | false | false | 14,493 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class RecommendationsOperations(object):
    """RecommendationsOperations operations.

    Auto-generated client for the Microsoft.Web recommendations REST
    endpoints; each public method builds the URL/query/headers, sends the
    request through the shared client and deserializes the response.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config

    def get_recommendation_by_subscription(
            self, featured=None, filter=None, custom_headers=None, raw=False, **operation_config):
        """Gets a list of recommendations associated with the specified
        subscription.
        :param featured: If set, this API returns only the most critical
         recommendation among the others. Otherwise this API returns all
         recommendations available
        :type featured: bool
        :param filter: Return only channels specified in the filter. Filter
         is specified by using OData syntax. Example: $filter=channels eq
         'Api' or channel eq 'Notification'
        :type filter: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: list of :class:`Recommendation
         <azure.mgmt.web.models.Recommendation>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # NOTE: the generated parameter name `filter` shadows the builtin
        # within this method's scope.
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.Web/recommendations'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        if featured is not None:
            query_parameters['featured'] = self._serialize.query("featured", featured, 'bool')
        if filter is not None:
            query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # Anything other than 200 is surfaced as a CloudError carrying the
        # service-side request id for diagnostics.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('[Recommendation]', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def get_rule_details_by_site_name(
            self, resource_group_name, site_name, name, custom_headers=None, raw=False, **operation_config):
        """Gets the detailed properties of the recommendation object for the
        specified web site.
        :param resource_group_name: Resource group name
        :type resource_group_name: str
        :param site_name: Site name
        :type site_name: str
        :param name: Recommendation rule name
        :type name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`RecommendationRule
         <azure.mgmt.web.models.RecommendationRule>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations/{name}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'siteName': self._serialize.url("site_name", site_name, 'str'),
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # Anything other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('RecommendationRule', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def get_recommended_rules_for_site(
            self, resource_group_name, site_name, featured=None, site_sku=None, num_slots=None, custom_headers=None, raw=False, **operation_config):
        """Gets a list of recommendations associated with the specified web site.
        :param resource_group_name: Resource group name
        :type resource_group_name: str
        :param site_name: Site name
        :type site_name: str
        :param featured: If set, this API returns only the most critical
         recommendation among the others. Otherwise this API returns all
         recommendations available
        :type featured: bool
        :param site_sku: The name of site SKU.
        :type site_sku: str
        :param num_slots: The number of site slots associated to the site
        :type num_slots: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: list of :class:`Recommendation
         <azure.mgmt.web.models.Recommendation>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendations'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'siteName': self._serialize.url("site_name", site_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        if featured is not None:
            query_parameters['featured'] = self._serialize.query("featured", featured, 'bool')
        if site_sku is not None:
            query_parameters['siteSku'] = self._serialize.query("site_sku", site_sku, 'str')
        if num_slots is not None:
            query_parameters['numSlots'] = self._serialize.query("num_slots", num_slots, 'int')
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # Anything other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('[Recommendation]', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    def get_recommendation_history_for_site(
            self, resource_group_name, site_name, start_time=None, end_time=None, custom_headers=None, raw=False, **operation_config):
        """Gets the list of past recommendations optionally specified by the time
        range.
        :param resource_group_name: Resource group name
        :type resource_group_name: str
        :param site_name: Site name
        :type site_name: str
        :param start_time: The start time of a time range to query, e.g.
         $filter=startTime eq '2015-01-01T00:00:00Z' and endTime eq
         '2015-01-02T00:00:00Z'
        :type start_time: str
        :param end_time: The end time of a time range to query, e.g.
         $filter=startTime eq '2015-01-01T00:00:00Z' and endTime eq
         '2015-01-02T00:00:00Z'
        :type end_time: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: list of :class:`Recommendation
         <azure.mgmt.web.models.Recommendation>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/recommendationHistory'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'siteName': self._serialize.url("site_name", site_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        if start_time is not None:
            query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'str')
        if end_time is not None:
            query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'str')
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # Anything other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('[Recommendation]', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
| [
"[email protected]"
] | |
caa1bab989647808316c09990fc13f5e713b386c | 21e7753732296bfdfb6dd9a9b58c7c6b8d90a1e5 | /Bits/nextNumber/getNext.py | 6ebc140b20a361fff4350f249e1d6384893f8d31 | [] | no_license | rongfeng-china/python-algorithms-and-data-structures | eb8514b44d7ff97dd7c4deda2d8ea888a5aa8d04 | a69241bb7b684bc7d00acdd46c2fc214f7b61887 | refs/heads/master | 2020-03-13T09:08:13.375870 | 2015-12-11T07:37:30 | 2015-12-11T07:37:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 890 | py | # Given a positive number, print the next smallest and the next
# largest number that have the same number of 1 bits in their
# binary representation.
def getNext(n):
    """Return the next larger integer having the same number of set bits as
    n, or -1 when no such number exists under the 32-bit-style constraint."""
    remaining = n
    trailing_zeros = 0   # zeros below the lowest set bit
    ones_block = 0       # run of ones sitting just above those zeros
    # Count the trailing zeros (stop immediately for n == 0).
    while remaining != 0 and (remaining & 1) == 0:
        trailing_zeros += 1
        remaining >>= 1
    # Count the contiguous ones above them.
    while (remaining & 1) == 1:
        ones_block += 1
        remaining >>= 1
    # A value shaped 111...1100...00 (or zero) has no larger sibling here.
    if trailing_zeros + ones_block == 31 or trailing_zeros + ones_block == 0:
        return -1
    pivot = trailing_zeros + ones_block      # position of first non-trailing zero
    result = n | (1 << pivot)                # flip that zero to one
    result &= ~((1 << pivot) - 1)            # clear all bits below the pivot
    result |= (1 << (ones_block - 1)) - 1    # re-insert ones_block-1 ones at the bottom
    return result
| [
"[email protected]"
] | |
791eb38cc58c33681d0a94055779d53287ea54ce | 68fb568c78dbcd4e73c2b697ab463e02fdde7960 | /_scripts/utils_databasePopulate.py | cf0c8c512c0b35083f405a6f9bafae21830b87a2 | [] | no_license | StBogdan/LightningExplorer | 114400dfb813ee039b1b73d7d3f92817b982c79b | 4ef5e5e1c633bd1ba18a5b37e3e9d1db79f1d503 | refs/heads/master | 2020-03-20T06:06:06.264959 | 2019-04-21T14:52:07 | 2019-04-21T14:52:07 | 137,238,511 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,013 | py | import os
import django
import sys
import _scripts.utils_config as config
from datetime import datetime
from nodes.models import *
"""
What: Take data from files, convert it, put it in database
Why: Automate data loading
"""
# Django setup (run in the virtual environment): point Django at the project
# settings module, then initialise the app registry so the ORM models can be
# used from this standalone script.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lightningExplorer.settings")
django.setup()
def get_data_files(full_data_path, one_per_day=0):
    """Collect ``.graph`` snapshot files under a data directory.

    The directory layout is one sub-directory per day, each containing
    ``.graph``/``.netinfo`` file pairs.

    :param full_data_path: root directory holding the per-day sub-directories
    :param one_per_day: if truthy, keep only the first .graph/.netinfo pair of
        each day (testing aid)
    :return: list of full path strings to the selected ``.graph`` files

    Fix: directory listings are now sorted, so the pair picked by
    ``one_per_day`` is deterministic instead of depending on os.listdir order,
    and paths are built consistently with os.path.join.
    """
    files = []
    for day_dir in sorted(os.listdir(full_data_path)):
        day_dir_fpath = os.path.join(full_data_path, day_dir)
        if not os.path.isdir(day_dir_fpath):
            continue  # only day directories hold data files
        day_data = sorted(os.listdir(day_dir_fpath))
        if one_per_day:
            # Keep just the first .graph/.netinfo pair for the day.
            day_data = [x for x in day_data if x.endswith((".netinfo", ".graph"))][:2]
        files += [os.path.join(day_dir_fpath, x) for x in day_data if x.endswith(".graph")]
    return files
def get_node_capacity(nodeID, channel_dict):
    """Return ``[channel_count, total_capacity]`` for the given node.

    A channel counts toward the node when its pub key appears on either side
    of the edge; capacities are summed as integers.
    """
    touching = [edge for edge in channel_dict
                if nodeID in (edge["node1_pub"], edge["node2_pub"])]
    total_capacity = sum(int(edge["capacity"]) for edge in touching)
    return [len(touching), total_capacity]
def get_net_data(file_fpath):
    """Parse one snapshot file into ``[timestamp, nodes, edges]``.

    The timestamp is taken from the file name (``YYYY-MM-DD-HH-MM-SS.graph``;
    older snapshots used colons in the time portion), and the JSON payload
    must contain ``nodes`` and ``edges`` keys.

    Fixes: the file handle is now closed (the original leaked it via
    ``open(...).read()``), and only ValueError from the first date format is
    caught before trying the legacy format.
    """
    file_name = os.path.basename(file_fpath)
    stamp = file_name.split(".")[0]
    try:
        date = datetime.strptime(stamp, "%Y-%m-%d-%H-%M-%S")
    except ValueError:
        # Legacy snapshots used colons between the time fields.
        date = datetime.strptime(stamp, "%Y-%m-%d-%H:%M:%S")
    with open(file_fpath) as data_file:
        netData = json.load(data_file)
    return [date, netData["nodes"], netData["edges"]]
def createNodeEntries(nodes_info, node_date, nodes_chans, nodes_capacity, network_origin):
    """Bulk-insert Node rows (and their Address rows) for one snapshot.

    :param nodes_info: node dicts from the snapshot JSON
    :param node_date: snapshot timestamp stored on every row
    :param nodes_chans: per-node channel counts, aligned with nodes_info
    :param nodes_capacity: per-node total capacities, aligned with nodes_info
    :param network_origin: "mainnet" or "testnet" tag
    :return: (dict mapping pub_key -> Node object, list of Address objects)
    """
    node_objects = [
        Node(date_logged=node_date,
             network=network_origin,
             last_update=info["last_update"],
             pub_key=info["pub_key"],
             alias=info["alias"],
             color=info["color"],
             channels=chan_count,
             capacity=cap)
        for info, chan_count, cap in zip(nodes_info, nodes_chans, nodes_capacity)
    ]
    nodes_by_key = {obj.pub_key: obj for obj in node_objects}
    # Persist the nodes first so the address rows can reference them.
    saved_nodes = Node.objects.bulk_create(node_objects)
    address_objects = []
    for saved, info in zip(saved_nodes, nodes_info):
        # Sanity check: bulk_create must return the rows in input order.
        if saved.pub_key != info["pub_key"]:
            raise Exception("Node identity mismatch")
        for adAdr in info["addresses"]:
            address_objects.append(Address(date_logged=node_date,
                                           node=saved,
                                           addr=adAdr["addr"],
                                           network=adAdr["network"]))
    Address.objects.bulk_create(address_objects)
    return nodes_by_key, address_objects
def createChanEntries(edges_info, edge_date, nodes_entries, network_origin):
    """Bulk-insert Channel rows and their per-node routing policies.

    :param edges_info: edge dicts from the snapshot JSON
    :param edge_date: snapshot timestamp stored on every row
    :param nodes_entries: pub_key -> Node object map from createNodeEntries
    :param network_origin: "mainnet" or "testnet" tag
    :return: (list of saved Channel objects, list of saved Node_Policy objects)

    Fixes: node2 policies now carry ``network=network_origin`` like node1
    policies did (the original omitted it, leaving node2 rows with the field's
    default), and the duplicated policy construction is factored into one
    helper.
    """
    INT32_MAX = 2147483647  # values above this overflow 32-bit DB columns

    def _policy(channel_obj, node_obj, policy_info):
        # Build one Node_Policy row for one side of a channel.
        if (int(policy_info["time_lock_delta"]) > INT32_MAX or
                int(policy_info["min_htlc"]) > INT32_MAX):
            # Echo policies whose values will not fit the DB integer columns.
            print(policy_info)
        return Node_Policy(date_logged=edge_date,
                           network=network_origin,
                           channel=channel_obj,
                           node=node_obj,
                           time_lock_delta=policy_info["time_lock_delta"],
                           min_htlc=policy_info["min_htlc"],
                           fee_base_msat=policy_info["fee_base_msat"],
                           fee_rate_milli_msat=policy_info["fee_rate_milli_msat"])

    new_chans = [Channel(date_logged=edge_date,
                         chan_id=edge_info["channel_id"],
                         last_update=edge_info["last_update"],
                         node1_pub=nodes_entries[edge_info["node1_pub"]],
                         node2_pub=nodes_entries[edge_info["node2_pub"]],
                         capacity=edge_info["capacity"],
                         chan_point=edge_info["chan_point"],
                         network=network_origin)
                 for edge_info in edges_info]
    # Persist the channels first so the policy rows can reference them.
    new_chan_entries = Channel.objects.bulk_create(new_chans)
    new_entries_policies = []
    for channel_obj, edge_info in zip(new_chans, edges_info):
        # Sanity check: bulk_create must return the rows in input order.
        if channel_obj.chan_id != edge_info["channel_id"]:
            raise Exception("Channel identity mismatch")
        for pub_key, policy_key in (("node1_pub", "node1_policy"),
                                    ("node2_pub", "node2_policy")):
            policy_info = edge_info[policy_key]
            if policy_info is not None:
                new_entries_policies.append(
                    _policy(channel_obj, nodes_entries[edge_info[pub_key]], policy_info))
    new_entries_policies = Node_Policy.objects.bulk_create(new_entries_policies)
    return new_chan_entries, new_entries_policies
def createDBentries(full_data_path, network, hourly=False):
    """Load every snapshot under full_data_path into the database.

    :param full_data_path: root data directory (one sub-directory per day)
    :param network: "mainnet" or "testnet" tag stored on every row
    :param hourly: if True, keep only the first snapshot of each hour and
        skip timestamps already present in the database
    """
    nodes_entries = {}
    edges_entries = []
    policy_entries = []  # NOTE: currently unused
    data_folders = get_data_files(full_data_path)  # One per day
    index = 0
    print(f"[DB Populate][{network}] Have to process: {len(data_folders)} folders, hourly: {hourly}")
    # Track the last hour/day seen so hourly mode can skip duplicates.
    current_hour = -1
    current_day = -1
    for file in sorted(data_folders):
        index += 1
        try:
            if (hourly):  # Only go through this is hourly flag is set
                # The snapshot timestamp is encoded in the file name.
                summaryTime = datetime.strptime(file.split(os.sep)[-1].split(".")[0], "%Y-%m-%d-%H-%M-%S")
                if len(Node.objects.filter(date_logged=summaryTime)) > 0:
                    print("[Data Update][" + network + "] Date already in database\t" + str(summaryTime))
                    continue
                if current_hour != summaryTime.hour or current_day != summaryTime.day:
                    # First snapshot of a new hour: process it.
                    print("[DB Populate][" + network + "][Hourly process] Process Hour: " + str(
                        summaryTime.hour) + " Day: " + str(summaryTime.day) + "compare to Hour:" + str(
                        current_hour) + " on Day:" + str(current_day))
                    current_hour = summaryTime.hour
                    current_day = summaryTime.day
                else:
                    # Same hour as the previous snapshot: skip it.
                    print("[DB Populate][" + network + "][Hourly process] Continue Hour:" + str(
                        summaryTime.hour) + " on Day:" + str(summaryTime.day) + "compare to last seen " + str(
                        current_hour) + " on " + str(current_day))
                    continue
            date, nodes, chans = get_net_data(file)
            # print(f"Got file: {file}\t with {len(nodes)} nodes\t{len(chans)} channels")
            # Per-node [channel_count, capacity] pairs, aligned with nodes.
            node_extra_info = [get_node_capacity(node["pub_key"], chans) for node in nodes]
            nodes_entries, address_entries = createNodeEntries(nodes, date, [x for [x, y] in node_extra_info],
                                                               [y for [x, y] in node_extra_info], network)
            # for node in nodes:
            #     node_chans,node_capacity = get_node_capacity(node["pub_key"],chans)
            #     nodes_entries[node["pub_key"]] =createNodeEntry(node,date,node_chans,node_capacity) #May be a list
            edges_entries, policies = createChanEntries(chans, date, nodes_entries, network)
            print("[DB Populate][" + network + "][ " + str(index) + "/" + str(
                len(data_folders)) + " ]\t" + "Created entries for " + str(len(nodes_entries)) + " nodes and " + str(
                len(edges_entries)) + " channels " + " date:" + date.strftime("%Y-%m-%d %H:%M:%S"))
        except Exception as e:
            # Best-effort loading: log the failing file and keep going, but
            # re-raise DB overflow errors ("out of range") as fatal.
            print("[DB Populate][" + network + "][ " + str(index) + "/" + str(
                len(data_folders)) + " ]\t" + "ERROR ON FILE: " + file + "\t" + str(e))
            if ("out of range" in str(e)):
                raise e
def clear_db():
    """Delete every Node and Channel row, echoing Django's deletion summaries."""
    print("[DB Populate] Removing all data")
    for model in (Node, Channel):
        # delete() returns (total_rows, per-model counts); printed for the operator.
        print(model.objects.all().delete())
if __name__ == "__main__":
"""
Run on command line, takes info from data folder (config-given), puts it in Django-accessible db
arg1 network:
mainnet or testnet
or unsafe_reset_db (clears the local db)
arg2 data frequency:
alldata for getting all data (otherwise hourly)
"""
# Get env settings
site_config = config.get_site_config()
data_location = site_config["lndmon_data_location"]
data_location_mainnet = site_config["lndmon_data_location_mainnet"]
if len(sys.argv) > 1:
if len(sys.argv) > 2:
hourly = (sys.argv[2] != "alldata")
else:
hourly = True
print("[DB Populate] Hourly interval:\t" + str(hourly))
if sys.argv[1] == "mainnet":
print("[DB Populate] Adding mainnet data")
createDBentries(data_location_mainnet, "mainnet", hourly)
elif sys.argv[1] == "testnet":
print("[DB Populate] Adding testnet data")
createDBentries(data_location, "testnet", hourly)
elif sys.argv[1] == "unsafe_reset_db":
clear_db()
else:
print("[DB Populate] Unrecognised first parameter, please use one of mainnet|testnet|unsafe_reset_db")
else:
print("[DB Populate] Adding all network data (both networks)")
if input("Want to rebuild the database? (LOSE ALL CURRENT DATA) [y/n] ") == "y":
clear_db()
hourly_setting = input("Add all times?(default is hourly) [y/n]\t") != "y"
if input("Add new entries? [y/n] ") == "y":
createDBentries(data_location, "testnet", hourly_setting)
createDBentries(data_location_mainnet, "mainnet", hourly_setting)
'''
#For use in django shell
pathScript ="/path/to/DataBasePopulate.py"
exec(open(pathScript).read())
scriptName = "DataBasePopulate.py"
exec(open(scriptName).read()) #Yes, I know
''' | [
"[email protected]"
] | |
621a9809e8f9a0c711fccec07ffb4f43131cc423 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-2/c19541276c8f809733d4587fdcc04a8c7add54b3-<draw_text>-bug.py | 4b03e81403d0a80992ec3ff20502160971ac0508 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,744 | py |
def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
    """Emit PGF commands to ``self.fh`` that draw the string *s* at (*x*, *y*).

    *gc* supplies graphics-context state (alpha and RGB colour), *prop* the
    font properties, and *angle* a rotation in degrees.  When *mtext* is
    given, its transformed position and alignment are used instead of the
    raw (x, y) coordinates.
    NOTE(review): *ismath* is accepted but never consulted in this body --
    presumably handled before this method is reached; confirm with caller.
    """
    # Escape/convert the string and prepend the font-selection commands.
    s = common_texification(s)
    prop_cmds = _font_properties_str(prop)
    s = ('%s %s' % (prop_cmds, s))
    writeln(self.fh, '\\begin{pgfscope}')
    # Apply alpha to both stroke and fill when it is not fully opaque.
    alpha = gc.get_alpha()
    if (alpha != 1.0):
        writeln(self.fh, ('\\pgfsetfillopacity{%f}' % alpha))
        writeln(self.fh, ('\\pgfsetstrokeopacity{%f}' % alpha))
    # Non-black text needs an explicit colour definition and \color command.
    rgb = tuple(gc.get_rgb())[:3]
    if (rgb != (0, 0, 0)):
        writeln(self.fh, ('\\definecolor{textcolor}{rgb}{%f,%f,%f}' % rgb))
        writeln(self.fh, '\\pgfsetstrokecolor{textcolor}')
        writeln(self.fh, '\\pgfsetfillcolor{textcolor}')
        s = ('\\color{textcolor}' + s)
    # Conversion factor from display units to inches for the "x=...in" args.
    f = (1.0 / self.figure.dpi)
    text_args = []
    if (mtext and (((angle == 0) or (mtext.get_rotation_mode() == 'anchor')) and (mtext.get_va() != 'center_baseline'))):
        # Use the Text object's own (transformed) position and alignment.
        (x, y) = mtext.get_transform().transform_point(mtext.get_position())
        text_args.append(('x=%fin' % (x * f)))
        text_args.append(('y=%fin' % (y * f)))
        # Map matplotlib alignment names onto \pgftext placement options.
        halign = {
            'left': 'left',
            'right': 'right',
            'center': '',
        }
        valign = {
            'top': 'top',
            'bottom': 'bottom',
            'baseline': 'base',
            'center': '',
        }
        text_args.append(halign[mtext.get_ha()])
        text_args.append(valign[mtext.get_va()])
    else:
        # Fall back to the caller-supplied coordinates, anchored at the
        # left end of the text baseline.
        text_args.append(('x=%fin' % (x * f)))
        text_args.append(('y=%fin' % (y * f)))
        text_args.append('left')
        text_args.append('base')
    if (angle != 0):
        text_args.append(('rotate=%f' % angle))
    writeln(self.fh, ('\\pgftext[%s]{%s}' % (','.join(text_args), s)))
    writeln(self.fh, '\\end{pgfscope}')
| [
"[email protected]"
] | |
3b96e5ce191ac951020d3af07454faec70bbb18a | 6879a8596df6f302c63966a2d27f6b4d11cc9b29 | /abc/problems030/021/b.py | 8a85c8e452bae3d0deb92785c327dd6922669e59 | [] | no_license | wkwkgg/atcoder | 41b1e02b88bf7a8291b709306e54cb56cb93e52a | 28a7d4084a4100236510c05a88e50aa0403ac7cd | refs/heads/master | 2020-07-26T03:47:19.460049 | 2020-03-01T18:29:57 | 2020-03-01T18:29:57 | 208,523,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | N = int(input())
A, B = map(int, input().split())
K = int(input())
P = list(map(int, input().split()))
print("YES" if len(P) + 2 == len(set(P + [A, B])) else "NO")
| [
"[email protected]"
] | |
82449f43a77d7008703082bf0d83768860297c65 | bd48e8af13abb5a8574b47ea3337e64a45e8f672 | /nanum/search/apis.py | c9621326855333e4e5c41e1bd2a515cdc0b21840 | [] | no_license | markui/nanum-project | d221cacfaed9d6e2e882f3d4f29dc77055a4e97b | 399064b62a7c8049b37efd77a98f17a903754070 | refs/heads/master | 2021-09-08T08:03:30.667750 | 2018-01-09T07:06:11 | 2018-01-09T07:06:11 | 110,780,979 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,510 | py | from rest_framework import generics, permissions
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from rest_framework.views import APIView
from topics.models import Topic
from topics.serializers import TopicSerializer
from . import search
class TopicSearchAPIView(generics.RetrieveAPIView):
    """GET ?name=<fragment>: return topics whose name contains the fragment.

    Requires an authenticated user; raises ParseError when ``name`` is
    missing or empty.
    """
    queryset = Topic.objects.all()
    serializer_class = TopicSerializer
    # FIX: IsAuthenticated is a *permission* class, but it was previously
    # listed under ``authentication_classes``, which both broke the intended
    # login requirement and misconfigured authentication.  SearchAPIView in
    # this module already uses ``permission_classes`` for the same purpose.
    permission_classes = (
        permissions.IsAuthenticated,
    )
    def retrieve(self, request, *args, **kwargs):
        # ``name`` is the search fragment; empty/missing is a client error.
        query_params = self.request.query_params
        topic_name = query_params.get("name", None)
        if not topic_name:
            raise ParseError(detail={"error": "name 필드가 비어있습니다."})
        queryset = Topic.objects.filter(name__contains=topic_name)
        if not queryset:
            return Response({"result": "결과가 없습니다."})
        serializer = self.get_serializer(queryset, many=True)
        result = {"result": serializer.data}
        return Response(result)
class SearchAPIView(APIView):
    """Free-text search endpoint for authenticated users."""

    permission_classes = (permissions.IsAuthenticated,)

    def get(self, request, format=None):
        """Run the search given by the ``query`` parameter and return it."""
        query = self.request.query_params.get("query", None)
        if not query:
            raise ParseError({"error": "query 필드가 비어있습니다."})
        return Response(search.search(query))
| [
"[email protected]"
] | |
9b9afbb047cf6727bb42595fed496738377aa805 | 64c6134c2873ded7e84b93f10162fb6f27f25139 | /PPPDebug.py | 30ce0f3868cb09886d2cbb64b184695648871941 | [
"BSD-2-Clause"
] | permissive | umd-lhcb/UT-Aux-mapping | 1c22e1aec6eeefaa9d54f0cc48486a8162784c99 | 69f611f133ddcf1df18a9256c9ba1e9a577c1019 | refs/heads/master | 2022-01-19T11:54:26.101859 | 2022-01-09T04:31:49 | 2022-01-09T04:31:49 | 162,521,821 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,246 | py | #!/usr/bin/env python
#
# Author: Yipeng Sun
# License: BSD 2-clause
# Last Change: Fri May 28, 2021 at 03:43 AM +0200
from pathlib import Path
from itertools import permutations
from collections.abc import Iterable
from pyUTM.io import WirelistNaiveReader, PcadNaiveReader
from UT_Aux_mapping.const import input_dir
from UT_Aux_mapping.helpers import ppp_netname_regulator
#####################
# Read all netlists #
#####################
# Mapping of netlist name -> parsed netlist, filled in below.
netlists = {}
def read_net(path, name, ext='wirelist', reader=WirelistNaiveReader):
    """Parse the netlist file ``<path>/<name>.<ext>`` using *reader*."""
    filename = Path('{}.{}'.format(name, ext))
    return reader(path / filename).read()
# Load both PPP variants (true/mirror) from their wirelist exports.
ppp_vars = ['c_true_ppp_mag', 'c_mirror_ppp_mag']
netlists.update({k: read_net(input_dir, k) for k in ppp_vars})
# Load both P2B2 variants, which are exported as PCAD ".net" files instead.
p2b2_vars = ['true_p2b2', 'mirror_p2b2']
netlists.update({k: read_net(input_dir, k, 'net', PcadNaiveReader)
                 for k in p2b2_vars})
##########
# Checks #
##########
# Mapping of netlist name -> list of net names, filled in below.
netnames = {}
def flatten(iterable, depth=0, max_depth=-1):
    """Recursively flatten *iterable* into a list.

    Strings are treated as atoms.  When *max_depth* is non-negative,
    nested iterables found at that depth are kept as-is instead of being
    expanded further.
    """
    result = []
    for element in iterable:
        nested = isinstance(element, Iterable) and not isinstance(element, str)
        if nested and depth != max_depth:
            result.extend(flatten(element, depth + 1, max_depth))
        else:
            result.append(element)
    return result
def uniq_elems(l1, l2):
    """Return the elements of *l1* that do not appear in *l2*, order kept."""
    result = []
    for element in l1:
        if element not in l2:
            result.append(element)
    return result
def print_uniq(uniq_d):
    """Print each rule from *uniq_d* that has offending nets, one per line."""
    for rule, result in uniq_d.items():
        if not result:
            continue
        print('The following nets are {}:'.format(rule))
        print('\n'.join(result))
        print('')
# Check whether any nets are unique to one PPP variant: normalise the PPP
# net names, then diff every ordered pair of variants.
netnames.update({k: [ppp_netname_regulator(n) for n in netlists[k].keys()]
                 for k in ppp_vars})
uniq_ppp = {'in {} not {}'.format(k1, k2):
            uniq_elems(netnames[k1], netnames[k2])
            for k1, k2 in permutations(ppp_vars, 2)}
print_uniq(uniq_ppp)
# Check nets that are unique to P2B2: P2B2 names are used verbatim, and each
# PPP variant is only compared against its matching P2B2 variant (zip),
# flattened one level so each ordered pair is yielded individually.
netnames.update({k: [n for n in netlists[k].keys()] for k in p2b2_vars})
uniq_p2b2 = {'in {} not {}'.format(k1, k2):
             uniq_elems(netnames[k1], netnames[k2])
             for k1, k2 in
             flatten(map(permutations, zip(ppp_vars, p2b2_vars)), max_depth=1)}
print_uniq(uniq_p2b2)
| [
"[email protected]"
] | |
de8847b98859f45a2aa099cc3edc51818cb87fd7 | fbcdb3e66f9fce9bf8596ae9f28e14ad23da30a2 | /lib/elf/header.py | 53d8e10256a8ee921189b7ecba34a03f83eb3a03 | [
"BSD-2-Clause"
] | permissive | arizvisa/syringe | 38349e6ff81bc1d709d520b8a8d949a47a3b5f6c | e02b014dc764ed822288210248c9438a843af8a9 | refs/heads/master | 2023-08-18T11:44:50.096141 | 2023-08-16T21:15:58 | 2023-08-16T21:15:58 | 22,565,979 | 36 | 9 | BSD-2-Clause | 2021-05-24T19:38:31 | 2014-08-03T03:24:16 | Python | UTF-8 | Python | false | false | 23,782 | py | import ptypes, time, datetime, functools, operator, bisect
from . import EV_, E_IDENT, section, segment
from .base import *
class ET_(pint.enum, Elf32_Half):
    """ELF object file type (the ``e_type`` header field)."""
    _values_ = [
        ('NONE', 0),
        ('REL', 1),
        ('EXEC', 2),
        ('DYN', 3),
        ('CORE', 4),
        # ET_LOOS(0xfe00) - ET_HIOS(0xfeff)
        # ET_LOPROC(0xff00) - ET_HIPROC(0xffff)
    ]
class EM_(pint.enum, Elf32_Half):
    """ELF machine architecture (the ``e_machine`` header field)."""
    _values_ = [
        ('EM_NONE', 0),
        ('EM_M32', 1),
        ('EM_SPARC', 2),
        ('EM_386', 3),
        ('EM_68K', 4),
        ('EM_88K', 5),
        ('EM_IAMCU', 6),
        ('EM_860', 7),
        ('EM_MIPS', 8),
        ('EM_S370', 9),
        ('EM_MIPS_RS4_BE', 10),
        # ('RESERVED', 11-14),
        ('EM_PARISC', 15),
        # ('RESERVED', 16),
        ('EM_VPP500', 17),
        ('EM_SPARC32PLUS', 18),
        ('EM_960', 19),
        ('EM_PPC', 20),
        ('EM_PPC64', 21),
        ('EM_S390', 22),
        ('EM_SPU', 23),
        # ('RESERVED', 24-35),
        ('EM_V800', 36),
        ('EM_FR20', 37),
        ('EM_RH32', 38),
        ('EM_RCE', 39),
        ('EM_ARM', 40),
        ('EM_ALPHA', 41),
        ('EM_SH', 42),
        ('EM_SPARCV9', 43),
        ('EM_TRICORE', 44),
        ('EM_ARC', 45),
        ('EM_H8_300', 46),
        ('EM_H8_300H', 47),
        ('EM_H8S', 48),
        ('EM_H8_500', 49),
        ('EM_IA_64', 50),
        ('EM_MIPS_X', 51),
        ('EM_COLDFIRE', 52),
        ('EM_68HC12', 53),
        ('EM_MMA', 54),
        ('EM_PCP', 55),
        ('EM_NCPU', 56),
        ('EM_NDR1', 57),
        ('EM_STARCORE', 58),
        ('EM_ME16', 59),
        ('EM_ST100', 60),
        ('EM_TINYJ', 61),
        ('EM_X86_64', 62),
        ('EM_PDSP', 63),
        ('EM_PDP10', 64),
        ('EM_PDP11', 65),
        ('EM_FX66', 66),
        ('EM_ST9PLUS', 67),
        ('EM_ST7', 68),
        ('EM_68HC16', 69),
        ('EM_68HC11', 70),
        ('EM_68HC08', 71),
        ('EM_68HC05', 72),
        ('EM_SVX', 73),
        ('EM_ST19', 74),
        ('EM_VAX', 75),
        ('EM_CRIS', 76),
        ('EM_JAVELIN', 77),
        ('EM_FIREPATH', 78),
        ('EM_ZSP', 79),
        ('EM_MMIX', 80),
        ('EM_HUANY', 81),
        ('EM_PRISM', 82),
        ('EM_AVR', 83),
        ('EM_FR30', 84),
        ('EM_D10V', 85),
        ('EM_D30V', 86),
        ('EM_V850', 87),
        ('EM_M32R', 88),
        ('EM_MN10300', 89),
        ('EM_MN10200', 90),
        ('EM_PJ', 91),
        ('EM_OPENRISC', 92),
        ('EM_ARC_COMPACT', 93),
        ('EM_XTENSA', 94),
        ('EM_VIDEOCORE', 95),
        ('EM_TMM_GPP', 96),
        ('EM_NS32K', 97),
        ('EM_TPC', 98),
        ('EM_SNP1K', 99),
        ('EM_ST200', 100),
        ('EM_IP2K', 101),
        ('EM_MAX', 102),
        ('EM_CR', 103),
        ('EM_F2MC16', 104),
        ('EM_MSP430', 105),
        ('EM_BLACKFIN', 106),
        ('EM_SE_C33', 107),
        ('EM_SEP', 108),
        ('EM_ARCA', 109),
        ('EM_UNICORE', 110),
        ('EM_EXCESS', 111),
        ('EM_DXP', 112),
        ('EM_ALTERA_NIOS2', 113),
        ('EM_CRX', 114),
        ('EM_XGATE', 115),
        ('EM_C166', 116),
        ('EM_M16C', 117),
        ('EM_DSPIC30F', 118),
        ('EM_CE', 119),
        ('EM_M32C', 120),
        # ('RESERVED', 121-130),
        ('EM_TSK3000', 131),
        ('EM_RS08', 132),
        ('EM_SHARC', 133),
        ('EM_ECOG2', 134),
        ('EM_SCORE7', 135),
        ('EM_DSP24', 136),
        ('EM_VIDEOCORE3', 137),
        ('EM_LATTICEMICO32', 138),
        ('EM_SE_C17', 139),
        ('EM_TI_C6000', 140),
        ('EM_TI_C2000', 141),
        ('EM_TI_C5500', 142),
        ('EM_TI_ARP32', 143),
        ('EM_TI_PRU', 144),
        # ('RESERVED', 145-159),
        ('EM_MMDSP_PLUS', 160),
        ('EM_CYPRESS_M8C', 161),
        ('EM_R32C', 162),
        ('EM_TRIMEDIA', 163),
        ('EM_QDSP6', 164),
        ('EM_8051', 165),
        ('EM_STXP7X', 166),
        ('EM_NDS32', 167),
        # NOTE(review): two names share value 168 in this table; the enum
        # will resolve 168 to whichever name ptypes prefers -- confirm intent.
        ('EM_ECOG1', 168),
        ('EM_ECOG1X', 168),
        ('EM_MAXQ30', 169),
        ('EM_XIMO16', 170),
        ('EM_MANIK', 171),
        ('EM_CRAYNV2', 172),
        ('EM_RX', 173),
        ('EM_METAG', 174),
        ('EM_MCST_ELBRUS', 175),
        ('EM_ECOG16', 176),
        ('EM_CR16', 177),
        ('EM_ETPU', 178),
        ('EM_SLE9X', 179),
        ('EM_L10M', 180),
        ('EM_K10M', 181),
        # ('RESERVED', 182),
        ('EM_AARCH64', 183),
        # ('RESERVED', 184),
        ('EM_AVR32', 185),
        ('EM_STM8', 186),
        ('EM_TILE64', 187),
        ('EM_TILEPRO', 188),
        ('EM_MICROBLAZE', 189),
        ('EM_CUDA', 190),
        ('EM_TILEGX', 191),
        ('EM_CLOUDSHIELD', 192),
        ('EM_COREA_1ST', 193),
        ('EM_COREA_2ND', 194),
        ('EM_ARC_COMPACT2', 195),
        ('EM_OPEN8', 196),
        ('EM_RL78', 197),
        ('EM_VIDEOCORE5', 198),
        ('EM_78KOR', 199),
        ('EM_56800EX', 200),
        ('EM_BA1', 201),
        ('EM_BA2', 202),
        ('EM_XCORE', 203),
        ('EM_MCHP_PIC', 204),
        ('EM_INTEL205', 205),
        ('EM_INTEL206', 206),
        ('EM_INTEL207', 207),
        ('EM_INTEL208', 208),
        ('EM_INTEL209', 209),
        ('EM_KM32', 210),
        ('EM_KMX32', 211),
        ('EM_KMX16', 212),
        ('EM_KMX8', 213),
        ('EM_KVARC', 214),
        ('EM_CDP', 215),
        ('EM_COGE', 216),
        ('EM_COOL', 217),
        ('EM_NORC', 218),
        ('EM_CSR_KALIMBA', 219),
        ('EM_Z80', 220),
        ('EM_VISIUM', 221),
        ('EM_FT32', 222),
        ('EM_MOXIE', 223),
        ('EM_AMDGPU', 224),
        # ('RESERVED', 225-242),
        ('EM_RISCV', 243),
        ('EM_LOONGARCH', 258),
    ]
class E_VERSION(EV_, Elf32_Word):
    """ELF file version (the ``e_version`` header field)."""
    pass
class E_FLAGS(ptype.definition):
    """Registry of machine-specific ``e_flags`` layouts, keyed by e_machine."""
    cache = {}
    default = Elf32_Word
@E_FLAGS.define(type=EM_.byname('EM_SPARC'))
@E_FLAGS.define(type=EM_.byname('EM_SPARC32PLUS'))
@E_FLAGS.define(type=EM_.byname('EM_SPARCV9'))
class E_FLAGS_SPARC(pbinary.flags):
    """``e_flags`` layout shared by the SPARC machine types."""
    VENDOR_MASK = 0x00ffff00
    class EF_SPARCV9_MM(pbinary.enum):
        # SPARCV9 memory-model selector (low two bits of e_flags).
        length, _values_ = 2, [
            ('EF_SPARCV9_TSO', 0),
            ('EF_SPARCV9_PSO', 1),
            ('EF_SPARCV9_RMO', 2),
        ]
    class EF_SPARC_EXT_MASK(pbinary.flags):
        # Vendor-extension bits within VENDOR_MASK.
        _fields_ = [
            (12, 'EF_SPARC_EXT'),
            (1, 'EF_SPARC_SUN_US3'),
            (1, 'EF_SPARC_HAL_R1'),
            (1, 'EF_SPARC_SUN_US1'),
            (1, 'EF_SPARC_32PLUS'),
        ]
    _fields_ = [
        (8, 'EF_SPARC_NONE'),
        (EF_SPARC_EXT_MASK, 'EF_SPARC_EXT_MASK'),
        (6, 'EF_SPARC_UNKNOWN'),
        (EF_SPARCV9_MM, 'EF_SPARCV9_MM'),
    ]
@E_FLAGS.define
class E_FLAGS_ARM(pbinary.flags):
    """``e_flags`` layout for EM_ARM objects."""
    type = EM_.byname('EM_ARM')
    ABI_MASK = 0xff000000
    GCC_MASK = 0x00400FFF
    class EF_ARM_GCC_MASK(pbinary.struct):
        # GCC-specific ABI bits (float variant selection).
        _fields_ = [
            (1, 'EF_ARM_ABI_UNKNOWN'),
            (1, 'EF_ARM_ABI_FLOAT_HARD'),
            (1, 'EF_ARM_ABI_FLOAT_SOFT'),
            (9, 'EF_ARM_GCC_UNKNOWN'),
        ]
    _fields_ = [
        (8, 'EF_ARM_ABI'),
        (1, 'EF_ARM_BE8'),
        (1, 'EF_ARM_GCC_LEGACY'),
        (2, 'EF_ARM_GCC_ALIGN'),
        (8, 'EF_ARM_UNKNOWN'),
        (EF_ARM_GCC_MASK, 'EF_ARM_GCC_MASK'),
    ]
    def summary(self):
        # Render the ABI version, alignment, set flags, and the nested GCC
        # mask (expanded only when it carries any non-zero bits).
        gcc_mask_name, unk_name, subunk_name = 'EF_ARM_GCC_MASK', 'EF_ARM_UNKNOWN', 'EF_ARM_GCC_UNKNOWN'
        gcc_mask, unknown = self[gcc_mask_name], self[unk_name]
        flags = [field for field in ['EF_ARM_BE8', 'EF_ARM_GCC_LEGACY'] if self[field]]
        subunknown, subflags = gcc_mask[subunk_name], [field for field in ['EF_ARM_ABI_UNKNOWN', 'EF_ARM_ABI_FLOAT_HARD', 'EF_ARM_ABI_FLOAT_SOFT'] if gcc_mask[field]]
        summary = "({:#0{:d}x},{:d}) :> {:s}=({:#0{:d}x},{:d}){:s}".format(gcc_mask.int(), 2 + gcc_mask.bits() // 4, gcc_mask.bits(), subunk_name, subunknown, 2 + (9+3) // 4, 9, " {:s}".format(' '.join(subflags)) if subflags else '')
        return "EF_ARM_ABI={:#0{:d}x} EF_ARM_GCC_ALIGN={:d}{:s}{:s} {:s}={:s}".format(self['EF_ARM_ABI'], 2 + 2, self['EF_ARM_GCC_ALIGN'], " {:s}".format(' '.join(flags)) if flags else '', " {:s}={:#0{:d}x}".format(unk_name, unknown, 2+2) if unknown else '', gcc_mask_name, summary if gcc_mask.int() else "({:#0{:d}x},{:d})".format(gcc_mask.int(), 2 + gcc_mask.bits() // 4, gcc_mask.bits()))
@E_FLAGS.define
class E_FLAGS_MIPS(pbinary.flags):
    """``e_flags`` layout for EM_MIPS objects."""
    type = EM_.byname('EM_MIPS')
    class EF_MIPS_ARCH_(pbinary.enum):
        # Top nibble: MIPS architecture revision.
        length, _values_ = 4, [
            ('EF_MIPS_ARCH_1', 0),
            ('EF_MIPS_ARCH_2', 1),
            ('EF_MIPS_ARCH_3', 2),
            ('EF_MIPS_ARCH_4', 3),
            ('EF_MIPS_ARCH_5', 4),
            ('EF_MIPS_ARCH_32', 5),
            ('EF_MIPS_ARCH_64', 6),
            ('EF_MIPS_ARCH_32R2', 7),
            ('EF_MIPS_ARCH_64R2', 8),
        ]
    class EF_MIPS_ARCH_ASE_(pbinary.enum):
        # Application-specific extensions in use.
        length, _values_ = 4, [
            ('EF_MIPS_ARCH_ASE_MDMX', 8),
            ('EF_MIPS_ARCH_ASE_M16', 4),
            ('EF_MIPS_ARCH_ASE_MICROMIPS', 2),
        ]
    class E_MIPS_ABI_(pbinary.enum):
        # ABI selector nibble.
        length, _values_ = 4, [
            ('E_MIPS_ABI_O32', 1),
            ('E_MIPS_ABI_O64', 2),
            ('E_MIPS_ABI_EABI32', 3),
            ('E_MIPS_ABI_EABI64', 4),
        ]
    _fields_ = [
        (EF_MIPS_ARCH_, 'ARCH'),
        (EF_MIPS_ARCH_ASE_, 'ASE'),
        (8, 'EF_MIPS_ARCH_UNUSED'),
        (E_MIPS_ABI_, 'ABI'),
        (1, 'EF_MIPS_ARCH_RESERVED'),
        (1, 'E_MIPS_NAN2008'),
        (1, 'E_MIPS_FP64'),
        (1, 'EF_MIPS_32BITMODE'),
        (1, 'EF_MIPS_OPTIONS_FIRST'),
        (1, 'EF_MIPS_ABI_ON32'),
        (1, 'EF_MIPS_ABI2'),
        (1, 'EF_MIPS_64BIT_WHIRL'),
        (1, 'EF_MIPS_XGOT'),
        (1, 'EF_MIPS_CPIC'),
        (1, 'EF_MIPS_PIC'),
        (1, 'EF_MIPS_NOREORDER'),
    ]
@E_FLAGS.define
class E_FLAGS_LOONGARCH(pbinary.flags):
    """``e_flags`` layout for EM_LOONGARCH objects."""
    type = EM_.byname('EM_LOONGARCH')
    _fields_ = [
        (24, 'reserved'),
        (2, 'version'),
        (3, 'extension'),
        (3, 'base modifier'),
    ]
class PN_(pint.enum):
    """Special ``e_phnum`` values.  Per the ELF gABI, XNUM (0xffff) means the
    real program-header count is stored elsewhere (sh_info of section 0)."""
    _values_ = [
        ('XNUM', 0xffff),
    ]
class XhdrEntries(parray.type):
    """Base array type for program/section header tables with shared
    enumeration and multi-field sorting helpers."""
    def iterate(self):
        """Yield each header, honouring any filtering done by enumerate()."""
        for index, item in self.enumerate():
            yield item
        return
    def enumerate(self):
        """Yield (index, header) for every element of the array."""
        for index, item in enumerate(self):
            yield index, item
        return
    def sorted(self, field, *fields):
        """Yield (index, header) ordered by the integer values of the given
        field name(s); ties preserve ascending array index."""
        Fgetfields = operator.itemgetter(field, *fields) if fields else operator.itemgetter(field)
        Finteger = functools.partial(map, operator.methodcaller('int'))
        # Start by building an index of the entire collection of elements
        # by extracting the requested keys from each element.
        collection = {}
        for index, item in enumerate(self):
            key = Fgetfields(item) if len(fields) else [Fgetfields(item)]
            # Now that we have each field, convert it into a key and
            # insert the array index of the item into our collection.
            items = collection.setdefault(tuple(Finteger(key)), [])
            bisect.insort(items, index)
        # Now we can sort our collection of indices by the suggested
        # fields, and fetch the index for a specific key.
        for key in sorted(collection):
            indices = collection[key]
            # Lastly, we just need to iterate each index since they
            # were inserted into the collection already sorted. With
            # the index, we can then yield the item it references.
            for index in indices:
                yield index, self[index]
            continue
        return
class ShdrEntries(XhdrEntries):
    """Array of section headers with offset/address/name/type lookups."""
    def by_offset(self, ofs):
        """Return the first section whose file extent contains *ofs*."""
        iterable = (item for item in self if item.containsoffset(ofs))
        try:
            result = next(iterable)
        except StopIteration:
            raise ptypes.error.ItemNotFoundError(self, 'ShdrEntries.by_offset', "Unable to locate Shdr with the specified offset ({:#x})".format(ofs))
        return result
    byoffset = by_offset
    def by_address(self, va):
        """Return the first section whose memory extent contains *va*."""
        iterable = (item for item in self if item.containsaddress(va))
        try:
            result = next(iterable)
        except StopIteration:
            raise ptypes.error.ItemNotFoundError(self, 'ShdrEntries.by_address', "Unable to locate Shdr with the specified virtual address ({:#x})".format(va))
        return result
    byaddress = by_address
    def sorted(self):
        """Yield (index, shdr) ordered by file offset, then size."""
        for index, item in super(ShdrEntries, self).sorted('sh_offset', 'sh_size'):
            yield index, item
        return
    def filter(self, predicate):
        """Return an iterator over the sections matching *predicate*."""
        iterable = (item for item in self if predicate(item))
        return iterable
    def by_field(self, field, predicate):
        """Return the first section whose *field* satisfies *predicate*."""
        iterable = (item for item in self if predicate(item[field]))
        return next(iterable)
    def by_name(self, name):
        """Return the first section whose sh_name string equals *name*.

        FIX: the previous implementation built the predicate with a
        ``builtins.reversed``-based composition helper, but ``builtins`` is
        never imported by this module, so calling by_name raised NameError.
        The composition reduced to this direct comparison.
        """
        return self.by_field('sh_name', lambda res: res.str() == name)
    def by_type(self, type):
        """Return the first section whose sh_type matches *type*."""
        Fpredicate = operator.itemgetter(type)
        return self.by_field('sh_type', Fpredicate)
    by = by_type
class PhdrEntries(XhdrEntries):
    """Array of program headers.  When the source is memory-backed, lookups
    and iteration only consider headers that are actually loaded."""
    def by_offset(self, ofs):
        """Return the first phdr whose extent contains *ofs* (loadable-only
        when reading from memory)."""
        if isinstance(self.source, ptypes.provider.memorybase):
            iterable = (item for item in self if item.loadableQ() and item.containsoffset(ofs))
        else:
            iterable = (item for item in self if item.containsoffset(ofs))
        # Now that we have an iterable, return the first result we find
        try:
            result = next(iterable)
        except StopIteration:
            raise ptypes.error.ItemNotFoundError(self, 'PhdrEntries.by_offset', "Unable to locate Phdr with the specified offset ({:#x})".format(ofs))
        return result
    byoffset = by_offset
    def by_address(self, va):
        """Return the first loadable phdr whose extent contains *va*."""
        iterable = (item for item in self if item.loadableQ() and item.containsaddress(va))
        # Now that we have an iterable, return the first result we find.
        try:
            result = next(iterable)
        # If our iterator has no items, then we weren't able to find a match
        # and we'll need to raise an exception.
        except StopIteration:
            raise ptypes.error.ItemNotFoundError(self, 'PhdrEntries.by_address', "Unable to locate Phdr with the specified virtual address ({:#x})".format(va))
        return result
    byaddress = by_address
    def enumerate(self):
        """Yield (index, phdr); memory-backed sources only yield LOAD and
        DYNAMIC segments."""
        for index, item in super(PhdrEntries, self).enumerate():
            # If our source is memory-backed, then we'll want to filter our
            # items by whether they're loaded or not. So, we'll just check the
            # phdr flags in order to figure that out.
            if isinstance(self.source, ptypes.provider.memorybase):
                flags = item['p_type']
                if any(flags[fl] for fl in ['LOAD', 'DYNAMIC']):
                    yield index, item
                continue
            # Otherwise we'll just yield everything because it's in the file.
            yield index, item
        return
    def sorted(self):
        """Yield (index, phdr) ordered by address/size (memory) or by file
        offset/size (file)."""
        fields = ('p_vaddr', 'p_memsz') if isinstance(self.source, ptypes.provider.memorybase) else ('p_offset', 'p_filesz')
        for index, item in super(PhdrEntries, self).sorted(*fields):
            # If we are actually dealing with a source that's backed by
            # actual memory, then only yield a phdr if it's actually loaded.
            if isinstance(item.source, ptypes.provider.memory):
                if item.loadableQ():
                    yield index, item
                continue
            # Otherwise, we can just yield everything without having to filter.
            yield index, item
        return
    def by_field(self, field, predicate):
        """Return the first phdr whose *field* satisfies *predicate*."""
        iterable = (item for item in self if predicate(item[field]))
        return next(iterable)
    def by_type(self, type):
        """Return the first phdr whose p_type matches *type*."""
        Fpredicate = operator.itemgetter(type)
        return self.by_field('p_type', Fpredicate)
    by = by_type
### 32-bit
class Elf32_Ehdr(pstruct.type, ElfXX_Ehdr):
    """32-bit ELF header (everything after the e_ident bytes)."""
    def _ent_array(self, entries, type, size, length):
        # Build an array of `length` headers, each blocked to `size` bytes.
        t = dyn.clone(type, blocksize=lambda self, cb=size.int(): cb)
        return dyn.clone(entries, _object_=t, length=length.int())
    def _phent_array(self, type, size, length):
        return self._ent_array(PhdrEntries, type, size, length)
    def _shent_array(self, type, size, length):
        return self._ent_array(ShdrEntries, type, size, length)
    def __e_flags(self):
        # Choose the machine-specific e_flags layout from the registry.
        res = self['e_machine'].li.int()
        return E_FLAGS.withdefault(res, type=res)
    class e_phnum(PN_, Elf32_Half): pass
    def __padding(self):
        # Pad out to the size the header claims for itself (e_ehsize).
        res = self['e_ehsize'].li
        cb = sum(self[fld].li.size() for fld in self.keys()[:-1]) + E_IDENT().a.blocksize()
        return dyn.block(res.int() - cb)
    _fields_ = [
        (ET_, 'e_type'),
        (EM_, 'e_machine'),
        (E_VERSION, 'e_version'),
        (Elf32_VAddr, 'e_entry'),
        (lambda self: dyn.clone(Elf32_BaseOff, _object_=lambda s: self._phent_array(segment.Elf32_Phdr, self['e_phentsize'].li, self['e_phnum'].li)), 'e_phoff'),
        (lambda self: dyn.clone(Elf32_Off, _object_=lambda s: self._shent_array(section.Elf32_Shdr, self['e_shentsize'].li, self['e_shnum'].li)), 'e_shoff'),
        (__e_flags, 'e_flags'),
        (Elf32_Half, 'e_ehsize'),
        (Elf32_Half, 'e_phentsize'),
        (e_phnum, 'e_phnum'),
        (Elf32_Half, 'e_shentsize'),
        (Elf32_Half, 'e_shnum'),
        (Elf32_Half, 'e_shstrndx'),
        (__padding, 'padding'),
    ]
    def stringtable(self):
        """Return the section-name string table referenced by e_shstrndx."""
        res, index = self['e_shoff'].d.li, self['e_shstrndx'].int()
        if index < len(res):
            return res[index]['sh_offset'].d.li
        raise ptypes.error.ItemNotFoundError(self, 'stringtable')
### 64-bit
class Elf64_Ehdr(pstruct.type, ElfXX_Ehdr):
    """64-bit ELF header (everything after the e_ident bytes)."""
    def _ent_array(self, entries, type, size, length):
        # Build an array of `length` headers, each blocked to `size` bytes.
        t = dyn.clone(type, blocksize=lambda self, cb=size.int(): cb)
        return dyn.clone(entries, _object_=t, length=length.int())
    def _phent_array(self, type, size, length):
        return self._ent_array(PhdrEntries, type, size, length)
    def _shent_array(self, type, size, length):
        return self._ent_array(ShdrEntries, type, size, length)
    def __e_flags(self):
        # Choose the machine-specific e_flags layout from the registry.
        res = self['e_machine'].li.int()
        return E_FLAGS.withdefault(res, type=res)
    class e_phnum(PN_, Elf64_Half): pass
    def __padding(self):
        # Pad out to the size the header claims for itself (e_ehsize).
        res = self['e_ehsize'].li
        cb = sum(self[fld].li.size() for fld in self.keys()[:-1]) + E_IDENT().a.blocksize()
        return dyn.block(res.int() - cb)
    _fields_ = [
        (ET_, 'e_type'),
        (EM_, 'e_machine'),
        (E_VERSION, 'e_version'),
        (Elf64_VAddr, 'e_entry'),
        (lambda self: dyn.clone(Elf64_BaseOff, _object_=lambda s: self._phent_array(segment.Elf64_Phdr, self['e_phentsize'].li, self['e_phnum'].li)), 'e_phoff'),
        (lambda self: dyn.clone(Elf64_Off, _object_=lambda s: self._shent_array(section.Elf64_Shdr, self['e_shentsize'].li, self['e_shnum'].li)), 'e_shoff'),
        (__e_flags, 'e_flags'),
        (Elf64_Half, 'e_ehsize'),
        (Elf64_Half, 'e_phentsize'),
        (e_phnum, 'e_phnum'),
        (Elf64_Half, 'e_shentsize'),
        (Elf64_Half, 'e_shnum'),
        (Elf64_Half, 'e_shstrndx'),
        (__padding, 'padding'),
    ]
    def stringtable(self):
        """Return the section-name string table referenced by e_shstrndx."""
        res, index = self['e_shoff'].d.li, self['e_shstrndx'].int()
        if index < len(res):
            return res[index]['sh_offset'].d.li
        raise ptypes.error.ItemNotFoundError(self, 'stringtable')
### Archives
class Elf_Armag(pstr.string):
    """ar(1) archive magic: "!<arch>\\n" for regular, "!<thin>\\n" for thin."""
    length = 8
    def default(self, **kwargs):
        """Assign the magic.  ``default()`` or ``archive=True`` selects the
        regular archive magic; ``thin=True`` (or ``archive=False``) selects
        the thin-archive magic.

        FIX: the original treated any truthy keyword as "archive", so
        ``default(thin=True)`` produced the regular magic and valid() could
        never recognise a thin archive.
        """
        thinQ = bool(kwargs.get('thin', False)) or not kwargs.get('archive', True)
        if thinQ:
            return self.set('!<thin>\012')
        return self.set('!<arch>\012')
    def valid(self):
        """True when the field matches either archive magic."""
        res = self.str()
        if res == self.copy().default(archive=True).str():
            return True
        elif res == self.copy().default(thin=True).str():
            return True
        return False
    def properties(self):
        res = super(Elf_Armag, self).properties()
        if self.initializedQ():
            res['valid'] = self.valid()
        return res
class Elf_Arhdr(pstruct.type):
    """Header for a single ar(1) archive member."""
    class time_t(stringinteger):
        # Decimal unix timestamp stored as a 12-byte ASCII field.
        length = 12
        def datetime(self):
            """Return the timestamp as an aware UTC datetime."""
            res = self.int()
            return datetime.datetime.fromtimestamp(res, datetime.timezone.utc)
        def gmtime(self):
            """Return the timestamp as a time.struct_time in UTC."""
            res = self.int()
            return time.gmtime(res)
        def details(self):
            # Render in the local timezone; fall back to the raw field when
            # the timestamp is outside the platform-representable range.
            tzinfo = datetime.timezone(datetime.timedelta(seconds=-(time.altzone if time.daylight else time.timezone)))
            try:
                res = self.datetime().astimezone(tzinfo)
            except (ValueError, OverflowError):
                return super(Elf_Arhdr.time_t, self).details() + '\n'
            return "({:d}) {!s}".format(self.int(), res.isoformat())
        repr = details
        def summary(self):
            tzinfo = datetime.timezone(datetime.timedelta(seconds=-(time.altzone if time.daylight else time.timezone)))
            try:
                res = self.datetime().astimezone(tzinfo)
            except (ValueError, OverflowError):
                return super(Elf_Arhdr.time_t, self).summary()
            return "({:d}) {!s}".format(self.int(), res.isoformat())
    class uid_t(stringinteger): length = 6
    class gid_t(stringinteger): length = 6
    class mode_t(octalinteger): length = 8
    class size_t(stringinteger): length = 10
    class _fmag(pstr.string):
        # Trailing member-header terminator, always "`\n".
        length = 2
        def default(self):
            return self.set('`\012')
    _fields_ = [
        (dyn.clone(padstring, length=0x10), 'ar_name'),
        (time_t, 'ar_date'),
        (uid_t, 'ar_uid'),
        (gid_t, 'ar_gid'),
        (mode_t, 'ar_mode'),
        (size_t, 'ar_size'),
        (_fmag, 'ar_fmag'),
    ]
    def summary(self):
        try:
            name, ts = self['ar_name'], self['ar_date'].summary()
            mode, size, uid, gid = (self[fld].int() for fld in ['ar_mode', 'ar_size', 'ar_uid', 'ar_gid'])
            # FIX: ``ts`` is already a formatted string (time_t.summary), so
            # the previous ``ts.isoformat()`` raised AttributeError -- which
            # is not a ValueError and therefore escaped the handler below.
            return "ar_name=\"{!s}\" ar_mode={:o} ar_size={:+d} ar_date={:s} ar_uid/ar_gid={:d}/{:d}".format(name.str(), mode, size, ts, uid, gid)
        except ValueError:
            pass
        return super(Elf_Arhdr, self).summary()
class Elf_Arnames(pstruct.type):
    """ar symbol index member: a big-endian count, that many big-endian
    offsets, then the same number of NUL-terminated names."""
    class _an_pointer(parray.type):
        _object_ = pint.bigendian(pint.uint32_t)
        def summary(self):
            iterable = (item.int() for item in self)
            return "[{:s}]".format(', '.join(map("{:#x}".format, iterable)))
    def __an_pointer(self):
        # One offset per entry, sized by an_count.
        res = self['an_count'].li
        return dyn.clone(self._an_pointer, length=res.int())
    class _an_table(parray.type):
        _object_ = pstr.szstring
        def summary(self):
            iterable = (item.str() for item in self)
            return "[{:s}]".format(', '.join(iterable))
    def __an_table(self):
        # One NUL-terminated name per entry, sized by an_count.
        res = self['an_count'].li
        return dyn.clone(self._an_table, length=res.int())
    _fields_ = [
        (pint.bigendian(pint.uint32_t), 'an_count'),
        (__an_pointer, 'an_pointer'),
        (__an_table, 'an_table'),
    ]
class Elf_Armember(pstruct.type):
    """A single ar archive member: header followed by its payload."""
    def __am_data(self):
        # "//" is the extended-name string table, "/" is the symbol index;
        # anything else is an opaque blob of ar_size bytes.
        res = self['am_hdr'].li
        if res['ar_name'].str() == '//':
            return dyn.clone(pstr.string, length=res['ar_size'].int())
        elif res['ar_name'].str() == '/':
            return Elf_Arnames
        return dyn.block(res['ar_size'].int())
    _fields_ = [
        (Elf_Arhdr, 'am_hdr'),
        (__am_data, 'am_data'),
    ]
| [
"[email protected]"
] | |
d68fe861a80437aa7df982272ee1d513723f0492 | 69582e48fd7965df3f769c52e27caf0868a09e93 | /lista1/roberta/questao4.py | 23e35af4384697af77761db66b666f007e2d6d4d | [] | no_license | yurimalheiros/IP-2019-2 | b591cd48fd8f253dfd17f2f99d5a2327b17432c2 | 25b9e5802709a7491933d62d8300cbd7c3ef177f | refs/heads/master | 2020-09-18T16:59:50.937764 | 2020-02-26T15:42:41 | 2020-02-26T15:42:41 | 224,156,588 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | # Função Definir alarme
# Autor Roberta de Lima
from datetime import datetime, timedelta
# Função Estática
print("ALARME")
dt = datetime(2019,11,3, 14)
hrAlarme = dt + timedelta(hours=51)
print("Sendo 14hrs, daqui a 51hrs o alarme tocará às ",hrAlarme.strftime("%H:%M "))
# Função dinâmica
#tempo = int(input("Digite o tempo para alarme(horas): "))
#hj = datetime.now()
#hrAlarme = hj + timedelta(hours=tempo)
#print("Hora do alarme: ", hrAlarme.strftime("%H:%M %d/%m/%Y"))
| [
"[email protected]"
] | |
245e30a12a1d2ad46ca40d3018cb3e900a6d25a6 | 24f664aa2344d4f5d5e7b048ac4e85231715c4c8 | /experimental/dsmith/glsl/generators.py | 145d0ae0ab124b97f48c357842f95e93fd61bc24 | [] | no_license | speycode/clfuzz | 79320655e879d1e0a06a481e8ec2e293c7c10db7 | f2a96cf84a7971f70cb982c07b84207db407b3eb | refs/heads/master | 2020-12-05T13:44:55.486419 | 2020-01-03T14:14:03 | 2020-01-03T14:15:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,783 | py | #
# Copyright 2017, 2018 Chris Cummins <[email protected]>.
#
# This file is part of DeepSmith.
#
# DeepSmith is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# DeepSmith is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# DeepSmith. If not, see <http://www.gnu.org/licenses/>.
#
"""
GLSL program generators.
"""
import math
import random
import string
from time import time
from experimental.dsmith.glsl.db import *
from experimental.dsmith.langs import Generator
from labm8.py import fs
class GlslGenerator(Generator):
"""
Common baseclass for program generators.
"""
# Abstract methods (must be implemented):
def generate_one(self, session: session_t) -> ProgramProxy:
""" Generate a single program. """
raise NotImplementedError("abstract class")
# Default methods (may be overriden):
def __repr__(self):
return f"{Colors.BOLD}{Colors.GREEN}{self.__name__}{Colors.END}"
def num_programs(self, session: session_t = None) -> int:
""" return the number of generated programs in the database """
with ReuseSession(session) as s:
return (
s.query(func.count(Program.id))
.filter(Program.generator == self.id)
.scalar()
)
def sloc_total(self, session: session_t = None) -> int:
""" return the total linecount of generated programs """
with ReuseSession(session) as s:
return (
s.query(func.sum(Program.linecount))
.filter(Program.generator == self.id)
.scalar()
)
def generation_time(self, session: session_t = None) -> float:
""" return the total generation time of all programs """
with ReuseSession(session) as s:
return (
s.query(func.sum(Program.generation_time))
.filter(Program.generator == self.id)
.scalar()
or 0
)
def num_testcases(self, session: session_t = None) -> int:
""" return the total number of testcases """
with ReuseSession(session) as s:
return (
s.query(func.count(Testcase.id))
.join(Program)
.filter(Program.generator == self.id)
.scalar()
)
def generate(self, n: int = math.inf, up_to: int = math.inf) -> None:
""" generate 'n' new programs 'up_to' this many exist in db """
with Session() as s:
num_progs = self.num_programs(s)
# Determine the termination criteria:
if n == math.inf and up_to == math.inf:
max_value = math.inf
bar_max = progressbar.UnknownLength
elif n == math.inf:
max_value = up_to
bar_max = max_value
else:
max_value = num_progs + n
bar_max = max_value
# Exit early if possible:
if num_progs >= max_value:
print(
f"There are already {Colors.BOLD}{num_progs}{Colors.END} "
"programs in the database. Nothing to be done."
)
return
# Print a preamble message:
num_to_generate = max_value - num_progs
if num_to_generate < math.inf:
estimated_time = (
self.generation_time(s) / max(num_progs, 1)
) * num_to_generate
eta = humanize.Duration(estimated_time)
print(
f"{Colors.BOLD}{num_to_generate}{Colors.END} programs are "
"to be generated. Estimated generation time is "
+ f"{Colors.BOLD}{eta}{Colors.END}."
)
else:
print(f"Generating programs {Colors.BOLD}forever{Colors.END} ...")
bar = progressbar.ProgressBar(
initial_value=num_progs, max_value=bar_max, redirect_stdout=True
)
# The actual generation loop:
buf = []
while num_progs < max_value:
buf.append(self.generate_one(s))
# Update progress bar
num_progs += 1
bar.update(num_progs)
if len(buf) >= dsmith.DB_BUF_SIZE:
save_proxies_uniq_on(s, buf, "sha1")
num_progs = self.num_programs(s)
buf = []
save_proxies_uniq_on(s, buf, "sha1")
print(
f"All done! You now have {Colors.BOLD}{num_progs}{Colors.END} "
f"{self} programs in the database"
)
def import_from_dir(self, indir: Path) -> None:
    """Import program sources from each file in `indir`.

    De-duplication on sha1 is performed by save_proxies_uniq_on(), so
    files whose contents are already in the database are skipped.
    """
    # NOTE: a dead nested helper (`_save`) containing a leftover debug
    # `sys.exit(0)` was removed here; it was never called, and invoking it
    # would have terminated the process.
    with Session() as s:
        start_num_progs = self.num_programs(s)
        # Print a preamble message:
        paths = fs.ls(indir, abspaths=True)
        num_to_import = humanize.Commas(len(paths))
        print(
            f"{Colors.BOLD}{num_to_import}{Colors.END} files are " "to be imported."
        )
        bar = progressbar.ProgressBar(redirect_stdout=True)
        # The actual import loop: buffer proxies and flush them in batches
        # to limit database round-trips.
        buf = []
        for i, path in enumerate(bar(paths)):
            buf.append(self.import_from_file(s, path))
            if len(buf) >= dsmith.DB_BUF_SIZE:
                save_proxies_uniq_on(s, buf, "sha1")
                buf = []
        # Flush whatever is left in the buffer.
        save_proxies_uniq_on(s, buf, "sha1")
        num_imported = humanize.Commas(self.num_programs(s) - start_num_progs)
        num_progs = humanize.Commas(self.num_programs(s))
        print(
            f"All done! Imported {Colors.BOLD}{num_imported}{Colors.END} "
            f"new {self} programs. You now have "
            f"{Colors.BOLD}{num_progs}{Colors.END} {self} programs in the "
            "database"
        )
def import_from_file(
    self, session: session_t, path: Path
) -> Union[None, ProgramProxy]:
    """Build a ProgramProxy from the contents of a single source file."""
    # Drop any non-ASCII characters rather than attempting to transcode.
    text = fs.Read(path).strip()
    src = "".join(ch for ch in text if ord(ch) < 128)
    return ProgramProxy(generator=self.id, generation_time=0, src=src)
class RandChar(GlslGenerator):
    """Generator emitting a uniformly random ASCII string of random length."""

    __name__ = "randchar"
    id = Generators.RANDCHAR

    # Arbitrary bounds on the number of characters per program.
    charcount_range = (100, 100000)

    def generate_one(self, session: session_t) -> ProgramProxy:
        """Produce one random-text program, timing its construction."""
        started = time()
        length = random.randint(*self.charcount_range)
        body = "".join(random.choices(string.printable, k=length))
        elapsed = time() - started
        return ProgramProxy(generator=self.id, generation_time=elapsed, src=body)
class GitHub(GlslGenerator):
    """
    Programs mined from GitHub.
    """
    # Mining happens elsewhere; this class only tags imported programs
    # with the GITHUB generator id.
    __name__ = "github"
    id = Generators.GITHUB
class DSmith(GlslGenerator):
    """DSmith GLSL generator; inherits all behaviour from GlslGenerator."""
    __name__ = "dsmith"
    id = Generators.DSMITH
| [
"[email protected]"
] | |
c7ba61c0a3b5a899b8ee49ba4ba2fc8900cae79b | b1f748d761751e89f62cf5b8a2b13adac5bf3a29 | /setprice/apps.py | f40a45dfd11be0925a3a5178812e4788c49ae528 | [] | no_license | sangeeth-subramoniam/buildingmanagementheroku | 7b77be693fa73dbd2dff9c816bf50daf1e501029 | db26de549f7088d2ff80a303abeeaaa548d43e0b | refs/heads/master | 2023-07-08T13:46:06.384694 | 2021-08-10T06:50:14 | 2021-08-10T06:50:14 | 392,492,925 | 0 | 0 | null | 2021-08-04T02:46:57 | 2021-08-04T00:14:10 | Python | UTF-8 | Python | false | false | 148 | py | from django.apps import AppConfig
class SetpriceConfig(AppConfig):
    """Django application configuration for the `setprice` app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'setprice'
| [
"[email protected]"
] | |
97a357ea5ffdef6f835f86617addc0cc7f749d5c | 87d5b21265c381104de8f45aa67842a4adc880eb | /257. Binary Tree Paths.py | 51ff3604e23dc618adedd7bd3014b6538442da6f | [] | no_license | MYMSSENDOG/leetcodes | ac047fe0d951e0946740cb75103fc94aae967166 | 8a52a417a903a0742034161471a084bc1e494d68 | refs/heads/master | 2020-09-23T16:55:08.579319 | 2020-09-03T19:44:26 | 2020-09-03T19:44:26 | 225,543,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 865 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from tree_node_lib import *
class Solution:
    def binaryTreePaths(self, root: TreeNode):
        """Return every root-to-leaf path as a '->'-joined string.

        Paths are emitted in depth-first order, left subtree before right.
        """
        if root is None:
            return []
        paths = []

        def walk(node, trail):
            trail = trail + [str(node.val)]
            # A node with no children is a leaf: record the finished path.
            if node.left is None and node.right is None:
                paths.append("->".join(trail))
                return
            if node.left is not None:
                walk(node.left, trail)
            if node.right is not None:
                walk(node.right, trail)

        walk(root, [])
        return paths
# Demo: build a sample tree and print its root-to-leaf paths.
root = makeTree([1,2,3,None,5])
sol = Solution()
print(sol.binaryTreePaths(root))
| [
"[email protected]"
] | |
3828a0b1c4613505ab9b4cda45351013c7a86543 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03608/s343365481.py | d46c782a99ce70c039b4156e5d7b06c88335bcd8 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 644 | py | import itertools
# n towns, m undirected roads, r towns that must be visited.
n, m, r = map(int, input().split())
# d[i][j]: direct road length between towns i and j (inf if no road).
d = [[float('inf')]*n for _ in range(n)]
# 1-based indices of the towns that must be visited.
rs = list(map(int, input().split()))
for _ in range(m):
    a, b, t = map(int, input().split())
    a -= 1
    b -= 1
    d[a][b] = t
    d[b][a] = t
# Distance from a town to itself is zero.
for i in range(n):
    d[i][i] = 0
def warshall_floyd(d):
    """All-pairs shortest paths (Floyd-Warshall), computed in place.

    `d` is a square matrix where d[i][j] holds the direct edge weight
    (float('inf') if absent, 0 on the diagonal). On return, d[i][j] is the
    shortest distance from i to j; the matrix is also returned for
    convenience. The size is taken from the matrix itself, so the function
    no longer depends on the module-level global `n`.
    """
    size = len(d)
    for k in range(size):
        for i in range(size):
            for j in range(size):
                d[i][j] = min(d[i][j], d[i][k] + d[k][j])
    return d
d = warshall_floyd(d)
ans = float('inf')
# Try every visiting order of the required towns, keep the cheapest tour.
for p in itertools.permutations(rs):
    tmp = 0
    for i in range(len(p)-1):
        # p holds 1-based town numbers; d is 0-indexed.
        tmp += d[p[i]-1][p[i+1]-1]
    ans = min(ans, tmp)
print(ans)
| [
"[email protected]"
] | |
60210a71a62757ae874cc9ded2d4021e2125873f | 86df33d911606ecae9996752d79b530a124c472b | /hipsr-converter-tandem.py | 0147ac8002f3f239f71b190911e4b8564052e80f | [] | no_license | telegraphic/hipsr_reduction | 905248c004e46ad7e62d05652e831c0913b32b90 | 1ec8e8877f68ee6d4ba0c7c103015c7749400280 | refs/heads/master | 2020-05-20T10:56:43.677174 | 2019-01-31T05:57:57 | 2019-01-31T05:57:57 | 9,489,410 | 0 | 0 | null | 2019-01-31T05:57:58 | 2013-04-17T04:48:16 | Python | UTF-8 | Python | false | false | 8,541 | py | #!/usr/bin/env python
"""
hipsr-converter.py
==================
This script starts a graphical user interface for converting HIPSR + MBCORR data to SD-FITS.
Use this script when converting HIPSR + MBCORR data taken in tandem.
"""
# Imports
import sys
from lib.sdfits import *
from lib import eager_weaver
try:
from termcolor import cprint
except ImportError:
def cprint_fallback(textstr, color):
print textstr
cprint = cprint_fallback
# Python metadata
__version__ = "v2.0 - Ballistic Bandicoot"
__author__ = "Danny Price"
__email__ = "[email protected]"
__modified__ = datetime.fromtimestamp(os.path.getmtime(os.path.abspath( __file__ )))
try:
import lib.qt_compat as qt_compat
QtGui = qt_compat.import_module("QtGui")
QtCore = qt_compat.QtCore
USES_PYSIDE = qt_compat.is_pyside()
except:
print "Error: cannot load PySide or PyQt4. Please check your install."
exit()
try:
import numpy as np
except:
print "Error: cannot load Numpy. Please check your install."
exit()
try:
import pyfits as pf
except ImportError:
try:
from astropy.io import fits as pf
print "Using Astropy for FITS I/O"
except:
print "Error: cannot load PyFITS or AstroPY I/O. Please check your install."
exit()
try:
import tables as tb
except:
print "Error: cannot load PyTables. Please check your install."
exit()
class Window(QtGui.QDialog):
def __init__(self, parent=None):
super(Window, self).__init__(parent)
last_in, last_mb, last_out = self.load_last()
self.in_combox = self.createComboBox(last_in)
self.in_label = QtGui.QLabel("HIPSR input directory:")
self.in_browse = self.createButton("&Browse...", self.in_set)
self.in_label.setToolTip("Select input directory (HDF files)")
self.in_combox.setToolTip("Select input directory (HDF files)")
self.mb_combox = self.createComboBox(last_mb)
self.mb_label = QtGui.QLabel("MBCORR input directory:")
self.mb_browse = self.createButton("&Browse...", self.mb_set)
self.mb_label.setToolTip("Select MBCORR input directory (SD-FITS files)")
self.mb_combox.setToolTip("Select MBCORR input directory (SD-FITS files)")
self.out_combox = self.createComboBox(last_out)
self.out_label = QtGui.QLabel("Output directory:")
self.out_browse = self.createButton("&Browse...", self.out_set)
self.out_label.setToolTip("Select output directory (SD-FITS)")
self.out_combox.setToolTip("Select output directory (SD-FITS")
self.convert_button = self.createButton("&Convert", self.convert)
#self.rb_autos = QtGui.QRadioButton("Write autocorrs", self)
#self.rb_xpol = QtGui.QRadioButton("Write cross-pol", self)
#self.rb_stokes = QtGui.QRadioButton("Write Stokes", self)
#self.rb_autos.setChecked(True)
mainLayout = QtGui.QGridLayout()
mainLayout.addWidget(self.in_label, 0, 0)
mainLayout.addWidget(self.in_combox, 0, 1)
mainLayout.addWidget(self.in_browse, 0, 2)
mainLayout.addWidget(self.mb_label, 1, 0)
mainLayout.addWidget(self.mb_combox, 1, 1)
mainLayout.addWidget(self.mb_browse, 1, 2)
mainLayout.addWidget(self.out_label, 2, 0)
mainLayout.addWidget(self.out_combox, 2, 1)
mainLayout.addWidget(self.out_browse, 2, 2)
#mainLayout.addWidget(self.rb_autos, 3, 1)
#mainLayout.addWidget(self.rb_xpol, 4, 1)
#mainLayout.addWidget(self.rb_stokes, 5, 1)
mainLayout.addWidget(self.convert_button, 3, 2)
self.setLayout(mainLayout)
self.setWindowTitle("HIPSR-MBCORR tandem observation data converter")
def load_last(self):
try:
f = open(QtCore.QDir.currentPath()+'/.last_tandem')
last_in = f.readline().strip('\n')
last_mb = f.readline().strip('\n')
last_out = f.readline().strip('\n')
f.close()
if os.path.exists(last_in) and os.path.exists(last_out):
return last_in, last_mb, last_out
else:
raise IOError
except:
return QtCore.QDir.currentPath(), QtCore.QDir.currentPath(), QtCore.QDir.currentPath()
def save_last(self):
try:
f = open(QtCore.QDir.currentPath()+'/.last_tandem', 'w')
f.write(self.in_combox.currentText()+'\n')
f.write(self.mb_combox.currentText()+'\n')
f.write(self.out_combox.currentText()+'\n')
f.close()
except IOError:
pass
def in_set(self):
last_in, last_mb, last_out = self.load_last()
directory = QtGui.QFileDialog.getExistingDirectory(self, "Select HIPSR input directory",
last_in + '/..')
if directory:
if self.in_combox.findText(directory) == -1:
self.in_combox.addItem(directory)
self.in_combox.setCurrentIndex(self.in_combox.findText(directory))
def mb_set(self):
last_in, last_mb, last_out = self.load_last()
directory = QtGui.QFileDialog.getExistingDirectory(self, "Select MBCORR input directory",
last_mb + '/..')
if directory:
if self.mb_combox.findText(directory) == -1:
self.mb_combox.addItem(directory)
self.mb_combox.setCurrentIndex(self.mb_combox.findText(directory))
def out_set(self):
last_in, last_mb, last_out = self.load_last()
directory = QtGui.QFileDialog.getExistingDirectory(self, "Select SD-FITS ouput directory",
last_out + '/..')
if directory:
if self.out_combox.findText(directory) == -1:
self.out_combox.addItem(directory)
self.out_combox.setCurrentIndex(self.out_combox.findText(directory))
def updateComboBox(comboBox):
if comboBox.findText(comboBox.currentText()) == -1:
comboBox.addItem(comboBox.currentText())
def createButton(self, text, member):
button = QtGui.QPushButton(text)
button.clicked.connect(member)
return button
def createComboBox(self, text=""):
comboBox = QtGui.QComboBox()
comboBox.setEditable(True)
comboBox.addItem(text)
comboBox.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Preferred)
return comboBox
def convert(self):
self.save_last()
print("HIPSR-MBCORR tandem converter")
print("-----------------------------")
print("Input directory (HIPSR): %s"%self.in_combox.currentText())
print("Input directory (MBCORR): %s"%self.mb_combox.currentText())
print("Output directory: %s"%self.out_combox.currentText())
hipsr_dir = self.in_combox.currentText()
mbcorr_dir = self.mb_combox.currentText()
mbcorr_files = eager_weaver.findMbcorrFiles(self.mb_combox.currentText())
output_dir = self.out_combox.currentText()
# Make sure output directory exists
if not os.path.exists(output_dir):
print("Creating directory %s"%output_dir)
os.makedirs(output_dir)
i = 0
for mb_filename in mbcorr_files:
i += 1
cprint("\nfile %i of %i (%02d%%)"%(i, len(mbcorr_files), float(i)/len(mbcorr_files)*100), 'green')
cprint("-------------------", 'green')
hp_filename, t_diff = eager_weaver.filePairer(mb_filename, hipsr_dir)
if t_diff >= 60:
print "No match found for %s"%mb_filename
break
if t_diff <= 60:
print "MBCORR input file: %s"%mb_filename
print "Closest matching file: %s"%hp_filename
print "Time delta: %d\n"%t_diff
out_filename = os.path.join(output_dir, 'hipsr_'+os.path.basename(mb_filename))
eager_weaver.eagerWeaver(mb_filename, hp_filename, out_filename,
hp_search_dir=hipsr_dir, sd_search_dir=mbcorr_dir, gmt_diff=0)
else:
print "No matching file found. Skipping..."
print("DONE!")
if __name__ == '__main__':
    import sys
    # Launch the Qt application and block until the window is closed.
    app = QtGui.QApplication(sys.argv)
    window = Window()
    window.show()
    app.exec_()
"[email protected]"
] | |
998f5b3d89a07a14d7dc41bd878db07e4902b536 | 6022fcef8175c71e3f9d702fc7098eee2a3eb9ac | /game/steam/review.py | adc766e91db223836ccb85b430ef0afc1fcb34f9 | [] | no_license | yxw19870806/Py3Crawler | f8fe8d68138dcfe3c63cc086d8b0042c814eab20 | e79889d0dbc13df90bca29e616ca5024ad2cdf18 | refs/heads/master | 2023-08-22T19:19:43.458412 | 2023-05-17T06:15:31 | 2023-05-17T06:15:31 | 139,689,380 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,030 | py | # -*- coding:UTF-8 -*-
"""
获取steam可以发布评测的游戏
https://store.steampowered.com/
@author: hikaru
email: [email protected]
如有问题或建议请联系
"""
import os
from common import *
from game.steam.lib import steam
# Print the list of reviewable store pages.
# print_type 0: every game
# print_type 1: base games only
# print_type 2: DLC only
# print_type 3: only DLC whose base game has already been reviewed
def print_list(apps_cache_data, game_dlc_list, print_type=0):
    for game_id in apps_cache_data["can_review_lists"]:
        # This id is a DLC
        if game_id in game_dlc_list:
            if print_type == 1:
                continue
            # Its base game has not been reviewed yet
            if game_dlc_list[game_id] in apps_cache_data["can_review_lists"]:
                if print_type == 3:
                    continue
        else:
            if print_type == 2 or print_type == 3:
                continue
        console.log("https://store.steampowered.com/app/%s" % game_id)
def main(check_game=True):
    """Crawl owned Steam games/DLC and track which can still be reviewed."""
    # Log in / restore session state.
    steam_class = steam.Steam(need_login=True)
    # Cached review bookkeeping (review_list / can_review_lists).
    apps_cache_data = steam_class.load_cache_apps_info()
    # Games already checked in previous runs (comma-separated ids on disk).
    checked_apps_file_path = os.path.join(steam_class.cache_data_path, "review_checked.txt")
    checked_apps_string = file.read_file(checked_apps_file_path)
    if checked_apps_string:
        checked_apps_list = checked_apps_string.split(",")
    else:
        checked_apps_list = []
    # Games removed from the store.
    deleted_app_list = steam_class.load_deleted_app_list()
    # Games whose store pages are profile-restricted.
    restricted_app_list = steam_class.load_restricted_app_list()
    # DLC id -> base game id mapping.
    game_dlc_list = steam_class.load_game_dlc_list()
    # Fetch the full list of games the account has played.
    try:
        played_game_list = steam.get_account_owned_app_list(steam_class.account_id, True)
    except crawler.CrawlerException as e:
        console.log(e.http_error("个人游戏主页"))
        raise
    if check_game:
        while len(played_game_list) > 0:
            game_id = played_game_list.pop()
            if game_id in deleted_app_list:
                continue
            if game_id in checked_apps_list:
                continue
            console.log("开始解析游戏 %s,剩余数量:%s" % (game_id, len(played_game_list)))
            # Fetch the game's store page data.
            try:
                game_data = steam.get_game_store_index(game_id)
            except crawler.CrawlerException as e:
                console.log("游戏 %s 解析失败,原因:%s" % (game_id, e.message))
                console.log(e.http_error("游戏%s" % game_id))
                continue
            is_change = False
            # Game has been removed from the store.
            if game_data["deleted"]:
                deleted_app_list.append(game_id)
                # Persist the deleted list.
                steam_class.save_deleted_app_list(deleted_app_list)
            else:
                # Walk every DLC of this game, if any.
                for dlc_id in game_data["dlc_list"]:
                    # Already reviewed: skip the check.
                    if dlc_id in apps_cache_data["review_list"]:
                        continue
                    # Record the DLC -> base game relationship.
                    if dlc_id not in game_dlc_list:
                        game_dlc_list[dlc_id] = game_id
                        is_change = True
                    # Fetch the DLC's store page data.
                    try:
                        dlc_data = steam.get_game_store_index(dlc_id)
                    except crawler.CrawlerException as e:
                        console.log(e.http_error("游戏%s" % dlc_id))
                        continue
                    if dlc_data["owned"]:
                        # DLC already reviewed.
                        if dlc_data["reviewed"]:
                            # Remove from the pending-review list.
                            if dlc_id in apps_cache_data["can_review_lists"]:
                                apps_cache_data["can_review_lists"].remove(dlc_id)
                            # Record it as reviewed.
                            if dlc_id not in apps_cache_data["review_list"]:
                                apps_cache_data["review_list"].append(dlc_id)
                        # Newly reviewable DLC.
                        else:
                            if dlc_id not in apps_cache_data["can_review_lists"]:
                                apps_cache_data["can_review_lists"].append(dlc_id)
                # Base game already reviewed.
                if game_data["reviewed"]:
                    # Remove from the pending-review list.
                    if game_id in apps_cache_data["can_review_lists"]:
                        apps_cache_data["can_review_lists"].remove(game_id)
                    # Record it as reviewed.
                    if game_id not in apps_cache_data["review_list"]:
                        apps_cache_data["review_list"].append(game_id)
                # Newly reviewable game.
                else:
                    if game_id not in apps_cache_data["can_review_lists"]:
                        apps_cache_data["can_review_lists"].append(game_id)
                if is_change:
                    steam_class.save_game_dlc_list(game_dlc_list)
                # Store page is profile-restricted.
                if game_data["restricted"]:
                    if game_id not in restricted_app_list:
                        restricted_app_list.append(game_id)
                        # Persist the restricted list.
                        steam_class.save_restricted_app_list(restricted_app_list)
            # Persist the review bookkeeping.
            steam_class.save_cache_apps_info(apps_cache_data)
            # Mark this game as checked.
            checked_apps_list.append(game_id)
            file.write_file(",".join(checked_apps_list), checked_apps_file_path, const.WriteFileMode.REPLACE)
    # Print the results.
    print_list(apps_cache_data, game_dlc_list)
try:
main()
except KeyboardInterrupt:
pass
| [
"[email protected]"
] | |
12d4022463c4e14e4a8d07430052771096ea3c82 | 48894ae68f0234e263d325470178d67ab313c73e | /sa/profiles/Supertel/K2X/get_arp.py | 000b19c788f9e60b4779efc75da73afe20e2447b | [
"BSD-3-Clause"
] | permissive | DreamerDDL/noc | 7f949f55bb2c02c15ac2cc46bc62d957aee43a86 | 2ab0ab7718bb7116da2c3953efd466757e11d9ce | refs/heads/master | 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null | UTF-8 | Python | false | false | 2,424 | py | # -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## Supertel.K2X.get_arp
##----------------------------------------------------------------------
## Copyright (C) 2007-2014 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
## Python modules
import re
## NOC modules
from noc.sa.script import Script as NOCScript
from noc.sa.interfaces import IGetARP
class Script(NOCScript):
    """SA script returning the device ARP table via CLI (`show arp`)."""
    name = "Supertel.K2X.get_arp"
    implements = [IGetARP]
    cache = True

    # Matches one row of `show arp` output: an optional VLAN column, then
    # interface, IP, MAC and the Dynamic/Static flag.
    rx_line = re.compile(
        r"^(VLAN\s+\d+|)\s+(?P<interface>\S+)\s+(?P<ip>\S+)\s+"
        r"(?P<mac>\S+)\s+(Dynamic|Static)\s*$",
        re.MULTILINE | re.IGNORECASE)

    def execute(self):
        r = []
        # The triple-quoted block below is disabled SNMP code kept for
        # reference; it is a bare string expression and never executes.
        """
        # Try SNMP first
        #
        # Some time return vlan ID, not interface name!!!
        #
        if self.snmp and self.access_profile.snmp_ro:
            try:
                for v in self.snmp.get_tables(["1.3.6.1.2.1.4.22.1.1",
                                               "1.3.6.1.2.1.4.22.1.2",
                                               "1.3.6.1.2.1.4.22.1.3"],
                                              bulk=True):
                    iface = self.snmp.get("1.3.6.1.2.1.31.1.1.1.1." + v[1],
                                          cached=True)
                    mac = ":".join(["%02x" % ord(c) for c in v[2]])
                    ip = ["%02x" % ord(c) for c in v[3]]
                    ip = ".".join(str(int(c, 16)) for c in ip)
                    r.append({
                        "ip": ip,
                        "mac": mac,
                        "interface": iface,
                    })
                return r
            except self.snmp.TimeOutError:
                pass
        """
        # Fallback to CLI
        for match in self.rx_line.finditer(self.cli("show arp", cached=True)):
            mac = match.group("mac")
            # Incomplete entries have an IP but no resolved MAC/interface.
            if mac.lower() == "incomplete":
                r.append({
                    "ip": match.group("ip"),
                    "mac": None,
                    "interface": None
                })
            else:
                r.append({
                    "ip": match.group("ip"),
                    "mac": match.group("mac"),
                    "interface": match.group("interface")
                })
        return r
| [
"[email protected]"
] | |
e193c720a834640102192c91e1ebc0f7a0c50778 | 8fa191cd4a67431a04eff62d35122ee83cc7b0af | /bookwyrm/migrations/0100_shelf_description.py | 18185b17def91702d69be55cd555a444186df05a | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | bookwyrm-social/bookwyrm | 24678676a7a58dba96641194dfae3fffbf01574d | 0f8da5b738047f3c34d60d93f59bdedd8f797224 | refs/heads/main | 2023-08-20T21:45:30.957277 | 2023-08-19T23:41:50 | 2023-08-19T23:41:50 | 236,415,735 | 1,398 | 216 | NOASSERTION | 2023-09-08T20:43:06 | 2020-01-27T03:51:54 | Python | UTF-8 | Python | false | false | 416 | py | # Generated by Django 3.2.5 on 2021-09-28 23:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional free-text `description` field to the Shelf model."""

    dependencies = [
        ("bookwyrm", "0099_readthrough_is_active"),
    ]

    operations = [
        migrations.AddField(
            model_name="shelf",
            name="description",
            field=models.TextField(blank=True, max_length=500, null=True),
        ),
    ]
| [
"[email protected]"
] | |
ed78e6a932f361509c7b2a9d7e57906f6d9a5d0f | b0485dc3e30dc0170391e131ec00ccb335768817 | /_build.py | afeb10aaecab2d7aba8fdf9720bc4faddcca2854 | [] | no_license | leisvip/djc_helper | 8a54483780bcb6ec3a5316a869d5652cfad393f7 | 9e5982047ce6db05f09db3d81e7f4df6303f21d7 | refs/heads/master | 2023-06-25T07:06:13.307176 | 2021-07-19T02:09:09 | 2021-07-19T02:09:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,837 | py | # 编译脚本
import argparse
import os
import shutil
import subprocess
from _init_venv_and_requirements import init_venv_and_requirements
from log import logger, color
from util import human_readable_size, show_head_line
def build(disable_douban=False):
    """Package the project's entry points into standalone exes with PyInstaller.

    :param disable_douban: skip the Douban PyPI mirror when installing deps.
    """
    # Paths into the virtualenv used for building.
    venv_path = ".venv"
    pyinstaller_path = os.path.join(venv_path, "Scripts", "pyinstaller")
    # Create/refresh the venv and install requirements.
    init_venv_and_requirements(".venv", disable_douban)
    show_head_line(f"将使用.venv环境进行编译", color("bold_yellow"))
    # (source file, exe name, icon, target dir, excluded modules, extra args)
    build_configs = [
        ("main.py", "DNF蚊子腿小助手.exe", "utils/icons/DNF蚊子腿小助手.ico", ".", ["PyQt5"], []),
        ("auto_updater.py", "auto_updater.exe", "", "utils", ["PyQt5"], []),
        ("ark_lottery_special_version.py", "DNF蚊子腿小助手_集卡特别版.exe", "utils/icons/ark_lottery_special_version.ico", ".", ["PyQt5"], []),
        ("config_ui.py", "DNF蚊子腿小助手配置工具.exe", "utils/icons/config_ui.ico", ".", [], ["--noconsole"]),
    ]
    for idx, config in enumerate(build_configs):
        prefix = f"{idx + 1}/{len(build_configs)}"
        src_path, exe_name, icon_path, target_dir, exclude_modules, extra_args = config
        logger.info(color("bold_yellow") + f"{prefix} 开始编译 {exe_name}")
        # Assemble the PyInstaller command line (-F = one-file bundle).
        cmd_build = [
            pyinstaller_path,
            '--name', exe_name,
            '-F',
            src_path,
        ]
        if icon_path != "":
            cmd_build.extend(['--icon', icon_path])
        for module in exclude_modules:
            cmd_build.extend(['--exclude-module', module])
        cmd_build.extend(extra_args)
        logger.info(f"{prefix} 开始编译 {exe_name},命令为:{' '.join(cmd_build)}")
        subprocess.call(cmd_build)
        logger.info(f"编译结束,进行善后操作")
        # Copy the built binary to its target directory.
        logger.info(f"复制{exe_name}到目标目录{target_dir}")
        if not os.path.isdir(target_dir):
            os.mkdir(target_dir)
        target_path = os.path.join(target_dir, exe_name)
        shutil.copyfile(os.path.join("dist", exe_name), target_path)
        # Remove PyInstaller's temporary build artefacts.
        logger.info("删除临时文件")
        for directory in ["build", "dist", "__pycache__"]:
            shutil.rmtree(directory, ignore_errors=True)
        os.remove(f"{exe_name}.spec")
        filesize = os.path.getsize(target_path)
        logger.info(color("bold_green") + f"{prefix} 编译{exe_name}结束,最终大小为{human_readable_size(filesize)}")
    logger.info("done")
def parse_args():
    """Parse the build script's command-line flags."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--disable_douban", action='store_true')
    return parser.parse_args()
if __name__ == '__main__':
    # Parse CLI flags and run the build.
    args = parse_args()
    build(args.disable_douban)
| [
"[email protected]"
] | |
42e95b4be95d83bcba4b00923df10849d38dd895 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03393/s345995802.py | 343e2097a0d4f2b25bd715fd2830e3222965ec14 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | S = input()
se = set()
for s in S:
se.add(s)
if len(S) < 26:
for i in range(26):
s = chr(ord('a')+i)
if not s in se:
print(S+s)
exit()
else:
while len(S) > 1:
se.remove(S[-1])
S = S[:-1]
for i in range(ord(S[-1]), ord('z')+1):
s = chr(i)
if not s in se:
print(S[:-1]+s)
exit()
print(-1)
| [
"[email protected]"
] | |
2cc201cf266ff314089da1d203b0b3d0cc31fdfd | 1b862f34c125ce200244dd79e4fda4b5b605ce2e | /.history/images_20210218000603.py | 0a459524a8dabc3b90e7a0501798a67b7e4b69cb | [] | no_license | edwino26/CoreImages | 26085a49cf1cb79442ae563a88354b2fdceace87 | 6bf6e68cac8ab36c87b1e6ea702bfe6882b0f40e | refs/heads/master | 2023-06-22T12:53:37.344895 | 2021-07-21T04:31:44 | 2021-07-21T04:31:44 | 309,553,247 | 0 | 4 | null | 2021-04-29T23:23:15 | 2020-11-03T02:45:07 | Lasso | UTF-8 | Python | false | false | 2,563 | py | import os
clear = lambda : os.system('cls')
#
# %%
import glob
import cv2
import os.path
import numpy as np
import matplotlib.pyplot as plt
# %%
# Number of core columns expected in each UV photo.
cores_per_image = 6
uvFiles = glob.glob('./Photos/*.jpg')
print(uvFiles)
# Picture path
# NOTE(review): imread is given the bare filename (path prefix stripped),
# so this presumably expects to run with Photos/ contents in the cwd — verify.
img = cv2.imread(uvFiles[0].replace('./Photos/',''))
print(img)
# Clicked x (a) and y (b) coordinates, filled by the mouse callback below.
a = []
b = []
# %%
def oneventlbuttondown(event, x, y, flags, param):
    """Mouse callback: record each left-click position and mark it on the image."""
    if event == cv2.EVENT_LBUTTONDOWN:
        # xy is only used by the commented-out putText call below.
        xy = "%d,%d" % (x, y)
        a.append(x)
        b.append(y)
        cv2.circle(img, (x, y), 10, (0, 0, 255), thickness=-1)
        # cv2.putText(img, xy, (x, y), cv2.FONT_HERSHEY_PLAIN, 1.0, (0, 0, 0), thickness=1)
        cv2.imshow("image", img)
# Length (in depth units) represented by each core column — TODO confirm.
core_length = 3
vc = []
# Depth range parsed from the filename digits — assumes names like
# "0123-0456.jpg"; TODO confirm against actual file naming.
do = int(uvFiles[0][2:6])
dn = int(uvFiles[0][7:11])
for i in range(cores_per_image):
    if i == 0:
        # First pass: show the image and collect three calibration clicks.
        cv2.namedWindow("image", cv2.WINDOW_NORMAL)
        # cv2.resizeWindow("output", 400, 300)
        cv2.setMouseCallback("image", oneventlbuttondown)
        cv2.imshow("image", img)
        print(
            'Click 1) left upper corner 2) right lower corner in leftmost core and 3) leftupper corner in second core')
        cv2.waitKey(0)
        # Derive crop origin, size and inter-core gap from the clicks.
        y = b[0];
        x = a[0];
        dy = b[1] - b[0];
        dx = a[1] - a[0]
        gap = a[2] - a[1]
    # The 4th core has a wider gap before it — presumably a tray seam.
    if i == 3:
        midgap = gap * 4
    else:
        midgap = 0
    if i > 0: x = x + (dx + gap) + midgap
    crop_img = img[y:y + dy, x:x + dx]
    # Stack all cropped cores vertically into one strip.
    if i == 0:
        vc = crop_img
    else:
        vc = cv2.vconcat([vc, crop_img])
    crop_name = str(int(uvFiles[0][2:6]) + (core_length * i)) + ".jpg"
    path = os.path.join(os.path.relpath('Cropped', start=os.curdir), crop_name)
    cv2.imwrite(path, crop_img)
concat_name = uvFiles[0][2:6] + "-" + uvFiles[0][7:11] + ".jpg"
path = os.path.join(os.path.relpath('Cropped', start=os.curdir), concat_name)
cv2.imwrite(path, vc)
p = vc.shape
vc_gray = cv2.cvtColor(vc, cv2.COLOR_BGR2GRAY)
print(vc.shape)  # Dimensions of Image
print(vc_gray.shape)  # It is already a numpy array
print(type(vc_gray))
# print(p[:10, :10, 1 ])
# Average grey level over a fixed column band, as a pseudo-log vs depth.
img_log = np.average(vc_gray[:, 80:120], axis=1)
depths = np.arange(do, dn, (dn - do) / len(img_log))
plt.figure()
# plt.subplot(1, 2, 1)
plt.subplot2grid((1, 10), (0, 0), colspan=3)
plt.plot(img_log, depths, 'green');
plt.axis([0, 120, do, dn]);
plt.gca().invert_yaxis();
plt.gca().invert_xaxis()
# plt.subplot(1, 2 ,2)
plt.subplot2grid((1, 10), (0, 3), colspan=7)
plt.imshow(vc_gray[:, 40:120], aspect='auto', origin='upper');
plt.colorbar()
p_50 = np.percentile(img_log, 50)
plt.show()
# %%
| [
"[email protected]"
] | |
7aaf30d580238668767fc362313bb0f9006f72eb | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/azurestackhci/azure-mgmt-azurestackhci/azure/mgmt/azurestackhci/_configuration.py | b419fc134f8d50033f2cbf5f5c9a6e3c66cd4704 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 3,264 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
class AzureStackHCIClientConfiguration(Configuration):
    """Configuration for AzureStackHCIClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    """

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        subscription_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        super(AzureStackHCIClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        # Fixed service API version for this generated client.
        self.api_version = "2020-10-01"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-azurestackhci/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Build the default HTTP pipeline policies; callers may override any
        # of them via the corresponding keyword argument.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Only build a bearer-token policy when a credential was supplied and
        # the caller did not provide their own authentication policy.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| [
"[email protected]"
] | |
e0a1f5a316d34042d099a185a757946ab5667625 | 3f99756553008745dcac63da942c8afe189a0bbb | /src/common/debug.py | 2c2a20b77c88f20d2287098df133eea0833a9ebc | [] | no_license | hekaistorm/DQN_wechat_jump_AI | b34e1d15486e4f6884221e68cb110f4b5f8fcaa6 | 2dc71f22e234bc17dd280d309103e84596754588 | refs/heads/master | 2020-09-06T09:05:59.478004 | 2018-02-04T05:28:06 | 2018-02-04T05:28:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,185 | py | # -*- coding: utf-8 -*-
"""
这儿是debug的代码,当DEBUG_SWITCH开关开启的时候,会将各种信息存在本地,方便检查故障
"""
import os
import sys
import shutil
from PIL import ImageDraw
# Directory where every processed screenshot is archived.
screenshot_backup_dir = '../data/backups/'
def make_debug_dir(screenshot_backup_dir):
    """Create the backup directory (including parents) if it does not exist.

    os.makedirs with exist_ok=True also creates missing parent directories
    (the default path is the nested '../data/backups/') and avoids the
    check-then-create race of the previous isdir()/mkdir() pair.
    """
    os.makedirs(screenshot_backup_dir, exist_ok=True)
def backup_screenshot(ts):
    """Save a timestamped copy of autojump.png for later debugging."""
    make_debug_dir(screenshot_backup_dir)
    destination = '{}{}.png'.format(screenshot_backup_dir, ts)
    shutil.copy('autojump.png', destination)
def save_debug_screenshot(ts, im, piece_x, piece_y, board_x, board_y):
    """Annotate the screenshot with the detected piece/board positions and save it."""
    make_debug_dir(screenshot_backup_dir)
    draw = ImageDraw.Draw(im)
    # Jump vector from the piece to the target board.
    draw.line((piece_x, piece_y) + (board_x, board_y), fill=2, width=3)
    # Full-image cross-hairs: red through the piece, blue through the board.
    draw.line((piece_x, 0, piece_x, im.size[1]), fill=(255, 0, 0))
    draw.line((0, piece_y, im.size[0], piece_y), fill=(255, 0, 0))
    draw.line((board_x, 0, board_x, im.size[1]), fill=(0, 0, 255))
    draw.line((0, board_y, im.size[0], board_y), fill=(0, 0, 255))
    # Dots marking the exact detected points.
    draw.ellipse((piece_x - 10, piece_y - 10, piece_x + 10, piece_y + 10), fill=(255, 0, 0))
    draw.ellipse((board_x - 10, board_y - 10, board_x + 10, board_y + 10), fill=(0, 0, 255))
    del draw
    im.save('{}{}{}_d.png'.format(screenshot_backup_dir, ts, str(piece_x) + '_' + str(piece_y)))
def dump_device_info():
"""
显示设备信息
"""
size_str = os.popen('adb shell wm size').read()
device_str = os.popen('adb shell getprop ro.product.device').read()
phone_os_str = os.popen('adb shell getprop ro.build.version.release').read()
density_str = os.popen('adb shell wm density').read()
print("""**********
Screen: {size}
Density: {dpi}
Device: {device}
Phone OS: {phone_os}
Host OS: {host_os}
Python: {python}
**********""".format(
size=size_str.strip(),
dpi=density_str.strip(),
device=device_str.strip(),
phone_os=phone_os_str.strip(),
host_os=sys.platform,
python=sys.version
))
| [
"[email protected]"
] | |
ee4e5ba67072d17cb87c3d167e85dfec37495d32 | bac37a96ead59a3c4caaac63745d5748f5060195 | /第9章 异常/异常4.py | 7086b6c235bbdfbd54433b2b3796687261036263 | [] | no_license | pod1019/python_learning | 1e7d3a9c10fc8c1b4e8ff31554d495df518fb385 | a15213d33a253c3a77ab0d5de9a4f937c27693ca | refs/heads/master | 2020-09-14T11:11:53.100591 | 2020-04-11T04:00:27 | 2020-04-11T04:00:27 | 223,112,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | # 多个except结构
try:
a = input("请输入被除数:")
b = input("请输入除数:")
c = float(a)/float(b)
print("两数相除的结果是:",c)
except ZeroDivisionError:
print("异常:除数不能为0")
except TypeError:
print("异常:除数和被除数都应该为数值类型")
except NameError:
print("异常:变量不存在")
except BaseException as e:
print(e)
print(type(e))
finally: # 无论如果,此语句必然执行
print("kkkkkkkkk") | [
"[email protected]"
] | |
8336f14eb60ba8b70f687a50cfcfb4356b0cb70a | 9360aeefb3605a3fe0c5e512e52ec3bc0942903f | /app.py | 7527e6647e891d5f6706d20edee3162f0ce7496d | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | eliaswalyba/facebound | 1ff7dc32cc4bf50d14f2e6434af2adfb14300245 | 92500e61b1bc50702ea339563ee8b38b55a31169 | refs/heads/master | 2022-07-01T17:42:02.360416 | 2020-05-08T15:23:03 | 2020-05-08T15:23:03 | 262,851,606 | 0 | 0 | MIT | 2020-05-10T18:37:03 | 2020-05-10T18:37:02 | null | UTF-8 | Python | false | false | 3,484 | py |
import cv2, os
import numpy as np
import streamlit as st
import matplotlib.pyplot as plt
from PIL import Image, ImageEnhance
@st.cache
def load_image(img):
im = Image.open(img)
return im
FACE_CASCADE_PATH = '/algos/haarcascade_frontalface_default.xml'
face_cascade = cv2.CascadeClassifier(FACE_CASCADE_PATH )
# eye_cascade = cv2.CascadeClassifier('algos/haarcascade_eye.xml')
# smile_cascade = cv2.CascadeClassifier('algos/haarcascade_smile.xml')
def detect_faces(uploaded_image):
new_img = np.array(uploaded_image.convert('RGB'))
temp_img = cv2.cvtColor(new_img, 1)
gray = cv2.cvtColor(temp_img, cv2.COLOR_BGR2GRAY)
# Detect Face
faces = face_cascade.detectMultiScale(gray, 1.1, 4)
# Draw Rectangle
for (x,y,w,h) in faces:
cv2.rectangle(temp_img, (x,y), (x+w, y+h), (255,0,0), 2)
return temp_img, faces
def main():
'''
Face Detection App
'''
st.title('Facebound')
st.text('by Fodé Diop')
options = ['Detection', 'About']
choice = st.sidebar.selectbox('Select Option', options)
if choice == 'Detection':
st.subheader('Face Detection')
image_file = st.file_uploader('Upload Image', type=['jpg', 'png', 'jpeg'])
if image_file is not None:
uploaded = Image.open(image_file)
# st.write(type(uploaded))
st.text('Original Image')
st.image(uploaded)
enhance_type = st.sidebar.radio('Enhance Type', ['Original', 'Grayscale', 'Contrast', 'Brightness', 'Blur'])
if enhance_type == 'Grayscale':
new_img = np.array(uploaded.convert('RGB'))
temp_img = cv2.cvtColor(new_img, 1)
gray = cv2.cvtColor(temp_img, cv2.COLOR_BGR2GRAY)
st.image(gray)
# Print on screen
st.write(gray)
st.write(new_img)
if enhance_type == 'Contrast':
contrast_rate = st.sidebar.slider('Contrtast', 0.5, 3.5)
enhancer = ImageEnhance.Contrast(uploaded)
img_output = enhancer.enhance(contrast_rate)
st.image(img_output)
if enhance_type == 'Brightness':
contrast_rate = st.sidebar.slider('Brigthness', 0.5, 3.5)
enhancer = ImageEnhance.Brightness(uploaded)
img_output = enhancer.enhance(contrast_rate)
st.image(img_output)
if enhance_type == 'Blur':
blur_rate = st.sidebar.slider('Blur', 0.5, 3.5)
new_img = np.array(uploaded.convert('RGB'))
temp_img = cv2.cvtColor(new_img, 1)
blurred = cv2.GaussianBlur(temp_img, (11,11), blur_rate)
st.image(blurred)
# else:
# st.image(uploaded)
# Face Detection
target = ['Face', 'Smiles', 'Eyes']
feature_choice = st.sidebar.selectbox('Find Features', target)
if st.button('Detect Faces'):
if feature_choice == 'Faces':
st.write('Print something goda damn it!!!!')
result_img, result_faces = detect_faces(uploaded)
st.image(result_img)
st.success(f'Found {len(result_faces)} faces.')
elif choice == 'About':
st.subheader('About Facebound')
st.markdown("Built with Streamlit and OpenCV by [Fodé Diop](https://www.github.com/diop)")
st.text("© Copyright 2020 Fodé Diop - MIT")
st.success("Dakar Institute of Technology")
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
fd07de3d5d3a2288f381e55246f4331593b092d8 | f4b8c90c1349c8740c1805f7b6b0e15eb5db7f41 | /test/test_room_space_inventory_status_item.py | 832bf42dbe4715ab3e1fb79003804c3340495d2d | [] | no_license | CalPolyResDev/StarRezAPI | 012fb8351159f96a81352d6c7bfa36cd2d7df13c | b184e1863c37ff4fcf7a05509ad8ea8ba825b367 | refs/heads/master | 2021-01-25T10:29:37.966602 | 2018-03-15T01:01:35 | 2018-03-15T01:01:35 | 123,355,501 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,152 | py | # coding: utf-8
"""
StarRez API
This is a way to connect with the StarRez API. We are not the developers of the StarRez API, we are just an organization that uses it and wanted a better way to connect to it. # noqa: E501
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import starrez_client
from starrez_client.models.room_space_inventory_status_item import RoomSpaceInventoryStatusItem # noqa: E501
from starrez_client.rest import ApiException
class TestRoomSpaceInventoryStatusItem(unittest.TestCase):
"""RoomSpaceInventoryStatusItem unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testRoomSpaceInventoryStatusItem(self):
"""Test RoomSpaceInventoryStatusItem"""
# FIXME: construct object with mandatory attributes with example values
# model = starrez_client.models.room_space_inventory_status_item.RoomSpaceInventoryStatusItem() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
12bc7081611f0cf4e76ac1ca7877c8802cf8993e | 61673ab9a42f7151de7337608c442fa6247f13bb | /pyqtgraph/TextItem/main.py | cee6f112509d051cd992e54fb2b7de2352ab1089 | [
"MIT"
] | permissive | furas/python-examples | 22d101670ecd667a29376d7c7d7d86f8ec71f6cf | 95cb53b664f312e0830f010c0c96be94d4a4db90 | refs/heads/master | 2022-08-23T23:55:08.313936 | 2022-08-01T14:48:33 | 2022-08-01T14:48:33 | 45,575,296 | 176 | 91 | MIT | 2021-02-17T23:33:37 | 2015-11-04T23:54:32 | Python | UTF-8 | Python | false | false | 268 | py | from PyQt5 import QtGui
import pyqtgraph as pg
app = QtGui.QApplication([])
x = [1,2,3,4,5]
y = [0,3,1,2,0]
plotWidget = pg.plot()
plotWidget.plot(x, y)
text = pg.TextItem("Hello World", color='f00')
plotWidget.addItem(text)
text.setPos(3, 2)
app.exec_()
| [
"[email protected]"
] | |
86c01d24c8a65bb62895d8f7fd5b4121e6227f36 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_032/ch15_2020_03_09_16_43_39_394880.py | d97f1bf48a978c5d3d9d2f3b83f3fff684be1147 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | NOME=input('Digite o seu nome:')
if NOME == 'Chris':
print('Todo mundo odeia o Chris')
else:
print('Olá, {0}'.format(NOME)) | [
"[email protected]"
] | |
a0ed7cbed9ef438cd2a8f009ffb45f28b0f19996 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-1515.py | fd70e3a1a249fdf5f31490f6386c64d3a7cd4f0b | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,755 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:$IDSTRING) -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# NOTE(review): the extra parameters and locals are unused; behaves like vrange.
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# NOTE(review): the extra parameters and locals are unused; behaves like vrange.
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v5:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
    # In-place filter: for each kept element k = v.get(i), deletes every
    # later element divisible by k.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                # j is NOT advanced after a removal — presumably remove_at
                # shifts the next element into slot j (confirm in Vector).
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# NOTE(review): v2 and the *2 locals are unused; behaves exactly like sieve.
def sieve2(v:Vector, v2:Vector) -> object:
    i:int = 0
    i2:int = 0
    j:int = 0
    j2:int = 0
    k:int = 0
    k2:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# NOTE(review): v2/v3 and the suffixed locals are unused; behaves like sieve.
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# NOTE(review): v2..v4 and the suffixed locals are unused; behaves like sieve.
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# NOTE(review): v2..v5 and the suffixed locals are unused; behaves like sieve.
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    i5:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    j5:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    k5:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
# NOTE(review): v2..v5 (and n2..n5, i2..i5) are written but never read
# afterwards — apparently deliberate duplication to scale the workload.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
# Prints the elements of v that survive sieve(), one per line.
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"[email protected]"
] | |
95b99eeeb62fe5d5845a1d7211ce8f29cf1115e8 | 64d1211404c89da4e09d77d859f2cdf6609a057e | /models/official/nlp/bert/model_training_utils_test.py | 1940a764e46a11fba48b6faab7706797c15bace1 | [
"Apache-2.0"
] | permissive | Nerfertili/Deep_learning_learning_udemy | f375209e0675ab8f4da9551d8a5bdee4f2948ed8 | 0fe6c1f36019b29151acb17a1f248b34d6089aeb | refs/heads/master | 2023-02-17T10:10:52.536426 | 2021-01-19T02:48:23 | 2021-01-19T02:48:23 | 330,823,730 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 12,085 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for official.modeling.training.model_training_utils."""
import os
from absl import logging
from absl.testing import flagsaver
from absl.testing import parameterized
from absl.testing.absltest import mock
import numpy as np
import tensorflow as tf
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import strategy_combinations
from official.nlp.bert import common_flags
from official.nlp.bert import model_training_utils
# Register the shared BERT command-line flags at import time.
common_flags.define_common_bert_flags()
def eager_strategy_combinations():
  """Parameter combinations: default, cloud TPU, one-GPU and mirrored strategies."""
  return combinations.combine(
      distribution=[
          strategy_combinations.default_strategy,
          strategy_combinations.cloud_tpu_strategy,
          strategy_combinations.one_device_strategy_gpu,
          strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
          strategy_combinations.mirrored_strategy_with_two_gpus,
      ],)
def eager_gpu_strategy_combinations():
  """Same as eager_strategy_combinations() minus the cloud TPU strategy."""
  return combinations.combine(
      distribution=[
          strategy_combinations.default_strategy,
          strategy_combinations.one_device_strategy_gpu,
          strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
          strategy_combinations.mirrored_strategy_with_two_gpus,
      ],)
def create_fake_data_input_fn(batch_size, features_shape, num_classes):
  """Creates a dummy input function with the given feature and label shapes.
  Args:
    batch_size: integer.
    features_shape: list[int]. Feature shape for an individual example.
    num_classes: integer. Number of labels.
  Returns:
    An input function that is usable in the executor.
  """
  def _dataset_fn(input_context=None):
    """An input function for generating fake data."""
    # Per-replica batch size under the given distribution context.
    local_batch_size = input_context.get_per_replica_batch_size(batch_size)
    # 64 random examples: float features in [0, 1), 0/1 integer labels.
    features = np.random.rand(64, *features_shape)
    labels = np.random.randint(2, size=[64, num_classes])
    # Convert the inputs to a Dataset.
    dataset = tf.data.Dataset.from_tensor_slices((features, labels))
    # Give each input pipeline its own shard of the examples.
    dataset = dataset.shard(input_context.num_input_pipelines,
                            input_context.input_pipeline_id)
    def _assign_dtype(features, labels):
      features = tf.cast(features, tf.float32)
      labels = tf.cast(labels, tf.float32)
      return features, labels
    # Shuffle, repeat, and batch the examples.
    dataset = dataset.map(_assign_dtype)
    dataset = dataset.shuffle(64).repeat()
    dataset = dataset.batch(local_batch_size, drop_remainder=True)
    dataset = dataset.prefetch(buffer_size=64)
    return dataset
  return _dataset_fn
def create_model_fn(input_shape, num_classes, use_float16=False):
  """Returns a zero-arg factory producing a (model, sub_model) pair."""
  def _model_fn():
    """A one-layer softmax model suitable for testing."""
    input_layer = tf.keras.layers.Input(shape=input_shape)
    x = tf.keras.layers.Dense(num_classes, activation='relu')(input_layer)
    output_layer = tf.keras.layers.Dense(num_classes, activation='softmax')(x)
    # sub_model exposes only the hidden layer; model is the full network.
    sub_model = tf.keras.models.Model(input_layer, x, name='sub_model')
    model = tf.keras.models.Model(input_layer, output_layer, name='model')
    # Extra 'mean_input' metric that the tests look for in summaries.
    model.add_metric(
        tf.reduce_mean(input_layer), name='mean_input', aggregation='mean')
    model.optimizer = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9)
    if use_float16:
      model.optimizer = tf.keras.mixed_precision.LossScaleOptimizer(
          model.optimizer)
    return model, sub_model
  return _model_fn
def metric_fn():
  """Gets a tf.keras metric object."""
  # A new CategoricalAccuracy instance is created on every call.
  return tf.keras.metrics.CategoricalAccuracy(name='accuracy', dtype=tf.float32)
def summaries_with_matching_keyword(keyword, summary_dir):
  """Yields summary protos matching given keyword from event file."""
  # NOTE(review): only the most recent events file is scanned
  # (event_paths[-1]), not every file in the directory — confirm intent.
  event_paths = tf.io.gfile.glob(os.path.join(summary_dir, 'events*'))
  for event in tf.compat.v1.train.summary_iterator(event_paths[-1]):
    if event.summary is not None:
      for value in event.summary.value:
        if keyword in value.tag:
          # Logged at ERROR level, presumably so matches always show up in
          # test logs regardless of verbosity — confirm.
          logging.error(event)
          yield event.summary
def check_eventfile_for_keyword(keyword, summary_dir):
  """Returns True iff some summary under `summary_dir` matches `keyword`."""
  for _ in summaries_with_matching_keyword(keyword, summary_dir):
    return True
  return False
class RecordingCallback(tf.keras.callbacks.Callback):
  """Keras callback recording every hook invocation for later inspection."""

  def __init__(self):
    # Each list collects (index, logs) tuples in the order the hook fired.
    self.batch_begin = []
    self.batch_end = []
    self.epoch_begin = []
    self.epoch_end = []

  def on_batch_begin(self, batch, logs=None):
    """Records the (batch, logs) pair passed at batch start."""
    self.batch_begin.append((batch, logs))

  def on_batch_end(self, batch, logs=None):
    """Records the (batch, logs) pair passed at batch end."""
    self.batch_end.append((batch, logs))

  def on_epoch_begin(self, epoch, logs=None):
    """Records the (epoch, logs) pair passed at epoch start."""
    self.epoch_begin.append((epoch, logs))

  def on_epoch_end(self, epoch, logs=None):
    """Records the (epoch, logs) pair passed at epoch end."""
    self.epoch_end.append((epoch, logs))
class ModelTrainingUtilsTest(tf.test.TestCase, parameterized.TestCase):
  """Distribution-strategy-parameterized tests for run_customized_training_loop."""
  def setUp(self):
    super(ModelTrainingUtilsTest, self).setUp()
    self._model_fn = create_model_fn(input_shape=[128], num_classes=3)
  # Shared driver: 2 epochs x 20 steps over fake data using self._model_fn.
  @flagsaver.flagsaver
  def run_training(self, strategy, model_dir, steps_per_loop, run_eagerly):
    input_fn = create_fake_data_input_fn(
        batch_size=8, features_shape=[128], num_classes=3)
    model_training_utils.run_customized_training_loop(
        strategy=strategy,
        model_fn=self._model_fn,
        loss_fn=tf.keras.losses.categorical_crossentropy,
        model_dir=model_dir,
        steps_per_epoch=20,
        steps_per_loop=steps_per_loop,
        epochs=2,
        train_input_fn=input_fn,
        eval_input_fn=input_fn,
        eval_steps=10,
        init_checkpoint=None,
        sub_model_export_name='my_submodel_name',
        metric_fn=metric_fn,
        custom_callbacks=None,
        run_eagerly=run_eagerly)
  @combinations.generate(eager_strategy_combinations())
  def test_train_eager_single_step(self, distribution):
    model_dir = self.create_tempdir().full_path
    # TPU strategies are expected to reject eager single-step training.
    if isinstance(
        distribution,
        (tf.distribute.TPUStrategy, tf.distribute.experimental.TPUStrategy)):
      with self.assertRaises(ValueError):
        self.run_training(
            distribution, model_dir, steps_per_loop=1, run_eagerly=True)
    else:
      self.run_training(
          distribution, model_dir, steps_per_loop=1, run_eagerly=True)
  @combinations.generate(eager_gpu_strategy_combinations())
  def test_train_eager_mixed_precision(self, distribution):
    model_dir = self.create_tempdir().full_path
    tf.keras.mixed_precision.set_global_policy('mixed_float16')
    # Rebuild the model factory with a loss-scaled optimizer for float16.
    self._model_fn = create_model_fn(
        input_shape=[128], num_classes=3, use_float16=True)
    self.run_training(
        distribution, model_dir, steps_per_loop=1, run_eagerly=True)
  @combinations.generate(eager_strategy_combinations())
  def test_train_check_artifacts(self, distribution):
    model_dir = self.create_tempdir().full_path
    self.run_training(
        distribution, model_dir, steps_per_loop=10, run_eagerly=False)
    # Two checkpoints should be saved after two epochs.
    files = map(os.path.basename,
                tf.io.gfile.glob(os.path.join(model_dir, 'ctl_step_*index')))
    self.assertCountEqual(
        ['ctl_step_20.ckpt-1.index', 'ctl_step_40.ckpt-2.index'], files)
    # Three submodel checkpoints should be saved after two epochs (one after
    # each epoch plus one final).
    files = map(
        os.path.basename,
        tf.io.gfile.glob(os.path.join(model_dir, 'my_submodel_name*index')))
    self.assertCountEqual([
        'my_submodel_name.ckpt-3.index',
        'my_submodel_name_step_20.ckpt-1.index',
        'my_submodel_name_step_40.ckpt-2.index'
    ], files)
    self.assertNotEmpty(
        tf.io.gfile.glob(
            os.path.join(model_dir, 'summaries/training_summary*')))
    # Loss and accuracy values should be written into summaries.
    self.assertTrue(
        check_eventfile_for_keyword('loss',
                                    os.path.join(model_dir, 'summaries/train')))
    self.assertTrue(
        check_eventfile_for_keyword('accuracy',
                                    os.path.join(model_dir, 'summaries/train')))
    self.assertTrue(
        check_eventfile_for_keyword('mean_input',
                                    os.path.join(model_dir, 'summaries/train')))
    self.assertTrue(
        check_eventfile_for_keyword('accuracy',
                                    os.path.join(model_dir, 'summaries/eval')))
    self.assertTrue(
        check_eventfile_for_keyword('mean_input',
                                    os.path.join(model_dir, 'summaries/eval')))
  @combinations.generate(eager_strategy_combinations())
  def test_train_check_callbacks(self, distribution):
    model_dir = self.create_tempdir().full_path
    callback = RecordingCallback()
    callbacks = [callback]
    input_fn = create_fake_data_input_fn(
        batch_size=8, features_shape=[128], num_classes=3)
    model_training_utils.run_customized_training_loop(
        strategy=distribution,
        model_fn=self._model_fn,
        loss_fn=tf.keras.losses.categorical_crossentropy,
        model_dir=model_dir,
        steps_per_epoch=20,
        num_eval_per_epoch=4,
        steps_per_loop=10,
        epochs=2,
        train_input_fn=input_fn,
        eval_input_fn=input_fn,
        eval_steps=10,
        init_checkpoint=None,
        metric_fn=metric_fn,
        custom_callbacks=callbacks,
        run_eagerly=False)
    self.assertEqual(callback.epoch_begin, [(1, {}), (2, {})])
    epoch_ends, epoch_end_infos = zip(*callback.epoch_end)
    self.assertEqual(list(epoch_ends), [1, 2, 2])
    for info in epoch_end_infos:
      self.assertIn('accuracy', info)
    # With 20 steps/epoch and 4 evals/epoch, batch hooks fire every 5 steps.
    self.assertEqual(callback.batch_begin, [(0, {}), (5, {}), (10, {}),
                                            (15, {}), (20, {}), (25, {}),
                                            (30, {}), (35, {})])
    batch_ends, batch_end_infos = zip(*callback.batch_end)
    self.assertEqual(list(batch_ends), [4, 9, 14, 19, 24, 29, 34, 39])
    for info in batch_end_infos:
      self.assertIn('loss', info)
  @combinations.generate(
      combinations.combine(
          distribution=[
              strategy_combinations.one_device_strategy_gpu,
          ],))
  def test_train_check_artifacts_non_chief(self, distribution):
    # We shouldn't export artifacts on non-chief workers. Since there's no easy
    # way to test with real MultiWorkerMirroredStrategy, we patch the strategy
    # to make it as if it's MultiWorkerMirroredStrategy on non-chief workers.
    extended = distribution.extended
    with mock.patch.object(extended.__class__, 'should_checkpoint',
                           new_callable=mock.PropertyMock, return_value=False), \
         mock.patch.object(extended.__class__, 'should_save_summary',
                           new_callable=mock.PropertyMock, return_value=False):
      model_dir = self.create_tempdir().full_path
      self.run_training(
          distribution, model_dir, steps_per_loop=10, run_eagerly=False)
    self.assertEmpty(tf.io.gfile.listdir(model_dir))
if __name__ == '__main__':
  # Delegate to the TensorFlow test runner when executed directly.
  tf.test.main()
| [
"[email protected]"
] | |
f05afaefedd21c8a8362d23218c7eb4c9d7ffa0f | 1ffc17893d9e15fd939628bbc41c3d2633713ebd | /docs/tests/test_documentation_tutorial.py | d607bf9a8a7c3372aaf0f4fa1cdc37a04f40be05 | [
"Apache-2.0"
] | permissive | xadupre/sklearn-onnx | 646e8a158cdded725064964494f0f8a760630aa8 | b05e4864cedbf4f2a9e6c003781d1db8b53264ac | refs/heads/master | 2023-09-01T15:58:38.112315 | 2022-12-21T01:59:45 | 2022-12-21T01:59:45 | 382,323,831 | 0 | 2 | Apache-2.0 | 2023-01-04T13:41:33 | 2021-07-02T11:22:00 | Python | UTF-8 | Python | false | false | 3,935 | py | # SPDX-License-Identifier: Apache-2.0
"""
Tests examples from the documentation.
"""
import importlib
import importlib.util
import os
import subprocess
import sys
import unittest
def import_source(module_file_path, module_name):
    """Imports a module from an explicit file path.

    :param module_file_path: path of the ``.py`` file to load
    :param module_name: name the module is registered under
    :return: the executed module object
    :raises FileNotFoundError: if the file does not exist or no import
        spec can be built for it
    """
    if not os.path.exists(module_file_path):
        raise FileNotFoundError(module_file_path)
    module_spec = importlib.util.spec_from_file_location(
        module_name, module_file_path)
    if module_spec is None:
        raise FileNotFoundError(
            "Unable to find '{}' in '{}'.".format(
                module_name, module_file_path))
    module = importlib.util.module_from_spec(module_spec)
    # Bug fix: exec_module() returns None, and the previous code returned
    # that None, so callers could never receive the loaded module.
    module_spec.loader.exec_module(module)
    return module
class TestDocumentationTutorial(unittest.TestCase):
    """Runs every docs/tutorial/plot_*.py example; fails on unexpected errors."""
    def test_documentation_tutorial(self):
        this = os.path.abspath(os.path.dirname(__file__))
        fold = os.path.normpath(os.path.join(this, '..', 'tutorial'))
        found = os.listdir(fold)
        tested = 0
        for name in found:
            if name.startswith("plot_") and name.endswith(".py"):
                print("run %r" % name)
                try:
                    # NOTE(review): import_source is handed the tutorial
                    # *directory*, not the example's file path, so this
                    # in-process import presumably always raises and falls
                    # through to the subprocess branch — confirm.
                    mod = import_source(fold, os.path.splitext(name)[0])
                    assert mod is not None
                except FileNotFoundError:
                    # try another way
                    # Run the example in a fresh interpreter and inspect
                    # stderr for known-benign failure signatures.
                    cmds = [sys.executable, "-u",
                            os.path.join(fold, name)]
                    p = subprocess.Popen(
                        cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    res = p.communicate()
                    out, err = res
                    st = err.decode('ascii', errors='ignore')
                    if len(st) > 0 and 'Traceback' in st:
                        if "No such file or directory: 'dot'" in st:
                            # dot not installed, this part
                            # is tested in onnx framework
                            pass
                        elif '"dot" not found in path.' in st:
                            # dot not installed, this part
                            # is tested in onnx framework
                            pass
                        elif ("cannot import name 'LightGbmModelContainer' "
                              "from 'onnxmltools.convert.common."
                              "_container'") in st:
                            # onnxmltools not recent enough
                            pass
                        elif ('Please fix either the inputs or '
                              'the model.') in st:
                            # onnxruntime datasets changed in master branch,
                            # still the same in released version on pypi
                            pass
                        elif ('Current official support for domain ai.onnx '
                              'is till opset 12.') in st:
                            # one example is using opset 13 but onnxruntime
                            # only support up to opset 12.
                            pass
                        elif "'str' object has no attribute 'decode'" in st:
                            # unstable bug in scikit-learn<0.24
                            pass
                        elif ("This method should be overwritten for "
                              "operator") in st:
                            # raised by old version of packages
                            # used in the documentation
                            pass
                        else:
                            raise RuntimeError(
                                "Example '{}' (cmd: {} - exec_prefix='{}') "
                                "failed due to\n{}"
                                "".format(name, cmds, sys.exec_prefix, st))
                tested += 1
        if tested == 0:
            raise RuntimeError("No example was tested.")
if __name__ == "__main__":
    # Allow running this test module directly, outside a test runner.
    unittest.main()
| [
"[email protected]"
] | |
49c6cbd957b72bbe58f4d43230aeb3420ad6a399 | fad392b7b1533103a0ddcc18e059fcd2e85c0fda | /install/px4_msgs/lib/python3.6/site-packages/px4_msgs/msg/_ulog_stream_ack.py | 70bef6edd37055221ffb8d9a6fa0096b1b9e0d97 | [] | no_license | adamdai/px4_ros_com_ros2 | bee6ef27559a3a157d10c250a45818a5c75f2eff | bcd7a1bd13c318d69994a64215f256b9ec7ae2bb | refs/heads/master | 2023-07-24T18:09:24.817561 | 2021-08-23T21:47:18 | 2021-08-23T21:47:18 | 399,255,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | /home/navlab-tx2-4/px4_ros_com_ros2/build/px4_msgs/rosidl_generator_py/px4_msgs/msg/_ulog_stream_ack.py | [
"[email protected]"
] | |
d9918150ac573b71d76f30a0003d5f85cfd22438 | b76f7cd15857016c9b7d7a444488f0e122d90109 | /dongyeop/2020.07.29.py | cf15c73be6e0f913cef857e2272ff3985ff1f911 | [] | no_license | eunjin917/Summer-Algorithm-Merge | 705e5a8837898a27911d8270799a37e6360262f5 | d2e11873dac7745e8d662c3d7b4ff34570e1286d | refs/heads/master | 2023-03-23T22:02:18.957706 | 2020-08-29T14:00:50 | 2020-08-29T14:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | #개인 퀴즈 서울에서 김서방 찾기
def solution(seoul):
    """Return where 'Kim' sits in the list, as a Korean sentence."""
    position = seoul.index('Kim')
    return "김서방은 {}에 있다".format(position)
| [
"[email protected]"
] | |
25eb28da4c15af658689383ec67271d21e30711e | 2e145222a18d4509d937951f5cec4df0e26ee86f | /vas/sqlfire/AgentInstances.py | c7014e3358ef52496bfff95762d437cb06c53a4c | [
"Apache-2.0"
] | permissive | vdreamakitex/vas-python-api | 7627b7e3fcf76c16b1ea8b9fb670fdb708eff083 | ce7148a2044863e078e78b47abbaafc426f732ee | refs/heads/master | 2021-01-18T05:13:25.459916 | 2012-11-05T09:58:45 | 2012-11-05T09:58:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,711 | py | # vFabric Administration Server API
# Copyright (c) 2012 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from vas.shared.Instance import Instance
from vas.shared.MutableCollection import MutableCollection
class AgentInstances(MutableCollection):
    """Collection used to enumerate, create, and delete agent instances

    :ivar `vas.shared.Security.Security` security: The resource's security
    """

    def __init__(self, client, location):
        super(AgentInstances, self).__init__(client, location, 'agent-group-instances', AgentInstance)

    def create(self, installation, name, jvm_options=None):
        """Creates a new agent instance

        :param `vas.sqlfire.Installations.Installation` installation: The installation the new instance uses
        :param str name: The name of the new instance
        :param list jvm_options: Optional JVM options passed to the agent's JVM when it starts
        :rtype: :class:`vas.sqlfire.AgentInstances.AgentInstance`
        :return: The newly created agent instance
        """
        payload = {'name': name, 'installation': installation._location}
        if jvm_options is not None:
            payload['jvm-options'] = jvm_options
        return self._create(payload, 'agent-group-instance')
class AgentInstance(Instance):
    """A single SQLFire agent instance

    :ivar `vas.sqlfire.Groups.Group` group: The group that contains this instance
    :ivar `vas.sqlfire.Installations.Installation` installation: The installation this instance uses
    :ivar list jvm_options: JVM options passed to the agent's JVM when it is started
    :ivar `vas.sqlfire.AgentLiveConfigurations.AgentLiveConfigurations` live_configurations: The instance's live
            configurations
    :ivar str name: The instance's name
    :ivar list node_instances: The instance's individual node instances
    :ivar `vas.sqlfire.AgentPendingConfigurations.AgentPendingConfigurations` pending_configurations: The instance's
            pending configurations
    :ivar `vas.shared.Security.Security` security: The resource's security
    :ivar str state: The state of the resource on the server; one of
            ``STARTING``, ``STARTED``, ``STOPPING`` or ``STOPPED``
    """

    @property
    def jvm_options(self):
        return self.__jvm_options

    def __init__(self, client, location):
        super(AgentInstance, self).__init__(client, location, Group, Installation, AgentLiveConfigurations,
                AgentPendingConfigurations, AgentNodeInstance, 'agent-node-instance')

    def reload(self):
        """Reloads the agent instance's details from the server"""
        super(AgentInstance, self).reload()
        self.__jvm_options = self._details['jvm-options']

    def update(self, installation=None, jvm_options=None):
        """Updates the instance

        :param `vas.sqlfire.Installations.Installation` installation: If given, the installation the instance
                should use; when omitted the configuration is unchanged
        :param list jvm_options: If not `None`, the JVM options passed to the agent's JVM at start-up;
                when omitted the configuration is unchanged
        """
        payload = {}
        if jvm_options is not None:
            payload['jvm-options'] = jvm_options
        if installation:
            payload['installation'] = installation._location
        self._client.post(self._location, payload)
        self.reload()

    def __str__(self):
        return "<{} name={} jvm_options={}>".format(self.__class__, self.name, self.__jvm_options)
from vas.sqlfire.AgentLiveConfigurations import AgentLiveConfigurations
from vas.sqlfire.AgentNodeInstances import AgentNodeInstance
from vas.sqlfire.AgentPendingConfigurations import AgentPendingConfigurations
from vas.sqlfire.Groups import Group
from vas.sqlfire.Installations import Installation
| [
"[email protected]"
] | |
205101325b29051add4fec00ed7a7ca59766cd56 | f1e7c31d22ee90e5e3705352170388c09b7470d4 | /day2/decorator4.py | 69c18443a573abae91c47ec9c78ea8ce44331c52 | [] | no_license | taizilinger123/apple | d0b58eb184d4a7c5aec674e914fa67139d2419ca | 285c10eedfa0863ad6d51efb9dea5c7d0ae540a6 | refs/heads/master | 2020-03-28T07:54:50.118668 | 2018-09-09T05:37:12 | 2018-09-09T05:37:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | import time
def timer(func): #timer(test1) func=test1
    """Decorator that prints how long each call to ``func`` takes."""
    import functools

    @functools.wraps(func)  # keep func's name/docstring on the wrapper
    def deco(*args,**kwargs):
        start_time=time.time()
        # Bug fix: the wrapper used to discard func's return value, so every
        # decorated function effectively returned None. Capture and return it.
        result = func(*args,**kwargs) #run test1()
        stop_time=time.time()
        print("the func run time is %s" %(stop_time-start_time))
        return result
    return deco
@timer #test1=timer(test1)
def test1():
    # Sleep so the printed runtime is visibly about one second.
    time.sleep(1)
    print('in the test1')
@timer # test2 = timer(test2) #deco test2(name) = deco(name)
def test2(name,age):
    # Demonstrates that the decorator forwards positional arguments.
    print("test2:",name,age)
# Run the demo; the decorated call also prints its measured runtime.
test1()
test2("alex",22) | [
"[email protected]"
] | |
9a7fc8e23d82ffb80c491b1e51e26e71ab025f91 | ce18cf6bdb1a85a65a509597b4c0ec046b855186 | /2020年12月/换酒问题.py | 2aae2a3d340b6efc017fbb7a2722d70f2ee89088 | [] | no_license | elssm/leetcode | e12e39faff1da5afb234be08e7d9db85fbee58f8 | a38103d2d93b34bc8bcf09f87c7ea698f99c4e36 | refs/heads/master | 2021-06-11T06:44:44.993905 | 2021-04-28T06:14:23 | 2021-04-28T06:14:23 | 171,072,054 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | class Solution(object):
    def numWaterBottles(self, numBottles, numExchange):
        """Total bottles drinkable when every `numExchange` empty bottles
        can be traded for one full bottle.
        :type numBottles: int
        :type numExchange: int
        :rtype: int
        """
        # Everything bought up front gets drunk.
        res=numBottles
        # Keep trading while the empties on hand cover at least one exchange.
        while numBottles//numExchange:
            res+=numBottles//numExchange
            # Next round's empties: bottles just gained plus the remainder.
            numBottles=numBottles//numExchange+numBottles%numExchange
return res | [
"[email protected]"
] | |
19051aed542c9f4efa751cfbf4908783c1d3215e | dd0d2a4da64200a7bea42d23122384189b900850 | /common_digit.py | 64c95fda4c01ff6bdc0db9231dae66fbd66e46a4 | [] | no_license | gakkistyle/comp9021 | 06ad00b47b7b0135013b014464b5f13530cad49d | 4d0d4a2d719745528bf84ed0dfb88a43f858be7e | refs/heads/master | 2022-09-24T13:10:29.609277 | 2020-06-06T16:54:42 | 2020-06-06T16:54:42 | 270,043,710 | 14 | 7 | null | null | null | null | UTF-8 | Python | false | false | 770 | py | def average_of_digits(digit=None):
if digit == None:
return -1
if len(digit) == 1:
digit_set = set(str(digit[0]))
sum = 0
for e in digit_set:
sum += int(e)
return sum/len(digit_set)
common = []
word_set1 = set(str(digit[0]))
word_set2 = set(str(digit[1]))
for e in word_set1:
if e in word_set2:
common.append(e)
for i in range(2,len(digit)):
word_setn = set(str(digit[i]))
for e in common:
if e not in word_setn:
common.remove(e)
if common == []:
return -1
sum = 0
for e in common:
sum += int(e)
return sum/len(common)
# Ad-hoc check: digits 3, 6 and 8 appear in all four numbers.
print(average_of_digits([3136823,665537857,8363265,35652385]))
| [
"[email protected]"
] | |
b5ce86e5c7206e0947b0bcb912983f891ecd0ce1 | 6df76f8a6fcdf444c3863e3788a2f4b2c539c22c | /django code/p109/p109/asgi.py | dbabed799f89d9fe7ba5076c4cdafffb94c9e6d1 | [] | no_license | basantbhandari/DjangoProjectsAsDocs | 068e4a704fade4a97e6c40353edb0a4299bd9678 | 594dbb560391eaf94bb6db6dc07702d127010b88 | refs/heads/master | 2022-12-18T22:33:23.902228 | 2020-09-22T13:11:01 | 2020-09-22T13:11:01 | 297,651,728 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | """
ASGI config for p109 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at this project's settings module before building the app.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'p109.settings')
# Module-level ASGI callable that ASGI servers look up (see docstring above).
application = get_asgi_application()
| [
"[email protected]"
] | |
c357997cbb60325855930257b942fbd28f13b1d8 | 6130f811f3acfcb9f60939d8752bb035cadaf928 | /examples/adspygoogle/dfp/v201311/order_service/update_orders.py | 24750560181ff34e7c6f4beb06d0f96e0cb10b50 | [
"Apache-2.0"
] | permissive | gsembi/googleads-python-legacy-lib | f2e3197413c23c1192b11e54bf78c087f04a2baa | 9de235ffb65d014dd6ba22be50659c910eca5ae2 | refs/heads/master | 2021-01-23T23:38:28.076465 | 2014-10-14T20:38:20 | 2014-10-14T20:38:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,707 | py | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates the notes of a single order specified by ID.
To determine which orders exist, run get_all_orders.py."""
__author__ = 'Nicholas Chen'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.common import Utils
from adspygoogle.dfp import DfpUtils
# Placeholder: set to the ID of the order to update before running.
ORDER_ID = 'INSERT_ORDER_ID_HERE'
def main(client, order_id):
# Initialize appropriate service.
order_service = client.GetService('OrderService', version='v201311')
# Create statement object to select a single order by an ID.
values = [{
'key': 'orderId',
'value': {
'xsi_type': 'NumberValue',
'value': order_id
}
}]
query = 'WHERE id = :orderId'
statement = DfpUtils.FilterStatement(query, values)
# Get orders by statement.
response = order_service.GetOrdersByStatement(statement.ToStatement())[0]
orders = response.get('results')
if orders:
# Update each local order object by changing its notes.
updated_orders = []
for order in orders:
# Archived orders cannot be updated.
if not Utils.BoolTypeConvert(order['isArchived']):
order['notes'] = 'Spoke to advertiser. All is well.'
updated_orders.append(order)
# Update orders remotely.
orders = order_service.UpdateOrders(updated_orders)
# Display results.
if orders:
for order in orders:
print ('Order with id \'%s\', name \'%s\', advertiser id \'%s\', and '
'notes \'%s\' was updated.'
% (order['id'], order['name'], order['advertiserId'],
order['notes']))
else:
print 'No orders were updated.'
else:
print 'No orders found to update.'
# Entry point when run as a standalone example script.
if __name__ == '__main__':
  # Initialize client object.
  dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
  main(dfp_client, ORDER_ID)
| [
"[email protected]"
] | |
ac8f9f2c4057ab0711a7c50124f020d7bd010361 | 5479cdac56abc115d3b52fbd31814dfd27262da7 | /TaobaoSdk/Request/MarketingPromotionsGetRequest.py | 5cd18fc86d83e7ffabe7feb005d71b73b4f75e98 | [] | no_license | xuyaoqiang-maimiao/TaobaoOpenPythonSDK | d9d2be6a7aa27c02bea699ed5667a9a30bf483ab | c82cde732e443ecb03cfeac07843e884e5b2167c | refs/heads/master | 2021-01-18T07:49:57.984245 | 2012-08-21T07:31:10 | 2012-08-21T07:31:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,100 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set ts=4 sts=4 sw=4 et:
## @brief 根据商品ID查询卖家使用该第三方工具对商品设置的所有优惠策略
# @author [email protected]
# @date 2012-08-09 12:36:54
# @version: 0.0.0
import os
import sys
import time
def __getCurrentPath():
    # Directory containing this file, resolved through any symlinks.
    this_file = os.path.realpath(__file__)
    return os.path.normpath(os.path.join(this_file, os.path.pardir))
# Put the SDK's parent directory on sys.path so sibling packages import.
__modulePath = os.path.join(__getCurrentPath(), os.path.pardir)
__modulePath = os.path.normpath(__modulePath)
if __modulePath not in sys.path:
    sys.path.insert(0, __modulePath)
## @brief Request for the taobao.marketing.promotions.get API: queries all
#  promotion rules the seller has set on one item via this third-party tool.
#  (Comments condensed and translated from the original Chinese doxygen blocks.)
class MarketingPromotionsGetRequest(object):
    def __init__(self):
        # NOTE(review): super(self.__class__, ...) recurses forever if this
        # class is ever subclassed; super(MarketingPromotionsGetRequest, self)
        # would be safer — confirm before changing.
        super(self.__class__, self).__init__()
        # API method name.
        self.method = "taobao.marketing.promotions.get"
        # Request timestamp; defaults to object-creation time.
        self.timestamp = int(time.time())
        # Required: list of Promotion fields to return (e.g. promotion_id,
        # promotion_title, item_id, status, tag_id).
        self.fields = None
        # Required: numeric item ID whose third-party promotion rules are
        # queried.
        self.num_iid = None
        # Optional: promotion status filter, ACTIVE or UNACTIVE; any other
        # value (or None) queries all.
        self.status = None
        # Optional: tag ID.
        self.tag_id = None
| [
"[email protected]"
] | |
5f8dc7f6ee3faba3e195cb8bd5c54cebab04e678 | 6eb282bbe4d43273b9e9cc8c0fa79400888cba9e | /tests/test_routes_image.py | e98c7912a9f48cdf2e6d390350ecba8cd232dc51 | [
"MIT"
] | permissive | jdelgad/memegen | d4300c707c5fee59aa2f5c5e0e8e606d699255ef | 0de8e5d6bfae75843bbe0d149c7796cb044e24a7 | refs/heads/master | 2020-12-25T08:51:20.523408 | 2016-06-10T13:50:46 | 2016-06-10T13:50:46 | 60,996,119 | 1 | 0 | null | 2016-06-13T00:30:19 | 2016-06-13T00:30:19 | null | UTF-8 | Python | false | false | 7,191 | py | # pylint: disable=unused-variable,misplaced-comparison-constant,expression-not-assigned
import os
import pytest
from expecter import expect
from .conftest import load
# repository layout anchors: tests/ -> project root -> data/images,
# where generated meme images (and the "latest" preview) are written
TESTS = os.path.dirname(__file__)
ROOT = os.path.dirname(TESTS)
IMAGES = os.path.join(ROOT, "data", "images")
LATEST = os.path.join(IMAGES, "latest.jpg")
def describe_get():
    """pytest-describe suite for GET requests on the meme image routes:
    rendering, hidden (encoded) URLs, custom styles/fonts, the "latest"
    image, redirects, and error responses.

    NOTE: the nested function names are the test ids collected by the
    pytest-describe plugin, so they must not be renamed.  `client` is a
    fixture from conftest; `load` parses a response body.
    """
    # images rendered from plain, visible URL text
    def describe_visible():
        def with_nominal_text(client):
            path = os.path.join(IMAGES, 'iw', 'hello', 'world.jpg')
            if os.path.exists(path):
                os.remove(path)
            response = client.get("/iw/hello/world.jpg")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
            # the image must have been (re)generated on disk
            assert os.path.isfile(path)
        def with_only_1_line(client):
            response = client.get("/iw/hello.jpg")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
        @pytest.mark.xfail(os.name == 'nt', reason="Windows has a path limit")
        def with_lots_of_text(client):
            top = "-".join(["hello"] * 20)
            bottom = "-".join(["world"] * 20)
            response = client.get("/iw/" + top + "/" + bottom + ".jpg")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
    # images requested through the base64-style "hidden" URL form
    def describe_hidden():
        def when_jpg(client):
            response = client.get("/_aXcJaGVsbG8vd29ybGQJ.jpg")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
    # the ?alt= query parameter selects an alternate template style
    def describe_custom_style():
        def when_provided(client):
            response = client.get("/sad-biden/hello.jpg?alt=scowl")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
        def it_redirects_to_lose_alt_when_default_style(client):
            response = client.get("/sad-biden/hello.jpg?alt=default")
            assert 302 == response.status_code
            assert '<a href="/sad-biden/hello.jpg">' in \
                load(response, as_json=False)
        def it_redirects_to_lose_alt_when_unknown_style(client):
            response = client.get("/sad-biden/hello.jpg?alt=__unknown__")
            assert 302 == response.status_code
            assert '<a href="/sad-biden/hello.jpg">' in \
                load(response, as_json=False)
        def it_keeps_alt_after_template_redirect(client):
            response = client.get("/sad-joe/hello.jpg?alt=scowl")
            assert 302 == response.status_code
            assert '<a href="/sad-biden/hello.jpg?alt=scowl">' in \
                load(response, as_json=False)
        def it_keeps_alt_after_text_redirect(client):
            response = client.get("/sad-biden.jpg?alt=scowl")
            assert 302 == response.status_code
            assert '-vote.jpg?alt=scowl">' in \
                load(response, as_json=False)
        # ?alt= may also be a URL pointing at a background image
        def when_url(client):
            url = "http://www.gstatic.com/webp/gallery/1.jpg"
            response = client.get("/sad-biden/hello.jpg?alt=" + url)
            expect(response.status_code) == 200
            expect(response.mimetype) == 'image/jpeg'
        def it_returns_an_error_with_non_image_urls(client):
            url = "http://example.com"
            response = client.get("/sad-biden/hello.jpg?alt=" + url)
            expect(response.status_code) == 415
        def it_redirects_to_lose_alt_when_unknown_url(client):
            url = "http://example.com/not/a/real/image.jpg"
            response = client.get("/sad-biden/hello.jpg?alt=" + url)
            expect(response.status_code) == 302
            expect(load(response, as_json=False)).contains(
                '<a href="/sad-biden/hello.jpg">')
        def it_redirects_to_lose_alt_when_bad_url(client):
            url = "http:invalid"
            response = client.get("/sad-biden/hello.jpg?alt=" + url)
            expect(response.status_code) == 302
            expect(load(response, as_json=False)).contains(
                '<a href="/sad-biden/hello.jpg">')
    # the ?font= query parameter selects the rendering font
    def describe_custom_font():
        def when_provided(client):
            response = client.get("/iw/hello.jpg?font=impact")
            expect(response.status_code) == 200
            expect(response.mimetype) == 'image/jpeg'
        def it_redirects_on_unknown_fonts(client):
            response = client.get("/iw/hello.jpg?font=__unknown__")
            expect(response.status_code) == 302
            expect(load(response, as_json=False)).contains(
                '<a href="/iw/hello.jpg">')
    # /latest.jpg serves the most recently generated image
    def describe_latest():
        def when_existing(client):
            open(LATEST, 'w').close()  # force the file to exist
            response = client.get("/latest.jpg")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
        def when_missing(client):
            try:
                os.remove(LATEST)
            except FileNotFoundError:
                pass
            response = client.get("/latest.jpg")
            assert 200 == response.status_code
            # a PNG placeholder is served when no latest image exists
            assert 'image/png' == response.mimetype
    # URL normalization: templates, aliases and extensions redirect
    def describe_redirects():
        def when_missing_dashes(client):
            response = client.get("/iw/HelloThere_World/How-areYOU.jpg")
            assert 302 == response.status_code
            assert '<a href="/iw/hello-there-world/how-are-you.jpg">' in \
                load(response, as_json=False)
        def when_no_text(client):
            response = client.get("/live.jpg")
            assert 302 == response.status_code
            assert '<a href="/live/_/do-it-live!.jpg">' in \
                load(response, as_json=False)
        def when_aliased_template(client):
            response = client.get("/insanity-wolf/hello/world.jpg")
            assert 302 == response.status_code
            assert '<a href="/iw/hello/world.jpg">' in \
                load(response, as_json=False)
        def when_jpeg_extension_without_text(client):
            response = client.get("/iw.jpeg")
            assert 302 == response.status_code
            assert '<a href="/iw.jpg">' in \
                load(response, as_json=False)
        def when_jpeg_extension_with_text(client):
            response = client.get("/iw/hello/world.jpeg")
            assert 302 == response.status_code
            assert '<a href="/iw/hello/world.jpg">' in \
                load(response, as_json=False)
    # error handling for unknown templates and oversized filenames
    def describe_errors():
        def when_unknown_template(client):
            response = client.get("/make/sudo/give.me.jpg")
            assert 200 == response.status_code
            assert 'image/jpeg' == response.mimetype
            # unit tests ensure this is a placeholder image
        @pytest.mark.xfail(os.name == 'nt', reason="Windows has a path limit")
        def when_too_much_text_for_a_filename(client):
            top = "hello"
            bottom = "-".join(["world"] * 50)
            response = client.get("/iw/" + top + "/" + bottom + ".jpg")
            assert 414 == response.status_code
            assert {
                'message': "Filename too long."
            } == load(response)
| [
"[email protected]"
] | |
b5e23c5c655c526644f144779516ce18dd7a353e | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/97/usersdata/194/54823/submittedfiles/lecker.py | f56acb6233287f3cbe81bfd2b3aa0164580158d3 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 824 | py | # -*- coding: utf-8 -*-
from __future__ import division
def lecker(lista):
    """Return True when the list contains exactly one peak.

    A peak is an element strictly greater than its neighbour(s); the
    first and last elements compare only against their single
    neighbour.  Empty and single-element lists yield False, matching
    the original behaviour.

    BUGFIX: the original iterated over range(0, len(lista)-1, 1), which
    made the ``i == len(lista)-1`` branch unreachable, so a peak at the
    last position was never counted.
    """
    cont = 0
    n = len(lista)
    for i in range(n):
        if i == 0:
            # n > 1 guard keeps a single-element list returning False
            if n > 1 and lista[i] > lista[i + 1]:
                cont = cont + 1
        elif i == n - 1:
            if lista[i] > lista[i - 1]:
                cont = cont + 1
        else:
            if lista[i] > lista[i + 1] and lista[i] > lista[i - 1]:
                cont = cont + 1
    return cont == 1
# Driver: read two lists of n floats from the user and report, for each
# list, 'S' when it has exactly one peak and 'N' otherwise.
a=[]
b=[]
n=int(input('quantidade de elementos:'))
for i in range(1,n+1,1):
    valor=float(input('elementos da lista 1:'))
    a.append(valor)
for i in range(1,n+1,1):
    valor=float(input('elementos da lista 2:'))
    b.append(valor)
# verdict for the first list
if lecker(a):
    print('S')
else:
    print('N')
# verdict for the second list ('N' branch continues on the next line)
if lecker(b):
    print('S')
else:
print('N') | [
"[email protected]"
] | |
8aac474ed41ab941cc830699ba847bd56a96843a | 7698a74a06e10dd5e1f27e6bd9f9b2a5cda1c5fb | /zzz.masterscriptsTEB_GIST/for005md.py | 5c2e1af3abcf60dbbdff817943ffd3a973318e9a | [] | no_license | kingbo2008/teb_scripts_programs | ef20b24fe8982046397d3659b68f0ad70e9b6b8b | 5fd9d60c28ceb5c7827f1bd94b1b8fdecf74944e | refs/heads/master | 2023-02-11T00:57:59.347144 | 2021-01-07T17:42:11 | 2021-01-07T17:42:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,330 | py | import sys
import copy
import math
import matplotlib
import scipy
import numpy
import pylab
def read_MD_outfile(filename,totE, kE, pE, time, temp, pres):
    """Parse one MD (Amber-style) ``.out`` file and append every NSTEP
    record found in the "4.  RESULTS" section to the given lists.

    The six list arguments are mutated in place and also returned, so
    data from several files can be accumulated across calls.

    filename      -- path of the MD output file
    totE, kE, pE  -- total / kinetic / potential energy
    time          -- simulation time in ns (file stores ps; converted)
    temp, pres    -- temperature and pressure
    """
    result_flag = False
    # 'with' guarantees the handle is closed even if parsing raises
    with open(filename, 'r') as fileh:
        for line in fileh:
            line = line.strip('\n')
            splitline = line.split()
            # only lines between the RESULTS header and the AVERAGES
            # footer carry per-step data
            if "4.  RESULTS" in line:
                result_flag = True
            elif "A V E R A G E S   O V E R" in line:
                result_flag = False
            if result_flag:
                if "NSTEP" in line:
                    # BUGFIX: need 12 tokens to index splitline[11]
                    # (the original checked < 11 and could IndexError)
                    if len(splitline) < 12:
                        continue
                    time.append(float(splitline[5]) / 1000.0)  # ps -> ns
                    temp.append(float(splitline[8]))
                    pres.append(float(splitline[11]))
                if "Etot" in line:
                    # BUGFIX: need 9 tokens to index splitline[8]
                    if len(splitline) < 9:
                        continue
                    totE.append(float(splitline[2]))
                    kE.append(float(splitline[5]))
                    pE.append(float(splitline[8]))
    return totE, kE, pE, time, temp, pres
def main():
    """Command-line entry point (Python 2).

    Usage: python for005md.py <filelist> <plot.png>
    Reads each MD ``.out`` file listed in <filelist>, accumulates the
    time series with read_MD_outfile(), and saves a 5-panel matplotlib
    figure (totE/kE/pE/temp/pres vs. time) to <plot.png>.
    """
    if len(sys.argv) != 3:
        print "error: this program takes 2 inputs:"
        print "      (1) filename that contains a list of md output files. If it doesn't exist do sth like this: "
        print "              ls 5609039/*.out  > tmpout.txt"
        print "      (2) filename for png plot"
        print "      This should be done automatically as part of 005md.checkMDrun.csh"
        exit()
    filelist = sys.argv[1]
    filenamepng = sys.argv[2]
    # read in file with a list of mdout files.
    print "filelist containing MD.out files: " + filelist
    print "Plot will be saved as: " + filenamepng
    filenamelist = []
    fileh = open(filelist,'r')
    for line in fileh:
        tfile = line.strip("\n")
        splitline = tfile.split(".")
        # every listed file must carry the .out extension
        if (splitline[-1] != "out"):
            print "Error. %s is not a .out file" % tfile
            exit()
        filenamelist.append(tfile)
    fileh.close()
    # accumulate the series across all files, in listed order
    totE = []
    kE = []
    pE = []
    time = []
    temp = []
    pres = []
    for filename in filenamelist:
        print "reading info from file: " + filename
        totE, kE, pE, time, temp, pres = read_MD_outfile(filename,totE, kE, pE, time, temp, pres)
    # Plot with 5 panels; tabs [x_left,y_left,x_up,y_up].
    subpanel = [ [0.2,0.1,0.3,0.2], [0.6,0.1,0.3,0.2], [0.2,0.4,0.3,0.2], [0.6,0.4,0.3,0.2], [0.2,0.7,0.3,0.2], [0.6,0.7,0.3,0.2] ]
    descname = ["totE", "kE", "pE", "temp", "pres"]
    fig = pylab.figure(figsize=(8,8))
    for i,desc in enumerate([totE, kE, pE, temp, pres]):
        #print len(desc), len(totE), len(time)
        axis = fig.add_axes(subpanel[i])
        #lim_min = min(math.floor(Ymin),math.floor(Xmin))
        # lim_max = max(math.ceil(Ymax), math.ceil(Xmax))
        im = axis.plot(time,desc,'k-') #,[0,100],[0,100],'--')
        axis.set_xlabel("time (ns)")
        axis.set_ylabel(descname[i])
        #axis.set_title('file='+xyfilename)
        #axis.set_ylim(lim_min, lim_max)
        #axis.set_xlim(lim_min, lim_max)
    #fig.savefig('md_analysis_fig.png',dpi=600)
    fig.savefig(filenamepng,dpi=600)
main()
| [
"[email protected]"
] | |
7bfaaf0db70cf0354f13f8bb62ab277d818e5da2 | 972dff80b81c78082e9022084ef75e954b204471 | /gui/system/alertmods/volume_status.py | 44a265cdb00c201d6b3499a3c0ac6c890b8daed5 | [] | no_license | TomHoenderdos/freenas | 34bbf9957ed5904f1296af5a57eedc95e04f1074 | 83ae0c1805ea7e57b70f436810eca3b9cc0c9825 | refs/heads/master | 2021-01-17T09:29:19.668079 | 2014-01-28T01:58:23 | 2014-01-28T01:58:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,124 | py | import re
import subprocess
from django.utils.translation import ugettext_lazy as _
from freenasUI.storage.models import Volume
from freenasUI.system.alert import alertPlugins, Alert, BaseAlert
class VolumeStatusAlert(BaseAlert):
def on_volume_status_not_healthy(self, vol, status, message):
if message:
return Alert(
Alert.WARN,
_('The volume %(volume)s status is %(status)s:'
' %(message)s') % {
'volume': vol,
'status': status,
'message': message,
}
)
else:
return Alert(
Alert.WARN,
_('The volume %(volume)s status is %(status)s') % {
'volume': vol,
'status': status,
}
)
def volumes_status_enabled(self):
return True
def on_volume_status_degraded(self, vol, status, message):
self.log(self.LOG_CRIT, _('The volume %s status is DEGRADED') % vol)
def run(self):
if not self.volumes_status_enabled():
return
for vol in Volume.objects.filter(vol_fstype__in=['ZFS', 'UFS']):
if not vol.is_decrypted():
continue
status = vol.status
message = ""
if vol.vol_fstype == 'ZFS':
p1 = subprocess.Popen(
["zpool", "status", "-x", vol.vol_name],
stdout=subprocess.PIPE
)
stdout = p1.communicate()[0]
if stdout.find("pool '%s' is healthy" % vol.vol_name) != -1:
status = 'HEALTHY'
else:
reg1 = re.search('^\s*state: (\w+)', stdout, re.M)
if reg1:
status = reg1.group(1)
else:
# The default case doesn't print out anything helpful,
# but instead coredumps ;).
status = 'UNKNOWN'
reg1 = re.search(r'^\s*status: (.+)\n\s*action+:',
stdout, re.S | re.M)
reg2 = re.search(r'^\s*action: ([^:]+)\n\s*\w+:',
stdout, re.S | re.M)
if reg1:
msg = reg1.group(1)
msg = re.sub(r'\s+', ' ', msg)
message += msg
if reg2:
msg = reg2.group(1)
msg = re.sub(r'\s+', ' ', msg)
message += msg
if status == 'HEALTHY':
return [Alert(
Alert.OK, _('The volume %s status is HEALTHY') % (vol, )
)]
elif status == 'DEGRADED':
return [self.on_volume_status_degraded(vol, status, message)]
else:
return [
self.on_volume_status_not_healthy(vol, status, message)
]
alertPlugins.register(VolumeStatusAlert)
| [
"[email protected]"
] | |
a6fa412a4318bdd44745d738c2d2aa91cac8f9d2 | 277b9292d494db49836c93693257ecab87ebeb18 | /ynlu/sdk/evaluation/tests/test_entity_overlapping_ratio.py | 541945abbd95063fa16157907ee1d43443903ffe | [
"MIT"
] | permissive | hsiaoyi0504/yoctol-nlu-py | 90c2df421994006a49a4db7fe6f104d247201fbd | 4cec8d52ba3dd7827bddde152c95e814e533c0f2 | refs/heads/master | 2020-03-23T23:41:40.055683 | 2018-04-19T08:57:42 | 2018-04-19T08:57:42 | 142,249,617 | 0 | 0 | MIT | 2018-07-25T05:00:05 | 2018-07-25T05:00:05 | null | UTF-8 | Python | false | false | 6,265 | py | from unittest import TestCase
from ..entity_overlapping_score import (
single__entity_overlapping_score,
entity_overlapping_score,
)
class OverlappingScoreTestCase(TestCase):
    """Unit tests for single__entity_overlapping_score and
    entity_overlapping_score."""

    def test_single__entity_overlapping_score_different_length(self):
        """A prediction/label length mismatch must raise ValueError."""
        with self.assertRaises(ValueError):
            single__entity_overlapping_score(
                utterance="12",
                entity_prediction=[
                    {"value": "1", "entity": "a"},
                    {"value": "2", "entity": "b"},
                ],
                y_true=["a", "b", "c"],
            )

    def test_single__entity_overlapping_score(self):
        """Table-driven check: each entry is (kwargs, expected score),
        ranging from all-wrong (-1.0) to all-correct (1.0), with
        DONT_CARE positions neither rewarded nor penalized."""
        test_cases = [
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["4", "5", "6"],
                    "wrong_penalty_rate": 2.0,
                },
                -1.0,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["4", "DONT_CARE", "6"],
                    "wrong_penalty_rate": 2.0,
                },
                -0.666666666667,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["4", "2", "6"],
                    "wrong_penalty_rate": 2.0,
                },
                -0.33333333333333,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["DONT_CARE", "DONT_CARE", "DONT_CARE"],
                    "wrong_penalty_rate": 2.0,
                },
                0.0,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "DONT_CARE", "value": "2"},
                        {"entity": "DONT_CARE", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["DONT_CARE", "2", "3"],
                    "wrong_penalty_rate": 2.0,
                },
                0.0,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["DONT_CARE", "2", "3"],
                    "wrong_penalty_rate": 2.0,
                },
                0.6666666666666667,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["5", "2", "3"],
                    "wrong_penalty_rate": 2.0,
                },
                0.3333333333333333,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "DONT_CARE", "value": "1"},
                        {"entity": "DONT_CARE", "value": "2"},
                        {"entity": "DONT_CARE", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["DONT_CARE", "DONT_CARE", "DONT_CARE"],
                    "wrong_penalty_rate": 2.0,
                },
                1.0,
            ),
            (
                {
                    "entity_prediction": [
                        {"entity": "1", "value": "1"},
                        {"entity": "2", "value": "2"},
                        {"entity": "3", "value": "3"},
                    ],
                    "utterance": "123",
                    "y_true": ["1", "2", "3"],
                    "wrong_penalty_rate": 2.0,
                },
                1.0,
            ),
        ]
        for i, test_case in enumerate(test_cases):
            with self.subTest(i=i):
                result = single__entity_overlapping_score(**test_case[0])
                self.assertAlmostEqual(test_case[1], result)

    def test_entity_overlapping_score_different_amount(self):
        """Mismatched numbers of utterances/predictions/labels raise."""
        with self.assertRaises(ValueError):
            entity_overlapping_score(
                utterances=["123", "345"],
                entity_predictions=[[{"a": 1}], [{"b": 2}]],
                y_trues=[["a"], ["b"], ["c"]],
            )

    def test_entity_overlapping_score(self):
        """The batch score is the mean of the per-utterance scores."""
        result = entity_overlapping_score(
            utterances=["123", "123"],
            entity_predictions=[
                [
                    {"entity": "1", "value": "1"},
                    {"entity": "2", "value": "2"},
                    {"entity": "3", "value": "3"},
                ],
                [
                    {"entity": "DONT_CARE", "value": "1"},
                    {"entity": "DONT_CARE", "value": "2"},
                    {"entity": "DONT_CARE", "value": "3"},
                ],
            ],
            y_trues=[
                ["5", "2", "3"],
                ["DONT_CARE", "DONT_CARE", "DONT_CARE"],
            ],
        )
        self.assertAlmostEqual(
            (0.33333333333 + 1.0) / 2,
            result,
        )
| [
"[email protected]"
] | |
7ffcb76ec73333e2ac89d9c1b17839de77716f5e | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/420/usersdata/329/87976/submittedfiles/exe11.py | 715adcb70c57813e5b1796b83f844bcbc85024f3 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | py | # -*- coding: utf-8 -*-
n = int(input("digite um numero com 8 algarismos: "))
soma = 0
while n < 10000000 and n > 9999999:
resto = n % 10
n = (n - resto)/10
soma = soma + resto
print ('%d' % soma)
else:
print("NAO SEI")
| [
"[email protected]"
] | |
85872ca81454d863e57c47043a303a247a75e42d | 2a8abd5d6acdc260aff3639bce35ca1e688869e9 | /telestream_cloud_qc_sdk/telestream_cloud_qc/models/frame_aspect_ratio_test.py | e350d1d1f34c6e4931d4824fe21895777c5735ce | [
"MIT"
] | permissive | Telestream/telestream-cloud-python-sdk | 57dd2f0422c83531e213f48d87bc0c71f58b5872 | ce0ad503299661a0f622661359367173c06889fc | refs/heads/master | 2021-01-18T02:17:44.258254 | 2020-04-09T11:36:07 | 2020-04-09T11:36:07 | 49,494,916 | 0 | 0 | MIT | 2018-01-22T10:07:49 | 2016-01-12T11:10:56 | Python | UTF-8 | Python | false | false | 6,377 | py | # coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_qc.configuration import Configuration
class FrameAspectRatioTest(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # attribute name -> declared OpenAPI type
    openapi_types = {
        'frame_aspect_ratio_numerator': 'int',
        'frame_aspect_ratio_denominator': 'int',
        'reject_on_error': 'bool',
        'checked': 'bool'
    }
    # attribute name -> JSON key used on the wire
    attribute_map = {
        'frame_aspect_ratio_numerator': 'frame_aspect_ratio_numerator',
        'frame_aspect_ratio_denominator': 'frame_aspect_ratio_denominator',
        'reject_on_error': 'reject_on_error',
        'checked': 'checked'
    }
    def __init__(self, frame_aspect_ratio_numerator=None, frame_aspect_ratio_denominator=None, reject_on_error=None, checked=None, local_vars_configuration=None):  # noqa: E501
        """FrameAspectRatioTest - a model defined in OpenAPI"""  # noqa: E501
        # a default Configuration is created when none is supplied
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._frame_aspect_ratio_numerator = None
        self._frame_aspect_ratio_denominator = None
        self._reject_on_error = None
        self._checked = None
        self.discriminator = None
        # only assign attributes that were explicitly provided
        if frame_aspect_ratio_numerator is not None:
            self.frame_aspect_ratio_numerator = frame_aspect_ratio_numerator
        if frame_aspect_ratio_denominator is not None:
            self.frame_aspect_ratio_denominator = frame_aspect_ratio_denominator
        if reject_on_error is not None:
            self.reject_on_error = reject_on_error
        if checked is not None:
            self.checked = checked
    @property
    def frame_aspect_ratio_numerator(self):
        """Gets the frame_aspect_ratio_numerator of this FrameAspectRatioTest.  # noqa: E501
        :return: The frame_aspect_ratio_numerator of this FrameAspectRatioTest.  # noqa: E501
        :rtype: int
        """
        return self._frame_aspect_ratio_numerator
    @frame_aspect_ratio_numerator.setter
    def frame_aspect_ratio_numerator(self, frame_aspect_ratio_numerator):
        """Sets the frame_aspect_ratio_numerator of this FrameAspectRatioTest.
        :param frame_aspect_ratio_numerator: The frame_aspect_ratio_numerator of this FrameAspectRatioTest.  # noqa: E501
        :type: int
        """
        self._frame_aspect_ratio_numerator = frame_aspect_ratio_numerator
    @property
    def frame_aspect_ratio_denominator(self):
        """Gets the frame_aspect_ratio_denominator of this FrameAspectRatioTest.  # noqa: E501
        :return: The frame_aspect_ratio_denominator of this FrameAspectRatioTest.  # noqa: E501
        :rtype: int
        """
        return self._frame_aspect_ratio_denominator
    @frame_aspect_ratio_denominator.setter
    def frame_aspect_ratio_denominator(self, frame_aspect_ratio_denominator):
        """Sets the frame_aspect_ratio_denominator of this FrameAspectRatioTest.
        :param frame_aspect_ratio_denominator: The frame_aspect_ratio_denominator of this FrameAspectRatioTest.  # noqa: E501
        :type: int
        """
        self._frame_aspect_ratio_denominator = frame_aspect_ratio_denominator
    @property
    def reject_on_error(self):
        """Gets the reject_on_error of this FrameAspectRatioTest.  # noqa: E501
        :return: The reject_on_error of this FrameAspectRatioTest.  # noqa: E501
        :rtype: bool
        """
        return self._reject_on_error
    @reject_on_error.setter
    def reject_on_error(self, reject_on_error):
        """Sets the reject_on_error of this FrameAspectRatioTest.
        :param reject_on_error: The reject_on_error of this FrameAspectRatioTest.  # noqa: E501
        :type: bool
        """
        self._reject_on_error = reject_on_error
    @property
    def checked(self):
        """Gets the checked of this FrameAspectRatioTest.  # noqa: E501
        :return: The checked of this FrameAspectRatioTest.  # noqa: E501
        :rtype: bool
        """
        return self._checked
    @checked.setter
    def checked(self, checked):
        """Sets the checked of this FrameAspectRatioTest.
        :param checked: The checked of this FrameAspectRatioTest.  # noqa: E501
        :type: bool
        """
        self._checked = checked
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # recursively serialize nested models, lists and dicts
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, FrameAspectRatioTest):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, FrameAspectRatioTest):
            return True
        return self.to_dict() != other.to_dict()
| [
"[email protected]"
] | |
ed7b5fcf55324e383b99dd8f860e850435b47ada | 0faf534ebb6db6f32279e5bee25b968bd425ce3a | /tests/core/_while/_while.py | b6d827a12289764a394e2ef4beffb7579457bc29 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | PyHDI/veriloggen | e8647cb2d40737d84e31d6b89c5799bab9cbd583 | f2b1b9567150af097eed1b5e79ba2b412854ef43 | refs/heads/develop | 2023-08-09T10:02:35.626403 | 2023-08-09T00:50:14 | 2023-08-09T00:50:14 | 37,813,184 | 282 | 60 | Apache-2.0 | 2023-07-20T03:03:29 | 2015-06-21T15:05:30 | Python | UTF-8 | Python | false | false | 1,032 | py | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
from veriloggen import *
def mkTest():
    """Build a Veriloggen test-bench module exercising the While construct.

    The generated Verilog dumps a VCD, toggles CLK every 5 time units,
    pulses RST, counts to 1024 on posedge CLK, then finishes.
    """
    m = Module('test')
    clk = m.Reg('CLK')
    rst = m.Reg('RST')
    count = m.Reg('count', width=32)
    # initial block: VCD waveform dump setup
    m.Initial(
        Systask('dumpfile', '_while.vcd'),
        Systask('dumpvars', 0, clk, rst, count),
    )
    # initial block: free-running clock
    m.Initial(
        clk(0),
        Forever(clk(Not(clk), ldelay=5)) # forever #5 CLK = ~CLK;
    )
    # initial block: reset pulse, then count 0..1024 on posedge CLK
    m.Initial(
        rst(0),
        Delay(100),
        rst(1),
        Delay(100),
        rst(0),
        Delay(1000),
        count(0),
        While(count < 1024)(
            count(count + 1),
            Event(Posedge(clk))
        ),
        Systask('finish'),
    )
    return m
# emit the generated Verilog source when run as a script
if __name__ == '__main__':
    test = mkTest()
    verilog = test.to_verilog('')
    print(verilog)
| [
"[email protected]"
] | |
4541f9dcb4fab88b6bbf5c77db6b8d07c29b9cc9 | 16ccfb5d13029afde7fb5d54371c97d1866de905 | /corkscrew/version.py | 3f686add9e1a94e02216d00bd7ebc2291ef4da42 | [] | no_license | mattvonrocketstein/corkscrew | b69c32ea78f0bfe948b83a85bb4f60351e560116 | 8c992599e865aee8cfc93900a945ff5248ed1ab2 | refs/heads/master | 2021-01-01T18:42:10.205684 | 2015-11-08T09:55:45 | 2015-11-08T09:55:45 | 2,240,780 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 44 | py | """ corkscrew.version
"""
__version__=0.18 | [
"matthewvonrocketstein@gmail-dot-com"
] | matthewvonrocketstein@gmail-dot-com |
eb599ad48afd47de67a5a38758872173421836a2 | f2a0c0cad8ccc82ac00c7fa9dbf06c5fec96089c | /Student_Management/main/urls.py | b6fe5fc9e31bae12859e560cff9d8544ad9433a3 | [] | no_license | tushargoyal22/Django-Learning | 49bb0c97f6e344dae053a3c913a74c765a9a021b | eb87ac56220d7f0e1e4741cda754547180835713 | refs/heads/master | 2020-12-26T18:12:07.305533 | 2020-04-20T06:22:14 | 2020-04-20T06:22:14 | 237,585,513 | 0 | 0 | null | 2020-06-06T09:08:09 | 2020-02-01T08:31:48 | CSS | UTF-8 | Python | false | false | 497 | py | from django.urls import path
from main import views
urlpatterns = [
path('',views.Index.as_view()),
path('college/<int:pk>' , views.CollegeDetail.as_view(),name='college'),
path('colleges/',views.CollegeList.as_view()),
path('create_college/' , views.CollegeCreate.as_view()),
path('update_college/<int:pk>' , views.CollegeUpdate.as_view()),
path('create_student/' , views.StudentCreate.as_view()),
path('delete_student/<int:pk>' , views.StudentDelete.as_view())
]
| [
"[email protected]"
] | |
e187641d7db47cec739bd694e61860ff1f2d4b26 | a48eaa4419b87c011abdee1eebfd04b469f4417b | /.history/ghostpost/views_20200211120737.py | 0c516f78e19399fa4ac83bde5fc952b2f89adef3 | [] | no_license | Imraj423/ghostpost | 6418d6c9561528ac8c31dd70d8aae7fac4c77cca | 4edc559eb1f9ef0d11aae78e2b1dbd5c4903ddb5 | refs/heads/master | 2021-01-02T13:32:58.032239 | 2020-02-11T23:21:31 | 2020-02-11T23:21:31 | 239,644,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,858 | py |
from django.shortcuts import render
from django.contrib.auth.models import User
from ghostpost.models import ghostPost
from django.shortcuts import render, reverse, HttpResponseRedirect
from ghostpost.forms import addPost
def index(request):
item = ghostPost.objects.all()
return render(request, 'index.html', {'data': item})
def detail(request):
item = ghostPost.objects.all()
return render(request, 'detail.html', {'data': item})
def post_add(request):
html = 'addpost.html'
if request.method == 'POST':
form = addPost(request.POST)
if form.is_valid():
data = form.cleaned_data
ghostPost.objects.create(
message=data['message'],
is_Boast=data['is_Boast']
)
return HttpResponseRedirect(reverse("index"))
form = addPost()
return render(request, html, {'form': form})
def like(request, id):
post = ghostPost.objects.get(id=id)
post.like += 1
post.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def dislike(request, id):
post = ghostPost.objects.get(id=id)
post.like -= 1
post.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def sorted(request):
html = "index.html"
data = ghostPost.objects.all().order_by(
"-like")
return render(request, html, {"data": data})
def sortedt(request):
html = "index.html"
data = ghostPost.objects.all().order_by("-time")
return render(request, html, {"data": data})
def sortedb(request):
html = "index.html"
data = ghostPost.objects.all().order_by("-is_Boast")
return render(request, html, {"data": data})
def sortedb(request):
html = "index.html"
data = ghostPost.objects.all().order_by("-is_Boast=False")
return render(request, html, {"data": data})
| [
"[email protected]"
] | |
652e8748f26f358862132b7fc9300aa65f1f05ec | 3ff9821b1984417a83a75c7d186da9228e13ead9 | /No_0530_Minimum Absolute Difference in BST/minimum_absolute)difference_in_BST_by_inorder_iteration.py | a7418dd3d34f9db81a543e4abdb35916f72c1593 | [
"MIT"
] | permissive | brianchiang-tw/leetcode | fd4df1917daef403c48cb5a3f5834579526ad0c2 | 6978acfb8cb767002cb953d02be68999845425f3 | refs/heads/master | 2023-06-11T00:44:01.423772 | 2023-06-01T03:52:00 | 2023-06-01T03:52:00 | 222,939,709 | 41 | 12 | null | null | null | null | UTF-8 | Python | false | false | 2,094 | py | '''
Description:
Given a binary search tree with non-negative values, find the minimum absolute difference between values of any two nodes.
Example:
Input:
1
\
3
/
2
Output:
1
Explanation:
The minimum absolute difference is 1, which is the difference between 2 and 1 (or between 2 and 3).
Note: There are at least two nodes in this BST.
'''
class TreeNode:
    """Binary-tree node: a value plus optional left/right children."""

    def __init__(self, x):
        self.val, self.left, self.right = x, None, None
class Solution:
    def getMinimumDifference(self, root: 'TreeNode') -> int:
        """Return the minimum difference between the values of any two
        nodes of a BST, via an iterative in-order traversal.

        In-order visits a BST in ascending value order, so the answer
        is the smallest gap between consecutive visited values.  (The
        annotation is quoted as a forward reference to TreeNode.)
        """
        # stack entries are (node, label); label 'c' marks a node whose
        # children are already scheduled and that should now be visited
        traversal_queue = [(root, 'init')]
        min_diff, prev_node_value = float('inf'), -2**31
        while traversal_queue:
            node, label = traversal_queue.pop()
            # BUGFIX: the original used "label is not 'c'" -- identity
            # comparison with a str literal is implementation-dependent
            # (and a SyntaxWarning on modern CPython); use equality.
            if label != 'c':
                if node.right:
                    traversal_queue.append((node.right, 'r'))
                traversal_queue.append((node, 'c'))
                if node.left:
                    traversal_queue.append((node.left, 'l'))
            else:
                min_diff = min(min_diff, node.val - prev_node_value)
                prev_node_value = node.val
        return min_diff
# n : the number of nodes in binary search tree
## Time Complexity: O( n )
#
# The overhead in time is the cost of in-order traversal, which is of O( n )
## Space Complexity: O( n )
#
# THe overhead in space is the storage for traversal_queue, which is of O( n )
def test_bench():
    """Ad-hoc check: build the two example BSTs from the problem
    statement and print the minimum absolute difference of each."""
    ## Test case_#1
    root_1 = TreeNode(1)
    root_1.right = TreeNode(3)
    root_1.right.left = TreeNode(2)
    # expected output:
    '''
    1
    '''
    print( Solution().getMinimumDifference(root_1) )
    ## Test case_#2
    root_2 = TreeNode(5)
    root_2.left = TreeNode(1)
    root_2.right = TreeNode(10)
    root_2.right.left = TreeNode(8)
    root_2.right.right = TreeNode(13)
    # expected output:
    '''
    2
    '''
    print( Solution().getMinimumDifference(root_2) )
# run the bench only when executed directly
if __name__ == '__main__':
    test_bench()
"[email protected]"
] | |
35fddb176546bcdc04b5f7168fe7656d9d16c1c5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02675/s648199301.py | b63ed0e3bb4be52116a50e76ac3fe5f3864781f1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | # -*- coding: utf-8 -*-
def main():
    """Read an integer N and print its counter-word reading.

    The reading depends only on N's last digit:
    3 -> 'bon', 0/1/6/8 -> 'pon', and 2/4/5/7/9 -> 'hon'.
    """
    N = int(input())
    last_digit = N % 10
    if last_digit == 3:
        ans = 'bon'
    elif last_digit in (0, 1, 6, 8):
        ans = 'pon'
    else:
        ans = 'hon'
    print(ans)
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
96a1d7b58328b30fde41e93d4831caca9bf6fc36 | 9aaa39f200ee6a14d7d432ef6a3ee9795163ebed | /Algorithm/Python/146. LRU Cache.py | 12ae61d680fa056cf718b935addad161d26c1efe | [] | no_license | WuLC/LeetCode | 47e1c351852d86c64595a083e7818ecde4131cb3 | ee79d3437cf47b26a4bca0ec798dc54d7b623453 | refs/heads/master | 2023-07-07T18:29:29.110931 | 2023-07-02T04:31:00 | 2023-07-02T04:31:00 | 54,354,616 | 29 | 16 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py | # -*- coding: utf-8 -*-
# @Author: WuLC
# @Date: 2016-08-04 22:39:03
# @Last modified by: WuLC
# @Last Modified time: 2016-08-04 22:40:49
# @Email: [email protected]
class LRUCache(object):
def __init__(self, capacity):
"""
:type capacity: int
"""
self.capacity = capacity
self.cache = {}
self.keys = collections.deque()
self.exist_keys = set()
def get(self, key):
"""
:rtype: int
"""
if key in self.exist_keys:
self.keys.remove(key)
self.keys.append(key)
return self.cache[key]
return -1
def set(self, key, value):
"""
:type key: int
:type value: int
:rtype: nothing
"""
if key not in self.exist_keys:
self.exist_keys.add(key)
if len(self.keys) == self.capacity:
# remove the LRU element
old_key = self.keys.popleft()
self.exist_keys.remove(old_key)
del self.cache[old_key]
else:
self.keys.remove(key)
self.keys.append(key)
self.cache[key] = value
| [
"[email protected]"
] | |
f86f346345b1e788b5612e8ac5f117bc6c0dbce1 | e168a4b9e7997b5266df4c1fe2afbaf0ed031fed | /url_shortener/profiles/forms.py | 470c5cd6344634922a1279b0c41660591cc5b23a | [] | no_license | AaronScruggs/urly-bird | 756eba26f21c66e78ed93bf6f936b50fb927aaef | a27314afb309de42230852fc2bd35416dece46d9 | refs/heads/master | 2021-01-22T01:18:59.907605 | 2016-04-05T07:01:53 | 2016-04-05T07:01:53 | 55,178,264 | 0 | 0 | null | 2016-03-31T19:45:02 | 2016-03-31T19:45:01 | null | UTF-8 | Python | false | false | 217 | py | from django import forms
from django.contrib.auth.models import User
from profiles.models import Profile
class ImageUpdateForm(forms.ModelForm):
    """Model form that lets a user update only their profile image."""
    class Meta:
        # Bind to Profile and expose just the avatar field; all other
        # Profile fields are untouched by this form.
        model = Profile
        fields = ("image",)
| [
"[email protected]"
] | |
a74b58b3e5974f4098f7a4932dfa112f9fedbc7e | 19ddab74600f71700a6b693281d0180d5271f295 | /程序员面试金典/01_04_回文排列.py | bc02963092c9dc8e4d739287a6103fd74aad53ce | [] | no_license | zhulf0804/Coding.Python | 4d55a430da1a8077c81feba65c13ac654aaf094a | 46ab03e23d15ebd5434ef4dd5ae99130000b00a5 | refs/heads/master | 2022-09-14T18:40:59.880941 | 2022-08-20T08:25:51 | 2022-08-20T08:25:51 | 213,113,482 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | class Solution:
def canPermutePalindrome(self, s: str) -> bool:
d = {}
for item in s:
d[item] = d.get(item, 0) + 1
is_odd = False
for k, v in d.items():
if v & 1 == 1:
if is_odd:
return False
is_odd = True
return True | [
"[email protected]"
] | |
16ae617aa0dff53873785822c7cb2db033f9590b | 494e3fbbdff5cf6edb087f3103ad5f15acbc174f | /schedule/migrations/0002_auto_20180727_2329.py | d0a6847d6321e79defcf1bfbd06aa6f38fb59def | [] | no_license | TalentoUnicamp/my | 1209048acdedbb916b8ae8ec80761d09f6ad7754 | 3d87a33cd282d97dbbbd5f62658f231456f12765 | refs/heads/master | 2020-03-23T21:12:58.316033 | 2018-08-14T06:11:36 | 2018-08-14T06:11:36 | 142,090,262 | 11 | 0 | null | 2018-08-17T05:13:26 | 2018-07-24T01:53:23 | JavaScript | UTF-8 | Python | false | false | 1,698 | py | # Generated by Django 2.0.3 on 2018-07-28 02:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration for the ``schedule`` app.

    Relaxes several ``Event``/``Feedback`` fields (nullable/blank) and points
    the speaker/attendee relations at ``user_profile.Profile``.
    """
    dependencies = [
        ('schedule', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='event',
            name='attended',
            # NOTE(review): null=True has no effect on ManyToManyField
            # (Django ignores it) -- harmless, but worth cleaning up.
            field=models.ManyToManyField(null=True, related_name='attended_events', to='user_profile.Profile'),
        ),
        migrations.AlterField(
            model_name='event',
            name='attendees',
            field=models.ManyToManyField(null=True, related_name='selected_events', to='user_profile.Profile'),
        ),
        migrations.AlterField(
            model_name='event',
            name='event_type',
            field=models.CharField(choices=[('Meta', 'Meta'), ('Keynote', 'Keynote'), ('Workshop', 'Workshop'), ('Palestra', 'Palestra')], max_length=20),
        ),
        migrations.AlterField(
            model_name='event',
            name='max_attendees',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='event',
            name='speaker',
            # SET_NULL keeps the event when the speaker profile is deleted.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='my_events', to='user_profile.Profile'),
        ),
        migrations.AlterField(
            model_name='feedback',
            name='comments',
            field=models.TextField(blank=True),
        ),
        migrations.AlterField(
            model_name='feedback',
            name='rating',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
| [
"[email protected]"
] | |
9abea3f326ea59ebd86d1c7b1d83e63ad82ffd60 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/ChenglongChen_Kaggle_HomeDepot/Kaggle_HomeDepot-master/Code/Chenglong/feature_group_distance.py | 8be14bcf62e8f822d47294b1071b3b95a6516e0a | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 4,848 | py | # -*- coding: utf-8 -*-
"""
@author: Chenglong Chen <[email protected]>
@brief: group relevance based distance features
@note: such features are not used in final submission
"""
import re
import string
import numpy as np
import pandas as pd
import config
from config import TRAIN_SIZE
from utils import dist_utils, ngram_utils, nlp_utils
from utils import logging_utils, pkl_utils, time_utils
from feature_base import BaseEstimator, StandaloneFeatureWrapper, PairwiseFeatureWrapper
# tune the token pattern to get a better correlation with y_train
# token_pattern = r"(?u)\b\w\w+\b"
# token_pattern = r"\w{1,}"
# token_pattern = r"\w+"
# token_pattern = r"[\w']+"
token_pattern = " " # just split the text into tokens
# -------------------- Group by (obs, relevance) based distance features ----------------------------------- #
# Something related to Query Expansion
class GroupRelevance_Ngram_Jaccard(BaseEstimator):
    """Single aggregation features"""
    # For each (query, target-text) pair, collects n-gram Jaccard
    # similarities between the target text and the same field of *other*
    # training rows that share the query and carry one fixed relevance label
    # (a query-expansion style feature).
    def __init__(self, obs_corpus, target_corpus, id_list, dfTrain, target_field, relevance, ngram, aggregation_mode=""):
        super().__init__(obs_corpus, target_corpus, aggregation_mode, id_list)
        # Keep only rows with a known relevance label (0 appears to mark
        # filler/non-train rows -- confirm upstream).
        self.dfTrain = dfTrain[dfTrain["relevance"] != 0].copy()
        self.target_field = target_field
        self.relevance = relevance
        self.relevance_str = self._relevance_to_str()
        self.ngram = ngram
        self.ngram_str = ngram_utils._ngram_str_map[self.ngram]
    # NOTE(review): __name__ is defined as a *method* (callers must invoke
    # it), shadowing the usual string attribute of the same name.
    def __name__(self):
        if isinstance(self.aggregation_mode, str):
            feat_name = "Group_%sRelevance_%s_Jaccard_%s"%(
                self.relevance_str, self.ngram_str, string.capwords(self.aggregation_mode))
        elif isinstance(self.aggregation_mode, list):
            feat_name = ["Group_%sRelevance_%s_Jaccard_%s"%(
                self.relevance_str, self.ngram_str, string.capwords(m)) for m in self.aggregation_mode]
        return feat_name
    def _relevance_to_str(self):
        # e.g. 2.75 -> "2d75" so the value can appear in a feature name.
        if isinstance(self.relevance, float):
            return re.sub("\.", "d", str(self.relevance))
        else:
            return str(self.relevance)
    def transform_one(self, obs, target, id):
        # All training rows sharing this search query.
        df = self.dfTrain[self.dfTrain["search_term"] == obs].copy()
        # Fallback when no comparable rows exist.
        val_list = [config.MISSING_VALUE_NUMERIC]
        # NOTE(review): boolean indexing never returns None, so both
        # `is not None` guards are always true; the shape check below is the
        # test that actually matters.
        if df is not None:
            # Exclude the row itself, then keep only rows with the target label.
            df = df[df["id"] != id].copy()
            df = df[df["relevance"] == self.relevance].copy()
            if df is not None and df.shape[0] > 0:
                target_tokens = nlp_utils._tokenize(target, token_pattern)
                target_ngrams = ngram_utils._ngrams(target_tokens, self.ngram)
                val_list = []
                for x in df[self.target_field]:
                    x_tokens = nlp_utils._tokenize(x, token_pattern)
                    x_ngrams = ngram_utils._ngrams(x_tokens, self.ngram)
                    val_list.append(dist_utils._jaccard_coef(x_ngrams, target_ngrams))
        return val_list
# -------------------------------- Main ----------------------------------
def main():
    """Generate group-relevance Jaccard features for each CV fold and for the
    full data set, via PairwiseFeatureWrapper into subdirectories of
    config.FEAT_DIR."""
    logname = "generate_feature_group_distance_%s.log"%time_utils._timestamp()
    logger = logging_utils._get_logger(config.LOG_DIR, logname)
    dfAll = pkl_utils._load(config.ALL_DATA_LEMMATIZED_STEMMED)
    # The first TRAIN_SIZE rows are the labelled training portion.
    dfTrain = dfAll.iloc[:TRAIN_SIZE].copy()
    ## run python3 splitter.py first
    split = pkl_utils._load("%s/splits_level1.pkl"%config.SPLIT_DIR)
    n_iter = len(split)
    # Full label set kept for reference; only the coarser grid below is used.
    relevances_complete = [1, 1.25, 1.33, 1.5, 1.67, 1.75, 2, 2.25, 2.33, 2.5, 2.67, 2.75, 3]
    relevances = [1, 1.33, 1.67, 2, 2.33, 2.67, 3]
    ngrams = [1]
    obs_fields = ["search_term"]
    target_fields = ["product_title", "product_description"]
    aggregation_mode = ["mean", "std", "max", "min", "median"]
    ## for cv
    for i in range(n_iter):
        # Restrict the group statistics to each fold's own training rows so
        # validation labels do not leak into the features.
        trainInd, validInd = split[i][0], split[i][1]
        dfTrain2 = dfTrain.iloc[trainInd].copy()
        sub_feature_dir = "%s/Run%d" % (config.FEAT_DIR, i+1)
        for target_field in target_fields:
            for relevance in relevances:
                for ngram in ngrams:
                    param_list = [dfAll["id"], dfTrain2, target_field, relevance, ngram, aggregation_mode]
                    pf = PairwiseFeatureWrapper(GroupRelevance_Ngram_Jaccard, dfAll, obs_fields, [target_field], param_list, sub_feature_dir, logger)
                    pf.go()
    ## for all
    sub_feature_dir = "%s/All" % (config.FEAT_DIR)
    for target_field in target_fields:
        for relevance in relevances:
            for ngram in ngrams:
                param_list = [dfAll["id"], dfTrain, target_field, relevance, ngram, aggregation_mode]
                pf = PairwiseFeatureWrapper(GroupRelevance_Ngram_Jaccard, dfAll, obs_fields, [target_field], param_list, sub_feature_dir, logger)
                pf.go()
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
4fccba1e6cf207096ecb5d43ef2b1e74b10f2d7a | e41651d8f9b5d260b800136672c70cb85c3b80ff | /Notification_System/temboo/Library/Flickr/PhotoComments/LeaveComment.py | 86bbc8411b315c8fddfd9fdd48b7df1f6c43f6c9 | [] | no_license | shriswissfed/GPS-tracking-system | 43e667fe3d00aa8e65e86d50a4f776fcb06e8c5c | 1c5e90a483386bd2e5c5f48f7c5b306cd5f17965 | refs/heads/master | 2020-05-23T03:06:46.484473 | 2018-10-03T08:50:00 | 2018-10-03T08:50:00 | 55,578,217 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,616 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# LeaveComment
# Add a comment to a specified photo on Flickr.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class LeaveComment(Choreography):
    """Choreo that adds a comment to a specified photo on Flickr."""
    def __init__(self, temboo_session):
        """
        Create a new instance of the LeaveComment Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(LeaveComment, self).__init__(temboo_session, '/Library/Flickr/PhotoComments/LeaveComment')
    # Factory hooks used by the Choreography base class.
    def new_input_set(self):
        return LeaveCommentInputSet()
    def _make_result_set(self, result, path):
        return LeaveCommentResultSet(result, path)
    def _make_execution(self, session, exec_id, path):
        return LeaveCommentChoreographyExecution(session, exec_id, path)
class LeaveCommentInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the LeaveComment
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Each setter simply forwards its value to InputSet._set_input under the
    # matching Temboo input name; no validation happens client-side.
    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Flickr (AKA the OAuth Consumer Key).)
        """
        super(LeaveCommentInputSet, self)._set_input('APIKey', value)
    def set_APISecret(self, value):
        """
        Set the value of the APISecret input for this Choreo. ((required, string) The API Secret provided by Flickr (AKA the OAuth Consumer Secret).)
        """
        super(LeaveCommentInputSet, self)._set_input('APISecret', value)
    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.)
        """
        super(LeaveCommentInputSet, self)._set_input('AccessToken', value)
    def set_AccessTokenSecret(self, value):
        """
        Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.)
        """
        super(LeaveCommentInputSet, self)._set_input('AccessTokenSecret', value)
    def set_CommentText(self, value):
        """
        Set the value of the CommentText input for this Choreo. ((required, string) The text of the comment you are adding.)
        """
        super(LeaveCommentInputSet, self)._set_input('CommentText', value)
    def set_PhotoID(self, value):
        """
        Set the value of the PhotoID input for this Choreo. ((required, integer) The id of the photo to add a comment to)
        """
        super(LeaveCommentInputSet, self)._set_input('PhotoID', value)
    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: xml and json. Defaults to json.)
        """
        super(LeaveCommentInputSet, self)._set_input('ResponseFormat', value)
class LeaveCommentResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the LeaveComment Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, str):
        # NOTE(review): the parameter name shadows the builtin `str`;
        # renaming would change generated-code conventions, so only flagged.
        return json.loads(str)
    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Flickr.)
        """
        return self._output.get('Response', None)
class LeaveCommentChoreographyExecution(ChoreographyExecution):
    """Execution handle for LeaveComment; builds the matching result set."""
    def _make_result_set(self, response, path):
        return LeaveCommentResultSet(response, path)
| [
"[email protected]"
] | |
ec358af8dcc747a31d12f7fb499c7a78bba2c640 | 7701773efa258510951bc7d45325b4cca26b3a7d | /from_trans_file_cloud/explore_pathlib.py | cd6ac1e600ecf9cc21bb0408817543f804917d9b | [] | no_license | Archanciel/explore | c170b2c8b5eed0c1220d5e7c2ac326228f6b2485 | 0576369ded0e54ce7ff9596ec4df076e69067e0c | refs/heads/master | 2022-06-17T19:15:03.647074 | 2022-06-01T20:07:04 | 2022-06-01T20:07:04 | 105,314,051 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | from pathlib import Path
root = Path('D:\\Development\\Python\\trans_file_cloud\\.git')
child = Path('D:\\Development\\Python\\trans_file_cloud\\.git\\hooks')
other = Path('/some/other/path')
print(root in child.parents) | [
"[email protected]"
] | |
ae83c59eb63599eac7d7f45ea8229a239af25040 | 82f993631da2871933edf83f7648deb6c59fd7e4 | /w1/L3/7.py | 8469a86b108877706bb07df0088f4d1eea2b7434 | [] | no_license | bobur554396/PPII2021Summer | 298f26ea0e74c199af7b57a5d40f65e20049ecdd | 7ef38fb4ad4f606940d2ba3daaa47cbd9ca8bcd2 | refs/heads/master | 2023-06-26T05:42:08.523345 | 2021-07-24T12:40:05 | 2021-07-24T12:40:05 | 380,511,125 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 200 | py | # - [] Iterators and Iterbales
a = [1, 2, 3, 4] # - iterable object
it = iter(a)
# print(next(it))
# print(next(it))
# print(next(it))
# print(next(it))
# print(next(it))
for i in it:
print(i)
| [
"[email protected]"
] | |
f9a25ea75f1038ebb53730647439228ea1d83873 | 9102c3a5fa3a5b0202d61206973d0ea167f7a4d0 | /July/07-IslandPerimeter.py | a93da08ce948ac402b6597b23157a28ceea1580f | [] | no_license | Madhav-Somanath/LeetCode | 8e1b39e106cec238e5a2a3acb3eb267f5c36f781 | b6950f74d61db784095c71df5115ba10be936c65 | refs/heads/master | 2023-01-08T15:10:00.249806 | 2020-10-31T14:45:43 | 2020-10-31T14:45:43 | 255,654,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,657 | py | """ You are given a map in form of a two-dimensional integer grid where 1 represents land and 0 represents water.
Grid cells are connected horizontally/vertically (not diagonally). The grid is completely surrounded by water,
and there is exactly one island (i.e., one or more connected land cells).
The island doesn't have "lakes" (water inside that isn't connected to the water around the island). One cell is a square with side length 1.
The grid is rectangular, width and height don't exceed 100. Determine the perimeter of the island. """
# SOLUTION
class Solution:
    def islandPerimeter(self, grid: List[List[int]]) -> int:
        """Return the perimeter of the single island in *grid*.

        Each land cell (1) contributes 4 unit edges; every edge shared
        between two land cells is interior, so subtract 2 for each right or
        down land neighbour.  This replaces the original per-cell closure and
        4-direction boundary checks with one O(rows*cols) pass, and also
        handles a grid with empty rows ([[]]).
        """
        if not grid or not grid[0]:
            return 0
        rows, cols = len(grid), len(grid[0])
        perimeter = 0
        for i in range(rows):
            for j in range(cols):
                if grid[i][j] != 1:
                    continue
                perimeter += 4
                # Each shared edge removes one side from both cells.
                if i + 1 < rows and grid[i + 1][j] == 1:
                    perimeter -= 2
                if j + 1 < cols and grid[i][j + 1] == 1:
                    perimeter -= 2
        return perimeter
'''
m, n, Perimeter = len(grid), len(grid[0]), 0
for i in range(m):
for j in range(n):
Perimeter += 4*grid[i][j]
if i > 0: Perimeter -= grid[i][j]*grid[i-1][j]
if i < m-1: Perimeter -= grid[i][j]*grid[i+1][j]
if j > 0: Perimeter -= grid[i][j]*grid[i][j-1]
if j < n-1: Perimeter -= grid[i][j]*grid[i][j+1]
return Perimeter
''' | [
"[email protected]"
] | |
a7d11fe7ad97288252922c00a7c365e7199665ed | 43e900f11e2b230cdc0b2e48007d40294fefd87a | /Amazon/VideoOnsite/162.find-peak-element.py | 5b3ada63691cf9fcf4b02f7261a2be18b71ec8d7 | [] | no_license | DarkAlexWang/leetcode | 02f2ed993688c34d3ce8f95d81b3e36a53ca002f | 89142297559af20cf990a8e40975811b4be36955 | refs/heads/master | 2023-01-07T13:01:19.598427 | 2022-12-28T19:00:19 | 2022-12-28T19:00:19 | 232,729,581 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | #
# @lc app=leetcode id=162 lang=python3
#
# [162] Find Peak Element
#
# @lc code=start
class Solution:
    def findPeakElement(self, nums: List[int]) -> int:
        """Return the index of one peak element of *nums*.

        Binary search keeping two candidate endpoints ``lo``/``hi``; each
        step either finds a strict local maximum at the midpoint or narrows
        toward the side of the larger neighbour.  O(log n) comparisons.
        """
        lo, hi = 0, len(nums) - 1
        while hi - lo > 1:
            mid = (lo + hi) // 2
            above_left = nums[mid] > nums[mid - 1]
            above_right = nums[mid] > nums[mid + 1]
            if above_left and above_right:
                # mid is strictly above both neighbours: a peak.
                return mid
            if above_right:
                hi = mid
            else:
                lo = mid
        # One or two candidates remain; the larger one is a peak.
        return hi if nums[lo] < nums[hi] else lo
| [
"[email protected]"
] | |
04fa896307a6d243658fb915099d337f76804cd5 | 86813bf514f3e0257f92207f40a68443f08ee44b | /0406 根据身高重建队列/0406 根据身高重建队列.py | 989f32ac1430a2408dcaef254410bf9310c75be2 | [] | no_license | Aurora-yuan/Leetcode_Python3 | 4ce56679b48862c87addc8cd870cdd525c9d926c | 720bb530850febc2aa67a56a7a0b3a85ab37f415 | refs/heads/master | 2021-07-12T13:23:19.399155 | 2020-10-21T03:14:36 | 2020-10-21T03:14:36 | 212,998,500 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | #label: 贪心算法 difficulty: medium
"""
思路
1.排序:按照身高从高到低排,升高相同的按k从小到大排
2.插入:按照排序好的顺序逐个插入新数组,插入的位置按照k来插
如示例中,排序完:
[[7,0], [7,1], [6,1], [5,0], [5,2],[4,4]]
插入的过程:
第一插:[[7,0]]
第二插:[[7,0], [7,1]]
第三插:[[7,0], [6,1],[7,1]]
第四插:[[5,0],[7,0], [6,1],[7,1]]
...
先插高的,后插矮的,即使后插的插到前面也不会有影像,因为矮
"""
class Solution(object):
    def reconstructQueue(self, people):
        """
        :type people: List[List[int]]
        :rtype: List[List[int]]

        Greedy reconstruction: sort by height descending (ties: k ascending),
        then insert each person at index k.  Inserting a shorter person later
        can never invalidate the k-counts of the taller people already
        placed, because shorter people are invisible to them.
        """
        # Bug fix: the original used the Python-2-only tuple-unpacking lambda
        # ``lambda (h, k): (-h, k)``, which is a SyntaxError on Python 3.
        people.sort(key=lambda p: (-p[0], p[1]))
        res = []
        for p in people:
            res.insert(p[1], p)
        return res
| [
"[email protected]"
] | |
963b0a84d3f5586261ec0ed22a68007f2a76aa70 | 0a1356b97465cc1d5c3f661f61b3b8c51fb05d46 | /android_binding/.buildozer/android/platform/python-for-android/testapps/testapp/main.py | 5baa420f3c203147e6abbfe8085c24a0a8778493 | [
"MIT",
"Python-2.0"
] | permissive | Rohan-cod/cross_platform_calc | 00360f971e4da68dd36d6836c9ddbb157f6b77d5 | 5785a5e8150d174019b330c812e7eb012cc4dd79 | refs/heads/master | 2022-12-22T10:29:05.317051 | 2021-06-05T10:52:44 | 2021-06-05T10:52:44 | 237,465,912 | 2 | 1 | MIT | 2022-12-09T05:18:55 | 2020-01-31T16:07:31 | C | UTF-8 | Python | false | false | 4,015 | py | print('main.py was successfully called')
import os
print('imported os')
print('this dir is', os.path.abspath(os.curdir))
print('contents of this dir', os.listdir('./'))
import sys
print('pythonpath is', sys.path)
import kivy
print('imported kivy')
print('file is', kivy.__file__)
from kivy.app import App
from kivy.lang import Builder
from kivy.properties import StringProperty
from kivy.uix.popup import Popup
from kivy.clock import Clock
print('Imported kivy')
from kivy.utils import platform
print('platform is', platform)
kv = '''
#:import Metrics kivy.metrics.Metrics
#:import sys sys
<FixedSizeButton@Button>:
size_hint_y: None
height: dp(60)
ScrollView:
GridLayout:
cols: 1
size_hint_y: None
height: self.minimum_height
FixedSizeButton:
text: 'test pyjnius'
on_press: app.test_pyjnius()
Image:
keep_ratio: False
allow_stretch: True
source: 'colours.png'
size_hint_y: None
height: dp(100)
Label:
height: self.texture_size[1]
size_hint_y: None
font_size: 100
text_size: self.size[0], None
markup: True
text: '[b]Kivy[/b] on [b]SDL2[/b] on [b]Android[/b]!'
halign: 'center'
Label:
height: self.texture_size[1]
size_hint_y: None
text_size: self.size[0], None
markup: True
text: sys.version
halign: 'center'
padding_y: dp(10)
Widget:
size_hint_y: None
height: 20
Label:
height: self.texture_size[1]
size_hint_y: None
font_size: 50
text_size: self.size[0], None
markup: True
text: 'dpi: {}\\ndensity: {}\\nfontscale: {}'.format(Metrics.dpi, Metrics.density, Metrics.fontscale)
halign: 'center'
FixedSizeButton:
text: 'test ctypes'
on_press: app.test_ctypes()
FixedSizeButton:
text: 'test numpy'
on_press: app.test_numpy()
Widget:
size_hint_y: None
height: 1000
on_touch_down: print('touched at', args[-1].pos)
<ErrorPopup>:
title: 'Error'
size_hint: 0.75, 0.75
Label:
text: root.error_text
'''
class ErrorPopup(Popup):
    """Modal popup whose body label is bound to ``error_text`` in the kv rule."""
    error_text = StringProperty('')
def raise_error(error):
    """Print *error* to the console and surface it to the user in a popup."""
    print('ERROR:', error)
    ErrorPopup(error_text=error).open()
class TestApp(App):
    """Small Kivy demo app used to smoke-test a python-for-android build."""
    def build(self):
        """Construct the UI from the module-level kv string and log metrics."""
        root = Builder.load_string(kv)
        # Heartbeat every 2 seconds so the log shows the app is alive.
        Clock.schedule_interval(self.print_something, 2)
        # Clock.schedule_interval(self.test_pyjnius, 5)
        print('testing metrics')
        from kivy.metrics import Metrics
        print('dpi is', Metrics.dpi)
        print('density is', Metrics.density)
        print('fontscale is', Metrics.fontscale)
        return root
    def print_something(self, *args):
        """Heartbeat callback scheduled in build()."""
        print('App print tick', Clock.get_boottime())
    def on_pause(self):
        # Returning True lets Android pause the app instead of stopping it.
        return True
    def test_pyjnius(self, *args):
        """Try to vibrate the device via pyjnius; show a popup on failure."""
        try:
            from jnius import autoclass
        except ImportError:
            raise_error('Could not import pyjnius')
            return
        print('Attempting to vibrate with pyjnius')
        # PythonActivity = autoclass('org.renpy.android.PythonActivity')
        # activity = PythonActivity.mActivity
        PythonActivity = autoclass('org.kivy.android.PythonActivity')
        activity = PythonActivity.mActivity
        # NOTE(review): Intent is assigned but never used below.
        Intent = autoclass('android.content.Intent')
        Context = autoclass('android.content.Context')
        vibrator = activity.getSystemService(Context.VIBRATOR_SERVICE)
        # Vibrate for one second (argument is in milliseconds).
        vibrator.vibrate(1000)
    def test_ctypes(self, *args):
        # The import itself is the test: verifies ctypes was bundled.
        import ctypes
    def test_numpy(self, *args):
        """Exercise numpy to verify the native extension loads and runs."""
        import numpy
        print(numpy.zeros(5))
        print(numpy.arange(5))
        print(numpy.random.random((3, 3)))
| [
"[email protected]"
] | |
cf7330a35aacb57aecc3cf237fab0a5660c9e136 | 7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0 | /1101-1200/1155-Number of Dice Rolls With Target Sum/1155-Number of Dice Rolls With Target Sum.py | f54e16cb49f5483bfd0bcd1a41d19b792bf96035 | [
"MIT"
] | permissive | jiadaizhao/LeetCode | be31bd0db50cc6835d9c9eff8e0175747098afc6 | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | refs/heads/master | 2021-11-05T04:38:47.252590 | 2021-10-31T09:54:53 | 2021-10-31T09:54:53 | 99,655,604 | 52 | 28 | MIT | 2020-10-02T12:47:47 | 2017-08-08T05:57:26 | C++ | UTF-8 | Python | false | false | 863 | py | class Solution:
def numRollsToTarget(self, d: int, f: int, target: int) -> int:
dp = [[0] * (1 + target) for _ in range(1 + d)]
dp[0][0] = 1
MOD = 10 ** 9 + 7
for i in range(1, 1 + d):
for j in range(1, 1 + target):
for k in range(1, 1 + min(f, j)):
dp[i][j] = (dp[i][j] + dp[i - 1][j - k]) % MOD
return dp[d][target]
class Solution2:
    def numRollsToTarget(self, d: int, f: int, target: int) -> int:
        """Space-optimised variant: keeps one 1-D row of the DP table per die
        instead of the full (d+1) x (target+1) table."""
        MOD = 10 ** 9 + 7
        row = [0] * (target + 1)
        row[0] = 1  # base case: zero dice, sum 0
        for _ in range(d):
            nxt = [0] * (target + 1)
            for total in range(1, target + 1):
                acc = 0
                for face in range(1, min(f, total) + 1):
                    acc = (acc + row[total - face]) % MOD
                nxt[total] = acc
            row = nxt
        return row[target]
| [
"[email protected]"
] | |
efc48cf55cecc69f2b9a01cbc950890c053e3a77 | 31bc3fdc7c2b62880f84e50893c8e3d0dfb66fa6 | /libraries/numpy/python_369/python_369/numpy_118/built_in_scalars/uint_.py | 31601e10986c1a268eb3ab8a0b088f9f95f7615e | [] | no_license | tpt5cu/python-tutorial | 6e25cf0b346b8182ebc8a921efb25db65f16c144 | 5998e86165a52889faf14133b5b0d7588d637be1 | refs/heads/master | 2022-11-28T16:58:51.648259 | 2020-07-23T02:20:37 | 2020-07-23T02:20:37 | 269,521,394 | 0 | 0 | null | 2020-06-05T03:23:51 | 2020-06-05T03:23:50 | null | UTF-8 | Python | false | false | 1,496 | py | # https://numpy.org/doc/1.18/reference/arrays.scalars.html#built-in-scalar-types
import numpy as np
def what_is_uint():
    '''
    - "np.uint" and "np.uintc" are aliases for real underlying NumPy scalar types
    - The values of those aliases depend on the operating system
    - On my system, "np.uint" creates an object whose class is "numpy.uint64"
    - "np.uint" has the same precision as ... ?
    - On my system, "np.uintc" creates an object whose class is "numpy.uint32"
    - "np.uintc" has the same precision as ... ?
    - If I want some size other than those specified by the aliases, I'll have to use a class with an explicit size, e.g. np.uint8
    '''
    # NOTE(review): every "expected" value in the trailing comments below is
    # platform-dependent (observed on one 64-bit system); on other platforms
    # np.uint / np.uintc may alias different fixed-width types.
    print(np.uint is np.uint64) # True
    print(np.uintc is np.uint32) # True
    # No error because 1 certainly fits within the size of a C long
    ary = np.array(1, dtype=np.uint)
    print(ary.dtype) # uint64
    #print(int(10**50)) # 100000000000000000000000000000000000000000000000000
    #np.array(10**50, dtype=np.uint) # OverflowError: Python int too large to convert to C long
    print(type(np.uint)) # <class 'type'>
    scalar = np.uint(10)
    print(type(scalar)) # <class 'numpy.uint64'>
    scalar = np.uint32(10)
    print(type(scalar)) # <class 'numpy.uint32'>
    scalar = np.uintc(10)
    print(type(scalar)) # <class 'numpy.uint32'>
    scalar = np.uint8(4)
    print(type(scalar)) # <class 'numpy.uint8'>
if __name__ == '__main__':
    what_is_uint()
| [
"[email protected]"
] | |
2d192a9d9291492a2911fb5ad35382030baf8fc5 | fad34b6b81e93850e6f408bbc24b3070e002997d | /Python-DM-Text Mining-01.py | e4b51fba0851281217136c06054f5f0570c357bf | [] | no_license | Sandy4321/Latent-Dirichlet-Allocation-2 | d60c14a3abb62e05a31aaac8c9a6d9381ec9d560 | 0bf6670643c7968064e375a287448b515b077473 | refs/heads/master | 2021-05-05T09:57:17.304046 | 2017-07-26T16:14:22 | 2017-07-26T16:14:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,149 | py | ############################################################################
# Created by: Prof. Valdecy Pereira, D.Sc.
# UFF - Universidade Federal Fluminense (Brazil)
# email: [email protected]
# Course: Data Mining
# Lesson: Text Mining
# Citation:
# PEREIRA, V. (2017). Project: LDA - Latent Dirichlet Allocation, File: Python-DM-Text Mining-01.py, GitHub repository:
# <https://github.com/Valdecy/Latent-Dirichlet-Allocation>
############################################################################
# Installing Required Libraries
import numpy as np
import pandas as pd
from nltk.tokenize import RegexpTokenizer
from random import randint
# Function: lda_tm
def lda_tm(document = [], K = 2, alpha = 0.12, eta = 0.01, iterations = 5000, dtm_matrix = False, dtm_bin_matrix = False, dtm_tf_matrix = False, dtm_tfidf_matrix = False, co_occurrence_matrix = False, correl_matrix = False):
    """Latent Dirichlet Allocation via collapsed Gibbs sampling.

    Returns ``result_list``: any requested matrices (dtm, binary dtm, tf,
    tf-idf, co-occurrence, correlation -- in flag order) followed by
    ``theta`` (document-topic proportions, numpy array) and ``phi``
    (term-topic proportions, pandas DataFrame indexed by vocabulary).

    NOTE(review): ``document=[]`` is a mutable default argument; callers
    should always pass their own list.
    """
    ################ Part 1 - Start of Function #############################
    tokenizer = RegexpTokenizer(r'\w+')
    result_list = []
    # Corpus
    corpus = []
    for i in document:
        tokens = tokenizer.tokenize(i.lower())
        corpus.append(tokens)
    # Corpus ID (same token lists; entries are overwritten with vocab indices below)
    corpus_id = []
    for i in document:
        tokens = tokenizer.tokenize(i.lower())
        corpus_id.append(tokens)
    # Unique Words (vocabulary, in order of first appearance)
    uniqueWords = []
    for j in range(0, len(corpus)):
        for i in corpus[j]:
            if not i in uniqueWords:
                uniqueWords.append(i)
    # Corpus ID for Unique Words
    for j in range(0, len(corpus)):
        for i in range(0, len(uniqueWords)):
            for k in range(0, len(corpus[j])):
                if uniqueWords[i] == corpus[j][k]:
                    corpus_id[j][k] = i
    # Topic Assignment (per-token topic labels; randomized during initialization)
    topic_assignment = []
    for i in document:
        tokens = tokenizer.tokenize(i.lower())
        topic_assignment.append(tokens)
    # dtm: raw document-term counts (built if any derived matrix is requested)
    if dtm_matrix == True or dtm_bin_matrix == True or dtm_tf_matrix == True or dtm_tfidf_matrix == True or co_occurrence_matrix == True or correl_matrix == True:
        dtm = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for j in range(0, len(corpus)):
            for i in range(0, len(uniqueWords)):
                for k in range(0, len(corpus[j])):
                    if uniqueWords[i] == corpus[j][k]:
                        dtm[j][i] = dtm[j][i] + 1
        dtm_pd = pd.DataFrame(dtm, columns = uniqueWords)
        if dtm_matrix == True:
            result_list.append(dtm_pd)
    # dtm_bin: presence/absence version of dtm
    if dtm_bin_matrix == True or co_occurrence_matrix == True or correl_matrix == True:
        dtm_bin = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for i in range(0, len(corpus)):
            for j in range(0, len(uniqueWords)):
                if dtm[i,j] > 0:
                    dtm_bin[i,j] = 1
        dtm_bin_pd = pd.DataFrame(dtm_bin, columns = uniqueWords)
        if dtm_bin_matrix == True:
            result_list.append(dtm_bin_pd)
    # dtm_tf: term frequency (counts normalised per document)
    if dtm_tf_matrix == True:
        dtm_tf = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for i in range(0, len(corpus)):
            for j in range(0, len(uniqueWords)):
                if dtm[i,j] > 0:
                    dtm_tf[i,j] = dtm[i,j]/dtm[i,].sum()
        dtm_tf_pd = pd.DataFrame(dtm_tf, columns = uniqueWords)
        result_list.append(dtm_tf_pd)
    # dtm_tfidf (NOTE(review): requires dtm_tf_matrix=True as well, otherwise
    # dtm_tf is undefined here)
    if dtm_tfidf_matrix == True:
        idf = np.zeros(shape = (1, len(uniqueWords)))
        for i in range(0, len(uniqueWords)):
            idf[0,i] = np.log10(dtm.shape[0]/(dtm[:,i]>0).sum())
        dtm_tfidf = np.zeros(shape = (len(corpus), len(uniqueWords)))
        for i in range(0, len(corpus)):
            for j in range(0, len(uniqueWords)):
                dtm_tfidf[i,j] = dtm_tf[i,j]*idf[0,j]
        dtm_tfidf_pd = pd.DataFrame(dtm_tfidf, columns = uniqueWords)
        result_list.append(dtm_tfidf_pd)
    # Co-occurrence Matrix (word-by-word document co-occurrence counts)
    if co_occurrence_matrix == True:
        co_occurrence = np.dot(dtm_bin.T,dtm_bin)
        co_occurrence_pd = pd.DataFrame(co_occurrence, columns = uniqueWords, index = uniqueWords)
        result_list.append(co_occurrence_pd)
    # Correlation Matrix (upper triangle only; lower triangle stays zero)
    if correl_matrix == True:
        correl = np.zeros(shape = (len(uniqueWords), len(uniqueWords)))
        for i in range(0, correl.shape[0]):
            for j in range(i, correl.shape[1]):
                correl[i,j] = np.corrcoef(dtm_bin[:,i], dtm_bin[:,j])[0,1]
        correl_pd = pd.DataFrame(correl, columns = uniqueWords, index = uniqueWords)
        result_list.append(correl_pd)
    # LDA Initialization: random topic per token, then count matrices
    # cdt (document x topic) and cwt (topic x word).
    for i in range(0, len(topic_assignment)):
        for j in range(0, len(topic_assignment[i])):
            topic_assignment[i][j] = randint(0, K-1)
    cdt = np.zeros(shape = (len(topic_assignment), K))
    for i in range(0, len(topic_assignment)):
        for j in range(0, len(topic_assignment[i])):
            for m in range(0, K):
                if topic_assignment[i][j] == m:
                    cdt[i][m] = cdt[i][m] + 1
    cwt = np.zeros(shape = (K, len(uniqueWords)))
    for i in range(0, len(corpus)):
        for j in range(0, len(uniqueWords)):
            for m in range(0, len(corpus[i])):
                if uniqueWords[j] == corpus[i][m]:
                    for n in range(0, K):
                        if topic_assignment[i][m] == n:
                            cwt[n][j] = cwt[n][j] + 1
    # LDA Algorithm: collapsed Gibbs sampling over every token
    for i in range(0, iterations + 1):
        for d in range(0, len(corpus)):
            for w in range(0, len(corpus[d])):
                # Remove the token's current assignment from the counts...
                initial_t = topic_assignment[d][w]
                word_num = corpus_id[d][w]
                cdt[d,initial_t] = cdt[d,initial_t] - 1
                cwt[initial_t,word_num] = cwt[initial_t,word_num] - 1
                # ...then sample a new topic from the conditional.
                # NOTE(review): the eta-smoothing denominator uses len(corpus);
                # standard collapsed Gibbs uses vocabulary size
                # len(uniqueWords) -- confirm intended.
                p_z = ((cwt[:,word_num] + eta) / (np.sum((cwt), axis = 1) + len(corpus) * eta)) * ((cdt[d,] + alpha) / (sum(cdt[d,]) + K * alpha ))
                z = np.sum(p_z)
                p_z_ac = np.add.accumulate(p_z/z)
                u = np.random.random_sample()
                # Inverse-CDF sampling over the K topics.
                for m in range(0, K):
                    if u <= p_z_ac[m]:
                        final_t = m
                        break
                topic_assignment[d][w] = final_t
                cdt[d,final_t] = cdt[d,final_t] + 1
                cwt[final_t,word_num] = cwt[final_t,word_num] + 1
        if i % 100 == 0:
            print('iteration:', i)
    # theta: smoothed document-topic proportions.
    # NOTE(review): the row sum is recomputed inside the j loop while theta
    # is being overwritten in place, so rows do not end up summing to 1;
    # precompute the row sums before dividing.
    theta = (cdt + alpha)
    for i in range(0, len(theta)):
        for j in range(0, K):
            theta[i,j] = theta[i,j]/np.sum(theta, axis = 1)[i]
    result_list.append(theta)
    # phi: smoothed topic-word proportions (row sums precomputed, so this
    # normalisation is correct).
    phi = (cwt + eta)
    d_phi = np.sum(phi, axis = 1)
    for i in range(0, K):
        for j in range(0, len(phi.T)):
            phi[i,j] = phi[i,j]/d_phi[i]
    phi_pd = pd.DataFrame(phi.T, index = uniqueWords)
    result_list.append(phi_pd)
    return result_list
######################## Part 2 - Usage ####################################
# Documents
# Toy corpus: docs 1-4 share "data mining" vocabulary, docs 5-8 share
# "play guitar" vocabulary, and doc 9 stands apart -- so K = 3 below has a
# natural topic structure to recover.
doc_1 = "data mining technique data mining first favourite technique"
doc_2 = "data mining technique data mining second favourite technique"
doc_3 = "data mining technique data mining third favourite technique"
doc_4 = "data mining technique data mining fourth favourite technique"
doc_5 = "friday play guitar"
doc_6 = "saturday will play guitar"
doc_7 = "sunday will play guitar"
doc_8 = "monday will play guitar"
doc_9 = "good good indeed can thank"
# Compile Documents
docs = [doc_1, doc_2, doc_3, doc_4, doc_5, doc_6, doc_7, doc_8, doc_9]
# Call Function
lda = lda_tm(document = docs, K = 3, alpha = 0.12, eta = 0.01, iterations = 2500, co_occurrence_matrix = True)
########################## End of Code #####################################
| [
"[email protected]"
] | |
3e30a6a777fc7d9632db4589647703d42784d301 | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-dgc/huaweicloudsdkdgc/v1/model/real_time_node_status.py | 7221161869b508adcbdee1530355437f7d8e3e9e | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,485 | py | # coding: utf-8
import re
import six
class RealTimeNodeStatus:
    """Model describing the status of a single node of a real-time DGC job.

    Attributes:
        openapi_types (dict): The key is attribute name
                              and the value is attribute type.
        attribute_map (dict): The key is attribute name
                              and the value is json key in definition.
    """

    # Attribute names whose values are masked as "****" in to_dict() output.
    sensitive_list = []

    openapi_types = {
        'name': 'str',
        'status': 'str',
        'log_path': 'str',
        'node_type': 'str'
    }

    attribute_map = {
        'name': 'name',
        'status': 'status',
        'log_path': 'logPath',
        'node_type': 'nodeType'
    }

    def __init__(self, name=None, status=None, log_path=None, node_type=None):
        """RealTimeNodeStatus - a model defined in huaweicloud sdk

        :param name: node name (str)
        :param status: node status (str)
        :param log_path: path to the node's log (str)
        :param node_type: node type (str)
        """
        self._name = None
        self._status = None
        self._log_path = None
        self._node_type = None
        self.discriminator = None

        # Assign only explicitly-provided values; omitted fields stay None.
        if name is not None:
            self.name = name
        if status is not None:
            self.status = status
        if log_path is not None:
            self.log_path = log_path
        if node_type is not None:
            self.node_type = node_type

    @property
    def name(self):
        """Gets the name of this RealTimeNodeStatus.

        :return: The name of this RealTimeNodeStatus.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this RealTimeNodeStatus.

        :param name: The name of this RealTimeNodeStatus.
        :type: str
        """
        self._name = name

    @property
    def status(self):
        """Gets the status of this RealTimeNodeStatus.

        :return: The status of this RealTimeNodeStatus.
        :rtype: str
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this RealTimeNodeStatus.

        :param status: The status of this RealTimeNodeStatus.
        :type: str
        """
        self._status = status

    @property
    def log_path(self):
        """Gets the log_path of this RealTimeNodeStatus.

        :return: The log_path of this RealTimeNodeStatus.
        :rtype: str
        """
        return self._log_path

    @log_path.setter
    def log_path(self, log_path):
        """Sets the log_path of this RealTimeNodeStatus.

        :param log_path: The log_path of this RealTimeNodeStatus.
        :type: str
        """
        self._log_path = log_path

    @property
    def node_type(self):
        """Gets the node_type of this RealTimeNodeStatus.

        :return: The node_type of this RealTimeNodeStatus.
        :rtype: str
        """
        return self._node_type

    @node_type.setter
    def node_type(self, node_type):
        """Sets the node_type of this RealTimeNodeStatus.

        :param node_type: The node_type of this RealTimeNodeStatus.
        :type: str
        """
        self._node_type = node_type

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Plain dict iteration replaces the former six.iteritems() shim,
        # which is unnecessary on Python 3.
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the JSON string representation of the model"""
        # Standard-library json replaces the third-party simplejson
        # dependency; output is equivalent for these plain string fields.
        import json
        return json.dumps(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, RealTimeNodeStatus):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
e5cc439e0b34ca33b3d001bd387a2a4479ab0b82 | 5f332fd35e0071b6c33727b1ec7b22efefd4182a | /lib/sconstool/util/finder_.py | 84618a3615bf73a74897f7ec62a7cd00982180de | [
"MIT"
] | permissive | ptomulik/scons-tool-util | ec4bc3f6971feda4fa918632b0f0431faf96779b | daab1c7db087feb988a721bf10e6b5c29c0e02b2 | refs/heads/master | 2021-08-07T12:20:17.485348 | 2020-04-15T18:48:28 | 2020-04-15T18:48:28 | 153,349,258 | 1 | 0 | null | 2020-04-15T18:49:16 | 2018-10-16T20:23:55 | Python | UTF-8 | Python | false | false | 5,721 | py | # -*- coding: utf-8 -*-
"""Provides the :class:`.ToolFinder` class.
"""
from . import misc_
import os
__all__ = ('ToolFinder',)
class ToolFinder(object):
    """Callable object which searches for executables.

    A single ToolFinder instance searches for a single file (program), for
    example a compiler executable or script interpreter. The constructor
    accepts several options; each option is mirrored by a read-only
    property of the same name.

    :Example: Typical use in a tool module

    .. code-block:: python

        from sconstool.util import ToolFinder

        foo = ToolFinder('foo')

        def generate(env):
            env.SetDefault(FOO=foo(env))
            # ...

        def exists(env):
            return env.get('FOO', foo(env))
    """

    __slots__ = ('_tool', '_kw')

    _ctor_kwargs = ('name',
                    'path',
                    'pathext',
                    'reject',
                    'priority_path',
                    'fallback_path',
                    'strip_path',
                    'strip_priority_path',
                    'strip_fallback_path')

    def __init__(self, tool, **kw):
        """
        :param str tool: symbolic name of the tool.
        :keyword str,list name: program name(s) to look for; defaults to *tool*.
        :keyword str,list path: search path used instead of the standard SCons PATH.
        :keyword str,list pathext: file extensions considered executable.
        :keyword list reject: paths to be rejected.
        :keyword str,list priority_path: extra path searched before :attr:`.path`.
        :keyword str,list fallback_path: extra path searched after :attr:`.path`.
        :keyword bool strip_path: if ``True`` (default), strip the leading
            directory from the result when found via :attr:`.path`.
        :keyword bool strip_priority_path: same, for **priority_path**
            (default ``False``).
        :keyword bool strip_fallback_path: same, for **fallback_path**
            (default ``False``).
        """
        self._tool = str(tool)
        misc_.check_kwargs('ToolFinder()', kw, self._ctor_kwargs)
        self._kw = kw

    @property
    def tool(self):
        """Tool name, as passed in to the constructor.

        :rtype: str
        """
        return self._tool

    def __call__(self, env):
        """Perform the actual search.

        :param env: a SCons environment; provides construction variables and
            the ``env.WhereIs()`` method.
        :return: name or path of the executable found, or ``None`` when the
            program cannot be found.
        :rtype: str
        """
        return self._search(env)

    def _whereis(self, env, prog, where):
        # Look *prog* up on the search path selected by *where*.
        search_path = getattr(self, where)
        if search_path and not isinstance(search_path, str):
            # joining into one string enables variable substitution in
            # individual list entries
            search_path = os.path.pathsep.join(search_path)
        return env.WhereIs(prog, search_path, self.pathext, self.reject)

    def _adjust_result(self, env, result, where):
        # Decide whether to report the bare program name or the found path.
        name, found = result
        prog = env.subst(name)
        if os.path.isabs(prog) or getattr(self, 'strip_%s' % where):
            return prog
        return found

    def _search_in(self, env, where):
        # Try each candidate program name on the *where* search path.
        candidates = self.name
        if isinstance(candidates, str):
            candidates = [candidates]
        for candidate in candidates:
            hit = self._whereis(env, candidate, where)
            if hit:
                return self._adjust_result(env, (candidate, hit), where)
        return None

    def _search(self, env):
        # Search the three path groups in priority order; first hit wins.
        hits = (self._search_in(env, where)
                for where in ('priority_path', 'path', 'fallback_path'))
        return next((hit for hit in hits if hit), None)

    @classmethod
    def _add_getter(cls, attr, default=None, **kw):
        # Install a read-only property *attr*, backed by the ``_kw`` dict.
        if isinstance(default, property):
            default = default.fget
            kw['defaultattr'] = default.__name__
            doc = """\
            The value of **%(attr)s** keyword argument passed in to the
            constructor at object creation, or ``self.%(defaultattr)s`` if the
            argument was omitted.

            :rtype: %(rtype)s
            """
        else:
            doc = """\
            The value of **%(attr)s** keyword argument passed in to the
            constructor at object creation, or ``%(default)r`` if the
            argument was omitted.

            :rtype: %(rtype)s
            """
        kw.setdefault('doc', doc)
        misc_.add_ro_dict_property(cls, '_kw', attr, default, **kw)
TF = ToolFinder
# Declare the read-only properties mirroring the constructor keywords.
# Each spec: (attribute name, default value, extra keyword options).
for _attr, _default, _kw in [
    ('name', TF.tool, {'rtype': 'str'}),
    ('path', None, {'rtype': 'str,list'}),
    ('priority_path', [], {'rtype': 'str,list'}),
    ('fallback_path', [], {'rtype': 'str,list'}),
    ('pathext', None, {'rtype': 'str,list'}),
    ('reject', [], {'rtype': 'list'}),
    ('strip_path', True, {'rtype': 'bool'}),
    ('strip_priority_path', False, {'rtype': 'bool'}),
    ('strip_fallback_path', False, {'rtype': 'bool'}),
]:
    TF._add_getter(_attr, _default, **_kw)
del _attr, _default, _kw
del TF
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set ft=python et ts=4 sw=4:
| [
"[email protected]"
] | |
f6325cdee89668b585f012a30c7130e6022150fc | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /clouddirectory_write_f/schema_delete.py | 58f82c7195d72611e6c1e62d27b86b09d9f7b063 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Interactive parameter writer for ``aws clouddirectory delete-schema``."""
import os
import sys

# Put the repository root on sys.path so the shared ``common`` package
# resolves when this script is executed directly from its sub-directory.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter

# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
    # Related CLI references:
    # delete-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/delete-schema.html
    # apply-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/apply-schema.html
    # create-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/create-schema.html
    # publish-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/publish-schema.html
    # update-schema : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/clouddirectory/update-schema.html
    write_parameter("clouddirectory", "delete-schema")
"[email protected]"
] | |
7fb7c4d5f15747a600819c85ad9266779fdb129c | a676d918b568964d475a3ea25c79d446b1783abf | /Chap0/project/ex16.py | cbb9237d4682e1be0fb5529a2f836c5ce46caa04 | [] | no_license | AIHackerTest/SailingChen10_Py101-004 | 35d76d32e6a21c487ce8d48f974532fb38a05051 | 3c95e04f7d54529e897beec7652e089514ee6dd5 | refs/heads/master | 2021-05-15T00:32:35.407998 | 2017-09-12T08:31:02 | 2017-09-12T08:31:02 | 103,240,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | # coding = utf-8
"""Exercise 16 (LPTHW): overwrite a file with three user-supplied lines."""
from sys import argv

script, filename = argv

print("We're going to erase {!r}.".format(filename))
print("If you don't want that, hit CTRL-C (^C).")
print("If you do want that, hit RETURN.")

# The answer itself is ignored; aborting is done with CTRL-C as prompted.
input("yes or no: ")

print("Opening the file...")
target = open(filename, 'w')

print("Truncating the file. Goodbye!")
target.truncate()

print("Now I'm going to ask you for three lines.")
line1 = input("line 1: ")
line2 = input("line 2: ")
line3 = input("line 3: ")

print("I'm going to write these to the file.")
# Each line is followed by a newline, matching the original output bytes.
for line in (line1, line2, line3):
    target.write(line + "\n")

print("And finally,we close it.")
target.close()
| [
"[email protected]"
] | |
9b5b03a445f19ee80e1454f2b69ec50d24fc9858 | febeffe6ab6aaa33e3a92e2dbbd75783a4e32606 | /ssseg/cfgs/annnet/cfgs_voc_resnet101os8.py | c5a99a673dced76b76fc8e87509c725ef4b0e15f | [
"MIT"
] | permissive | Junjun2016/sssegmentation | 7bbc5d53abee1e0cc88d5e989e4cff5760ffcd09 | bf7281b369e8d7fc2f8986caaeec3ec38a30c313 | refs/heads/main | 2023-02-04T22:09:13.921774 | 2020-12-23T06:28:56 | 2020-12-23T06:28:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,208 | py | '''define the config file for voc and resnet101os8'''
from .base_cfg import *
# ---- dataset config ----
# NOTE(review): dict.copy() is shallow, so the nested 'train'/'test' dicts
# updated below are shared with base_cfg — confirm this is intentional.
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG['train'].update(
    type='voc',
    set='trainaug',
    rootdir='/data/VOCdevkit/VOC2012',
)
DATASET_CFG['test'].update(
    type='voc',
    rootdir='/data/VOCdevkit/VOC2012',
)
# ---- dataloader config ----
DATALOADER_CFG = DATALOADER_CFG.copy()
# ---- optimizer config ----
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(max_epochs=60)
# ---- losses config ----
LOSSES_CFG = LOSSES_CFG.copy()
# ---- model config ----
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(num_classes=21)
# ---- common config ----
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
    backupdir='annnet_resnet101os8_voc_train',
    logfilepath='annnet_resnet101os8_voc_train/train.log',
)
COMMON_CFG['test'].update(
    backupdir='annnet_resnet101os8_voc_test',
    logfilepath='annnet_resnet101os8_voc_test/test.log',
    resultsavepath='annnet_resnet101os8_voc_test/annnet_resnet101os8_voc_results.pkl',
)
"[email protected]"
] | |
8002b8bd33ebd7c6508328204e0bcaba4abfe848 | c527df31f9daf06c36e8025b372d137ad9c1c4c7 | /django/call_app/admin.py | e2561d3e10d41c1a4aa4e22c0d7feb735b07ee77 | [] | no_license | Katerina964/callback | 8dc7d5d230d095ec32ce1d69c4648f4564e99f87 | 741fa58779413845ccc4e478ccc2b952c6d000a0 | refs/heads/master | 2023-03-30T17:55:08.611493 | 2021-04-13T08:53:30 | 2021-04-13T08:53:30 | 295,951,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | from django.contrib import admin
from .models import Crmaccount, Call, Customer

# Expose the call-tracking models in the Django admin with default options.
for model in (Crmaccount, Call, Customer):
    admin.site.register(model)
| [
"[email protected]"
] | |
c98f149bd94f32fc457a90250420211834c8d90c | a3d2620bbf25002c7b182600c2e40f8f06555e91 | /django_time/lab13/order/migrations/0007_remove_product_product_views.py | c68818c51d3abadc681f55d7596f477885ffcfe3 | [] | no_license | alejo8591/backend-lab | 782736a82933f705f825a1194369bfe13e86c0ec | 4a02a9552083a7c877e91b0f8b81e37a8650cf54 | refs/heads/master | 2016-09-03T03:53:43.878240 | 2015-11-26T06:35:38 | 2015-11-26T06:35:38 | 3,911,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Schema migration: drop the ``product_views`` column from ``Product``.

    Reverses migration 0006, which added the field.
    """

    # Must be applied after the migration that introduced ``product_views``.
    dependencies = [
        ('order', '0006_product_product_views'),
    ]

    operations = [
        # Remove the column; any data stored in it is lost on apply.
        migrations.RemoveField(
            model_name='product',
            name='product_views',
        ),
    ]
| [
"[email protected]"
] | |
def f(l1, l2, r1, r2):
    """Return True if two circles of radii r1 and r2 fit side by side
    inside an l1 x l2 rectangle without overlapping each other."""
    # Slack left on each axis after reserving both radii.
    dx = l1 - r1 - r2
    dy = l2 - r1 - r2
    if min(dx, dy) < 0:
        # The radii alone exceed one of the sides: cannot fit.
        return False
    # Centers must be at least r1 + r2 apart, and the larger circle's
    # diameter must not exceed the rectangle's shorter side.
    radius_sum = r1 + r2
    return dx * dx + dy * dy >= radius_sum * radius_sum and 2 * max(r1, r2) <= min(l1, l2)
def main():
    """Read one 'l1 l2 r1 r2' case per line until an all-zero line,
    printing S when the two circles fit in the rectangle, N otherwise."""
    while True:
        tokens = input().split()
        l1, l2, r1, r2 = [int(tokens[i]) for i in range(4)]
        # An all-zero line terminates the input (per problem statement).
        if l1 + l2 + r1 + r2 <= 0:
            break
        print("S" if f(l1, l2, r1, r2) else "N")
    return 0


main()
| [
"[email protected]"
] | |
bb02514e4e44722f691e3de8784fab89c79f2dd0 | c116a7ab1fb1ec460f526cf8cefe0abd9eac1584 | /py/lib/utils/metrics.py | 589ada85cb53607b1ba8cb38042382b3b9c77d89 | [
"Apache-2.0"
] | permissive | ckunlun/LightWeightCNN | 99a60b41d27a05bae6ad6ba7d2d537010f47726e | b3bed250520971c80bbc170958ff7f5b698be7cc | refs/heads/master | 2022-10-09T02:24:54.620610 | 2020-06-08T08:34:25 | 2020-06-08T08:34:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,734 | py | # -*- coding: utf-8 -*-
"""
@date: 2020/4/27 下午8:25
@file: metrics.py
@author: zj
@description:
"""
import torch
from thop import profile
from torchvision.models import AlexNet
from models.squeeze_net import SqueezeNet
from models.squeeze_net_bypass import SqueezeNetBypass
def compute_num_flops(model):
    """Profile *model* with a dummy 1x3x224x224 input and return
    (GFlops, parameter size in MB)."""
    dummy_input = torch.randn(1, 3, 224, 224)
    macs, params = profile(model, inputs=(dummy_input,), verbose=False)
    # One MAC counts as two FLOPs; parameters assumed 4 bytes each (float32).
    gflops = macs * 2.0 / 10 ** 9
    params_mb = params * 4.0 / 1024 / 1024
    return gflops, params_mb
def topk_accuracy(output, target, topk=(1,)):
    """Compute top-k accuracies. N is the number of samples, C the number
    of classes.

    :param output: tensor of shape [N, C]; per-sample class scores.
    :param target: tensor of shape [N]; ground-truth class indices.
    :param topk: tuple of k values for which to compute top-k accuracy.
    :return: list of 0-dim tensors, accuracy in percent for each k.
    """
    assert len(output.shape) == 2 and output.shape[0] == target.shape[0]
    maxk = max(topk)
    batch_size = target.size(0)

    # pred: [maxk, N] after transpose; row i holds the i-th best guess.
    _, pred = output.topk(maxk, 1, largest=True, sorted=True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        # reshape (not view): the comparison tensor can inherit the
        # transposed (non-contiguous) layout on recent PyTorch versions,
        # where view(-1) raises a RuntimeError.
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
if __name__ == '__main__':
    # Report compute cost and parameter size for each candidate network.
    for name, model_cls in (('alexnet', AlexNet),
                            ('squeezenet', SqueezeNet),
                            ('squeezenet-bypass', SqueezeNetBypass)):
        model = model_cls()
        gflops, params_size = compute_num_flops(model)
        print('{}: {:.3f} GFlops - {:.3f} MB'.format(name, gflops, params_size))
| [
"[email protected]"
] | |
55bc765f2a5614073dcc33b1956bac232d9e27db | db52e7d3bcc78b60c8c7939bc95f07cab503b3d7 | /GUI for NMT/runnmtui.py | bb81a21cd0de211568f7108da7ff99f4b1fb6ab0 | [
"BSD-3-Clause"
] | permissive | krzwolk/GroundHog | e2d495254f5794fdbc5a0de51cf49c76e51fdc6a | 3d3e6ec9b30f3ae22bda28914c637c10050a472b | refs/heads/master | 2020-12-24T18:55:18.983124 | 2016-02-09T17:20:05 | 2016-02-09T17:20:05 | 51,382,746 | 0 | 0 | null | 2016-02-09T17:09:35 | 2016-02-09T17:09:35 | null | UTF-8 | Python | false | false | 210 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Entry-point script: launch the NMT translation GUI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from nmtui import main

# Start the GUI immediately when the script is executed.
main()
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.