blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8813f6544d2ccea3832683f456c77c7e969252cd | 11d697345808e3630985d70600fd6f2bed1ac7e5 | /slacktheme/models.py | 519400c4f6b64e846e1d9bf5d6e8f82435b917a8 | []
| no_license | openhealthcare/opal-slacktheme | ce97ddac3c490ed19a3ab96dd85a17eec010cff5 | c819e02f9e4a45a554ae5b49d28b95a812a86bca | refs/heads/master | 2021-01-19T22:52:41.572813 | 2017-06-08T13:21:42 | 2017-06-08T13:21:42 | 88,879,256 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | """
Models for slacktheme
"""
#
# Warning - even if you don't have any models, please don't delete this file.
# Some parts of Django require you to have something it can import called
# slacktheme.models in order for us to let you be a Django app.
# | [
"[email protected]"
]
| |
9667e86b4ca07c2e6716741e6cf0e9de4b7bdee6 | 4ad04de638ccfed398adb5496826c0d19e755d9e | /models/hr_contract_wage_type_period.py | 50c5042a81dece83a28e3107e503007f66523598 | [
"BSD-2-Clause"
]
| permissive | aroodooteam/aro_hr_payroll | 2f399a0f2e45652d2791df48a95e5ad66a051d71 | dd95d500827566f1444e32760dda5b5b69a8906e | refs/heads/master | 2021-01-22T13:47:46.272642 | 2018-01-29T11:25:35 | 2018-01-29T11:25:35 | 100,686,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | # -*- coding: utf-8 -*-
from openerp.osv import fields, osv
import time
import logging
logger = logging.getLogger(__name__)
# Contract wage type period name
# Contract wage type period name
class hr_contract_wage_type_period(osv.osv):
    """Legacy OpenERP 7 (osv-style) model describing a wage period.

    A wage period records how many working hours make up one pay period;
    contracts reference it to convert between hourly and period wages.
    """
    _name = 'hr.contract.wage.type.period'
    _description = 'Wage Period'
    # Old-style declarative field definitions (pre-v8 `_columns` API).
    _columns = {
        'name': fields.char('Period Name', size=50,
                            required=True, select=True),
        'factor_days': fields.float('Hours in the period',
                                    digits=(12, 4), required=True,)
    }
    # Default 173.33 h -- presumably the standard monthly base
    # (40 h/week * 52 weeks / 12 months); TODO confirm with payroll rules.
    _defaults = {
        'factor_days': 173.33
    }
# Legacy OpenERP idiom: instantiating the class registers the model with the ORM.
hr_contract_wage_type_period()
| [
"[email protected]"
]
| |
27d25a48451ddf4fd37788f53f17ab7d7bbbb843 | b71f656374293c5f1238fcb449aa4dde78632861 | /eudplib/eudlib/memiof/byterw.py | c6a45de2f0bcb03d62c384d553512caacbd340cb | [
"MIT"
]
| permissive | tobeinged/eudplib | ce1cdc15f7ec6af857b4b64b5c826b3dd95d3e48 | 066c0faa200dc19e70cdb6979daf8f008b8ae957 | refs/heads/master | 2023-05-04T08:49:01.180147 | 2019-03-18T14:30:29 | 2019-03-18T14:30:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,708 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2014 trgk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from ... import core as c
from ... import ctrlstru as cs
from . import dwepdio as dwm
_epd, _suboffset = c.EUDCreateVariables(2)
class EUDByteReader:
    """Read StarCraft memory byte by byte.

    Buffers one dword (4 bytes) at a time: ``_offset`` is the epd address of
    the buffered dword, ``_b[0..3]`` hold its four bytes, and ``_suboffset``
    (0-3) is the index of the next byte to hand out.  All methods emit EUD
    triggers rather than executing directly.
    """
    def __init__(self):
        self._dw = c.EUDVariable()           # last dword fetched from memory
        self._b = c.EUDCreateVariables(4)    # the 4 bytes of that dword
        self._suboffset = c.EUDVariable()    # next byte index inside the dword (0-3)
        self._offset = c.EUDVariable()       # epd address of the buffered dword
    # -------
    @c.EUDMethod
    def seekepd(self, epdoffset):
        """Seek EUDByteReader to specific epd player address.

        Loads the dword at ``epdoffset`` into the buffer, splits it into the
        four byte variables, and resets the sub-offset to 0.
        """
        c.SeqCompute([
            (self._offset, c.SetTo, epdoffset),
            (self._suboffset, c.SetTo, 0)
        ])
        c.SetVariables(self._dw, dwm.f_dwread_epd(epdoffset))
        # f_dwbreak returns (dword, word parts, then the 4 bytes at [2:6]).
        c.SetVariables([
            self._b[0],
            self._b[1],
            self._b[2],
            self._b[3],
        ], dwm.f_dwbreak(self._dw)[2:6])
    @c.EUDMethod
    def seekoffset(self, offset):
        """Seek EUDByteReader to a specific (byte) memory address."""
        global _epd, _suboffset
        # convert offset to epd offset & suboffset: epd = offset/4 - 0x58A364/4
        c.SetVariables([_epd, _suboffset], c.f_div(offset, 4))
        c.SeqCompute([(_epd, c.Add, -0x58A364 // 4)])
        # seek to epd & set suboffset
        self.seekepd(_epd)
        c.SeqCompute([
            (self._suboffset, c.SetTo, _suboffset)
        ])
    # -------
    @c.EUDMethod
    def readbyte(self):
        """Read byte from current address. Reader will advance by 1 bytes.

        Dispatches on ``_suboffset``: for 0-2 it returns the buffered byte
        and bumps the sub-offset; for 3 it returns the last byte and then
        fetches the next dword from memory.
        :returns: Read byte
        """
        case0, case1, case2, case3, swend = [c.Forward() for _ in range(5)]
        ret = c.EUDVariable()
        # suboffset == 0
        case0 << c.NextTrigger()
        cs.EUDJumpIfNot(self._suboffset.Exactly(0), case1)
        c.SeqCompute([
            (ret, c.SetTo, self._b[0]),
            (self._suboffset, c.Add, 1)
        ])
        cs.EUDJump(swend)
        # suboffset == 1
        case1 << c.NextTrigger()
        cs.EUDJumpIfNot(self._suboffset.Exactly(1), case2)
        c.SeqCompute([
            (ret, c.SetTo, self._b[1]),
            (self._suboffset, c.Add, 1)
        ])
        cs.EUDJump(swend)
        # suboffset == 2
        case2 << c.NextTrigger()
        cs.EUDJumpIfNot(self._suboffset.Exactly(2), case3)
        c.SeqCompute([
            (ret, c.SetTo, self._b[2]),
            (self._suboffset, c.Add, 1)
        ])
        cs.EUDJump(swend)
        # suboffset == 3
        # read more dword: advance to the next dword and refill the byte buffer
        case3 << c.NextTrigger()
        c.SeqCompute([
            (ret, c.SetTo, self._b[3]),
            (self._offset, c.Add, 1),
            (self._suboffset, c.SetTo, 0)
        ])
        c.SetVariables(self._dw, dwm.f_dwread_epd(self._offset))
        c.SetVariables([
            self._b[0],
            self._b[1],
            self._b[2],
            self._b[3],
        ], dwm.f_dwbreak(self._dw)[2:6])
        swend << c.NextTrigger()
        return ret
class EUDByteWriter:
    """Write StarCraft memory byte by byte.

    Mirror of :class:`EUDByteReader`: bytes are staged in four light
    variables (``_b``) and only written back to memory as a whole dword.
    Callers must invoke :meth:`flushdword` after the last ``writebyte`` to
    commit any partially filled dword.
    """
    def __init__(self):
        self._dw = c.EUDVariable()       # dword assembled from the byte buffer
        self._suboffset = c.EUDVariable()  # next byte index inside the dword (0-3)
        self._offset = c.EUDVariable()     # epd address of the buffered dword
        self._b = [c.EUDLightVariable() for _ in range(4)]  # staged bytes
    @c.EUDMethod
    def seekepd(self, epdoffset):
        """Seek EUDByteWriter to specific epd player addresss.

        Pre-loads the existing dword at the target so unwritten bytes are
        preserved when the buffer is flushed.
        """
        c.SeqCompute([
            (self._offset, c.SetTo, epdoffset),
            (self._suboffset, c.SetTo, 0)
        ])
        c.SetVariables(self._dw, dwm.f_dwread_epd(epdoffset))
        c.SetVariables(self._b, dwm.f_dwbreak(self._dw)[2:6])
    @c.EUDMethod
    def seekoffset(self, offset):
        """Seek EUDByteWriter to a specific (byte) memory address."""
        global _epd, _suboffset
        # convert offset to epd offset & suboffset
        # (0x100000000 - 0x58A364) // 4 is -0x58A364/4 modulo 2**32.
        c.SetVariables([_epd, _suboffset], c.f_div(offset, 4))
        c.SeqCompute([(_epd, c.Add, (0x100000000 - 0x58A364) // 4)])
        self.seekepd(_epd)
        c.SeqCompute([
            (self._suboffset, c.SetTo, _suboffset)
        ])
    @c.EUDMethod
    def writebyte(self, byte):
        """Write byte to current position.
        Write a byte to current position of EUDByteWriter. Writer will advance
        by 1 byte.
        .. note::
            Bytes could be buffered before written to memory. After you
            finished using writebytes, you must call `flushdword` to flush the
            buffer.
        """
        # Bytes 0-2 only stage the value; byte 3 completes a dword, which is
        # flushed to memory before moving the buffer to the next dword.
        cs.EUDSwitch(self._suboffset)
        for i in range(3):
            if cs.EUDSwitchCase()(i):
                cs.DoActions([
                    self._b[i].SetNumber(byte),
                    self._suboffset.AddNumber(1)
                ])
                cs.EUDBreak()
        if cs.EUDSwitchCase()(3):
            cs.DoActions(self._b[3].SetNumber(byte))
            self.flushdword()
            cs.DoActions([
                self._offset.AddNumber(1),
                self._suboffset.SetNumber(0),
            ])
            # Refill the byte buffer from the next dword so partial writes
            # there keep the surrounding bytes intact.
            c.SetVariables(self._dw, dwm.f_dwread_epd(self._offset))
            c.SetVariables(self._b, dwm.f_dwbreak(self._dw)[2:6])
        cs.EUDEndSwitch()
    @c.EUDMethod
    def flushdword(self):
        """Flush buffer: recombine the 4 staged bytes and write the dword."""
        # mux bytes: for every bit (high to low) of every byte j, move the bit
        # from _b[j] into bit position (i + j*8) of _dw.  This consumes
        # (zeroes) the _b variables as a side effect.
        c.RawTrigger(actions=self._dw.SetNumber(0))
        for i in range(7, -1, -1):
            for j in range(4):
                c.RawTrigger(
                    conditions=[
                        self._b[j].AtLeast(2 ** i)
                    ],
                    actions=[
                        self._b[j].SubtractNumber(2 ** i),
                        self._dw.AddNumber(2 ** (i + j * 8))
                    ]
                )
        dwm.f_dwwrite_epd(self._offset, self._dw)
| [
"[email protected]"
]
| |
8c3dff6729a6e03970fa298972d23866dabed387 | 8ea28cd0bc3f120faed01b0a2a5cd435a01ef809 | /liclient/__init__.py | 37bf8ec6e9ca51943c83086d42f59b7882242f33 | [
"MIT"
]
| permissive | Work4Labs/LinkedIn-Client-Library | 9e02ce941e149f3113da936f96bd8401f74bc5c1 | eb08c78e35d6e37b415a9da9820a7977fd895f81 | refs/heads/master | 2021-01-20T16:41:53.968545 | 2017-07-07T21:21:36 | 2017-07-07T21:21:36 | 23,850,972 | 0 | 0 | null | 2017-07-07T21:21:37 | 2014-09-09T21:30:18 | Python | UTF-8 | Python | false | false | 20,308 | py | #! usr/bin/env python
import datetime
import re
import time
import urllib
import urlparse
import oauth2 as oauth
from httplib2 import HttpLib2ErrorWithResponse
import json
from parsers.lixml import LinkedInXMLParser
from lxml import etree
from lxml.builder import ElementMaker
class LinkedInAPI(object):
    """Thin OAuth 1.0a client for the (legacy, XML/JSON) LinkedIn REST API.

    Holds the OAuth consumer credentials and endpoint URLs, and exposes one
    method per API operation.  Most methods take the ``access_token`` dict
    returned by :meth:`get_access_token`.

    Fixed defects vs. the previous revision:
      * ``get_user_profile`` asserted on ``selectors`` before the ``None``
        default was allowed for, so every default call raised AssertionError.
      * ``get_user_connections`` raised NameError when ``selectors`` was not
        supplied (``url`` was never bound).
      * ``dt_obj_to_string`` called ``int()`` on a ``struct_time`` (TypeError);
        the millisecond conversion now wraps ``time.mktime`` correctly.
      * bare ``except:`` in ``clean_dates`` narrowed to the real failure modes.
      * mutable default argument ``kws={}`` in ``prepare_request``.
      * ``invitation_factory`` dropped the ``<recipient>`` wrapper by
        unpacking a single lxml element.
    """

    def __init__(self, ck, cs):
        # ck / cs: OAuth consumer key and secret issued by LinkedIn.
        self.consumer_key = ck
        self.consumer_secret = cs
        self.api_profile_url = 'http://api.linkedin.com/v1/people/~'
        self.api_profile_connections_url = 'http://api.linkedin.com/v1/people/~/connections'
        self.api_network_update_url = 'http://api.linkedin.com/v1/people/~/network'
        self.api_comment_feed_url = 'http://api.linkedin.com/v1/people/~/network/updates/' + \
                                    'key={NETWORK UPDATE KEY}/update-comments'
        self.api_update_status_url = 'http://api.linkedin.com/v1/people/~/current-status'
        self.api_share = 'http://api.linkedin.com/v1/people/~/shares'
        self.api_mailbox_url = 'http://api.linkedin.com/v1/people/~/mailbox'
        self.base_url = 'https://api.linkedin.com'
        self.li_url = 'http://www.linkedin.com'
        self.request_token_path = '/uas/oauth/requestToken'
        self.access_token_path = '/uas/oauth/accessToken'
        self.authorize_path = '/uas/oauth/authorize'
        self.consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
        # Network update type codes accepted by get_network_updates().
        self.valid_network_update_codes = ['ANSW', 'APPS', 'CONN', 'JOBS',
                                           'JGRP', 'PICT', 'RECU', 'PRFU',
                                           'QSTN', 'STAT']

    def get_request_token(self, redirect_url=None):
        """
        Get a request token based on the consumer key and secret to supply the
        user with the authorization URL they can use to give the application
        access to their LinkedIn accounts
        """
        client = oauth.Client(self.consumer)
        request_token_url = self.base_url + self.request_token_path
        additional_param = {}
        if redirect_url:
            # OAuth 1.0a: the callback must be sent with the request-token call.
            additional_param = {
                'body': "oauth_callback=%s" % urllib.quote_plus(redirect_url),
                'headers': {'Content-Type': 'application/x-www-form-urlencoded'}
            }
        resp, content = client.request(request_token_url, 'POST', **additional_param)
        request_token = dict(urlparse.parse_qsl(content))
        return request_token

    def get_access_token(self, request_token, verifier):
        """
        Get an access token based on the generated request_token and the
        oauth verifier supplied in the return URL when a user authorizes their
        application
        """
        token = oauth.Token(
            request_token['oauth_token'],
            request_token['oauth_token_secret']
        )
        token.set_verifier(verifier)
        client = oauth.Client(self.consumer, token)
        access_token_url = self.base_url + self.access_token_path
        resp, content = client.request(access_token_url, 'POST')
        access_token = dict(urlparse.parse_qsl(content))
        return access_token

    def get_user_profile(self, access_token, selectors=None, **kwargs):
        """
        Get a user profile. If keyword argument "id" is not supplied, this
        returns the current user's profile, else it will return the profile of
        the user whose id is specificed. The "selectors" keyword argument takes
        a list of LinkedIn compatible field selectors.
        """
        # Now using json api - GL
        kwargs['format'] = 'json'
        url = self.api_profile_url
        if selectors:
            # BUGFIX: only validate selectors when they are actually supplied;
            # asserting unconditionally made the default (None) call fail.
            assert isinstance(selectors, (tuple, list)), '"Keyword argument "selectors" must be of type "list" or "tuple"'
            url = self.prepare_field_selectors(selectors, url)
        user_token, url = self.prepare_request(access_token, url, kwargs)
        client = oauth.Client(self.consumer, user_token)
        resp, content = client.request(url, 'GET')
        if resp.status >= 500:
            raise HttpLib2ErrorWithResponse(resp.reason, resp, content)
        return resp, json.loads(content)

    def get_user_connections(self, access_token, selectors=None, **kwargs):
        """
        Get the connections of the current user. Valid keyword arguments are
        "count" and "start" for the number of profiles you wish returned. Types
        are automatically converted from integer to string for URL formatting
        if necessary.
        """
        # Now using json api - GL
        kwargs['format'] = 'json'
        # BUGFIX: url was previously only bound inside "if selectors:",
        # raising NameError for calls without selectors.
        url = self.api_profile_connections_url
        if selectors:
            assert isinstance(selectors, (tuple, list)), '"Keyword argument "selectors" must be of type "list" or "tuple"'
            url = self.prepare_field_selectors(selectors, url)
        user_token, url = self.prepare_request(access_token, url, kwargs)
        client = oauth.Client(self.consumer, user_token)
        resp, content = client.request(url, 'GET')
        if resp.status >= 500:
            raise HttpLib2ErrorWithResponse(resp.reason, resp, content)
        return resp, json.loads(content)

    def get_network_updates(self, access_token, **kwargs):
        """Get network updates for the current user. Valid keyword arguments are
        "count", "start", "type", "before", and "after". "Count" and "start" are for the number
        of updates to be returned. "Type" specifies what type of update you are querying.
        "Before" and "after" set the time interval for the query. Valid argument types are
        an integer representing UTC with millisecond precision or a Python datetime object.
        """
        if 'type' in kwargs.keys():
            assert isinstance(kwargs['type'], (tuple, list)), 'Keyword argument "type" must be of type "list"'
            [self.check_network_code(c) for c in kwargs['type']]
        # Normalise datetime objects into the millisecond timestamps the API expects.
        if 'before' in kwargs.keys():
            kwargs['before'] = self.dt_obj_to_string(kwargs['before']) if kwargs.get('before') else None
        if 'after' in kwargs.keys():
            kwargs['after'] = self.dt_obj_to_string(kwargs['after']) if kwargs.get('after') else None
        user_token, url = self.prepare_request(access_token, self.api_network_update_url, kwargs)
        client = oauth.Client(self.consumer, user_token)
        resp, content = client.request(url, 'GET')
        content = self.clean_dates(content)
        return LinkedInXMLParser(content).results

    def get_comment_feed(self, access_token, network_key):
        """
        Get a comment feed for a particular network update. Requires the update key
        for the network update as returned by the API.
        """
        url = re.sub(r'\{NETWORK UPDATE KEY\}', network_key, self.api_comment_feed_url)
        user_token, url = self.prepare_request(access_token, url)
        client = oauth.Client(self.consumer, user_token)
        resp, content = client.request(url, 'GET')
        content = self.clean_dates(content)
        return LinkedInXMLParser(content).results

    def submit_comment(self, access_token, network_key, bd):
        """
        Submit a comment to a network update. Requires the update key for the network
        update that you will be commenting on. The comment body is the last positional
        argument. NOTE: The XML will be applied to the comment for you.
        """
        bd_pre_wrapper = '<?xml version="1.0" encoding="UTF-8"?><update-comment><comment>'
        bd_post_wrapper = '</comment></update-comment>'
        xml_request = bd_pre_wrapper + bd + bd_post_wrapper
        url = re.sub(r'\{NETWORK UPDATE KEY\}', network_key, self.api_comment_feed_url)
        user_token, url = self.prepare_request(access_token, url)
        client = oauth.Client(self.consumer, user_token)
        return client.request(url, method='POST', body=xml_request, headers={'Content-Type': 'application/xml'})

    def set_status_update(self, access_token, bd):
        """
        Set the status for the current user. The status update body is the last
        positional argument. NOTE: The XML will be applied to the status update
        for you.
        WARNING: the status to set should be utf-8 encoded before passing it to that function
        """
        bd_pre_wrapper = '<?xml version="1.0" encoding="UTF-8"?><current-status>'
        bd_post_wrapper = '</current-status>'
        xml_request = bd_pre_wrapper + bd + bd_post_wrapper
        user_token, url = self.prepare_request(access_token, self.api_update_status_url)
        client = oauth.Client(self.consumer, user_token)
        return client.request(url, method='PUT', body=xml_request)

    def share(self, access_token, share_content):
        '''
        Post a share (JSON payload) on behalf of the current user.
        WARNING: all the parameter of the share content to set should be utf-8
        encoded before passing it to that function
        '''
        user_token, url = self.prepare_request(access_token, self.api_share)
        client = oauth.Client(self.consumer, user_token)
        resp, content = client.request(
            url,
            method='POST',
            body=json.dumps(share_content),
            headers={
                'x-li-format': 'json',
                'Content-Type': 'application/json'
            }
        )
        if resp.status >= 500:
            raise HttpLib2ErrorWithResponse(resp.reason, resp, content)
        return resp, json.loads(content)

    def search(self, access_token, data, field_selector_string=None):
        """
        Use the LinkedIn Search API to find users. The criteria for your search
        should be passed as the 2nd positional argument as a dictionary of key-
        value pairs corresponding to the paramters allowed by the API. Formatting
        of arguments will be done for you (i.e. lists of keywords will be joined
        with "+")
        """
        srch = LinkedInSearchAPI(data, access_token, field_selector_string)
        client = oauth.Client(self.consumer, srch.user_token)
        rest, content = client.request(srch.generated_url, method='GET')
        # print content # useful for debugging...
        return LinkedInXMLParser(content).results

    def send_message(self, access_token, recipients, subject, body):
        """
        Send a message to a connection. "Recipients" is a list of ID numbers,
        "subject" is the message subject, and "body" is the body of the message.
        The LinkedIn API does not allow HTML in messages. All XML will be applied
        for you.
        """
        assert isinstance(recipients, (tuple, list)), '"Recipients argument" (2nd position) must be of type "list"'
        mxml = self.message_factory(recipients, subject, body)
        user_token, url = self.prepare_request(access_token, self.api_mailbox_url)
        client = oauth.Client(self.consumer, user_token)
        return client.request(url, method='POST', body=mxml, headers={'Content-Type': 'application/xml'})

    def send_invitation(self, access_token, recipients, subject, body, **kwargs):
        """
        Send an invitation to a user. "Recipients" is an ID number OR email address
        (see below), "subject" is the message subject, and "body" is the body of the message.
        The LinkedIn API does not allow HTML in messages. All XML will be applied
        for you.
        NOTE:
        If you pass an email address as the recipient, you MUST include "first_name" AND
        "last_name" as keyword arguments. Conversely, if you pass a member ID as the
        recipient, you MUST include "name" and "value" as keyword arguments. Documentation
        for obtaining those values can be found on the LinkedIn website.
        """
        if 'first_name' in kwargs.keys():
            mxml = self.invitation_factory(recipients, subject, body,
                                           first_name=kwargs['first_name'], last_name=kwargs['last_name'])
        else:
            mxml = self.invitation_factory(recipients, subject, body,
                                           name=kwargs['name'], value=kwargs['value'])
        user_token, url = self.prepare_request(access_token, self.api_mailbox_url)
        client = oauth.Client(self.consumer, user_token)
        return client.request(url, method='POST', body=mxml, headers={'Content-Type': 'application/xml'})

    def prepare_request(self, access_token, url, kws=None):
        """Build the oauth user token and the fully-parameterised request URL.

        ``kws`` maps query-argument names to values; the special key ``id``
        is spliced into the URL path instead of the query string.
        """
        # BUGFIX: was a mutable default argument (kws={}); use a None sentinel
        # so the default dict is never shared between calls.
        if kws is None:
            kws = {}
        user_token = oauth.Token(access_token['oauth_token'],
                                 access_token['oauth_token_secret'])
        prep_url = url
        if kws and 'id' in kws.keys():
            prep_url = self.append_id_args(kws['id'], prep_url)
            del kws['id']
        for k in kws:
            if kws[k]:
                # First arg uses '?', subsequent ones '&'.
                if '?' not in prep_url:
                    prep_url = self.append_initial_arg(k, kws[k], prep_url)
                else:
                    prep_url = self.append_sequential_arg(k, kws[k], prep_url)
        prep_url = re.sub('&&', '&', prep_url)
        return user_token, prep_url

    def append_id_args(self, ids, prep_url):
        """Splice one or more member ids into the '~' position of the URL."""
        assert isinstance(ids, (tuple, list)), 'Keyword argument "id" must be a list'
        if len(ids) > 1:
            prep_url = re.sub('/~', '::(', prep_url)  # sub out the ~ if a user wants someone else's profile
            for i in ids:
                prep_url += 'id=' + i + ','
            prep_url = re.sub(',$', ')', prep_url)
        else:
            prep_url = re.sub('~', 'id=' + ids[0], prep_url)
        return prep_url

    def append_initial_arg(self, key, args, prep_url):
        """Append the first query argument ('?key=value[&key=value...]')."""
        assert '?' not in prep_url, 'Initial argument has already been applied to %s' % prep_url
        if isinstance(args, (tuple, list)):
            prep_url += '?' + key + '=' + str(args[0])
            if len(args) > 1:
                prep_url += ''.join(['&' + key + '=' + str(arg) for arg in args[1:]])
        else:
            prep_url += '?' + key + '=' + str(args)
        return prep_url

    def append_sequential_arg(self, key, args, prep_url):
        """Append a subsequent query argument ('&key=value[&key=value...]')."""
        if isinstance(args, (tuple, list)):
            prep_url += '&' + ''.join(['&' + key + '=' + str(arg) for arg in args])
        else:
            prep_url += '&' + key + '=' + str(args)
        return prep_url

    def prepare_field_selectors(self, selectors, url):
        """Append a LinkedIn field-selector suffix, e.g. ':(id,first-name)'."""
        prep_url = url
        selector_string = ':('
        selector_string += ','.join(selectors)
        selector_string += ')'
        prep_url += selector_string
        return prep_url

    def check_network_code(self, code):
        """Raise ValueError for network-update type codes the API rejects."""
        if code not in self.valid_network_update_codes:
            raise ValueError('Code %s not a valid update code' % code)

    def clean_dates(self, content):
        """Rewrite millisecond-epoch text nodes in the XML into readable dates."""
        data = etree.fromstring(content)
        for d in data.iter(tag=etree.Element):
            # BUGFIX: was a bare "except:"; only int() (None text / non-numeric)
            # can realistically fail here, so catch just those cases.
            try:
                trial = int(d.text)
            except (TypeError, ValueError):
                continue
            # Heuristic: >8 digits means a millisecond timestamp, not a count.
            if len(d.text) > 8:
                dt = datetime.datetime.fromtimestamp(float(trial) / 1000)
                d.text = dt.strftime('%m/%d/%Y %I:%M:%S')
        return etree.tostring(data)

    def dt_obj_to_string(self, dtobj):
        """Convert a datetime into the millisecond epoch integer the API wants;
        pass integers/strings through unchanged."""
        if isinstance(dtobj, (int, str, long)):
            return dtobj
        elif hasattr(dtobj, 'timetuple'):
            # BUGFIX: was time.mktime(int(dtobj.timetuple()) * 1000), which
            # raises TypeError (int() of a struct_time). Convert to seconds
            # first, then scale to milliseconds.
            return int(time.mktime(dtobj.timetuple()) * 1000)
        else:
            raise TypeError('Inappropriate argument type - use either a datetime object, \
                            string, or integer for timestamps')

    def message_factory(self, recipients, subject, body):
        """Build the <mailbox-item> XML payload for send_message()."""
        rec_path = '/people/'
        E = ElementMaker()
        MAILBOX_ITEM = E.mailbox_item
        RECIPIENTS = E.recipients
        RECIPIENT = E.recipient
        PERSON = E.person
        SUBJECT = E.subject
        BODY = E.body
        recs = [RECIPIENT(PERSON(path=rec_path + r)) for r in recipients]
        mxml = MAILBOX_ITEM(
            RECIPIENTS(
                *recs
            ),
            SUBJECT(subject),
            BODY(body)
        )
        # ElementMaker cannot emit '-' in tag names, so translate afterwards.
        return re.sub('mailbox_item', 'mailbox-item', etree.tostring(mxml))

    def invitation_factory(self, recipient, subject, body, **kwargs):
        """Build the <mailbox-item> invitation XML for send_invitation()."""
        id_rec_path = '/people/id='
        email_rec_path = '/people/email='
        E = ElementMaker()
        MAILBOX_ITEM = E.mailbox_item
        RECIPIENTS = E.recipients
        RECIPIENT = E.recipient
        PERSON = E.person
        SUBJECT = E.subject
        BODY = E.body
        CONTENT = E.item_content
        REQUEST = E.invitation_request
        CONNECT = E.connect_type
        FIRST = E.first_name
        LAST = E.last_name
        AUTH = E.authorization
        NAME = E.name
        VALUE = E.value
        if '@' not in recipient:
            recs = RECIPIENT(PERSON(path=id_rec_path + recipient))
            auth = CONTENT(REQUEST(CONNECT('friend'), AUTH(NAME(kwargs['name']), VALUE(kwargs['value']))))
        else:
            recs = RECIPIENT(
                PERSON(
                    FIRST(kwargs['first_name']),
                    LAST(kwargs['last_name']),
                    path=email_rec_path + recipient
                )
            )
            auth = CONTENT(REQUEST(CONNECT('friend')))
        # BUGFIX: was RECIPIENTS(*recs) -- unpacking a single lxml element
        # iterates its children, which dropped the <recipient> wrapper
        # (compare message_factory). Pass the element itself instead.
        mxml = MAILBOX_ITEM(
            RECIPIENTS(
                recs
            ),
            SUBJECT(subject),
            BODY(body),
            auth
        )
        # NOTE(review): this global '_' -> '-' substitution also rewrites
        # underscores inside the subject/body text; confirm intent before
        # tightening it to tag names only.
        return re.sub('_', '-', etree.tostring(mxml))
class LinkedInSearchAPI(LinkedInAPI):
    """Builds the people-search request URL from a dict of search criteria.

    Fixed defects vs. the previous revision:
      * ``do_process`` discarded the URL returned by every routing handler
        (strings are immutable), so routed criteria such as ``keywords`` were
        silently dropped from the generated URL.
      * ``del params[p]`` while iterating the dict raised RuntimeError;
        iteration now runs over a snapshot of the keys.
      * ``location_type`` did not return the modified URL.
    """

    def __init__(self, params, access_token, field_selector_string=None):
        self.api_search_url = 'http://api.linkedin.com/v1/people-search'
        if field_selector_string:
            self.api_search_url += ':' + field_selector_string
        # Criteria that need special URL formatting; anything not listed here
        # is appended verbatim by process_remaining_params().
        self.routing = {
            'keywords': self.keywords,
            'name': self.name,
            'current_company': self.current_company,
            'current_title': self.current_title,
            'location_type': self.location_type,
            'network': self.network,
            'sort_criteria': self.sort_criteria
        }
        self.user_token, self.generated_url = self.do_process(access_token, params)

    def do_process(self, access_token, params):
        """Return (oauth user token, fully-built search URL) for ``params``."""
        assert isinstance(params, dict), 'The passed parameters to the Search API must be a dictionary.'
        user_token = oauth.Token(
            access_token['oauth_token'],
            access_token['oauth_token_secret']
        )
        url = self.api_search_url
        # Iterate over a snapshot of the keys so deleting handled entries
        # (BUGFIX: mutated the dict mid-iteration) is safe.
        for p in list(params):
            handler = self.routing.get(p)
            if handler:
                # BUGFIX: the handler's return value was previously discarded,
                # so routed criteria never reached the URL.
                url = handler(url, params[p])
                del params[p]
        url = self.process_remaining_params(url, params)
        return user_token, url

    def process_remaining_params(self, prep_url, ps):
        """Append all criteria that have no dedicated routing handler."""
        for p in ps:
            prep_url = self.append_arg(p, ps[p], prep_url)
        return prep_url

    def keywords(self, url, ps):
        return self.list_argument(url, ps, 'keywords')

    def name(self, url, ps):
        return self.list_argument(url, ps, 'name')

    def current_company(self, url, ps):
        return self.true_false_argument(url, ps, 'current-company')

    def current_title(self, url, ps):
        return self.true_false_argument(url, ps, 'current-title')

    def location_type(self, prep_url, ps):
        assert ps in ('I', 'Y'), 'Valid parameter types for search-location-type are "I" and "Y"'
        # BUGFIX: result was previously dropped; return the updated URL.
        return self.append_arg('search-location-type', ps, prep_url)

    def network(self, prep_url, ps):
        assert ps in ('in', 'out'), 'Valid parameter types for network are "in" and "out"'
        return self.append_arg('network', ps, prep_url)

    def sort_criteria(self, prep_url, ps):
        assert ps in ('recommenders', 'distance', 'relevance'), 'Valid parameter types for sort-criteria \
                        are "recommenders", "distance", and "relevance"'
        return self.append_arg('sort-criteria', ps, prep_url)

    def true_false_argument(self, prep_url, ps, arg):
        """Append arg as the literal strings 'true'/'false'."""
        ps = 'true' if ps else 'false'
        return self.append_arg(arg, ps, prep_url)

    def list_argument(self, prep_url, ps, arg):
        """Append a list-valued criterion joined with '+' (API convention)."""
        li = '+'.join(ps)
        return self.append_arg(arg, li, prep_url)

    def append_arg(self, key, arg, prep_url):
        """Append with '?' for the first argument, falling back to '&'."""
        try:
            prep_url = self.append_initial_arg(key, arg, prep_url)
        except AssertionError:
            prep_url = self.append_sequential_arg(key, arg, prep_url)
        return prep_url
| [
"[email protected]"
]
| |
dcf9d83ba4bfa75b310253049edaadb0ac26101c | 5c056604ecbfdd6e3d20c6d3b891855767c431b8 | /CIFAR-10/DRE-F-SP+RS/models/ResNet_extract.py | c0f759d4aeba34d997dc7326df08db4232fb134d | []
| no_license | pkulwj1994/cDR-RS | 135d1fc9504304ba0303fe5acc3594ea27531557 | 661d694d6a8dfb44885271bdfd92d6dc150a40f8 | refs/heads/main | 2023-08-30T05:37:07.449304 | 2021-11-16T03:17:42 | 2021-11-16T03:17:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,346 | py | '''
ResNet-based model to map an image from pixel space to a features space.
Need to be pretrained on the dataset.
codes are based on
@article{
zhang2018mixup,
title={mixup: Beyond Empirical Risk Minimization},
author={Hongyi Zhang, Moustapha Cisse, Yann N. Dauphin, David Lopez-Paz},
journal={International Conference on Learning Representations},
year={2018},
url={https://openreview.net/forum?id=r1Ddp1-Rb},
}
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
IMG_SIZE=32  # input spatial size (CIFAR-10 per the project path)
NC=3  # number of input channels (RGB)
resize=(32,32)  # target size for the disabled interpolate call in ResNet_extract.forward
class BasicBlock(nn.Module):
    """Two-convolution residual block (ResNet-18/34 style).

    Kept byte-identical in structure: submodule attribute names define the
    state_dict keys used by pretrained checkpoints.
    """
    # Output channels = expansion * planes; BasicBlock does not expand.
    expansion = 1
    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # Identity shortcut by default; 1x1 projection when the spatial size
        # or channel count changes so the residual addition shapes match.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )
    def forward(self, x):
        # conv-bn-relu, conv-bn, add shortcut, final relu.
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out
class Bottleneck(nn.Module):
    """Three-convolution bottleneck residual block (ResNet-50/101/152 style).

    1x1 reduce -> 3x3 -> 1x1 expand; output has ``expansion * planes``
    channels.  Attribute names are preserved for state_dict compatibility.
    """
    # The final 1x1 conv expands the channel count fourfold.
    expansion = 4
    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion*planes)
        # Projection shortcut when stride or channel count changes.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )
    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out
class ResNet_extract(nn.Module):
    """ResNet backbone that returns both class logits and a flat feature map.

    ``forward`` returns ``(out, features)`` where ``features`` has
    ``img_height * img_width * nc`` dimensions (the pixel-space size) and
    ``out`` are the ``num_classes`` logits computed from those features.
    """
    def __init__(self, block, num_blocks, num_classes=100, nc=NC, img_height=IMG_SIZE, img_width=IMG_SIZE):
        super(ResNet_extract, self).__init__()
        self.in_planes = 64
        # Convolutional trunk: stem conv + 4 residual stages + 4x4 avg pool.
        # NOTE(review): the AvgPool2d(4) assumes 32x32 inputs (4x4 maps after
        # three stride-2 stages) -- confirm before feeding other sizes.
        self.main = nn.Sequential(
            nn.Conv2d(nc, 64, kernel_size=3, stride=1, padding=1, bias=False), # h=h
            nn.BatchNorm2d(64),
            nn.ReLU(),
            self._make_layer(block, 64, num_blocks[0], stride=1), # h=h
            self._make_layer(block, 128, num_blocks[1], stride=2),
            self._make_layer(block, 256, num_blocks[2], stride=2),
            self._make_layer(block, 512, num_blocks[3], stride=2),
            nn.AvgPool2d(kernel_size=4)
        )
        # Maps pooled trunk output to the feature vector that is returned.
        self.classifier_1 = nn.Sequential(
            nn.Linear(512*block.expansion, img_height*img_width*nc),
            # nn.BatchNorm1d(img_height*img_width*nc),
            # nn.ReLU(),
        )
        # Final linear head producing the class logits.
        self.classifier_2 = nn.Sequential(
            nn.Linear(img_height*img_width*nc, num_classes)
        )
    def _make_layer(self, block, planes, num_blocks, stride):
        # First block of a stage applies the stride; the rest use stride 1.
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)
    def forward(self, x):
        # x = nn.functional.interpolate(x,size=resize,mode='bilinear',align_corners=True)
        features = self.main(x)
        features = features.view(features.size(0), -1)
        features = self.classifier_1(features)
        out = self.classifier_2(features)
        # Returns (logits, intermediate features) -- both used by callers.
        return out, features
def ResNet18_extract(num_classes=10):
    """ResNet-18 feature extractor: BasicBlock with [2, 2, 2, 2] stages."""
    return ResNet_extract(block=BasicBlock, num_blocks=[2, 2, 2, 2], num_classes=num_classes)
def ResNet34_extract(num_classes=10):
    """ResNet-34 feature extractor: BasicBlock with [3, 4, 6, 3] stages."""
    return ResNet_extract(block=BasicBlock, num_blocks=[3, 4, 6, 3], num_classes=num_classes)
def ResNet50_extract(num_classes=10):
    """ResNet-50 feature extractor: Bottleneck with [3, 4, 6, 3] stages."""
    return ResNet_extract(block=Bottleneck, num_blocks=[3, 4, 6, 3], num_classes=num_classes)
def ResNet101_extract(num_classes=10):
    """ResNet-101 feature extractor: Bottleneck with [3, 4, 23, 3] stages."""
    return ResNet_extract(block=Bottleneck, num_blocks=[3, 4, 23, 3], num_classes=num_classes)
def ResNet152_extract(num_classes=10):
    """ResNet-152 feature extractor: Bottleneck with [3, 8, 36, 3] stages."""
    return ResNet_extract(block=Bottleneck, num_blocks=[3, 8, 36, 3], num_classes=num_classes)
if __name__ == "__main__":
    # Smoke test: push a random CIFAR-sized batch through ResNet-34 and
    # print the output shapes plus parameter counts.
    # NOTE(review): requires a CUDA device; fails on CPU-only hosts.
    net = ResNet34_extract(num_classes=10).cuda()
    x = torch.randn(16,3,32,32).cuda()
    out, features = net(x)
    print(out.size())
    print(features.size())
    def get_parameter_number(net):
        # Count all parameters vs. trainable (requires_grad) parameters.
        total_num = sum(p.numel() for p in net.parameters())
        trainable_num = sum(p.numel() for p in net.parameters() if p.requires_grad)
        return {'Total': total_num, 'Trainable': trainable_num}
    print(get_parameter_number(net))
| [
"[email protected]"
]
| |
d2d53550d8562b31f2ef00de641a54b3c591e3fd | 5bb8b4c7faeebd16da16ecbcd4a98aabaf688e8f | /data_tools/walker/src-cikm/build_graph/preprocess_venue_word.py | 3d7fc4f53a23214e5cb8bba6ec763cd94551ca7c | []
| no_license | xiaoqinzhe/vrdetection | 014fc2b61c9b30dd2699fdba41089b18b7f060be | 604a812a21a98d72ba8e23a716eb72153bdaa7c4 | refs/heads/master | 2023-07-04T07:44:12.141404 | 2021-08-01T06:21:17 | 2021-08-01T06:21:17 | 150,063,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | #coding:utf-8
# Build venue and word-document-frequency tables from paper_title_venue.txt.
# Input rows look like: "<paper_id> <title-words-joined-by-dashes> <venue>".
# Outputs:
#   ../dataset/venues.txt  -- sorted venues (excluding the 'none' placeholder)
#   ../dataset/word_df.txt -- title words with their venue document frequency
file_name = '../dataset/paper_title_venue.txt'
venues = set()
word_df = {}  # word -> set of venues whose papers contain the word
with open(file_name) as fh:
    for line in fh:
        paper_id, title, venue = line.strip().split()
        for word in title.split('-'):
            # setdefault replaces the explicit membership check + insert
            word_df.setdefault(word, set()).add(venue)
        venues.add(venue)
# discard() (unlike remove()) tolerates inputs with no 'none' row at all
venues.discard('none')
for venue_set in word_df.values():
    venue_set.discard('none')
venues = sorted(venues)
with open('../dataset/venues.txt', 'w') as fh:
    for venue in venues:
        fh.write('{}\n'.format(venue))
with open('../dataset/word_df.txt', 'w') as fh:
    for word in sorted(word_df):
        venue_set = word_df[word]
        # Skip single-letter words and words seen in fewer than 3 venues.
        if len(word) == 1 or len(venue_set) < 3:
            continue
        # float() forces true division even under Python 2 (the file carries
        # a '#coding' header, so it may still be run with py2, where the
        # original int/int division made every df print as 0.0000).
        df = len(venue_set) / float(len(venues))
        fh.write('{} {:.4f}\n'.format(word, df))
| [
"[email protected]"
]
| |
cc997c66aa7c0603bbc734ce62e689cd06b97a65 | 1b5d39f9dd5126b6f21e83efe58b7e86ef8d94f2 | /CodeChef/LTIME80B/CARR.py | 22f1cdd88dba474004c0ee9865be462ca2cd7494 | []
| no_license | jai-dewani/Competitive-Programming | dfad61106a648b80cc97c85cc5c8bc5d1cd335d9 | a2006e53b671ba56d4b0a20dd81fd0e21d0b0806 | refs/heads/master | 2021-07-03T16:08:02.466423 | 2020-09-24T16:22:28 | 2020-09-24T16:22:28 | 178,812,685 | 1 | 2 | null | 2019-10-18T14:43:19 | 2019-04-01T07:51:47 | Python | UTF-8 | Python | false | false | 489 | py | from random import randint
mod = 10**9+7
for _ in range(int(input())):
n,m = map(int,input().strip().split())
# n = randint(1,10**10)
# m = randint(1,10**10)
answer = 0
fact = m*pow(m-1,n-1,mod)
# for i in range(n-1):
# fact *= (m-1)
answer += fact
if(n>2):
fact = m*pow(m-1,n-2,mod)
elif n==2:
fact = m
# for i in range(n-2):
# fact *= (m-1)
fact*= (n-1)
fact %= mod
answer += fact
print(answer%mod) | [
"[email protected]"
]
| |
5a0f58aac33d8bad2c16cd0bc92a93704417daad | 4cdc9ba739f90f6ac4bcd6f916ba194ada77d68c | /剑指offer/第五遍/32-2.分行从上到下打印二叉树.py | cac8702eac2082f33f6071a4d95e0ccd60552e50 | []
| no_license | leilalu/algorithm | bee68690daf836cc5807c3112c2c9e6f63bc0a76 | 746d77e9bfbcb3877fefae9a915004b3bfbcc612 | refs/heads/master | 2020-09-30T15:56:28.224945 | 2020-05-30T03:28:39 | 2020-05-30T03:28:39 | 227,313,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | """
从上到下按层打印二叉树,同一层的节点按从左到右的顺序打印,每一层打印到一行。
例如:
给定二叉树: [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
返回其层次遍历结果:
[
[3],
[9,20],
[15,7]
]
"""
class Solution:
    def levelOrder(self, root):
        """Breadth-first traversal returning one list of values per level.

        root: binary-tree node with .val / .left / .right, or None.
        Returns e.g. [[3], [9, 20], [15, 7]] for the tree in the module
        docstring, and [] for an empty tree.

        Uses collections.deque: the original list.pop(0) is O(n) per
        dequeue, making the whole traversal O(n^2) in the worst case.
        """
        if not root:
            return []
        from collections import deque
        result = []
        queue = deque([root])
        while queue:
            # len(queue) is exactly the number of nodes on the current
            # level, so no this-level/next-level counters are needed.
            level = []
            for _ in range(len(queue)):
                node = queue.popleft()
                level.append(node.val)
                if node.left:
                    queue.append(node.left)
                if node.right:
                    queue.append(node.right)
            result.append(level)
        return result
| [
"[email protected]"
]
| |
f2d79542f326248beeeccdf981c0d6d9b68c644b | cc56916d3451a2c228fd5a4b649d4b37e4896665 | /dxb/libs/eslib.py | 0314fefce22303495f79e2a95dad6b7236a810c1 | []
| no_license | nyflxy/longan | 57ee63fe24720bfb6b6b1769acc5ba112dc08f18 | 5db84101dc11c4524e38f29f464ca522ec88aff4 | refs/heads/master | 2021-01-11T00:14:31.933873 | 2016-11-28T12:31:13 | 2016-11-28T12:31:13 | 70,554,505 | 0 | 1 | null | 2016-10-13T01:24:10 | 2016-10-11T04:00:19 | JavaScript | UTF-8 | Python | false | false | 15,071 | py | #coding=utf-8
import datetime
import elasticsearch
es = elasticsearch.Elasticsearch()
if __name__ == "__main__":
# 创建
es.index("newbie","link",{"name":"lxy","age":27,"create_date":datetime.datetime.now()},)
print es.search(index="my-index",doc_type="test-type")
print es.count("newbie","link",{
"query":{
"bool":{
"must":{"match":{"company.name":"qingdun"}},
# "must_not":{"match":{"name":"niyoufa"}},
}
}
})
es.delete("newbie","link",1)
es.delete_by_query("newbie",)
result = es.search("newbie","user",{
"query":{
"term":{
"age":25,
}
}
})
result = es.search("newbie", "user", {
"query": {
"terms": {
"age": [20,25,30],
}
}
})
result = es.search("newbie", "user", {
"query": {
"range": {
"age": {
"gte":25,
"lte":2
},
}
}
})
result = es.search("newbie", "user", {
"query": {
"exists": {
"field":"age"
}
}
})
result = es.search("newbie", "user", {
"query": {
"bool":{
"must":{"term":{"age":25}},
"must_not":{"term":{"name":"niyoufa"}},
"should":[
{"term":{"name":"lxy1"}},
]
}
}
})
result = es.search("newbie", "user", {
"query": {
"match_all":{}
}
})
result = es.search("newbie", "user", {
"query": {
"match": {"name":"niyoufa"}
}
})
result = es.search("newbie", "user", {
"query": {
"multi_match": {
"query":"full text search",
"fields":["name","age"]
}
}
})
filter
result = es.search("newbie","link",{
"query":{
"filtered": {
"query": {"match": {"name": "niyoufa"}},
"filter": {"term": {"age": 25}},
},
}
})
sort
result = es.search("newbie", "link", {
"query": {
"exists":{
"field":"age",
}
},
"sort":{"age":{"order":"desc"}}
})
result = es.search("newbie", "link", {
"query": {
"match": {"name":"niyoufa"}
}
})
print result.get("hits").get("hits")
print len(result.get("hits").get("hits"))
# coding=utf-8
import datetime, time, json, pdb
from es_settings import *
import cPickle as pickle
import logging
from xieli.models import *
from xieli.util.types import *
from django.conf import settings
import pyes
from pyes import *
from pyes.filters import GeoBoundingBoxFilter, GeoDistanceFilter, GeoPolygonFilter
from pyes.query import FilteredQuery, MatchAllQuery , Search
from pyes.sort import SortFactory, SortOrder, GeoSortOrder, ScriptSortOrder
from pyes.queryset import generate_model
ES_PATH = settings.ES_PATH
#ES_PATH = "http://dev.xielicheng.com:9200"
#ES_PATH ="http://192.168.1.113:9200"
#ES_PATH = "http://www.xieliapp.com:9200"
es_logger = logging.getLogger("utils")
# 连接es服务器
CONN_ES = pyes.ES(ES_PATH, timeout=200.0)
#连接es服务器
def _connect_index():
    """Open a fresh pyes connection to the configured ES cluster."""
    return pyes.ES(ES_PATH, timeout=200.0)
#创建index索引表
def create_index(name,index_type,FIELD_MAPPING):
    """Create index `name` and install FIELD_MAPPING for doc type `index_type`.

    Failures are printed and logged rather than raised.
    """
    try :
        conn = _connect_index()
        conn.indices.create_index(name)
        conn.indices.put_mapping(index_type, {'properties':FIELD_MAPPING}, [name])
        # Chinese message: "created index %s and type %s"
        print "创建%s索引和%s表"%(name,index_type)
    except Exception,e :
        # Chinese message: "failed to create index %s / type %s"
        print "创建%s索引和%s表失败"%(name,index_type)
        es_logger.error(str(e))
#删除index索引表
def delete_index(name):
    """Drop index `name`; failures are printed and logged, not raised.

    NOTE(review): opens its own connection instead of reusing
    _connect_index() -- inconsistent with create_index().
    """
    try :
        conn = pyes.ES(ES_PATH, timeout=200.0)
        conn.indices.delete_index(name)
        # Chinese message: "index %s deleted"
        print "索引%s被删除"%name
    except Exception,e:
        # Chinese message: "failed to delete index %s"
        print "删除索引%s失败"%name
        es_logger.error(str(e))
#向es插入数据
def insert_into_es(params,index_name,index_type):
    """Index document `params` into index_name/index_type on the shared CONN_ES.

    The index refresh is best-effort: a refresh failure is deliberately
    swallowed so the insert itself still counts as a success.
    """
    try :
        CONN_ES.index(params,index_name,index_type)
        try:
            CONN_ES.indices.refresh(index_name)
        except Exception, e:
            pass
        # print "插入数据:\n"
        # print params
    except Exception ,e :
        # print "%s插入数据失败"%e
        es_logger.error(str(e))
#获取es数据,形成类似django model对象
def get_index_model(index_name,index_type) :
    """Return a Django-model-like queryset class bound to an ES index/type.

    NOTE(review): this local import shadows the module-level
    `from pyes.queryset import generate_model` -- one of the two is redundant.
    """
    from pyes.queryset import generate_model
    return generate_model(index_name, index_type,es_url=ES_PATH)
#获取所有相关的记录
def march_query_alltag(field,query) :
    """Full-text match of `query` on `field` across all indices; returns a list.

    NOTE(review): "march" is presumably a typo for "match"; the name is kept
    because it is part of the public API.
    """
    #b = MatchQuery('interest_tag','美国')
    b = MatchQuery(field,query)
    return [i for i in CONN_ES.search(query =b)]
#must + should
def march_query_tag(field,query,sub_type):
    """Bool query: sub_type must match exactly; `field` should match `query`.

    Searches across all indices and returns the hits as a list.
    """
    must = pyes.TermQuery("sub_type",sub_type)
    should = pyes.MatchQuery(field,query)
    query = pyes.BoolQuery(must = must ,should = should)
    return [i for i in CONN_ES.search(query =query)]
#搜索指定index,指定字段
def search_term(field,query,index_name,index_type):
    """Exact term search on one field, restricted to index_name/index_type."""
    q = TermQuery(field, query)
    results = CONN_ES.search(query = q,indices=index_name,doc_types=index_type)
    return [i for i in results]
#搜索多个字段
def search_more_term(field1,query1,field2,query2,index_name,index_type,kw=None,*arg):
    """AND of two (or three) exact term filters, with optional sorting.

    kw: optional dict with "sort_field" / "sort_type" keys; when given the
        result is sorted via search_add_sort().
    *arg: optional (field3, value3) pair appended as a third must clause.
    NOTE(review): the sorted branch drops the doc_types restriction --
    confirm whether that asymmetry is intentional.
    """
    must1 = pyes.TermQuery(field1,query1)
    must2 = pyes.TermQuery(field2,query2)
    must= [must1,must2]
    if arg:
        must3 = pyes.TermQuery(arg[0],arg[1])
        must.append(must3)
    query = pyes.BoolQuery(must = must)
    if kw:
        search = search_add_sort(query,kw["sort_field"],kw["sort_type"])
        return [i for i in CONN_ES.search(search,indices=[index_name])]
    return [i for i in CONN_ES.search(query =query,indices=index_name,doc_types=index_type) ]
#倒序 desc
def search_add_sort(query,sort_field,sort_type):
    """Wrap `query` in a Search object sorted by sort_field (e.g. "desc")."""
    search = Search(query)
    sort_order = SortOrder(sort_field, sort_type)
    search.sort.add(sort_order)
    return search
#按时间范围查询
def search_range_time(field,start_date,date_range,index_name,index_type):
    """Range search on a date field, from start_date to start_date + date_range days.

    start_date: "%Y-%m-%d" string; date_range: positive int number of days.
    NOTE(review): `type(date_range) == type(-1)` is an anti-pattern
    (isinstance would be idiomatic), and the bare `raise` in the else
    branch has no active exception, so it raises
    "TypeError/RuntimeError: No active exception to re-raise" rather than
    a meaningful error.
    """
    if type(date_range) == type(-1) and date_range != -1:
        #start_da = datetime.datetime.strptime(start_date, "%Y-%m-%dT%H:%M:%SZ").date()
        start_da = datetime.datetime.strptime(start_date, "%Y-%m-%d").date()
        end_date = (start_da + datetime.timedelta(days=date_range)).strftime('%Y-%m-%d')
        must = pyes.RangeQuery(pyes.ESRange(field, from_value=start_date, to_value=end_date))
        query = pyes.BoolQuery(must = must)
        dd = [i for i in CONN_ES.search(query =query,indices=index_name,doc_types=index_type) ]
        return dd
    else:
        raise
def get_data_id(data):
    """Return the internal ES document id of a pyes result object."""
    return data.get_id()
#此处id为es默认id
def delete_data(index_name,index_type,id):
    """Delete one document by its default ES id (shadows builtin `id`)."""
    CONN_ES.delete(index = index_name,doc_type = index_type,id = id)
#根据es对象删除数据
def delete_data_from_esobj(es_obj):
    """Delete the document behind a pyes result object.

    The index and doc type are read back from the object's own metadata.
    """
    id = get_data_id(es_obj)
    es_meta = es_obj.get_meta()
    index_name = es_meta['index']
    index_type = es_meta["type"]
    CONN_ES.delete(index = index_name,doc_type = index_type,id = id)
def create_all_about_xieli_es_index():
    """Recreate the 'teamup' index (drop + create with ES_FIELD_MAPPING).

    The other global indices are currently disabled (commented out below);
    any failure is only logged.
    """
    try:
        #create_index("messageglobalindex","MessageGlobal",GLOBAL_MESSAGE_FIELD_MAPPING)
        #create_index("commentglobalindex","CommentGlobal",GLOBAL_COMMENT_FIELD_MAPPING)
        #create_index("fileglobalindex","FileGlobal",GLOBAL_FILEIMAGE_FIELD_MAPPING)
        #create_index("usernavigationglobalindex","UsernavigationgGlobal",GLOBAL_USERNAVIGATION_FIELD_MAPPING)
        #create_index("participationglobalindex","ParticipationGlobal",GLOBAL_PARTICIPATION_FIELD_MAPPING)
        delete_index("teamup")
        create_index("teamup","CommonObject",ES_FIELD_MAPPING)
    except Exception, e:
        es_logger.error(str(e))
def delete_all_index():
    """Drop every application index, including 'teamup'. Irreversible."""
    delete_index("messageglobalindex")
    delete_index("commentglobalindex")
    delete_index("fileglobalindex")
    delete_index("usernavigationglobalindex")
    delete_index("participationglobalindex")
    delete_index("teamup")
# author nyf
#根据条件获取从ES获取指定个数数据
#index_name : 索引名称
#index_type : 索引表名称
#query_params : 查询条件
#ordering : 排序字段
# start , end 数据标记
def get_document_from_es(index_name,index_type,query_params={},ordering="",start=0,end=1) :
    """Fetch a slice [start:end) of documents matching query_params.

    ordering: optional field name for order_by; empty string disables sorting.
    Returns False when the model cannot be generated (error printed).
    NOTE(review): the mutable default `query_params={}` is shared across
    calls -- safe only as long as callers never mutate it.
    """
    try :
        model = get_index_model(index_name,index_type)
    except Exception ,e :
        print e
        return False
    if ordering :
        return model.objects.filter(**query_params).order_by(ordering)[start:end]
    else :
        return model.objects.filter(**query_params)[start:end]
#根据条件从ES中删除文档
#index_name : 索引名称
#index_type : 索引表名称
#query_params : 查询条件
def delete_document_from_es(index_name,index_type,query_params={}) :
    """Delete every document matching query_params from index_name/index_type.

    Returns True on success, False on any failure (errors are printed).
    NOTE(review): mutable default `query_params={}` -- see
    get_document_from_es for the same caveat.
    """
    try :
        model = get_index_model(index_name,index_type)
    except Exception ,e :
        print e
        return False
    results = model.objects.filter(**query_params).all()
    try :
        for result in results :
            result.delete()
    except Exception ,e :
        print e
        return False
    return True
#coding=utf8
#author = yxp
"""
配置elasticsearchp2.2
jdk1.8
本配置文件以 路径,loging为主
"""
from django.conf import settings
ES_PATH = settings.ES_PATH
#ES 定义index字段
"""
analyzed 使用分词器
analyzer 分词器类型
"""
ES_FIELD_MAPPING = {
"id" :
{"index":"no","type":u'integer'},
"sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
#标题
"title":
{"index":"analyzed","type":u'string','store': 'yes',},
#作者
"author" :
{"index":"analyzed","type":u'string','store': 'yes',},
#创建时间
"creation_time" :
{"index":"analyzed","type":u'date'},
#是否允许主动传播
"broadcast":
{"index":"no","type":u'boolean'},
#参与人数
"nb_participants" :
{"index":"analyzed","type":u'integer'},
#插件类型: 调查问卷,监督举报等
"plugin" :
{"index":"analyzed","type":u'string'},
#功能类别标签:排行榜,求安慰等
"func_tags":
{"index":"analyzed","type":u'string',},
#行业大标签 list
"topic_tags" :
{"index":"analyzed","type":'string','store': 'yes'},
#兴趣小标签 list
"interest_tag":
{"index":"analyzed","type":'string','store': 'yes'},
#描述
"description" :
{"index":"no","type":u'string'},
#版本
"_version_":
{"index":"analyzed","type":u'long'},
#地理位置,经纬度 [经度,纬度]
"geo":
{"index":"analyzed","type":u'geo_point','store': 'yes',},
#发布活动时的参与者限制条件列表
"limits" :
{"index":"analyzed","type":u'string'},
#参与类型 0 :所有用户 1:联系人
"participant_type" :
{"index":"no","type":u'integer'},
#图片列表
"image_sha1s":
{"index":"no","type":u'string'},
#分享设置 1:可以分享 0:不可以分享
"can_be_shared" :
{"index":"no","type":u'integer'},
#分享次数
"nb_shares" :
{"index":"analyzed","type":u'integer'},
#多少人已经完成任务或已签到
"nb_completes":
{"index":"analyzed","type":u'integer'},
#根据坐标反解析出的地理位置信息,比如海淀区学清路38号
"loc" :
{"index":"analyzed","type":u'string'},
#城市
"city" :
{"index":"analyzed","type":u'string'},
#百度地图对应的城市编码
"city_code":
{"index":"analyzed","type":u'integer'},
#发起人类型:0表示以个人名义发起,1表示以公司名义发起
"organizer_type" :
{"index":"analyzed","type":u'integer'},
#是否有红包, 缺省免费没有
"has_bonus" :
{"index":"no","type":u'boolean'},
#此项投票或是任务的红包总金额
"total_amount":
{"index":"analyzed","type":u'float'},
#红包派发给多少人
"nb_rewarded_people":
{"index":"analyzed","type":u'integer'},
#红包派发类型: 0:最先参与的若干个人;1:根据结果审批的若干个人;
"bonus_type" :
{"index":"analyzed","type":u'integer'},
#红包是否已经派发0 :未派发 1:已派发
"is_bonus_paid":
{"index":"analyzed","type":u'integer',},
#红包发放是否已经结算:0 :未结算 1:已结算
"is_account" :
{"index":"analyzed","type":u'integer',},
"creator_sha1" :
{"index":"analyzed","type":u'string',},
"sub_type" :
{"index":"analyzed","type":u'integer',},
"status" :
{"index":"analyzed","type":u'integer',},
}
#分布式comment全局id存储
GLOBAL_COMMENT_FIELD_MAPPING = {
"user_sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"obj_sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"sha1":
{"index":"not_analyzed","type":u'string','store': 'yes',},
}
#分布式paticipation全局id存储
GLOBAL_PARTICIPATION_FIELD_MAPPING = {
"user_sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"obj_sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"analyzed","type":u'string','store': 'yes',},
}
#分布式usenav全局id存储
GLOBAL_USERNAVIGATION_FIELD_MAPPING = {
"user_sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"analyzed","type":u'string','store': 'yes',},
"time":
{"index":"not_analyzed","type":u'date','store': 'yes',"format": "yyyy-MM-dd"},
}
#分布式fileimage全局id存储
GLOBAL_FILEIMAGE_FIELD_MAPPING = {
"sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"analyzed","type":u'string','store': 'yes',},
}
#分布式message全局id存储
GLOBAL_MESSAGE_FIELD_MAPPING = {
"sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"user_sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"obj_sha1":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"comment_sha1":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"type" :
{"index":"not_analyzed","type":u'integer'},
"creation_time" :
{"index":"not_analyzed","type":u'date'},
"already_read":
{"index":"not_analyzed","type":u'integer'},
}
| [
"[email protected]"
]
| |
34291debc85164d7bdf03f1960026837e992523d | ad54a035bb0b0ec73853516e4b62daf3598b601b | /django_fullstack/library_revisited/manage.py | 045355e8b7f85761526cd3505808d46bf113c5a1 | []
| no_license | odionfross/django | ce34c29fdc9f828ca0544826c9ef1b35efd324fb | 759d2ddf9322984dd97a967dd1f08e7ee644d9c9 | refs/heads/master | 2022-12-11T15:29:17.506133 | 2020-09-16T04:22:10 | 2020-09-16T04:22:10 | 295,921,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's administrative command line for this project."""
    # Point Django at the project settings before management imports them.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'library_revisited.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"https://www.linkedin.com/in/odionfross/"
]
| https://www.linkedin.com/in/odionfross/ |
f8ca76fd22f085e062460354e8d995add278d7e1 | 589ac0a71099f4ee6857a31986305f0df2c16ede | /Bio/Phylo/NewickIO.py | f0579a6eca4eaf30aceb11ae08827e40072c94c4 | [
"LicenseRef-scancode-biopython"
]
| permissive | barendt/biopython | 802aad89005b302b6523a934071796edbd8ac464 | 391bcdbee7f821bff3e12b75c635a06bc1b2dcea | refs/heads/rna | 2021-11-09T19:11:56.345314 | 2010-05-01T02:44:42 | 2010-05-01T02:44:42 | 636,700 | 0 | 0 | NOASSERTION | 2021-11-05T13:10:14 | 2010-04-29T02:35:46 | Python | UTF-8 | Python | false | false | 9,781 | py | # Copyright (C) 2009 by Eric Talevich ([email protected])
# Based on Bio.Nexus, copyright 2005-2008 by Frank Kauff & Cymon J. Cox.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""I/O function wrappers for the Newick file format.
See: U{ http://evolution.genetics.washington.edu/phylip/newick_doc.html }
"""
__docformat__ = "epytext en"
from cStringIO import StringIO
from Bio.Phylo import Newick
# Definitions retrieved from Bio.Nexus.Trees
NODECOMMENT_START = '[&'
NODECOMMENT_END = ']'
class NewickError(Exception):
    """Raised when a Newick tree cannot be parsed or constructed."""
# ---------------------------------------------------------
# Public API
def parse(handle):
    """Yield each tree found in a Newick file handle.

    @return: a generator of Bio.Phylo.Newick.Tree objects.
    """
    newick_parser = Parser(handle)
    return newick_parser.parse()
def write(trees, handle, plain=False, **kwargs):
    """Write the given trees to a file handle in Newick format.

    @return: number of trees written.
    """
    writer = Writer(trees)
    return writer.write(handle, plain=plain, **kwargs)
# ---------------------------------------------------------
# Input
class Parser(object):
    """Parse a Newick tree given a file handle.
    Based on the parser in Bio.Nexus.Trees.
    """
    def __init__(self, handle):
        self.handle = handle
    @classmethod
    def from_string(cls, treetext):
        # Alternate constructor: parse directly from an in-memory string.
        handle = StringIO(treetext)
        return cls(handle)
    def parse(self, values_are_support=False, rooted=False):
        """Parse the text stream this object was initialized with."""
        self.values_are_support = values_are_support
        self.rooted = rooted
        # Accumulate physical lines until a full ';'-terminated tree is seen;
        # one file may therefore contain several trees.
        buf = ''
        for line in self.handle:
            buf += line.rstrip()
            if buf.endswith(';'):
                yield self._parse_tree(buf)
                buf = ''
        if buf:
            # Last tree is missing a terminal ';' character -- that's OK
            yield self._parse_tree(buf)
    def _parse_tree(self, text):
        """Parses the text representation into an Tree object."""
        # XXX what global info do we have here? Any? Use **kwargs?
        return Newick.Tree(root=self._parse_subtree(text))
    def _parse_subtree(self, text):
        """Parse (a,b,c...)[[[xx]:]yy] into subcomponents, recursively."""
        text = text.strip().rstrip(';')
        if text.count('(')!=text.count(')'):
            raise NewickError("Parentheses do not match in (sub)tree: " + text)
        # Text is now "(...)..." (balanced parens) or "..." (leaf node)
        if text.count('(') == 0:
            # Leaf/terminal node -- recursion stops here
            return self._parse_tag(text)
        # Handle one layer of the nested subtree
        # XXX what if there's a paren in a comment or other string?
        # NOTE(review): quoted labels are not supported; a ')' inside a
        # label or node comment would break the paren counting below.
        close_posn = text.rfind(')')
        subtrees = []
        # Locate subtrees by counting nesting levels of parens
        plevel = 0
        prev = 1
        for posn in range(1, close_posn):
            if text[posn] == '(':
                plevel += 1
            elif text[posn] == ')':
                plevel -= 1
            elif text[posn] == ',' and plevel == 0:
                # Top-level comma: boundary between sibling subtrees.
                subtrees.append(text[prev:posn])
                prev = posn + 1
        subtrees.append(text[prev:close_posn])
        # Construct a new clade from trailing text, then attach subclades
        clade = self._parse_tag(text[close_posn+1:])
        clade.clades = [self._parse_subtree(st) for st in subtrees]
        return clade
    def _parse_tag(self, text):
        """Extract the data for a node from text.
        @return: Clade instance containing any available data
        """
        # Extract the comment
        comment_start = text.find(NODECOMMENT_START)
        if comment_start != -1:
            comment_end = text.find(NODECOMMENT_END)
            if comment_end == -1:
                raise NewickError('Error in tree description: '
                                  'Found %s without matching %s'
                                  % (NODECOMMENT_START, NODECOMMENT_END))
            comment = text[comment_start+len(NODECOMMENT_START):comment_end]
            text = text[:comment_start] + text[comment_end+len(NODECOMMENT_END):]
        else:
            comment = None
        clade = Newick.Clade(comment=comment)
        # Extract name (taxon), and optionally support, branch length
        # Float values are support and branch length, the string is name/taxon
        values = []
        for part in (t.strip() for t in text.split(':')):
            if part:
                try:
                    values.append(float(part))
                except ValueError:
                    # NOTE(review): 'assert' is stripped under python -O;
                    # raising NewickError would be more robust here.
                    assert clade.name is None, "Two string taxonomies?"
                    clade.name = part
        if len(values) == 1:
            # Real branch length, or support as branch length
            if self.values_are_support:
                clade.support = values[0]
            else:
                clade.branch_length = values[0]
        elif len(values) == 2:
            # Two non-taxon values: support comes first. (Is that always so?)
            clade.support, clade.branch_length = values
        elif len(values) > 2:
            raise NewickError("Too many colons in tag: " + text)
        return clade
# ---------------------------------------------------------
# Output
class Writer(object):
    """Based on the writer in Bio.Nexus.Trees (str, to_string)."""
    def __init__(self, trees):
        self.trees = trees
    def write(self, handle, **kwargs):
        """Write this instance's trees to a file handle.

        Returns the number of trees written, one per line.
        """
        count = 0
        for treestr in self.to_strings(**kwargs):
            handle.write(treestr + '\n')
            count += 1
        return count
    def to_strings(self, support_as_branchlengths=False,
            branchlengths_only=False, plain=False,
            plain_newick=True, ladderize=None,
            max_support=1.0):
        """Return an iterable of PAUP-compatible tree lines."""
        # If there's a conflict in the arguments, plain is forced to False
        # (support/branch-length output takes precedence).
        if support_as_branchlengths or branchlengths_only:
            plain = False
        make_info_string = self._info_factory(plain, support_as_branchlengths,
                branchlengths_only, max_support)
        def newickize(clade):
            """Convert a node tree to a Newick tree string, recursively."""
            if clade.is_terminal():    #terminal
                return ((clade.name or '')
                        + make_info_string(clade, terminal=True))
            else:
                subtrees = (newickize(sub) for sub in clade)
                return '(%s)%s' % (','.join(subtrees),
                                   make_info_string(clade))
        # Convert each tree to a string
        for tree in self.trees:
            if ladderize in ('left', 'LEFT', 'right', 'RIGHT'):
                # Nexus compatibility shim, kind of
                tree.ladderize(reverse=(ladderize in ('right', 'RIGHT')))
            rawtree = newickize(tree.root) + ';'
            if plain_newick:
                yield rawtree
                continue
            # Nexus-style (?) notation before the raw Newick tree
            treeline = ['tree', (tree.name or 'a_tree'), '=']
            if tree.weight != 1:
                treeline.append('[&W%s]' % round(float(tree.weight), 3))
            if tree.rooted:
                treeline.append('[&R]')
            treeline.append(rawtree)
            yield ' '.join(treeline)
    def _info_factory(self, plain, support_as_branchlengths,
            branchlengths_only, max_support):
        """Return a function that creates a nicely formatted node tag."""
        if plain:
            # Plain tree only. That's easy.
            def make_info_string(clade, terminal=False):
                return ''
        elif support_as_branchlengths:
            # Support as branchlengths (eg. PAUP), ignore actual branchlengths
            def make_info_string(clade, terminal=False):
                if terminal:
                    # terminal branches have 100% support
                    return ':%1.2f' % max_support
                else:
                    return ':%1.2f' % (clade.support)
        elif branchlengths_only:
            # write only branchlengths, ignore support
            def make_info_string(clade, terminal=False):
                return ':%1.5f' % (clade.branch_length)
        else:
            # write support and branchlengths (e.g. .con tree of mrbayes)
            def make_info_string(clade, terminal=False):
                if terminal:
                    return ':%1.5f' % (clade.branch_length or 1.0)
                else:
                    if (clade.branch_length is not None
                            and hasattr(clade, 'support')
                            and clade.support is not None):
                        # we have blen and support
                        return '%1.2f:%1.5f' % (clade.support,
                                                clade.branch_length)
                    elif clade.branch_length is not None:
                        # we have only blen
                        return '0.00000:%1.5f' % clade.branch_length
                    elif (hasattr(clade, 'support')
                            and clade.support is not None):
                        # we have only support
                        return '%1.2f:0.00000' % clade.support
                    else:
                        return '0.00:0.00000'
        return make_info_string
| [
"[email protected]"
]
| |
311729967843c5ec8099011965d0fc07f899187d | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-iotedge/huaweicloudsdkiotedge/v2/model/container_configs_dto.py | 5f77f61d48e99914c232542c25d08d3c747de972 | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 6,284 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ContainerConfigsDTO:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'privileged': 'bool',
'host_network': 'bool',
'restart_policy': 'str',
'container_port_list': 'list[ContainerPortDTO]'
}
attribute_map = {
'privileged': 'privileged',
'host_network': 'host_network',
'restart_policy': 'restart_policy',
'container_port_list': 'container_port_list'
}
def __init__(self, privileged=None, host_network=None, restart_policy=None, container_port_list=None):
"""ContainerConfigsDTO
The model defined in huaweicloud sdk
:param privileged: 开启容器特权模式
:type privileged: bool
:param host_network: 是否使用主机网络模式
:type host_network: bool
:param restart_policy: 重启策略,容器执行健康检查后失败后的策略
:type restart_policy: str
:param container_port_list: 容器端口映射值
:type container_port_list: list[:class:`huaweicloudsdkiotedge.v2.ContainerPortDTO`]
"""
self._privileged = None
self._host_network = None
self._restart_policy = None
self._container_port_list = None
self.discriminator = None
if privileged is not None:
self.privileged = privileged
if host_network is not None:
self.host_network = host_network
self.restart_policy = restart_policy
if container_port_list is not None:
self.container_port_list = container_port_list
@property
def privileged(self):
"""Gets the privileged of this ContainerConfigsDTO.
开启容器特权模式
:return: The privileged of this ContainerConfigsDTO.
:rtype: bool
"""
return self._privileged
@privileged.setter
def privileged(self, privileged):
"""Sets the privileged of this ContainerConfigsDTO.
开启容器特权模式
:param privileged: The privileged of this ContainerConfigsDTO.
:type privileged: bool
"""
self._privileged = privileged
@property
def host_network(self):
"""Gets the host_network of this ContainerConfigsDTO.
是否使用主机网络模式
:return: The host_network of this ContainerConfigsDTO.
:rtype: bool
"""
return self._host_network
@host_network.setter
def host_network(self, host_network):
"""Sets the host_network of this ContainerConfigsDTO.
是否使用主机网络模式
:param host_network: The host_network of this ContainerConfigsDTO.
:type host_network: bool
"""
self._host_network = host_network
@property
def restart_policy(self):
"""Gets the restart_policy of this ContainerConfigsDTO.
重启策略,容器执行健康检查后失败后的策略
:return: The restart_policy of this ContainerConfigsDTO.
:rtype: str
"""
return self._restart_policy
@restart_policy.setter
def restart_policy(self, restart_policy):
"""Sets the restart_policy of this ContainerConfigsDTO.
重启策略,容器执行健康检查后失败后的策略
:param restart_policy: The restart_policy of this ContainerConfigsDTO.
:type restart_policy: str
"""
self._restart_policy = restart_policy
@property
def container_port_list(self):
"""Gets the container_port_list of this ContainerConfigsDTO.
容器端口映射值
:return: The container_port_list of this ContainerConfigsDTO.
:rtype: list[:class:`huaweicloudsdkiotedge.v2.ContainerPortDTO`]
"""
return self._container_port_list
@container_port_list.setter
def container_port_list(self, container_port_list):
"""Sets the container_port_list of this ContainerConfigsDTO.
容器端口映射值
:param container_port_list: The container_port_list of this ContainerConfigsDTO.
:type container_port_list: list[:class:`huaweicloudsdkiotedge.v2.ContainerPortDTO`]
"""
self._container_port_list = container_port_list
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ContainerConfigsDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
e5f657f8585b64e8ca97392387cbc8e5ea4a0f7d | 4c9c2940ef3a07e2756fcceddf01acd384ebde01 | /Python/[7 kyu] Ordered count of characters.py | da36015440ad03be1c025a725b9cca4d2ae3af47 | [
"MIT"
]
| permissive | KonstantinosAng/CodeWars | 7d3501a605f7ffecb7f0b761b5ffe414e2f1983a | 157818ece648454e882c171a71b4c81245ab0214 | refs/heads/master | 2023-04-11T09:44:27.480064 | 2023-03-26T21:37:07 | 2023-03-26T21:37:07 | 245,296,762 | 6 | 6 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | # see https://www.codewars.com/kata/57a6633153ba33189e000074/solutions/python
def ordered_count(inp):
    """Return (character, count) pairs ordered by first appearance in `inp`.

    collections.Counter is a dict subclass, and dicts preserve insertion
    order (Python 3.7+), so Counter(inp).items() yields characters in
    first-occurrence order -- exactly what the original manual tally did.
    """
    from collections import Counter
    return list(Counter(inp).items())
tests = (
    ('abracadabra', [('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)]),
    ('Code Wars', [('C', 1), ('o', 1), ('d', 1), ('e', 1), (' ', 1), ('W', 1), ('a', 1), ('r', 1), ('s', 1)])
)
# Print True for every case the implementation reproduces.
for inp, exp in tests:
    print(ordered_count(inp) == exp)
"[email protected]"
]
| |
616bc2f9eedb173fae2c4e924b031aca3eaed1e1 | a2ac73af04a07bb070cd85c88778608b561dd3e4 | /addons/account_check_writing/account_voucher.py | d5cb90cc89990e5b9c4c9de82486dc995d9007a3 | []
| no_license | sannareddy/openerp-heimai | c849586d6099cc7548dec8b3f1cc7ba8be49594a | 58255ecbcea7bf9780948287cf4551ed6494832a | refs/heads/master | 2021-01-15T21:34:46.162550 | 2014-05-13T09:20:37 | 2014-05-13T09:20:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | /usr/share/pyshared/openerp/addons/account_check_writing/account_voucher.py | [
"[email protected]"
]
| |
a80cf6d1ddfc46a4bc219908bc8145a82db73edb | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_naturalism.py | 8c5e9015f19160ff616ae4d4cd686e3352b59c9f | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py |
#class header
class _NATURALISM():
def __init__(self,):
self.name = "NATURALISM"
self.definitions = [u'showing people and experiences as they really are, instead of suggesting that they are better than they really are or representing them in a fixed style: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
bf5dc29023067b377e9be2c8a51b47247ca9a81a | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_environments_deploy_flow_async.py | ade1585fbfce343c1ef8b1d490219dca511c0ddb | [
"Apache-2.0"
]
| permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 1,997 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeployFlow
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow-cx
# [START dialogflow_v3_generated_Environments_DeployFlow_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_deploy_flow():
    """Demonstrate deploying a flow version via the Dialogflow CX API."""
    # Build an async Environments client for the Dialogflow CX API.
    env_client = dialogflowcx_v3.EnvironmentsAsyncClient()

    # Assemble the DeployFlow request payload with placeholder values.
    deploy_request = dialogflowcx_v3.DeployFlowRequest(
        environment="environment_value",
        flow_version="flow_version_value",
    )

    # Kick off the long-running deploy operation.
    lro = env_client.deploy_flow(request=deploy_request)
    print("Waiting for operation to complete...")

    # Await the operation's completion and unwrap its final result.
    deploy_result = (await lro).result()
    print(deploy_result)
# [END dialogflow_v3_generated_Environments_DeployFlow_async]
| [
"[email protected]"
]
| |
79afdf13c61a200d338ede0d864a956c63fabe3f | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /little_person_or_time/year/be_long_woman/part_and_thing/same_fact.py | 4a49f93faba8dbcc80ef785b249febee2ff44e24 | []
| no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
#! /usr/bin/env python
def life_or_long_week(str_arg):
    """Echo *str_arg* to stdout."""
    print(str_arg)


def public_company(str_arg):
    """Print *str_arg* (via life_or_long_week), then the literal 'tell_part'."""
    life_or_long_week(str_arg)
    print('tell_part')
# Entry point: run the demo call only when this file is executed directly,
# not when it is imported as a module.
if __name__ == '__main__':
    public_company('want_next_thing')
| [
"[email protected]"
]
| |
dce33266677a3e01c6ff99c2c720c7dfc65d296c | d7d7873d0bea9185a252916e3599b33e301d394c | /setup.py | 8f0a378e044d453b35d69a16563e88fab08a6dcc | []
| no_license | KennethJHan/pip_test | b16a3248a50025075cc3db916d07ee9761cc9b9f | 89e957d7059e303e5b640a1f2e514c437b616c10 | refs/heads/main | 2023-01-12T02:00:35.976500 | 2020-11-18T07:36:52 | 2020-11-18T07:36:52 | 313,856,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 570 | py | import setuptools
# Read the long description from the README so the package index renders it
# on the project page.  An explicit encoding is required: without it, open()
# falls back to the platform default (e.g. cp1252 on Windows), which can
# raise UnicodeDecodeError on non-ASCII README content.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="gitandpip",
    version="0.0.1",
    author="kenneth joohyun han",
    author_email="[email protected]",
    description="It's pip... with git.",
    long_description=long_description,
    # The README is Markdown; without this hint PyPI treats the long
    # description as plain text / reST and may reject or misrender it.
    long_description_content_type="text/markdown",
    url="https://github.com/KennethJHan/pip_test",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
| [
"[email protected]"
]
| |
3aa42dfe75937de11efba3950fb0301198e88b6a | 3d4fcc7cbfafc4aaebea8e08d3a084ed0f0d06a1 | /Programme_2/Creation_donnees/MIDI/bk_xmas4fMidiSimple.py | cc8ced8f4e27729a020952c8b025d1842f14d469 | []
| no_license | XgLsuLzRMy/Composition-Musicale-par-Reseau-de-Neurones | 0421d540efe2d9dc522346810f6237c5f24fa3bf | 518a6485e2ad44e8c7fbae93c94a9dc767454a83 | refs/heads/master | 2021-09-03T20:43:01.218089 | 2018-01-11T20:02:00 | 2018-01-11T20:02:00 | 106,448,584 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 151,641 | py | import midi
pattern=midi.Pattern(format=1, resolution=480, tracks=\
[midi.Track(\
[ midi.NoteOnEvent(tick=1200, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=960, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 67]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 70]),
midi.NoteOnEvent(tick=720, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 63]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 59]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 57]),
midi.NoteOnEvent(tick=120, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 78]),
midi.NoteOnEvent(tick=360, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 66]),
midi.NoteOnEvent(tick=120, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 70]),
midi.NoteOnEvent(tick=360, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 59]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 51]),
midi.NoteOnEvent(tick=1440, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=230, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=216, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[71, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=720, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=720, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=216, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=216, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[71, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=720, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 56]),
midi.NoteOnEvent(tick=108, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 71]),
midi.NoteOnEvent(tick=360, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 63]),
midi.NoteOnEvent(tick=108, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 71]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=960, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=720, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=470, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=1920, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[59, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 33]),
midi.NoteOnEvent(tick=720, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 39]),
midi.NoteOnEvent(tick=720, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 34]),
midi.NoteOnEvent(tick=960, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 35]),
midi.NoteOnEvent(tick=720, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 39]),
midi.NoteOnEvent(tick=720, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 45]),
midi.NoteOnEvent(tick=720, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 71]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 71]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 67]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 67]),
midi.NoteOnEvent(tick=960, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=720, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 75]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=720, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 75]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 85]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 82]),
midi.NoteOnEvent(tick=960, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 82]),
midi.NoteOnEvent(tick=480, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 68]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 68]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 80]),
midi.NoteOnEvent(tick=480, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 75]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 75]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 89]),
midi.NoteOnEvent(tick=960, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 74]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 81]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 88]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 65]),
midi.NoteOnEvent(tick=240, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 81]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 84]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 68]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 65]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 81]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 72]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 79]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 78]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 81]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 81]),
midi.NoteOnEvent(tick=960, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 67]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[64, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=720, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 55]),
midi.NoteOnEvent(tick=720, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 55]),
midi.NoteOnEvent(tick=960, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 58]),
midi.NoteOnEvent(tick=720, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 61]),
midi.NoteOnEvent(tick=720, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 64]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 88]),
midi.NoteOnEvent(tick=480, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 84]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 81]),
midi.NoteOnEvent(tick=480, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 78]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 77]),
midi.NoteOnEvent(tick=960, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=7, channel=0, data=[72, 61]),
midi.NoteOnEvent(tick=459, channel=0, data=[71, 55]),
midi.NoteOnEvent(tick=21, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=459, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=14, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 41]),
midi.NoteOnEvent(tick=960, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 50]),
midi.NoteOnEvent(tick=960, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 60]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 60]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 70]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 70]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 70]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 67]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[52, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[84, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[96, 39]),
midi.NoteOnEvent(tick=1440, channel=0, data=[96, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 36]),
midi.NoteOnEvent(tick=951, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[64, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 36]),
midi.NoteOnEvent(tick=951, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[64, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 36]),
midi.NoteOnEvent(tick=1440, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 35]),
midi.NoteOnEvent(tick=1440, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[52, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=1440, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[ midi.NoteOnEvent(tick=1440, channel=0, data=[64, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=360, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 49]),
midi.NoteOnEvent(tick=360, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 48]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 48]),
midi.NoteOnEvent(tick=360, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 51]),
midi.NoteOnEvent(tick=360, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 58]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=1920, channel=0, data=[55, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 47]),
midi.NoteOnEvent(tick=360, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[59, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[59, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 59]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 33]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=1440, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=360, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 49]),
midi.NoteOnEvent(tick=230, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=17, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=353, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=7, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=113, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=14, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=226, channel=0, data=[50, 30]),
midi.NoteOnEvent(tick=14, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=226, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=14, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=226, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=360, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=31, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=353, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=7, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=113, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=216, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 60]),
midi.NoteOnEvent(tick=360, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 56]),
midi.NoteOnEvent(tick=108, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[49, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 70]),
midi.NoteOnEvent(tick=360, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 63]),
midi.NoteOnEvent(tick=108, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[49, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[38, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 78]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 75]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 77]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 76]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[31, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[31, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 73]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 80]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 77]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 73]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 82]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[31, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[31, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 56]),
midi.NoteOnEvent(tick=720, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 56]),
midi.NoteOnEvent(tick=960, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 60]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 60]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 67]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 64]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 69]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 67]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 64]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 63]),
midi.NoteOnEvent(tick=960, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=960, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=960, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=228, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[40, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[40, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 34]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=951, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=951, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=1440, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=1440, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=1440, channel=0, data=[36, 0]),
midi.EndOfTrackEvent(tick=0, data=[])])])
midi.write_midifile("creationMidi.mid", pattern) | [
"[email protected]"
]
| |
1d477bdc2d24efe805ae12ada9589a200b99ac7d | f2658c4bd7f833ace25ac2b63e88317b05f4602d | /2017 July/2017-July-11/st_rdf_test/model2/RelationsConstruction.py | 80db921a3a777c4028c6f12a17dbc2aa3c535f55 | []
| no_license | xiaochao00/telanav_diary | e4c34ac0a14b65e4930e32012cc2202ff4ed91e2 | 3c583695e2880322483f526c98217c04286af9b2 | refs/heads/master | 2022-01-06T19:42:55.504845 | 2019-05-17T03:11:46 | 2019-05-17T03:11:46 | 108,958,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,410 | py | #-------------------------------------------------------------------------------
# Name: RelationsConstruction model
# Purpose: this model is used to map the
# columns: [ ]
#
# Author: rex
#
# Created: 2016/01/20
# Copyright: (c) rex 2016
# Licence: <your licence>
#-------------------------------------------------------------------------------
from record import Record
from constants import *
import os
import sys
import datetime
import json
ROOT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),"..")
GLOBAL_KEY_PREFIX = "relations_construction_"
CSV_SEP = '`'
LF = '\n'
#(key, category, function)
STATISTIC_KEYS = (
("type",False,"type"),
)
class RelationsConstruction(Record):
    # Collects statistics about "construction" relation conditions
    # (rdf_condition.condition_type = '3') for one geographic region.
    # NOTE: this is Python 2 code (print statements, dict.has_key).
    def __init__(self, region):
        # region: region code (e.g. 'na'); it selects the ISO country codes
        # used in the SQL filter via REGION_COUNTRY_CODES().
        Record.__init__(self)
        # Intermediate dump file written by dump2file(), named after the class.
        self.dump_file = os.path.join(ROOT_DIR, "temporary", self.__class__.__name__)
        # Accumulated counters, keyed "<GLOBAL_KEY_PREFIX><stat key>".
        self.stat = {}
        self.region = region
    def dump2file(self):
        # Export all distinct construction conditions (type '3') whose nav
        # links belong to this region's countries into self.dump_file,
        # delimited by '`' so it matches CSV_SEP used by get_statistic().
        cmd = "SELECT \
        DISTINCT(rc.condition_id), \
        rc.condition_type \
        FROM \
        public.rdf_condition AS rc LEFT JOIN public.rdf_nav_strand AS rns ON rns.nav_strand_id=rc.nav_strand_id \
        LEFT JOIN public.rdf_nav_link AS rnl ON rns.link_id = rnl.link_id \
        WHERE rc.condition_type='3' AND rnl.iso_country_code IN (%s)"%(REGION_COUNTRY_CODES(self.region, GLOBAL_KEY_PREFIX))
        print cmd
        # COPY ... TO STDOUT streams the query result straight into the file.
        self.cursor.copy_expert("COPY (%s) TO STDOUT DELIMITER '`'"%(cmd),open(self.dump_file,"w"))
    def get_statistic(self):
        # Run the dump, then fold every row into self.stat; the totals are
        # also written to output/stat/<classname> as JSON and returned.
        # Returns {} if the dump query fails.
        try:
            self.dump2file()
        except:
            print "Oops! Some table or schema don't exist! Please check the upper sql"
            return {}
        processcount = 0
        with open(self.dump_file, "r",1024*1024*1024) as csv_f:
            for line in csv_f:
                line = line.rstrip()
                line_p = line.split(CSV_SEP)
                if len(line_p) < 1:
                    continue
                self.__statistic(line_p)
                processcount += 1
                # progress indicator every 5000 rows (the trailing comma
                # keeps the cursor on the same console line)
                if processcount%5000 == 0:
                    print "\rProcess index [ "+str(processcount)+" ]",
        print "\rProcess index [ "+str(processcount)+" ]",
        # write to file
        with open(os.path.join(ROOT_DIR, "output", "stat", self.__class__.__name__), 'w') as stf:
            stf.write(json.dumps(self.stat))
        return self.stat
    def __statistic(self,line):
        # Dispatch one CSV row to every configured statistic helper; the
        # explicit '_RelationsConstruction__' prefix reproduces Python's
        # name mangling because the helpers are double-underscore methods
        # looked up dynamically via getattr().
        for keys in STATISTIC_KEYS:
            try:
                getattr(self,'_RelationsConstruction__get_'+keys[2])(keys,line)
            except:
                print "The statistic [ %s ] didn't exist"%(keys[2])
                print ("Unexpected error:[ RelationsConstruction.py->__statistic] "+str(sys.exc_info()))
    def __count(self,key):
        # Increment (or create) one counter in self.stat.
        if self.stat.has_key(key):
            self.stat[key] += 1
        else:
            self.stat[key] = 1
    # all statistic method
    def __get_type(self,keys,line):
        # Count rows whose first column (condition_id) is not the Postgres
        # NULL marker '\N' emitted by COPY.
        if '\N' != line[0]:
            self.__count("%s%s"%(GLOBAL_KEY_PREFIX,keys[0]))
if __name__ == "__main__":
    # use to test this model
    # Runs the collector for the 'na' region and prints the counters plus
    # the elapsed wall-clock time.
    bg = datetime.datetime.now()
    stat = RelationsConstruction('na').get_statistic()
    keys = stat.keys()
    print "==>"
    # render the collected counters as one JSON-like object on a line
    print "{%s}"%(",".join(map(lambda px: "\"%s\":%s"%(px,stat[px]) ,keys)))
    print "<=="
    ed = datetime.datetime.now()
    print "Cost time:"+str(ed - bg)
| [
"[email protected]"
]
| |
9f3a4c72756e26bb17b1fe4a87c755b5e04cd441 | ab174d6a1c5effdaab4a49015987c44909680792 | /p4/solve.py | cf24db71cb7964f30b8b21b561e3433d28b73124 | []
| no_license | carrdelling/AdventOfCode2018 | 2b26ed6cae8e48f473243e156d528b17fcb71584 | c42f29d684ca7fb1954c3c1d45031e837d8c818a | refs/heads/master | 2022-01-14T10:01:14.460444 | 2022-01-02T16:35:59 | 2022-01-02T16:35:59 | 160,434,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,569 | py | import datetime as dt
from collections import defaultdict, Counter
log = []
with open('input_data') as in_f:
for row in in_f:
timestamp, action = row.strip().split(']')
_time = dt.datetime.strptime(timestamp[1:], "%Y-%m-%d %H:%M")
log.append((_time, action.strip()))
log.sort()
guard_id = None
start = None
sleep_time = None
sum_sleep = defaultdict(int)
sleep_periods = defaultdict(list)
for _time, action in log:
if 'Guard' in action:
guard_id = action.split()[1]
start = None
if 'falls' in action:
start = _time
if 'wakes' in action:
sleep_time = int((_time - start).total_seconds() / 60.0)
start_minute = start.minute
sum_sleep[guard_id] += sleep_time
sleep_periods[guard_id].append([start_minute + i for i in range(sleep_time)])
lazy_guard = sorted(sum_sleep.items(), key=lambda x: -x[1])[0]
sleep_pattern = Counter(minute for night in sleep_periods[lazy_guard[0]] for minute in night)
quiet_minute = sleep_pattern.most_common(1)[0][0]
plan = int(lazy_guard[0][1:]) * quiet_minute
all_quiet_minutes = []
for guard, sleep_patterns in sleep_periods.items():
sleep_pattern = Counter(minute for night in sleep_patterns for minute in night)
quiet_minute, times = sleep_pattern.most_common(1)[0]
all_quiet_minutes.append((guard, quiet_minute, times))
laziest_guard, quiet_minute, zzz_times = sorted(all_quiet_minutes, key=lambda x: -x[2])[0]
second_plan = int(laziest_guard[1:]) * quiet_minute
print(f'P4-1: {plan}')
print(f'P4-2: {second_plan}')
| [
"[email protected]"
]
| |
a3f2a5a005d26ab9af467662fd50ff955da9a329 | 381612e57ef807e573b40b2dfaf062c8fe7a43f7 | /nesi/softbox/api/models/route_models.py | 7aea30390ad3f30d7155b1f369e6370d70560810 | [
"BSD-2-Clause",
"BSD-3-Clause"
]
| permissive | zcf900/NESi | 1635a405660bb9390843468f34105dd2ef45bd75 | 0db169dd6378fbd097380280cc41440e652de19e | refs/heads/master | 2023-01-31T23:21:02.799923 | 2020-12-18T13:37:43 | 2020-12-18T13:37:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | # This file is part of the NESi software.
#
# Copyright (c) 2020
# Original Software Design by Ilya Etingof <https://github.com/etingof>.
#
# Software adapted by inexio <https://github.com/inexio>.
# - Janis Groß <https://github.com/unkn0wn-user>
# - Philip Konrath <https://github.com/Connyko65>
# - Alexander Dincher <https://github.com/Dinker1996>
#
# License: https://github.com/inexio/NESi/LICENSE.rst
import uuid
from nesi.softbox.api import db
class Route(db.Model):
id = db.Column(db.Integer(), primary_key=True)
dst = db.Column(db.String(23))
gw = db.Column(db.String(23))
metric = db.Column(db.Integer(), default=1)
box_id = db.Column(db.Integer, db.ForeignKey('box.id'))
sub_mask = db.Column(db.Integer(), default=None)
| [
"[email protected]"
]
| |
7c18cecf00315c32eb38a70e12917f1ea4a78653 | 5befd324597df48b068a7ebda221db12a591f895 | /yotta/lib/registry_access.py | 69e96096e2a266ace3511bc79832141695333703 | [
"Apache-2.0"
]
| permissive | parisk/yotta | 6616002c709e30e44234118ecf3dcbe7b9433f53 | 496b0994aa77854709782d8cc23032aabd50bb59 | refs/heads/master | 2021-01-22T19:04:02.966265 | 2014-10-21T08:40:55 | 2014-10-21T08:40:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,462 | py | # standard library modules, , ,
import re
import logging
from collections import OrderedDict
import uuid
import functools
import json
import binascii
import calendar
import datetime
import hashlib
import itertools
import urllib
import base64
import webbrowser
# restkit, MIT, HTTP client library for RESTful APIs, pip install restkit
from restkit import Resource, BasicAuth, errors as restkit_errors
from restkit.forms import multipart_form_encode
# PyJWT, MIT, Jason Web Tokens, pip install PyJWT
import jwt
# pycrypto, Public Domain, Python Crypto Library, pip install pyCRypto
import Crypto
from Crypto.PublicKey import RSA
# settings, , load and save settings, internal
import settings
# connection_pool, , shared connection pool, internal
import connection_pool
# access_common, , things shared between different component access modules, internal
import access_common
# version, , represent versions and specifications, internal
import version
# Ordered JSON, , read & write json, internal
import ordered_json
# Github Access, , access repositories on github, internal
import github_access
# !!! FIXME get SSL cert for main domain, then use HTTPS
Registry_Base_URL = 'http://registry.yottabuild.org'
Website_Base_URL = 'http://yottabuild.org'
_OpenSSH_Keyfile_Strip = re.compile("^(ssh-[a-z0-9]*\s+)|(\s+.+\@.+)|\n", re.MULTILINE)
logger = logging.getLogger('access')
# Internal functions
class _BearerJWTFilter(object):
    ''' restkit request filter that signs every outgoing request with a
        short-lived RS256 JSON Web Token derived from the user's private
        key. The registry identifies the user by the key fingerprint. '''
    def __init__(self, private_key):
        super(_BearerJWTFilter, self).__init__()
        # Token is valid for two hours from now (epoch seconds, UTC).
        validity = datetime.timedelta(hours=2)
        expires = calendar.timegm((datetime.datetime.utcnow() + validity).timetuple())
        key_id = _fingerprint(private_key.publickey())
        logger.debug('fingerprint: %s' % key_id)
        token_fields = {
            "iss": 'yotta',
            "aud": Registry_Base_URL,
            "prn": key_id,
            "exp": str(expires)
        }
        logger.debug('token fields: %s' % token_fields)
        self.token = jwt.encode(token_fields, private_key, 'RS256')
        logger.debug('encoded token: %s' % self.token)
    def on_request(self, request):
        # Attach the bearer token to the outgoing request.
        request.headers['Authorization'] = 'Bearer ' + self.token
def _pubkeyWireFormat(pubkey):
    ''' Return the bare, URL-quoted base64 body of an OpenSSH-exported
        public key (type prefix and comment removed). '''
    exported = pubkey.exportKey('OpenSSH')
    keybody = _OpenSSH_Keyfile_Strip.sub('', exported)
    return urllib.quote(keybody)
def _fingerprint(pubkey):
    ''' Return the MD5 fingerprint ("aa:bb:cc:...") of an OpenSSH-exported
        public key, computed over the decoded base64 key body. '''
    keybody = _OpenSSH_Keyfile_Strip.sub('', pubkey.exportKey('OpenSSH'))
    digest = hashlib.md5(base64.b64decode(keybody)).hexdigest()
    # join the hex digest as colon-separated byte pairs
    pairs = (digest[i:i + 2] for i in xrange(0, len(digest), 2))
    return ':'.join(pairs)
def _registryAuthFilter():
    ''' Build the request filter used to authenticate against the registry. '''
    # basic auth until we release publicly, to prevent outside registry access,
    # after that this will be removed
    private_key = _getPrivateKeyObject()
    return _BearerJWTFilter(private_key)
def _returnRequestError(fn):
''' Decorator that captures un-caught restkit_errors.RequestFailed errors
and returns them as an error message. If no error occurs the reture
value of the wrapped function is returned (normally None). '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except restkit_errors.RequestFailed as e:
return "sever returned status %s: %s" % (e.status_int, e.message)
return wrapped
def _handleAuth(fn):
''' Decorator to re-try API calls after asking the user for authentication. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except restkit_errors.Unauthorized as e:
github_access.authorizeUser()
logger.debug('trying with authtoken:', settings.getProperty('github', 'authtoken'))
return fn(*args, **kwargs)
return wrapped
def _friendlyAuthError(fn):
''' Decorator to print a friendly you-are-not-authorised message. Use
**outside** the _handleAuth decorator to only print the message after
the user has been given a chance to login. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except restkit_errors.Unauthorized as e:
logger.error('insufficient permission')
return None
return wrapped
def _listVersions(namespace, name):
    ''' Return every published version of <namespace>/<name> as a list of
        RegistryThingVersion objects.

        Raises access_common.ComponentUnavailable when the registry reports
        that the thing does not exist (HTTP 404). '''
    # list versions of the package:
    url = '%s/%s/%s/versions' % (
        Registry_Base_URL,
        namespace,
        name
    )
    request_headers = {}
    token_filter = _registryAuthFilter()
    resource = Resource(url, pool=connection_pool.getPool(), filters=[token_filter])
    try:
        logger.info('get versions for ' + name)
        response = resource.get(headers=request_headers)
    except restkit_errors.ResourceNotFound as e:
        raise access_common.ComponentUnavailable(
            '%s does not exist in the %s registry' % (name, namespace)
        )
    version_list = ordered_json.loads(response.body_string())
    return [RegistryThingVersion(entry, namespace, name) for entry in version_list]
def _tarballURL(namespace, name, version):
    ''' Return the registry URL of the tarball for one specific version. '''
    parts = (Registry_Base_URL, namespace, name, version)
    return '%s/%s/%s/versions/%s/tarball' % parts
def _getTarball(url, directory, sha256):
    ''' Download the tarball at ``url`` and unpack it into ``directory``,
        verifying the stream against the given sha256 hex digest (a warning
        is logged and the check skipped when no hash is available). '''
    auth = _registryAuthFilter()
    logger.debug('registry: get: %s' % url)
    if not sha256:
        logger.warn('tarball %s has no hash to check' % url)
    resource = Resource(url, pool=connection_pool.getPool(), filters=[auth])
    #resource = Resource('http://blobs.yottos.org/targets/stk3700-0.0.0.tar.gz', pool=connection_pool.getPool(), follow_redirect=True)
    response = resource.get()
    # there seems to be an issue with following the redirect using restkit:
    # follow redirect manually
    if response.status_int == 302 and 'Location' in response.headers:
        redirect_url = response.headers['Location']
        logger.debug('registry: redirect to: %s' % redirect_url)
        # note: the redirect target (e.g. a blob store) is fetched WITHOUT
        # the registry auth filter
        resource = Resource(redirect_url, pool=connection_pool.getPool())
        response = resource.get()
    return access_common.unpackTarballStream(response.body_stream(), directory, ('sha256', sha256))
def _generateAndSaveKeys():
    ''' Generate a fresh 2048-bit RSA key pair, persist both halves to the
        settings store as hex-encoded DER, and return (public, private). '''
    key = RSA.generate(2048)
    private_hex = binascii.hexlify(key.exportKey('DER'))
    settings.setProperty('keys', 'private', private_hex)
    public_hex = binascii.hexlify(key.publickey().exportKey('DER'))
    settings.setProperty('keys', 'public', public_hex)
    return public_hex, private_hex
def _getPrivateKeyObject():
    ''' Return the user's RSA private key object, generating and saving a
        new key pair first if none has been stored yet. '''
    stored_hex = settings.getProperty('keys', 'private')
    if not stored_hex:
        _, stored_hex = _generateAndSaveKeys()
    return RSA.importKey(binascii.unhexlify(stored_hex))
# API
class RegistryThingVersion(access_common.RemoteVersion):
    ''' A single published version of a module/target in the registry. '''
    def __init__(self, data, namespace, name):
        # ``data`` is one element of the registry's /versions response; it
        # carries at least a 'version' string and optionally a content hash
        logger.debug('RegistryThingVersion %s/%s data: %s' % (namespace, name, data))
        version = data['version']
        self.namespace = namespace
        self.name = name
        if 'hash' in data and 'sha256' in data['hash']:
            self.sha256 = data['hash']['sha256']
        else:
            # no hash published: _getTarball will log a warning and skip checking
            self.sha256 = None
        url = _tarballURL(self.namespace, self.name, version)
        super(RegistryThingVersion, self).__init__(version, url)
    def unpackInto(self, directory):
        ''' Download this version's tarball and unpack it into ``directory``. '''
        assert(self.url)
        _getTarball(self.url, directory, self.sha256)
class RegistryThing(access_common.RemoteComponent):
    ''' A component (module or target) that can be fetched from the public
        registry. '''
    def __init__(self, name, version_spec, namespace):
        self.name = name
        self.spec = version.Spec(version_spec)
        self.namespace = namespace

    @classmethod
    def createFromNameAndSpec(cls, version_spec, name, registry):
        ''' returns a registry component for anything that's a valid package
            name (this does not guarantee that the component actually exists in
            the registry: use availableVersions() for that).
        '''
        # we deliberately allow only lowercase, hyphen, and (unfortunately)
        # numbers in package names, to reduce the possibility of confusingly
        # similar names: if the name doesn't match this then escalate to make
        # the user fix it
        name_match = re.match('^([a-z0-9-]+)$', name)
        if not name_match:
            logger.warning(
                'Dependency name "%s" is not valid (must contain only lowercase letters, hyphen, and numbers)' % name
            )
            return None
        try:
            # validate the version spec up front (raises ValueError if invalid)
            version.Spec(version_spec)
            return RegistryThing(name, version_spec, registry)
        except ValueError:
            # fixed: was the Python-2-only "except ValueError, e" form, and
            # the bound exception was never used
            return None

    def versionSpec(self):
        return self.spec

    def availableVersions(self):
        ''' return a list of Version objects, each able to retrieve a tarball '''
        return _listVersions(self.namespace, self.name)

    def tipVersion(self):
        raise NotImplementedError()

    @classmethod
    def remoteType(cls):
        return 'registry'
@_returnRequestError
@_handleAuth
def publish(namespace, name, version, description_file, tar_file, readme_file, readme_file_ext):
    ''' Publish a tarblob to the registry, if the request fails, an exception
        is raised, which either triggers re-authentication, or is turned into a
        return value by the decorators. (If successful, the decorated function
        returns None)
    '''
    url = '%s/%s/%s/versions/%s' % (
        Registry_Base_URL,
        namespace,
        name,
        version
    )

    if readme_file_ext == '.md':
        readme_section_name = 'readme.md'
    elif readme_file_ext == '':
        readme_section_name = 'readme'
    else:
        # fixed NameError: this previously referenced the misspelled
        # "readne_file_ext" and crashed instead of reporting the bad type
        raise ValueError('unsupported readme type: "%s"' % readme_file_ext)

    # description file is in place as text (so read it), tar file is a file
    body = OrderedDict([('metadata', description_file.read()),
                        ('tarball', tar_file),
                        (readme_section_name, readme_file)])
    headers = {}
    body, headers = multipart_form_encode(body, headers, uuid.uuid4().hex)

    auth = _registryAuthFilter()
    resource = Resource(url, pool=connection_pool.getPool(), filters=[auth])
    response = resource.put(
        headers = headers,
        payload = body
    )
    return None
@_friendlyAuthError
@_handleAuth
def listOwners(namespace, name):
    ''' List the owners of a module or target (owners are the people with
        permission to publish versions and add/remove the owners).
    '''
    url = '%s/%s/%s/owners' % (Registry_Base_URL, namespace, name)
    resource = Resource(url, pool=connection_pool.getPool(), filters=[_registryAuthFilter()])
    try:
        response = resource.get()
    except restkit_errors.ResourceNotFound:
        logger.error('no such %s, "%s"' % (namespace, name))
        return None
    return ordered_json.loads(response.body_string())
@_friendlyAuthError
@_handleAuth
def addOwner(namespace, name, owner):
    ''' Add an owner for a module or target (owners are the people with
        permission to publish versions and add/remove the owners).
    '''
    url = '%s/%s/%s/owners/%s' % (Registry_Base_URL, namespace, name, owner)
    resource = Resource(url, pool=connection_pool.getPool(), filters=[_registryAuthFilter()])
    try:
        resource.put()
    except restkit_errors.ResourceNotFound:
        logger.error('no such %s, "%s"' % (namespace, name))
@_friendlyAuthError
@_handleAuth
def removeOwner(namespace, name, owner):
    ''' Remove an owner for a module or target (owners are the people with
        permission to publish versions and add/remove the owners).
    '''
    url = '%s/%s/%s/owners/%s' % (Registry_Base_URL, namespace, name, owner)
    resource = Resource(url, pool=connection_pool.getPool(), filters=[_registryAuthFilter()])
    try:
        resource.delete()
    except restkit_errors.ResourceNotFound:
        logger.error('no such %s, "%s"' % (namespace, name))
def deauthorize():
    ''' Wipe any stored key material, forcing re-authentication next time. '''
    for key_name in ('private', 'public'):
        if settings.getProperty('keys', key_name):
            settings.setProperty('keys', key_name, '')
def getPublicKey():
    ''' Return the user's public key (generating and saving a new key pair if necessary) '''
    pubkey_hex = settings.getProperty('keys', 'public')
    if not pubkey_hex:
        # fixed: previously a throwaway key pair was generated and saved
        # inline here, only to be immediately overwritten by the pair from
        # _generateAndSaveKeys() -- generate exactly one pair instead
        pubkey_hex, _ = _generateAndSaveKeys()
    return _pubkeyWireFormat(RSA.importKey(binascii.unhexlify(pubkey_hex)))
def testLogin():
    ''' Make an authenticated request for the current user; raises (e.g.
        restkit_errors.Unauthorized) if the stored credentials are rejected. '''
    url = '%s/users/me' % Registry_Base_URL
    resource = Resource(url, pool=connection_pool.getPool(), filters=[_registryAuthFilter()])
    logger.debug('test login...')
    resource.get(headers={})
def getAuthData():
    ''' Poll the registry to get the result of a completed authentication
        (which, depending on the authentication the user chose or was directed
        to, will include a github or other access token)

        Returns a dict (possibly empty) mapping provider name to access
        token, or None if no tokens are available yet.
    '''
    url = '%s/tokens' % Registry_Base_URL
    resource = Resource(url, pool=connection_pool.getPool(), filters=[_registryAuthFilter()])
    try:
        logger.debug('poll for tokens...')
        response = resource.get(headers={})
    except (restkit_errors.Unauthorized,
            restkit_errors.ResourceNotFound,
            restkit_errors.RequestFailed) as e:
        # the three previously-duplicated handlers collapsed into one: any
        # failure here just means "no tokens available yet"
        logger.debug(str(e))
        return None
    body = response.body_string()
    logger.debug('auth data response: %s' % body)
    r = {}
    for token in ordered_json.loads(body):
        if token['provider'] == 'github':
            r['github'] = token['accessToken']
            break
    logger.debug('parsed auth tokens %s' % r)
    return r
def openBrowserLogin(provider=None):
    ''' Open the user's web browser on the registry login page, optionally
        pre-selecting an authentication provider. '''
    if provider:
        # fixed: the query string previously hard-coded "github" and
        # silently ignored the requested provider
        query = '?provider=%s' % provider
    else:
        query = ''
    webbrowser.open(Website_Base_URL + '/#login/' + getPublicKey() + query)
| [
"[email protected]"
]
| |
b4e5d65cf41c5b676b3c8fea171d87bae446ba4b | 88608583e66b2084a8fe010d18a4bc779a9ea8eb | /torcv/links/model/inception/__init__.py | 06148b1d3907003901ba7919852b9854ece3df5b | []
| no_license | UdonDa/torcv | c4f1f1cac99d49a5fe0d3edef6293659d807f292 | 49e548d538933f5eb5a4ffe1cb529914b180dae2 | refs/heads/master | 2020-04-28T05:25:09.183032 | 2019-03-19T02:39:52 | 2019-03-19T02:39:52 | 175,019,302 | 1 | 0 | null | 2019-03-19T02:39:53 | 2019-03-11T14:37:18 | Python | UTF-8 | Python | false | false | 51 | py | from torcv.links.model.inception.inception import * | [
"[email protected]"
]
| |
208410d3e358a10f563e5f103349fd22130cf43d | aae3d55b9d2004e04c5917a31408384a4269a425 | /astrodash/save_binned_templates_as_arrays.py | 46d645225b46ac3a4d4829533285989d5f651758 | [
"MIT"
]
| permissive | daniel-muthukrishna/astrodash | 5b1ee330d2ae2d9cc43f5c52d0765359aa40673f | acc241ad73133894d93ef16733cf0f1fb4ca7b87 | refs/heads/master | 2023-04-04T03:27:07.480846 | 2023-03-22T17:02:08 | 2023-03-22T17:02:08 | 75,250,754 | 23 | 12 | MIT | 2019-04-26T15:27:30 | 2016-12-01T03:19:18 | Python | UTF-8 | Python | false | false | 4,018 | py | import numpy as np
import pickle
import os
from astrodash.create_arrays import AgeBinning
from astrodash.helpers import temp_list
from astrodash.combine_sn_and_host import BinTemplate
def create_sn_and_host_arrays(snTemplateDirectory, snTempFileList, galTemplateDirectory, galTempFileList, paramsFile):
    """Bin supernova and host-galaxy template spectra onto a common wavelength grid.

    Returns a pair of nested dicts:
      snTemplates[snType][ageLabel] -> {'snInfo': ndarray, 'names': ndarray}
      galTemplates[galType]         -> {'galInfo': ndarray, 'names': ndarray}
    """
    snTemplates = {}
    galTemplates = {}
    snList = temp_list(snTempFileList)
    galList = temp_list(galTempFileList)
    # training parameters: wavelength grid (w0..w1, nw bins), SN/galaxy type
    # lists, and the age-binning configuration
    with open(paramsFile, 'rb') as f:
        pars = pickle.load(f)
    w0, w1, nw, snTypes, galTypes, minAge, maxAge, ageBinSize = pars['w0'], pars['w1'], pars['nw'], pars['typeList'], \
                                                                pars['galTypeList'], pars['minAge'], pars['maxAge'], \
                                                                pars['ageBinSize']
    ageBinning = AgeBinning(minAge, maxAge, ageBinSize)
    ageLabels = ageBinning.age_labels()
    # Create dictionary of dictionaries for type and age of SN
    for snType in snTypes:
        snTemplates[snType] = {}
        for ageLabel in ageLabels:
            snTemplates[snType][ageLabel] = {}
            snTemplates[snType][ageLabel]['snInfo'] = []
            snTemplates[snType][ageLabel]['names'] = []
    for galType in galTypes:
        galTemplates[galType] = {}
        galTemplates[galType]['galInfo'] = []
        galTemplates[galType]['names'] = []
    # Read individual SN template files; each file holds one spectrum per epoch
    for snFile in snList:
        snBinTemplate = BinTemplate(snTemplateDirectory + snFile, 'sn', w0, w1, nw)
        nAges = snBinTemplate.nCols
        ages = snBinTemplate.ages
        snType = snBinTemplate.tType
        filename = snBinTemplate.filename
        for ageIdx in range(nAges):
            age = ages[ageIdx]
            # keep only epochs strictly inside the configured age range
            if minAge < age < maxAge:
                ageBin = ageBinning.age_bin(age)
                ageLabel = ageLabels[ageBin]
                snInfo = snBinTemplate.bin_template(ageIdx)
                snTemplates[snType][ageLabel]['snInfo'].append(snInfo)
                snTemplates[snType][ageLabel]['names'].append("%s_%s" % (filename, age))
            print("Reading {} {} out of {}".format(snFile, ageIdx, nAges))
    # Galaxy templates have a single spectrum each (no age dimension)
    for galFile in galList:
        galBinTemplate = BinTemplate(galTemplateDirectory + galFile, 'gal', w0, w1, nw)
        galType = galBinTemplate.tType
        filename = galBinTemplate.filename
        galInfo = galBinTemplate.bin_template()
        galTemplates[galType]['galInfo'].append(galInfo)
        galTemplates[galType]['names'].append(filename)
        print("Reading {}".format(galFile))
    # Convert lists in dictionaries to numpy arrays
    for snType in snTypes:
        for ageLabel in ageLabels:
            snTemplates[snType][ageLabel]['snInfo'] = np.array(snTemplates[snType][ageLabel]['snInfo'])
            snTemplates[snType][ageLabel]['names'] = np.array(snTemplates[snType][ageLabel]['names'])
    for galType in galTypes:
        galTemplates[galType]['galInfo'] = np.array(galTemplates[galType]['galInfo'])
        galTemplates[galType]['names'] = np.array(galTemplates[galType]['names'])
    return snTemplates, galTemplates
def save_templates():
    ''' Bin every SN and host-galaxy template and store the result as a
        compressed .npz archive; returns the path of the saved file. '''
    script_directory = os.path.dirname(os.path.abspath(__file__))
    parameter_file = 'models_v06/models/zeroZ/training_params.pickle'
    sn_template_directory = os.path.join(script_directory, "../templates/training_set/")
    sn_temp_file_list = sn_template_directory + 'templist.txt'
    gal_template_directory = os.path.join(script_directory, "../templates/superfit_templates/gal/")
    gal_temp_file_list = gal_template_directory + 'gal.list'
    save_filename = 'models_v06/models/sn_and_host_templates.npz'

    sn_templates, gal_templates = create_sn_and_host_arrays(
        sn_template_directory, sn_temp_file_list, gal_template_directory,
        gal_temp_file_list, parameter_file)
    np.savez_compressed(save_filename, snTemplates=sn_templates, galTemplates=gal_templates)

    return save_filename
if __name__ == "__main__":
unCombinedTemplates = save_templates()
| [
"[email protected]"
]
| |
2cfa6771adfad25d67580278d8e90dd45946b759 | 046207f434966462fff55f634ba5a450d2208534 | /CodeUp/1277_0200.py | d529aa1563e5cd2f9c1f8347f1cc93d651e5b6cf | []
| no_license | sungsikyang92/pythonStudy | e293e1ac8af443809f840ccee7052a8f57480b70 | 26522b5e232ccd9ab25c52122d254aa7249a8fdf | refs/heads/master | 2023-07-04T16:58:40.318976 | 2021-08-04T02:00:27 | 2021-08-04T02:00:27 | 365,398,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | count = int(input())
nums = list(map(int, input().split()))
print(nums[0],nums[int(count/2)],nums[-1]) | [
"[email protected]"
]
| |
4db43b3627ce71b65078c3610a3ad71319c4c739 | a512b8893b0d2de827d6292e810f3a98b41e132c | /Week4/Day6/Solutions/Python/prog3.py | e8ebb18c93788e86255dbf1b31875bd34116bfa1 | []
| no_license | Audarya07/Daily-Flash-Codes | d771079fd0d470e2d3e05679f17f32fb64b4f426 | cf96ca2b1676b038e243fac67be778381492ffeb | refs/heads/master | 2022-11-06T15:37:47.180729 | 2020-06-25T16:20:55 | 2020-06-25T16:20:55 | 274,960,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | for num in range(1,101):
sum = 0
for i in range(1,num):
if num%i==0:
sum+=i
if sum==num:
continue
else:
print(num,end=" ")
print()
| [
"[email protected]"
]
| |
fb30f63ea2395b0dcca9405b88c567a7a4bb60d6 | e7dd192123f404367e9623a357366643742fa723 | /kubernetes/test/test_scheduling_v1beta1_api.py | 700a3b463994104380586917c18869a0959fe020 | [
"Apache-2.0"
]
| permissive | itholic/python | 1772725582f28af445efb233eca6c9139da3ae49 | dffe577a062e17057270ae80fa677ffd83e9d183 | refs/heads/master | 2020-09-12T08:59:16.847326 | 2019-11-15T20:40:32 | 2019-11-15T20:40:32 | 222,375,164 | 0 | 0 | Apache-2.0 | 2019-11-18T06:05:45 | 2019-11-18T06:05:43 | null | UTF-8 | Python | false | false | 1,718 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.15.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import kubernetes.client
from kubernetes.client.api.scheduling_v1beta1_api import SchedulingV1beta1Api # noqa: E501
from kubernetes.client.rest import ApiException
class TestSchedulingV1beta1Api(unittest.TestCase):
    """SchedulingV1beta1Api unit test stubs"""
    # NOTE: generated by openapi-generator; each test body is an intentionally
    # empty placeholder awaiting real API-call assertions.

    def setUp(self):
        # fresh API client instance for every test
        self.api = kubernetes.client.api.scheduling_v1beta1_api.SchedulingV1beta1Api()  # noqa: E501

    def tearDown(self):
        pass

    def test_create_priority_class(self):
        """Test case for create_priority_class
        """
        pass

    def test_delete_collection_priority_class(self):
        """Test case for delete_collection_priority_class
        """
        pass

    def test_delete_priority_class(self):
        """Test case for delete_priority_class
        """
        pass

    def test_get_api_resources(self):
        """Test case for get_api_resources
        """
        pass

    def test_list_priority_class(self):
        """Test case for list_priority_class
        """
        pass

    def test_patch_priority_class(self):
        """Test case for patch_priority_class
        """
        pass

    def test_read_priority_class(self):
        """Test case for read_priority_class
        """
        pass

    def test_replace_priority_class(self):
        """Test case for replace_priority_class
        """
        pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
c288c52f8ee60885fe587a639279c2976ed3966e | 9d67cd5f8d3e0ffdd4334a6b9b67c93f8deca100 | /configs/example_old_map_1228.py | 5fa96132f84104c6dbd4a5e0ca228a05b0c82a8a | []
| no_license | SiyuanLee/caps | 0c300a8e5a9a661eca4b2f59cd38125ddc35b6d3 | 476802e18ca1c7c88f1e29ed66a90c350aa50c1f | refs/heads/master | 2021-06-20T22:48:16.230354 | 2021-02-22T13:21:57 | 2021-02-22T13:21:57 | 188,695,489 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,754 | py | """
This is the example config file
larger lr
beta no bias
lower explr
comment: too small!
not target beta
"""
import numpy as np
# More one-char representation will be added in order to support
# other objects.
# The following a=10 is an example although it does not work now
# as I have not included a '10' object yet.
a = 10
# This is the map array that represents the map
# You have to fill the array into a (m x n) matrix with all elements
# not None. A strange shape of the array may cause malfunction.
# Currently available object indices are # they can fill more than one element in the array.
# 0: nothing
# 1: wall
# 2: ladder
# 3: coin
# 4: spike
# 5: triangle -------source
# 6: square ------ source
# 7: coin -------- target
# 8: princess -------source
# 9: player # elements(possibly more than 1) filled will be selected randomly to place the player
# unsupported indices will work as 0: nothing
map_array = [
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 0, 0, 5, 1, 0, 0, 0, 6, 0, 1],
[1, 9, 9, 9, 1, 9, 9, 9, 9, 9, 1],
[1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1],
[1, 0, 2, 0, 0, 0, 2, 0, 7, 0, 1],
[1, 0, 2, 0, 0, 0, 2, 0, 0, 0, 1],
[1, 9, 2, 9, 9, 9, 2, 9, 9, 9, 1],
[1, 2, 1, 1, 1, 2, 1, 1, 1, 2, 1],
[1, 2, 0, 1, 0, 2, 0, 1, 0, 2, 1],
[1, 2, 9, 1, 9, 2, 8, 1, 9, 2, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
]
# set to true -> win when touching the object
# 0, 1, 2, 3, 4, 9 are not possible
end_game = {
8: True,
}
rewards = {
"positive": 0, # when collecting a coin
"win": 1, # endgame (win)
"negative": -25, # endgame (die)
"tick": 0 # living
}
######### dqn only #########
# ensure correct import
import os
import sys
__file_path = os.path.abspath(__file__)
__dqn_dir = '/'.join(str.split(__file_path, '/')[:-2]) + '/'
sys.path.append(__dqn_dir)
__cur_dir = '/'.join(str.split(__file_path, '/')[:-1]) + '/'
from dqn_utils import PiecewiseSchedule
# load the random sampled obs
import pickle
pkl_file = __cur_dir + 'eval_obs_array_random_old_map.pkl'
with open(pkl_file, 'rb') as f:
eval_obs_array = pickle.loads(f.read())
def seed_func():
    ''' Draw a fresh random seed in [0, 1000) for each game instance. '''
    return np.random.randint(low=0, high=1000)
num_timesteps = 2e6 # 40 epoch
learning_freq = 4
# training iterations to go
num_iter = num_timesteps / learning_freq
# piecewise learning rate
lr_multiplier = 1.0
learning_rate = PiecewiseSchedule([
(0, 1e-4 * lr_multiplier),
(num_iter / 10, 1e-4 * lr_multiplier),
(num_iter / 2, 5e-5 * lr_multiplier),
], outside_value=5e-5 * lr_multiplier)
learning_rate_term = PiecewiseSchedule([
(0, 2e-4 * lr_multiplier),
(num_iter / 40, 1e-3 * lr_multiplier),
(num_iter / 20, 1e-2 * lr_multiplier),
(num_iter / 10, 5e-2 * lr_multiplier),
(num_iter * 3 / 4, 5e-3 * lr_multiplier),
(num_iter * 7 / 8, 5e-4 * lr_multiplier),
], outside_value=5e-4 * lr_multiplier)
# piecewise exploration rate
exploration = PiecewiseSchedule([
(0, 1.0),
(num_iter / 40, 0.97),
(num_iter * 3 / 8, 0.7),
(num_iter * 7 / 8, 0.05),
], outside_value=0.05)
######### transfer only #########
import tensorflow as tf
source_dirs = [
# an old map policy
'/home/beeperman/Project/ple-monsterkong/examples/dqn_new/logs/old_map_mod_target_1c_12_07_17_22:15:51/dqn',
'/home/beeperman/Project/ple-monsterkong/examples/dqn_new/logs/old_map_mod_target_2_12_13_17_19:12:07/dqn',
#'/home/beeperman/Project/ple-monsterkong/examples/dqn_new/logs/old_map_mod_target_3_12_13_17_19:13:03/dqn',
'/home/beeperman/Project/ple-monsterkong/examples/dqn_new/logs/old_map_mod_target_4_12_23_17_16:20:56/dqn',
]
transfer_config = {
'source_dirs': source_dirs,
'online_q_omega': False, # default false off policy with experience replay
'q_omega_uniform_sample': False, # default false
'four_to_two': True, # default false frame_history_len must be 4!
'source_noop': False, # default false (false means source policies HAS noop action)
'no_share_para': True, # default false set to true to stop sharing parameter between q network and q_omega/term
'xi': 0.005, # default none you may specify a constant. none means xi = 0.5 (q_omega_val - q_omega_second_max)
'target_beta': False, # default false (true means using target beta)
'termination_stop': True, # default false train cnn when training beta online
'learning_rate_term': learning_rate_term,
'beta_no_bias': True, # default false prune bias for termination function
}
dqn_config = {
'seed': seed_func, # will override game settings
'num_timesteps': num_timesteps,
'replay_buffer_size': 1000000,
'batch_size': 32,
'gamma': 0.99,
'learning_starts': 50000,
'learning_freq': learning_freq,
'frame_history_len': 4,
'target_update_freq': 10000,
'grad_norm_clipping': 10,
'learning_rate': learning_rate,
'exploration': exploration,
'eval_obs_array': eval_obs_array, # TODO: construct some eval_obs_array
'room_q_interval': 1e5, # q_vals will be evaluated every room_q_interval steps
'epoch_size': 5e4, # you decide any way
'config_name': str.split(__file_path, '/')[-1].replace('.py', ''), # the config file name
'transfer_config': transfer_config,
}
map_config = {
'map_array': map_array,
'rewards': rewards,
'end_game': end_game,
'init_score': 0,
'init_lives': 1, # please don't change, not going to work
# configs for dqn
'dqn_config': dqn_config,
# work automatically only for aigym wrapped version
'fps': 1000,
'frame_skip': 1,
'force_fps': True, # set to true to make the game run as fast as possible
'display_screen': True,
'episode_length': 1200,
'episode_end_sleep': 0., # sec
} | [
"[email protected]"
]
| |
5abefd1028b6cccfdaa7eb87f9bf76914e4e80f7 | ef187d259d33e97c7b9ed07dfbf065cec3e41f59 | /work/atcoder/abc/abc063/A/answers/322689_irin0890.py | 4d6bb223af0fd8d70f6f64a17ba23573afd95cd4 | []
| no_license | kjnh10/pcw | 847f7295ea3174490485ffe14ce4cdea0931c032 | 8f677701bce15517fb9362cc5b596644da62dca8 | refs/heads/master | 2020-03-18T09:54:23.442772 | 2018-07-19T00:26:09 | 2018-07-19T00:26:09 | 134,586,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | a, b = map(int, input().split())
if a+b >= 10:
print('error')
else:
print(a+b) | [
"[email protected]"
]
| |
07ff6980884d70cacc711dfc287bfbf96c7c733e | f4b694982027ac362de1e9d6755f2943d0355a06 | /DECSKS-03 -- Convergence of FD formulation of high order CS/pyfiles/plots_df9_comparison.py | 6ef2da6f2b41cabe799772b8df49ec3244e370d7 | []
| no_license | dsirajud/IPython-notebooks | 55275e44191c16f5393571522787993f931cfd98 | 6ad9d978c611558525fc9d716af101dc841a393b | refs/heads/master | 2021-01-15T15:33:57.119172 | 2016-07-13T20:08:29 | 2016-07-13T20:08:29 | 35,054,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | import matplotlib.pyplot as plt
import numpy as np
from convergence_routines import *
# Compare the exact 9th derivative with its finite-difference approximation
# (order p=3) on an Nx-point grid, overplotting a scaled df8 for reference.
Nx = 2488
x, dx, L = domain(_Nx = Nx)
L2error, df9_approx = FD_derivative_matrix_formulation(_dn = 9, _p = 3, _Nx = Nx)
df9_exact = df9(x)
plt.plot(x,df9_exact, label = 'exact df9', linewidth = 3)
plt.hold('on')
plt.plot(x,df9_approx, label = 'approx df9', linewidth = 1, color = "red")
# compare with the function whose derivative this is
df8_exact = df8(x)
# NOTE(review): the legend label says 'exact df4' but this curve is a scaled
# df8 -- likely mislabeled; confirm before trusting the figure legend
plt.plot(x,df8_exact * np.abs(np.min(df9_approx)) / np.abs(np.min(df8_exact)), label = 'exact df4', linewidth = 1, color = "cyan")
plt.hold('off')
plt.legend(loc = 'best')
plt.grid()
plt.show()
"[email protected]"
]
| |
d59800358316a58679932c187a9225e40f43364e | b08d42933ac06045905d7c005ca9c114ed3aecc0 | /src/learningCurve/leaveOneOut/lrClassifierF.py | 36c020e785dfac7d8a00613b3398404787143651 | []
| no_license | TanemuraKiyoto/PPI-native-detection-via-LR | d148d53f5eb60a4dda5318b371a3048e3f662725 | 897e7188b0da94e87126a4acc0c9a6ff44a64574 | refs/heads/master | 2022-12-05T11:59:01.014309 | 2020-08-10T00:41:17 | 2020-08-10T00:41:17 | 225,272,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,134 | py | # 9 September 2019
# Kiyoto Aramis Tanemura
# I modified the rfClassifier.py script to implement a logistic regression classifier. This classifier runs faster than the random forest classifier and Jun previously observed comparable results between logistic regression and random forest classifiers for the protein folding system. Due to the lesser time cost, I may sample a greater hyperparameter space using the logistic regression classifier. If the sampling yields a region in which overfitting is not observed, then I can refine the search. If the results are similar to that of the random forest classifier, then I may have exhausted the dataset for generalizability.
# Modified 26 October 2019 by Kiyoto Aramis Tanemura. Apply logistic regression classifier to CASF-PPI dataset.
# Modified 2020-02-09 by KAT. Code generalized for public use on GitHub.
import pandas as pd
import numpy as np
import os
import json
import pickle
#from multiprocessing import Pool
from time import time
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import RandomizedSearchCV
from sklearn.preprocessing import StandardScaler
from random import shuffle, random
#os.chdir('/mnt/scratch/tanemur1/')
toc = time()
# Randomize input file orders
pathToInput = 'data/comparison_descriptors/'
pathToOutput = 'results/learningCurve/'
fileNames = [x for x in os.listdir(pathToInput) if '.csv' in x]
shuffle(fileNames) # note: shuffle is in-place. Do not assign to variable
# Specify training set fraction
train_fraction = 0.99
if len(fileNames) * train_fraction == int(len(fileNames) * train_fraction):
train_file_number = int(len(fileNames) * train_fraction)
else:
train_file_number = int(len(fileNames) * train_fraction + 1)
x_train = pd.DataFrame()
y_train = pd.DataFrame()
# Read individual csv for comparison descriptors, append to train_data, and partition to x_train, y_train
fileNamesWithPath = [pathToInput + fileName for fileName in fileNames]
def read_csv(filePath):
    ''' Load one comparison-descriptor CSV, using its first column as the index. '''
    return pd.read_csv(filePath, index_col=0)
print('begin read training set')
#with Pool(np.min([train_file_number, 28])) as p:
# train_dataList = list(p.map(read_csv, fileNamesWithPath[:train_file_number]))
train_dataList = list(map(read_csv, fileNamesWithPath[:train_file_number]))
print('begin append DF | ', (time() - toc) / 60, ' min')
# Append DataFrames into one. While loop used to reduce append operations. Iteratively, DFs in a list are appended
# to the following DF.
while len(train_dataList) != 1:
number = int(len(train_dataList) / 2)
for i in range(number):
train_dataList[2 * i] = train_dataList[2 * i].append(train_dataList[2 * i + 1], sort = True)
for j in range(number):
del train_dataList[j + 1]
x_train = train_dataList[0]
del train_dataList
print('train_data dimensions', x_train.shape, ' | ', (time() - toc) / 60, ' min')
y_train = x_train['class']
x_train = x_train.drop('class', axis = 1) # x_train contains only nonbonding descriptors
feature_names = x_train.columns
scaler = StandardScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
y_train = y_train.values
print('Dimensions x_train ', x_train.shape, ' | y_train', y_train.shape)
# Define a logistic regression classifier along with pertinent hyperparameters. Here, default values are used.
clf = LogisticRegression(penalty='l2', verbose = 1)
def sampleRationalVals(minVal, maxVal):
    ''' Draw one value log-uniformly distributed between minVal and maxVal. '''
    lo, hi = np.log2(minVal), np.log2(maxVal)
    return 2 ** (random() * (hi - lo) + lo)
def sampleRationalList(minVal, maxVal):
    ''' Draw a list of log-uniform samples; the sample count grows with the
        (log) width of the [minVal, maxVal] range. '''
    n_samples = int(2 * np.log2(maxVal - minVal) + 1)
    return [sampleRationalVals(minVal, maxVal) for _ in range(n_samples)]
parameters = {
# include any hyperparameters to sample. Otherwise, leave empty to perform five fold cross validation with default values. For example:
# 'C': sampleRationalList(0.001, 1000),
# 'solver': ['newton-cg', 'lbfgs', 'sag','saga']
}
print('begin RandomizedSearchCV | ' + str((time() - toc)/60) + ' mins')
randomized_search = RandomizedSearchCV(estimator = clf, param_distributions = parameters, n_iter = 1, scoring = 'accuracy', refit = True, cv = 5, verbose = 1, n_jobs = 1, pre_dispatch = 'n_jobs', return_train_score=True)
randomized_search.fit(x_train, y_train)
print('begin output | ', (time() - toc) / 60 / 60, ' hours')
tic = time()
with open(pathToOutput + 'bestParamF.json', 'w') as g:
json.dump(randomized_search.best_estimator_.get_params(), g)
with open(pathToOutput + 'modelF.pkl', 'wb') as h:
pickle.dump(randomized_search, h)
with open(pathToOutput + 'trainingSetF.txt', 'w') as i:
i.write('Training set:\n')
for pdbID in fileNames[:train_file_number]:
i.write(pdbID + '\n')
i.write('\nJob time: ' + str((tic - toc) / 60 / 60) + ' hours')
with open(pathToOutput + 'standardScalerF.pkl', 'wb') as j:
pickle.dump(scaler, j)
bestCoefficient = randomized_search.best_estimator_.coef_
coefDf = pd.DataFrame(bestCoefficient, columns = feature_names)
with open(pathToOutput + 'coefficientsF.csv', 'w') as f:
coefDf.to_csv(f)
| [
"[email protected]"
]
| |
18a28d5e4e839646f65336d3d49006c5a957223d | de0584cdd6a0b452efa3c8bd0e1e43286853c814 | /preprocess/huff/clean_huffpost.py | a2a2d91bc756e5a1c5826ea7fe1277733daea635 | []
| no_license | johnsonice/triplet-loss | a325ecd229b5346aaca4cb0556bbc18e9e4eae26 | 71c13dfa7631ec93c564d9dc9da4fcf667eb9500 | refs/heads/master | 2023-08-24T17:49:01.593415 | 2021-10-23T16:27:26 | 2021-10-23T16:27:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,823 | py | import json
from random import shuffle
#cleaning up text
import re
def get_only_chars(line):
    """Lower-case *line*, keeping only ascii letters and single spaces.

    Apostrophes (curly and straight) are removed entirely; hyphens, tabs and
    newlines become spaces; every other non-letter becomes a space; runs of
    spaces are collapsed and a single leading space is dropped.
    """
    clean_line = ""

    line = line.replace("’", "")
    line = line.replace("'", "")
    line = line.replace("-", " ") #replace hyphens with spaces
    line = line.replace("\t", " ")
    line = line.replace("\n", " ")
    line = line.lower()

    for char in line:
        if char in 'qwertyuiopasdfghjklzxcvbnm ':
            clean_line += char
        else:
            clean_line += ' '
    clean_line = re.sub(' +',' ',clean_line) #delete extra spaces
    # fixed: indexing clean_line[0] raised IndexError on empty input;
    # startswith is safe for the empty string
    if clean_line.startswith(' '):
        clean_line = clean_line[1:]
    return clean_line
def clean_dataset(file_path, output_path_train, output_path_test):
    """Split the HuffPost JSONL dump into cleaned train/test headline files.

    Each output line is "<category_id>\t<cleaned headline>"; headlines of
    10 characters or fewer are discarded. Per category, 300 shuffled
    headlines go to the test file and the next 700 to the train file.
    """
    lines = open(file_path, 'r').readlines()
    category_to_headlines = {}
    for line in lines:
        # strip the trailing newline before parsing each JSON record
        d = json.loads(line[:-1])
        category = d['category']
        headline = d['headline']
        if len(headline) > 10:
            if category in category_to_headlines:
                category_to_headlines[category].append(headline)
            else:
                category_to_headlines[category] = [headline]
    # category ids are assigned by alphabetical order of category name
    category_to_id = {category: i for i, category in enumerate(list(sorted(list(category_to_headlines.keys()))))}
    # NOTE(review): these writers are never explicitly closed/flushed
    train_writer = open(output_path_train, 'w')
    test_writer = open(output_path_test, 'w')
    for category, headlines in category_to_headlines.items():
        _id = category_to_id[category]
        # shuffle in place so the train/test split is random per category
        shuffle(headlines)
        test_headlines = headlines[:300]
        train_headlines = headlines[300:1000]
        for train_headline in train_headlines:
            train_writer.write('\t'.join([str(_id), get_only_chars(train_headline)]) + '\n')
        for test_headline in test_headlines:
            test_writer.write('\t'.join([str(_id), get_only_chars(test_headline)]) + '\n')
if __name__ == "__main__":
clean_dataset('News_Category_dataset_v2.json', 'huffpost/train.txt', 'huffpost/test.txt') | [
"[email protected]"
]
| |
9cdc5953fa52c0c13a48fd139f2abf63be4bcdb2 | 95777f5257f00aa982d94812f46658ace2e92bd2 | /pytorch/pytorchcv/models/model_store.py | 3ddbf0ceb2ababece6f08a706124b80d1c589957 | [
"MIT"
]
| permissive | yangkang779/imgclsmob | ea2c1f9223a3419375e8339c7e941daba69a56a7 | 9d189eae8195d045dfb4b25bec2501b2c42a154a | refs/heads/master | 2020-05-07T08:16:23.658714 | 2019-04-08T16:20:33 | 2019-04-08T16:20:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,684 | py | """
Model store which provides pretrained models.
"""
__all__ = ['get_model_file', 'load_model', 'download_model', 'calc_num_params']
import os
import zipfile
import logging
import hashlib
_model_sha1 = {name: (error, checksum, repo_release_tag) for name, error, checksum, repo_release_tag in [
('alexnet', '2093', '6429d865d917d57d1198e89232dd48a117ddb4d5', 'v0.0.108'),
('vgg11', '1137', '8a64fe7a143dca1d9031475cb6bea5379f4bac3d', 'v0.0.109'),
('vgg13', '1075', '24178cabf4864a238086c7f6f625261acdcbb35c', 'v0.0.109'),
('vgg16', '0892', '10f44f684420e4278427a764f96f5aa9b91ec766', 'v0.0.109'),
('vgg19', '0839', 'd4e69a0d393f4d46f1d9c4d4ba96f5a83de3399c', 'v0.0.109'),
('bn_vgg11b', '1019', '98d7e914a32f1022618ffa390e78c6a523dfcdc1', 'v0.0.110'),
('bn_vgg13b', '0963', 'cf9352f47805c18798c0f80ab0e158ec5401331e', 'v0.0.110'),
('bn_vgg16b', '0874', 'af4f2d0bbfda667e6b7b3ad4cda5ca331021cd18', 'v0.0.110'),
('bn_vgg19b', '0840', 'b6919f7f74b3174a86818062b2d1d4cf5a110b8b', 'v0.0.110'),
('bninception', '0804', '99ff87081fbd04cfe4193910674ffef7cc84b4b0', 'v0.0.139'),
('resnet10', '1436', '67d9a618e8670497386af806564f7ac1a4dbcd76', 'v0.0.248'),
('resnet12', '1328', 'd7d2f4d6c7fcf3aff0458533ae5204b7f0eee2d7', 'v0.0.253'),
('resnet14', '1246', 'd5b55c113168c02f1b39b65f8908b0db467a2d74', 'v0.0.256'),
('resnet16', '1118', 'd54bc41afa244476ca28380111f66d188905ecbc', 'v0.0.259'),
('resnet18_wd4', '1785', 'fe79b31f56e7becab9c014dbc14ccdb564b5148f', 'v0.0.262'),
('resnet18_wd2', '1327', '6654f50ad357f4596502b92b3dca2147776089ac', 'v0.0.263'),
('resnet18_w3d4', '1106', '3636648b504e1ba134947743eb34dd0e78feda02', 'v0.0.266'),
('resnet18', '0982', '0126861b4cd7f7b14196b1e01827da688f8bab6d', 'v0.0.153'),
('resnet34', '0780', '3f775482a327e5fc4850fbb77785bfc55e171e5f', 'v0.0.291'),
('resnet50', '0658', '828686d7a4b0bef906d7bcc115efd894fc5c1e0a', 'v0.0.147'),
('resnet50b', '0645', 'a53df64c736194427d0bd01eadf468e95d45fd35', 'v0.0.146'),
('resnet101', '0622', 'ab0cf005bbe9b17e53f9e3c330c6147a8c80b3a5', 'v0.0.1'),
('resnet101b', '0561', '9fbf0696ed7fe3dbe496d70fff56118674dd0d83', 'v0.0.145'),
('resnet152', '0550', '800b2cb1959a0d3648483e86917502b8f63dc37e', 'v0.0.144'),
('resnet152b', '0534', 'e02a8bf77357f553d57086c3f351f914c765e187', 'v0.0.143'),
('preresnet10', '1421', 'b3973cd4461287d61df081d6f689d293eacf2248', 'v0.0.249'),
('preresnet12', '1348', '563066fa8fcf8b5f19906b933fea784965d68192', 'v0.0.257'),
('preresnet14', '1239', '4be725fd3f06c99c46817fce3b69caf2ebc62414', 'v0.0.260'),
('preresnet16', '1108', '06d8c87e29284dac19a9019485e210541532411a', 'v0.0.261'),
('preresnet18_wd4', '1811', '41135c15210390e9a564b14e8ae2ebda1a662ec1', 'v0.0.272'),
('preresnet18_wd2', '1340', 'c1fe4e314188eeb93302432d03731a91ce8bc9f2', 'v0.0.273'),
('preresnet18_w3d4', '1105', 'ed2f9ca434b6910b92657eefc73ad186396578d5', 'v0.0.274'),
('preresnet18', '0972', '5651bc2dbb200382822a6b64375d240f747cc726', 'v0.0.140'),
('preresnet34', '0774', 'fd5bd1e883048e29099768465df2dd9e891803f4', 'v0.0.300'),
('preresnet50', '0685', 'd81a7aca0384c6d65ee0e5c1f3ba854591466346', 'v0.0.2'),
('preresnet50b', '0687', '65be98fbe7b82c79bccd9c794ce9d9a3482aec9c', 'v0.0.2'),
('preresnet101', '0591', '4bacff796e113562e1dfdf71cfa7c6ed33e0ba86', 'v0.0.2'),
('preresnet101b', '0603', 'b1e37a09424dde15ecba72365d46b1f59abd479b', 'v0.0.2'),
('preresnet152', '0555', 'c842a030abbcc21a0f2a9a8299fc42204897a611', 'v0.0.14'),
('preresnet152b', '0591', '2c91ab2c8d90f3990e7c30fd6ee2184f6c2c3bee', 'v0.0.2'),
('preresnet200b', '0588', 'f7104ff306ed5de2c27f3c855051c22bda167981', 'v0.0.45'),
('preresnet269b', '0581', '1a7878bb10923b22bda58d7935dfa6e5e8a7b67d', 'v0.0.239'),
('resnext101_32x4d', '0611', 'cf962440f11fe683fd02ec04f2102d9f47ce38a7', 'v0.0.10'),
('resnext101_64x4d', '0575', '651abd029bcc4ce88c62e1d900a710f284a8281e', 'v0.0.10'),
('seresnet50', '0640', '8820f2af62421ce2e1df989d6e0ce7916c78ff86', 'v0.0.11'),
('seresnet101', '0589', '5e6e831b7518b9b8a049dd60ed1ff82ae75ff55e', 'v0.0.11'),
('seresnet152', '0576', '814cf72e0deeab530332b16fb9b609e574afec61', 'v0.0.11'),
('seresnext50_32x4d', '0554', '99e0e9aa4578af9f15045c1ceeb684a2e988628a', 'v0.0.12'),
('seresnext101_32x4d', '0505', '0924f0a2c1de90dc964c482b7aff6232dbef3600', 'v0.0.12'),
('senet154', '0461', '6512228c820897cd09f877527a553ca99d673956', 'v0.0.13'),
('ibn_resnet50', '0641', 'e48a1fe5f7e448d4b784ef4dc0f33832f3370a9b', 'v0.0.127'),
('ibn_resnet101', '0561', '5279c78a0dbfc722cfcfb788af479b6133920528', 'v0.0.127'),
('ibnb_resnet50', '0686', 'e138995e6acda4b496375beac6d01cd7a9f79876', 'v0.0.127'),
('ibn_resnext101_32x4d', '0542', 'b5233c663a4d207d08c21107d6c951956e910be8', 'v0.0.127'),
('ibn_densenet121', '0725', 'b90b0615e6ec5c9652e3e553e27851c8eaf01adf', 'v0.0.127'),
('ibn_densenet169', '0651', '96dd755e0df8a54349278e0cd23a043a5554de08', 'v0.0.127'),
('airnet50_1x64d_r2', '0590', '3ec422128d17314124c02e3bb0f77e26777fb385', 'v0.0.120'),
('airnet50_1x64d_r16', '0619', '090179e777f47057bedded22d669bf9f9ce3169c', 'v0.0.120'),
('airnext50_32x4d_r2', '0551', 'c68156e5e446a1116b1b42bc94b3f881ab73fe92', 'v0.0.120'),
('bam_resnet50', '0658', '96a37c82bdba821385b29859ad1db83061a0ca5b', 'v0.0.124'),
('cbam_resnet50', '0605', 'a1172fe679622224dcc88c00020936ad381806fb', 'v0.0.125'),
('pyramidnet101_a360', '0620', '3a24427baf21ee6566d7e4c7dee25da0e5744f7f', 'v0.0.104'),
('diracnet18v2', '1170', 'e06737707a1f5a5c7fe4e57da92ed890b034cb9a', 'v0.0.111'),
('diracnet34v2', '0993', 'a6a661c0c3e96af320e5b9bf65a6c8e5e498a474', 'v0.0.111'),
('densenet121', '0803', 'f994107a83aed162916ff89e2ded4c5af5bc6457', 'v0.0.3'),
('densenet161', '0644', 'c0fb22c83e8077a952ce1a0c9703d1a08b2b9e3a', 'v0.0.3'),
('densenet169', '0719', '271391051775ba9bbf458a6bd77af4b3007dc892', 'v0.0.3'),
('densenet201', '0663', '71ece4ad7be5d1e2aa4bbf6f1a6b32ac2562d847', 'v0.0.3'),
('condensenet74_c4_g4', '0828', '5ba550494cae7081d12c14b02b2a02365539d377', 'v0.0.4'),
('condensenet74_c8_g8', '1006', '3574d874fefc3307f241690bad51f20e61be1542', 'v0.0.4'),
('peleenet', '1151', '9c47b80297ac072a923cda763b78e7218cd52d3a', 'v0.0.141'),
('wrn50_2', '0641', '83897ab9f015f6f988e51108e12518b08e1819dd', 'v0.0.113'),
('drnc26', '0755', '35405bd52a0c721f3dc64f18d433074f263b7339', 'v0.0.116'),
('drnc42', '0657', '7c99c4608a9a5e5f073f657b92f258ba4ba5ac77', 'v0.0.116'),
('drnc58', '0601', '70ec1f56c23da863628d126a6ed0ad10f037a2ac', 'v0.0.116'),
('drnd22', '0823', '5c2c6a0cf992409ab388e04e9fbd06b7141bdf47', 'v0.0.116'),
('drnd38', '0695', '4630f0fb3f721f4a2296e05aacb1231ba7530ae5', 'v0.0.116'),
('drnd54', '0586', 'bfdc1f8826027b247e2757be45b176b3b91b9ea3', 'v0.0.116'),
('drnd105', '0548', 'a643f4dcf9e4b69eab06b76e54ce22169f837592', 'v0.0.116'),
('dpn68', '0727', '438492331840612ff1700e7b7d52dd6c0c683b47', 'v0.0.17'),
('dpn98', '0553', '52c55969835d56185afa497c43f09df07f58f0d3', 'v0.0.17'),
('dpn131', '0548', '0c53e5b380137ccb789e932775e8bd8a811eeb3e', 'v0.0.17'),
('darknet_tiny', '1784', '4561e1ada619e33520d1f765b3321f7f8ea6196b', 'v0.0.69'),
('darknet_ref', '1718', '034595b49113ee23de72e36f7d8a3dbb594615f6', 'v0.0.64'),
('darknet53', '0564', 'b36bef6b297055dda3d17a3f79596511730e1963', 'v0.0.150'),
('irevnet301', '0841', '95dc8d94257bf16027edd7077b785a8676369fca', 'v0.0.251'),
('bagnet9', '2961', 'cab1179284e9749697f38c1c7e5f0e172be12c89', 'v0.0.255'),
('bagnet17', '1884', '6b2a100f8d14d4616709586483f625743ed04769', 'v0.0.255'),
('bagnet33', '1301', '4f17b6e837dacd978b15708ffbb2c1e6be3c371a', 'v0.0.255'),
('dla34', '0794', '04698d78b16f2d08e4396b5b0c9f46cb42542242', 'v0.0.202'),
('dla46c', '1323', 'efcd363642a4b479892f47edae7440f0eea05edb', 'v0.0.282'),
('dla46xc', '1269', '00d3754ad0ff22636bb1f4b4fb8baebf4751a1ee', 'v0.0.293'),
('dla60', '0669', 'b2cd6e51a322512a6cb45414982a2ec71285daad', 'v0.0.202'),
('dla60x', '0598', '88547d3f81c4df711b15457cfcf37e2b703ed895', 'v0.0.202'),
('dla60xc', '1091', '0f6381f335e5bbb4c69b360be61a4a08e5c7a9de', 'v0.0.289'),
('dla102', '0605', '11df13220b44f51dc8c925fbd9fc334bc8d115b4', 'v0.0.202'),
('dla102x', '0577', '58331655844f9d95bcf2bb90de6ac9cf3b66bd5e', 'v0.0.202'),
('dla102x2', '0536', '079361117045dc661b63ce4b14408d403bc91844', 'v0.0.202'),
('dla169', '0566', 'ae0c6a82acfaf9dc459ac5a032106c2727b71d4f', 'v0.0.202'),
('fishnet150', '0604', 'f5af4873ff5730f589a6c4a505ede8268e6ce3e3', 'v0.0.168'),
('espnetv2_wd2', '2015', 'd234781f81e5d1b5ae6070fc851e3f7bb860b9fd', 'v0.0.238'),
('espnetv2_w1', '1345', '550d54229d7fd8f7c090601c2123ab3ca106393b', 'v0.0.238'),
('espnetv2_w5d4', '1218', '85d97b2b1c9ebb176f634949ef5ca6d7fe70f09c', 'v0.0.238'),
('espnetv2_w3d2', '1129', '3bbb49adaa4fa984a67f82862db7dcfc4998429e', 'v0.0.238'),
('espnetv2_w2', '0961', '13ba0f7200eb745bacdf692905fde711236448ef', 'v0.0.238'),
('squeezenet_v1_0', '1766', 'afdbcf1aef39237300656d2c5a7dba19230e29fc', 'v0.0.128'),
('squeezenet_v1_1', '1772', '25b77bc39e35612abbe7c2344d2c3e1e6756c2f8', 'v0.0.88'),
('squeezeresnet_v1_0', '1809', '25bfc02edeffb279010242614e7d73bbeacc0170', 'v0.0.178'),
('squeezeresnet_v1_1', '1821', 'c27ed88f1b19eb233d3925efc71c71d25e4c434e', 'v0.0.70'),
('sqnxt23_w1', '1906', '97b74e0c4d6bf9fc939771d94b2f6dd97de34024', 'v0.0.171'),
('sqnxt23v5_w1', '1785', '2fe3ad67d73313193a77690b10c17cbceef92340', 'v0.0.172'),
('sqnxt23_w3d2', '1350', 'c2f21bce669dbe50fba544bcc39bc1302f63e1e8', 'v0.0.210'),
('sqnxt23v5_w3d2', '1301', 'c244844ba2f02dadd350dddd74e21360b452f9dd', 'v0.0.212'),
('sqnxt23_w2', '1100', 'b9bb7302824f89f16e078f0a506e3a8c0ad9c74e', 'v0.0.240'),
('sqnxt23v5_w2', '1066', '229b0d3de06197e399eeebf42dc826b78f0aba86', 'v0.0.216'),
('shufflenet_g1_wd4', '3729', '47dbd0f279da6d3056079bb79ad39cabbb3b9415', 'v0.0.134'),
('shufflenet_g3_wd4', '3653', '6abdd65e087e71f80345415cdf7ada6ed2762d60', 'v0.0.135'),
('shufflenet_g1_wd2', '2261', 'dae4bdadd7d48bee791dff2a08cd697cff0e9320', 'v0.0.174'),
('shufflenet_g3_wd2', '2080', 'ccaacfc8d9ac112c6143269df6e258fd55b662a7', 'v0.0.167'),
('shufflenet_g1_w3d4', '1711', '161cd24aa0b2e2afadafa69b44a28af222f2ec7a', 'v0.0.218'),
('shufflenet_g3_w3d4', '1650', '3f3b0aef0ce3174c78ff42cf6910c6e34540fc41', 'v0.0.219'),
('shufflenet_g1_w1', '1389', '4cfb65a30761fe548e0b5afbb5d89793ec41e4e9', 'v0.0.223'),
('shufflenet_g2_w1', '1363', '07256203e217a7b31f1c69a5bd38a6674bce75bc', 'v0.0.241'),
('shufflenet_g3_w1', '1348', 'ce54f64ecff87556a4303380f46abaaf649eb308', 'v0.0.244'),
('shufflenet_g4_w1', '1335', 'e2415f8270a4b6cbfe7dc97044d497edbc898577', 'v0.0.245'),
('shufflenet_g8_w1', '1342', '9a979b365424addba75c559a61a77ac7154b26eb', 'v0.0.250'),
('shufflenetv2_wd2', '1865', '9c22238b5fa9c09541564e8ed7f357a5f7e8cd7c', 'v0.0.90'),
('shufflenetv2_w1', '1163', 'c71dfb7a814c8d8ef704bdbd80995e9ea49ff4ff', 'v0.0.133'),
('shufflenetv2_w3d2', '0942', '26a9230405d956643dcd563a5a383844c49b5907', 'v0.0.288'),
('shufflenetv2_w2', '1249', 'b9f9e84cbf49cf63fe2a89e9c48a9fb107f591d7', 'v0.0.84'),
('shufflenetv2b_wd2', '1822', '01d18d6fa1a6136f605a4277f47c9a757f9ede3b', 'v0.0.157'),
('shufflenetv2b_w1', '1125', '6a5d3dc446e6a00cf60fe8aa2f4139d74d766305', 'v0.0.161'),
('shufflenetv2b_w3d2', '0911', 'f2106fee0748d7f0d40db16b228782b6d7636737', 'v0.0.203'),
('shufflenetv2b_w2', '0834', 'cb36b92ca4ca3bee470b739021d01177e0601c5f', 'v0.0.242'),
('menet108_8x1_g3', '2076', '6acc82e46dfc1ce0dd8c59668aed4a464c8cbdb5', 'v0.0.89'),
('menet128_8x1_g4', '1959', '48fa80fc363adb88ff580788faa8053c9d7507f3', 'v0.0.103'),
('menet160_8x1_g8', '2084', '0f4fce43b4234c5bca5dd76450b698c2d4daae65', 'v0.0.154'),
('menet228_12x1_g3', '1316', '5b670c42031d0078e2ae981829358d7c1b92ee30', 'v0.0.131'),
('menet256_12x1_g4', '1252', '14c6c86df96435c693eb7d0fcd8d3bf4079dd621', 'v0.0.152'),
('menet348_12x1_g3', '0958', 'ad50f635a1f7b799a19a0a9c71aa9939db8ffe77', 'v0.0.173'),
('menet352_12x1_g8', '1200', '4ee200c5c98c64a2503cea82ebf62d1d3c07fb91', 'v0.0.198'),
('menet456_24x1_g3', '0799', '826c002244f1cdc945a95302b1ce5c66d949db74', 'v0.0.237'),
('mobilenet_wd4', '2249', '1ad5e8fe8674cdf7ffda8450095eb96d227397e0', 'v0.0.62'),
('mobilenet_wd2', '1355', '41a21242c95050407df876cfa44bb5d3676aa751', 'v0.0.156'),
('mobilenet_w3d4', '1076', 'd801bcaea83885b16a0306b8b77fe314bbc585c3', 'v0.0.130'),
('mobilenet_w1', '0895', '7e1d739f0fd4b95c16eef077c5dc0a5bb1da8ad5', 'v0.0.155'),
('fdmobilenet_wd4', '3098', '2b22b709a05d7ca6e43acc6f3a9f27d0eb2e01cd', 'v0.0.177'),
('fdmobilenet_wd2', '2015', '414dbeedb2f829dcd8f94cd7fef10aae6829f06f', 'v0.0.83'),
('fdmobilenet_w3d4', '1641', '5561d58aa8889d8d93f2062a2af4e4b35ad7e769', 'v0.0.159'),
('fdmobilenet_w1', '1338', '9d026c04112de9f40e15fa40457d77941443c327', 'v0.0.162'),
('mobilenetv2_wd4', '2451', '05e1e3a286b27c17ea11928783c4cd48b1e7a9b2', 'v0.0.137'),
('mobilenetv2_wd2', '1493', 'b82d79f6730eac625e6b55b0618bff8f7a1ed86d', 'v0.0.170'),
('mobilenetv2_w3d4', '1082', '8656de5a8d90b29779c35c5ce521267c841fd717', 'v0.0.230'),
('mobilenetv2_w1', '0887', '13a021bca5b679b76156829743f7182da42e8bb6', 'v0.0.213'),
('igcv3_wd4', '2871', 'c9f28301391601e5e8ae93139431a9e0d467317c', 'v0.0.142'),
('igcv3_wd2', '1732', '8c504f443283d8a32787275b23771082fcaab61b', 'v0.0.132'),
('igcv3_w3d4', '1140', '63f43cf8d334111d55d06f2f9bf7e1e4871d162c', 'v0.0.207'),
('igcv3_w1', '0920', '12385791681f09adb3a08926c95471f332f538b6', 'v0.0.243'),
('mnasnet', '1174', 'e8ec017ca396dc7d39e03b383776b8cf9ad20a4d', 'v0.0.117'),
('darts', '0874', '74f0c7b690cf8bef9b54cc5afc2cb0f2a2a83630', 'v0.0.118'),
('xception', '0549', 'e4f0232c99fa776e630189d62fea18e248a858b2', 'v0.0.115'),
('inceptionv3', '0565', 'cf4061800bc1dc3b090920fc9536d8ccc15bb86e', 'v0.0.92'),
('inceptionv4', '0529', '5cb7b4e4b8f62d6b4346855d696b06b426b44f3d', 'v0.0.105'),
('inceptionresnetv2', '0490', '1d1b4d184e6d41091c5ac3321d99fa554b498dbe', 'v0.0.107'),
('polynet', '0452', '6a1b295dad3f261b48e845f1b283e4eef3ab5a0b', 'v0.0.96'),
('nasnet_4a1056', '0816', 'd21bbaf5e937c2e06134fa40e7bdb1f501423b86', 'v0.0.97'),
('nasnet_6a4032', '0421', 'f354d28f4acdde399e081260c3f46152eca5d27e', 'v0.0.101'),
('pnasnet5large', '0428', '65de46ebd049e494c13958d5671aba5abf803ff3', 'v0.0.114'),
('resnetd50b', '0565', 'ec03d815c0f016c6517ed7b4b40126af46ceb8a4', 'v0.0.296'),
('resnetd101b', '0473', 'f851c920ec1fe4f729d339c933535d038bf2903c', 'v0.0.296'),
('resnetd152b', '0482', '112e216da50eb20d52c509a28c97b05ef819cefe', 'v0.0.296'),
('nin_cifar10', '0743', '795b082470b58c1aa94e2f861514b7914f6e2f58', 'v0.0.175'),
('nin_cifar100', '2839', '627a11c064eb44c6451fe53e0becfc21a6d57d7f', 'v0.0.183'),
('nin_svhn', '0376', '1205dc06a4847bece8159754033f325f75565c02', 'v0.0.270'),
('resnet20_cifar10', '0597', '9b0024ac4c2f374cde2c5052e0d0344a75871cdb', 'v0.0.163'),
('resnet20_cifar100', '2964', 'a5322afed92fa96cb7b3453106f73cf38e316151', 'v0.0.180'),
('resnet20_svhn', '0343', '8232e6e4c2c9fac1200386b68311c3bd56f483f5', 'v0.0.265'),
('resnet56_cifar10', '0452', '628c42a26fe347b84060136212e018df2bb35e0f', 'v0.0.163'),
('resnet56_cifar100', '2488', 'd65f53b10ad5d124698e728432844c65261c3107', 'v0.0.181'),
('resnet56_svhn', '0275', '6e08ed929b8f0ee649f75464f06b557089023290', 'v0.0.265'),
('resnet110_cifar10', '0369', '4d6ca1fc02eaeed724f4f596011e391528536049', 'v0.0.163'),
('resnet110_cifar100', '2280', 'd8d397a767db6d22af040223ec8ae342a088c3e5', 'v0.0.190'),
('resnet110_svhn', '0245', 'c971f0a38943d8a75386a60c835cc0843c2f6c1c', 'v0.0.265'),
('resnet164bn_cifar10', '0368', '74ae9f4bccb7fb6a8f3f603fdabe8d8632c46b2f', 'v0.0.179'),
('resnet164bn_cifar100', '2044', '8fa07b7264a075fa5add58f4c676b99a98fb1c89', 'v0.0.182'),
('resnet164bn_svhn', '0242', '549413723d787cf7e96903427a7a14fb3ea1a4c1', 'v0.0.267'),
('resnet1001_cifar10', '0328', '77a179e240808b7aa3534230d39b845a62413ca2', 'v0.0.201'),
('resnet1001_cifar100', '1979', '2728b558748f9c3e70db179afb6c62358020858b', 'v0.0.254'),
('resnet1202_cifar10', '0353', '1d5a21290117903fb5fd6ba59f3f7e7da7c08836', 'v0.0.214'),
('preresnet20_cifar10', '0651', '76cec68d11de5b25be2ea5935681645b76195f1d', 'v0.0.164'),
('preresnet20_cifar100', '3022', '3dbfa6a2b850572bccb28cc2477a0e46c24abcb8', 'v0.0.187'),
('preresnet20_svhn', '0322', 'c3c00fed49c1d6d9deda6436d041c5788d549299', 'v0.0.269'),
('preresnet56_cifar10', '0449', 'e9124fcf167d8ca50addef00c3afa4da9f828f29', 'v0.0.164'),
('preresnet56_cifar100', '2505', 'ca90a2be6002cd378769b9d4e7c497dd883d31d9', 'v0.0.188'),
('preresnet56_svhn', '0280', 'b51b41476710c0e2c941356ffe992ff883a3ee87', 'v0.0.269'),
('preresnet110_cifar10', '0386', 'cc08946a2126a1224d1d2560a47cf766a763c52c', 'v0.0.164'),
('preresnet110_cifar100', '2267', '3954e91581b7f3e5f689385d15f618fe16e995af', 'v0.0.191'),
('preresnet110_svhn', '0279', 'aa49e0a3c4a918e227ca2d5a5608704f026134c3', 'v0.0.269'),
('preresnet164bn_cifar10', '0364', '429012d412e82df7961fa071f97c938530e1b005', 'v0.0.196'),
('preresnet164bn_cifar100', '2018', 'a8e67ca6e14f88b009d618b0e9b554312d862174', 'v0.0.192'),
('preresnet164bn_svhn', '0258', '94d42de440d5f057a38f4c8cdbdb24acfee3981c', 'v0.0.269'),
('preresnet1001_cifar10', '0265', '9fedfe5fd643e7355f1062a6db68da310c8962be', 'v0.0.209'),
('preresnet1001_cifar100', '1841', '88f14ed9df1573e98b0ec2a07009a15066855fda', 'v0.0.283'),
('preresnet1202_cifar10', '0339', '6fc686b02191226f39e25a76fc5da26857f7acd9', 'v0.0.246'),
('resnext29_32x4d_cifar10', '0315', '30413525cd4466dbef759294eda9b702bc39648f', 'v0.0.169'),
('resnext29_32x4d_cifar100', '1950', '13ba13d92f6751022549a3b370ae86d3b13ae2d1', 'v0.0.200'),
('resnext29_32x4d_svhn', '0280', 'e85c5217944cdfafb0a538dd7cc817cffaada7c4', 'v0.0.275'),
('resnext29_16x64d_cifar10', '0241', '4133d3d04f9b10b132dcb959601d36f10123f8c2', 'v0.0.176'),
('pyramidnet110_a48_cifar10', '0372', 'eb185645cda89e0c3c47b11c4b2d14ff18fa0ae1', 'v0.0.184'),
('pyramidnet110_a48_cifar100', '2095', '95da1a209916b3cf4af7e8dc44374345a88c60f4', 'v0.0.186'),
('pyramidnet110_a48_svhn', '0247', 'd48bafbebaabe9a68e5924571752b3d7cd95d311', 'v0.0.281'),
('pyramidnet110_a84_cifar10', '0298', '7b835a3cf19794478d478aced63ca9e855c3ffeb', 'v0.0.185'),
('pyramidnet110_a84_cifar100', '1887', 'ff711084381f217f84646c676e4dcc90269dc516', 'v0.0.199'),
('pyramidnet110_a270_cifar10', '0251', '31bdd9d51ec01388cbb2adfb9f822c942de3c4ff', 'v0.0.194'),
('pyramidnet164_a270_bn_cifar10', '0242', 'daa2a402c1081323b8f2239f2201246953774e84', 'v0.0.264'),
('pyramidnet200_a240_bn_cifar10', '0244', '44433afdd2bc32c55dfb1e8347bc44d1c2bf82c7', 'v0.0.268'),
('pyramidnet236_a220_bn_cifar10', '0247', 'daa91d74979c451ecdd8b59e4350382966f25831', 'v0.0.285'),
('pyramidnet272_a200_bn_cifar10', '0239', '586b1ecdc8b34b69dcae4ba57f71c24583cca9b1', 'v0.0.284'),
('densenet40_k12_cifar10', '0561', '8b8e819467a2e4c450e4ff72ced80582d0628b68', 'v0.0.193'),
('densenet40_k12_cifar100', '2490', 'd182c224d6df2e289eef944d54fea9fd04890961', 'v0.0.195'),
('densenet40_k12_svhn', '0305', 'ac0de84a1a905b768c66f0360f1fb9bd918833bf', 'v0.0.278'),
('densenet40_k12_bc_cifar10', '0643', '6dc86a2ea1d088f088462f5cbac06cc0f37348c0', 'v0.0.231'),
('densenet40_k12_bc_cifar100', '2841', '1e9db7651a21e807c363c9f366bd9e91ce2f296f', 'v0.0.232'),
('densenet40_k12_bc_svhn', '0320', '320760528b009864c68ff6c5b874e9f351ea7a07', 'v0.0.279'),
('densenet40_k24_bc_cifar10', '0452', '669c525548a4a2392c5e3c380936ad019f2be7f9', 'v0.0.220'),
('densenet40_k24_bc_cifar100', '2267', '411719c0177abf58eddaddd05511c86db0c9d548', 'v0.0.221'),
('densenet40_k24_bc_svhn', '0290', 'f4440d3b8c974c9e1014969f4d5832c6c90195d5', 'v0.0.280'),
('densenet40_k36_bc_cifar10', '0404', 'b1a4cc7e67db1ed8c5583a59dc178cc7dc2c572e', 'v0.0.224'),
('densenet40_k36_bc_cifar100', '2050', 'cde836fafec1e5d6c8ed69fd3cfe322e8e71ef1d', 'v0.0.225'),
('densenet100_k12_cifar10', '0366', '26089c6e70236e8f25359de6fda67b84425888ab', 'v0.0.205'),
('densenet100_k12_cifar100', '1964', '5e10cd830c06f6ab178e9dd876c83c754ca63f00', 'v0.0.206'),
('densenet100_k24_cifar10', '0313', '397f0e39b517c05330221d4f3a9755eb5e561be1', 'v0.0.252'),
('densenet100_k12_bc_cifar10', '0416', 'b9232829b13c3f3f2ea15f4be97f500b7912c3c2', 'v0.0.189'),
('densenet100_k12_bc_cifar100', '2119', '05a6f02772afda51a612f5b92aadf19ffb60eb72', 'v0.0.208'),
('densenet190_k40_bc_cifar10', '0252', '2896fa088aeaef36fcf395d404d97ff172d78943', 'v0.0.286'),
('densenet250_k24_bc_cifar10', '0267', 'f8f9d3052bae1fea7e33bb1ce143c38b4aa5622b', 'v0.0.290'),
('xdensenet40_2_k24_bc_cifar10', '0531', 'b91a9dc35877c4285fe86f49953d1118f6b69e57', 'v0.0.226'),
('xdensenet40_2_k24_bc_cifar100', '2396', '0ce8f78ab9c6a4786829f816ae0615c7905f292c', 'v0.0.227'),
('xdensenet40_2_k36_bc_cifar10', '0437', 'ed264a2060836c7440f0ccde57315e1ec6263ff0', 'v0.0.233'),
('xdensenet40_2_k36_bc_cifar100', '2165', '6f68f83dc31dea5237e6362e6c6cfeed48a8d9e3', 'v0.0.234'),
('wrn16_10_cifar10', '0293', 'ce810d8a17a2deb73eddb5bec8709f93278bc53e', 'v0.0.166'),
('wrn16_10_cifar100', '1895', 'bef9809c845deb1b2bb0c9aaaa7c58bd97740504', 'v0.0.204'),
('wrn16_10_svhn', '0278', '5ab2a4edd5398a03d2e28db1b075bf0313ae5828', 'v0.0.271'),
('wrn28_10_cifar10', '0239', 'fe97dcd6d0dd8dda8e9e38e6cfa320cffb9955ce', 'v0.0.166'),
('wrn28_10_svhn', '0271', 'd62b6bbaef7228706a67c2c8416681f97c6d4688', 'v0.0.276'),
('wrn40_8_cifar10', '0237', '8dc84ec730f35c4b8968a022bc045c0665410840', 'v0.0.166'),
('wrn40_8_svhn', '0254', 'dee59602c10e5d56bd9c168e8e8400792b9a8b08', 'v0.0.277'),
('ror3_56_cifar10', '0543', '44f0f47d2e1b609880ee1b623014c52a9276e2ea', 'v0.0.228'),
('ror3_56_cifar100', '2549', '34be6719cd128cfe60ba93ac6d250ac4c1acf0a5', 'v0.0.229'),
('ror3_56_svhn', '0269', '5a9ad66c8747151be1d2fb9bc854ae382039bdb9', 'v0.0.287'),
('ror3_110_cifar10', '0435', 'fb2a2b0499e4a4d92bdc1d6792bd5572256d5165', 'v0.0.235'),
('ror3_110_cifar100', '2364', 'd599e3a93cd960c8bfc5d05c721cd48fece5fa6f', 'v0.0.236'),
('ror3_110_svhn', '0257', '155380add8d351d2c12026d886a918f1fc3f9fd0', 'v0.0.287'),
('ror3_164_cifar10', '0393', 'de7b6dc60ad6a297bd55ab65b6d7b1225b0ef6d1', 'v0.0.294'),
('ror3_164_cifar100', '2234', 'd37483fccc7fc1a25ff90ef05ecf1b8eab3cc1c4', 'v0.0.294'),
('ror3_164_svhn', '0273', 'ff0d9af0d40ef204393ecc904b01a11aa63acc01', 'v0.0.294'),
('rir_cifar10', '0328', '414c3e6088ae1e83aa1a77c43e38f940c18a0ce2', 'v0.0.292'),
('rir_cifar100', '1923', 'de8ec24a232b94be88f4208153441f66098a681c', 'v0.0.292'),
('rir_svhn', '0268', '12fcbd3bfc6b4165e9b23f3339a1b751b4b8f681', 'v0.0.292'),
('shakeshakeresnet20_2x16d_cifar10', '0515', 'ef71ec0d5ef928ef8654294114a013895abe3f9a', 'v0.0.215'),
('shakeshakeresnet20_2x16d_cifar100', '2922', '4d07f14234b1c796b3c1dfb24d4a3220a1b6b293', 'v0.0.247'),
('shakeshakeresnet20_2x16d_svhn', '0317', 'a693ec24fb8fe2c9f15bcc6b1050943c0c5d595a', 'v0.0.295'),
('shakeshakeresnet26_2x32d_cifar10', '0317', 'ecd1f8337cc90b5378b4217fb2591f2ed0f02bdf', 'v0.0.217'),
('shakeshakeresnet26_2x32d_cifar100', '1880', 'b47e371f60c9fed9eaac960568783fb6f83a362f', 'v0.0.222'),
('shakeshakeresnet26_2x32d_svhn', '0262', 'c1b8099ece97e17ce85213e4ecc6e50a064050cf', 'v0.0.295'),
('pspnet_resnetd101b_voc', '8144', 'c22f021948461a7b7ab1ef1265a7948762770c83', 'v0.0.297'),
('pspnet_resnetd50b_ade20k', '3687', '13f22137d7dd06c6de2ffc47e6ed33403d3dd2cf', 'v0.0.297'),
('pspnet_resnetd101b_ade20k', '3797', '115d62bf66477221b83337208aefe0f2f0266da2', 'v0.0.297'),
('pspnet_resnetd101b_cityscapes', '7172', '0a6efb497bd4fc763d27e2121211e06f72ada7ed', 'v0.0.297'),
('pspnet_resnetd101b_coco', '6741', 'c8b13be65cb43402fce8bae945f6e0d0a3246b92', 'v0.0.297'),
('deeplabv3_resnetd101b_voc', '8024', 'fd8bf74ffc96c97b30bcd3b6ce194a2daed68098', 'v0.0.298'),
('deeplabv3_resnetd152b_voc', '8120', 'f2dae198b3cdc41920ea04f674b665987c68d7dc', 'v0.0.298'),
('deeplabv3_resnetd50b_ade20k', '3713', 'bddbb458e362e18f5812c2307b322840394314bc', 'v0.0.298'),
('deeplabv3_resnetd101b_ade20k', '3784', '977446a5fb32b33f168f2240fb6b7ef9f561fc1e', 'v0.0.298'),
('deeplabv3_resnetd101b_coco', '6773', 'e59c1d8f7ed5bcb83f927d2820580a2f4970e46f', 'v0.0.298'),
('deeplabv3_resnetd152b_coco', '6899', '7e946d7a63ed255dd38afacebb0a0525e735da64', 'v0.0.298'),
('fcn8sd_resnetd101b_voc', '8040', '66edc0b073f0dec66c18bb163c7d6de1ddbc32a3', 'v0.0.299'),
('fcn8sd_resnetd50b_ade20k', '3339', 'e1dad8a15c2a1be1138bd3ec51ba1b100bb8d9c9', 'v0.0.299'),
('fcn8sd_resnetd101b_ade20k', '3588', '30d05ca42392a164ea7c93a9cbd7f33911d3c1af', 'v0.0.299'),
('fcn8sd_resnetd101b_coco', '6011', 'ebe2ad0bc1de5b4cecade61d17d269aa8bf6df7f', 'v0.0.299'),
]}
imgclsmob_repo_url = 'https://github.com/osmr/imgclsmob'
def get_model_name_suffix_data(model_name):
    """
    Look up the stored record for a pretrained model.
    Parameters
    ----------
    model_name : str
        Name of the model.
    Returns
    -------
    tuple
        The (error, sha1_hash, repo_release_tag) triple registered for the model.
    """
    if model_name not in _model_sha1:
        raise ValueError('Pretrained model for {name} is not available.'.format(name=model_name))
    # The registry already stores the triple, so it can be returned directly.
    return _model_sha1[model_name]
def get_model_file(model_name,
                   local_model_store_dir_path=os.path.join('~', '.torch', 'models')):
    """
    Return location for the pretrained model on the local file system. This function will download from the online
    model zoo when the model cannot be found or has a hash mismatch. The root directory will be created if it
    doesn't exist.
    Parameters
    ----------
    model_name : str
        Name of the model.
    local_model_store_dir_path : str, default $TORCH_HOME/models
        Location for keeping the model parameters.
    Returns
    -------
    file_path
        Path to the requested pretrained model file.
    """
    error, sha1_hash, repo_release_tag = get_model_name_suffix_data(model_name)
    short_sha1 = sha1_hash[:8]
    # Local file name encodes the reported error and a hash prefix,
    # e.g. 'resnet18-0982-0126861b.pth'.
    file_name = '{name}-{error}-{short_sha1}.pth'.format(
        name=model_name,
        error=error,
        short_sha1=short_sha1)
    local_model_store_dir_path = os.path.expanduser(local_model_store_dir_path)
    file_path = os.path.join(local_model_store_dir_path, file_name)
    if os.path.exists(file_path):
        # Reuse the cached file only if its content hash checks out.
        if _check_sha1(file_path, sha1_hash):
            return file_path
        else:
            logging.warning('Mismatch in the content of model file detected. Downloading again.')
    else:
        logging.info('Model file not found. Downloading to {}.'.format(file_path))
    if not os.path.exists(local_model_store_dir_path):
        os.makedirs(local_model_store_dir_path)
    # Weights are published as zipped release assets: fetch the archive next
    # to the target file, unpack it into the store directory, then delete it.
    zip_file_path = file_path + '.zip'
    _download(
        url='{repo_url}/releases/download/{repo_release_tag}/{file_name}.zip'.format(
            repo_url=imgclsmob_repo_url,
            repo_release_tag=repo_release_tag,
            file_name=file_name),
        path=zip_file_path,
        overwrite=True)
    with zipfile.ZipFile(zip_file_path) as zf:
        zf.extractall(local_model_store_dir_path)
    os.remove(zip_file_path)
    # Verify the freshly extracted file before handing it back.
    if _check_sha1(file_path, sha1_hash):
        return file_path
    else:
        raise ValueError('Downloaded file has different hash. Please try again.')
def _download(url, path=None, overwrite=False, sha1_hash=None, retries=5, verify_ssl=True):
    """
    Download a given URL
    Parameters
    ----------
    url : str
        URL to download
    path : str, optional
        Destination path to store downloaded file. By default stores to the
        current directory with same name as in url.
    overwrite : bool, optional
        Whether to overwrite destination file if already exists.
    sha1_hash : str, optional
        Expected sha1 hash in hexadecimal digits. Will ignore existing file when hash is specified
        but doesn't match.
    retries : integer, default 5
        The number of times to attempt the download in case of failure or non 200 return codes
    verify_ssl : bool, default True
        Verify SSL certificates.
    Returns
    -------
    str
        The file path of the downloaded file.
    """
    import warnings
    try:
        import requests
    except ImportError:
        # Stand-in so the name 'requests' exists; any attribute access on it
        # fails only if a download is actually attempted without the package.
        class requests_failed_to_import(object):
            pass
        requests = requests_failed_to_import
    if path is None:
        # No destination given: derive the file name from the URL itself.
        fname = url.split('/')[-1]
        # Empty filenames are invalid
        assert fname, 'Can\'t construct file-name from this URL. ' \
            'Please set the `path` option manually.'
    else:
        path = os.path.expanduser(path)
        if os.path.isdir(path):
            # A directory was given: keep the file name from the URL.
            fname = os.path.join(path, url.split('/')[-1])
        else:
            fname = path
    assert retries >= 0, "Number of retries should be at least 0"
    if not verify_ssl:
        warnings.warn(
            'Unverified HTTPS request is being made (verify_ssl=False). '
            'Adding certificate verification is strongly advised.')
    # Skip the download entirely when the target already exists with the
    # expected content, unless overwrite is forced or the hash mismatches.
    if overwrite or not os.path.exists(fname) or (sha1_hash and not _check_sha1(fname, sha1_hash)):
        dirname = os.path.dirname(os.path.abspath(os.path.expanduser(fname)))
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        # 'retries' counts remaining attempts; the loop body runs at most
        # retries + 1 times because it is decremented on each failure below.
        while retries + 1 > 0:
            # Disable pylint too broad Exception
            # pylint: disable=W0703
            try:
                print('Downloading {} from {}...'.format(fname, url))
                r = requests.get(url, stream=True, verify=verify_ssl)
                if r.status_code != 200:
                    raise RuntimeError("Failed downloading url {}".format(url))
                # Stream the body to disk in 1 KiB chunks.
                with open(fname, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=1024):
                        if chunk:  # filter out keep-alive new chunks
                            f.write(chunk)
                if sha1_hash and not _check_sha1(fname, sha1_hash):
                    raise UserWarning('File {} is downloaded but the content hash does not match.'
                                      ' The repo may be outdated or download may be incomplete. '
                                      'If the "repo_url" is overridden, consider switching to '
                                      'the default repo.'.format(fname))
                break
            except Exception as e:
                retries -= 1
                if retries <= 0:
                    # Attempts exhausted: propagate the last failure.
                    raise e
                else:
                    print("download failed, retrying, {} attempt{} left"
                          .format(retries, 's' if retries > 1 else ''))
    return fname
def _check_sha1(file_name, sha1_hash):
"""
Check whether the sha1 hash of the file content matches the expected hash.
Parameters
----------
file_name : str
Path to the file.
sha1_hash : str
Expected sha1 hash in hexadecimal digits.
Returns
-------
bool
Whether the file content matches the expected hash.
"""
sha1 = hashlib.sha1()
with open(file_name, 'rb') as f:
while True:
data = f.read(1048576)
if not data:
break
sha1.update(data)
return sha1.hexdigest() == sha1_hash
def load_model(net,
               file_path,
               ignore_extra=True):
    """
    Load model state dictionary from a file.
    Parameters
    ----------
    net : Module
        Network in which weights are loaded.
    file_path : str
        Path to the file.
    ignore_extra : bool, default True
        Whether to silently ignore parameters from the file that are not present in this Module.
    """
    import torch
    loaded_state = torch.load(file_path)
    if ignore_extra:
        # Keep only the entries the network actually declares; anything
        # extra in the checkpoint is silently dropped.
        own_state = net.state_dict()
        loaded_state = {name: tensor for name, tensor in loaded_state.items()
                        if name in own_state}
    net.load_state_dict(loaded_state)
def download_model(net,
                   model_name,
                   local_model_store_dir_path=os.path.join('~', '.torch', 'models'),
                   ignore_extra=True):
    """
    Load model state dictionary from a file with downloading it if necessary.
    Parameters
    ----------
    net : Module
        Network in which weights are loaded.
    model_name : str
        Name of the model.
    local_model_store_dir_path : str, default $TORCH_HOME/models
        Location for keeping the model parameters.
    ignore_extra : bool, default True
        Whether to silently ignore parameters from the file that are not present in this Module.
    """
    # Resolve (and, if needed, fetch) the weights file first, then load it.
    file_path = get_model_file(
        model_name=model_name,
        local_model_store_dir_path=local_model_store_dir_path)
    load_model(net=net, file_path=file_path, ignore_extra=ignore_extra)
def calc_num_params(net):
    """
    Calculate the count of trainable parameters for a model.
    Parameters
    ----------
    net : Module
        Analyzed model.
    Returns
    -------
    int
        Total number of elements across all parameters with requires_grad=True.
    """
    # param.numel() replaces np.prod(param.size()): it returns a plain int
    # (np.prod yields a float 1.0 for zero-dimensional parameters) and drops
    # the needless numpy dependency.
    weight_count = 0
    for param in net.parameters():
        if param.requires_grad:
            weight_count += param.numel()
    return weight_count
| [
"[email protected]"
]
| |
3d725712e172cee8591768772262237bc21dcaae | 830465731dfda87b4141546262f20d74c29297bf | /GENERAL/RADARCTF/Logo/sol.py | d32c2f2933fdf57751dd6485d243603bc52c9566 | []
| no_license | jchen8tw-research/CTF | f559d7ca0e16a730335b11caeeae208c42e8bf17 | f49615c24437a9cc6a2c20d6b30cb5abf7a32b71 | refs/heads/master | 2023-03-17T12:29:08.630613 | 2021-03-23T06:31:26 | 2021-03-23T06:31:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | import os
import binascii
import struct
# Brute-force a corrupted field in the PNG IHDR chunk by trying candidate
# values until the chunk CRC matches the known value 0xB65879B0.
misc = open("logo.png","rb").read()
for i in range(1024):
    # Rebuild the CRC input: misc[12:16] is the 4-byte chunk type ("IHDR" in
    # a standard PNG), the packed guess replaces bytes 16:20, and misc[20:29]
    # supplies the remaining 9 bytes of the chunk data.
    # NOTE(review): this assumes the 4 bytes at offset 16 (the image width in
    # a standard IHDR layout) are what was corrupted -- confirm against the file.
    data = misc[12:16] + struct.pack('>i',i)+ misc[20:29]
    crc32 = binascii.crc32(data) & 0xffffffff
    if crc32 == 0xB65879B0:
        # Python 2 print statement: emits the matching candidate value.
        print i
"[email protected]"
]
| |
a0f042399c854efeeae2f22745708993359d89e0 | 8a11814f757b22cacd89ae618265d6705393ba78 | /amplify/agent/data/statsd.py | 8c17a990d29c16671f7bda85bf50d173b786d17e | [
"BSD-2-Clause"
]
| permissive | ngonsol/nginx-amplify-agent | e763bfcc82cf103b4eb2ce49269dfccaec0cb9af | c711579208465578b03dda5db40ccc7dc8f31b81 | refs/heads/master | 2021-01-18T03:17:04.494068 | 2016-05-18T20:17:25 | 2016-05-18T20:17:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,307 | py | # -*- coding: utf-8 -*-
import copy
import time
from collections import defaultdict
__author__ = "Mike Belov"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__credits__ = ["Mike Belov", "Andrei Belov", "Ivan Poluyanov", "Oleg Mamontov", "Andrew Alexeev", "Grant Hulegaard"]
__license__ = ""
__maintainer__ = "Mike Belov"
__email__ = "[email protected]"
class StatsdClient(object):
    """
    In-process accumulator for agent metrics (counters, gauges, timers and
    averages).  Samples are buffered in ``self.current`` keyed by metric
    type, and periodically drained via :meth:`flush`, which condenses them
    into a payload dict.  Metric names in a flushed payload are prefixed
    with ``C|`` (counter) or ``G|`` (gauge) to mark how the backend should
    interpret them.

    NOTE: this module targets Python 2 (``dict.iteritems()`` is used in
    :meth:`flush`).
    """

    def __init__(self, address=None, port=None, interval=None, object=None):
        # Import context as a class object to avoid circular import on statsd. This could be refactored later.
        from amplify.agent.common.context import context
        self.context = context
        # ``object`` intentionally shadows the builtin; it is the monitored
        # object whose ``.definition`` is attached to every flushed payload.
        self.address = address
        self.port = port
        self.object = object
        self.interval = interval
        # Buffers: metric type -> {metric name -> accumulated samples}.
        # ``current`` receives new samples; ``delivery`` is unused here but
        # kept as part of the public attribute surface.
        self.current = defaultdict(dict)
        self.delivery = defaultdict(dict)

    def average(self, metric_name, value):
        """
        Same thing as histogram but without p95
        :param metric_name: metric name
        :param value: metric value
        """
        # Raw values are kept until flush(), which reduces them to a mean.
        if metric_name in self.current['average']:
            self.current['average'][metric_name].append(value)
        else:
            self.current['average'][metric_name] = [value]

    def timer(self, metric_name, value):
        """
        Histogram with 95 percentile
        The algorithm is as follows:
        Collect all the data samples for a period of time (commonly a day, a week, or a month).
        Sort the data set by value from highest to lowest and discard the highest 5% of the sorted samples.
        The next highest sample is the 95th percentile value for the data set.
        :param metric_name: metric name
        :param value: metric value
        """
        # Raw values are kept until flush(), which derives mean/count/max/
        # median/pctl95 from the sorted sample list.
        if metric_name in self.current['timer']:
            self.current['timer'][metric_name].append(value)
        else:
            self.current['timer'][metric_name] = [value]

    def incr(self, metric_name, value=None, rate=None, stamp=None):
        """
        Simple counter with rate
        :param metric_name: metric name
        :param value: metric value (defaults to 1 when omitted)
        :param rate: rate; together with ``self.interval`` it defines the
            sample period after which a new slot is started instead of
            accumulating into the current one
        :param stamp: timestamp (current timestamp will be used if this is not specified)
        """
        timestamp = stamp or int(time.time())
        if value is None:
            value = 1
        # new metric
        if metric_name not in self.current['counter']:
            self.current['counter'][metric_name] = [[timestamp, value]]
            return
        # metric exists
        slots = self.current['counter'][metric_name]
        last_stamp, last_value = slots[-1]
        # if rate is set then check it's time
        if self.interval and rate:
            sample_duration = self.interval * rate
            # write to current slot
            if timestamp < last_stamp + sample_duration:
                self.current['counter'][metric_name][-1] = [last_stamp, last_value + value]
            else:
                # NOTE(review): the new slot is appended with ``last_stamp``
                # rather than ``timestamp`` -- every slot therefore carries
                # the first slot's stamp.  Looks suspicious; confirm whether
                # this is intended before changing it.
                self.current['counter'][metric_name].append([last_stamp, value])
        else:
            self.current['counter'][metric_name][-1] = [last_stamp, last_value + value]

    def agent(self, metric_name, value, stamp=None):
        """
        Agent metrics
        Stored as a single-sample gauge; any previously buffered samples for
        the same metric name are overwritten.
        :param metric_name: metric
        :param value: value
        :param stamp: timestamp (current timestamp will be used if this is not specified)
        """
        timestamp = stamp or int(time.time())
        self.current['gauge'][metric_name] = [(timestamp, value)]

    def gauge(self, metric_name, value, delta=False, prefix=False, stamp=None):
        """
        Gauge
        :param metric_name: metric name
        :param value: metric value
        :param delta: metric delta (applicable only if we have previous values);
            when True the new sample is the previous sample's value plus
            ``value``
        :param prefix: NOTE(review): accepted but never used in this method --
            confirm whether callers rely on it before removing
        :param stamp: timestamp (current timestamp will be used if this is not specified)
        """
        timestamp = stamp or int(time.time())
        if metric_name in self.current['gauge']:
            if delta:
                last_stamp, last_value = self.current['gauge'][metric_name][-1]
                new_value = last_value + value
            else:
                new_value = value
            self.current['gauge'][metric_name].append((timestamp, new_value))
        else:
            self.current['gauge'][metric_name] = [(timestamp, value)]

    def flush(self):
        """
        Snapshot and reset the live buffer, condensing it into a payload.

        Returns None when nothing has been recorded yet; otherwise a dict
        with 'metrics' (per-type aggregates) and 'object' (the monitored
        object's definition).
        """
        if not self.current:
            return
        results = {}
        # Swap the buffer out first so samples recorded during aggregation
        # land in a fresh dict instead of the snapshot being processed.
        delivery = copy.deepcopy(self.current)
        self.current = defaultdict(dict)
        # histogram
        if 'timer' in delivery:
            timers = {}
            timestamp = int(time.time())
            for metric_name, metric_values in delivery['timer'].iteritems():
                if len(metric_values):
                    metric_values.sort()
                    length = len(metric_values)
                    timers['G|%s' % metric_name] = [[timestamp, sum(metric_values) / float(length)]]
                    timers['C|%s.count' % metric_name] = [[timestamp, length]]
                    timers['G|%s.max' % metric_name] = [[timestamp, metric_values[-1]]]
                    timers['G|%s.median' % metric_name] = [[timestamp, metric_values[int(round(length / 2 - 1))]]]
                    # NOTE(review): for fewer than ~10 samples,
                    # int(round(length * .05)) is 0, so the index becomes
                    # metric_values[0] -- the *smallest* sample, not a 95th
                    # percentile.  Confirm whether small samples matter here.
                    timers['G|%s.pctl95' % metric_name] = [[timestamp, metric_values[-int(round(length * .05))]]]
            results['timer'] = timers
        # counters
        if 'counter' in delivery:
            counters = {}
            for k, v in delivery['counter'].iteritems():
                # Aggregate all observed counters into a single record.
                # v[-1][0] is the stamp of the most recently appended slot;
                # with incr()'s current behavior all slots share the first
                # slot's stamp, which matches the "oldest" intent below.
                last_stamp = v[-1][0]  # Use the oldest timestamp.
                total_value = 0
                for timestamp, value in v:
                    total_value += value
                # Condense the list of lists 'v' into a list of a single element. Remember that we are using lists
                # instead of tuples because we need mutability during self.incr().
                counters['C|%s' % k] = [[last_stamp, total_value]]
            results['counter'] = counters
        # gauges
        if 'gauge' in delivery:
            gauges = {}
            for k, v in delivery['gauge'].iteritems():
                # Aggregate all observed gauges into a single record.
                # NOTE(review): gauge() appends samples with fresh stamps, so
                # v[-1][0] here is the *newest* stamp despite the comment
                # below -- confirm which is intended.
                last_stamp = v[-1][0]  # Use the oldest timestamp.
                total_value = 0
                for timestamp, value in v:
                    total_value += value
                # Condense list of tuples 'v' into a list of a single tuple using an average value.
                gauges['G|%s' % k] = [(last_stamp, float(total_value)/len(v))]
            results['gauge'] = gauges
        # avg
        if 'average' in delivery:
            averages = {}
            timestamp = int(time.time())  # Take a new timestamp here because it is not collected previously.
            for metric_name, metric_values in delivery['average'].iteritems():
                if len(metric_values):
                    length = len(metric_values)
                    averages['G|%s' % metric_name] = [[timestamp, sum(metric_values) / float(length)]]
            results['average'] = averages
        return {
            'metrics': copy.deepcopy(results),
            'object': self.object.definition
        }
| [
"[email protected]"
]
| |
560ba1036cedaa7535985e80ac45ea83e6c5361e | 5e6d8b9989247801718dd1f10009f0f7f54c1eb4 | /sdk/python/pulumi_azure_native/compute/v20201201/availability_set.py | 25d7c983e0cf8a6ed9d97e871af5b85db623d5b4 | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | vivimouret29/pulumi-azure-native | d238a8f91688c9bf09d745a7280b9bf2dd6d44e0 | 1cbd988bcb2aa75a83e220cb5abeb805d6484fce | refs/heads/master | 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,366 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['AvailabilitySetArgs', 'AvailabilitySet']
@pulumi.input_type
class AvailabilitySetArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
availability_set_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
platform_fault_domain_count: Optional[pulumi.Input[int]] = None,
platform_update_domain_count: Optional[pulumi.Input[int]] = None,
proximity_placement_group: Optional[pulumi.Input['SubResourceArgs']] = None,
sku: Optional[pulumi.Input['SkuArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_machines: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]] = None):
"""
The set of arguments for constructing a AvailabilitySet resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] availability_set_name: The name of the availability set.
:param pulumi.Input[str] location: Resource location
:param pulumi.Input[int] platform_fault_domain_count: Fault Domain count.
:param pulumi.Input[int] platform_update_domain_count: Update Domain count.
:param pulumi.Input['SubResourceArgs'] proximity_placement_group: Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01.
:param pulumi.Input['SkuArgs'] sku: Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
:param pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]] virtual_machines: A list of references to all virtual machines in the availability set.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if availability_set_name is not None:
pulumi.set(__self__, "availability_set_name", availability_set_name)
if location is not None:
pulumi.set(__self__, "location", location)
if platform_fault_domain_count is not None:
pulumi.set(__self__, "platform_fault_domain_count", platform_fault_domain_count)
if platform_update_domain_count is not None:
pulumi.set(__self__, "platform_update_domain_count", platform_update_domain_count)
if proximity_placement_group is not None:
pulumi.set(__self__, "proximity_placement_group", proximity_placement_group)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if virtual_machines is not None:
pulumi.set(__self__, "virtual_machines", virtual_machines)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="availabilitySetName")
def availability_set_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the availability set.
"""
return pulumi.get(self, "availability_set_name")
@availability_set_name.setter
def availability_set_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "availability_set_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="platformFaultDomainCount")
def platform_fault_domain_count(self) -> Optional[pulumi.Input[int]]:
"""
Fault Domain count.
"""
return pulumi.get(self, "platform_fault_domain_count")
@platform_fault_domain_count.setter
def platform_fault_domain_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "platform_fault_domain_count", value)
@property
@pulumi.getter(name="platformUpdateDomainCount")
def platform_update_domain_count(self) -> Optional[pulumi.Input[int]]:
"""
Update Domain count.
"""
return pulumi.get(self, "platform_update_domain_count")
@platform_update_domain_count.setter
def platform_update_domain_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "platform_update_domain_count", value)
@property
@pulumi.getter(name="proximityPlacementGroup")
def proximity_placement_group(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01.
"""
return pulumi.get(self, "proximity_placement_group")
@proximity_placement_group.setter
def proximity_placement_group(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "proximity_placement_group", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input['SkuArgs']]:
"""
Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input['SkuArgs']]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="virtualMachines")
def virtual_machines(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]:
"""
A list of references to all virtual machines in the availability set.
"""
return pulumi.get(self, "virtual_machines")
@virtual_machines.setter
def virtual_machines(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]):
pulumi.set(self, "virtual_machines", value)
class AvailabilitySet(pulumi.CustomResource):
    """
    Pulumi custom resource for an Azure availability set (compute API
    version 2020-12-01).  Generated by the Pulumi SDK Generator (see the
    file-header warning); do not hand-edit logic here.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 availability_set_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 platform_fault_domain_count: Optional[pulumi.Input[int]] = None,
                 platform_update_domain_count: Optional[pulumi.Input[int]] = None,
                 proximity_placement_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[pulumi.InputType['SkuArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_machines: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubResourceArgs']]]]] = None,
                 __props__=None):
        """
        Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Manage the availability of virtual machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json). <br><br> For more information on Azure planned maintenance, see [Planned maintenance for virtual machines in Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json) <br><br> Currently, a VM can only be added to availability set at creation time. An existing VM cannot be added to an availability set.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] availability_set_name: The name of the availability set.
        :param pulumi.Input[str] location: Resource location
        :param pulumi.Input[int] platform_fault_domain_count: Fault Domain count.
        :param pulumi.Input[int] platform_update_domain_count: Update Domain count.
        :param pulumi.Input[pulumi.InputType['SubResourceArgs']] proximity_placement_group: Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[pulumi.InputType['SkuArgs']] sku: Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubResourceArgs']]]] virtual_machines: A list of references to all virtual machines in the availability set.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AvailabilitySetArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Manage the availability of virtual machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json). <br><br> For more information on Azure planned maintenance, see [Planned maintenance for virtual machines in Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json) <br><br> Currently, a VM can only be added to availability set at creation time. An existing VM cannot be added to an availability set.

        :param str resource_name: The name of the resource.
        :param AvailabilitySetArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatch between the two @overload signatures above:
        # (resource_name, args=AvailabilitySetArgs, opts) vs keyword-style.
        resource_args, opts = _utilities.get_resource_args_opts(AvailabilitySetArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 availability_set_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 platform_fault_domain_count: Optional[pulumi.Input[int]] = None,
                 platform_update_domain_count: Optional[pulumi.Input[int]] = None,
                 proximity_placement_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[pulumi.InputType['SkuArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_machines: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubResourceArgs']]]]] = None,
                 __props__=None):
        # Shared implementation behind both public __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the input property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AvailabilitySetArgs.__new__(AvailabilitySetArgs)
            __props__.__dict__["availability_set_name"] = availability_set_name
            __props__.__dict__["location"] = location
            __props__.__dict__["platform_fault_domain_count"] = platform_fault_domain_count
            __props__.__dict__["platform_update_domain_count"] = platform_update_domain_count
            __props__.__dict__["proximity_placement_group"] = proximity_placement_group
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["sku"] = sku
            __props__.__dict__["tags"] = tags
            __props__.__dict__["virtual_machines"] = virtual_machines
            # Output-only properties start as None and are resolved by the engine.
            __props__.__dict__["name"] = None
            __props__.__dict__["statuses"] = None
            __props__.__dict__["type"] = None
        # Register aliases for every other API version of this resource so
        # version upgrades do not cause replacement.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute/v20201201:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20150615:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20150615:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20160330:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20160330:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20160430preview:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20160430preview:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20170330:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20170330:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20171201:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20171201:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20180401:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20180401:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20180601:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20180601:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20181001:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20181001:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20190301:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20190301:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20190701:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20190701:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20191201:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20191201:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20200601:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20200601:AvailabilitySet"), 
pulumi.Alias(type_="azure-native:compute/v20210301:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20210301:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20210401:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20210401:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20210701:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20210701:AvailabilitySet")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(AvailabilitySet, __self__).__init__(
            'azure-native:compute/v20201201:AvailabilitySet',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'AvailabilitySet':
        """
        Get an existing AvailabilitySet resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Bypass AvailabilitySetArgs.__init__ and register None placeholders
        # for every property; binding opts.id makes this a lookup of an
        # existing resource rather than a create.
        __props__ = AvailabilitySetArgs.__new__(AvailabilitySetArgs)
        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["platform_fault_domain_count"] = None
        __props__.__dict__["platform_update_domain_count"] = None
        __props__.__dict__["proximity_placement_group"] = None
        __props__.__dict__["sku"] = None
        __props__.__dict__["statuses"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["type"] = None
        __props__.__dict__["virtual_machines"] = None
        return AvailabilitySet(resource_name, opts=opts, __props__=__props__)

    # Generated output-property getters; each proxies pulumi.get on the
    # resolved resource state.
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Resource location
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="platformFaultDomainCount")
    def platform_fault_domain_count(self) -> pulumi.Output[Optional[int]]:
        """
        Fault Domain count.
        """
        return pulumi.get(self, "platform_fault_domain_count")

    @property
    @pulumi.getter(name="platformUpdateDomainCount")
    def platform_update_domain_count(self) -> pulumi.Output[Optional[int]]:
        """
        Update Domain count.
        """
        return pulumi.get(self, "platform_update_domain_count")

    @property
    @pulumi.getter(name="proximityPlacementGroup")
    def proximity_placement_group(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
        """
        Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01.
        """
        return pulumi.get(self, "proximity_placement_group")

    @property
    @pulumi.getter
    def sku(self) -> pulumi.Output[Optional['outputs.SkuResponse']]:
        """
        Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'.
        """
        return pulumi.get(self, "sku")

    @property
    @pulumi.getter
    def statuses(self) -> pulumi.Output[Sequence['outputs.InstanceViewStatusResponse']]:
        """
        The resource status information.
        """
        return pulumi.get(self, "statuses")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="virtualMachines")
    def virtual_machines(self) -> pulumi.Output[Optional[Sequence['outputs.SubResourceResponse']]]:
        """
        A list of references to all virtual machines in the availability set.
        """
        return pulumi.get(self, "virtual_machines")
| [
"[email protected]"
]
| |
0e37bf6580d3248893e8a1c0e5dd6d1ebbe57409 | 8efe56ee34c455a6b1336897f6d457acbc9c10f9 | /tests/metarl/tf/models/test_cnn_model.py | 6d8f599ade02b39db52ef608f933167b30287246 | [
"MIT"
]
| permissive | neurips2020submission11699/metarl | ab18d11e708bf569d76cb2fab2bcce089badd111 | ae4825d21478fa1fd0aa6b116941ea40caa152a5 | refs/heads/master | 2022-10-15T22:03:09.948673 | 2020-06-11T19:22:55 | 2020-06-11T19:30:58 | 268,410,657 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,567 | py | import pickle
import numpy as np
import pytest
import tensorflow as tf
from metarl.tf.models import CNNModel
from metarl.tf.models import CNNModelWithMaxPooling
from tests.fixtures import TfGraphTestCase
class TestCNNModel(TfGraphTestCase):
def setup_method(self):
super().setup_method()
self.batch_size = 5
self.input_width = 10
self.input_height = 10
self.obs_input = np.ones(
(self.batch_size, self.input_width, self.input_height, 3))
input_shape = self.obs_input.shape[1:] # height, width, channel
self._input_ph = tf.compat.v1.placeholder(tf.float32,
shape=(None, ) + input_shape,
name='input')
# yapf: disable
@pytest.mark.parametrize('filters, in_channels, strides', [
(((32, (1, 1)),), (3, ), (1, )), # noqa: E122
(((32, (3, 3)),), (3, ), (1, )),
(((32, (3, 3)),), (3, ), (2, )),
(((32, (1, 1)), (64, (1, 1))), (3, 32), (1, 1)),
(((32, (3, 3)), (64, (3, 3))), (3, 32), (1, 1)),
(((32, (3, 3)), (64, (3, 3))), (3, 32), (2, 2)),
])
# yapf: enable
def test_output_value(self, filters, in_channels, strides):
model = CNNModel(filters=filters,
strides=strides,
name='cnn_model',
padding='VALID',
hidden_w_init=tf.constant_initializer(1),
hidden_nonlinearity=None)
outputs = model.build(self._input_ph)
output = self.sess.run(outputs,
feed_dict={self._input_ph: self.obs_input})
filter_sum = 1
# filter value after 3 layers of conv
for filter_iter, in_channel in zip(filters, in_channels):
filter_sum *= filter_iter[1][0] * filter_iter[1][1] * in_channel
height_size = self.input_height
width_size = self.input_width
for filter_iter, stride in zip(filters, strides):
height_size = int((height_size - filter_iter[1][0]) / stride) + 1
width_size = int((width_size - filter_iter[1][1]) / stride) + 1
flatten_shape = height_size * width_size * filters[-1][0]
# flatten
expected_output = np.full((self.batch_size, flatten_shape),
filter_sum,
dtype=np.float32)
assert np.array_equal(output, expected_output)
# yapf: disable
@pytest.mark.parametrize(
'filters, in_channels, strides, pool_strides, pool_shapes',
[
(((32, (1, 1)), ), (3, ), (1, ), (1, 1), (1, 1)), # noqa: E122
(((32, (3, 3)), ), (3, ), (1, ), (2, 2), (1, 1)),
(((32, (3, 3)), ), (3, ), (1, ), (1, 1), (2, 2)),
(((32, (3, 3)), ), (3, ), (1, ), (2, 2), (2, 2)),
(((32, (3, 3)), ), (3, ), (2, ), (1, 1), (2, 2)),
(((32, (3, 3)), ), (3, ), (2, ), (2, 2), (2, 2)),
(((32, (1, 1)), (64, (1, 1))), (3, 32), (1, 1), (1, 1), (1, 1)),
(((32, (3, 3)), (64, (3, 3))), (3, 32), (1, 1), (1, 1), (1, 1)),
(((32, (3, 3)), (64, (3, 3))), (3, 32), (2, 2), (1, 1), (1, 1)),
])
# yapf: enable
def test_output_value_max_pooling(self, filters, in_channels, strides,
pool_strides, pool_shapes):
model = CNNModelWithMaxPooling(
filters=filters,
strides=strides,
name='cnn_model',
padding='VALID',
pool_strides=pool_strides,
pool_shapes=pool_shapes,
hidden_w_init=tf.constant_initializer(1),
hidden_nonlinearity=None)
outputs = model.build(self._input_ph)
output = self.sess.run(outputs,
feed_dict={self._input_ph: self.obs_input})
filter_sum = 1
# filter value after 3 layers of conv
for filter_iter, in_channel in zip(filters, in_channels):
filter_sum *= filter_iter[1][0] * filter_iter[1][1] * in_channel
height_size = self.input_height
width_size = self.input_width
for filter_iter, stride in zip(filters, strides):
height_size = int((height_size - filter_iter[1][0]) / stride) + 1
height_size = int(
(height_size - pool_shapes[0]) / pool_strides[0]) + 1
width_size = int((width_size - filter_iter[1][1]) / stride) + 1
width_size = int(
(width_size - pool_shapes[1]) / pool_strides[1]) + 1
flatten_shape = height_size * width_size * filters[-1][0]
# flatten
expected_output = np.full((self.batch_size, flatten_shape),
filter_sum,
dtype=np.float32)
assert np.array_equal(output, expected_output)
# yapf: disable
@pytest.mark.parametrize('filters, strides', [
(((32, (1, 1)),), (1, )), # noqa: E122
(((32, (3, 3)),), (1, )),
(((32, (3, 3)),), (2, )),
(((32, (1, 1)), (64, (1, 1))), (1, 1)),
(((32, (3, 3)), (64, (3, 3))), (1, 1)),
(((32, (3, 3)), (64, (3, 3))), (2, 2)),
])
# yapf: enable
def test_is_pickleable(self, filters, strides):
model = CNNModel(filters=filters,
strides=strides,
name='cnn_model',
padding='VALID',
hidden_w_init=tf.constant_initializer(1),
hidden_nonlinearity=None)
outputs = model.build(self._input_ph)
with tf.compat.v1.variable_scope('cnn_model/cnn/h0', reuse=True):
bias = tf.compat.v1.get_variable('bias')
bias.load(tf.ones_like(bias).eval())
output1 = self.sess.run(outputs,
feed_dict={self._input_ph: self.obs_input})
h = pickle.dumps(model)
with tf.compat.v1.Session(graph=tf.Graph()) as sess:
model_pickled = pickle.loads(h)
input_shape = self.obs_input.shape[1:] # height, width, channel
input_ph = tf.compat.v1.placeholder(tf.float32,
shape=(None, ) + input_shape,
name='input')
outputs = model_pickled.build(input_ph)
output2 = sess.run(outputs, feed_dict={input_ph: self.obs_input})
assert np.array_equal(output1, output2)
| [
"[email protected]"
]
| |
8221dde14b547b99f9faed2041f581a2b8211915 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/storage/azure-mgmt-storage/generated_samples/queue_operation_get.py | 2fe4ed0f330180c8f1b2f904034727d53e124075 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,547 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-storage
# USAGE
python queue_operation_get.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Fetch the storage queue ``queue6185`` from account ``sto328`` and print it.

    Authenticates with ``DefaultAzureCredential`` (reads AZURE_CLIENT_ID,
    AZURE_TENANT_ID, AZURE_CLIENT_SECRET from the environment); the
    subscription id placeholder must be replaced with a real value.
    """
    credential = DefaultAzureCredential()
    client = StorageManagementClient(
        credential=credential,
        subscription_id="{subscription-id}",
    )
    queue = client.queue.get(
        resource_group_name="res3376",
        account_name="sto328",
        queue_name="queue6185",
    )
    print(queue)


# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2023-01-01/examples/QueueOperationGet.json
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
f38cf335b8fab60a2d1b2f67a4620fe3e0c47847 | dfe3191eee14251b958589f9b383fd5f8798d47e | /habanero/__init__.py | 7fae4d724c918f2b4ae26eb4c4d44980330b35cc | [
"MIT"
]
| permissive | kyleniemeyer/habanero | 39257428cc442ec764edd3616749db10af783262 | 6338f22f06912a4f1af5f0459ff8329906442489 | refs/heads/master | 2021-01-14T11:53:45.396972 | 2016-07-11T15:59:41 | 2016-07-11T15:59:41 | 66,299,090 | 0 | 0 | null | 2016-08-22T18:47:17 | 2016-08-22T18:47:16 | Python | UTF-8 | Python | false | false | 1,192 | py | # -*- coding: utf-8 -*-
# habanero
'''
habanero library
~~~~~~~~~~~~~~~~~~~~~
habanero is a low level client for the Crossref search API.
Usage::
from habanero import Crossref
cr = Crossref()
# setup a different base URL
Crossref(base_url = "http://some.other.url")
# setup an api key
Crossref(api_key = "123456")
# Make request against works route
cr.works(ids = '10.1371/journal.pone.0033693')
# curl options
## For example, set a timeout
cr.works(query = "ecology", timeout=0.1)
## advanced logging
### setup first
import requests
import logging
import httplib as http_client
http_client.HTTPConnection.debuglevel = 1
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
### then make request
cr.works(query = "ecology")
'''
__title__ = 'habanero'
__version__ = '0.2.6'
__author__ = 'Scott Chamberlain'
__license__ = 'MIT'
from .crossref import Crossref
from .cn import content_negotiation, csl_styles
from .counts import citation_count
from .exceptions import *
| [
"[email protected]"
]
| |
6f4c7736b4f7b3b3be54a806fa5fed52f9e446db | e3c6dcf5a77ae0b930087bb5849352a088dbc2e4 | /hamon_shu/segments/segment_03/.handlers.py | 4687984f7cf378e302baf025675dc29baf63361d | []
| no_license | Catsvilles/hamon_shu | 684cda44661ba18724af6719e4efc5f763c3cf61 | 35b377074cff9900193018446668aeb5440475be | refs/heads/master | 2022-12-04T08:00:46.779614 | 2020-08-26T21:25:57 | 2020-08-26T21:25:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,913 | py | import abjad
def _pitch_state(pitch_count):
    """State entry for a pitch handler; both chord counters are -1 here."""
    return abjad.OrderedDict(
        [
            ("pitch_count", pitch_count),
            ("chord_boolean_count", -1),
            ("chord_groups_count", -1),
        ]
    )


def _dynamic_state(count_1, count_2, count_3, count_4, count_5):
    """State entry for a dynamic handler (five independent counters)."""
    return abjad.OrderedDict(
        [
            ("count_1", count_1),
            ("count_2", count_2),
            ("count_3", count_3),
            ("count_4", count_4),
            ("count_5", count_5),
        ]
    )


def _articulation_state(count):
    """State entry for an articulation handler; count and vector_count are equal here."""
    return abjad.OrderedDict(
        [
            ("count", count),
            ("vector_count", count),
        ]
    )


# Per-handler bookkeeping values for this segment, keyed by handler name.
# NOTE(review): presumably consumed when resuming handlers in a later
# segment — confirm against the segment-maker that reads this file.
handler_to_value = abjad.OrderedDict(
    [
        ("violin_1_pitch_handler_three", _pitch_state(38)),
        ("violin_1_pitch_handler_one", _pitch_state(45)),
        ("violin_1_pitch_handler_two", _pitch_state(59)),
        ("violin_1_pitch_handler_four", _pitch_state(34)),
        ("violin_2_pitch_handler_three", _pitch_state(45)),
        ("violin_2_pitch_handler_one", _pitch_state(25)),
        ("violin_2_pitch_handler_two", _pitch_state(52)),
        ("violin_2_pitch_handler_four", _pitch_state(26)),
        ("viola_pitch_handler_three", _pitch_state(72)),
        ("viola_pitch_handler_one", _pitch_state(24)),
        ("viola_pitch_handler_two", _pitch_state(57)),
        ("viola_pitch_handler_four", _pitch_state(38)),
        ("cello_pitch_handler_three", _pitch_state(44)),
        ("cello_pitch_handler_one", _pitch_state(34)),
        ("cello_pitch_handler_two", _pitch_state(55)),
        ("cello_pitch_handler_four", _pitch_state(14)),
        ("dynamic_handler_one", _dynamic_state(39, 12, 26, 12, 39)),
        ("dynamic_handler_two", _dynamic_state(10, 3, 6, 3, 10)),
        ("articulation_handler_three", _articulation_state(92)),
        ("articulation_handler_two", _articulation_state(19)),
    ]
)
"[email protected]"
]
| |
e12d86d370dcf165b72e5e841e08d996d6f90d10 | e65d16ea1e8d412bac75a809be6d390126bdf528 | /homeassistant/components/hassio/issues.py | 2af0a6ed76485bddc49bf5c1300208967512a4f9 | [
"Apache-2.0"
]
| permissive | syssi/home-assistant | 6347d57866cb16ab9d4499ad38e2be6f0399077f | fd43687833741b21221769d46b4d1ecef8a94711 | refs/heads/dev | 2023-08-17T09:31:52.680518 | 2023-06-11T14:22:12 | 2023-06-11T14:22:12 | 97,874,495 | 6 | 16 | Apache-2.0 | 2023-09-13T06:31:21 | 2017-07-20T20:12:37 | Python | UTF-8 | Python | false | false | 11,530 | py | """Supervisor events monitor."""
from __future__ import annotations
import asyncio
from dataclasses import dataclass, field
import logging
from typing import Any, TypedDict
from typing_extensions import NotRequired
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from .const import (
ATTR_DATA,
ATTR_HEALTHY,
ATTR_ISSUES,
ATTR_SUGGESTIONS,
ATTR_SUPPORTED,
ATTR_UNHEALTHY,
ATTR_UNHEALTHY_REASONS,
ATTR_UNSUPPORTED,
ATTR_UNSUPPORTED_REASONS,
ATTR_UPDATE_KEY,
ATTR_WS_EVENT,
DOMAIN,
EVENT_HEALTH_CHANGED,
EVENT_ISSUE_CHANGED,
EVENT_ISSUE_REMOVED,
EVENT_SUPERVISOR_EVENT,
EVENT_SUPERVISOR_UPDATE,
EVENT_SUPPORTED_CHANGED,
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
PLACEHOLDER_KEY_REFERENCE,
UPDATE_KEY_SUPERVISOR,
SupervisorIssueContext,
)
from .handler import HassIO, HassioAPIError
ISSUE_KEY_UNHEALTHY = "unhealthy"
ISSUE_KEY_UNSUPPORTED = "unsupported"
ISSUE_ID_UNHEALTHY = "unhealthy_system"
ISSUE_ID_UNSUPPORTED = "unsupported_system"
INFO_URL_UNHEALTHY = "https://www.home-assistant.io/more-info/unhealthy"
INFO_URL_UNSUPPORTED = "https://www.home-assistant.io/more-info/unsupported"
PLACEHOLDER_KEY_REASON = "reason"
UNSUPPORTED_REASONS = {
"apparmor",
"connectivity_check",
"content_trust",
"dbus",
"dns_server",
"docker_configuration",
"docker_version",
"cgroup_version",
"job_conditions",
"lxc",
"network_manager",
"os",
"os_agent",
"restart_policy",
"software",
"source_mods",
"supervisor_version",
"systemd",
"systemd_journal",
"systemd_resolved",
}
# Some unsupported reasons also mark the system as unhealthy. If the unsupported reason
# provides no additional information beyond the unhealthy one then skip that repair.
UNSUPPORTED_SKIP_REPAIR = {"privileged"}
UNHEALTHY_REASONS = {
"docker",
"supervisor",
"setup",
"privileged",
"untrusted",
}
# Keys (type + context) of issues that when found should be made into a repair
ISSUE_KEYS_FOR_REPAIRS = {
"issue_mount_mount_failed",
"issue_system_multiple_data_disks",
"issue_system_reboot_required",
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
}
_LOGGER = logging.getLogger(__name__)
class SuggestionDataType(TypedDict):
    """Suggestion dictionary as received from supervisor.

    Raw payload shape; parsed into ``Suggestion`` by ``Suggestion.from_dict``.
    """
    uuid: str  # unique identifier assigned by the Supervisor
    type: str  # suggestion type string
    context: str  # raw context value; converted to SupervisorIssueContext on parse
    reference: str | None  # optional reference to the affected object, may be None
@dataclass(slots=True, frozen=True)
class Suggestion:
    """Suggestion from Supervisor which resolves an issue."""
    uuid: str  # unique identifier of the suggestion
    type: str  # suggestion type string
    context: SupervisorIssueContext
    reference: str | None = None  # optional reference, may be None
    @property
    def key(self) -> str:
        """Get key for suggestion (combination of context and type)."""
        return f"{self.context}_{self.type}"
    @classmethod
    def from_dict(cls, data: SuggestionDataType) -> Suggestion:
        """Convert from dictionary representation.

        Parses the raw ``context`` string into a ``SupervisorIssueContext``.
        """
        return cls(
            uuid=data["uuid"],
            type=data["type"],
            context=SupervisorIssueContext(data["context"]),
            reference=data["reference"],
        )
class IssueDataType(TypedDict):
    """Issue dictionary as received from supervisor.

    Raw payload shape; parsed into ``Issue`` by ``Issue.from_dict``.
    """
    uuid: str  # unique identifier assigned by the Supervisor
    type: str  # issue type string
    context: str  # raw context value; converted to SupervisorIssueContext on parse
    reference: str | None  # optional reference to the affected object, may be None
    suggestions: NotRequired[list[SuggestionDataType]]  # may be absent; fetched separately
@dataclass(slots=True, frozen=True)
class Issue:
    """Issue from Supervisor."""
    uuid: str  # unique identifier of the issue
    type: str  # issue type string
    context: SupervisorIssueContext
    reference: str | None = None  # optional reference, may be None
    # Excluded from equality/hash (compare=False); lists are mutable/unhashable.
    suggestions: list[Suggestion] = field(default_factory=list, compare=False)
    @property
    def key(self) -> str:
        """Get key for issue (combination of context and type).

        Also used as the repair's translation key (see ISSUE_KEYS_FOR_REPAIRS).
        """
        return f"issue_{self.context}_{self.type}"
    @classmethod
    def from_dict(cls, data: IssueDataType) -> Issue:
        """Convert from dictionary representation."""
        # "suggestions" is NotRequired in the payload; default to none.
        suggestions: list[SuggestionDataType] = data.get("suggestions", [])
        return cls(
            uuid=data["uuid"],
            type=data["type"],
            context=SupervisorIssueContext(data["context"]),
            reference=data["reference"],
            suggestions=[
                Suggestion.from_dict(suggestion) for suggestion in suggestions
            ],
        )
class SupervisorIssues:
    """Create issues from supervisor events.

    Mirrors the Supervisor resolution center into Home Assistant repairs:
    unhealthy/unsupported reasons and selected issues each become a repair
    entry, created and deleted as the underlying state changes.
    """
    def __init__(self, hass: HomeAssistant, client: HassIO) -> None:
        """Initialize supervisor issues."""
        self._hass = hass
        self._client = client
        # Repairs are created/deleted by the property setters and
        # add_issue/remove_issue below, keeping these caches in sync.
        self._unsupported_reasons: set[str] = set()
        self._unhealthy_reasons: set[str] = set()
        self._issues: dict[str, Issue] = {}  # keyed by issue uuid
    @property
    def unhealthy_reasons(self) -> set[str]:
        """Get unhealthy reasons. Returns empty set if system is healthy."""
        return self._unhealthy_reasons
    @unhealthy_reasons.setter
    def unhealthy_reasons(self, reasons: set[str]) -> None:
        """Set unhealthy reasons. Create or delete repairs as necessary."""
        # Newly-unhealthy reasons: create one critical repair per reason.
        for unhealthy in reasons - self.unhealthy_reasons:
            if unhealthy in UNHEALTHY_REASONS:
                # Known reason: use its dedicated translation.
                translation_key = f"{ISSUE_KEY_UNHEALTHY}_{unhealthy}"
                translation_placeholders = None
            else:
                # Unknown reason: fall back to the generic translation
                # with the reason passed as a placeholder.
                translation_key = ISSUE_KEY_UNHEALTHY
                translation_placeholders = {PLACEHOLDER_KEY_REASON: unhealthy}
            async_create_issue(
                self._hass,
                DOMAIN,
                f"{ISSUE_ID_UNHEALTHY}_{unhealthy}",
                is_fixable=False,
                learn_more_url=f"{INFO_URL_UNHEALTHY}/{unhealthy}",
                severity=IssueSeverity.CRITICAL,
                translation_key=translation_key,
                translation_placeholders=translation_placeholders,
            )
        # Reasons no longer present: delete their repairs.
        for fixed in self.unhealthy_reasons - reasons:
            async_delete_issue(self._hass, DOMAIN, f"{ISSUE_ID_UNHEALTHY}_{fixed}")
        self._unhealthy_reasons = reasons
    @property
    def unsupported_reasons(self) -> set[str]:
        """Get unsupported reasons. Returns empty set if system is supported."""
        return self._unsupported_reasons
    @unsupported_reasons.setter
    def unsupported_reasons(self, reasons: set[str]) -> None:
        """Set unsupported reasons. Create or delete repairs as necessary."""
        # Skip reasons in UNSUPPORTED_SKIP_REPAIR: those are already covered
        # by the corresponding unhealthy repair and add no information.
        for unsupported in reasons - UNSUPPORTED_SKIP_REPAIR - self.unsupported_reasons:
            if unsupported in UNSUPPORTED_REASONS:
                translation_key = f"{ISSUE_KEY_UNSUPPORTED}_{unsupported}"
                translation_placeholders = None
            else:
                translation_key = ISSUE_KEY_UNSUPPORTED
                translation_placeholders = {PLACEHOLDER_KEY_REASON: unsupported}
            async_create_issue(
                self._hass,
                DOMAIN,
                f"{ISSUE_ID_UNSUPPORTED}_{unsupported}",
                is_fixable=False,
                learn_more_url=f"{INFO_URL_UNSUPPORTED}/{unsupported}",
                severity=IssueSeverity.WARNING,
                translation_key=translation_key,
                translation_placeholders=translation_placeholders,
            )
        # Deleting a skip-repair reason's id is harmless: it was never created.
        for fixed in self.unsupported_reasons - (reasons - UNSUPPORTED_SKIP_REPAIR):
            async_delete_issue(self._hass, DOMAIN, f"{ISSUE_ID_UNSUPPORTED}_{fixed}")
        self._unsupported_reasons = reasons
    @property
    def issues(self) -> set[Issue]:
        """Get issues."""
        return set(self._issues.values())
    def add_issue(self, issue: Issue) -> None:
        """Add or update an issue in the list. Create or update a repair if necessary."""
        # Only issue keys listed in ISSUE_KEYS_FOR_REPAIRS surface as repairs;
        # everything else is cached only.
        if issue.key in ISSUE_KEYS_FOR_REPAIRS:
            placeholders: dict[str, str] | None = None
            if issue.reference:
                placeholders = {PLACEHOLDER_KEY_REFERENCE: issue.reference}
            async_create_issue(
                self._hass,
                DOMAIN,
                issue.uuid,
                is_fixable=bool(issue.suggestions),
                severity=IssueSeverity.WARNING,
                translation_key=issue.key,
                translation_placeholders=placeholders,
            )
        self._issues[issue.uuid] = issue
    async def add_issue_from_data(self, data: IssueDataType) -> None:
        """Add issue from data to list after getting latest suggestions."""
        try:
            # Refresh suggestions from the Supervisor so is_fixable is accurate.
            data["suggestions"] = (
                await self._client.get_suggestions_for_issue(data["uuid"])
            )[ATTR_SUGGESTIONS]
        except HassioAPIError:
            # Best effort: without suggestions we skip the issue entirely.
            _LOGGER.error(
                "Could not get suggestions for supervisor issue %s, skipping it",
                data["uuid"],
            )
            return
        self.add_issue(Issue.from_dict(data))
    def remove_issue(self, issue: Issue) -> None:
        """Remove an issue from the list. Delete a repair if necessary."""
        if issue.uuid not in self._issues:
            return
        if issue.key in ISSUE_KEYS_FOR_REPAIRS:
            async_delete_issue(self._hass, DOMAIN, issue.uuid)
        del self._issues[issue.uuid]
    def get_issue(self, issue_id: str) -> Issue | None:
        """Get issue from key."""
        return self._issues.get(issue_id)
    async def setup(self) -> None:
        """Create supervisor events listener."""
        # Seed state once, then follow incremental websocket events.
        await self.update()
        async_dispatcher_connect(
            self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_issues
        )
    async def update(self) -> None:
        """Update issues from Supervisor resolution center."""
        data = await self._client.get_resolution_info()
        self.unhealthy_reasons = set(data[ATTR_UNHEALTHY])
        self.unsupported_reasons = set(data[ATTR_UNSUPPORTED])
        # Remove any cached issues that weren't returned
        for issue_id in set(self._issues.keys()) - {
            issue["uuid"] for issue in data[ATTR_ISSUES]
        }:
            self.remove_issue(self._issues[issue_id])
        # Add/update any issues that came back
        await asyncio.gather(
            *[self.add_issue_from_data(issue) for issue in data[ATTR_ISSUES]]
        )
    @callback
    def _supervisor_events_to_issues(self, event: dict[str, Any]) -> None:
        """Create issues from supervisor events."""
        if ATTR_WS_EVENT not in event:
            return
        # A Supervisor update may change resolution info wholesale: re-sync.
        if (
            event[ATTR_WS_EVENT] == EVENT_SUPERVISOR_UPDATE
            and event.get(ATTR_UPDATE_KEY) == UPDATE_KEY_SUPERVISOR
        ):
            self._hass.async_create_task(self.update())
        elif event[ATTR_WS_EVENT] == EVENT_HEALTH_CHANGED:
            self.unhealthy_reasons = (
                set()
                if event[ATTR_DATA][ATTR_HEALTHY]
                else set(event[ATTR_DATA][ATTR_UNHEALTHY_REASONS])
            )
        elif event[ATTR_WS_EVENT] == EVENT_SUPPORTED_CHANGED:
            self.unsupported_reasons = (
                set()
                if event[ATTR_DATA][ATTR_SUPPORTED]
                else set(event[ATTR_DATA][ATTR_UNSUPPORTED_REASONS])
            )
        elif event[ATTR_WS_EVENT] == EVENT_ISSUE_CHANGED:
            self.add_issue(Issue.from_dict(event[ATTR_DATA]))
        elif event[ATTR_WS_EVENT] == EVENT_ISSUE_REMOVED:
            self.remove_issue(Issue.from_dict(event[ATTR_DATA]))
| [
"[email protected]"
]
| |
f29f4d3f9eb00ed98d6c9da648caeb5da3c9d380 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /f3jX2BwzAuR8DXsy4_22.py | e469acc301be3f4807256f980cb528fa19e2fb93 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | """
Create a function that takes an integer `n` and returns the **factorial of
factorials**. See below examples for a better understanding:
### Examples
fact_of_fact(4) ➞ 288
# 4! * 3! * 2! * 1! = 288
fact_of_fact(5) ➞ 34560
fact_of_fact(6) ➞ 24883200
### Notes
N/A
"""
import math
from functools import reduce
def fact_of_fact(n):
    """Return the "factorial of factorials": 1! * 2! * ... * n!.

    For n <= 0 the empty product, 1, is returned (the original
    reduce-based version raised TypeError on an empty sequence).
    """
    return math.prod(math.factorial(i) for i in range(1, n + 1))
| [
"[email protected]"
]
| |
96093e0e0b317846982be7163fd0fb65f35e64a6 | 90fa5489f9849494da93d64d54557491bf85d52f | /PythonNLP/C05/C0502.py | 01b42fee0d9479ef0e45dd5c28809e341fbbdf0f | []
| no_license | liping2084/NLTK-Python-CN | f0a072e9632661e95b4fb6419bb125dcdb8d68cd | f5b7520811f727e0111336efd536b28bbb01edf1 | refs/heads/master | 2023-05-09T05:42:34.004242 | 2020-12-15T04:39:37 | 2020-12-15T04:39:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,587 | py | # -*- encoding: utf-8 -*-
"""
@Author : zYx.Tom
@Contact : [email protected]
@site : https://zhuyuanxiang.github.io
---------------------------
@Software : PyCharm
@Project : NLTK-Python-CN
@File : C0502.py
@Version : v0.1
@Time : 2020-11-19 10:34
@License : (C)Copyright 2018-2020, zYx.Tom
@Reference :
@Desc :
@理解:
"""
from collections import Counter, defaultdict

import nltk
from nltk.corpus import brown

from tools import show_subtitle
# Sec 5.2 标注语料库
# 5.2.1 表示已经标注的标识符
# str2tuple() 将已经标注的字符串转换成元组
# Ch5 分类和标注词汇
# 1. 什么是词汇分类,在自然语言处理中它们如何使用?
# 2. 对于存储词汇和它们的分类来说什么是好的 Python 数据结构?
# 3. 如何自动标注文本中每个词汇的词类?
# - 词性标注(parts-of-speech tagging,POS tagging):简称标注。将词汇按照它们的词性(parts-of-speech,POS)进行分类并对它们进行标注
# - 词性:也称为词类或者词汇范畴。
# - 标记集:用于特定任务标记的集合。
taggen_token = nltk.tag.str2tuple('fly/NN')
print("taggen_token= ", taggen_token)
print("taggen_token[0]= ", taggen_token[0])
print("taggen_token[1]= ", taggen_token[1])
# 拆分已经标注的语料文本为元组
sent = '''
The/AT grand/JJ jury/NN commented/VBD on/IN a/AT number/NN of/IN
other/AP topics/NNS ,/, AMONG/IN them/PPO the/AT Atlanta/NP and/CC
Fulton/NP-tl County/NN-tl purchasing/VBG departments/NNS which/WDT it/PPS
said/VBD ``/`` ARE/BER well/QL operated/VBN and/CC follow/VB generally/RB
accepted/VBN practices/NNS which/WDT inure/VB to/IN the/AT best/JJT
interest/NN of/IN both/ABX governments/NNS ''/'' ./.
'''
print("sent.split()= ", sent.split())
split_sent_to_tuple = [nltk.tag.str2tuple(t) for t in sent.split()]
print("split_sent_to_tuple= ", split_sent_to_tuple)
# 5.2.2 读取已经标注的语料库
# 打开brown语料库的ca01文件,可以看到下面的内容:
# The/at Fulton/np-tl County/nn-tl Grand/jj-tl Jury/nn-tl said/vbd Friday/nr an/at
# investigation/nn of/in Atlanta's/np$ recent/jj primary/nn election/nn produced/vbd
# ``/`` no/at evidence/nn ''/'' that/cs any/dti irregularities/nns took/vbd place/nn ./.
# 这个是已经标注好的语料库,可以使用函数tagged_words()直接读取
# tagset='universal' 使用通用标注集进行词类标注
# simplify_tags 不再支持
show_subtitle("brown.tagged_words()")
print(brown.tagged_words())
show_subtitle("brown.tagged_words(tagset='universal')")
print(brown.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.treebank.tagged_words()")
print(nltk.corpus.treebank.tagged_words())
show_subtitle("nltk.corpus.treebank.tagged_words(tagset='universal')")
print(nltk.corpus.treebank.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.nps_chat.tagged_words()")
print(nltk.corpus.nps_chat.tagged_words())
show_subtitle("nltk.corpus.nps_chat.tagged_words(tagset='universal')")
print(nltk.corpus.nps_chat.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.conll2000.tagged_words()")
print(nltk.corpus.conll2000.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.conll2000.tagged_words()")
print(nltk.corpus.conll2000.tagged_words(tagset='universal'))
# Note:以下的都无法正常转换为通用标注集
# 因为每个语言的标注集都是根据自己的情况定义的,不与通用标注集完全相同
show_subtitle("nltk.corpus.sinica_treebank.tagged_words()")
print(nltk.corpus.sinica_treebank.tagged_words())
show_subtitle("nltk.corpus.sinica_treebank.tagged_words(tagset='universal')")
# print(nltk.corpus.sinica_treebank.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.indian.tagged_words()")
print(nltk.corpus.indian.tagged_words())
show_subtitle("nltk.corpus.indian.tagged_words(tagset='universal')")
# print(nltk.corpus.indian.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.mac_morpho.tagged_words()")
print(nltk.corpus.mac_morpho.tagged_words())
show_subtitle("nltk.corpus.mac_morpho.tagged_words(tagset='universal')")
# print(nltk.corpus.mac_morpho.tagged_words(tagset='universal'))
show_subtitle("nltk.corpus.cess_cat.tagged_words()")
print(nltk.corpus.cess_cat.tagged_words())
show_subtitle("nltk.corpus.cess_cat.tagged_words(tagset='universal')")
# print(nltk.corpus.cess_cat.tagged_words(tagset='universal'))
# 使用tagged_sents()可以直接把语料库分割成句子,而不是将所有的词表示成一个链表,句子中的词同样进行了词类标注。
# 因为开发的自动标注器需要在句子链表上进行训练和测试,而不是在词链表上。
show_subtitle("brown.tagged_sents()[0]")
print(brown.tagged_sents()[0])
show_subtitle("brown.tagged_sents(tagset='universal')[0]")
print(brown.tagged_sents(tagset='universal')[0])
# 5.2.3 A Universal Part-of-Speech Tagset, 一个通用的(简化的)标注集
# http://www.nltk.org/book/ch05.html Table2.1 (比书P200 表5-1还要简单)
# Tag Meaning English Examples
# ADJ adjective new, good, high, special, big, local
# ADP adposition on, of, at, with, by, into, under
# ADV adverb really, already, still, early, now
# CONJ conjunction and, or, but, if, while, although
# DET determiner, article the, a, some, most, every, no, which
# NOUN noun year, home, costs, time, Africa
# NUM numeral twenty-four, fourth, 1991, 14:24
# PRT particle at, on, out, over per, that, up, with
# PRON pronoun he, their, her, its, my, I, us
# VERB verb is, say, told, given, playing, would
# . punctuation marks . , ; !
# X other ersatz, esprit, dunno, gr8, univeristy
# 布朗语料库的新闻类中哪些标记最常见
brown_news_tagged = brown.tagged_words(categories='news', tagset='universal')
tag_fd = nltk.FreqDist(tag for (word, tag) in brown_news_tagged)
print("list(tag_fd)= ", list(tag_fd))
print("tag_fd.keys()= ", tag_fd.keys())
print("tag_fd.most_common()= ", tag_fd.most_common())
show_subtitle("tag_fd.tabulate()")
tag_fd.tabulate()
tag_fd.plot()
# 图形化的POS一致性工具,可以用来寻找任一词和POS标记的组合
# 例如:"VERB VERB" 或者 "was missing" 或者 "had VERB" 或者 "DET money" 等等
nltk.app.concordance()
# 5.2.4 名词
# 名词:一般指人、地点、事情和概念。可能出现在限定词和形容词之后,可以是动词的主语或者宾语。
# 表5-2 名词的句法模式
# 统计构成二元模型(W1,W2)中W2=‘NOUN’的W1的词性的比例
brown_news_tagged = brown.tagged_words(categories='news', tagset='universal')
word_tag_pairs = nltk.bigrams(brown_news_tagged) # 构建双词链表
noun_precedes = [
a[1]
for (a, b) in word_tag_pairs
if b[1] == 'NOUN'
]
fdist = nltk.FreqDist(noun_precedes)
print("fdist.most_common()= ", fdist.most_common())
tag_list = [
tag
for (tag, _) in fdist.most_common()
]
print("tag_list= ", tag_list)
fdist.plot()
# 结论:名词最容易出现在名词后面
# 其次是出现在限定词和形容词之后,包括数字形容词(即数词,标注为NUM)
# 5.2.5 动词
# 动词:描述事件和行动的词。在句子中,动词通常表示涉及一个或多个名词短语所指示物的关系。
# 表5-3 动词的句法模式
# 找出新闻文本中最常见的动词(频率分布中计算的项目是词——标记对)
wsj = nltk.corpus.treebank.tagged_words(tagset='universal')
word_tag_fd = nltk.FreqDist(wsj)
show_subtitle("word_tag_fd.most_common(20)")
print(word_tag_fd.most_common(20))
word_tag_list = [
wt[0]
for (wt, _) in word_tag_fd.most_common()
if wt[1] == 'VERB'
]
show_subtitle("word_tag_list[:20]")
print(word_tag_list[:20])
fdist = nltk.FreqDist(word_tag_fd)
show_subtitle("fdist.most_common(20)")
print(fdist.most_common(20))
# fdist.plot() # 不能执行,会死机,因为动词单词数目太多
wsj = nltk.corpus.treebank.tagged_words(tagset='universal')
word_tag_pairs = nltk.bigrams(wsj)
verb_precedes = [
a[1]
for (a, b) in word_tag_pairs
if b[1] == 'VERB'
]
fdist = nltk.FreqDist(verb_precedes)
print("fdist.most_common()= ", fdist.most_common())
show_subtitle("fdist.tabulate()")
fdist.tabulate()
# fdist.plot()
# 结论:动词出现在名词、动词、名字和副词后面。
# 因为词汇和标记是成对的
# 所以把词汇作为条件,把标记作为事件,使用条件——事件对的链表初始化条件频率分布。
wsj = nltk.corpus.treebank.tagged_words(tagset='universal')
cfd1 = nltk.ConditionalFreqDist(wsj)
print("cfd1['yield'].most_common(20)= ", cfd1['yield'].most_common(20))
print("cfd1['cut'].most_common(20)= ", cfd1['cut'].most_common(20))
# 动词太多,执行时间过长,显示效果不好
# cfd1.tabulate()
# cfd1.plot()
# 也可以颠倒配对,把标记作为条件,词汇作为事件,生成条件频率分布,就可以直接查找标记对应哪些词了。
wsj = nltk.corpus.treebank.tagged_words()
cfd2 = nltk.ConditionalFreqDist((tag, word) for (word, tag) in wsj)
print("cfd2= ", cfd2)
print("cfd2['VBN']= ", cfd2['VBN'])
show_subtitle("cfd2['VBN'].most_common(20)")
print(cfd2['VBN'].most_common(20))
show_subtitle("list(cfd2['VBN'].keys())[:20]")
print(list(cfd2['VBN'].keys())[:20])
show_subtitle("cfd2['VBN'].most_common(20)")
print(cfd2['VBN'].most_common(20))
show_subtitle("in")
print("'been' in cfd2['VBN'].keys()= ", 'been' in cfd2['VBN'].keys())
# 尝试分辨VD(过去式)和VN(过去分词)之间的区别
# 先找出同是VD和VN的词汇,然后分析它们的上下文区别
wsj = nltk.corpus.treebank.tagged_words()
cfd3 = nltk.ConditionalFreqDist(wsj)
# cfd.conditions() 返回所有的条件构成的链表,等价于list(cfd1.keys())返回所有的关键字。
show_subtitle("cfd3.conditions()[:20]")
print(cfd3.conditions()[:20])
show_subtitle("list(cfd3.keys())[:20]")
print(list(cfd3.keys())[:20])
# 寻找既可以作为 VBD 还可以作为 VBN 的单词
word_list = [
w
for w in cfd3.conditions()
if 'VBD' in cfd3[w] and 'VBN' in cfd3[w]
]
show_subtitle("word_list[:20]")
print(word_list[:20])
# kicked 作为 VBD 的示例句子
idx1 = wsj.index(('kicked', 'VBD'))
show_subtitle("idx1")
print(' '.join(word for word, tag in wsj[idx1 - 10:idx1 + 10]))
# kicked 作为 VBN 的示例句子
idx2 = wsj.index(('kicked', 'VBN'))
show_subtitle("idx2")
print(' '.join(word for word, tag in wsj[idx2 - 10:idx2 + 10]))
# 5.2.6 其他词类
# (形容词、副词、介词、冠词(限定词)、情态动词、人称代词)
# 形容词:修饰名词,可以作为修饰符 或 谓语。
# 副词:修饰动词,指定时间、方式、地点或动词描述的事件发展方向;修饰形容词。
# 5.2.7 未简化的标记(P204)
# Ex5-1 找出最频繁的名词标记的程序
def find_tags(tag_prefix, tagged_text):
    """Map each tag starting with ``tag_prefix`` to its 5 most frequent words.

    Args:
        tag_prefix: tag prefix to match, e.g. 'NN'.
        tagged_text: iterable of (word, tag) pairs.

    Returns:
        dict mapping tag -> list of up to 5 (word, count) pairs, ordered by
        decreasing frequency (ties keep first-occurrence order), identical to
        the nltk.ConditionalFreqDist-based original but pure standard library.
    """
    word_counts = defaultdict(Counter)
    for word, tag in tagged_text:
        if tag.startswith(tag_prefix):
            word_counts[tag][word] += 1
    return {tag: counts.most_common(5) for tag, counts in word_counts.items()}
brown_tagged_words = brown.tagged_words(categories='news')
tag_dict = find_tags('NN', brown_tagged_words)
for tag in sorted(tag_dict):
print(tag, tag_dict[tag])
# 5.2.8 探索已经标注的语料库
# 观察 often 后面的词汇
brown_learned_text = brown.tagged_words(categories='learned')
print(sorted(set(
b
for (a, b) in nltk.bigrams(brown_learned_text)
if a == 'often'
)))
brown_learned_tagged = brown.tagged_words(categories='learned', tagset='universal')
brown_learned_bigrams = nltk.bigrams(brown_learned_tagged)
print("brown_learned_bigrams= ", brown_learned_bigrams)
a_b_list = [
(a, b)
for (a, b) in brown_learned_bigrams
]
show_subtitle("a_b_list")
print(a_b_list[:20])
# 观察 often 后面的词汇
tags = [
b[1]
for (a, b) in nltk.bigrams(brown_learned_tagged)
if a[0] == 'often'
]
fd = nltk.FreqDist(tags)
fd.tabulate()
fd.plot()
# P205 Ex5-2 使用POS标记寻找三词短语(<Verb>to<Verb>)
def process(sentence):
    """Print every <verb> to <verb> trigram found in a tagged sentence.

    Args:
        sentence: sequence of (word, tag) pairs (callers pass tagged_sent lists).

    The sliding trigram window is built with plain ``zip`` over slices instead
    of ``nltk.trigrams`` — identical output for list input, no nltk needed,
    and sentences shorter than 3 tokens simply yield nothing.
    """
    for (w1, t1), (w2, t2), (w3, t3) in zip(sentence, sentence[1:], sentence[2:]):
        if t1.startswith('V') and t2 == 'TO' and t3.startswith('V'):
            print(w1, w2, w3)
for i, tagged_sent in enumerate(brown.tagged_sents()):
if i < 100 and len(tagged_sent) >= 3:
process(tagged_sent)
brown_news_tagged = brown.tagged_words(categories='news', tagset='universal')
data = nltk.ConditionalFreqDist(
(word.lower(), tag)
for (word, tag) in brown_news_tagged
)
for word in sorted(data.conditions()):
if len(data[word]) > 3:
tags = [
tag
for (tag, _) in data[word].most_common()
]
print(word, ' '.join(tags))
print("data['works']= ", data['works'])
print("data['$1']= ", data['$1'])
print("data['$222']= ", data['$222'])
# data.tabulate() # 执行结果过多,时间过长
show_subtitle("data.conditions()")
print(data.conditions()[:20])
show_subtitle("data.values()")
# print(data.values()) # 执行结果过多,时间过长
for i, (cfd_key, cfd_value) in enumerate(zip(data.keys(), data.values())):
if i < 20:
show_subtitle(cfd_key)
for j, fd_value in enumerate(cfd_value.values()):
print(fd_value, end=',')
nltk.app.concordance()
| [
"[email protected]"
]
| |
5c11d0ef4c5a83f6c0d971af6f4730a9a6fe1a67 | c1e488789b41a714cdd37525d7e71815753c21d9 | /atcoder/beginners/chap1/PASTFILES/ABC088A_1.py | 041c7c7cbb41d815d7d2848a46a3bce2ad8a670a | []
| no_license | happyhappyhappyhappy/pythoncode | 638a0cbeb94ec04829c1c4e216fb200863cd7a4e | 247b8346a503cab272043c20e6210ee03cfdd8c4 | refs/heads/master | 2023-08-31T20:54:06.144750 | 2023-08-30T08:33:15 | 2023-08-30T08:33:15 | 223,697,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | # Problem https://atcoder.jp/contests/abc088/tasks/abc088_a
# Python 1st Try
if __name__ == "__main__":
yes = "Yes"
no = "No"
answer = ""
N = int(input().strip())
A = int(input().strip())
chargeCoin = N % 500
if chargeCoin <= A:
answer = yes
else:
answer = no
print(answer)
exit
| [
"[email protected]"
]
| |
2174b795b603fa21096a6785c9d48f544f644a5f | f30163c5c3c2051a699062a2baa4a632e2d47ad6 | /openspeech/models/openspeech_encoder_decoder_model.py | de45acc8ed693c7d77c9502a55c9ffc3282f54ff | [
"MIT",
"LicenseRef-scancode-secret-labs-2011",
"Unlicense",
"HPND",
"BSD-3-Clause",
"ISC",
"Apache-2.0",
"BSD-2-Clause"
]
| permissive | dnfcallan/openspeech | 6740672df1e9c43e898ff9eaa5fafdc20bf9593a | 55e50cb9b3cc3e7a6dfddcd33e6e698cca3dae3b | refs/heads/main | 2023-06-20T19:40:28.953644 | 2021-07-16T10:16:05 | 2021-07-16T10:16:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,191 | py | # MIT License
#
# Copyright (c) 2021 Soohwan Kim and Sangchun Ha and Soyoung Cho
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from torch import Tensor
from collections import OrderedDict
from typing import Dict
from omegaconf import DictConfig
from openspeech.models import OpenspeechModel
from openspeech.utils import get_class_name
from openspeech.vocabs.vocab import Vocabulary
class OpenspeechEncoderDecoderModel(OpenspeechModel):
r"""
Base class for OpenSpeech's encoder-decoder models.
Args:
configs (DictConfig): configuration set.
vocab (Vocabulary): the class of vocabulary
Inputs:
- **inputs** (torch.FloatTensor): A input sequence passed to encoders. Typically for inputs this will be
a padded `FloatTensor` of size ``(batch, seq_length, dimension)``.
- **input_lengths** (torch.LongTensor): The length of input tensor. ``(batch)``
Returns:
- **y_hats** (torch.FloatTensor): Result of model predictions.
"""
def __init__(self, configs: DictConfig, vocab: Vocabulary, ) -> None:
super(OpenspeechEncoderDecoderModel, self).__init__(configs, vocab)
self.teacher_forcing_ratio = configs.model.teacher_forcing_ratio
self.encoder = None
self.decoder = None
self.criterion = self.configure_criterion(self.configs.criterion.criterion_name)
def set_beam_decoder(self, beam_size: int = 3):
raise NotImplementedError
def collect_outputs(
self,
stage: str,
logits: Tensor,
encoder_logits: Tensor,
encoder_output_lengths: Tensor,
targets: Tensor,
target_lengths: Tensor,
) -> OrderedDict:
cross_entropy_loss, ctc_loss = None, None
if get_class_name(self.criterion) == "JointCTCCrossEntropyLoss":
loss, ctc_loss, cross_entropy_loss = self.criterion(
encoder_logits=encoder_logits.transpose(0, 1),
logits=logits,
output_lengths=encoder_output_lengths,
targets=targets[:, 1:],
target_lengths=target_lengths,
)
elif get_class_name(self.criterion) == "LabelSmoothedCrossEntropyLoss" \
or get_class_name(self.criterion) == "CrossEntropyLoss":
loss = self.criterion(logits, targets[:, 1:])
else:
raise ValueError(f"Unsupported criterion: {self.criterion}")
predictions = logits.max(-1)[1]
wer = self.wer_metric(targets[:, 1:], predictions)
cer = self.cer_metric(targets[:, 1:], predictions)
self.info({
f"{stage}_loss": loss,
f"{stage}_cross_entropy_loss": cross_entropy_loss,
f"{stage}_ctc_loss": ctc_loss,
f"{stage}_wer": wer,
f"{stage}_cer": cer,
})
return OrderedDict({
"loss": loss,
"cross_entropy_loss": cross_entropy_loss,
"ctc_loss": ctc_loss,
"predictions": predictions,
"targets": targets,
"logits": logits,
"learning_rate": self.get_lr(),
})
def forward(self, inputs: Tensor, input_lengths: Tensor) -> Dict[str, Tensor]:
r"""
Forward propagate a `inputs` and `targets` pair for inference.
Inputs:
inputs (torch.FloatTensor): A input sequence passed to encoders. Typically for inputs this will be a padded
`FloatTensor` of size ``(batch, seq_length, dimension)``.
input_lengths (torch.LongTensor): The length of input tensor. ``(batch)``
Returns:
* dict (dict): Result of model predictions that contains `predictions`, `logits`, `encoder_outputs`,
`encoder_logits`, `encoder_output_lengths`.
"""
logits = None
encoder_outputs, encoder_logits, encoder_output_lengths = self.encoder(inputs, input_lengths)
if get_class_name(self.decoder) in ("BeamSearchLSTM", "BeamSearchTransformer"):
predictions = self.decoder(encoder_outputs, encoder_output_lengths)
else:
logits = self.decoder(
encoder_outputs=encoder_outputs,
encoder_output_lengths=encoder_output_lengths,
teacher_forcing_ratio=0.0,
)
predictions = logits.max(-1)[1]
return {
"predictions": predictions,
"logits": logits,
"encoder_outputs": encoder_outputs,
"encoder_logits": encoder_logits,
"encoder_output_lengths": encoder_output_lengths,
}
def training_step(self, batch: tuple, batch_idx: int) -> OrderedDict:
r"""
Forward propagate a `inputs` and `targets` pair for training.
Inputs:
train_batch (tuple): A train batch contains `inputs`, `targets`, `input_lengths`, `target_lengths`
batch_idx (int): The index of batch
Returns:
loss (torch.Tensor): loss for training
"""
inputs, targets, input_lengths, target_lengths = batch
encoder_outputs, encoder_logits, encoder_output_lengths = self.encoder(inputs, input_lengths)
if get_class_name(self.decoder) == "TransformerDecoder":
logits = self.decoder(
encoder_outputs=encoder_outputs,
targets=targets,
encoder_output_lengths=encoder_output_lengths,
target_lengths=target_lengths,
teacher_forcing_ratio=self.teacher_forcing_ratio,
)
else:
logits = self.decoder(
encoder_outputs=encoder_outputs,
targets=targets,
encoder_output_lengths=encoder_output_lengths,
teacher_forcing_ratio=self.teacher_forcing_ratio,
)
return self.collect_outputs(
stage='train',
logits=logits,
encoder_logits=encoder_logits,
encoder_output_lengths=encoder_output_lengths,
targets=targets,
target_lengths=target_lengths,
)
    def validation_step(self, batch: tuple, batch_idx: int) -> OrderedDict:
        r"""
        Forward propagate a `inputs` and `targets` pair for validation.

        Inputs:
            batch (tuple): A validation batch contains `inputs`, `targets`,
                `input_lengths`, `target_lengths`
            batch_idx (int): The index of batch

        Returns:
            loss (torch.Tensor): loss for validation
        """
        inputs, targets, input_lengths, target_lengths = batch
        encoder_outputs, encoder_logits, encoder_output_lengths = self.encoder(inputs, input_lengths)
        # No teacher forcing during validation: the decoder consumes its own
        # previous predictions.
        logits = self.decoder(
            encoder_outputs,
            encoder_output_lengths=encoder_output_lengths,
            teacher_forcing_ratio=0.0,
        )
        return self.collect_outputs(
            stage='val',
            logits=logits,
            encoder_logits=encoder_logits,
            encoder_output_lengths=encoder_output_lengths,
            targets=targets,
            target_lengths=target_lengths,
        )
    def test_step(self, batch: tuple, batch_idx: int) -> OrderedDict:
        r"""
        Forward propagate a `inputs` and `targets` pair for test.

        Inputs:
            batch (tuple): A test batch contains `inputs`, `targets`,
                `input_lengths`, `target_lengths`
            batch_idx (int): The index of batch

        Returns:
            loss (torch.Tensor): loss for test
        """
        inputs, targets, input_lengths, target_lengths = batch
        encoder_outputs, encoder_logits, encoder_output_lengths = self.encoder(inputs, input_lengths)
        # No teacher forcing at test time.
        logits = self.decoder(
            encoder_outputs,
            encoder_output_lengths=encoder_output_lengths,
            teacher_forcing_ratio=0.0,
        )
        return self.collect_outputs(
            stage='test',
            logits=logits,
            encoder_logits=encoder_logits,
            encoder_output_lengths=encoder_output_lengths,
            targets=targets,
            target_lengths=target_lengths,
        )
| [
"[email protected]"
]
| |
5937a083574b20b77de3073d1b7317e4f94be9ec | c9cf4e7acd3ff09412610965dc83988b3f501e5e | /utils/readWrite/read.py | 2fe030fe3668a47d797bc8bc787023f8779bee51 | []
| no_license | Noba1anc3/General-Doc-SemSeg | 31df6cc0c747c5586fbbeb9dace6170d3fbef4bd | 27d9761fd45b2d5d52cfe3ed50413f902912b238 | refs/heads/master | 2021-05-19T04:15:42.604378 | 2020-03-31T06:59:45 | 2020-03-31T06:59:45 | 251,524,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,247 | py | import os
import sys
from configparser import ConfigParser
from utils.logging.syslog import Logger
class Configuration():
    """Loads conf.cfg, interactively repairs invalid values, and prepares the
    output directory layout.

    Attributes set here (``folder``, ``filename``, ``tit_choice``,
    ``text_level``, ``table_level``, ``save_text``, ``save_image``,
    ``output_folder``, ``fileList`` and the optional prediction folders) are
    read by the rest of the pipeline.
    """

    def __init__(self):
        self.logging = Logger(__name__)
        Logger.get_log(self.logging).info('Start processing ConfigFile')
        self.config()
        Logger.get_log(self.logging).info('ConfigFile Processed\n')

    def config(self):
        """Read conf.cfg, validate it, and create the output directories."""
        cp = ConfigParser()
        cp.read('conf.cfg')
        self.folder = cp.get('configuration', 'folder')
        self.filename = cp.get('configuration', 'filename')
        self.tit_choice = cp.getint('configuration', 'tit_choice')
        self.text_level = cp.getint('configuration', 'text_level')
        self.table_level = cp.getint('configuration', 'table_level')
        self.save_text = cp.getboolean('configuration', 'save_text')
        self.save_image = cp.getboolean('configuration', 'save_image')

        self.configCheck()

        self.output_folder = 'output/'
        if not os.path.exists(self.output_folder):
            os.mkdir(self.output_folder)

        if self.save_text or self.save_image:
            self.prediction_folder = self.output_folder + 'prediction/'
            if not os.path.exists(self.prediction_folder):
                os.mkdir(self.prediction_folder)

        if self.save_text:
            self.json_folder = self.prediction_folder + 'json/'
            if not os.path.exists(self.json_folder):
                os.mkdir(self.json_folder)

        if self.save_image:
            self.img_folder = self.prediction_folder + 'image/'
            if not os.path.exists(self.img_folder):
                os.mkdir(self.img_folder)

        if self.filename == 'all':
            self.fileList = sorted(os.listdir(self.folder))
        else:
            self.fileList = [self.filename]

    def configCheck(self):
        """Interactively repair invalid configuration values.

        Validations:
        * ``folder`` must end with '/'
        * ``filename`` must end with '.pdf' (unless it is 'all')
        * ``tit_choice`` must be one of 0..3
        * ``text_level`` and ``table_level`` must be 1 or 2

        Repaired numeric values are stored as ``int`` so they stay consistent
        with the ``cp.getint`` reads in :meth:`config`.
        """
        if not self.folder[-1] == '/':
            Logger.get_log(self.logging).critical('Configuration - Folder Format Error')
            print("Configuration - Folder may loss '/' to the end of the path")
            y_n = input("Do you want system add '/' to the end of path ? (Y/N)\n")
            if y_n.lower() in ('y', 'yes'):
                self.folder += '/'
            else:
                sys.exit()

        if not self.filename == 'all' and not self.filename[-4:] == '.pdf':
            Logger.get_log(self.logging).critical('Configuration - FileName Not End With .pdf ')
            print('Configuration - FileName Not End With \'.pdf\'')
            y_n = input("Do you want system add '.pdf' to the end of filename ? (Y/N)\n")
            if y_n.lower() in ('y', 'yes'):
                self.filename += '.pdf'
            else:
                sys.exit()

        if self.tit_choice not in (0, 1, 2, 3):
            Logger.get_log(self.logging).critical('Configuration - tit_choice Format Error ')
            while True:
                print('Configuration - tit_choice Format Error')
                tit_choice = input("Please press 0/1/2/3 to specify a tit_choice \n")
                if tit_choice in ('0', '1', '2', '3'):
                    # store as int, matching cp.getint (previously kept the raw string)
                    self.tit_choice = int(tit_choice)
                    break

        if self.text_level not in (1, 2):
            Logger.get_log(self.logging).critical('Configuration - text_level Format Error ')
            while True:
                print('Configuration - text_level Format Error ')
                text_level = input("Please press 1/2 to specify a text_level \n")
                if text_level in ('1', '2'):
                    self.text_level = int(text_level)
                    break

        if self.table_level not in (1, 2):
            Logger.get_log(self.logging).critical('Configuration - table_level Format Error ')
            while True:
                print('Configuration - table_level Format Error ')
                table_level = input("Please press 1/2 to specify a table_level \n")
                if table_level in ('1', '2'):
                    # BUG FIX: previously assigned to self.text_level (copy/paste
                    # error), leaving table_level unrepaired and clobbering
                    # text_level with a string.
                    self.table_level = int(table_level)
                    break
"[email protected]"
]
| |
f79103b6166bbcddf98f63d0c258951fb19b31eb | 28280d1c7ca06f89906e811f3b7311a5e8a0046b | /ecoz2py/__init__.py | 5bffc1a049e5d658a7b607a7b4e2c48e1360e361 | []
| no_license | mbari-org/ecoz2py | e5e96ba127a397c7d319a15ca13889f724943ba5 | 00d17b1696debc3aff7da37f0e4be316de70c3a7 | refs/heads/master | 2022-09-03T20:59:18.927539 | 2020-05-03T02:06:51 | 2020-05-03T02:06:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,691 | py | import os
from _ecoz2_extension import ffi
from _ecoz2_extension.lib import ecoz2_hmm_learn
from _ecoz2_extension.lib import ecoz2_prd_show_file
from _ecoz2_extension.lib import ecoz2_set_random_seed
from _ecoz2_extension.lib import ecoz2_version
from _ecoz2_extension.lib import ecoz2_vq_learn
def get_version():
    """Return the native ecoz2 library version (as bytes)."""
    raw = ecoz2_version()
    return ffi.string(raw)
def prd_show_file(filename,
                  show_reflections=False,
                  from_=-1,
                  to=-1,
                  ):
    """Display the contents of a predictor (.prd) file via the native library.

    Args:
        filename: path to the predictor file.
        show_reflections: if True, show reflection coefficients instead —
            TODO confirm exact semantics against the ecoz2 C API.
        from_: first index to show; -1 presumably means "from the start".
        to: last index to show; -1 presumably means "to the end".
    """
    ecoz2_prd_show_file(filename, show_reflections, from_, to)
def set_random_seed(seed):
    """Seed the native ecoz2 library's random number generator."""
    ecoz2_set_random_seed(seed)
def hmm_learn(N,
              sequence_filenames,
              model_type=3,
              hmm_epsilon=1.e-5,
              val_auto=0.3,
              max_iterations=-1,
              hmm_learn_callback=None
              ):
    """Train an HMM with the native ecoz2 library.

    Args:
        N: number of HMM states.
        sequence_filenames: paths of the training sequence files.
        model_type: model type code, passed straight through to the C API.
        hmm_epsilon: convergence threshold for training.
        val_auto: value passed to the C API — semantics defined by ecoz2.
        max_iterations: iteration limit; -1 presumably means unlimited —
            confirm against the ecoz2 C API.
        hmm_learn_callback: optional ``f(variable_name: str, value: float)``
            invoked by the C code as training progresses.
    """
    # Keep the individual C strings referenced from Python: the char*[] array
    # below only stores pointers, so without this list CFFI could free the
    # strings while the native code still uses them.
    c_sequence_filenames_keepalive = [ffi.new("char[]", _to_bytes(s)) for s in sequence_filenames]
    c_sequence_filenames = ffi.new("char *[]", c_sequence_filenames_keepalive)

    # for (i, c_sequence_filename) in enumerate(c_sequence_filenames):
    #     print('SEQ {} => {}'.format(i, ffi.string(c_sequence_filename)))

    @ffi.callback("void(char*, double)")
    def callback(c_variable, c_value):
        # Bridge the C callback into the optional Python callback.
        if hmm_learn_callback:
            variable = _to_str(ffi.string(c_variable))
            value = float(c_value)
            hmm_learn_callback(variable, value)

    ecoz2_hmm_learn(N,
                    model_type,
                    c_sequence_filenames,
                    len(c_sequence_filenames),
                    hmm_epsilon,
                    val_auto,
                    max_iterations,
                    callback
                    )
def vq_learn(prediction_order,
             predictor_filenames,
             codebook_class_name='_',
             epsilon=0.05,
             vq_learn_callback=None
             ):
    """Train a vector-quantization codebook with the native ecoz2 library.

    Args:
        prediction_order: LPC prediction order.
        predictor_filenames: paths of the predictor (.prd) training files.
        codebook_class_name: class name tag stored with the codebook.
        epsilon: convergence threshold.
        vq_learn_callback: optional ``f(m, avg_distortion, sigma, inertia)``
            invoked by the C code after each codebook size step.

    Returns:
        The integer result of ``ecoz2_vq_learn`` (C API status/result).
    """
    c_codebook_class_name = ffi.new("char []", _to_bytes(codebook_class_name))
    # Keep the individual C strings alive: the char*[] below stores only
    # pointers into these buffers.
    c_predictor_filenames_keepalive = [ffi.new("char[]", _to_bytes(s)) for s in predictor_filenames]
    c_predictor_filenames = ffi.new("char *[]", c_predictor_filenames_keepalive)

    @ffi.callback("void(int, double, double, double)")
    def callback(m, avg_distortion, sigma, inertia):
        # Bridge the C callback into the optional Python callback.
        if vq_learn_callback:
            vq_learn_callback(m, avg_distortion, sigma, inertia)

    return ecoz2_vq_learn(prediction_order,
                          epsilon,
                          c_codebook_class_name,
                          c_predictor_filenames,
                          len(c_predictor_filenames),
                          callback
                          )
def get_actual_filenames(filenames, file_ext):
    """
    Returns the given list of files but expanding any directories.
    """
    collected = []
    for entry in filenames:
        if os.path.isdir(entry):
            # Recurse into directories and keep every matching file found.
            collected.extend(list_files(entry, file_ext))
        elif os.path.isfile(entry) and entry.endswith(file_ext):
            collected.append(entry)
    return collected
def list_files(directory, file_ext):
    """
    Lists all files under the given directory and having the given extension.
    """
    collected = []
    for entry in os.listdir(directory):
        full = "{}/{}".format(directory, entry)
        if os.path.isdir(full):
            # Depth-first recursion into subdirectories.
            collected.extend(list_files(full, file_ext))
        elif os.path.isfile(full) and full.endswith(file_ext):
            collected.append(full)
    return collected
# ---------
def _to_bytes(s):
return s if isinstance(s, bytes) else str(s).encode("utf-8")
def _to_str(s):
return s if isinstance(s, str) else bytes(s).decode("utf-8")
| [
"[email protected]"
]
| |
bd37d6634f405523c79a877228689da80f242c6a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_89/46.py | 2b2b33f6462a2c18f37ba5fc20391f0621f9a50f | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,842 | py | #!/usr/bin/env python
# encoding: utf-8
"""
Waiters en LCM
"""
import sys, time, copy
from pdb import set_trace as DEBUG
def p(*s):
    # Debug helper: print the argument tuple to stderr (Python 2 print syntax).
    print >> sys.stderr, s
def gcd(a, b):
    """Greatest common divisor, via the iterative Euclidean algorithm."""
    while b != 0:
        remainder = a % b
        a = b
        b = remainder
    return a
def lcm(a, b):
    """Least common multiple of a and b."""
    g = gcd(a, b)
    return a * b // g
def lcmm(*args):
    """Least common multiple of all arguments (at least one required)."""
    result = args[0]
    for value in args[1:]:
        result = lcm(result, value)
    return result
def factors(n):
    """Prime factorization of n as {prime: exponent}, seeded with {1: 1}.

    The {1: 1} seed is intentional: callers rely on 1 appearing as a key.
    """
    fact = {1: 1}
    divisor = 2
    while divisor <= n:
        if n % divisor:
            divisor += 1
        else:
            n //= divisor
            fact[divisor] = fact.get(divisor, 0) + 1
    return fact
#problem specific functions
def parseInput(f):
    """Read one line from file-like f and parse it as the case's integer N."""
    line = f.readline()
    return int(line)
def main(N):
    """Solve one case for the given N.

    Computes the total prime-exponent count of lcm(1..N), minus the number of
    "first introductions" of each prime across 1..N (counted once per i that
    brings a prime not seen before).
    """
    if N == 1:
        return 0
    l = lcmm(*range(1, N + 1))
    f = factors(l)
    facts = {1: 1}
    maxturns = 0
    for i in range(1, N + 1):
        fact = factors(i)
        contribute = 0
        for k, v in fact.items():
            # Count primes first introduced by this i.
            if k not in facts:
                contribute += 1
            # Track the maximum exponent seen so far for each prime.
            if facts.get(k, 0) < v:
                facts[k] = v
        maxturns += contribute
    # Removed a large block of commented-out alternative logic (dead code).
    return sum(f.values()) - maxturns
if __name__ == "__main__":
    # Input file defaults to test.in; can be overridden on the command line.
    if len(sys.argv)==1:
        filename = 'test.in'
    else:
        filename = sys.argv[1]
    # Pre-load a primes list (currently unused by main; kept as in original).
    f = open('primes.txt')
    primes = f.read().split()
    primes = map(int, primes)
    f.close()
    #print primes
    # Standard Code Jam driver: first line is the case count, one int per case.
    f = open(filename)
    cases = int(f.readline())
    for case in range(cases):
        #p("Case #%i" % (case+1))
        args = parseInput(f)
        print "Case #%i: %s" % (case+1, main(args))
| [
"[email protected]"
]
| |
0fbaab7562dfc9e920f442142b34da9865161986 | 7fdff3ab45f5fef05cc76f97ee44e44779f87120 | /peerloan/migrations/0018_auto_20160912_1536.py | e45c005b5ff05f11df1b1b9a437414fdb3067bda | []
| no_license | Calvin66der/project_peerloan | 4a132c7464b21e75a80f091d44c389cbd10c2cc5 | 99a02843addbfcffec5c7d7a964f0b3347a03962 | refs/heads/master | 2021-01-12T07:45:00.811952 | 2016-12-20T08:44:42 | 2016-12-20T08:44:42 | 77,006,043 | 0 | 0 | null | 2016-12-21T01:47:48 | 2016-12-21T01:47:48 | null | UTF-8 | Python | false | false | 425 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: redefines
    # peerloan.LoanSchedule.received_amount as a FloatField with default 0.

    dependencies = [
        ('peerloan', '0017_borrowrequest_overpay_amount'),
    ]

    operations = [
        migrations.AlterField(
            model_name='loanschedule',
            name='received_amount',
            field=models.FloatField(default=0),
        ),
    ]
| [
"[email protected]"
]
| |
4f1cf1347b78f2c9ecb4170992e0d6cc1810de58 | 16dbe8b1be0cd360ac1062072430f1f2b7d95bd6 | /FlightPlanner/BasicGNSS/ParameterDlgs/DlgCaculateWaypoint.py | a6bc525d80735a610431231967d854ce93aaae7a | []
| no_license | developer124320/FlightPlanner | 4a0d9a450ddddede95512ad76437db2906154536 | f1e4c762c360c0a00022ae6fa028fc1aee2a467d | refs/heads/master | 2022-08-25T14:00:57.495037 | 2020-05-27T01:26:27 | 2020-05-27T01:26:27 | 267,186,057 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,721 | py | # -*- coding: utf-8 -*-
from PyQt4.QtGui import QWidget, QFrame, QVBoxLayout, QGroupBox, QSizePolicy, QHBoxLayout, \
QLabel, QFont, QLineEdit, QToolButton, QIcon, QPixmap, QDialog, QDialogButtonBox, QMessageBox
from PyQt4.QtCore import QSize, QSizeF, Qt, SIGNAL, QObject
from FlightPlanner.captureCoordinateTool import CaptureCoordinateTool
import define
from qgis.gui import QgsMapToolPan
from qgis.core import QgsPoint
from FlightPlanner.types import RnavCommonWaypoint, DistanceUnits, AngleUnits, AircraftSpeedCategory
from FlightPlanner.QgisHelper import Point3D
from FlightPlanner.helpers import Unit, MathHelper, Distance
from FlightPlanner.BasicGNSS.rnavWaypoints import RnavWaypoints
from FlightPlanner.messages import Messages
from FlightPlanner.MeasureTool import MeasureTool
from FlightPlanner.CaptureBearingTool import CaptureBearingTool
from FlightPlanner.validations import Validations
import math
class CalcDlg(QDialog):
def __init__(self, parent, rnavType, category, position_0, position_1, position_List, flagStr = None):
QDialog.__init__(self, parent)
self.flagStrName = flagStr
# self.resize(326, 310)
self.verticalLayout = QVBoxLayout(self)
self.verticalLayout.setSpacing(6)
self.verticalLayout.setMargin(3)
self.verticalLayout.setObjectName(("verticalLayout"))
self.groupBox = QGroupBox(self)
self.groupBox.setTitle((""))
self.groupBox.setObjectName(("groupBox"))
self.verticalLayout_2 = QVBoxLayout(self.groupBox)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setMargin(3)
self.verticalLayout_2.setObjectName(("verticalLayout_2"))
self.groupBox_5 = QGroupBox(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_5.sizePolicy().hasHeightForWidth())
self.groupBox_5.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily(("Arial"))
self.groupBox_5.setFont(font)
self.groupBox_5.setObjectName(("groupBox_5"))
self.horizontalLayout_19 = QHBoxLayout(self.groupBox_5)
self.horizontalLayout_19.setSpacing(0)
self.horizontalLayout_19.setMargin(0)
self.horizontalLayout_19.setObjectName(("horizontalLayout_19"))
self.frame_18 = QFrame(self.groupBox_5)
self.frame_18.setFrameShape(QFrame.StyledPanel)
self.frame_18.setFrameShadow(QFrame.Raised)
self.frame_18.setObjectName(("frame_18"))
self.verticalLayout_13 = QVBoxLayout(self.frame_18)
self.verticalLayout_13.setSpacing(0)
self.verticalLayout_13.setContentsMargins(-1, -1, 0, -1)
self.verticalLayout_13.setObjectName(("verticalLayout_13"))
self.frame_19 = QFrame(self.frame_18)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_19.sizePolicy().hasHeightForWidth())
self.frame_19.setSizePolicy(sizePolicy)
self.frame_19.setFrameShape(QFrame.StyledPanel)
self.frame_19.setFrameShadow(QFrame.Raised)
self.frame_19.setObjectName(("frame_19"))
self.horizontalLayout_20 = QHBoxLayout(self.frame_19)
self.horizontalLayout_20.setSpacing(0)
self.horizontalLayout_20.setMargin(0)
self.horizontalLayout_20.setObjectName(("horizontalLayout_20"))
self.label_9 = QLabel(self.frame_19)
self.label_9.setMaximumSize(QSize(60, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.label_9.setFont(font)
self.label_9.setObjectName(("label_9"))
self.horizontalLayout_20.addWidget(self.label_9)
self.txtTHR_X = QLineEdit(self.frame_19)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtTHR_X.sizePolicy().hasHeightForWidth())
self.txtTHR_X.setSizePolicy(sizePolicy)
self.txtTHR_X.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtTHR_X.setFont(font)
self.txtTHR_X.setObjectName(("txtTHR_X"))
self.horizontalLayout_20.addWidget(self.txtTHR_X)
self.verticalLayout_13.addWidget(self.frame_19)
self.frame_20 = QFrame(self.frame_18)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_20.sizePolicy().hasHeightForWidth())
self.frame_20.setSizePolicy(sizePolicy)
self.frame_20.setFrameShape(QFrame.StyledPanel)
self.frame_20.setFrameShadow(QFrame.Raised)
self.frame_20.setObjectName(("frame_20"))
self.horizontalLayout_21 = QHBoxLayout(self.frame_20)
self.horizontalLayout_21.setSpacing(0)
self.horizontalLayout_21.setMargin(0)
self.horizontalLayout_21.setObjectName(("horizontalLayout_21"))
self.label_10 = QLabel(self.frame_20)
self.label_10.setMaximumSize(QSize(60, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.label_10.setFont(font)
self.label_10.setObjectName(("label_10"))
self.horizontalLayout_21.addWidget(self.label_10)
self.txtTHR_Y = QLineEdit(self.frame_20)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtTHR_Y.sizePolicy().hasHeightForWidth())
self.txtTHR_Y.setSizePolicy(sizePolicy)
self.txtTHR_Y.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtTHR_Y.setFont(font)
self.txtTHR_Y.setObjectName(("txtTHR_Y"))
self.horizontalLayout_21.addWidget(self.txtTHR_Y)
self.verticalLayout_13.addWidget(self.frame_20)
self.horizontalLayout_19.addWidget(self.frame_18)
self.frame_21 = QFrame(self.groupBox_5)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_21.sizePolicy().hasHeightForWidth())
self.frame_21.setSizePolicy(sizePolicy)
self.frame_21.setMaximumSize(QSize(30, 70))
self.frame_21.setFrameShape(QFrame.StyledPanel)
self.frame_21.setFrameShadow(QFrame.Raised)
self.frame_21.setObjectName(("frame_21"))
self.verticalLayout_14 = QVBoxLayout(self.frame_21)
self.verticalLayout_14.setSpacing(0)
self.verticalLayout_14.setMargin(0)
self.verticalLayout_14.setObjectName(("verticalLayout_14"))
self.btnCaptureRunwayTHR = QToolButton(self.frame_21)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnCaptureRunwayTHR.sizePolicy().hasHeightForWidth())
self.btnCaptureRunwayTHR.setSizePolicy(sizePolicy)
self.btnCaptureRunwayTHR.setMaximumSize(QSize(16777215, 47))
icon = QIcon()
icon.addPixmap(QPixmap(("Resource/coordinate_capture.png")), QIcon.Normal, QIcon.Off)
self.btnCaptureRunwayTHR.setIcon(icon)
self.btnCaptureRunwayTHR.setObjectName(("btnCaptureRunwayTHR"))
self.verticalLayout_14.addWidget(self.btnCaptureRunwayTHR)
# self.btnToolTHR = QToolButton(self.frame_21)
# sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
# sizePolicy.setHorizontalStretch(0)
# sizePolicy.setVerticalStretch(0)
# sizePolicy.setHeightForWidth(self.btnToolTHR.sizePolicy().hasHeightForWidth())
# self.btnToolTHR.setSizePolicy(sizePolicy)
# self.btnToolTHR.setMaximumSize(QSize(16777215, 20))
# icon1 = QIcon()
# icon1.addPixmap(QPixmap(("Resource/sort2.png")), QIcon.Normal, QIcon.Off)
# self.btnToolTHR.setIcon(icon1)
# self.btnToolTHR.setObjectName(("btnToolTHR"))
# self.verticalLayout_14.addWidget(self.btnToolTHR)
self.horizontalLayout_19.addWidget(self.frame_21)
self.verticalLayout_2.addWidget(self.groupBox_5)
self.groupBox_4 = QGroupBox(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_4.sizePolicy().hasHeightForWidth())
self.groupBox_4.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily(("Arial"))
self.groupBox_4.setFont(font)
self.groupBox_4.setObjectName(("groupBox_4"))
self.horizontalLayout_16 = QHBoxLayout(self.groupBox_4)
self.horizontalLayout_16.setSpacing(0)
self.horizontalLayout_16.setMargin(0)
self.horizontalLayout_16.setObjectName(("horizontalLayout_16"))
self.frame_14 = QFrame(self.groupBox_4)
self.frame_14.setFrameShape(QFrame.StyledPanel)
self.frame_14.setFrameShadow(QFrame.Raised)
self.frame_14.setObjectName(("frame_14"))
self.verticalLayout_11 = QVBoxLayout(self.frame_14)
self.verticalLayout_11.setSpacing(0)
self.verticalLayout_11.setContentsMargins(-1, -1, 0, -1)
self.verticalLayout_11.setObjectName(("verticalLayout_11"))
self.frame_15 = QFrame(self.frame_14)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_15.sizePolicy().hasHeightForWidth())
self.frame_15.setSizePolicy(sizePolicy)
self.frame_15.setFrameShape(QFrame.StyledPanel)
self.frame_15.setFrameShadow(QFrame.Raised)
self.frame_15.setObjectName(("frame_15"))
self.horizontalLayout_17 = QHBoxLayout(self.frame_15)
self.horizontalLayout_17.setSpacing(0)
self.horizontalLayout_17.setMargin(0)
self.horizontalLayout_17.setObjectName(("horizontalLayout_17"))
self.label_7 = QLabel(self.frame_15)
self.label_7.setMaximumSize(QSize(60, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.label_7.setFont(font)
self.label_7.setObjectName(("label_7"))
self.horizontalLayout_17.addWidget(self.label_7)
self.txtEND_X = QLineEdit(self.frame_15)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtEND_X.sizePolicy().hasHeightForWidth())
self.txtEND_X.setSizePolicy(sizePolicy)
self.txtEND_X.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtEND_X.setFont(font)
self.txtEND_X.setObjectName(("txtEND_X"))
self.horizontalLayout_17.addWidget(self.txtEND_X)
self.verticalLayout_11.addWidget(self.frame_15)
self.frame_16 = QFrame(self.frame_14)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_16.sizePolicy().hasHeightForWidth())
self.frame_16.setSizePolicy(sizePolicy)
self.frame_16.setFrameShape(QFrame.StyledPanel)
self.frame_16.setFrameShadow(QFrame.Raised)
self.frame_16.setObjectName(("frame_16"))
self.horizontalLayout_18 = QHBoxLayout(self.frame_16)
self.horizontalLayout_18.setSpacing(0)
self.horizontalLayout_18.setMargin(0)
self.horizontalLayout_18.setObjectName(("horizontalLayout_18"))
self.label_8 = QLabel(self.frame_16)
self.label_8.setMaximumSize(QSize(60, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.label_8.setFont(font)
self.label_8.setObjectName(("label_8"))
self.horizontalLayout_18.addWidget(self.label_8)
self.txtEND_Y = QLineEdit(self.frame_16)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtEND_Y.sizePolicy().hasHeightForWidth())
self.txtEND_Y.setSizePolicy(sizePolicy)
self.txtEND_Y.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtEND_Y.setFont(font)
self.txtEND_Y.setObjectName(("txtEND_Y"))
self.horizontalLayout_18.addWidget(self.txtEND_Y)
self.verticalLayout_11.addWidget(self.frame_16)
self.horizontalLayout_16.addWidget(self.frame_14)
self.frame_17 = QFrame(self.groupBox_4)
self.frame_17.setMaximumSize(QSize(30, 16777215))
self.frame_17.setFrameShape(QFrame.StyledPanel)
self.frame_17.setFrameShadow(QFrame.Raised)
self.frame_17.setObjectName(("frame_17"))
self.verticalLayout_12 = QVBoxLayout(self.frame_17)
self.verticalLayout_12.setSpacing(0)
self.verticalLayout_12.setMargin(0)
self.verticalLayout_12.setObjectName(("verticalLayout_12"))
self.btnCaptureRunwayEND = QToolButton(self.frame_17)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnCaptureRunwayEND.sizePolicy().hasHeightForWidth())
self.btnCaptureRunwayEND.setSizePolicy(sizePolicy)
self.btnCaptureRunwayEND.setMaximumSize(QSize(16777215, 47))
self.btnCaptureRunwayEND.setIcon(icon)
self.btnCaptureRunwayEND.setObjectName(("btnCaptureRunwayEND"))
self.verticalLayout_12.addWidget(self.btnCaptureRunwayEND)
# self.btnToolEND = QToolButton(self.frame_17)
# sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
# sizePolicy.setHorizontalStretch(0)
# sizePolicy.setVerticalStretch(0)
# sizePolicy.setHeightForWidth(self.btnToolEND.sizePolicy().hasHeightForWidth())
# self.btnToolEND.setSizePolicy(sizePolicy)
# self.btnToolEND.setMaximumSize(QSize(16777215, 20))
# self.btnToolEND.setIcon(icon1)
# self.btnToolEND.setObjectName(("btnToolEND"))
# self.verticalLayout_12.addWidget(self.btnToolEND)
self.horizontalLayout_16.addWidget(self.frame_17)
self.verticalLayout_2.addWidget(self.groupBox_4)
self.lbl1 = QLabel(self.groupBox)
font = QFont()
font.setFamily(("Arial"))
self.lbl1.setFont(font)
self.lbl1.setText((""))
self.lbl1.setAlignment(Qt.AlignCenter)
self.lbl1.setWordWrap(False)
self.lbl1.setMargin(0)
self.lbl1.setObjectName(("lbl1"))
self.verticalLayout_2.addWidget(self.lbl1)
self.lbl2 = QLabel(self.groupBox)
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.lbl2.setFont(font)
self.lbl2.setText((""))
self.lbl2.setAlignment(Qt.AlignCenter)
self.lbl2.setObjectName(("lbl2"))
self.verticalLayout_2.addWidget(self.lbl2)
self.frame_22 = QFrame(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_22.sizePolicy().hasHeightForWidth())
self.frame_22.setSizePolicy(sizePolicy)
self.frame_22.setFrameShape(QFrame.StyledPanel)
self.frame_22.setFrameShadow(QFrame.Raised)
self.frame_22.setObjectName(("frame_22"))
self.horizontalLayout_22 = QHBoxLayout(self.frame_22)
self.horizontalLayout_22.setSpacing(0)
self.horizontalLayout_22.setMargin(0)
self.horizontalLayout_22.setObjectName(("horizontalLayout_22"))
self.label_11 = QLabel(self.frame_22)
self.label_11.setMinimumSize(QSize(170, 0))
self.label_11.setMaximumSize(QSize(180, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.label_11.setFont(font)
self.label_11.setObjectName(("label_11"))
self.horizontalLayout_22.addWidget(self.label_11)
self.txtForm = QLineEdit(self.frame_22)
self.txtForm.setEnabled(False)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtForm.sizePolicy().hasHeightForWidth())
self.txtForm.setSizePolicy(sizePolicy)
self.txtForm.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtForm.setFont(font)
self.txtForm.setObjectName(("txtForm"))
self.horizontalLayout_22.addWidget(self.txtForm)
self.verticalLayout_2.addWidget(self.frame_22)
self.frame_23 = QFrame(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_23.sizePolicy().hasHeightForWidth())
self.frame_23.setSizePolicy(sizePolicy)
self.frame_23.setFrameShape(QFrame.StyledPanel)
self.frame_23.setFrameShadow(QFrame.Raised)
self.frame_23.setObjectName(("frame_23"))
self.horizontalLayout_23 = QHBoxLayout(self.frame_23)
self.horizontalLayout_23.setSpacing(0)
self.horizontalLayout_23.setMargin(0)
self.horizontalLayout_23.setObjectName(("horizontalLayout_23"))
self.label_12 = QLabel(self.frame_23)
self.label_12.setMinimumSize(QSize(170, 0))
self.label_12.setMaximumSize(QSize(180, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.label_12.setFont(font)
self.label_12.setObjectName(("label_12"))
self.horizontalLayout_23.addWidget(self.label_12)
self.txtBearing = QLineEdit(self.frame_23)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtBearing.sizePolicy().hasHeightForWidth())
self.txtBearing.setSizePolicy(sizePolicy)
self.txtBearing.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtBearing.setFont(font)
self.txtBearing.setObjectName(("txtBearing"))
self.horizontalLayout_23.addWidget(self.txtBearing)
self.btnCaptureBearing = QToolButton(self.frame_23)
sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnCaptureBearing.sizePolicy().hasHeightForWidth())
self.btnCaptureBearing.setSizePolicy(sizePolicy)
self.btnCaptureBearing.setMaximumSize(QSize(16777215, 25))
self.btnCaptureBearing.setStyleSheet((""))
self.btnCaptureBearing.setIcon(icon)
self.btnCaptureBearing.setObjectName(("btnCaptureBearing"))
self.horizontalLayout_23.addWidget(self.btnCaptureBearing)
self.verticalLayout_2.addWidget(self.frame_23)
self.frame_24 = QFrame(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_24.sizePolicy().hasHeightForWidth())
self.frame_24.setSizePolicy(sizePolicy)
self.frame_24.setFrameShape(QFrame.StyledPanel)
self.frame_24.setFrameShadow(QFrame.Raised)
self.frame_24.setObjectName(("frame_24"))
self.horizontalLayout_24 = QHBoxLayout(self.frame_24)
self.horizontalLayout_24.setSpacing(0)
self.horizontalLayout_24.setMargin(0)
self.horizontalLayout_24.setObjectName(("horizontalLayout_24"))
self.lblDistance = QLabel(self.frame_24)
self.lblDistance.setMinimumSize(QSize(170, 0))
self.lblDistance.setMaximumSize(QSize(180, 16777215))
font = QFont()
font.setFamily(("Arial"))
font.setBold(False)
font.setWeight(50)
self.lblDistance.setFont(font)
self.lblDistance.setObjectName(("lblDistance"))
self.horizontalLayout_24.addWidget(self.lblDistance)
self.txtDistance = QLineEdit(self.frame_24)
self.txtDistance.setEnabled(False)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.txtDistance.sizePolicy().hasHeightForWidth())
self.txtDistance.setSizePolicy(sizePolicy)
self.txtDistance.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily(("Arial"))
self.txtDistance.setFont(font)
self.txtDistance.setObjectName(("txtDistance"))
self.horizontalLayout_24.addWidget(self.txtDistance)
self.btnCaptureDistance = QToolButton(self.frame_24)
sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnCaptureDistance.sizePolicy().hasHeightForWidth())
self.btnCaptureDistance.setSizePolicy(sizePolicy)
self.btnCaptureDistance.setMaximumSize(QSize(16777215, 23))
self.btnCaptureDistance.setStyleSheet((""))
self.btnCaptureDistance.setIcon(icon)
self.btnCaptureDistance.setObjectName(("btnCaptureDistance"))
self.horizontalLayout_24.addWidget(self.btnCaptureDistance)
self.verticalLayout_2.addWidget(self.frame_24)
self.verticalLayout.addWidget(self.groupBox)
self.buttonBox = QDialogButtonBox(self)
self.buttonBox.setOrientation(Qt.Horizontal)
self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
self.buttonBox.setObjectName(("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.btnCaptureDistance.clicked.connect(self.method_9)
self.btnCaptureBearing.clicked.connect(self.method_8)
self.txtEND_X.textChanged.connect(self.method_4)
self.txtEND_Y.textChanged.connect(self.method_4)
self.txtTHR_X.textChanged.connect(self.method_4)
self.txtTHR_Y.textChanged.connect(self.method_4)
self.type = rnavType
self.category = category
self.resultPosionList = position_List
self.MinBearing2 = 0
self.MaxBearing2= 0
self.waypoint = None
self.distanceMeasureTool = MeasureTool(define._canvas, self.txtDistance, DistanceUnits.NM)
self.bearingTool = CaptureBearingTool(define._canvas, self.txtBearing)
self.CaptureTHRCoordTool = CaptureCoordinateTool(define._canvas, self.txtTHR_X, self.txtTHR_Y)
self.CaptureTHRCoordTool.rubberBandClick.setColor(Qt.green)
self.CaptureENDCoordTool = CaptureCoordinateTool(define._canvas, self.txtEND_X, self.txtEND_Y)
self.CaptureENDCoordTool.rubberBandClick.setColor(Qt.blue)
if rnavType == RnavCommonWaypoint.FAWP or rnavType == RnavCommonWaypoint.MAWP:
self.from1 = position_0
self.resize(326, 310)
if position_List[0] != None:
self.setThrPosition(position_List[0].x(),position_List[0].y())
self.CaptureTHRCoordTool.rubberBandClick.addPoint(QgsPoint(position_List[0].x(),position_List[0].y()))
# self.CaptureTHRCoordTool.rubberBandClick.show()
if position_List[1] != None:
self.setEndPosition(position_List[1].x(),position_List[1].y())
self.CaptureENDCoordTool.rubberBandClick.addPoint(QgsPoint(position_List[1].x(),position_List[1].y()))
# self.setWaypoint(position_List[2])
else:
self.from1 = position_1
num = RnavWaypoints.smethod_0(position_0, position_1)
self.MinBearing = RnavWaypoints.smethod_7(rnavType, category, num)
self.MaxBearing= RnavWaypoints.smethod_8(rnavType, category, num)
self.MinDistance = RnavWaypoints.smethod_4(rnavType, category)
if flagStr == "Y-Bar":
if (rnavType == RnavCommonWaypoint.IAWP1):
self.setBearing(self.MaxBearing)
elif (rnavType != RnavCommonWaypoint.IAWP3):
self.setBearing(num)
else:
self.setBearing(self.MinBearing)
else:
if (rnavType == RnavCommonWaypoint.IAWP1):
self.setBearing(self.MinBearing)
elif (rnavType != RnavCommonWaypoint.IAWP3):
self.setBearing(num)
else:
self.setBearing(self.MaxBearing)
# if self.txtDistance.isEnabled():
# self.setDistance(RnavWaypoints.smethod_6(rnavType, category).NauticalMiles)
# self.setWaypoint(position_List.pop(0))
self.method_4()
self.retranslateUi()
QObject.connect(self.buttonBox, SIGNAL(("accepted()")), self.btnCalculate_Click)
QObject.connect(self.buttonBox, SIGNAL(("rejected()")), self.reject)
# QMetaObject.connectSlotsByName(Dialog)
# self.btnToolEND.clicked.connect(self.removeEnd)
# self.btnToolTHR.clicked.connect(self.removeThr)
self.btnCaptureRunwayTHR.clicked.connect(self.captureTHR)
self.btnCaptureRunwayEND.clicked.connect(self.captureEND)
def retranslateUi(self):
self.setWindowTitle("CaculaterDlg")
self.groupBox_5.setTitle("Runway THR")
self.label_9.setText("X:")
self.label_10.setText("Y:")
self.btnCaptureRunwayTHR.setText("...")
# self.btnToolTHR.setText("...")
self.groupBox_4.setTitle("Runway END")
self.label_7.setText("X:")
self.label_8.setText("Y:")
self.btnCaptureRunwayEND.setText("...")
# self.btnToolEND.setText("...")
self.label_11.setText("From:")
self.txtForm.setText("FAWP")
self.label_12.setText(unicode("Back Azimuth (°) :", "utf-8"))
# self.txtBearing.setText("188.25")
self.btnCaptureBearing.setText("...")
self.lblDistance.setText("Distance From RWY THR (nm):")
self.txtDistance.setText("5")
self.btnCaptureDistance.setText("...")
def captureTHR(self):
define._canvas.setMapTool(self.CaptureTHRCoordTool)
def captureEND(self):
define._canvas.setMapTool(self.CaptureENDCoordTool)
def close(self):
scene = define._canvas.scene()
scene.removeItem(self.CaptureTHRCoordTool.rubberBandClick)
scene.removeItem(self.CaptureENDCoordTool.rubberBandClick)
scene.removeItem(self.bearingTool.rubberBand)
scene.removeItem(self.distanceMeasureTool.rubberBand)
# self.CaptureTHRCoordTool.rubberBand.hide()
# self.CaptureENDCoordTool.rubberBand.hide()
define._canvas.setMapTool(QgsMapToolPan(define._canvas))
# self.reject()
def reject(self):
self.close()
QDialog.reject(self)
def getThrPoint3D(self):
if self.txtTHR_X.text() !="" and self.txtTHR_Y.text() !="":
try:
x = float(self.txtTHR_X.text())
except ValueError:
x = 0
try:
y = float(self.txtTHR_Y.text())
except ValueError:
y = 0
return Point3D(x, y, 0)
else:
return None
def getEndPoint3D(self):
if self.txtEND_X.text() !="" and self.txtEND_Y.text() !="":
try:
x = float(self.txtEND_X.text())
except ValueError:
x = 0
try:
y = float(self.txtEND_Y.text())
except ValueError:
y = 0
return Point3D(x, y, 0)
else:
return None
def setEndPosition(self, x, y):
self.txtEND_X.setText(str(x))
self.txtEND_Y.setText(str(y))
def setThrPosition(self, x, y):
self.txtTHR_X.setText(str(x))
self.txtTHR_Y.setText(str(y))
    def getWaypoint(self):
        """Compute the waypoint position from the dialog's bearing/distance fields.

        Three cases:
        * FAWP  -- projects from the point 1400 m along the runway
          (``self.pos1400m``) using a trigonometric offset correction
          (the 0.7559... constants presumably encode a fixed lateral offset
          in NM -- TODO confirm against the RNAV design rules).
        * other non-MAWP types -- a simple bearing + distance projection
          from ``self.from1``.
        * MAWP -- intersects the bearing line from ``self.from1`` with a
          line at a fixed angle off the runway-THR point.

        Raises UserWarning when the MAWP intersection cannot be computed.
        """
        if (self.type == RnavCommonWaypoint.FAWP):
            nauticalMiles = float(self.txtDistance.text())
            value = float(self.txtBearing.text())
            # Angular difference between reverse runway bearing and the entered
            # bearing, normalized to [0, 180].
            num1 = math.fabs(self.rethr - value)
            if (num1 > 180):
                num1 = 360 - num1
            num2 = math.sin(Unit.smethod_0(num1)) * 0.7559395
            num3 = Unit.smethod_1(math.asin(num2 / nauticalMiles))
            num4 = math.cos(Unit.smethod_0(num1)) * 0.755939525
            num5 = math.cos(Unit.smethod_0(num3)) * nauticalMiles
            return RnavWaypoints.smethod_3(self.pos1400m, float(self.txtBearing.text()), Distance(math.fabs(num5 - num4), DistanceUnits.NM))
        if (self.type != RnavCommonWaypoint.MAWP):
            # Simple projection: from1 + bearing + distance (NM).
            return RnavWaypoints.smethod_3(self.from1, float(self.txtBearing.text()), Distance(float(self.txtDistance.text()), DistanceUnits.NM))
        # MAWP: choose the cut angle off the THR (90 deg, or 70 deg for Y-Bar
        # when the bearing lies on the near side of the reverse runway bearing).
        angle = 90
        if (float(self.txtBearing.text()) > self.thrre or float(self.txtBearing.text()) - self.thrre >= 90):
            if self.flagStrName == "Y-Bar":
                angle = 70
            num = self.rethr - angle
            if (num < 0):
                num = num + 360
        else:
            num = self.rethr + angle
            if (num > 360):
                num = num - 360
        point3d1 = self.from1
        point3d2 = self.getThrPoint3D()
        # Intersect the bearing line from from1 with the cut line from the THR.
        point3d = MathHelper.getIntersectionPoint(point3d1, RnavWaypoints.smethod_3(self.from1, float(self.txtBearing.text()), Distance(1000)), point3d2, RnavWaypoints.smethod_3(self.getThrPoint3D(), num, Distance(1000)))
        if point3d == None:
            raise UserWarning, Messages.ERR_FAILED_TO_CALCULATE_INTERSECTION_POINT
        return RnavWaypoints.smethod_3(self.getThrPoint3D(), num, Distance(MathHelper.calcDistance(point3d2, point3d)))
def setWaypoint(self, value):
self.waypoint = value
if self.from1 != None and self.waypoint != None:
# self.setBearing(RnavWaypoints.smethod_0(self.from1, value))
# if self.txtDistance.isEnabled():
# print RnavWaypoints.smethod_2(self.from1, value).NauticalMiles
# print RnavWaypoints.smethod_2(self.from1, value).NauticalMiles
self.setDistance(RnavWaypoints.smethod_2(self.from1, value).NauticalMiles)
def setDistance(self, value):
self.txtDistance.setText("%i"%round(value))
def setBearing(self, value):
self.txtBearing.setText(str(value))
    def btnCalculate_Click(self):
        """Validate the bearing/distance inputs, compute the waypoint and push
        the result back into the parent dialog's group box, parameter list and
        map annotation for the current waypoint type.

        Raises UserWarning when the bearing is outside the acceptable range(s)
        or the distance is below the minimum (with 100 m tolerance).
        """
        # try:
        if self.type == RnavCommonWaypoint.FAWP or self.type == RnavCommonWaypoint.MAWP:
            # FAWP/MAWP need both runway endpoints before anything can be computed.
            if self.getThrPoint3D() == None or self.getEndPoint3D() == None:
                return
        # Primary range check; MAWP may have a secondary acceptable range
        # (MinBearing2/MaxBearing2) computed by method_4.
        if not MathHelper.smethod_112(float(self.txtBearing.text()), self.MinBearing, self.MaxBearing, AngleUnits.Degrees):
            if self.type != RnavCommonWaypoint.MAWP or MathHelper.smethod_96(self.MinBearing2) or MathHelper.smethod_96(self.MaxBearing2):
                raise UserWarning, Messages.VALUE_NOT_WITHIN_ACCEPTABLE_RANGE
            elif not MathHelper.smethod_106(float(self.txtBearing.text()), self.MinBearing2, self.MaxBearing2):
                raise UserWarning, Messages.VALUE_NOT_WITHIN_ACCEPTABLE_RANGE
        # 100 m slack below the minimum distance is tolerated.
        if self.txtDistance.isEnabled() and Distance(float(self.txtDistance.text()),DistanceUnits.NM).Metres < (self.MinDistance.Metres - 100):
            raise UserWarning, Validations.VALUE_CANNOT_BE_SMALLER_THAN%self.MinDistance.NauticalMiles
        wayPoint = self.getWaypoint()
        # Dispatch on waypoint type: update position, the (bearing, distance)
        # entry at the type's slot in parameterCalcList, and the annotation.
        if self.type == RnavCommonWaypoint.FAWP or self.type == RnavCommonWaypoint.MAWP:
            if self.type == RnavCommonWaypoint.FAWP:
                self.parent().gbFAWP.setPosition( wayPoint.x(), wayPoint.y())
                if len(self.parent().parameterCalcList) > 0:
                    self.parent().parameterCalcList.pop(0)
                self.parent().parameterCalcList.insert(0,(self.txtBearing.text(), self.txtDistance.text()))
                self.parent().annotationFAWP.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
            else:
                self.parent().gbMAWP.setPosition( wayPoint.x(), wayPoint.y())
                if len(self.parent().parameterCalcList) > 1:
                    self.parent().parameterCalcList.pop(1)
                self.parent().parameterCalcList.insert(1,(self.txtBearing.text(), self.txtDistance.text()))
                self.parent().annotationMAWP.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
            # FAWP/MAWP also push the runway endpoints back to the parent.
            self.parent().RwyTHR = self.getThrPoint3D()
            self.parent().RwyEND = self.getEndPoint3D()
        elif self.type == RnavCommonWaypoint.MAHWP:
            self.parent().gbMAHWP.setPosition( wayPoint.x(), wayPoint.y())
            if len(self.parent().parameterCalcList) > 2:
                self.parent().parameterCalcList.pop(2)
            self.parent().parameterCalcList.insert(2,(self.txtBearing.text(), self.txtDistance.text()))
            self.parent().annotationMAHWP.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
        elif self.type == RnavCommonWaypoint.IWP:
            self.parent().gbIWP.setPosition( wayPoint.x(), wayPoint.y())
            if len(self.parent().parameterCalcList) > 3:
                self.parent().parameterCalcList.pop(3)
            self.parent().parameterCalcList.insert(3,(self.txtBearing.text(), self.txtDistance.text()))
            self.parent().annotationIWP.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
        elif self.type == RnavCommonWaypoint.IAWP1:
            self.parent().gbIAWP1.setPosition( wayPoint.x(), wayPoint.y())
            if len(self.parent().parameterCalcList) > 4:
                self.parent().parameterCalcList.pop(4)
            self.parent().parameterCalcList.insert(4,(self.txtBearing.text(), self.txtDistance.text()))
            self.parent().annotationIAWP1.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
        elif self.type == RnavCommonWaypoint.IAWP2:
            self.parent().gbIAWP2.setPosition( wayPoint.x(), wayPoint.y())
            if len(self.parent().parameterCalcList) > 5:
                self.parent().parameterCalcList.pop(5)
            self.parent().parameterCalcList.insert(5,(self.txtBearing.text(), self.txtDistance.text()))
            self.parent().annotationIAWP2.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
        elif self.type == RnavCommonWaypoint.IAWP3:
            self.parent().gbIAWP3.setPosition( wayPoint.x(), wayPoint.y())
            if len(self.parent().parameterCalcList) > 6:
                self.parent().parameterCalcList.pop(6)
            self.parent().parameterCalcList.insert(6,(self.txtBearing.text(), self.txtDistance.text()))
            self.parent().annotationIAWP3.setMapPosition(QgsPoint(wayPoint.x(), wayPoint.y()))
        self.close()
        QDialog.accept(self)
        # except UserWarning as e:
        #     pass
        # # QMessageBox.warning(self, "warning", e.message)
def method_9(self):
# self.distanceMeasureTool = MeasureTool(define._canvas, self.txtDistance, DistanceUnits.NM)
define._canvas.setMapTool(self.distanceMeasureTool)
def method_8(self):
# self.bearingTool = CaptureBearingTool(define._canvas, self.txtBearing)
define._canvas.setMapTool(self.bearingTool)
    def method_4(self):
        """Recompute the acceptable bearing range(s) and minimum distance from
        the current runway THR/END fields, and refresh the hint labels.

        Triggered whenever a runway coordinate field changes.  For FAWP a
        single range around the reverse runway bearing is used; for MAWP up
        to two disjoint ranges (MinBearing/MaxBearing and the optional
        MinBearing2/MaxBearing2) are derived from the geometry of the
        missed-approach point relative to ``from1``.
        """
        num = None
        num1 = None
        num2 = None
        num3 = None
        num4 = None
        num5 = None
        num6 = None
        num7 = None
        num8 = None
        num9 = None
        self.lbl1.setText(Validations.PLEASE_ENTER_VALID_RUNWAY_POSITIONS)
        self.lbl2.setText(" ")
        if (self.type == RnavCommonWaypoint.FAWP):
            position = self.getThrPoint3D()
            position1 = self.getEndPoint3D()
            if position is None or position1 is None:
                # Runway not fully specified yet -- clear and wait for input.
                self.txtBearing.setText("")
                # self.txtDistance.setText("")
                return
            # thrre: THR->END bearing; rethr: END->THR (reverse) bearing.
            self.thrre = RnavWaypoints.smethod_0(position, position1)
            self.rethr = RnavWaypoints.smethod_0(position1, position)
            self.pos1400m = RnavWaypoints.smethod_3(position, self.rethr, Distance(1400))
            self.MinDistance = RnavWaypoints.smethod_4(self.type, self.category)
            # if self.txtDistance.isEnabled():
            #     self.setDistance(RnavWaypoints.smethod_6(self.type, self.category).NauticalMiles)
            self.setBearing(self.rethr)
            self.MinBearing = RnavWaypoints.smethod_7(self.type, self.category, self.rethr)
            self.MaxBearing = RnavWaypoints.smethod_8(self.type, self.category, self.rethr)
            # if self.waypoint is not None:
            #     self.setBearing(round(RnavWaypoints.smethod_0(self.pos1400m, self.waypoint), 2))
            #     # if self.txtDistance.isEnabled():
            #     self.setDistance(RnavWaypoints.smethod_2(position, self.waypoint).NauticalMiles)
        elif (self.type == RnavCommonWaypoint.MAWP):
            position2 = self.getThrPoint3D()
            position3 = self.getEndPoint3D()
            if position2 is None or position3 is None:
                self.txtBearing.setText("")
                return
            self.thrre = RnavWaypoints.smethod_0(position2, position3)
            self.rethr = RnavWaypoints.smethod_0(position3, position2)
            self.pos1400m = RnavWaypoints.smethod_3(position2, self.rethr, Distance(1400))
            num10 = RnavWaypoints.smethod_1(self.pos1400m, self.from1)
            num = RnavWaypoints.smethod_1(self.from1, self.pos1400m)
            # Splay half-angle: 30 deg for slow categories (A/B/H), else 15 deg.
            num11 = 15
            position4 = None
            if (self.category == AircraftSpeedCategory.A or self.category == AircraftSpeedCategory.B or self.category == AircraftSpeedCategory.H):
                num11 = 30
            # Primary range (num1/num2), orientation depends on which side of
            # the reverse runway bearing from1 lies.
            if (num10 > self.rethr or self.rethr - num10 >= 90):
                num1 = self.thrre + num11
                num2 = num
                if (num2 > 360):
                    num2 = num2 - 360
            else:
                num1 = num
                num2 = self.thrre - num11
                if (num2 < 0):
                    num2 = num2 + 360
            # Order the pair so min/max are assigned consistently unless the
            # range straddles north (wraps through 360).
            if (max(num1, num2) <= 270 or min(num1, num2) >= 90):
                num3 =min(num1, num2)
                num4 = max(num1, num2)
            else:
                num3 = max(num1, num2)
                num4 = min(num1, num2)
            # Secondary range: only when from1 is nearly aligned (<= 5 deg)
            # with the reverse runway bearing as seen from a point 466 m
            # past the THR.
            position4 = RnavWaypoints.smethod_3(position2, self.thrre, Distance(466))
            num12 = RnavWaypoints.smethod_0(position4, self.from1)
            num13 = math.fabs(num12 - self.rethr)
            if (num13 > 180):
                num13 = 360 - num13
            if (num13 > 5):
                num5 = 0
                num6 = 0
                num7 = 0
                num8 = 0
            else:
                if (num12 > self.rethr or self.rethr - num12 >= 90):
                    num9 = self.rethr + 90
                    if (num9 > 360):
                        num9 = num9 - 360
                else:
                    num9 = self.rethr - 90
                    if (num9 < 0):
                        num9 = num9 + 360
                position5 = RnavWaypoints.smethod_3(self.pos1400m, num9, Distance(150))
                num5 = RnavWaypoints.smethod_0(self.from1, position5)
                num6 = RnavWaypoints.smethod_0(self.from1, self.pos1400m)
                if (max(num5, num6) <= 270 or min(num5, num6) >= 90):
                    num7 = min(num5, num6)
                    num8 = max(num5, num6)
                else:
                    num7 = max(num5, num6)
                    num8 = min(num5, num6)
            # Special case: from1 sits on the extended centreline (within 1 deg),
            # so the primary range is taken between two 150 m lateral offsets.
            if (MathHelper.smethod_99(num, self.thrre, 1)):
                position6 = RnavWaypoints.smethod_3(self.pos1400m, self.rethr - 90, Distance(150))
                position7 = RnavWaypoints.smethod_3(self.pos1400m, self.rethr + 90, Distance(150))
                num1 = RnavWaypoints.smethod_0(self.from1, position6)
                num2 = RnavWaypoints.smethod_0(self.from1, position7)
                num7 = 0
                num8 = 0
                if (max(num1, num2) <= 270 or min(num1, num2) >= 90):
                    num3 = min(num1, num2)
                    num4 = max(num1, num2)
                else:
                    num3 = max(num1, num2)
                    num4 = min(num1, num2)
            # Merge/normalize the two candidate ranges into
            # (MinBearing, MaxBearing) and (MinBearing2, MaxBearing2).
            if (MathHelper.smethod_96(num7) or MathHelper.smethod_96(num8)):
                self.MinBearing = MathHelper.smethod_3(num3)
                self.MaxBearing = MathHelper.smethod_3(num4)
                self.MinBearing2 = MathHelper.smethod_3(num7)
                self.MaxBearing2 = MathHelper.smethod_3(num8)
            elif (min(num3, num4) >= min(num7, num8)):
                if (MathHelper.smethod_99(num8, num3, 0.3)):
                    # Adjacent ranges (within 0.3 deg): fuse into one.
                    num8 = num4
                    num3 = 0
                    num4 = 0
                self.MinBearing = MathHelper.smethod_3(num7)
                self.MaxBearing = MathHelper.smethod_3(num8)
                self.MinBearing2 = MathHelper.smethod_3(num3)
                self.MaxBearing2 = MathHelper.smethod_3(num4)
            else:
                if (MathHelper.smethod_99(num4, num7, 0.3)):
                    num4 = num8
                    num7 = 0
                    num8 = 0
                self.MinBearing = MathHelper.smethod_3(num3)
                self.MaxBearing = MathHelper.smethod_3(num4)
                self.MinBearing2 = MathHelper.smethod_3(num7)
                self.MaxBearing2 = MathHelper.smethod_3(num8)
            self.MinDistance = RnavWaypoints.smethod_4(self.type, self.category)
            # if self.txtDistance.isEnabled():
            #     self.setDistance(RnavWaypoints.smethod_6(self.type, self.category).NauticalMiles)
            # Default bearing: midpoint of the primary range (wrap-aware).
            if (self.MinBearing <= self.MaxBearing):
                self.setBearing((self.MinBearing + self.MaxBearing) / 2)
            else:
                self.setBearing(MathHelper.smethod_3(self.MinBearing + (360 - self.MinBearing + self.MaxBearing)))
            # if (self.waypoint is not None):
            #     self.setBearing(RnavWaypoints.smethod_0(self.from1, self.waypoint))
        # Refresh the hint labels with the computed constraints.
        if (MathHelper.smethod_96(self.MinBearing2) or MathHelper.smethod_96(self.MaxBearing2)):
            self.lbl1.setText(unicode("Acceptable bearings are %.1f° - %.1f°", "utf-8")%(self.MinBearing, self.MaxBearing))
        else:
            self.lbl1.setText(Validations.ACCEPTABLE_BEARINGS_ARE_X_Y_AND_X_Y%( self.MinBearing, self.MaxBearing, self.MinBearing2, self.MaxBearing2))
        if self.MinDistance != None and self.type != RnavCommonWaypoint.MAWP:
            self.lbl2.setText(Validations.ACCEPTABLE_MINIMUM_DISTANCE_IS_X%(self.MinDistance.NauticalMiles))
# def removeEnd(self):
# self.txtEND_X.setText("")
# self.txtEND_Y.setText("")
# def removeThr(self):
# self.txtTHR_X.setText("")
# self.txtTHR_Y.setText("")
# @staticmethod
# def smethod_0( parent, rnavCommonWaypoint_0, aircraftSpeedCategory_0, position_0, position_1, position_List):
# flag = None
# using (DlgCalculateWaypoint dlgCalculateWaypoint = new DlgCalculateWaypoint())
# {
# dlgCalculateWaypoint.Text = string.Format("{0} {1}", Captions.CALCULATE, EnumHelper.smethod_0(rnavCommonWaypoint_0))
# dlgCalculateWaypoint.Type = rnavCommonWaypoint_0
# dlgCalculateWaypoint.Category = aircraftSpeedCategory_0
# dlgCalculateWaypoint.From = position_1
# double num = RnavWaypoints.smethod_0(position_0, position_1)
# dlgCalculateWaypoint.MinBearing = RnavWaypoints.smethod_7(rnavCommonWaypoint_0, aircraftSpeedCategory_0, num)
# dlgCalculateWaypoint.MaxBearing = RnavWaypoints.smethod_8(rnavCommonWaypoint_0, aircraftSpeedCategory_0, num)
# dlgCalculateWaypoint.MinDistance = RnavWaypoints.smethod_4(rnavCommonWaypoint_0, aircraftSpeedCategory_0)
# if (rnavCommonWaypoint_0 == RnavCommonWaypoint.IAWP1)
# {
# dlgCalculateWaypoint.Bearing = dlgCalculateWaypoint.MinBearing
# }
# else if (rnavCommonWaypoint_0 != RnavCommonWaypoint.IAWP3)
# {
# dlgCalculateWaypoint.Bearing = num
# }
# else
# {
# dlgCalculateWaypoint.Bearing = dlgCalculateWaypoint.MaxBearing
# }
# dlgCalculateWaypoint.Distance = RnavWaypoints.smethod_6(rnavCommonWaypoint_0, aircraftSpeedCategory_0)
# dlgCalculateWaypoint.Waypoint = position_2
# if (dlgCalculateWaypoint.method_2(iwin32Window_0) != System.Windows.Forms.DialogResult.OK)
# {
# flag = false
# }
# else
# {
# position_2 = dlgCalculateWaypoint.Waypoint
# flag = true
# }
# }
# return flag
# }
#
# public static bool smethod_1(IWin32Window iwin32Window_0, RnavCommonWaypoint rnavCommonWaypoint_0, AircraftSpeedCategory aircraftSpeedCategory_0, Position position_0, ref Position position_1, ref Position position_2, ref Position position_3)
# {
# bool flag
# using (DlgCalculateWaypoint dlgCalculateWaypoint = new DlgCalculateWaypoint())
# {
# dlgCalculateWaypoint.Text = string.Format("{0} {1}", Captions.CALCULATE, EnumHelper.smethod_0(rnavCommonWaypoint_0))
# dlgCalculateWaypoint.Type = rnavCommonWaypoint_0
# dlgCalculateWaypoint.Category = aircraftSpeedCategory_0
# dlgCalculateWaypoint.From = position_0
# dlgCalculateWaypoint.RwyThr = position_1
# dlgCalculateWaypoint.RwyEnd = position_2
# dlgCalculateWaypoint.Waypoint = position_3
# bool flag1 = dlgCalculateWaypoint.method_2(iwin32Window_0) == System.Windows.Forms.DialogResult.OK
# position_1 = dlgCalculateWaypoint.RwyThr
# position_2 = dlgCalculateWaypoint.RwyEnd
# if (flag1)
# {
# position_3 = dlgCalculateWaypoint.Waypoint
# }
# flag = flag1
# }
# return flag
# }
# } | [
"[email protected]"
]
| |
8cde3781272c47dc93995ad7a393be282fd619be | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/9/wuz.py | 25bf44e8b6d42681dc820d4df1c4e481b029d13a | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'wUZ':
printFunction(data[1:])
else:
print 'ERROR'
return
# Entry point: expects the script file path as the first CLI argument.
if __name__ == '__main__':
    main(sys.argv[1])
"[email protected]"
]
| |
72c850969dfe5e6528309e706ffd673c82f7a44c | 5b93930ce8280b3cbc7d6b955df0bfc5504ee99c | /nodes/VanderPlas17Python/E_Chapter4/E_VisualizingErrors/index.py | c2c770e9b709e97993efbbfb79962c767157f91e | []
| no_license | nimra/module_gen | 8749c8d29beb700cac57132232861eba4eb82331 | 2e0a4452548af4fefd4cb30ab9d08d7662122cf4 | refs/heads/master | 2022-03-04T09:35:12.443651 | 2019-10-26T04:40:49 | 2019-10-26T04:40:49 | 213,980,247 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,314 | py | # Lawrence McAfee
# ~~~~~~~~ import ~~~~~~~~
from modules.node.HierNode import HierNode
from modules.node.LeafNode import LeafNode
from modules.node.Stage import Stage
from modules.node.block.CodeBlock import CodeBlock as cbk
from modules.node.block.HierBlock import HierBlock as hbk
from modules.node.block.ImageBlock import ImageBlock as ibk
from modules.node.block.ListBlock import ListBlock as lbk
from modules.node.block.MarkdownBlock import MarkdownBlock as mbk
from .A_BasicErrorbars.index import BasicErrorbars as A_BasicErrorbars
from .B_ContinuousErrors.index import ContinuousErrors as B_ContinuousErrors
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Staged content blocks for this section; currently only the raw chapter text
# is kept as comments, so the list itself is empty at runtime.
blocks = [
    # Figure 4-26. Using point properties to encode features of the Iris data
    #
    # We can see that this scatter plot has given us the ability to simultaneously explore
    # four different dimensions of the data: the (x, y) location of each point corresponds to
    # the sepal length and width, the size of the point is related to the petal width, and the
    # color is related to the particular species of flower. Multicolor and multifeature scatter
    # plots like this can be useful for both exploration and presentation of data.
    #
    # plot Versus scatter: A Note on Efficiency
    # Aside from the different features available in plt.plot and plt.scatter, why might
    # you choose to use one over the other? While it doesn’t matter as much for small
    # amounts of data, as datasets get larger than a few thousand points, plt.plot can be
    # noticeably more efficient than plt.scatter. The reason is that plt.scatter has the
    # capability to render a different size and/or color for each point, so the renderer must
    # do the extra work of constructing each point individually. In plt.plot, on the other
    # hand, the points are always essentially clones of each other, so the work of determin‐
    # ing the appearance of the points is done only once for the entire set of data. For large
    # datasets, the difference between these two can lead to vastly different performance,
    # and for this reason, plt.plot should be preferred over plt.scatter for large
    # datasets.
    #
    # Visualizing Errors
    # For any scientific measurement, accurate accounting for errors is nearly as important,
    # if not more important, than accurate reporting of the number itself. For example,
    # imagine that I am using some astrophysical observations to estimate the Hubble Con‐
    # stant, the local measurement of the expansion rate of the universe. I know that the
    # current literature suggests a value of around 71 (km/s)/Mpc, and I measure a value of
    # 74 (km/s)/Mpc with my method. Are the values consistent? The only correct answer,
    # given this information, is this: there is no way to know.
    #
    #
    # Visualizing Errors | 237
    #
    # Suppose I augment this information with reported uncertainties: the current litera‐
    # ture suggests a value of around 71 ± 2.5 (km/s)/Mpc, and my method has measured a
    # value of 74 ± 5 (km/s)/Mpc. Now are the values consistent? That is a question that
    # can be quantitatively answered.
    # In visualization of data and results, showing these errors effectively can make a plot
    # convey much more complete information.
    #
    # Basic Errorbars
    # A basic errorbar can be created with a single Matplotlib function call (Figure 4-27):
    # In[1]: %matplotlib inline
    # import matplotlib.pyplot as plt
    # plt.style.use('seaborn-whitegrid')
    # import numpy as np
    # In[2]: x = np.linspace(0, 10, 50)
    # dy = 0.8
    # y = np.sin(x) + dy * np.random.randn(50)
    #
    # plt.errorbar(x, y, yerr=dy, fmt='.k');
    #
    #
    #
    #
    # Figure 4-27. An errorbar example
    #
    # Here the fmt is a format code controlling the appearance of lines and points, and has
    # the same syntax as the shorthand used in plt.plot, outlined in “Simple Line Plots”
    # on page 224 and “Simple Scatter Plots” on page 233.
    # In addition to these basic options, the errorbar function has many options to fine-
    # tune the outputs. Using these additional options you can easily customize the aesthet‐
    # ics of your errorbar plot. I often find it helpful, especially in crowded plots, to make
    # the errorbars lighter than the points themselves (Figure 4-28):
    # In[3]: plt.errorbar(x, y, yerr=dy, fmt='o', color='black',
    # ecolor='lightgray', elinewidth=3, capsize=0);
    #
    #
    #
    #
    # 238 | Chapter 4: Visualization with Matplotlib
    #
]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Content(LeafNode):
    """Leaf node holding the raw content blocks of the 'Visualizing Errors' section."""

    def __init__(self):
        super().__init__(
            "Visualizing Errors",
            # Stage.REMOVE_EXTRANEOUS,
            # Stage.ORIG_BLOCKS,
            # Stage.CUSTOM_BLOCKS,
            # Stage.ORIG_FIGURES,
            # Stage.CUSTOM_FIGURES,
            # Stage.CUSTOM_EXERCISES,
        )
        for block in blocks:
            self.add(block)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class VisualizingErrors(HierNode):
    """Hierarchy node for 'Visualizing Errors': its content plus both subsections."""

    def __init__(self):
        super().__init__("Visualizing Errors")
        for child in (Content(), A_BasicErrorbars(), B_ContinuousErrors()):
            self.add(child)
# eof
| [
"[email protected]"
]
| |
3037cc9f0d5675cef844ea03c08be30f015cdeb3 | fe7996f7110211e8c2df7cd7a4d81cc572204a70 | /synthetic-enumeration/sprint-12/03-collect-experimental-data-from-Lauren-assignments.py | afb9cf0e066c49396bcbc2bd77a5215fad858d7a | [
"MIT"
]
| permissive | FoldingAtHome/covid-moonshot | 78c2bc7e6d00f371d626fcb0a4381cf528413eef | 814189c239f8f0189c6cc48afcbca1f96c87dd09 | refs/heads/master | 2023-02-23T04:23:00.064389 | 2023-02-19T23:18:10 | 2023-02-19T23:18:10 | 249,626,873 | 62 | 11 | MIT | 2022-03-01T20:43:56 | 2020-03-24T06:07:39 | Python | UTF-8 | Python | false | false | 5,603 | py | #!/bin/env python
"""
Collect experimental data from Lauren's reassignments via CSV file
"""
import numpy as np
import json
import math
import itertools
import datetime
from rich.progress import track
from openeye import oechem
# Metadata describing this dataset-update run.
xchem_project = 'Mpro'
creator = 'John Chodera <[email protected]>'
creation_date = datetime.datetime.now()  # timestamp of this processing run
prefix = 'sprint-12'  # used to name the output JSON file
description = 'COVID Moonshot Sprint 12 for optimizing 5-spiro compounds'
# Input CSV export of assay results (one row per measurement).
csv_filename = 'experimental-data/Fl_agg_data_all_data_11_01_2022_11_13_20-cleaned-reassigned_isomers.csv'
#
# Now pull in all submitted designs
#
def smiles_is_racemic(suspected_smiles):
    """Return True if the compound is racemic (or its stereochemistry is
    otherwise unresolved in the SMILES).

    Any whitespace-separated suffix (e.g. the " |o1:14|" enhanced-stereo
    annotation) is stripped before the check, so an enantiopure compound
    with uncertain assignment is also reported as True.
    """
    bare_smiles = suspected_smiles.split()[0]  # drop annotation suffix
    return stereochemistry_is_uncertain(bare_smiles)
def stereochemistry_is_uncertain(suspected_smiles):
    """Return True if the SMILES admits more than one stereoisomer.

    Enumerates unique stereoisomers with RDKit; a molecule with fully
    specified stereochemistry yields exactly one isomer, so any count
    above one means the stereochemistry is uncertain (or the compound
    is racemic).

    Parameters
    ----------
    suspected_smiles : str
        SMILES string without annotation suffixes.
    """
    from rdkit import Chem
    from rdkit.Chem.EnumerateStereoisomers import EnumerateStereoisomers, StereoEnumerationOptions

    rdmol = Chem.MolFromSmiles(suspected_smiles)
    opts = StereoEnumerationOptions(unique=True)
    # Counting the enumerated isomers directly is equivalent to the former
    # approach of canonicalizing, sorting and collecting their SMILES just
    # to measure the list length -- but without the wasted work.
    isomers = tuple(EnumerateStereoisomers(rdmol, options=opts))
    return len(isomers) > 1
# Read all submitted designs
print('Reading CSV export...')
compounds_with_experimental_data = list()

# Drop columns that cause trouble for OpenEye
import pandas as pd
# dtype=str keeps qualifiers like '<'/'>' and IDs intact (no numeric coercion).
df = pd.read_csv(csv_filename, dtype=str)
# Drop columns
#drop_columns = []
#df.drop(columns=drop_columns, inplace=True)
# Replace suspected_SMILES with SMILES
#df['suspected_SMILES'].fillna(df['SMILES'], inplace=True)
# Exchange columns so suspected_SMILES is first
#title_column_index = df.columns.get_loc("Canonical PostEra ID")
#smiles_column_index = df.columns.get_loc("suspected_SMILES")
#cols = df.columns.tolist()
#cols = cols[smiles_column_index:(smiles_column_index+1)] + cols[title_column_index:(title_column_index+1)] + cols[:]
#df = df[cols]
# Replace < and > with limits
#df.applymap(lambda x: str(x))
#df.applymap(lambda x: 0.050 if "<" in str(x) else x)
#df.applymap(lambda x: 99.0 if ">" in str(x) else x)
# Eliminate stuff after spaces
#df = df.applymap(lambda x: str(x).split()[0])

# Counters for the summary printed below.
ncompounds_dropped_due_to_uncertain_stereochemistry = 0
ncompounds_racemic = 0

# Iterate over molecules
# Fields: compound_name,compound_structure,measurement,qualifier,reassigned_structure
# Format: PostEra ID,SMILES,pIC50,comparator,reassigned_structure
delta_pIC50 = 0.2 # 95% CI is this many units in either direction
from fah_xchem.schema import ExperimentalCompoundData, ExperimentalCompoundDataUpdate
for index, row in df.iterrows():
    row = row.to_dict()
    suspected_smiles = row['compound_structure']
    compound_id = row['compound_name']
    is_racemic = smiles_is_racemic(suspected_smiles)

    # Skip inequalities
    if row['qualifier'] != '=':
        continue
    # Symmetric 95% CI of +/- delta_pIC50 around the reported value.
    pIC50 = float(row['measurement'])
    pIC50_lower = pIC50 - delta_pIC50
    pIC50_upper = pIC50 + delta_pIC50

    # Canonicalize with OpenEye SMILES
    suspected_smiles = suspected_smiles.split()[0] # truncate stuff after whitespace
    oemol = oechem.OEGraphMol()
    oechem.OESmilesToMol(oemol, suspected_smiles)
    suspected_smiles = oechem.OEMolToSmiles(oemol)

    experimental_data = dict()
    experimental_data['pIC50'] = pIC50
    experimental_data['pIC50_lower'] = pIC50_lower
    experimental_data['pIC50_upper'] = pIC50_upper

    if is_racemic:
        ncompounds_racemic += 1

    # Store compound experimental data
    experimental_compound_data = ExperimentalCompoundData(
        compound_id=compound_id,
        smiles=suspected_smiles,
        is_racemic=is_racemic,
        experimental_data=experimental_data,
    )
    compounds_with_experimental_data.append(experimental_compound_data)

print(f'{len(compounds_with_experimental_data)} measurements read and retained')
print(f'{ncompounds_dropped_due_to_uncertain_stereochemistry} enantiopure compounds with uncertain stereochemistry dropped.')
print(f'{ncompounds_racemic} compounds assayed as racemates')

dataset = ExperimentalCompoundDataUpdate(compounds=compounds_with_experimental_data)
print(f'There are {len(compounds_with_experimental_data)} compounds in this sprint with in-range IC50 measurements')

# Write JSON
def write_json(compound_series, json_filename):
    """Serialize *compound_series* (any object exposing a .json() method,
    e.g. a pydantic model) to *json_filename*.

    Output is transparently compressed when the filename ends in
    '.bz2' or '.gz'; otherwise plain text is written.
    """
    print(f'Writing JSON to {json_filename}')
    # Dispatch on the real suffix: the original used substring tests
    # (`'.bz2' in json_filename`), which also matched names like 'x.bz2.bak'.
    if json_filename.endswith('.bz2'):
        import bz2
        opener = bz2.open
    elif json_filename.endswith('.gz'):
        import gzip
        opener = gzip.open
    else:
        opener = open
    with opener(json_filename, "wt") as f:
        f.write(compound_series.json())
import os
os.makedirs('json', exist_ok=True)
print(f'Generating experimental data JSON for {prefix}...')
json_filename = f'json/{prefix}-experimental-data.json' # output filename
write_json(dataset, json_filename)
| [
"[email protected]"
]
| |
db61be2c3b26ca80b961f9b324f981d7de1be14a | 99361c45166c3e39bdc1e5e7ff796b60e5edc20e | /setup.py | 59352d3cc65d0277c567478e0470ebd9187c11c0 | []
| no_license | wkcn/WorldCup | 2b358b73aab5496b3f7e209dc615c97c0181abff | 1acef2d2cadf5e8cbb911b05a8ecfd98aa43920d | refs/heads/master | 2020-03-08T10:38:08.558059 | 2018-04-04T15:03:07 | 2018-04-04T15:03:07 | 128,077,995 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | # -*- coding: utf-8 -*-
from distutils.core import setup
import py2exe
import sys
# Force the py2exe command so the script can be run without CLI arguments.
sys.argv.append('py2exe')
# Build a windowed (no console) Windows executable with an embedded icon.
setup(
    windows=[
        {"script":"run.py","icon_resources":[(1,"logo.ico"),]}],
    options={
        # bundle_files=3: keep files separate; optimize=2: strip docstrings.
        "py2exe":{"includes":["sip"],"dll_excludes":["MSVCP90.dll"],\
        "bundle_files": 3,"optimize": 2,
        }},
    data_files=[
        ("image", ["./logo.ico",])]
) | [
"[email protected]"
]
| |
eca69742d6ec30ac047d2b79b46fa7b0ddc3cf56 | 237cc38de0cf7a6e3661ed552ae771bd972d7438 | /base/obj2_demo.py | ce08920ba539aeb6829dc7a411f369bec63a4e60 | []
| no_license | chydream/python | af5ad8a98c78de71e255f7b776f936c4b89c616e | e5bfef53a7770d4f323bd2877f93c8166c563695 | refs/heads/master | 2020-05-07T17:00:33.558178 | 2020-05-05T13:45:19 | 2020-05-05T13:45:19 | 180,708,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,949 | py | class Point(object):
# 自定义Point类的构造(初始化)方法
def __init__(self, x, y):
self.x = x
self.y = y
# 自定义Point类对象的格式化输出函数(string())
def string(self):
print(print("{{X:{0},Y:{1}}}".format(self.x, self.y)))
class Circle(Point):
    """A circle: a centre Point plus a radius."""
    def __init__(self, x, y, radius):
        # Let the Point base class store the centre coordinates.
        Point.__init__(self, x, y)
        self.radius = radius
    def string(self):
        # Report the centre and radius on stdout.
        text = "该图形初始化点为:{{X:{0},Y:{1}}};{{半径为:{2}}}".format(self.x, self.y, self.radius)
        print(text)
class Size(object):
    """A width/height pair."""
    def __init__(self, width, height):
        self.width = width
        self.height = height
    def string(self):
        # Render as "{Width:w,Height:h}" on stdout.
        print(f"{{Width:{self.width},Height:{self.height}}}")
class Rectangle(Point, Size):
    """A rectangle: an origin Point plus a Size (width/height)."""
    def __init__(self, x, y, width, height):
        # Initialise both base classes explicitly (multiple inheritance).
        Point.__init__(self, x, y)
        Size.__init__(self, width, height)
    def string(self):
        text = "该图形初始化点为:{{X:{0},Y:{1}}};长宽分别为:{{Width:{2}, Height:{3}}}".format(
            self.x, self.y, self.width, self.height)
        print(text)
if __name__ == "__main__":
    # Instantiate a Circle with centre (5, 5) and radius 8.
    c = Circle(5, 5, 8)
    c.string()
    # Instantiate a Rectangle at (15, 15) with width 15 and height 15.
    r1 = Rectangle(15, 15, 15, 15)
    r1.string()
    # Instantiate a Rectangle at (40, 30) with width 11 and height 14.
    r2 = Rectangle(40, 30, 11, 14)
r2.string() | [
"[email protected]"
]
| |
f31a50aaf5650420eddc7d4b4b4b0b17edbae209 | 3fd7adb56bf78d2a5c71a216d0ac8bc53485b034 | /experiments/cem_exp/benchmarks_goalimage/hor15_easygoal/mod_hyper.py | 1060f0f55147f0e67cf53d1bef3020b1c04858e0 | []
| no_license | anair13/lsdc | 6d1675e493f183f467cab0bfe9b79a4f70231e4e | 7760636bea24ca0231b4f99e3b5e8290c89b9ff5 | refs/heads/master | 2021-01-19T08:02:15.613362 | 2017-05-12T17:13:54 | 2017-05-12T17:13:54 | 87,596,344 | 0 | 0 | null | 2017-04-08T00:18:55 | 2017-04-08T00:18:55 | null | UTF-8 | Python | false | false | 640 | py |
# Directory of this config file and of the parent benchmarks folder.
current_dir = '/'.join(str.split(__file__, '/')[:-1])
bench_dir = '/'.join(str.split(__file__, '/')[:-2])
from lsdc.algorithm.policy.cem_controller_goalimage import CEM_controller
# CEM visual-MPC policy configuration for the "easy goal" benchmark.
policy = {
    'type' : CEM_controller,
    'use_goalimage':"",
    'low_level_ctrl': None,
    'usenet': True,
    'nactions': 5,
    'repeat': 3,
    'initial_std': 7,
    'netconf': current_dir + '/conf.py',
    'use_first_plan': False, # execute MPC instead of using the first plan
    'iterations': 5,
    'load_goal_image':'make_easy_goal',
}
# Agent/rollout configuration: horizon T and start configurations.
agent = {
    'T': 25,
    'use_goalimage':"",
    'start_confs': bench_dir + '/make_easy_goal/configs_easy_goal'
} | [
"[email protected]"
]
| |
e17f92d3d343d5272ea4fbcebd7c5a86df5c6a2d | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2811/60768/235290.py | 44942420c792f233946644b79e4acce40a08ea76 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | pAndn = input().split(' ')
# Hash-table simulation: p slots, n insertions (read from stdin); print the
# 1-based index of the first insertion that collides, or -1 if none collide.
map = int(pAndn[0]) * ['']  # NOTE(review): shadows the builtin `map`
num = int(pAndn[1])
conflict = False
for i in range(num):
    index = int(input())
    # An empty string marks a free slot; occupied slots hold the stored int.
    if map[index % len(map)] == '':
        map[index % len(map)] = index
    else:
        print(i + 1)
        conflict = True
        break
if not conflict:
print(-1) | [
"[email protected]"
]
| |
376f82bf1be280037aaad21374b43a1e4dce82eb | 69889d51e933b4e8a1d4c8397a317aa1d1365a5a | /Stack/17299.py | 3de2e8eff8d86d4a1485e3e058e23e566d2857dc | []
| no_license | ddraa/Algorithm | a35c87631420ceccec6f7094da6f2b22ddb66c8c | a97c6628d5389f7f93603a2e95ac3b569057f556 | refs/heads/master | 2023-06-25T17:12:39.925821 | 2021-07-18T05:53:28 | 2021-07-18T05:53:28 | 279,240,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | import sys
# Fast line reader for competitive-programming style input.
input = sys.stdin.readline
# BOJ 17299: for each arr[i], find the nearest element to its right whose
# frequency in arr is strictly higher; -1 if none exists.
N = int(input())
F, stack = {}, []
arr = list(map(int, input().split()))
res = [-1 for _ in range(N)]
# First pass: count occurrences of each value.
for n in arr:
    if n in F:
        F[n] += 1
    else:
        F[n] = 1
# Second pass (right to left) with a monotonic stack of (frequency, value):
# pop entries whose frequency is not larger, so the top is the answer.
for i in range(N - 1, -1, -1):
    while stack and stack[-1][0] <= F[arr[i]]:
        stack.pop()
    if stack:
        res[i] = stack[-1][1]
    stack.append((F[arr[i]], arr[i]))
print(*res)
| [
"[email protected]"
]
| |
bfb31bbaa48485e6c87d4b9683dbf6fc1c4d2f7b | 91a9f5a7afb398f4238527708cbc155dc972cbfa | /older/Grapher_app0/Names_Module.py | 1ff52a69c3e05a1e89a15ebd6b1cc78a4dd3597e | []
| no_license | bddmodelcar/kzpy3.2 | cd6f9bf6b7b8b920c79b4ee36c2592b992ae4332 | b044b26649b19b240bd580feca20424a237374b1 | refs/heads/master | 2021-01-19T21:01:58.687712 | 2017-08-23T22:39:56 | 2017-08-23T22:39:56 | 101,243,308 | 0 | 1 | null | 2017-08-24T02:04:50 | 2017-08-24T02:04:50 | null | UTF-8 | Python | false | false | 1,681 | py | from Paths_Module import *
exec(identify_file_str)
# Bind every identifier below to its own name as a string (e.g. img = 'img'),
# producing symbol-style constants used as dict keys/tags across the project.
# d2n(...) (from Paths_Module's star import) joins its arguments into the
# statement "name='name'", which exec then evaluates at module level.
for _name in [
'pts_plot','img','purpose','name','xyz_sizes','data_type','x','y',
'xmin','ymin','xmax','ymax','xscale','yscale','floats_to_pixels',
'pixels_to_floats','ysize','xsize','lines_plot','color',
'reject_run',
'left',
'out1_in2',
'dic',
'name',
'test',
'dic_type',
'purpose',
'batch_size',
'net',
'camera_data',
'metadata',
'target_data',
'names',
'states',
'loss_dic',
'train',
'val',
'ctr',
'all_steer',
'epoch_counter',
'get_data',
'next',
'run_code',
'seg_num',
'offset',
'all_data_moment_id_codes',
'left',
'right',
'fill',
'clear',
'forward',
'backward',
'display',
'GPU',
'BATCH_SIZE',
'DISPLAY',
'VERBOSE',
'LOAD_ARUCO',
'BAIR_CAR_DATA_PATH',
'RESUME',
'IGNORE',
'REQUIRE_ONE',
'USE_STATES',
'N_FRAMES',
'N_STEPS',
'STRIDE',
'save_net_timer',
'print_timer',
'epoch_timer',
'WEIGHTS_FILE_PATH',
'SAVE_FILE_NAME',
'mode',
'criterion',
'optimizer',
'data_ids',
'data_moment',
'racing',
'caffe',
'follow',
'direct',
'play',
'furtive',
'labels',
'LCR',
'data_moment_loss_record',
'loss',
'outputs',
'print_now',
'network',
'metadata',
'steer',
'motor',
'data',
'NETWORK_OUTPUT_FOLDER',
'code','data_moment_loss_records','loss_history','weights',
'save_net',
'CODE_PATH',
'rate_ctr',
'rate_timer',
'step',
'rate_counter',
'loss_record',
'add','loss',
'TRAIN_TIME',
'VAL_TIME','INITIAL_WEIGHTS_FOLDER',
'activiations',
'moment_index', 'imgs', 'view','camera_input','final_output',
'pre_metadata_features','pre_metadata_features_metadata','post_metadata_features','scales','delay'
]:exec(d2n(_name,'=',"'",_name,"'"))
#
#EOF | [
"[email protected]"
]
| |
7540b3442e53b36dbb55bce5a3c058d967207818 | 296d4fec38b2a5ec2f4eb402d1b2145980dd184b | /aliens.py | ac6d5f3a8d3bb0c5203dcb6a7cf851111dbd07b3 | []
| no_license | RayGutt/python | 9464ae7c63850240df58ff78c6050bc6e1d35b3e | a9b68d43923f13b58e7d59fdabf649820d48bd52 | refs/heads/master | 2020-11-27T01:17:57.136062 | 2020-01-22T14:36:25 | 2020-01-22T14:36:25 | 229,254,199 | 0 | 0 | null | 2020-01-05T19:47:27 | 2019-12-20T11:39:53 | HTML | UTF-8 | Python | false | false | 844 | py | alien_0 = {'color': 'green', 'points': 5}
alien_1 = {'color': 'yellow', 'points': 10}
alien_2 = {'color': 'yellow', 'points': 15}
aliens = [alien_0, alien_1, alien_2]
# Print the three hand-built aliens.
for alien in aliens:
    print(alien)
print("_________")
# Make an empty list for storing aliens.
aliens = []
# Make 30 green aliens.
for alien_number in range(30):
    new_alien = {'color': 'green', 'points': 5, 'speed': 'slow'}
    aliens.append(new_alien)
# Upgrade the first three aliens one level (green -> yellow -> red).
for alien in aliens[0:3]:
    if alien['color'] == 'green':
        alien['color'] = 'yellow'
        alien['speed'] = 'medium'
        alien['points'] = 10
    elif alien['color'] == 'yellow':
        alien['color'] = 'red'
        alien['speed'] = 'fast'
        alien['points'] = 15
# Show the first 5 aliens.
for alien in aliens[:5]:
    print(alien)
print("...")
# Show how many aliens have been created.
print("Total number of aliens: " + str(len(aliens)))
| [
"[email protected]"
]
| |
37dcddbb5760b82cc718a99321054fb899fc11bf | ba3231b25c60b73ca504cd788efa40d92cf9c037 | /nitro-python-13.0.36/nssrc/com/citrix/netscaler/nitro/resource/config/cmp/cmppolicy.py | 27844dcdfaf065883b411a91b98dd7aa313f7a18 | [
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | zhuweigh/vpx13 | f6d559ae85341e56472e3592cbc67062dac34b93 | b36caa3729d3ca5515fa725f2d91aeaabdb2daa9 | refs/heads/master | 2020-07-04T22:15:16.595728 | 2019-09-20T00:19:56 | 2019-09-20T00:19:56 | 202,435,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,184 | py | #
# Copyright (c) 2008-2019 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class cmppolicy(base_resource) :
""" Configuration for compression policy resource. """
	def __init__(self) :
		# Writable policy attributes (exposed via the properties below).
		self._name = None
		self._rule = None
		self._resaction = None
		self._feature = None
		self._newname = None
		# Read-only attributes populated from NITRO API responses.
		self._expressiontype = None
		self._reqaction = None
		self._hits = None
		self._txbytes = None
		self._rxbytes = None
		self._clientttlb = None
		self._clienttransactions = None
		self._serverttlb = None
		self._servertransactions = None
		self._description = None
		self._isdefault = None
		# Result count filled in by count()/count_filtered() queries.
		self.___count = None
@property
def name(self) :
r"""Name of the HTTP compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Can be changed after the policy is created.
The following requirement applies only to the Citrix ADC CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
r"""Name of the HTTP compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Can be changed after the policy is created.
The following requirement applies only to the Citrix ADC CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def rule(self) :
r"""Expression that determines which HTTP requests or responses match the compression policy.
The following requirements apply only to the Citrix ADC CLI:
* If the expression includes one or more spaces, enclose the entire expression in double quotation marks.
* If the expression itself includes double quotation marks, escape the quotations by using the \ character.
* Alternatively, you can use single quotation marks to enclose the rule, in which case you do not have to escape the double quotation marks.
"""
try :
return self._rule
except Exception as e:
raise e
@rule.setter
def rule(self, rule) :
r"""Expression that determines which HTTP requests or responses match the compression policy.
The following requirements apply only to the Citrix ADC CLI:
* If the expression includes one or more spaces, enclose the entire expression in double quotation marks.
* If the expression itself includes double quotation marks, escape the quotations by using the \ character.
* Alternatively, you can use single quotation marks to enclose the rule, in which case you do not have to escape the double quotation marks.
"""
try :
self._rule = rule
except Exception as e:
raise e
@property
def resaction(self) :
r"""The built-in or user-defined compression action to apply to the response when the policy matches a request or response.<br/>Minimum length = 1.
"""
try :
return self._resaction
except Exception as e:
raise e
@resaction.setter
def resaction(self, resaction) :
r"""The built-in or user-defined compression action to apply to the response when the policy matches a request or response.<br/>Minimum length = 1
"""
try :
self._resaction = resaction
except Exception as e:
raise e
@property
def feature(self) :
r"""The feature to be checked while applying this config.
"""
try :
return self._feature
except Exception as e:
raise e
@feature.setter
def feature(self, feature) :
r"""The feature to be checked while applying this config.
"""
try :
self._feature = feature
except Exception as e:
raise e
@property
def newname(self) :
r"""New name for the compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Choose a name that reflects the function that the policy performs.
The following requirement applies only to the Citrix ADC CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1.
"""
try :
return self._newname
except Exception as e:
raise e
@newname.setter
def newname(self, newname) :
r"""New name for the compression policy. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Choose a name that reflects the function that the policy performs.
The following requirement applies only to the Citrix ADC CLI:
If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my cmp policy" or 'my cmp policy').<br/>Minimum length = 1
"""
try :
self._newname = newname
except Exception as e:
raise e
@property
def expressiontype(self) :
r"""Type of policy (Classic/Advanced) .<br/>Possible values = Classic Policy, Advanced Policy.
"""
try :
return self._expressiontype
except Exception as e:
raise e
@property
def reqaction(self) :
r"""The compression action to be performed on requests.
"""
try :
return self._reqaction
except Exception as e:
raise e
@property
def hits(self) :
r"""Number of hits.
"""
try :
return self._hits
except Exception as e:
raise e
@property
def txbytes(self) :
r"""Number of bytes transferred.
"""
try :
return self._txbytes
except Exception as e:
raise e
@property
def rxbytes(self) :
r"""Number of bytes received.
"""
try :
return self._rxbytes
except Exception as e:
raise e
@property
def clientttlb(self) :
r"""Total client TTLB value.
"""
try :
return self._clientttlb
except Exception as e:
raise e
@property
def clienttransactions(self) :
r"""Number of client transactions.
"""
try :
return self._clienttransactions
except Exception as e:
raise e
@property
def serverttlb(self) :
r"""Total server TTLB value.
"""
try :
return self._serverttlb
except Exception as e:
raise e
@property
def servertransactions(self) :
r"""Number of server transactions.
"""
try :
return self._servertransactions
except Exception as e:
raise e
@property
def description(self) :
r"""Description of the policy.
"""
try :
return self._description
except Exception as e:
raise e
@property
def isdefault(self) :
r"""A value of true is returned if it is a default policy.
"""
try :
return self._isdefault
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(cmppolicy_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.cmppolicy
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
r""" Use this API to add cmppolicy.
"""
try :
if type(resource) is not list :
addresource = cmppolicy()
addresource.name = resource.name
addresource.rule = resource.rule
addresource.resaction = resource.resaction
addresource.feature = resource.feature
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
addresources[i].rule = resource[i].rule
addresources[i].resaction = resource[i].resaction
addresources[i].feature = resource[i].feature
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
r""" Use this API to delete cmppolicy.
"""
try :
if type(resource) is not list :
deleteresource = cmppolicy()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
r""" Use this API to update cmppolicy.
"""
try :
if type(resource) is not list :
updateresource = cmppolicy()
updateresource.name = resource.name
updateresource.rule = resource.rule
updateresource.resaction = resource.resaction
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ cmppolicy() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].rule = resource[i].rule
updateresources[i].resaction = resource[i].resaction
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
	@classmethod
	def rename(cls, client, resource, new_name) :
		r""" Use this API to rename a cmppolicy resource.
		`resource` may be either a cmppolicy instance or a policy name string;
		`new_name` is the name the policy is renamed to on the appliance.
		"""
		try :
			renameresource = cmppolicy()
			# Accept both an object and a bare name for convenience.
			if type(resource) == cls :
				renameresource.name = resource.name
			else :
				renameresource.name = resource
			return renameresource.rename_resource(client,new_name)
		except Exception as e :
			raise e
@classmethod
def get(cls, client, name="", option_="") :
r""" Use this API to fetch all the cmppolicy resources that are configured on netscaler.
"""
try :
if not name :
obj = cmppolicy()
response = obj.get_resources(client, option_)
else :
if type(name) is not list :
if type(name) == cls :
raise Exception('Invalid parameter name:{0}'.format(type(name)))
obj = cmppolicy()
obj.name = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
if type(name[0]) == cls :
raise Exception('Invalid parameter name:{0}'.format(type(name[0])))
response = [cmppolicy() for _ in range(len(name))]
obj = [cmppolicy() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = cmppolicy()
obj[i].name = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
r""" Use this API to fetch filtered set of cmppolicy resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = cmppolicy()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
r""" Use this API to count the cmppolicy resources configured on NetScaler.
"""
try :
obj = cmppolicy()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
	@classmethod
	def count_filtered(cls, client, filter_) :
		r""" Use this API to count the filtered set of cmppolicy resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = cmppolicy()
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(client, option_)
			if response :
				# '___count' is the name-mangled slot set by the response parser.
				return response[0].__dict__['___count']
			return 0
		except Exception as e :
			raise e
	class Expressiontype:
		# Allowed values for the read-only 'expressiontype' field.
		Classic_Policy = "Classic Policy"
		Advanced_Policy = "Advanced Policy"
class Feature:
WL = "WL"
WebLogging = "WebLogging"
SP = "SP"
SurgeProtection = "SurgeProtection"
LB = "LB"
LoadBalancing = "LoadBalancing"
CS = "CS"
ContentSwitching = "ContentSwitching"
CR = "CR"
CacheRedirection = "CacheRedirection"
SC = "SC"
SureConnect = "SureConnect"
CMP = "CMP"
CMPcntl = "CMPcntl"
CompressionControl = "CompressionControl"
PQ = "PQ"
PriorityQueuing = "PriorityQueuing"
HDOSP = "HDOSP"
HttpDoSProtection = "HttpDoSProtection"
SSLVPN = "SSLVPN"
AAA = "AAA"
GSLB = "GSLB"
GlobalServerLoadBalancing = "GlobalServerLoadBalancing"
SSL = "SSL"
SSLOffload = "SSLOffload"
SSLOffloading = "SSLOffloading"
CF = "CF"
ContentFiltering = "ContentFiltering"
IC = "IC"
IntegratedCaching = "IntegratedCaching"
OSPF = "OSPF"
OSPFRouting = "OSPFRouting"
RIP = "RIP"
RIPRouting = "RIPRouting"
BGP = "BGP"
BGPRouting = "BGPRouting"
REWRITE = "REWRITE"
IPv6PT = "IPv6PT"
IPv6protocoltranslation = "IPv6protocoltranslation"
AppFw = "AppFw"
ApplicationFirewall = "ApplicationFirewall"
RESPONDER = "RESPONDER"
HTMLInjection = "HTMLInjection"
push = "push"
NSPush = "NSPush"
NetScalerPush = "NetScalerPush"
AppFlow = "AppFlow"
CloudBridge = "CloudBridge"
ISIS = "ISIS"
ISISRouting = "ISISRouting"
CH = "CH"
CallHome = "CallHome"
AppQoE = "AppQoE"
ContentAccelerator = "ContentAccelerator"
SYSTEM = "SYSTEM"
RISE = "RISE"
FEO = "FEO"
LSN = "LSN"
LargeScaleNAT = "LargeScaleNAT"
RDPProxy = "RDPProxy"
Rep = "Rep"
Reputation = "Reputation"
URLFiltering = "URLFiltering"
VideoOptimization = "VideoOptimization"
ForwardProxy = "ForwardProxy"
SSLInterception = "SSLInterception"
AdaptiveTCP = "AdaptiveTCP"
CQA = "CQA"
CI = "CI"
ContentInspection = "ContentInspection"
class cmppolicy_response(base_response) :
	# Deserialization envelope for cmppolicy NITRO API responses.
	def __init__(self, length=1) :
		self.cmppolicy = []
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate `length` empty resources for the payload formatter to fill.
		self.cmppolicy = [cmppolicy() for _ in range(length)]
| [
"[email protected]"
]
| |
a232ab5e7b7b3938334e7d69911f01ae956a17eb | 4fdaa61e2fb2d320a0903e17024598c6a67ab0fb | /python/Vaav/kitchen.py | 9ffefc461390bce99d81d0b9e5536c9669c10b11 | []
| no_license | khans/ProgrammingAndDataStructures | 10d5cd5f30f703298ba132be4dfba828f3a0e9e1 | 58c1d822fa5eab17485369bc40dd1376db389f44 | refs/heads/master | 2021-01-25T14:03:40.616633 | 2018-06-19T23:02:44 | 2018-06-19T23:02:44 | 123,643,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py |
from collections import deque,defaultdict
class Table:
    """A restaurant table: table number, capacity, occupancy and availability."""
    def __init__(self, number):
        self.number = number
        # Per-instance state. These were class-level attributes before,
        # which act as shared defaults and invite accidental shared state.
        self.capacity = 0
        self.availability = True
        self.occupancy = 0
    def addOccupant(self):
        """Seat one more guest and mark the table as taken."""
        self.occupancy += 1
        self.availability = False
    def setCapacity(self, capacity):
        self.capacity = capacity
    def getTableNumber(self):
        return self.number
class Order:
    """A customer's order: mapping of menu item -> requested quantity."""
    def __init__(self):
        self.orderList = {}
    def addOrder(self, item, count):
        # Record (or overwrite) the quantity for this menu item.
        self.orderList[item] = count
class Kitchen:
    """FIFO queue of orders currently being prepared."""
    def __init__(self):
        # Per-instance queue. Previously `queue = deque()` was a CLASS
        # attribute, so every Kitchen instance shared one queue -- a classic
        # shared-mutable-state bug. Same for the `free` flag.
        self.queue = deque()
        self.free = False
    def make(self, order):
        """Accept *order* into the preparation queue."""
        self.queue.append(order)
    def isReady(self, order):
        """Return True once *order* is no longer waiting in the queue."""
        return order not in self.queue
    def getFood(self):
        # Hand over (and drop) the oldest queued order.
        self.queue.popleft()
    def getQueue(self):
        return self.queue
    def doneDish(self):
        # The oldest order finishes first (FIFO).
        self.queue.popleft()
| [
"[email protected]"
]
| |
7fbc8d5ca1d93c1ff42c22beefc7772cb15d39ca | 2f8f8171b3b996b0c866ede72367ec26f64eae39 | /sampleproject/book/BeginningPython3_O_REILLY/chapter10/10-8.py | 659dc821caed89f2f69b939227a7fca816939de1 | []
| no_license | kabaksh0507/exercise_python_it-1 | da46edce09301b03a5351ee1885fb01eb69d8240 | 2b6c80a79494c9981e51bd03696c3aa19d6625ec | refs/heads/main | 2023-03-04T03:12:44.188468 | 2021-02-08T08:55:36 | 2021-02-08T08:55:36 | 337,014,697 | 0 | 0 | null | 2021-02-08T08:57:30 | 2021-02-08T08:57:30 | null | UTF-8 | Python | false | false | 169 | py | from datetime import date
# Demonstrate datetime.date construction and strftime formatting.
birth_day = date(1987, 8, 9)
print(birth_day)  # default str() of a date is ISO format: 1987-08-09
fmt = 'year = %Y , month = %B , day = %d , day of the week = %A'
print(birth_day.strftime(fmt)) | [
"[email protected]"
]
| |
597f6e44b90374e56fd32df848bc609cc1e37273 | 733496067584ee32eccc333056c82d60f673f211 | /idfy_rest_client/models/signer_info.py | be68e6b47d5cff31143fcbe749d6914360bfe06d | [
"MIT"
]
| permissive | dealflowteam/Idfy | 90ee5fefaa5283ce7dd3bcee72ace4615ffd15d2 | fa3918a6c54ea0eedb9146578645b7eb1755b642 | refs/heads/master | 2020-03-07T09:11:15.410502 | 2018-03-30T08:12:40 | 2018-03-30T08:12:40 | 127,400,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,191 | py | # -*- coding: utf-8 -*-
"""
idfy_rest_client.models.signer_info
This file was automatically generated for Idfy by APIMATIC v2.0 ( https://apimatic.io )
"""
import idfy_rest_client.models.mobile
import idfy_rest_client.models.organization_info
class SignerInfo(object):
    """Implementation of the 'SignerInfo' model.

    Personal and organizational details about a document signer.

    Attributes:
        first_name (string): The signers first name
        last_name (string): The signers last name
        email (string): The signers email address, define this if you are using
            notifications
        mobile (Mobile): The signers mobile, define this if you are using
            notifications
        organization_info (OrganizationInfo): The signers organization info
    """
    # Create a mapping from Model property names to API property names
    _names = {
        "first_name":'firstName',
        "last_name":'lastName',
        "email":'email',
        "mobile":'mobile',
        "organization_info":'organizationInfo'
    }
    def __init__(self,
                 first_name=None,
                 last_name=None,
                 email=None,
                 mobile=None,
                 organization_info=None,
                 additional_properties=None):
        """Constructor for the SignerInfo class"""
        # Initialize members of the class
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.mobile = mobile
        self.organization_info = organization_info
        # Add additional model properties to the instance.
        # BUG FIX: the original default was a mutable `{}`, which Python
        # evaluates once and shares across every call, so properties added
        # to one instance silently appeared on all instances.
        self.additional_properties = {} if additional_properties is None else additional_properties
    @classmethod
    def from_dictionary(cls,
                        dictionary):
        """Creates an instance of this model from a dictionary

        Args:
            dictionary (dictionary): A dictionary representation of the object as
            obtained from the deserialization of the server's response. The keys
            MUST match property names in the API description.

        Returns:
            object: An instance of this structure class, or None for None input.
        """
        if dictionary is None:
            return None
        # Extract variables from the dictionary
        first_name = dictionary.get('firstName')
        last_name = dictionary.get('lastName')
        email = dictionary.get('email')
        mobile = idfy_rest_client.models.mobile.Mobile.from_dictionary(dictionary.get('mobile')) if dictionary.get('mobile') else None
        organization_info = idfy_rest_client.models.organization_info.OrganizationInfo.from_dictionary(dictionary.get('organizationInfo')) if dictionary.get('organizationInfo') else None
        # Clean out expected properties from dictionary (mutates the caller's
        # dict); whatever remains becomes additional_properties.
        for key in cls._names.values():
            if key in dictionary:
                del dictionary[key]
        # Return an object of this model
        return cls(first_name,
                   last_name,
                   email,
                   mobile,
                   organization_info,
                   dictionary)
| [
"[email protected]"
]
| |
bf6f30ccfa37f9d4acc212e1f4ec33d7b4457052 | 09fd456a6552f42c124c148978289fae1af2d5c3 | /Greedy/1282.py | 0aeb767815ec62b1439482c75e3f15c26f9a4fc9 | []
| no_license | hoang-ng/LeetCode | 60b4e68cbcf54cbe763d1f98a70f52e628ab32fb | 5407c6d858bfa43325363503c31134e560522be3 | refs/heads/master | 2021-04-10T11:34:35.310374 | 2020-07-28T10:22:05 | 2020-07-28T10:22:05 | 248,932,393 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,391 | py | # 1282. Group the People Given the Group Size They Belong To
# There are n people whose IDs go from 0 to n - 1 and each person belongs exactly to one group. Given the array groupSizes of length n telling the group size each person belongs to, return the groups there are and the people's IDs each group includes.
# You can return any solution in any order and the same applies for IDs. Also, it is guaranteed that there exists at least one solution.
# Example 1:
# Input: groupSizes = [3,3,3,3,3,1,3]
# Output: [[5],[0,1,2],[3,4,6]]
# Explanation:
# Other possible solutions are [[2,1,6],[5],[0,4,3]] and [[5],[0,6,2],[4,3,1]].
# Example 2:
# Input: groupSizes = [2,1,3,3,3,2]
# Output: [[1],[0,5],[2,3,4]]
# Constraints:
# groupSizes.length == n
# 1 <= n <= 500
# 1 <= groupSizes[i] <= n
import collections
class Solution(object):
    def groupThePeople(self, groupSizes):
        """Partition person ids 0..n-1 into groups of exactly the size each
        person requested (the input is guaranteed to admit a solution)."""
        by_size = collections.defaultdict(list)
        for person, size in enumerate(groupSizes):
            by_size[size].append(person)
        groups = []
        for size, members in by_size.items():
            # Chop the members into consecutive chunks of exactly `size`.
            for start in range(0, len(members), size):
                groups.append(members[start:start + size])
        return groups
| [
"[email protected]"
]
| |
a728af285352f2bc6175af70b01b5f0761313a71 | acf5a0ea75b92eb8d082f04961a7646d8ccf7b32 | /passpie/database.py | cf67372c689b4ec9ef0d8045fd5348f847a064c1 | [
"MIT"
]
| permissive | mauriciovieira/passpie | 6f9c98ba086bfe10a9d2c964c473507feba22586 | bd0f5cca6ce12fc4469f4007199bef7ab3b8980e | refs/heads/master | 2021-01-18T08:56:23.853489 | 2016-01-26T07:03:26 | 2016-01-26T07:03:26 | 50,439,403 | 0 | 0 | null | 2016-01-26T15:49:43 | 2016-01-26T15:49:43 | null | UTF-8 | Python | false | false | 3,841 | py | from datetime import datetime
import logging
import os
import shutil
from tinydb import TinyDB, Storage, where, Query
import yaml
from .utils import mkdir_open
from .credential import split_fullname, make_fullname
class PasspieStorage(Storage):
    """TinyDB storage backend that persists each credential in its own
    ``<name>/<login>.pass`` YAML file under ``self.path``."""

    extension = ".pass"

    def __init__(self, path):
        super(PasspieStorage, self).__init__()
        self.path = path

    def delete(self, credentials):
        """Remove the files backing *credentials*, pruning directories left empty."""
        for cred in credentials:
            dirname, filename = cred["name"], cred["login"] + self.extension
            credpath = os.path.join(self.path, dirname, filename)
            os.remove(credpath)
            if not os.listdir(os.path.dirname(credpath)):
                shutil.rmtree(os.path.dirname(credpath))

    def read(self):
        """Load every ``*.pass`` file into TinyDB's ``{table: {eid: doc}}`` layout."""
        elements = []
        for rootdir, dirs, files in os.walk(self.path):
            filenames = [f for f in files if f.endswith(self.extension)]
            for filename in filenames:
                docpath = os.path.join(rootdir, filename)
                with open(docpath) as f:
                    # safe_load: these files are plain YAML mappings produced by
                    # `write` below.  Bare `yaml.load` without a Loader can
                    # construct arbitrary Python objects from tagged input and
                    # raises TypeError on PyYAML >= 6.
                    elements.append(yaml.safe_load(f.read()))
        return {"_default":
                {idx: elem for idx, elem in enumerate(elements, start=1)}}

    def write(self, data):
        """Sync *data* to disk: delete credentials no longer present, rewrite the rest."""
        deleted = [c for c in self.read()["_default"].values()
                   if c not in data["_default"].values()]
        self.delete(deleted)

        for eid, cred in data["_default"].items():
            dirname, filename = cred["name"], cred["login"] + self.extension
            credpath = os.path.join(self.path, dirname, filename)
            with mkdir_open(credpath, "w") as f:
                f.write(yaml.dump(dict(cred), default_flow_style=False))
class Database(TinyDB):
    """Credential database backed by :class:`PasspieStorage`.

    NOTE: `update` and `remove` deliberately shadow TinyDB's own methods
    with credential-oriented signatures and delegate to ``self.table()``.
    """

    def __init__(self, path, extension='.pass', storage=PasspieStorage):
        self.path = path
        # Class-level attribute: every PasspieStorage instance shares this
        # extension once a Database has been constructed.
        PasspieStorage.extension = extension
        super(Database, self).__init__(self.path, storage=storage)

    def has_keys(self):
        """Return True if a ``.keys`` file exists inside the database path."""
        return os.path.exists(os.path.join(self.path, '.keys'))

    def credential(self, fullname):
        """Return the single credential matching ``login@name``, or None."""
        login, name = split_fullname(fullname)
        return self.get((where("login") == login) & (where("name") == name))

    def add(self, fullname, password, comment):
        """Insert a new credential; returns it, or None when login is empty."""
        login, name = split_fullname(fullname)
        if login is None:
            logging.error('Cannot add credential with empty login. use "@<name>" syntax')
            return None
        credential = dict(fullname=fullname,
                          name=name,
                          login=login,
                          password=password,
                          comment=comment,
                          modified=datetime.now())
        self.insert(credential)
        return credential

    def update(self, fullname, values):
        """Replace the credential *fullname* with *values* (refreshes timestamp)."""
        # Rebuild fullname from the (possibly changed) login/name in *values*.
        values['fullname'] = make_fullname(values["login"], values["name"])
        values['modified'] = datetime.now()
        self.table().update(values, (where("fullname") == fullname))

    def credentials(self, fullname=None):
        """Return credentials sorted by name+login; filter by *fullname* if given.

        A fullname without a login part (``@name``) matches every login
        under that name.
        """
        if fullname:
            login, name = split_fullname(fullname)
            Credential = Query()
            if login is None:
                creds = self.search(Credential.name == name)
            else:
                creds = self.search((Credential.login == login) & (Credential.name == name))
        else:
            creds = self.all()
        return sorted(creds, key=lambda x: x["name"] + x["login"])

    def remove(self, fullname):
        """Delete the credential whose fullname matches exactly."""
        self.table().remove(where('fullname') == fullname)

    def matches(self, regex):
        """Return credentials whose name, login or comment matches *regex*."""
        Credential = Query()
        credentials = self.search(
            Credential.name.matches(regex) |
            Credential.login.matches(regex) |
            Credential.comment.matches(regex)
        )
        return sorted(credentials, key=lambda x: x["name"] + x["login"])
| [
"[email protected]"
]
| |
11fad38dc34588ed44dd250c8b3bee034cee5107 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03495/s959914177.py | 79888ecf7a1c1e5617d415a8a5f3fbe869a319b8 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | from collections import Counter
N,K=map(int,input().split())
A=list(map(int,input().split()))
c = Counter(A)
val = sorted(c.values())
if len(val) <= K:
print(0)
exit()
print(sum(val[:len(val)-K])) | [
"[email protected]"
]
| |
9728d3469911e999ed53abd170b3c8608947e880 | caaf9046de59559bb92641c46bb8ab00f731cb46 | /Configuration/Generator/python/Upsilon1SToMuMu_forSTEAM_13TeV_TuneCUETP8M1_cfi.py | eaeffad1236fe5b17d942a6e9bfb79db3a17feaa | []
| no_license | neumeist/cmssw | 7e26ad4a8f96c907c7373291eb8df205055f47f0 | a7061201efe9bc5fa3a69069db037d572eb3f235 | refs/heads/CMSSW_7_4_X | 2020-05-01T06:10:08.692078 | 2015-01-11T22:57:32 | 2015-01-11T22:57:32 | 29,109,257 | 1 | 1 | null | 2015-01-11T22:56:51 | 2015-01-11T22:56:49 | null | UTF-8 | Python | false | false | 3,453 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
# Pythia8 configuration: Upsilon(1S) -> mu+ mu- production at 13 TeV with the
# CUETP8M1 tune.  Events are kept only if they contain a decayed Y(1S)
# (PDG id 553) and a muon pair inside the kinematic window defined below.
source = cms.Source("EmptySource")

generator = cms.EDFilter("Pythia8GeneratorFilter",
    pythiaPylistVerbosity = cms.untracked.int32(0),
    filterEfficiency = cms.untracked.double(0.53),
    pythiaHepMCVerbosity = cms.untracked.bool(False),
    crossSection = cms.untracked.double(9090000.0),
    comEnergy = cms.double(13000.0),
    maxEventsToPrint = cms.untracked.int32(0),
    PythiaParameters = cms.PSet(
        pythia8CommonSettingsBlock,
        pythia8CUEP8M1SettingsBlock,
        processParameters = cms.vstring(
            'Bottomonium:states(3S1) = 553', # filter on 553 and prevents other onium states decaying to 553, so we should turn the others off
            'Bottomonium:O(3S1)[3S1(1)] = 9.28',
            'Bottomonium:O(3S1)[3S1(8)] = 0.15',
            'Bottomonium:O(3S1)[1S0(8)] = 0.02',
            'Bottomonium:O(3S1)[3P0(8)] = 0.02',
            'Bottomonium:gg2bbbar(3S1)[3S1(1)]g = on',
            'Bottomonium:gg2bbbar(3S1)[3S1(8)]g = on',
            'Bottomonium:qg2bbbar(3S1)[3S1(8)]q = on',
            'Bottomonium:qqbar2bbbar(3S1)[3S1(8)]g = on',
            'Bottomonium:gg2bbbar(3S1)[1S0(8)]g = on',
            'Bottomonium:qg2bbbar(3S1)[1S0(8)]q = on',
            'Bottomonium:qqbar2bbbar(3S1)[1S0(8)]g = on',
            'Bottomonium:gg2bbbar(3S1)[3PJ(8)]g = on',
            'Bottomonium:qg2bbbar(3S1)[3PJ(8)]q = on',
            'Bottomonium:qqbar2bbbar(3S1)[3PJ(8)]g = on',
            '553:onMode = off', # ignore cross-section re-weighting (CSAMODE=6) since selecting wanted decay mode
            '553:onIfAny = 13',
            'PhaseSpace:pTHatMin = 20.',
            ),
        parameterSets = cms.vstring('pythia8CommonSettings',
                                    'pythia8CUEP8M1Settings',
                                    'processParameters',
                                    )
        )
    )

# Require a generated Y(1S) (ParticleID 553) with Status 2 (decayed); the
# eta/pt cuts are effectively open.
oniafilter = cms.EDFilter("PythiaFilter",
    Status = cms.untracked.int32(2),
    MaxEta = cms.untracked.double(1000.0),
    MinEta = cms.untracked.double(-1000.0),
    MinPt = cms.untracked.double(0.0),
    ParticleID = cms.untracked.int32(553)
)

# Require a muon pair (ParticleID 13/13, ParticleCharge = -1, i.e. opposite
# sign) with |eta| < 2.5 and invariant mass between 5 and 20 GeV.
mumugenfilter = cms.EDFilter("MCParticlePairFilter",
    Status = cms.untracked.vint32(1, 1),
    MinPt = cms.untracked.vdouble(0.5, 0.5),
    MinP = cms.untracked.vdouble(2.7, 2.7),
    MaxEta = cms.untracked.vdouble(2.5, 2.5),
    MinEta = cms.untracked.vdouble(-2.5, -2.5),
    MinInvMass = cms.untracked.double(5.0),
    MaxInvMass = cms.untracked.double(20.0),
    ParticleCharge = cms.untracked.int32(-1),
    ParticleID1 = cms.untracked.vint32(13),
    ParticleID2 = cms.untracked.vint32(13)
)

# Generation + both generator-level filters, applied in sequence.
ProductionFilterSequence = cms.Sequence(generator*oniafilter*mumugenfilter)
| [
"[email protected]"
]
| |
0a3953d0402b818210f35ac3401f274eb0d96b78 | cae8adc520ee71ffd9cfc82418152b4ec63f9302 | /template_wsgi/demo1.py | 98b1b0acbce69b38b641792d1f5dcb3850bfeb56 | []
| no_license | dong-c-git/WSGIServer | 55111c04f4bbefe239949ddaea16c71221b7f795 | 1f0b58977e2a951f3c6dec335854dd9d6e31cdfd | refs/heads/master | 2020-08-01T17:03:30.307962 | 2019-11-09T01:45:30 | 2019-11-09T01:45:30 | 211,054,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | #coding:utf-8
import time
def application(environ, start_response):
    """Minimal WSGI application: echoes the request environ plus a greeting.

    Parameters
    ----------
    environ : dict
        WSGI request environment.
    start_response : callable
        WSGI callback used to emit the status line and response headers.

    Returns
    -------
    list of bytes
        Single-chunk response body, as PEP 3333 requires.
    """
    status = '200 OK'
    response_headers = [('Content-Type', 'text/html')]
    start_response(status, response_headers)
    body = str(environ) + '==Hello world from a simple WSGI application!-->%s\n' % time.ctime()
    # PEP 3333: the returned iterable must yield *bytes*.  Returning the raw
    # str would be iterated one character at a time (and rejected outright by
    # Python 3 WSGI servers).
    return [body.encode('utf-8')]
| [
"[email protected]"
]
| |
f8202764eacbf21b84e1afab879c8f6bea7c9820 | ec6f83a3636fdb0d6f2266c56b58ac294eb2a945 | /ntut python/associationRule.py | 5c174b680a9cb0dfb518b5d31898b1cfb5313f2c | []
| no_license | jack20951948/Python-Learning | f65c2aacea6cbe61a8be2539f2959202546adb7d | d683790ba47b73c6360f5f804700c664d40777c9 | refs/heads/main | 2023-06-26T03:43:47.395088 | 2021-07-18T08:00:28 | 2021-07-18T08:00:28 | 387,111,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,601 | py | import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import networkx as nx
from apyori import apriori
#pip install apriori
from wordcloud import WordCloud
#pip install wordcloud
def testTensorflow():
    """Smoke-test the TensorFlow install by evaluating a constant (TF1 session API)."""
    greeting = tf.constant('hello tensorflow!')
    session = tf.Session()
    print("hello")
    print(session.run(greeting))
#conda install -c conda-forge wordcloud
#pip install wordcloud
def wordCloud():
    """Render a word cloud of every product across the sample transactions."""
    plt.figure(figsize=(9, 6))
    baskets = np.array([
        ['Milk', 'Bread', 'Apple'],
        ['Milk', 'Bread'],
        ['Milk', 'Bread', 'Apple', 'Banana'],
        ['Milk', 'Banana', 'Rice', 'Chicken'],
        ['Apple', 'Rice', 'Chicken'],
        ['Milk', 'Bread', 'Banana'],
        ['Rice', 'Chicken'],
        ['Bread', 'Apple', 'Chicken'],
        ['Bread', 'Chicken'],
        ['Apple', 'Banana']])
    # Flatten the ragged transaction array into one space-separated string.
    products = ' '.join(str(item) for basket in baskets for item in basket)
    print(products)
    cloud = WordCloud(relative_scaling=1.0, stopwords={}).generate(products)
    plt.imshow(cloud)
    plt.axis("off")
    plt.show()
def draw(df):
    """Draw the two-item rule sets (rows 6..18 of *df*) as a product graph."""
    plt.style.use('ggplot')
    plt.figure(figsize=(9, 6))
    # Rows 6..18 hold the itemsets with exactly two members.
    print(df.iloc[6:19][['items', 'support']])
    pair_sets = df.iloc[6:19]['items']
    graph = nx.Graph()
    graph.add_edges_from(pair_sets)
    layout = nx.spring_layout(graph)
    nx.draw(graph, layout, font_size=16, with_labels=False, edge_color='green',
            node_size=800,
            node_color=['red', 'green', 'blue', 'cyan', 'orange', 'magenta'])
    # Nudge every label slightly above its node.
    for node in layout:
        layout[node][1] += 0.07
    nx.draw_networkx_labels(graph, layout)
    plt.show()
def simple_bar_chart(support, products):
    """Horizontal bar chart of *support* values, one bar per product set."""
    labels = np.array(products)
    colors = ['#008000', '#808000', '#FFFF00', '#000000', '#FF0000',
              '#00FF00', '#0000FF', '#008080', '#aa22ff', '#aa22ff',
              '#dd0022', '#ff00cc', '#eeaa22', '#22bbaa', '#C0C0C0']
    positions = np.arange(len(labels))
    values = np.array(support)
    plt.barh(positions, values, color=colors, align='center', edgecolor='green')
    plt.yticks(positions, labels)
    plt.ylabel('Products', fontsize=18)
    plt.xlabel('Support', fontsize=18)
    plt.title('Consumer Buying Behaviour\n', fontsize=20)
    plt.show()
def testApriori_s():
    """Run apriori on a small in-memory basket sample and plot the results."""
    baskets = np.array([
        ['Milk', 'Bread', 'Apple'],
        ['Milk', 'Bread'],
        ['Milk', 'Bread', 'Apple', 'Banana'],
        ['Milk', 'Banana', 'Rice', 'Chicken'],
        ['Apple', 'Rice', 'Chicken'],
        ['Milk', 'Bread', 'Banana'],
        ['Rice', 'Chicken'],
        ['Bread', 'Apple', 'Chicken'],
        ['Bread', 'Chicken'],
        ['Apple', 'Banana']])
    for basket in baskets:
        print(basket)
    print("\n\n")
    rules = list(apriori(baskets))
    df = pd.DataFrame(rules)
    df.to_csv("appriori_results.csv")  # save for detailed offline inspection
    print(df.head())  # first 5 itemsets only
    draw(df)
    # First 19 itemsets: support as a percentage vs. the itemset labels.
    support = df.iloc[0:19]['support'] * 100
    products = df.iloc[0:19]['items']
    simple_bar_chart(support, products)
def testApriori():
    """Mine association rules from the on-disk store_data.csv basket file and
    print each rule with its support, confidence and lift.

    NOTE(review): the row count (7501) and column count (20) are hard-coded to
    match this particular CSV -- confirm against the data file.
    """
    records = []
    store_data = pd.read_csv('e:\\Datasets\\store_data.csv', header=None)
    #print(store_data)
    print(store_data.head())
    # preprocessing:
    # convert our pandas dataframe into a list of lists
    for i in range(0, 7501):
        #records.append([str(store_data.values[i,j]) for j in range(0, 20)])
        records.append([str(store_data.values[i,j]) for j in range(0, 20) if str(store_data.values[i,j]) != 'nan'])
        # remove NaN value
    #print(records)
    association_rules = apriori(records, min_support=0.0045, min_confidence=0.2, min_lift=3, min_length=2)
    #min_length: at least 2 product in the rules
    association_results = list(association_rules)
    print(len(association_results))
    #print(association_results)
    print(association_results[0])
    for item in association_results:
        # first index of the inner list
        # Contains base item and add item
        pair = item[0]
        items = [x for x in pair]
        print("Rule: " + items[0] + " -> " + items[1])
        # second index of the inner list: the rule's support
        print("Support: " + str(item[1]))
        # third index of the list located at 0th
        # of the third index of the inner list (apyori ordered statistics)
        print("Confidence: " + str(item[2][0][2]))
        print("Lift: " + str(item[2][0][3]))
        print("=====================================")
def main():
    """Run the apriori demo on the CSV dataset, then show the word cloud."""
    testApriori()
    #testApriori_s()
    wordCloud()

main()  # runs on import/execution of this module
"[email protected]"
]
| |
fbee478ecc1dd477bdebf5a09cd472cb2d0ebc20 | c42a085521cec895fac0021eb1638d6f077eadf7 | /PYTHON_FUNDAMENTALS_May_August_2020/Exersice_Objects_And_Classes_26_06_2020/Storage.py | 88a44072c4d4c8e1b26fab959fea06bf9c937ddf | []
| no_license | vasil-panoff/Python_Fundamentals_SoftUni_May_2020 | f645ce85efa6db047b52a8b63d411d2e5bd5bd9a | daf1a27ff1a4684d51cf875ee0a4c0706a1a4404 | refs/heads/main | 2023-01-06T22:20:30.151249 | 2020-11-03T22:56:24 | 2020-11-03T22:56:24 | 309,818,123 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | class Storage:
def __init__(self, capacity):
self.capacity = capacity
self.storage = []
def add_product(self, product):
if len(self.storage) < self.capacity:
self.storage.append(product)
def get_products(self):
return self.storage
storage = Storage(4)
# Only the first four products fit; "bread" exceeds capacity and is dropped.
for product in ("apple", "banana", "potato", "tomato", "bread"):
    storage.add_product(product)
print(storage.get_products())
"[email protected]"
]
| |
90a954e345f531880f8bfee7f4c958164933934e | aeac5b3cc7a34e3eeaef5d41e8d8ef7f4b3b38dc | /testlib/test_transforms.py | cdc9ecac6b175b53f53b6ebf7dd8e8589d1dffe3 | [
"MIT"
]
| permissive | venkatakrishnareddymallu/gramex | 7377f68d7207248b98f846e54a9c458f4300d30a | 725f7564e607f22fc43d06d639aeaf785500f284 | refs/heads/master | 2023-07-28T14:03:20.452158 | 2021-09-07T11:12:23 | 2021-09-07T11:12:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,608 | py | import io
import os
import yaml
import inspect
import unittest
from dis import dis
from types import GeneratorType
from tornado.gen import coroutine, Task
from orderedattrdict import AttrDict
from orderedattrdict.yamlutils import AttrDictYAMLLoader
from gramex.transforms import build_transform, flattener, badgerfish, template, once
from gramex.cache import reload_module
from nose.tools import eq_, assert_raises
folder = os.path.dirname(os.path.abspath(__file__))  # directory holding this test file
def yaml_parse(text):
    '''Parse YAML *text*, preserving key order via AttrDict mappings.'''
    return yaml.load(text, Loader=AttrDictYAMLLoader)
def remove(path):
    '''Delete *path* if it exists; silently do nothing otherwise.'''
    if not os.path.exists(path):
        return
    os.unlink(path)
@coroutine
def gen_str(val):
    '''Sample coroutine method'''
    # Task wraps str(val) in callback style so the transform yields a Future.
    yield Task(str, val)
def eqfn(actual, expected):
    '''Checks if two functions are the same'''
    # The assertion message carries the calling test method's name.
    caller = inspect.stack()[1][3]
    a_code = actual.__code__
    e_code = expected.__code__
    a_bytes = a_code.co_code
    e_bytes = e_code.co_code
    if a_bytes != e_bytes:
        # Print the disassembled code to make debugging easier
        print('\nActual')            # noqa
        dis(a_bytes)
        print(a_code.co_names)       # noqa
        print('Expected')            # noqa
        dis(e_bytes)
        print(e_code.co_names)       # noqa
    eq_(a_bytes, e_bytes, '%s: code mismatch' % caller)
    eq_(a_code.co_argcount, e_code.co_argcount,
        '%s: argcount %d != %d' % (caller, a_code.co_argcount, e_code.co_argcount))
    eq_(a_code.co_nlocals, e_code.co_nlocals,
        '%s: nlocals %d != %d' % (caller, a_code.co_nlocals, e_code.co_nlocals))
class BuildTransform(unittest.TestCase):
    '''Test build_transform CODE output'''
    # Scratch module rewritten on disk by the cache/invalid-change tests below.
    dummy = os.path.join(folder, 'dummy.py')
    files = set([dummy])

    def check_transform(self, transform, yaml_code, vars=None, cache=True, iter=True, doc=None):
        '''Build a transform from *yaml_code* and assert its compiled bytecode
        matches the hand-written *transform* (and, optionally, its docstring).'''
        fn = build_transform(yaml_parse(yaml_code), vars=vars, cache=cache, iter=iter)
        eqfn(fn, transform)
        if doc is not None:
            eq_(fn.__doc__, doc)
        return fn

    def test_invalid_function_raises_error(self):
        # Missing/empty/statement-like "function" values must be rejected.
        with assert_raises(KeyError):
            build_transform({})
        with assert_raises(KeyError):
            build_transform({'function': ''})
        with assert_raises(ValueError):
            build_transform({'function': 'x = 1'})
        with assert_raises(ValueError):
            build_transform({'function': 'x(); y()'})
        with assert_raises(ValueError):
            build_transform({'function': 'import json'})

    def test_expr(self):
        def transform(x=0):
            result = x + 1
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: x + 1', vars={'x': 0}, doc='x + 1')

        # iter=False returns the raw result instead of wrapping it in a list.
        def transform(x=0):
            result = x + 1
            return result
        self.check_transform(transform, 'function: x + 1', vars={'x': 0}, iter=False, doc='x + 1')

        def transform():
            result = "abc"
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''function: '"abc"' ''', vars={}, doc='"abc"')

        def transform():
            import gramex.cache
            import pandas
            result = gramex.cache.open('x', pandas.read_csv).to_html()
            return result if isinstance(result, GeneratorType) else [result, ]
        # This is a complex function. It's not clear whether we should pick up the docs from
        # to_html() or gramex.cache.open(). Let the user specify the docs
        fn = 'function: gramex.cache.open("x", pandas.read_csv).to_html()'
        self.check_transform(transform, fn, vars={}, doc=None)

        def transform(s=None):
            result = 1 if "windows" in s.lower() else 2 if "linux" in s.lower() else 0
            return result if isinstance(result, GeneratorType) else [result, ]
        fn = 'function: 1 if "windows" in s.lower() else 2 if "linux" in s.lower() else 0'
        self.check_transform(transform, fn, vars={'s': None})

        def transform(_val):
            result = condition(1, 0, -1)  # noqa: this is in gramex.transforms
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: condition(1, 0, -1)')

        def transform(_val):
            result = str.upper(_val)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: str.upper')
        self.check_transform(transform, 'function: str.upper(_val)', doc=str.upper.__doc__)

    def test_fn(self):
        def transform(_val):
            result = len(_val)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: len
        ''')

    def test_fn_no_args(self):
        def transform():
            result = max(1, 2)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: max
            args: [1, 2]
        ''', vars={})
        self.check_transform(transform, 'function: max(1, 2)', vars={})

    def test_fn_args(self):
        def transform(_val):
            result = max(1, 2)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: max
            args: [1, 2]
        ''')
        self.check_transform(transform, 'function: max(1, 2)')

        def transform(_val):
            result = len('abc')
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: len
            args: abc
        ''')
        self.check_transform(transform, 'function: len("abc")')

        def transform(_val):
            result = range(10)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: range
            args: 10
        ''')
        self.check_transform(transform, 'function: range(10)')

    def test_fn_args_var(self):
        # "=x" in YAML refers to the variable x declared in vars.
        def transform(x=1, y=2):
            result = max(x, y, 3)
            return result if isinstance(result, GeneratorType) else [result, ]
        vars = AttrDict([('x', 1), ('y', 2)])
        self.check_transform(transform, '''
            function: max
            args:
                - =x
                - =y
                - 3
        ''', vars=vars)
        self.check_transform(transform, 'function: max(x, y, 3)', vars=vars)

        def transform(x=1, y=2):
            result = x
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: x', vars=vars)

        def transform(x=1, y=2):
            result = x.real
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: x.real', vars=vars)

        def transform(x=1, y=2):
            result = x.conjugate()
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: x.conjugate()', vars=vars)

        def transform(x=1, y=2):
            result = x.to_bytes(2, 'big')
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, 'function: x.to_bytes(2, "big")', vars=vars)

    def test_fn_kwargs(self):
        def transform(_val):
            result = dict(_val, a=1, b=2)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: dict
            kwargs: {a: 1, b: 2}
        ''')
        self.check_transform(transform, 'function: dict(_val, a=1, b=2)')

    def test_fn_kwargs_complex(self):
        def transform(_val):
            result = dict(_val, a=[1, 2], b=AttrDict([('b1', 'x'), ('b2', 'y')]))
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: dict
            kwargs:
                a: [1, 2]
                b:
                    b1: x
                    b2: y
        ''')
        self.check_transform(transform, '''
            function: 'dict(_val, a=[1, 2], b=AttrDict([("b1", "x"), ("b2", "y")]))'
        ''')

    def test_fn_kwargs_var(self):
        # "==4" escapes a literal "=4" string; "=x" is a variable reference.
        def transform(x=1, y=2):
            result = dict(x, y, a=x, b=y, c=3, d='=4')
            return result if isinstance(result, GeneratorType) else [result, ]
        vars = AttrDict([('x', 1), ('y', 2)])
        self.check_transform(transform, '''
            function: dict
            kwargs: {a: =x, b: =y, c: 3, d: ==4}
        ''', vars=vars)
        self.check_transform(transform, 'function: dict(x, y, a=x, b=y, c=3, d="=4")', vars=vars)

    def test_fn_args_kwargs(self):
        def transform(_val):
            result = format(1, 2, a=3, b=4, c=5, d='=6')
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: format
            args: [1, 2]
            kwargs: {a: 3, b: 4, c: 5, d: ==6}
        ''')
        self.check_transform(transform, 'function: format(1, 2, a=3, b=4, c=5, d="=6")')

    def test_fn_args_kwargs_var(self):
        def transform(x=1, y=2):
            result = format(x, y, a=x, b=y, c=3)
            return result if isinstance(result, GeneratorType) else [result, ]
        vars = AttrDict([('x', 1), ('y', 2)])
        self.check_transform(transform, '''
            function: format
            args: [=x, =y]
            kwargs: {a: =x, b: =y, c: =3}
        ''', vars=vars)
        self.check_transform(transform, 'function: format(x, y, a=x, b=y, c=3)', vars=vars)

    def test_coroutine(self):
        def transform(_val):
            import testlib.test_transforms
            result = testlib.test_transforms.gen_str(_val)
            return result if isinstance(result, GeneratorType) else [result, ]
        self.check_transform(transform, '''
            function: testlib.test_transforms.gen_str
        ''')
        self.check_transform(transform, 'function: testlib.test_transforms.gen_str(_val)')

    def test_cache_change(self):
        # With cache=False the transform must re-read the module on each call.
        remove(self.dummy.replace('.py', '.pyc'))
        with io.open(self.dummy, 'w', encoding='utf-8') as handle:
            handle.write('def value():\n\treturn 1\n')

        def transform(_val):
            import testlib.dummy
            reload_module(testlib.dummy)
            result = testlib.dummy.value()
            return result if isinstance(result, GeneratorType) else [result, ]
        fn = self.check_transform(transform, '''
            function: testlib.dummy.value
            args: []
        ''', cache=False)
        eq_(fn(), [1])
        fn = self.check_transform(transform, 'function: testlib.dummy.value()', cache=False)
        eq_(fn(), [1])

        remove(self.dummy.replace('.py', '.pyc'))
        with io.open(self.dummy, 'w', encoding='utf-8') as handle:
            handle.write('def value():\n\treturn 100\n')
        eq_(fn(), [100])
        fn = self.check_transform(transform, 'function: testlib.dummy.value()', cache=False)
        eq_(fn(), [100])

    def test_invalid_change(self):
        # Errors inside the reloaded module surface on call, then clear once fixed.
        fn = build_transform(yaml_parse('function: testlib.dummy.invalid\nargs: []'))
        remove(self.dummy.replace('.py', '.pyc'))
        with io.open(self.dummy, 'w', encoding='utf-8') as handle:
            handle.write('def invalid():\n\tsyntax error\n')
        with assert_raises(SyntaxError):
            fn()
        remove(self.dummy.replace('.py', '.pyc'))
        with io.open(self.dummy, 'w', encoding='utf-8') as handle:
            handle.write('1/0\ndef invalid():\n\treturn 100\n')
        with assert_raises(ZeroDivisionError):
            fn()
        remove(self.dummy.replace('.py', '.pyc'))
        with io.open(self.dummy, 'w', encoding='utf-8') as handle:
            handle.write('def invalid():\n\treturn 100\n')
        eq_(fn(), [100])

    def test_import_levels(self):
        def transform(_val):
            result = str(_val)
            return result if isinstance(result, GeneratorType) else [result, ]
        fn = self.check_transform(transform, 'function: str')
        eq_(fn(b'abc'), [str(b'abc')])

        def transform(content):
            result = str.__add__(content, '123')
            return result if isinstance(result, GeneratorType) else [result, ]
        fn = self.check_transform(transform, '''
            function: str.__add__
            args: [=content, '123']
        ''', vars=AttrDict(content=None))
        eq_(fn('abc'), ['abc123'])

        def transform(handler):
            result = str.endswith(handler.current_user.user, 'ta')
            return result if isinstance(result, GeneratorType) else [result, ]
        fn = self.check_transform(transform, '''
            function: str.endswith
            args: [=handler.current_user.user, 'ta']
        ''', vars=AttrDict(handler=None))

    @classmethod
    def tearDownClass(cls):
        # Remove temporary files
        for path in cls.files:
            if os.path.exists(path):
                os.unlink(path)
class Badgerfish(unittest.TestCase):
    'Test gramex.transforms.badgerfish'

    # The original tests did `result = yield badgerfish(...)` inside plain
    # unittest methods.  Calling such a method just creates a generator that
    # unittest never iterates, so the assertions silently never ran.  Drive
    # the coroutine to completion explicitly instead.

    def test_transform(self):
        from tornado.ioloop import IOLoop
        result = IOLoop.current().run_sync(lambda: badgerfish('''
            html:
              "@lang": en
              p: text
              div:
                p: text
        '''))
        eq_(
            result,
            '<!DOCTYPE html>\n<html lang="en"><p>text</p><div><p>text</p></div></html>')

    def test_mapping(self):
        from tornado.ioloop import IOLoop
        result = IOLoop.current().run_sync(lambda: badgerfish('''
            html:
              json:
                x: 1
                y: 2
        ''', mapping={
            'json': {
                'function': 'json.dumps',
                'kwargs': {'separators': [',', ':']},
            }
        }))
        eq_(
            result,
            '<!DOCTYPE html>\n<html><json>{"x":1,"y":2}</json></html>')
class Template(unittest.TestCase):
    'Test gramex.transforms.template'

    def check(self, content, expected, **kwargs):
        '''Render *content* through the template transform and compare.

        The original `result = yield template(...)` turned this helper into a
        generator that was never consumed, so no assertion ever executed.
        Run the coroutine synchronously instead.
        '''
        from tornado.ioloop import IOLoop
        result = IOLoop.current().run_sync(lambda: template(content, **kwargs))
        eq_(result, expected)

    def test_template(self):
        self.check('{{ 1 }}', '1')
        self.check('{{ 1 + 2 }}', '3')
        self.check('{{ x + y }}', '3', x=1, y=2)
class Flattener(unittest.TestCase):
    '''Test gramex.transforms.flattener: dotted-path field extraction.'''

    def test_dict(self):
        # '' / True map to the whole source object; dotted keys drill down.
        fieldmap = {
            'all1': '',
            'all2': True,
            'x': 'x',
            'y.z': 'y.z',
            'z.1': 'z.1',
        }
        flat = flattener(fieldmap)
        src = {'x': 'X', 'y': {'z': 'Y.Z'}, 'z': ['Z.0', 'Z.1']}
        out = flat(src)
        eq_(out.keys(), fieldmap.keys())
        eq_(out['all1'], src)
        eq_(out['all2'], src)
        eq_(out['x'], src['x'])
        eq_(out['y.z'], src['y']['z'])
        eq_(out['z.1'], src['z'][1])

    def test_list(self):
        # Integer values must be interpreted as array indices
        fieldmap = {
            '0': 0,
            '1': '1',
            '2.0': '2.0',
        }
        flat = flattener(fieldmap)
        src = [0, 1, [2]]
        out = flat(src)
        eq_(out.keys(), fieldmap.keys())
        eq_(out['0'], src[0])
        eq_(out['1'], src[1])
        eq_(out['2.0'], src[2][0])

    def test_invalid(self):
        # None of these fields are valid. Don't raise an error, just ignore
        fieldmap = {
            0: 'int-invalid',
            ('a', 'b'): 'tuple-invalid',
            'false-invalid': False,
            'none-invalid': None,
            'float-invalid': 1.0,
            'dict-invalid': {},
            'tuple-invalid': tuple(),
            'set-invalid': set(),
            'list-invalid': [],
        }
        out = flattener(fieldmap)({})
        eq_(len(out.keys()), 0)
        fieldmap = {
            0.0: 'float-invalid',
        }
        out = flattener(fieldmap)({})
        eq_(len(out.keys()), 0)

    def test_default(self):
        # Missing paths fall back to the supplied default for every field.
        fieldmap = {'x': 'x', 'y.a': 'y.a', 'y.1': 'y.1', 'z.a': 'z.a', '1': 1}
        default = 1
        flat = flattener(fieldmap, default=default)
        out = flat({'z': {}, 'y': []})
        eq_(out, {key: default for key in fieldmap})
class TestOnce(unittest.TestCase):
    '''Test gramex.transforms.once: first call per key is True, then False.'''

    def test_once(self):
        # Non-ASCII keys exercise key encoding; _clear resets the key's state.
        for key in ['►', 'λ', '►', 'λ']:
            eq_(once(key, _clear=True), None)
            eq_(once(key), True)
            eq_(once(key), False)
            eq_(once(key), False)
| [
"[email protected]"
]
| |
866d1c051acc929fded0b916a5dc384cd729456e | 29f18e8ddde0379cef7fa00b1a50058be3cafa79 | /numba/tests/test_make_function_to_jit_function.py | 29161fff594545fffceee89dbe9390cb349f1dc4 | [
"LicenseRef-scancode-secret-labs-2011",
"BSD-3-Clause",
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-free-unknown",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0",
"BSD-2-Clause"
]
| permissive | numba/numba | 9a8345ff5f7d57f0ffec40e39941ebf2684df0d1 | 46059957ad416e68476d1e5f32ccd59f7d5df2bb | refs/heads/main | 2023-08-09T22:29:38.170300 | 2023-08-07T15:00:27 | 2023-08-07T15:00:27 | 3,659,275 | 8,247 | 1,151 | BSD-2-Clause | 2023-09-13T14:43:48 | 2012-03-08T11:12:43 | Python | UTF-8 | Python | false | false | 8,288 | py | from numba import njit
from numba.core import errors
from numba.core.extending import overload
import numpy as np
import unittest
@njit
def consumer(func, *args):
    """JIT-compiled helper that calls the escaped closure *func* with *args*."""
    return func(*args)
@njit
def consumer2arg(func1, func2):
    """JIT-compiled helper that passes one escaped closure into another."""
    return func2(func1)
# Module-level constant captured as a freevar by closures in the tests below.
_global = 123
class TestMakeFunctionToJITFunction(unittest.TestCase):
"""
This tests the pass that converts ir.Expr.op == make_function (i.e. closure)
into a JIT function.
"""
# NOTE: testing this is a bit tricky. The function receiving a JIT'd closure
# must also be under JIT control so as to handle the JIT'd closure
# correctly, however, in the case of running the test implementations in the
# interpreter, the receiving function cannot be JIT'd else it will receive
# the Python closure and then complain about pyobjects as arguments.
# The way around this is to use a factory function to close over either the
# jitted or standard python function as the consumer depending on context.
def test_escape(self):
def impl_factory(consumer_func):
def impl():
def inner():
return 10
return consumer_func(inner)
return impl
cfunc = njit(impl_factory(consumer))
impl = impl_factory(consumer.py_func)
self.assertEqual(impl(), cfunc())
def test_nested_escape(self):
def impl_factory(consumer_func):
def impl():
def inner():
return 10
def innerinner(x):
return x()
return consumer_func(inner, innerinner)
return impl
cfunc = njit(impl_factory(consumer2arg))
impl = impl_factory(consumer2arg.py_func)
self.assertEqual(impl(), cfunc())
def test_closure_in_escaper(self):
def impl_factory(consumer_func):
def impl():
def callinner():
def inner():
return 10
return inner()
return consumer_func(callinner)
return impl
cfunc = njit(impl_factory(consumer))
impl = impl_factory(consumer.py_func)
self.assertEqual(impl(), cfunc())
def test_close_over_consts(self):
def impl_factory(consumer_func):
def impl():
y = 10
def callinner(z):
return y + z + _global
return consumer_func(callinner, 6)
return impl
cfunc = njit(impl_factory(consumer))
impl = impl_factory(consumer.py_func)
self.assertEqual(impl(), cfunc())
def test_close_over_consts_w_args(self):
def impl_factory(consumer_func):
def impl(x):
y = 10
def callinner(z):
return y + z + _global
return consumer_func(callinner, x)
return impl
cfunc = njit(impl_factory(consumer))
impl = impl_factory(consumer.py_func)
a = 5
self.assertEqual(impl(a), cfunc(a))
def test_with_overload(self):
def foo(func, *args):
nargs = len(args)
if nargs == 1:
return func(*args)
elif nargs == 2:
return func(func(*args))
@overload(foo)
def foo_ol(func, *args):
# specialise on the number of args, as per `foo`
nargs = len(args)
if nargs == 1:
def impl(func, *args):
return func(*args)
return impl
elif nargs == 2:
def impl(func, *args):
return func(func(*args))
return impl
def impl_factory(consumer_func):
def impl(x):
y = 10
def callinner(*z):
return y + np.sum(np.asarray(z)) + _global
# run both specialisations, 1 arg, and 2 arg.
return foo(callinner, x), foo(callinner, x, x)
return impl
cfunc = njit(impl_factory(consumer))
impl = impl_factory(consumer.py_func)
a = 5
self.assertEqual(impl(a), cfunc(a))
def test_basic_apply_like_case(self):
def apply(array, func):
return func(array)
@overload(apply)
def ov_apply(array, func):
return lambda array, func: func(array)
def impl(array):
def mul10(x):
return x * 10
return apply(array, mul10)
cfunc = njit(impl)
a = np.arange(10)
np.testing.assert_allclose(impl(a), cfunc(a))
@unittest.skip("Needs option/flag inheritance to work")
def test_jit_option_inheritance(self):
def impl_factory(consumer_func):
def impl(x):
def inner(val):
return 1 / val
return consumer_func(inner, x)
return impl
cfunc = njit(error_model='numpy')(impl_factory(consumer))
impl = impl_factory(consumer.py_func)
a = 0
self.assertEqual(impl(a), cfunc(a))
# this needs true SSA to be able to work correctly, check error for now
def test_multiply_defined_freevar(self):
@njit
def impl(c):
if c:
x = 3
def inner(y):
return y + x
r = consumer(inner, 1)
else:
x = 6
def inner(y):
return y + x
r = consumer(inner, 2)
return r
with self.assertRaises(errors.TypingError) as e:
impl(1)
self.assertIn("Cannot capture a constant value for variable",
str(e.exception))
def test_non_const_in_escapee(self):
@njit
def impl(x):
z = np.arange(x)
def inner(val):
return 1 + z + val # z is non-const freevar
return consumer(inner, x)
with self.assertRaises(errors.TypingError) as e:
impl(1)
self.assertIn("Cannot capture the non-constant value associated",
str(e.exception))
    def test_escape_with_kwargs(self):
        """Escaping closure with keyword defaults, called with and without them."""
        def impl_factory(consumer_func):
            def impl():
                t = 12
                def inner(a, b, c, mydefault1=123, mydefault2=456):
                    z = 4
                    return mydefault1 + mydefault2 + z + t + a + b + c
                # this is awkward, top and tail closure inlining with escapees
                # in the middle that do/don't have defaults.
                return (inner(1, 2, 5, 91, 53),
                        consumer_func(inner, 1, 2, 3, 73),
                        consumer_func(inner, 1, 2, 3,),
                        inner(1, 2, 4))
            return impl
        # NOTE(review): 'consumer' is defined outside this excerpt.
        cfunc = njit(impl_factory(consumer))
        impl = impl_factory(consumer.py_func)
        np.testing.assert_allclose(impl(), cfunc())
    def test_escape_with_kwargs_override_kwargs(self):
        """Consumer overrides the escapee's keyword defaults at the call site."""
        @njit
        def specialised_consumer(func, *args):
            x, y, z = args  # unpack to avoid `CALL_FUNCTION_EX`
            a = func(x, y, z, mydefault1=1000)
            b = func(x, y, z, mydefault2=1000)
            c = func(x, y, z, mydefault1=1000, mydefault2=1000)
            return a + b + c
        def impl_factory(consumer_func):
            def impl():
                t = 12
                def inner(a, b, c, mydefault1=123, mydefault2=456):
                    z = 4
                    return mydefault1 + mydefault2 + z + t + a + b + c
                # this is awkward, top and tail closure inlining with escapees
                # in the middle that get defaults specified in the consumer
                return (inner(1, 2, 5, 91, 53),
                        consumer_func(inner, 1, 2, 11),
                        consumer_func(inner, 1, 2, 3,),
                        inner(1, 2, 4))
            return impl
        cfunc = njit(impl_factory(specialised_consumer))
        impl = impl_factory(specialised_consumer.py_func)
        np.testing.assert_allclose(impl(), cfunc())
# Standard unittest entry point when the module is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
61c12fba05665362085355b586a72a95a6cb9038 | 65a3f548503cd1bdd9a429704aec630c0a507b4b | /src/genie/libs/parser/nxos/show_vxlan.py | 7c7f3d6d9fb672cf68724c8a9bca7691087b3db0 | [
"Apache-2.0"
]
| permissive | LouiseSianEvans/genieparser | 75c3c73612db4a7cb31f657dc6ad9f25b5bfebb5 | 7dd4d81834479e35a6c08254e10e7692b00b897b | refs/heads/master | 2020-04-30T15:25:04.158694 | 2019-03-27T22:58:15 | 2019-03-27T22:58:15 | 176,919,539 | 0 | 0 | Apache-2.0 | 2019-03-21T10:10:46 | 2019-03-21T10:10:43 | Python | UTF-8 | Python | false | false | 84,285 | py | """show_vxlan.py
NXOS parser for the following show commands:
* show nve peers
* show nve interface <nve> detail
* show nve ethernet-segment
* show nve vni
* show nve vni summary
* show nve multisite dci-links
* show nve multisite fabric-links
* show l2route fl all
* show l2route evpn ethernet-segment all
* show l2route topology detail
* show l2route mac all detail
* show l2route mac-ip all detail
* show l2route summary
* show nve vni ingress-replication
"""
# Python
import re
# Metaparser
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Optional
from genie.libs.parser.utils.common import Common
# ====================================================
# schema for show nve peers
# ====================================================
class ShowNvePeersSchema(MetaParser):
    """Schema for:
        show nve peers"""

    # Top-level key: NVE interface name (e.g. 'nve1').
    schema = {
        Any(): {
            'nve_name': str,
            # Peers keyed by peer IP address.
            'peer_ip': {
                Any(): {
                    'peer_state': str,
                    'learn_type': str,
                    'uptime': str,
                    'router_mac': str,
                },
            },
        },
    }
# ====================================================
# parser for show nve peers
# ====================================================
class ShowNvePeers(ShowNvePeersSchema):
    """Parser for:
        show nve peers"""

    cli_command = 'show nve peers'

    def cli(self, output=None):
        # Query the device only when no pre-collected output was supplied.
        out = self.device.execute(self.cli_command) if output is None else output

        parsed = {}

        # Interface Peer-IP          State LearnType Uptime   Router-Mac
        # nve1      201.202.1.1      Up    CP        01:15:09 n/a
        p_peer = re.compile(
            r'^\s*(?P<nve_name>[\w\/]+) +(?P<peer_ip>[\w\.]+) +(?P<peer_state>[\w]+)'
            r' +(?P<learn_type>[\w]+) +(?P<uptime>[\w\:]+) +(?P<router_mac>[\w\.\/]+)$')

        for line in out.splitlines():
            if not line:
                continue
            match = p_peer.match(line.rstrip())
            if not match:
                continue

            fields = match.groupdict()
            intf = fields['nve_name']
            peer = fields['peer_ip']

            intf_dict = parsed.setdefault(intf, {})
            intf_dict['nve_name'] = intf

            entry = intf_dict.setdefault('peer_ip', {}).setdefault(peer, {})
            entry['learn_type'] = fields['learn_type']
            entry['uptime'] = fields['uptime']
            entry['router_mac'] = fields['router_mac']
            entry['peer_state'] = fields['peer_state'].lower()

        return parsed
# ====================================================
# schema for show nve vni summary
# ====================================================
class ShowNveVniSummarySchema(MetaParser):
    """Schema for:
        show nve vni summary"""

    # Control-plane (CP) and data-plane (DP) VNI totals with up/down splits.
    schema = {
        'vni': {
            'summary': {
                'cp_vni_count': int,
                'cp_vni_up': int,
                'cp_vni_down': int,
                'dp_vni_count': int,
                'dp_vni_up': int,
                'dp_vni_down': int,
            },
        },
    }
# ====================================================
# parser for show nve vni summary
# ====================================================
class ShowNveVniSummary(ShowNveVniSummarySchema):
    """Parser for :
       show nve vni summary"""

    cli_command = 'show nve vni summary'

    def cli(self, output=None):
        # excute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # Total CP VNIs: 21    [Up: 21, Down: 0]
        # Total DP VNIs: 0    [Up: 0, Down: 0]
        p1 = re.compile(
            r'^\s*Total +CP +VNIs: +(?P<cp_vni_count>[\d]+) +\[Up: +(?P<cp_vni_up>[\d]+), +Down: +(?P<cp_vni_down>[\d]+)\]$')
        p2 = re.compile(
            r'^\s*Total +DP +VNIs: +(?P<dp_vni_count>[\d]+) +\[Up: +(?P<dp_vni_up>[\d]+), +Down: +(?P<dp_vni_down>[\d]+)\]$')
        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue
            # CP summary line creates the 'vni'/'summary' container.
            m = p1.match(line)
            if m:
                group = m.groupdict()
                vni_dict = result_dict.setdefault('vni',{}).setdefault('summary',{})
                vni_dict.update({k:int(v) for k,v in group.items()})
                continue
            # DP summary line reuses vni_dict -- assumes the CP line always
            # precedes the DP line in device output (TODO confirm).
            m = p2.match(line)
            if m:
                group = m.groupdict()
                vni_dict.update({k: int(v) for k, v in group.items()})
                continue
        return result_dict
# ====================================================
# schema for show nve vni
# ====================================================
class ShowNveVniSchema(MetaParser):
    """Schema for:
        show nve vni"""

    # Top-level key: NVE interface name; second level keyed by numeric VNI.
    schema ={
        Any(): {
            'vni': {
                Any(): {
                    'vni': int,
                    'mcast': str,
                    'vni_state': str,
                    'mode': str,
                    'type': str,
                    # Empty string when the Flags column is blank.
                    'flags': str,
                }
            }
        }
    }
# ====================================================
# Parser for show nve vni
# ====================================================
class ShowNveVni(ShowNveVniSchema):
    """Parser for:
        show nve vni"""

    cli_command = 'show nve vni'

    def cli(self, output=None):
        # Use the supplied output when present, otherwise query the device.
        out = self.device.execute(self.cli_command) if output is None else output

        parsed = {}

        # Interface VNI      Multicast-group   State Mode Type [BD/VRF]      Flags
        # nve1      5001     234.1.1.1         Up    CP   L2 [1001]
        p_vni = re.compile(
            r'^\s*(?P<nve_name>[\w\/]+) +(?P<vni>[\d]+) +(?P<mcast>[\w\.\/]+)'
            r' +(?P<vni_state>[\w]+) +(?P<mode>[\w]+) +(?P<type>[\w\s\-\[\]]+)( +(?P<flags>[\w]+))?$')

        for line in out.splitlines():
            if not line:
                continue
            match = p_vni.match(line.rstrip())
            if not match:
                continue

            fields = match.groupdict()
            vni_id = int(fields['vni'])
            entry = (parsed.setdefault(fields['nve_name'], {})
                           .setdefault('vni', {})
                           .setdefault(vni_id, {}))
            entry['vni'] = vni_id
            entry['mcast'] = fields['mcast'].lower()
            entry['vni_state'] = fields['vni_state'].lower()
            entry['mode'] = fields['mode']
            entry['type'] = fields['type']
            # Flags column may be absent; normalize to empty string.
            entry['flags'] = fields['flags'] if fields['flags'] else ""

        return parsed
# ====================================================
# schema for show interface | i nve
# ====================================================
class ShowNveInterfaceSchema(MetaParser):
    """Schema for:
        show nve interface | i nve"""

    # Maps each NVE interface name to its admin/oper state string.
    schema = {
        'nves':
            {Any():
                {'nve_name': str,
                'nve_state': str,
                },
            },
    }
#=======================================
# show interface | i nve
#=======================================
class ShowNveInterface(ShowNveInterfaceSchema):
    """Parser for show interface | i nve"""

    cli_command = 'show interface | i nve'

    def cli(self, output=None):
        # Fetch output from the device unless it was passed in.
        out = self.device.execute(self.cli_command) if output is None else output

        parsed = {}

        # nve1 is down (other)
        p_state = re.compile(
            r'^\s*nve(?P<nve>(\d+)) +is +(?P<nve_state>[\w]+)( +(?P<other>[\w\(\)]+))?$')

        for line in out.splitlines():
            match = p_state.match(line.rstrip())
            if not match:
                continue
            fields = match.groupdict()
            name = "{}{}".format('nve', fields['nve'])
            entry = parsed.setdefault('nves', {}).setdefault(name, {})
            entry['nve_name'] = name
            entry['nve_state'] = fields['nve_state'].lower()

        return parsed
# ====================================================
# schema for show nve interface <nve> detail
# ====================================================
class ShowNveInterfaceDetailSchema(MetaParser):
    """Schema for:
        show nve interface <nve> detail"""

    # Top-level key: NVE interface name.  Most fields are optional because
    # the device only emits the lines relevant to its configuration.
    schema ={
        Any(): {
            'nve_name': str,
            Optional('if_state'): str,
            Optional('encap_type'): str,
            Optional('vpc_capability'): str,
            Optional('local_rmac'): str,
            Optional('host_reach_mode'): str,
            Optional('source_if'): str,
            Optional('primary_ip'): str,
            Optional('secondary_ip'): str,
            Optional('src_if_state'): str,
            Optional('ir_cap_mode'): str,
            Optional('adv_vmac'): bool,
            Optional('nve_flags'): str,
            Optional('nve_if_handle'): int,
            Optional('src_if_holddown_tm'): int,
            Optional('src_if_holdup_tm'): int,
            Optional('src_if_holddown_left'): int,
            Optional('multisite_convergence_time'): int,
            Optional('multisite_convergence_time_left'): int,
            Optional('vip_rmac'): str,
            Optional('vip_rmac_ro'): str,
            Optional('sm_state'): str,
            Optional('peer_forwarding_mode'): bool,
            Optional('dwn_strm_vni_cfg_mode'): str,
            Optional('src_intf_last_reinit_notify_type'): str,
            Optional('mcast_src_intf_last_reinit_notify_type'): str,
            Optional('multi_src_intf_last_reinit_notify_type'): str,
            Optional('multisite_bgw_if'): str,
            Optional('multisite_bgw_if_ip'): str,
            Optional('multisite_bgw_if_admin_state'): str,
            Optional('multisite_bgw_if_oper_state'): str,
            Optional('multisite_bgw_if_oper_state_down_reason'): str,
        }
    }
# ====================================================
# schema for show nve interface <nve> detail
# ====================================================
class ShowNveInterfaceDetail(ShowNveInterfaceDetailSchema):
    """Parser for:
        show nve interface <nve> detail

    When ``intf`` is empty, every NVE interface discovered via
    'show interface | i nve' is queried in turn and merged into one dict.
    """

    cli_command = 'show nve interface {intf} detail'

    def cli(self, intf=""):
        # Build the list of NVE interfaces to query.
        nve_list = []
        if intf:
            nve_list.append(intf)
        if not intf:
            # Discover all NVE interfaces present on the device.
            cmd1 = 'show interface | i nve'
            out1 = self.device.execute(cmd1)

            # nve1 is down (other)
            p1 = re.compile(r'^\s*nve(?P<nve>(\d+)) +is +(?P<nve_state>[\w]+)( +(?P<other>[\w\(\)]+))?$')
            for line in out1.splitlines():
                line = line.rstrip()
                m = p1.match(line)
                if m:
                    group = m.groupdict()
                    nve_name = '{}{}'.format('nve', group.get('nve'))
                    nve_list.append(nve_name)
                    continue

        result_dict = {}

        # Interface: nve1, State: Up, encapsulation: VXLAN
        p1 = re.compile(r'^\s*Interface: +(?P<nve_name>[\w\/]+), +State: +(?P<state>[\w]+),'
                        r' +encapsulation: +(?P<encapsulation>[\w]+)$')
        p2 = re.compile(r'^\s*VPC Capability: +(?P<vpc_capability>[\w\s\-\[\]]+)$')
        p3 = re.compile(r'^\s*Local Router MAC: +(?P<local_router_mac>[\w\.]+)$')
        p4 = re.compile(r'^\s*Host Learning Mode: +(?P<host_learning_mode>[\w\-]+)$')
        p5 = re.compile(r'^\s*Source-Interface: +(?P<source_if>[\w\/]+)'
                        r' +\(primary: +(?P<primary_ip>[\w\.]+), +secondary: +(?P<secondary_ip>[\w\.]+)\)$')
        p6 = re.compile(r'^\s*Source +Interface +State: +(?P<source_state>[\w]+)$')
        p7 = re.compile(r'^\s*IR +Capability +Mode: +(?P<mode>[\w]+)$')
        p8 = re.compile(r'^\s*Virtual +RMAC +Advertisement: +(?P<adv_vmac>[\w]+)$')
        p9 = re.compile(r'^\s*NVE +Flags:( +(?P<flags>[\w]+))?$')
        p10 = re.compile(r'^\s*Interface +Handle: +(?P<intf_handle>[\w]+)$')
        p11 = re.compile(r'^\s*Source +Interface +hold-down-time: +(?P<hold_down_time>[\d]+)$')
        p12 = re.compile(r'^\s*Source +Interface +hold-up-time: +(?P<hold_up_time>[\d]+)$')
        p13 = re.compile(r'^\s*Remaining +hold-down +time: +(?P<hold_time_left>[\d]+) +seconds$')
        p14 = re.compile(r'^\s*Virtual +Router +MAC: +(?P<v_router_mac>[\w\.]+)$')
        p15 = re.compile(r'^\s*Virtual +Router +MAC +Re\-origination: +(?P<v_router_mac_re>[\w\.]+)$')
        p16 = re.compile(r'^\s*Interface +state: +(?P<intf_state>[\w\-]+)$')
        p17 = re.compile(r'^\s*unknown-peer-forwarding: +(?P<peer_forwarding>[\w]+)$')
        p18 = re.compile(r'^\s*down-stream +vni +config +mode: +(?P<vni_config_mode>[\w\/]+)$')
        p19 = re.compile(r'^\s*Nve +Src +node +last +notif +sent: +(?P<last_notif_sent>[\w\-]+)$')
        p20 = re.compile(r'^\s*Nve +Mcast +Src +node +last +notif +sent: +(?P<last_notif_sent>[\w\-]+)$')
        p20_1 = re.compile(r'^\s*Nve +MultiSite +Src +node +last +notif +sent: +(?P<notif_sent>[\w\-]+)$')
        p21 = re.compile(
            r'^\s*Multisite +bgw\-if: +(?P<multisite_bgw_if>[\w\/\-]+) +\(ip: +(?P<multisite_bgw_if_ip>[\w\.]+),'
            r' +admin: +(?P<multisite_bgw_if_admin_state>[\w]+), +oper: +(?P<multisite_bgw_if_oper_state>[\w]+)\)$')
        p22 = re.compile(r'^\s*Multisite +bgw\-if +oper +down +reason: +(?P<reason>[\w\.\s]+)$')
        # Multi-Site delay-restore time: 180 seconds
        p23 = re.compile(r'^\s*Multi(-S|s)ite +delay\-restore +time: +(?P<multisite_convergence_time>\d+) +seconds$')
        # Multi-Site delay-restore time left: 0 seconds
        # BUGFIX: this pattern previously duplicated the p22 'bgw-if oper down
        # reason' text, so the 'delay-restore time left' line never matched
        # and 'multisite_convergence_time_left' was never populated.
        p24 = re.compile(
            r'^\s*Multi(-S|s)ite +delay\-restore +time +left: +(?P<multisite_convergence_time_left>\d+) +seconds$')

        for nve in nve_list:
            out = self.device.execute(self.cli_command.format(intf=nve))

            for line in out.splitlines():
                if line:
                    line = line.rstrip()
                else:
                    continue

                # Interface: nve1, State: Up, encapsulation: VXLAN
                # (creates the per-interface dict used by all later branches)
                m = p1.match(line)
                if m:
                    group = m.groupdict()
                    nve_name = group.pop('nve_name')
                    nve_dict = result_dict.setdefault(nve_name, {})
                    nve_dict.update({'nve_name': nve_name})
                    nve_dict.update({'if_state': group.pop('state').lower()})
                    nve_dict.update({'encap_type': group.pop('encapsulation').lower()})
                    continue

                # VPC Capability: VPC-VIP-Only [notified]
                m = p2.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'vpc_capability': group.pop('vpc_capability').lower()})
                    continue

                # Local Router MAC: 5e00.0005.0007
                m = p3.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'local_rmac': group.pop('local_router_mac')})
                    continue

                # Host Learning Mode: Control-Plane
                m = p4.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'host_reach_mode': group.pop('host_learning_mode').lower()})
                    continue

                # Source-Interface: loopback1 (primary: 201.11.11.11, secondary: 201.12.11.22)
                m = p5.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({k: v for k, v in group.items()})
                    continue

                # Source Interface State: Up
                m = p6.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'src_if_state': group.pop('source_state').lower()})
                    continue

                # IR Capability Mode: No
                m = p7.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'ir_cap_mode': group.pop('mode').lower()})
                    continue

                # Virtual RMAC Advertisement: Yes
                m = p8.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'adv_vmac': True if group.pop('adv_vmac').lower() == 'yes' else False})
                    continue

                # NVE Flags:
                m = p9.match(line)
                if m:
                    group = m.groupdict()
                    if group.get("flags"):
                        nve_dict.update({'nve_flags': group.pop('flags')})
                    else:
                        nve_dict.update({'nve_flags': ""})
                    continue

                # Interface Handle: 0x49000001
                m = p10.match(line)
                if m:
                    group = m.groupdict()
                    # base 0: the handle is printed in hex ('0x...').
                    nve_dict.update({'nve_if_handle': int(group.pop('intf_handle'), 0)})
                    continue

                # Source Interface hold-down-time: 180
                m = p11.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'src_if_holddown_tm': int(group.pop('hold_down_time'))})
                    continue

                # Source Interface hold-up-time: 30
                m = p12.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'src_if_holdup_tm': int(group.pop('hold_up_time'))})
                    continue

                # Remaining hold-down time: 0 seconds
                m = p13.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'src_if_holddown_left': int(group.pop('hold_time_left'))})
                    continue

                # Virtual Router MAC: 0200.c90c.0b16
                m = p14.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'vip_rmac': group.pop('v_router_mac')})
                    continue

                # Virtual Router MAC Re-origination: 0200.6565.6565
                m = p15.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'vip_rmac_ro': group.pop('v_router_mac_re')})
                    continue

                # Interface state: nve-intf-add-complete
                m = p16.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'sm_state': group.pop('intf_state')})
                    continue

                # unknown-peer-forwarding: disable
                m = p17.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'peer_forwarding_mode': False if group.pop('peer_forwarding') == 'disable' else True})
                    continue

                # down-stream vni config mode: n/a
                m = p18.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'dwn_strm_vni_cfg_mode': group.pop('vni_config_mode')})
                    continue

                # Nve Src node last notif sent: Port-up
                m = p19.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'src_intf_last_reinit_notify_type': group.pop('last_notif_sent').lower()})
                    continue

                # Nve Mcast Src node last notif sent: None
                m = p20.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'mcast_src_intf_last_reinit_notify_type': group.pop('last_notif_sent').lower()})
                    continue

                # Nve MultiSite Src node last notif sent: None
                m = p20_1.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'multi_src_intf_last_reinit_notify_type': group.pop('notif_sent').lower()})
                    continue

                # Multisite bgw-if: loopback2 (ip: 101.101.101.101, admin: Down, oper: Down)
                m = p21.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'multisite_bgw_if': group.pop('multisite_bgw_if')})
                    nve_dict.update({'multisite_bgw_if_ip': group.pop('multisite_bgw_if_ip')})
                    nve_dict.update({'multisite_bgw_if_admin_state': group.pop('multisite_bgw_if_admin_state').lower()})
                    nve_dict.update({'multisite_bgw_if_oper_state': group.pop('multisite_bgw_if_oper_state').lower()})
                    continue

                # Multisite bgw-if oper down reason: NVE not up.
                m = p22.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'multisite_bgw_if_oper_state_down_reason': group.pop('reason')})
                    continue

                # Multi-Site delay-restore time: 180 seconds
                m = p23.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'multisite_convergence_time': int(group.pop('multisite_convergence_time'))})
                    continue

                # Multi-Site delay-restore time left: 0 seconds
                m = p24.match(line)
                if m:
                    group = m.groupdict()
                    nve_dict.update({'multisite_convergence_time_left': int(group.pop('multisite_convergence_time_left'))})
                    continue

        return result_dict
# ====================================================
# schema for show nve multisite dci-links
# ====================================================
class ShowNveMultisiteDciLinksSchema(MetaParser):
    """Schema for:
        show nve multisite dci-links"""

    # DCI links keyed by interface name; absent when no links exist.
    schema ={
        'multisite': {
            Optional('dci_links'): {
                Any():{
                    'if_name': str,
                    'if_state': str
                },
            },
        },
    }
# ====================================================
# Parser for show nve multisite dci-link
# ====================================================
class ShowNveMultisiteDciLinks(ShowNveMultisiteDciLinksSchema):
    """Parser for:
        show nve multisite dci-links"""

    cli_command = 'show nve multisite dci-links'

    def cli(self, output=None):
        # Fetch from the device unless pre-collected output was passed in.
        out = self.device.execute(self.cli_command) if output is None else output

        parsed = {}

        # Interface      State
        # Ethernet1/53   Up
        # (the negative lookahead excludes the 'Interface' header row)
        p_link = re.compile(r'^\s*(?P<if_name>(?!Interface)[\w\/]+) +(?P<if_state>[\w]+)$')

        for line in out.splitlines():
            if not line:
                continue
            match = p_link.match(line.rstrip())
            if not match:
                continue
            name = match.group('if_name')
            entry = (parsed.setdefault('multisite', {})
                           .setdefault('dci_links', {})
                           .setdefault(name, {}))
            entry['if_name'] = name
            entry['if_state'] = match.group('if_state').lower()

        return parsed
# ====================================================
# schema for show nve multisite fabric-links
# ====================================================
class ShowNveMultisiteFabricLinksSchema(MetaParser):
    """Schema for:
        show nve multisite fabric-links"""

    # Fabric links keyed by interface name.
    schema = {
        'multisite': {
            'fabric_links': {
                Any(): {
                    'if_name': str,
                    'if_state': str
                },
            },
        },
    }
# ====================================================
# Parser for show nve multisite fabric-link
# ====================================================
class ShowNveMultisiteFabricLinks(ShowNveMultisiteFabricLinksSchema):
    """Parser for:
        show nve multisite fabric-links"""

    cli_command = 'show nve multisite fabric-links'

    def cli(self, output=None):
        # Execute on the device only when no cached output is supplied.
        out = self.device.execute(self.cli_command) if output is None else output

        parsed = {}

        # Interface      State
        # Ethernet1/53   Up
        # (the negative lookahead excludes the 'Interface' header row)
        p_link = re.compile(r'^\s*(?P<if_name>(?!Interface)[\w\/]+) +(?P<if_state>[\w]+)$')

        for line in out.splitlines():
            if not line:
                continue
            match = p_link.match(line.rstrip())
            if not match:
                continue
            name = match.group('if_name')
            entry = (parsed.setdefault('multisite', {})
                           .setdefault('fabric_links', {})
                           .setdefault(name, {}))
            entry['if_name'] = name
            entry['if_state'] = match.group('if_state').lower()

        return parsed
# ==================================================
# Schema for show nve ethernet-segment
# ==================================================
class ShowNveEthernetSegmentSchema(MetaParser):
    """Schema for:
        show nve ethernet-segment"""

    # Keyed by NVE interface, then by ESI string.
    schema ={
        'nve':{
            Any():{
                'ethernet_segment': {
                    'esi': {
                        Any(): {
                            'esi': str,
                            'if_name': str,
                            'es_state': str,
                            'po_state': str,
                            'nve_if_name': str,
                            'nve_state': str,
                            'host_reach_mode': str,
                            'active_vlans': str,
                            # Only present when the device prints 'DF Vlans:'.
                            Optional('df_vlans'): str,
                            'active_vnis': str,
                            'cc_failed_vlans': str,
                            'cc_timer_left': str,
                            'num_es_mem': int,
                            Optional('local_ordinal'): int,
                            'df_timer_st': str,
                            'config_status': str,
                            Optional('df_list'): str,
                            'es_rt_added': bool,
                            'ead_rt_added': bool,
                            'ead_evi_rt_timer_age': str,
                        },
                    },
                },
            },
        }
    }
# ==================================================
# Schema for show nve ethernet-segment
# ==================================================
class ShowNveEthernetSegment(ShowNveEthernetSegmentSchema):
    """Parser for:
        show nve ethernet-segment

    ESI, parent interface and ES/PO state appear in the device output
    before the 'NVE Interface' line, so they are buffered in locals and
    written into the result once that line creates the per-ESI dict.
    """

    cli_command = 'show nve ethernet-segment'

    def cli(self, output=None):
        # excute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output

        # Accumulates wrapped 'DF Vlans:' continuation lines.
        df_vlans = ""
        result_dict = {}

        # ESI: 0300.0000.0001.2c00.0309
        #    Parent interface: nve1
        #   ES State: Up
        #   Port-channel state: N/A
        #   NVE Interface: nve1
        #    NVE State: Up
        #   Host Learning Mode: control-plane
        #   Active Vlans: 1,101-105,1001-1100,2001-2100,3001-3005
        #    DF Vlans: 102,104,1002,1004,1006,1008,1010,1012,1014,1016,1018,1020,1022,1024
        # ,1026,1028,1030,1032,1034,1036,1038,1040,1042,1044,1046,1048,1050,1052,1054,1056
        #    Active VNIs: 501001-501100,502001-502100,503001-503005,600101-600105
        #   CC failed for VLANs:
        #   VLAN CC timer: 0
        #   Number of ES members: 2
        #   My ordinal: 0
        #   DF timer start time: 00:00:00
        #   Config State: N/A
        #   DF List: 201.0.0.55 201.0.0.66
        #   ES route added to L2RIB: True
        #   EAD/ES routes added to L2RIB: False
        #   EAD/EVI route timer age: not running
        p1 = re.compile(r'^\s*ESI: +(?P<esi>[\w\.]+)$')
        p2 = re.compile(r'^\s*Parent +interface: +(?P<parent_intf>[\w\.\/]+)$')
        p3 = re.compile(r'^\s*ES +State: +(?P<es_state>[\w\/]+)$')
        p4 = re.compile(r'^\s*Port-channel +state: +(?P<po_state>[\w\/]+)$')
        p5 = re.compile(r'^\s*NVE +Interface: +(?P<nve_intf>[\w\.\/]+)$')
        p6 = re.compile(r'^\s*NVE +State: +(?P<nve_state>[\w\/]+)$')
        p7 = re.compile(r'^\s*Host +Learning +Mode: +(?P<host_learning_mode>[\w\-]+)$')
        p8 = re.compile(r'^\s*Active +Vlans: +(?P<active_vlans>[\d\-\,]+)$')
        p9 = re.compile(r'^\s*DF Vlans: +(?P<df_vlans>[\d\-\,]+)$')
        p10 = re.compile(r'^\s*,(?P<df_vlans>[\d\-\,]+)$')
        p11 = re.compile(r'^\s*Active +VNIs: +(?P<active_vnis>[\d\-\,]+)$')
        p12 = re.compile(r'^\s*CC +failed +for +VLANs:( +(?P<cc_failed_vlans>[\w\/]+))?$')
        p13 = re.compile(r'^\s*VLAN CC timer: +(?P<cc_timer_left>[\d]+)?$')
        p14 = re.compile(r'^\s*Number +of +ES +members: +(?P<num_es_mem>[\d]+)?$')
        p15 = re.compile(r'^\s*My +ordinal: +(?P<local_ordinal>[\d]+)$')
        p16 = re.compile(r'^\s*DF +timer +start +time: +(?P<df_timer_start_time>[\w\:]+)$')
        p17 = re.compile(r'^\s*Config +State: +(?P<config_status>[\w\/]+)$')
        p18 = re.compile(r'^\s*DF +List: +(?P<df_list>[\d\s\.]+)$')
        p19 = re.compile(r'^\s*ES +route +added +to +L2RIB: +(?P<is_es_added_to_l2rib>[\w]+)$')
        p20 = re.compile(r'^\s*EAD\/ES +routes +added +to +L2RIB: +(?P<ead_rt_added>[\w]+)$')
        p21 = re.compile(r'^\s*EAD/EVI +route +timer +age: +(?P<ead_evi_rt_timer_age>[\w\s]+)$')

        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue

            # ESI: 0300.0000.0001.2c00.0309  (buffered until p5 matches)
            m = p1.match(line)
            if m:
                group = m.groupdict()
                esi = group.pop('esi')
                continue

            # Parent interface: nve1
            m = p2.match(line)
            if m:
                group = m.groupdict()
                if_name = group.pop('parent_intf')
                continue

            # ES State: Up
            m = p3.match(line)
            if m:
                group = m.groupdict()
                es_state = group.pop('es_state').lower()
                continue

            # Port-channel state: N/A
            m = p4.match(line)
            if m:
                group = m.groupdict()
                po_state = group.pop('po_state').lower()
                continue

            # NVE Interface: nve1 -- creates the per-ESI dict and flushes
            # the values buffered by p1-p4.
            m = p5.match(line)
            if m:
                group = m.groupdict()
                nve_if_name = group.pop('nve_intf')
                esi_dict = result_dict.setdefault('nve', {}).setdefault(nve_if_name, {}).\
                    setdefault('ethernet_segment', {}).setdefault('esi', {}).\
                    setdefault(esi, {})
                esi_dict.update({'esi': esi})
                esi_dict.update({'nve_if_name': nve_if_name})
                esi_dict.update({'po_state': po_state})
                esi_dict.update({'if_name': if_name})
                esi_dict.update({'es_state': es_state})
                continue

            # NVE State: Up
            m = p6.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'nve_state': group.pop('nve_state').lower()})
                continue

            # Host Learning Mode: control-plane
            m = p7.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'host_reach_mode': group.pop('host_learning_mode').lower()})
                continue

            # Active Vlans: 1,101-105,...
            m = p8.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'active_vlans': group.pop('active_vlans')})
                continue

            # DF Vlans: 102,104,...
            m = p9.match(line)
            if m:
                group = m.groupdict()
                df_vlans = group.pop('df_vlans')
                esi_dict.update({'df_vlans': df_vlans})
                continue

            # ,1026,1028,...  (wrapped continuation of the DF Vlans line)
            m = p10.match(line)
            if m:
                group = m.groupdict()
                df_vlans = "{},{}".format(df_vlans, group.pop('df_vlans'))
                esi_dict.update({'df_vlans': df_vlans})
                continue

            # Active VNIs: 501001-501100,...
            m = p11.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'active_vnis': group.pop('active_vnis')})
                continue

            # CC failed for VLANs:
            m = p12.match(line)
            if m:
                group = m.groupdict()
                # BUGFIX: the original popped 'cc_failed_vlans' twice, which
                # raised KeyError whenever the VLAN list was non-empty.
                cc_failed_vlans = group.pop('cc_failed_vlans')
                esi_dict.update({'cc_failed_vlans': cc_failed_vlans if cc_failed_vlans else ''})
                continue

            # VLAN CC timer: 0
            m = p13.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'cc_timer_left': group.pop('cc_timer_left')})
                continue

            # Number of ES members: 2
            m = p14.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'num_es_mem': int(group.pop('num_es_mem'))})
                continue

            # My ordinal: 0
            m = p15.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'local_ordinal': int(group.pop('local_ordinal'))})
                continue

            # DF timer start time: 00:00:00
            m = p16.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'df_timer_st': group.pop('df_timer_start_time')})
                continue

            # Config State: N/A
            m = p17.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'config_status': group.pop('config_status').lower()})
                continue

            # DF List: 201.0.0.55 201.0.0.66
            m = p18.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'df_list': group.pop('df_list')})
                continue

            # ES route added to L2RIB: True
            m = p19.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'es_rt_added': False if 'False' in group.pop('is_es_added_to_l2rib') else True})
                continue

            # EAD/ES routes added to L2RIB: False
            m = p20.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'ead_rt_added': False if 'False' in group.pop('ead_rt_added') else True})
                continue

            # EAD/EVI route timer age: not running
            m = p21.match(line)
            if m:
                group = m.groupdict()
                esi_dict.update({'ead_evi_rt_timer_age': group.pop('ead_evi_rt_timer_age')})
                continue

        return result_dict
# ====================================================
# schema for show l2route evpn ethernet-segment all
# ====================================================
class ShowL2routeEvpnEternetSegmentAllSchema(MetaParser):
    """Schema for:
        show l2route evpn ethernet-segment all"""

    # NOTE: the class name misspells 'Ethernet' as 'Eternet'; it is kept
    # unchanged for backward compatibility with existing callers.
    # Entries are keyed by a 1-based row index in parse order.
    schema ={
        'evpn': {
            'ethernet_segment': {
                Any(): {
                    'ethernet_segment': str,
                    'originating_rtr': str,
                    'prod_name': str,
                    'int_ifhdl': str,
                    'client_nfn': int,
                }
            }
        }
    }
# ====================================================
# Parser for show l2route evpn ethernet-segment all
# ====================================================
class ShowL2routeEvpnEternetSegmentAll(ShowL2routeEvpnEternetSegmentAllSchema):
    """Parser for:
        show l2route evpn ethernet-segment all"""

    cli_command = 'show l2route evpn ethernet-segment all'

    def cli(self, output=None):
        # excute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output

        result_dict = {}
        # Rows are numbered from 1 in parse order.
        index = 1

        # ESI                      Orig Rtr. IP Addr  Prod  Ifindex      NFN Bitmap
        # ------------------------ -----------------  ----- -----------  ----------
        # 0300.0000.0001.2c00.0309 201.0.0.55         VXLAN nve1         64
        p1 = re.compile(r'^\s*(?P<ethernet_segment>(?!ESI)[\w\.]+) +(?P<originating_rtr>[\d\.]+)'
                        ' +(?P<prod_name>[\w]+) +(?P<int_ifhdl>[\w\/]+) +(?P<client_nfn>[\w\.]+)$')

        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue

            m = p1.match(line)
            if m:
                evpn_dict = result_dict.setdefault('evpn', {}).setdefault('ethernet_segment', {}).setdefault(index, {})
                group = m.groupdict()
                for k, v in group.items():
                    # Numeric columns (e.g. the NFN bitmap) become ints;
                    # everything else is normalized to lower case.
                    try:
                        v = int(v)
                    # BUGFIX: was a bare 'except:'; int() on a str only
                    # raises ValueError here, so catch just that.
                    except ValueError:
                        v = v.lower()
                    evpn_dict.update({k: v})
                index += 1
                continue
        return result_dict
# ====================================================
# schema for show l2route topology detail
# ====================================================
class ShowL2routeTopologyDetailSchema(MetaParser):
    """Schema for:
        show l2route topology detail"""

    # Keyed by numeric topology id, then by topology name.
    schema ={
        'topology': {
            'topo_id': {
                Any(): {
                    'topo_name': {
                        Any(): {
                            'topo_name': str,
                            Optional('topo_type'): str,
                            Optional('vni'): int,
                            Optional('encap_type'): int,
                            Optional('iod'): int,
                            Optional('if_hdl'): int,
                            Optional('vtep_ip'): str,
                            Optional('emulated_ip'): str,
                            Optional('emulated_ro_ip'): str,
                            Optional('tx_id'): int,
                            Optional('rcvd_flag'): int,
                            Optional('rmac'): str,
                            Optional('vrf_id'): int,
                            Optional('vmac'): str,
                            Optional('flags'): str,
                            Optional('sub_flags'): str,
                            Optional('prev_flags'): str,
                        }
                    }
                }
            }
        }
    }
# ====================================================
# Parser for show l2route topology detail
# ====================================================
class ShowL2routeTopologyDetail(ShowL2routeTopologyDetailSchema):
    """Parser for:
        show l2route topology detail

    Attribute lines (p2-p9) are written into the dict created by the most
    recent topology header line (p1); output is assumed to begin with a
    header line.
    """

    cli_command = 'show l2route topology detail'

    def cli(self, output=None):
        # excute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # Topology ID   Topology Name   Attributes
        # -----------   -------------   ----------
        # 101           Vxlan-10001     VNI: 10001
        #                               Encap:0 IOD:0 IfHdl:1224736769
        #                               VTEP IP: 201.11.11.11
        #                               Emulated IP: 201.12.11.22
        #                               Emulated RO IP: 201.12.11.22
        #                               TX-ID: 20 (Rcvd Ack: 0)
        #                               RMAC: 5e00.0005.0007, VRFID: 3
        #                               VMAC: 0200.c90c.0b16
        #                               Flags: L3cp, Sub_Flags: --, Prev_Flags: -
        p1 = re.compile(r'^\s*(?P<topo_id>[\d]+) +(?P<topo_name>[\w\-]+) +(?P<topo_type>[\w\/]+)(: +(?P<vni>[\d]+))?$')
        p2 = re.compile(r'^\s*Encap:(?P<encap_type>[\d]+) +IOD:(?P<iod>[\d]+) +IfHdl:(?P<if_hdl>[\d]+)$')
        p3 = re.compile(r'^\s*VTEP +IP: +(?P<vtep_ip>[\d\.]+)$')
        p4 = re.compile(r'^\s*Emulated +IP: +(?P<emulated_ip>[\d\.]+)$')
        p5 = re.compile(r'^\s*Emulated +RO +IP: +(?P<emulated_ro_ip>[\d\.]+)$')
        p6 = re.compile(r'^\s*TX-ID: +(?P<tx_id>[\d]+) +\((Rcvd +Ack: +(?P<rcvd_flag>[\d]+))\)$')
        p7 = re.compile(r'^\s*RMAC: +(?P<rmac>[\w\.]+), VRFID: +(?P<vrf_id>[\d]+)$')
        p8 = re.compile(r'^\s*VMAC: +(?P<vmac>[\w\.]+)$')
        p9 = re.compile(
            r'^\s*Flags: +(?P<flags>[\w]+), +Sub_Flags: +(?P<sub_flags>[\w\-]+), +Prev_Flags: +(?P<prev_flags>[\w\-]+)$')
        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue
            # Topology header: creates topo_dict used by all later branches.
            m0 = p1.match(line)
            if m0:
                group = m0.groupdict()
                topo_id = int(group.pop('topo_id'))
                topo_name = group.pop('topo_name')
                topo_type = group.pop('topo_type').lower()
                topo_dict = result_dict.setdefault('topology', {}).setdefault('topo_id', {}).setdefault(topo_id,{}).\
                    setdefault('topo_name',{}).setdefault(topo_name,{})
                if m0.groupdict()['vni']:
                    vni = int(group.pop('vni'))
                    topo_dict.update({'vni': vni})
                topo_dict.update({'topo_type': topo_type})
                topo_dict.update({'topo_name': topo_name})
                continue
            # m2 collects patterns whose groups are all ints (p2, p6);
            # m collects patterns whose groups stay strings (p3-p5, p8, p9).
            m2 = m = ""
            if p2.match(line):
                m2 = p2.match(line)
            if p6.match(line):
                m2 = p6.match(line)
            if m2:
                group = m2.groupdict()
                topo_dict.update({k:int(v) for k,v in group.items() })
                continue
            if p3.match(line):
                m= p3.match(line)
            if p4.match(line):
                m = p4.match(line)
            if p5.match(line):
                m = p5.match(line)
            if p8.match(line):
                m = p8.match(line)
            if p9.match(line):
                m = p9.match(line)
            if m:
                group = m.groupdict()
                topo_dict.update({k:v for k, v in group.items()})
                continue
            # RMAC/VRFID line mixes a string and an int field.
            m3 = p7.match(line)
            if m3:
                group = m3.groupdict()
                topo_dict.update({'rmac': group.pop('rmac')})
                topo_dict.update({'vrf_id': int(group.pop('vrf_id'))})
                continue
        return result_dict
# ====================================================
# schema for show l2route mac all detail
# ====================================================
class ShowL2routeMacAllDetailSchema(MetaParser):
    """Schema for:
        show l2route mac all detail"""
    # Layout: topology -> topo_id -> <id> -> mac -> <mac-address> -> fields
    # parsed from one table row plus its indented detail lines.
    schema ={
        'topology': {
            'topo_id': {
                Any(): {
                    'mac': {
                        Any(): {
                            'mac_addr': str,           # MAC address (dotted hex)
                            'prod_type': str,          # producer, lower-cased (e.g. 'vxlan')
                            'flags': str,              # route flags, lower-cased, trailing comma stripped
                            'seq_num': int,            # sequence number
                            'next_hop1': str,          # first next-hop: IP or converted interface name
                            'rte_res': str,            # "Route Resolution Type" line, lower-cased
                            'fwd_state': str,          # "Forwarding State" value
                            Optional('peer_id'): int,  # present when forwarding state reports "(PeerID: n)"
                            Optional('sent_to'): str,  # "Sent To" client, lower-cased
                            Optional('soo'): int,      # site-of-origin
                        }
                    }
                }
            }
        }
    }
# ====================================================
# Parser for show l2route mac all detail
# ====================================================
class ShowL2routeMacAllDetail(ShowL2routeMacAllDetailSchema):
    """parser for:
        show l2route mac all detail"""
    cli_command = 'show l2route mac all detail'
    def cli(self, output=None):
        """Parse `show l2route mac all detail` into the schema dictionary.

        Args:
            output: pre-collected CLI output; when None the command is
                executed on the connected device.

        Returns:
            dict keyed topology -> topo_id -> <id> -> mac -> <mac-addr>.
        """
        # execute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # Topology    Mac Address    Prod   Flags         Seq No     Next-Hops
        # ----------- -------------- ------ ------------- ---------- ----------------
        # 101         5e00.0002.0007 VXLAN  Rmac          0          204.1.1.1
        p1 = re.compile(r'^\s*(?P<topo_id>[\d]+) +(?P<mac_addr>[\w\.]+) +(?P<prod_type>[\w\,]+)'
                        r' +(?P<flags>[\w\,\-]+) +(?P<seq_num>[\d]+) +(?P<next_hop1>[\w\/\.]+)$')
        # Route Resolution Type: Regular
        p2 = re.compile(r'^\s*Route +Resolution +Type: +(?P<rte_res>[\w]+)$')
        # Forwarding State: Resolved (PeerID: 2)
        p3 = re.compile(r'^\s*Forwarding +State: +(?P<fwd_state>[\w]+)( +\(PeerID: +(?P<peer_id>[\d]+)\))?$')
        # Sent To: BGP
        p4 = re.compile(r'^\s*Sent +To: +(?P<sent_to>[\w\,]+)$')
        # SOO: 774975538
        p5 = re.compile(r'^\s*SOO: +(?P<soo>[\d]+)$')
        for line in out.splitlines():
            if not line:
                continue
            line = line.rstrip()
            m = p1.match(line)
            if m:
                group = m.groupdict()
                topo_id = int(group.pop('topo_id'))
                mac_addr = group.pop('mac_addr')
                topo_dict = result_dict.setdefault('topology', {}).setdefault('topo_id', {}).\
                    setdefault(topo_id, {}).setdefault('mac', {}).setdefault(mac_addr, {})
                flags = group.pop('flags')
                # drop a trailing comma left over from the column formatting
                if flags.endswith(','):
                    flags = flags[:-1]
                topo_dict.update({'flags': flags.lower()})
                topo_dict.update({'prod_type': group.pop('prod_type').lower()})
                topo_dict.update({'seq_num': int(group.pop('seq_num'))})
                topo_dict.update({'mac_addr': mac_addr})
                # pop once up front: the old code popped again inside `except`,
                # which raised KeyError whenever convert_intf_name failed
                raw_next_hop = group.pop('next_hop1')
                try:
                    next_hop1 = Common.convert_intf_name(raw_next_hop)
                except Exception:
                    # not an interface name (e.g. an IP address); keep verbatim
                    next_hop1 = raw_next_hop
                topo_dict.update({'next_hop1': next_hop1})
                continue
            # p2/p4 both carry string values that are stored lower-cased;
            # match each pattern once instead of twice per line
            m1 = p2.match(line) or p4.match(line)
            if m1:
                group = m1.groupdict()
                topo_dict.update({k: v.lower() for k, v in group.items()})
                continue
            m = p3.match(line)
            if m:
                group = m.groupdict()
                topo_dict.update({'fwd_state': group.get('fwd_state')})
                if group.get('peer_id'):
                    topo_dict.update({'peer_id': int(group.get('peer_id'))})
                continue
            m = p5.match(line)
            if m:
                group = m.groupdict()
                topo_dict.update({k: int(v) for k, v in group.items()})
                continue
        return result_dict
# ====================================================
# schema for show l2route mac-ip all detail
# ====================================================
class ShowL2routeMacIpAllDetailSchema(MetaParser):
    """Schema for:
        show l2route mac-ip all detail"""
    # Layout: topology -> topo_id -> <id> -> mac_ip -> <mac-address> -> fields
    # from one mac-ip table row plus optional indented detail lines.
    schema ={
        'topology': {
            'topo_id': {
                Any(): {
                    'mac_ip': {
                        Any(): {
                            'mac_addr': str,            # MAC address (dotted hex)
                            'mac_ip_prod_type': str,    # producer, lower-cased (e.g. 'bgp', 'hmm')
                            'mac_ip_flags': str,        # route flags, lower-cased
                            'seq_num': int,             # sequence number
                            'next_hop1': str,           # first next-hop, lower-cased
                            'host_ip': str,             # host IP from the row
                            Optional('sent_to'): str,   # "Sent To" client, lower-cased
                            Optional('soo'): int,       # site-of-origin
                            Optional('l3_info'): int,   # "L3-Info" value
                        }
                    }
                }
            }
        }
    }
# ====================================================
# Parser for show l2route mac-ip all detail
# ====================================================
class ShowL2routeMacIpAllDetail(ShowL2routeMacIpAllDetailSchema):
    """parser for:
        show l2route mac-ip all detail"""
    cli_command = 'show l2route mac-ip all detail'
    def cli(self, output=None):
        """Parse `show l2route mac-ip all detail` into the schema dictionary.

        Args:
            output: pre-collected CLI output; when None the command is
                executed on the connected device.

        Returns:
            dict keyed topology -> topo_id -> <id> -> mac_ip -> <mac-addr>.
        """
        # execute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # Topology    Mac Address    Prod   Flags      Seq No     Host IP         Next-Hops
        # ----------- -------------- ------ ---------- --------------- ---------------
        # 1001        fa16.3ec2.34fe BGP    --         0          5.1.10.11      204.1.1.1
        # 1001        fa16.3ea3.fb66 HMM    --         0          5.1.10.55      Local
        p1 = re.compile(r'^\s*(?P<topo_id>[\d]+) +(?P<mac_addr>[\w\.]+) +(?P<mac_ip_prod_type>[\w\,]+)'
                        r' +(?P<mac_ip_flags>[\w\,\-]+) +(?P<seq_num>[\d]+) +(?P<host_ip>[\w\/\.]+)'
                        r' +(?P<next_hop1>[\w\/\.]+)$')
        # Sent To: BGP
        p2 = re.compile(r'^\s*Sent +To: +(?P<sent_to>[\w]+)$')
        # SOO: 774975538
        p3 = re.compile(r'^\s*SOO: +(?P<soo>[\d]+)$')
        # L3-Info: 10001
        p4 = re.compile(r'^\s*L3-Info: +(?P<l3_info>[\d]+)$')
        # Newer layout with Host IP before the producer column:
        # Topology    Mac Address    Host IP         Prod   Flags      Seq No     Next-Hops
        # ----------- -------------- --------------- ------ ---------- ---------------
        # 101         0000.9cfc.2596 100.101.1.3     BGP    --         0          23.23.23.23
        p5 = re.compile(r'^\s*(?P<topo_id>[\d]+) +(?P<mac_addr>[\w\.]+) +(?P<host_ip>[\w\/\.]+)'
                        r' +(?P<mac_ip_prod_type>[\w\,]+)'
                        r' +(?P<mac_ip_flags>[\w\,\-]+) +(?P<seq_num>[\d]+)'
                        r' +(?P<next_hop1>[\w\/\.]+)$')
        def _fill_entry(group):
            """Create/refresh the mac_ip entry described by one table row
            (both column layouts capture the same named groups)."""
            topo_id = int(group.pop('topo_id'))
            mac_addr = group.pop('mac_addr')
            entry = result_dict.setdefault('topology', {}).setdefault('topo_id', {}).\
                setdefault(topo_id, {}).setdefault('mac_ip', {}).setdefault(mac_addr, {})
            entry.update({'mac_ip_flags': group.pop('mac_ip_flags').lower()})
            entry.update({'mac_ip_prod_type': group.pop('mac_ip_prod_type').lower()})
            entry.update({'seq_num': int(group.pop('seq_num'))})
            entry.update({'mac_addr': mac_addr})
            entry.update({'host_ip': group.pop('host_ip')})
            entry.update({'next_hop1': group.pop('next_hop1').lower()})
            return entry
        for line in out.splitlines():
            if not line:
                continue
            line = line.rstrip()
            # the two row layouts are mutually exclusive (producer vs host-ip
            # column content cannot satisfy the other pattern)
            m = p1.match(line) or p5.match(line)
            if m:
                topo_dict = _fill_entry(m.groupdict())
                continue
            # integer-valued detail lines; match each pattern only once
            m1 = p3.match(line) or p4.match(line)
            if m1:
                topo_dict.update({k: int(v) for k, v in m1.groupdict().items()})
                continue
            m = p2.match(line)
            if m:
                topo_dict.update({k: v.lower() for k, v in m.groupdict().items()})
                continue
        return result_dict
# ====================================================
# schema for show l2route summary
# ====================================================
class ShowL2routeSummarySchema(MetaParser):
    """Schema for:
        show l2route summary"""
    # Layout: summary -> totals plus one table_name entry per "Table Name:"
    # section; each table holds its producers and the table's Total row.
    schema ={
        'summary': {
            'total_memory': int,              # "Total Memory:" line
            'numof_converged_tables': int,    # "Number of Converged Tables:" line
            Optional('table_name'): {
                Any(): {
                    'producer_name': {
                        Any(): {
                            'producer_name': str,  # producer, lower-cased (e.g. 'vxlan')
                            'id': int,             # producer id from the "( n )" column
                            'objects': int,        # object count
                            'memory': int,         # memory in bytes
                        },
                        # per-table totals from the trailing "Total" row
                        'total_obj': int,
                        'total_mem': int,
                    }
                }
            }
        }
    }
# ====================================================
# Parser for show l2route summary
# ====================================================
class ShowL2routeSummary(ShowL2routeSummarySchema):
    """parser for:
        show l2route summary"""
    cli_command = 'show l2route summary'
    def cli(self, output=None):
        """Parse `show l2route summary`, executing the command when no
        pre-collected output is supplied."""
        out = self.device.execute(self.cli_command) if output is None else output
        parsed = {}
        # L2ROUTE Summary
        # Total Memory: 6967
        # Number of Converged Tables: 47
        # Table Name: Topology
        # Producer  (ID)   Objects      Memory (Bytes)
        # --------------- ---------- --------------
        # VXLAN     (11 )       21            5927
        # ---------------------------------------------
        # Total                 21            5927
        # ---------------------------------------------
        total_mem_re = re.compile(r'^\s*Total +Memory: +(?P<total_memory>[\d]+)$')
        converged_re = re.compile(r'^\s*Number +of +Converged +Tables: +(?P<numof_converged_tables>[\d]+)$')
        table_re = re.compile(r'^\s*Table +Name: +(?P<table_name>[\w\-]+)$')
        producer_re = re.compile(r'^\s*(?P<producer_name>[\w]+) +\((?P<id>[\d\s]+)\) +(?P<objects>[\d]+) +(?P<memory>[\d]+)$')
        total_row_re = re.compile(r'^\s*Total +(?P<total_obj>[\d]+) +(?P<total_mem>[\d]+)$')
        for raw in out.splitlines():
            if not raw:
                continue
            line = raw.rstrip()
            match = total_mem_re.match(line)
            if match:
                summary = parsed.setdefault('summary', {})
                summary.update({'total_memory': int(match.group('total_memory'))})
                continue
            match = converged_re.match(line)
            if match:
                summary.update({'numof_converged_tables': int(match.group('numof_converged_tables'))})
                continue
            match = table_re.match(line)
            if match:
                # subsequent producer/total rows attach to this table
                table = summary.setdefault('table_name', {}).setdefault(match.group('table_name'), {})
                continue
            match = producer_re.match(line)
            if match:
                fields = match.groupdict()
                name = fields.pop('producer_name').lower()
                producer = table.setdefault('producer_name', {}).setdefault(name, {})
                producer.update({key: int(value) for key, value in fields.items()})
                producer.update({'producer_name': name})
                continue
            match = total_row_re.match(line)
            if match:
                # the table's Total row lives directly under 'producer_name'
                totals = table.setdefault('producer_name', {})
                totals.update({key: int(value) for key, value in match.groupdict().items()})
                continue
        return parsed
# ====================================================
# schema for show l2route fl all
# ====================================================
class ShowL2routeFlAllSchema(MetaParser):
    """Schema for:
        show l2route fl all"""
    # Layout: topology -> topo_id -> <id> -> peer_id -> <peer> -> row fields;
    # num_of_peer_id is derived by the parser after all rows are read.
    schema = {
        'topology': {
            'topo_id': {
                Any():{
                    Optional('num_of_peer_id'): int,  # count of peers under this topology
                    'peer_id':{
                        Any():{
                            'topo_id': int,           # topology id (repeats the outer key)
                            'peer_id': int,           # peer id (repeats the key)
                            'flood_list': str,        # flood-list address from the row
                            'is_service_node': str,   # "Service Node" column, lower-cased
                        },
                    },
                },
            },
        },
    }
# ====================================================
# Parser for show l2route fl all
# ====================================================
class ShowL2routeFlAll(ShowL2routeFlAllSchema):
    """parser for:
        show l2route fl all"""
    cli_command = 'show l2route fl all'
    def cli(self, output=None):
        """Parse `show l2route fl all` into the schema dictionary.

        Args:
            output: pre-collected CLI output; when None the command is
                executed on the connected device.
        """
        # execute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # Topology ID Peer-id     Flood List      Service Node
        # ----------- ----------- --------------- ------------
        p1 = re.compile(r'^\s*(?P<topo_id>[\d]+) +(?P<peer_id>[\d]+) +(?P<flood_list>[\w\.d]+) +(?P<is_service_node>[\w]+)$')
        for line in out.splitlines():
            if not line:
                continue
            line = line.rstrip()
            m = p1.match(line)
            if m:
                group = m.groupdict()
                topo_id = int(group.pop('topo_id'))
                peer_id = int(group.pop('peer_id'))
                peer_dict = result_dict.setdefault('topology', {}).setdefault('topo_id', {}).\
                    setdefault(topo_id, {}).setdefault('peer_id', {}).setdefault(peer_id, {})
                peer_dict.update({'topo_id': topo_id})
                peer_dict.update({'peer_id': peer_id})
                peer_dict.update({'flood_list': group.pop('flood_list')})
                peer_dict.update({'is_service_node': group.pop('is_service_node').lower()})
                continue
        # derive the per-topology peer count once all rows are collected
        if result_dict:
            for topo_id in result_dict['topology']['topo_id']:
                peers = result_dict['topology']['topo_id'][topo_id]['peer_id']
                result_dict['topology']['topo_id'][topo_id]['num_of_peer_id'] = len(peers)
        return result_dict
# ===================================================
# Schema for show running-config nv ovelay
# ===================================================
class ShowRunningConfigNvOverlaySchema(MetaParser):
    """Schema for:
        show running-config nv overlay"""
    # Top level mixes scalar feature settings, one entry per nve interface
    # (keyed by the interface name, e.g. 'nve1'), and a 'multisite' section
    # listing dci/fabric tracked links.
    schema = {
        Optional('evpn_multisite_border_gateway'): int,  # site id from "evpn multisite border-gateway"
        Optional('multisite_convergence_time') : int,    # "delay-restore time" value
        Optional('enabled_nv_overlay'): bool,            # True when "feature nv overlay" is present
        Any():{
            Optional('nve_name'):str,                    # nve interface name (repeats the key)
            Optional('if_state'): str,                   # 'up' when "no shutdown" is configured
            Optional('host_reachability_protocol'): str,
            Optional('adv_vmac'): bool,                  # "advertise virtual-rmac"
            Optional('source_if'): str,                  # "source-interface"
            Optional('multisite_bgw_if'): str,           # "multisite border-gateway interface"
            Optional('vni'):{
                Any():{
                    Optional('vni'): int,                              # member vni (repeats the key)
                    Optional('associated_vrf'): bool,                  # "associate-vrf" suffix present
                    Optional('multisite_ingress_replication'): bool,
                    Optional('mcast_group'): str
                },
            },
        },
        Optional('multisite'):{
            Optional('dci_links'):{
                Any():{
                    'if_name': str,
                    'if_state': str,
                },
            },
            Optional('fabric_links'): {
                Any(): {
                    'if_name': str,
                    'if_state': str,
                },
            },
        },
    }
# ====================================================
# Parser for show running-config nv overlay
# =====================================================
class ShowRunningConfigNvOverlay(ShowRunningConfigNvOverlaySchema):
    """parser for:
        show running-config nv overlay"""
    cli_command = 'show running-config nv overlay'
    def cli(self, output=None):
        """Parse `show running-config nv overlay` into the schema dictionary.

        The config is positional: indented lines apply to the most recently
        seen `interface nveX` (nve_dict), `member vni` (vni_dict), or
        non-nve `interface` (interface) — hence the state carried across
        loop iterations below.

        Args:
            output: pre-collected CLI output; when None the command is
                executed on the connected device.
        """
        # execute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # feature nv overlay
        p0 = re.compile(r'^\s*feature nv overlay$')
        # evpn multisite border-gateway 111111
        p1 = re.compile(r'^\s*evpn multisite border-gateway +(?P<evpn_multisite_border_gateway>[\w]+)$')
        # delay-restore time 185
        p2 = re.compile(r'^\s*delay-restore time +(?P<evpn_msite_bgw_delay_restore_time>[\d]+)$')
        # interface nve1
        p3 = re.compile(r'^\s*interface +(?P<nve_name>nve[\d]+)$')
        # no shutdown
        p4 = re.compile(r'^\s*no shutdown$')
        # host-reachability protocol bgp
        p5 = re.compile(r'^\s*host-reachability protocol +(?P<host_reachability_protocol>[\w]+)$')
        # advertise virtual-rmac
        p6 = re.compile(r'^\s*advertise virtual-rmac$')
        # source-interface loopback1
        p7 = re.compile(r'^\s*source-interface +(?P<source_if>[\w]+)$')
        # multisite border-gateway interface loopback3
        p8 = re.compile(r'^\s*multisite +border\-gateway +interface +(?P<multisite_bgw_if>[\w]+)$')
        # member vni 10100 associate-vrf
        p9 = re.compile(r'^\s*member vni +(?P<nve_vni>[\d]+)( +(?P<associated_vrf>[\w\-]+))?$')
        # multisite ingress-replication
        p10 = re.compile(r'^\s*multisite ingress-replication$')
        # mcast-group 231.100.1.1
        p11 = re.compile(r'^\s*mcast-group +(?P<mcast_group>[\d\.]+)$')
        # interface Ethernet1/1  (negative lookahead keeps nve out of this one)
        p12 = re.compile(r'^\s*interface +(?P<interface>(?!nve)[\w\/]+)$')
        # evpn multisite fabric-tracking
        # evpn multisite dci-tracking
        p13 = re.compile(r'^\s*evpn multisite +(?P<fabric_dci_tracking>[\w\-]+)$')
        for line in out.splitlines():
            if line:
                line = line.rstrip()
            else:
                continue
            m = p0.match(line)
            if m:
                result_dict.update({'enabled_nv_overlay': True})
                continue
            m = p1.match(line)
            if m:
                multisite_border_gateway = m.groupdict().pop('evpn_multisite_border_gateway')
                result_dict.update({'evpn_multisite_border_gateway': int(multisite_border_gateway)})
                continue
            m = p2.match(line)
            if m:
                evpn_msite_bgw_delay_restore_time = m.groupdict().pop('evpn_msite_bgw_delay_restore_time')
                result_dict.update({'multisite_convergence_time': int(evpn_msite_bgw_delay_restore_time)})
                continue
            m = p3.match(line)
            if m:
                # start a new per-nve section; the handlers below update it
                nve_name = m.groupdict().pop('nve_name')
                nve_dict = result_dict.setdefault(nve_name, {})
                nve_dict.update({'nve_name': nve_name})
                continue
            m = p4.match(line)
            if m:
                # "no shutdown" under the current nve interface
                nve_dict.update({'if_state': "up"})
                continue
            m = p5.match(line)
            if m:
                host_reachability_protocol = m.groupdict().pop('host_reachability_protocol')
                nve_dict.update({'host_reachability_protocol': host_reachability_protocol})
                continue
            m = p6.match(line)
            if m:
                nve_dict.update({'adv_vmac': True})
                continue
            m = p7.match(line)
            if m:
                source_if = m.groupdict().pop('source_if')
                nve_dict.update({'source_if': source_if})
                continue
            m = p8.match(line)
            if m:
                multisite_bgw_if = m.groupdict().pop('multisite_bgw_if')
                nve_dict.update({'multisite_bgw_if': multisite_bgw_if})
                continue
            m = p9.match(line)
            if m:
                # start a new vni section under the current nve interface
                group = m.groupdict()
                nve_vni = int(group.pop('nve_vni'))
                vni_dict = nve_dict.setdefault('vni',{}).setdefault(nve_vni,{})
                vni_dict.update({'vni':nve_vni})
                if group.get('associated_vrf'):
                    vni_dict.update({'associated_vrf':True})
                    group.pop('associated_vrf')
                else:
                    vni_dict.update({'associated_vrf': False})
                continue
            m = p10.match(line)
            if m:
                vni_dict.update({'multisite_ingress_replication': True})
                continue
            m = p11.match(line)
            if m:
                mcast = m.groupdict().pop('mcast_group')
                vni_dict.update({'mcast_group': mcast})
                continue
            m = p12.match(line)
            if m:
                # remember the non-nve interface; a following p13 tracking
                # line attaches to it
                interface = m.groupdict().pop('interface')
                continue
            m = p13.match(line)
            if m:
                tracking = m.groupdict().pop('fabric_dci_tracking')
                tracking_dict = result_dict.setdefault('multisite', {})
                if 'fabric' in tracking:
                    fabric_dict = tracking_dict.setdefault('fabric_links', {}).setdefault(interface, {})
                    fabric_dict.update({'if_name': interface})
                    fabric_dict.update({'if_state': 'up'})
                if 'dci' in tracking:
                    dci_dict = tracking_dict.setdefault('dci_links', {}).setdefault(interface, {})
                    dci_dict.update({'if_name': interface})
                    dci_dict.update({'if_state': 'up'})
                continue
        return result_dict
# ====================================================
# schema for show nve vni ingress-replication
# ====================================================
class ShowNveVniIngressReplicationSchema(MetaParser):
    """Schema for:
        show nve vni ingress-replication"""
    # Layout: <nve-interface> -> vni -> <vni> -> optional replication peers
    # (the replication columns are absent on some rows).
    schema ={
        Any(): {
            'vni': {
                Any(): {
                    'vni': int,                    # VNI id (repeats the key)
                    Optional('repl_ip'): {
                        Any(): {
                            Optional('repl_ip'): str,  # replication peer IP (repeats the key)
                            Optional('source'): str,   # learning source, lower-cased (e.g. 'bgp-imet')
                            Optional('up_time'): str,  # uptime column as printed
                        }
                    }
                }
            }
        }
    }
# ====================================================
# Parser for show nve vni ingress-replication
# ====================================================
class ShowNveVniIngressReplication(ShowNveVniIngressReplicationSchema):
    """parser for:
        show nve vni Ingress-replication"""
    cli_command = 'show nve vni ingress-replication'
    def cli(self, output=None):
        """Parse `show nve vni ingress-replication`, executing the command
        when no pre-collected output is supplied."""
        out = self.device.execute(self.cli_command) if output is None else output
        parsed = {}
        # Interface VNI      Replication List   Source  Up Time
        # --------- -------- ----------------- ------- -------
        # nve1      10101    7.7.7.7            BGP-IMET 1d02h
        row_re = re.compile(r'^\s*(?P<nve_name>[\w]+) +(?P<vni>[\d]+)( +(?P<replication_list>[\w\.]+)'
                            ' +(?P<source>[\w\-]+) +(?P<uptime>[\w\:]+))?$')
        for raw in out.splitlines():
            if not raw:
                continue
            match = row_re.match(raw.rstrip())
            if not match:
                continue
            fields = match.groupdict()
            vni = int(fields['vni'])
            vni_dict = parsed.setdefault(fields['nve_name'], {}).setdefault('vni', {}).setdefault(vni, {})
            vni_dict.update({'vni': vni})
            # the last three columns are optional; present only when a
            # replication peer is listed for the VNI
            if fields['replication_list']:
                peer_ip = fields['replication_list'].strip()
                peer = vni_dict.setdefault('repl_ip', {}).setdefault(peer_ip, {})
                peer.update({'repl_ip': peer_ip})
                peer.update({'source': fields['source'].lower()})
                peer.update({'up_time': fields['uptime']})
        return parsed
# ====================================================
# schema for show fabric multicast globals
# ====================================================
class ShowFabricMulticastGlobalsSchema(MetaParser):
    """Schema for:
        show fabric multicast globals"""
    # One flat 'multicast'->'globals' section; every value comes from a
    # single "Label: value" line of the command output.
    schema ={
        'multicast': {
            'globals': {
                'pruning': str,                   # "Pruning:" value
                'switch_role': str,               # may be "" when the role column is empty
                'fabric_control_seg': str,
                'peer_fabric_ctrl_addr': str,
                'advertise_vpc_rpf_routes': str,  # lower-cased
                'created_vni_list': str,
                'fwd_encap': str,                 # e.g. '(null)'
                'overlay_distributed_dr': bool,   # False iff the output says FALSE
                'overlay_spt_only': bool,         # False iff the output says FALSE
            }
        }
    }
# ====================================================
# Parser for show fabric multicast globals
# ====================================================
class ShowFabricMulticastGlobals(ShowFabricMulticastGlobalsSchema):
    """parser for:
        show fabric multicast globals"""
    cli_command = 'show fabric multicast globals'
    def cli(self, output=None):
        """Parse `show fabric multicast globals` into the schema dictionary.

        Args:
            output: pre-collected CLI output; when None the command is
                executed on the connected device.
        """
        # execute command to get output
        if output is None:
            out = self.device.execute(self.cli_command)
        else:
            out = output
        result_dict = {}
        # Every field is one "Label: value" line; map each to
        # (compiled pattern, schema key, transform for the captured value).
        simple_fields = [
            # Pruning: segment-based
            (re.compile(r'^\s*Pruning: +(?P<value>[\w\-]+)$'), 'pruning', str),
            # Fabric Control Seg: Null
            (re.compile(r'^\s*Fabric +Control +Seg: +(?P<value>[\w]+)$'), 'fabric_control_seg', str),
            # Peer Fabric Control Address: 0.0.0.0
            (re.compile(r'^\s*Peer +Fabric +Control +Address: +(?P<value>[\w\.]+)$'), 'peer_fabric_ctrl_addr', str),
            # Advertising vPC RPF routes: Disabled
            (re.compile(r'^\s*Advertising +vPC +RPF +routes: +(?P<value>[\w]+)$'), 'advertise_vpc_rpf_routes', str.lower),
            # Created VNI List: -
            (re.compile(r'^\s*Created +VNI +List: +(?P<value>[\w\-]+)$'), 'created_vni_list', str),
            # Fwd Encap: (null)
            (re.compile(r'^\s*Fwd +Encap: +(?P<value>[\w\\(\)]+)$'), 'fwd_encap', str),
            # Overlay Distributed-DR: FALSE  -> bool (False only for FALSE)
            (re.compile(r'^\s*Overlay +Distributed\-DR: +(?P<value>[\w]+)$'), 'overlay_distributed_dr',
             lambda v: v.lower() != 'false'),
            # Overlay spt-only: TRUE  -> bool (False only for FALSE)
            (re.compile(r'^\s*Overlay +spt\-only: +(?P<value>[\w]+)$'), 'overlay_spt_only',
             lambda v: v.lower() != 'false'),
        ]
        # Switch role:            (the value may be missing entirely)
        switch_role_re = re.compile(r'^\s*Switch +role:( +(?P<switch_role>[\w]+))?$')
        for line in out.splitlines():
            if not line:
                continue
            line = line.rstrip()
            m = switch_role_re.match(line)
            if m:
                # setdefault on every hit so line ordering cannot raise a
                # NameError (the old code required "Pruning:" to come first)
                global_dict = result_dict.setdefault('multicast', {}).setdefault('globals', {})
                global_dict.update({'switch_role': m.group('switch_role') or ""})
                continue
            for pattern, key, convert in simple_fields:
                m = pattern.match(line)
                if m:
                    global_dict = result_dict.setdefault('multicast', {}).setdefault('globals', {})
                    global_dict.update({key: convert(m.group('value'))})
                    break
        return result_dict
# ==========================================================
# schema for show fabric multicast ipv4 sa-ad-route vrf all
# ==========================================================
class ShowFabricMulticastIpSaAdRouteSchema(MetaParser):
    """Schema for:
        show fabric multicast ipv4 sa-ad-route
        show fabric multicast ipv4 sa-ad-route vrf <vrf>
        show fabric multicast ipv4 sa-ad-route vrf all"""
    # Layout: multicast -> vrf -> <vrf-name> -> ipv4 SA-AD routes keyed by
    # group address, then source address, then the interested fabric nodes.
    schema ={
        "multicast": {
            "vrf": {
                Any(): {
                    "vnid": str,  # VNI id from the VRF header line (kept as string)
                    Optional("address_family"): {
                        Any(): {
                            "sa_ad_routes": {
                                "gaddr": {
                                    Any(): {
                                        "grp_len": int,   # prefix length of the group address
                                        "saddr": {
                                            Any(): {
                                                "src_len": int,  # prefix length of the source address
                                                "uptime": str,
                                                Optional("interested_fabric_nodes"): {
                                                    Any(): {
                                                        "uptime": str,
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
# ===========================================================
# Parser for show fabric multicast ipv4 sa-ad-route vrf all
# ==========================================================
class ShowFabricMulticastIpSaAdRoute(ShowFabricMulticastIpSaAdRouteSchema):
    """parser for:
        show fabric multicast ipv4 sa-ad-route
        show fabric multicast ipv4 sa-ad-route vrf <vrf>
        show fabric multicast ipv4 sa-ad-route vrf all"""
    cli_command = ['show fabric multicast ipv4 sa-ad-route vrf {vrf}','show fabric multicast ipv4 sa-ad-route']
    def cli(self,vrf="",output=None):
        """Parse the MVPN SA-AD route database, optionally scoped to one vrf.

        Args:
            vrf: vrf name to query; "" queries the default command form.
            output: pre-collected CLI output; when None the command is
                executed on the connected device.
        """
        if vrf:
            cmd = self.cli_command[0].format(vrf=vrf)
        else:
            vrf = "default"
            cmd = self.cli_command[1]
        if output is None:
            out = self.device.execute(cmd)
        else:
            out = output
        result_dict = {}
        # VRF "default" MVPN SA AD Route Database VNI: 0
        # VRF "vni_10100" MVPN SA AD Route Database VNI: 10100
        p1 = re.compile(r'^\s*VRF +\"(?P<vrf_name>\S+)\" +MVPN +SA +AD +Route +Database'
                        r' +VNI: +(?P<vnid>[\d]+)$')
        # Src Active AD Route: (100.101.1.3/32, 238.8.4.101/32) uptime: 00:01:01
        p2 = re.compile(r'^\s*Src +Active +AD +Route: +\((?P<saddr>[\w\/\.]+), +(?P<gaddr>[\w\/\.]+)\)'
                        r' +uptime: +(?P<uptime>[\w\.\:]+)$')
        # This node, uptime: 00:01:01
        p4 = re.compile(r'^\s*(?P<interested_fabric_nodes>[\w\s\.]+), +uptime: +(?P<interest_uptime>[\w\.\:]+)$')
        for line in out.splitlines():
            if not line:
                continue
            line = line.rstrip()
            m = p1.match(line)
            if m:
                group = m.groupdict()
                vrf_dict = result_dict.setdefault('multicast', {}).setdefault('vrf', {}).\
                    setdefault(group['vrf_name'], {})
                vrf_dict.update({'vnid': group['vnid']})
                continue
            m = p2.match(line)
            if m:
                group = m.groupdict()
                address_family_dict = vrf_dict.setdefault('address_family', {}).setdefault('ipv4', {})
                saddr = group['saddr']
                gaddr = group['gaddr']
                gaddr_dict = address_family_dict.setdefault('sa_ad_routes', {}).\
                    setdefault('gaddr', {}).setdefault(gaddr, {})
                # prefix lengths come from the CIDR suffix of each address
                gaddr_dict.update({'grp_len': int(gaddr.split('/')[1])})
                saddr_dict = gaddr_dict.setdefault('saddr', {}).setdefault(saddr, {})
                saddr_dict.update({'src_len': int(saddr.split('/')[1])})
                saddr_dict.update({'uptime': group['uptime']})
                continue
            m = p4.match(line)
            if m:
                # node lines attach to the most recent SA-AD route (p2)
                group = m.groupdict()
                interested_dict = saddr_dict.setdefault('interested_fabric_nodes', {}).\
                    setdefault(group['interested_fabric_nodes'], {})
                interested_dict.update({'uptime': group['interest_uptime']})
                continue
        return result_dict
# ==========================================================
# schema for show fabric multicast ipv4 l2-mroute vni all
# ==========================================================
class ShowFabricMulticastIpL2MrouteSchema(MetaParser):
    """Schema for:
        show fabric multicast ipv4 l2-mroute
        show fabric multicast ipv4 l2-mroute vni <vni>
        show fabric multicast ipv4 l2-mroute vni all"""
    # Layout: multicast -> l2_mroute -> vni -> <vni> -> mroutes keyed by
    # group address then source address ('*' allowed for the source).
    schema = {
        'multicast': {
            "l2_mroute": {
                "vni": {
                    Any(): {
                        "vnid": str,  # VNI id (repeats the key, kept as string)
                        Optional("fabric_l2_mroutes"): {
                            "gaddr": {
                                Any(): {
                                    "saddr": {
                                        Any(): {
                                            "interested_fabric_nodes": {
                                                Any(): {
                                                    "node": str,  # node name (repeats the key)
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
# ===========================================================
# Parser for show fabric multicast ipv4 l2-mroute vni all
# ==========================================================
class ShowFabricMulticastIpL2Mroute(ShowFabricMulticastIpL2MrouteSchema):
    """parser for:
        show fabric multicast ipv4 l2-mroute
        show fabric multicast ipv4 l2-mroute vni <vni>
        show fabric multicast ipv4 l2-mroute vni all"""
    cli_command = ['show fabric multicast ipv4 l2-mroute vni {vni}','show fabric multicast ipv4 l2-mroute vni all']
    def cli(self, vni="",output=None):
        """Parse the EVPN C-multicast route database, optionally for one vni."""
        cmd = self.cli_command[0].format(vni=vni) if vni else self.cli_command[1]
        out = self.device.execute(cmd) if output is None else output
        parsed = {}
        # EVPN C-Mcast Route Database for VNI: 10101
        vni_re = re.compile(r'^\s*EVPN +C\-Mcast +Route +Database +for +VNI: +(?P<vni>[\d]+)$')
        # Fabric L2-Mroute: (*, 231.1.3.101/32)
        mroute_re = re.compile(r'^\s*Fabric +L2\-Mroute: +\((?P<saddr>[\w\/\.\*]+), +(?P<gaddr>[\w\/\.]+)\)$')
        # Interested Fabric Nodes:
        header_re = re.compile(r'^\s*Interested Fabric Nodes:$')
        # This node   (node names are indented exactly four spaces)
        node_re = re.compile(r'^(?P<space>\s{4})(?P<interested_fabric_nodes>[\w\s\.]+)$')
        in_node_list = False
        for raw in out.splitlines():
            if not raw:
                continue
            line = raw.rstrip()
            match = vni_re.match(line)
            if match:
                # following mroute entries belong to this VNI
                vni = match.group('vni')
                continue
            match = mroute_re.match(line)
            if match:
                vni_dict = parsed.setdefault('multicast', {}).setdefault('l2_mroute', {}).\
                    setdefault('vni', {}).setdefault(vni, {})
                vni_dict.update({'vnid': vni})
                route_dict = vni_dict.setdefault('fabric_l2_mroutes', {}).\
                    setdefault('gaddr', {}).setdefault(match.group('gaddr'), {}).\
                    setdefault('saddr', {}).setdefault(match.group('saddr'), {})
                # node names are only accepted after the header for this route
                in_node_list = False
                continue
            if header_re.match(line):
                in_node_list = True
                continue
            match = node_re.match(line)
            if match:
                if in_node_list:
                    node = match.group('interested_fabric_nodes')
                    route_dict.setdefault('interested_fabric_nodes', {}).\
                        setdefault(node, {}).update({'node': node})
                continue
        return parsed
| [
"[email protected]"
]
| |
0cd87e0d9eca96df30c68ee957e543ea4bf80730 | 08ee36e0bb1c250f7f2dfda12c1a73d1984cd2bc | /src/mnistk/networks/linearrelu_19.py | 7b3a00c5cb35632a17f95048599dcdc9247a02b4 | []
| no_license | ahgamut/mnistk | 58dadffad204602d425b18549e9b3d245dbf5486 | 19a661185e6d82996624fc6fcc03de7ad9213eb0 | refs/heads/master | 2021-11-04T07:36:07.394100 | 2021-10-27T18:37:12 | 2021-10-27T18:37:12 | 227,103,881 | 2 | 1 | null | 2020-02-19T22:07:24 | 2019-12-10T11:33:09 | Python | UTF-8 | Python | false | false | 1,091 | py | # -*- coding: utf-8 -*-
"""
linearrelu_19.py
:copyright: (c) 2019 by Gautham Venkatasubramanian.
:license: MIT
"""
import torch
from torch import nn
class LinearReLU_19(nn.Module):
    """Fully-connected MNIST classifier: 784 -> 75 -> 43 -> 34 -> 10 -> 10,
    with ReLU between hidden layers and a log-softmax output."""
    def __init__(self):
        nn.Module.__init__(self)
        # Register the layers under the names f0..f8 so existing saved
        # state_dicts keep loading unchanged.
        stages = (
            nn.Linear(in_features=784, out_features=75, bias=False),
            nn.ReLU(inplace=False),
            nn.Linear(in_features=75, out_features=43, bias=True),
            nn.ReLU(inplace=False),
            nn.Linear(in_features=43, out_features=34, bias=True),
            nn.ReLU(inplace=False),
            nn.Linear(in_features=34, out_features=10, bias=True),
            nn.Linear(in_features=10, out_features=10, bias=False),
            nn.LogSoftmax(dim=1),
        )
        for index, stage in enumerate(stages):
            setattr(self, "f{}".format(index), stage)
    def forward(self, *inputs):
        # Flatten each image to a 784-vector, then run f0..f8 in order.
        x = inputs[0].view(inputs[0].shape[0], 784)
        for index in range(9):
            x = getattr(self, "f{}".format(index))(x)
        return x
| [
"[email protected]"
]
| |
b155c696a2a8dd91263093b7d99c2201926413c7 | e558e99f3403b5931573789d88c2ad47fffe7362 | /sum/two_sum.py | 7a24bce29b647d3f00253a6d4ffa9dfa70458f70 | []
| no_license | gerrycfchang/leetcode-python | a8a3a408381288a34caada8ca68c47c7354459fd | 7fa160362ebb58e7286b490012542baa2d51e5c9 | refs/heads/master | 2021-05-11T00:38:17.925831 | 2018-07-31T14:50:42 | 2018-07-31T14:50:42 | 118,306,858 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,122 | py | '''
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
'''
class Solution(object):
    def twoSum(self, nums, target):
        """Two-pass lookup: return indices of two distinct elements summing
        to target, or None when no such pair exists.

        :type nums: List[int]
        :type target: int
        :rtype: List[int]
        """
        # value -> last index where it occurs
        index_of = {}
        for i in range(len(nums)):
            index_of[nums[i]] = i
        for i in range(len(nums)):
            # the exact complement is target - nums[i]; the old abs() here
            # broke every case with a negative complement
            j = index_of.get(target - nums[i])
            # j != i prevents reusing the same element twice
            if j is not None and j != i:
                return [i, j]
        return None
    def twoSumSol(self, nums, target):
        """One-pass variant: record each value's index while scanning;
        a plain dict replaces the misused collections.Counter.

        :rtype: List[int]
        """
        seen = {}
        for i in range(len(nums)):
            part = target - nums[i]
            if part in seen:
                return [seen[part], i]
            seen[nums[i]] = i
        return None
if __name__ == '__main__':
    # smoke test: 0 (index 0) + 3 (index 3) == 3
    nums = [0, 16, 11, 3]
    target = 3
    checker = Solution()
    assert checker.twoSumSol(nums, target) == [0, 3]
| [
"[email protected]"
]
| |
38367fd6306431bab28c7d9476eb7f23583717bf | 1ee3dc4fa096d12e409af3a298ba01f5558c62b5 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/highlevelstream/udf/valuelist/valuelist.py | dbfa6ca0e26dadbdb5304e3fe9253effed894d09 | [
"MIT"
]
| permissive | parthpower/ixnetwork_restpy | 321e64a87be0a4d990276d26f43aca9cf4d43cc9 | 73fa29796a5178c707ee4e21d90ff4dad31cc1ed | refs/heads/master | 2020-07-04T13:34:42.162458 | 2019-08-13T20:33:17 | 2019-08-13T20:33:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,049 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class ValueList(Base):
    """The ValueList class encapsulates a system managed valueList node in the ixnetwork hierarchy.

    An instance of the class can be obtained by accessing the ValueList property from a parent instance.
    The internal properties list will be empty when the property is accessed and is populated from the server by using the find method.
    """

    # SDM node name used when building REST resource paths.
    _SDM_NAME = 'valueList'

    def __init__(self, parent):
        super(ValueList, self).__init__(parent)

    @property
    def AvailableWidths(self):
        """Specifies all the possible widths available for a UDF in particular Type.

        Returns:
            list(str)
        """
        return self._get_attribute('availableWidths')

    @property
    def StartValueList(self):
        """Specifies the starting value for a particular UDF.

        Returns:
            list(number)
        """
        return self._get_attribute('startValueList')

    @StartValueList.setter
    def StartValueList(self, value):
        self._set_attribute('startValueList', value)

    @property
    def Width(self):
        """Specifies the width of the UDF.

        Returns:
            str(16|24|32|8)
        """
        return self._get_attribute('width')

    @Width.setter
    def Width(self, value):
        self._set_attribute('width', value)

    def update(self, StartValueList=None, Width=None):
        """Updates a child instance of valueList on the server.

        NOTE: this method forwards ``locals()`` to the server layer, so the
        parameter names must match the SDM attribute names exactly.

        Args:
            StartValueList (list(number)): Specifies the starting value for a particular UDF.
            Width (str(16|24|32|8)): Specifies the width of the UDF.

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        self._update(locals())

    def find(self, AvailableWidths=None, StartValueList=None, Width=None):
        """Finds and retrieves valueList data from the server.

        All named parameters support regex and can be used to selectively retrieve valueList data from the server.
        By default the find method takes no parameters and will retrieve all valueList data from the server.

        Args:
            AvailableWidths (list(str)): Specifies all the possible widths available for a UDF in particular Type.
            StartValueList (list(number)): Specifies the starting value for a particular UDF.
            Width (str(16|24|32|8)): Specifies the width of the UDF.

        Returns:
            self: This instance with matching valueList data retrieved from the server available through an iterator or index

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # Also relies on locals(): parameter names are the query fields.
        return self._select(locals())

    def read(self, href):
        """Retrieves a single instance of valueList data from the server.

        Args:
            href (str): An href to the instance to be retrieved

        Returns:
            self: This instance with the valueList data from the server available through an iterator or index

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| [
"[email protected]"
]
| |
0e128695b6d32a1534a11c72d93838c79e35da17 | a89d5746ab0255a32558be21c33c2d028e9b7148 | /数学基础/code/朴素贝叶斯/高斯朴素贝叶斯.py | 4b876860ed9d51444155b18dc4a4af60d2f108a0 | []
| no_license | 15110500442/pa- | 9c4bf3b2e6906c4df1e609f65a58e00660f31fa7 | 561757e6f6f1e16deaa1b9e5af5ac78fed0e21f5 | refs/heads/master | 2020-04-13T01:56:20.208424 | 2018-12-24T11:39:11 | 2018-12-24T11:39:11 | 162,887,811 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | #测试数据
# Demo: Gaussian Naive Bayes on a tiny hand-made data set.
import numpy as np
from sklearn.naive_bayes import GaussianNB

# Training set: six 2-D feature vectors with their class labels.
features_train = np.array([[1, 1], [1, 2], [2, 3], [2, 3], [1, 4], [2, 4]])
labels_train = np.array([1, 2, 3, 1, 1, 3])

# Instantiate and train the classifier (fit == train).
clf = GaussianNB()
clf.fit(features_train, labels_train)

# Predict the class of a single unseen sample and print it.
features_test = np.array([[1, 3]])
pred = clf.predict(features_test)
print("预测的类别为:\t", pred)
| [
"[email protected]"
]
| |
f30b74b74f08df9126992b9926bc6a514aa82cac | 3cd1246ff58f26329021f2d13caa62221c91d5a4 | /testdata/python/stmt/with_.py | 9fd016ae112182564286080a9cbcc9d114768021 | []
| no_license | mwkmwkmwk/unpyc | 0929e15fb37599496930299d7ced0bf1bedd7e99 | 000fdaec159050c94b7ecf6ab57be3950676f778 | refs/heads/master | 2020-12-01T14:01:57.592806 | 2016-03-21T14:11:43 | 2016-03-21T14:12:01 | 230,650,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | from __future__ import with_statement
# Decompiler test fixture exercising `with` statement shapes: bare,
# aliased (`as b`), tuple target (`as (a, b)`), and `continue` from a
# `with` nested inside a loop.
# NOTE(review): names a-d are deliberately unbound -- this is parse /
# round-trip test data, not runnable code; nesting of the flattened
# lines reconstructed here should be confirmed against the original.
with a:
    b
    c
d
with a as b:
    c
with a as (a, b):
    c
    d
while a:
    with b as c:
        continue
| [
"[email protected]"
]
| |
5dd5656c70b3d3fb0041c28be639da84f8a05701 | 7bb37821b54c2ffe0dc8cbf2a70bd20c932fb515 | /users/migrations/0006_auto_20190812_1022.py | ae4141e0132bfd3748bbf0be56a5be60ae0212fd | []
| no_license | andrewhstead/dating-site | 6dfed04404fa1ea03594ff08a1d3aa31fe07b47c | bc81ade529ab916093ba80ab009d03a00e1dfab0 | refs/heads/master | 2020-07-02T13:04:02.878187 | 2020-05-07T22:23:09 | 2020-05-07T22:23:09 | 176,342,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | # Generated by Django 2.2.4 on 2019-08-12 09:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: relaxes User.intro to be optional.

    # Must be applied after the previous users-app migration.
    dependencies = [
        ('users', '0005_auto_20190811_2134'),
    ]

    # Make User.intro a nullable/blankable CharField capped at 100 chars.
    operations = [
        migrations.AlterField(
            model_name='user',
            name='intro',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
    ]
| [
"[email protected]"
]
| |
1c0396241c5678129c6a9423cdd169a9fcdbee83 | de213b73f703fb8f285bc8cf15e388cc2f98898f | /venv/bin/Peg.py | 9586b955a31d199a83f0a27a6c4cf33e702d3049 | []
def checkmoves(karta, posx, posy):
    """Count the legal peg-solitaire jumps landing in the empty hole at (posx, posy).

    A jump is legal when, in one of the four cardinal directions, both the
    adjacent cell and the cell behind it hold a peg ('o'): that far peg jumps
    over its neighbour into the empty hole.

    Args:
        karta: list of strings representing the padded board ('o' = peg).
        posx, posy: column/row of an empty hole.

    Returns:
        Number of distinct legal jumps into this hole (0-4).
    """
    moves = 0
    # (dx, dy) unit vectors: left, right, up, down — replaces the four
    # copy-pasted if-blocks of the original.
    for dx, dy in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        if (karta[posy + dy][posx + dx] == 'o'
                and karta[posy + 2 * dy][posx + 2 * dx] == 'o'):
            moves += 1
    return moves
# Read the 7-line board, padded with one blank cell on every side.
karta = [' ']
for _ in range(7):
    karta.append(' ' + input() + ' ')
karta.append(' ')

# Sum the legal jumps into every empty hole of the playable area.
moves = 0
for row in range(1, 8):
    for col in range(8):
        if karta[row][col] == '.':
            moves += checkmoves(karta, col, row)
print(moves)
| [
"[email protected]"
]
| |
a2f18034289b7450eea0d77004a2c70a1f3c0571 | b8e9dd6fd8f8b691cba5a3af2388467bcf6c90bb | /samples/openapi3/client/3_0_3_unit_test/python-experimental/unit_test_api/paths/response_body_post_not_more_complex_schema_response_body_for_content_types/post.py | c63a09de280ebef09cc33bb720ce32b8c0316ad4 | [
"Apache-2.0"
]
| permissive | FallenRiteMonk/openapi-generator | f8b98940219eecf14dc76dced4b0fbd394522aa3 | b6576d11733ecad6fa4a0a616e1a06d502a771b7 | refs/heads/master | 2023-03-16T05:23:36.501909 | 2022-09-02T01:46:56 | 2022-09-02T01:46:56 | 164,609,299 | 0 | 0 | Apache-2.0 | 2019-01-08T09:08:56 | 2019-01-08T09:08:56 | null | UTF-8 | Python | false | false | 7,669 | py | # coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import urllib3
from urllib3._collections import HTTPHeaderDict
from unit_test_api import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from unit_test_api import schemas # noqa: F401
from . import path
class SchemaFor200ResponseBodyApplicationJson(
    schemas.ComposedSchema,
):
    # Generated schema for the 200 JSON response body: a composed schema
    # whose `not` clause rejects payloads matching {"foo": <str>, ...}.

    class MetaOapg:
        additional_properties = schemas.AnyTypeSchema

        class not_schema(
            schemas.DictSchema
        ):
            # The schema that valid payloads must NOT match.

            class MetaOapg:

                class properties:
                    foo = schemas.StrSchema
                    __annotations__ = {
                        "foo": foo,
                    }
                additional_properties = schemas.AnyTypeSchema

            foo: typing.Union[MetaOapg.properties.foo, schemas.Unset]

            @typing.overload
            def __getitem__(self, name: typing.Literal["foo"]) -> typing.Union[MetaOapg.properties.foo, schemas.Unset]: ...

            @typing.overload
            def __getitem__(self, name: str) -> typing.Union[MetaOapg.additional_properties, schemas.Unset]: ...

            def __getitem__(self, name: typing.Union[str, typing.Literal["foo"], ]):
                # dict_instance[name] accessor
                if not hasattr(self.MetaOapg, 'properties') or name not in self.MetaOapg.properties.__annotations__:
                    return super().__getitem__(name)
                try:
                    return super().__getitem__(name)
                except KeyError:
                    return schemas.unset

            def __new__(
                cls,
                *args: typing.Union[dict, frozendict.frozendict, ],
                foo: typing.Union[MetaOapg.properties.foo, str, schemas.Unset] = schemas.unset,
                _configuration: typing.Optional[schemas.Configuration] = None,
                **kwargs: typing.Union[MetaOapg.additional_properties, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
            ) -> 'not_schema':
                return super().__new__(
                    cls,
                    *args,
                    foo=foo,
                    _configuration=_configuration,
                    **kwargs,
                )

    def __getitem__(self, name: typing.Union[str, ]) -> MetaOapg.additional_properties:
        # dict_instance[name] accessor
        if not hasattr(self.MetaOapg, 'properties') or name not in self.MetaOapg.properties.__annotations__:
            return super().__getitem__(name)
        try:
            return super().__getitem__(name)
        except KeyError:
            return schemas.unset

    def __new__(
        cls,
        *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
        _configuration: typing.Optional[schemas.Configuration] = None,
        **kwargs: typing.Union[MetaOapg.additional_properties, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
    ) -> 'SchemaFor200ResponseBodyApplicationJson':
        return super().__new__(
            cls,
            *args,
            _configuration=_configuration,
            **kwargs,
        )
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    # Deserialized 200 response: raw urllib3 response plus the parsed body.
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor200ResponseBodyApplicationJson,
    ]
    # This endpoint defines no response headers.
    headers: schemas.Unset = schemas.unset
# Response descriptor for HTTP 200: how to deserialize each content type.
_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor200ResponseBodyApplicationJson),
    },
)
# Dispatch table from status code string to response descriptor.
_status_code_to_response = {
    '200': _response_for_200,
}
# All content types this endpoint can produce (used for the Accept header).
_all_accept_content_types = (
    'application/json',
)
class BaseApi(api_client.Api):
    # Shared implementation; the public classes below only delegate here.

    def _post_not_more_complex_schema_response_body_for_content_types_oapg(
        self: api_client.Api,
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        used_path = path.value

        _headers = HTTPHeaderDict()
        # TODO add cookie handling

        if accept_content_types:
            for accept_content_type in accept_content_types:
                _headers.add('Accept', accept_content_type)

        # Fire the POST request through the shared api_client.
        response = self.api_client.call_api(
            resource_path=used_path,
            method='post'.upper(),
            headers=_headers,
            stream=stream,
            timeout=timeout,
        )

        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Deserialize per the status-code dispatch table; unknown codes
            # fall back to the raw, undeserialized response wrapper.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)

        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(api_response=api_response)

        return api_response
class PostNotMoreComplexSchemaResponseBodyForContentTypes(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names

    def post_not_more_complex_schema_response_body_for_content_types(
        self: BaseApi,
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        # Thin wrapper: delegates to the shared implementation on BaseApi.
        return self._post_not_more_complex_schema_response_body_for_content_types_oapg(
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
class ApiForpost(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names

    def post(
        self: BaseApi,
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        # Thin wrapper: delegates to the shared implementation on BaseApi.
        return self._post_not_more_complex_schema_response_body_for_content_types_oapg(
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
| [
"[email protected]"
]
| |
7ad3d6880008f2c252e939266d8e8e9eded5ffa4 | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/logging/v2/logging-v2-py/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py | 50feab7c61e55e6a3d999723b820f2fd4318e856 | [
"Apache-2.0"
]
| permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,147 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.logging_v2.types import logging_metrics
from google.protobuf import empty_pb2 # type: ignore
from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
from .grpc import MetricsServiceV2GrpcTransport
class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport):
    """gRPC AsyncIO backend transport for MetricsServiceV2.

    Service for configuring logs-based metrics.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    _grpc_channel: aio.Channel
    # Class-level annotation/default; __init__ rebinds a fresh per-instance
    # dict that caches the lazily-created RPC stub callables.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(cls,
                       host: str = 'logging.googleapis.com',
                       credentials: ga_credentials.Credentials = None,
                       credentials_file: Optional[str] = None,
                       scopes: Optional[Sequence[str]] = None,
                       quota_project_id: Optional[str] = None,
                       **kwargs) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """
        self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            **self_signed_jwt_kwargs,
            **kwargs
        )

    def __init__(self, *,
            host: str = 'logging.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            channel: aio.Channel = None,
            api_mtls_endpoint: str = None,
            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
            ssl_channel_credentials: grpc.ChannelCredentials = None,
            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
            quota_project_id=None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                credentials=self._credentials,
                credentials_file=credentials_file,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def list_log_metrics(self) -> Callable[
            [logging_metrics.ListLogMetricsRequest],
            Awaitable[logging_metrics.ListLogMetricsResponse]]:
        r"""Return a callable for the list log metrics method over gRPC.

        Lists logs-based metrics.

        Returns:
            Callable[[~.ListLogMetricsRequest],
                    Awaitable[~.ListLogMetricsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_log_metrics' not in self._stubs:
            self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary(
                '/google.logging.v2.MetricsServiceV2/ListLogMetrics',
                request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
                response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
            )
        return self._stubs['list_log_metrics']

    @property
    def get_log_metric(self) -> Callable[
            [logging_metrics.GetLogMetricRequest],
            Awaitable[logging_metrics.LogMetric]]:
        r"""Return a callable for the get log metric method over gRPC.

        Gets a logs-based metric.

        Returns:
            Callable[[~.GetLogMetricRequest],
                    Awaitable[~.LogMetric]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_log_metric' not in self._stubs:
            self._stubs['get_log_metric'] = self.grpc_channel.unary_unary(
                '/google.logging.v2.MetricsServiceV2/GetLogMetric',
                request_serializer=logging_metrics.GetLogMetricRequest.serialize,
                response_deserializer=logging_metrics.LogMetric.deserialize,
            )
        return self._stubs['get_log_metric']

    @property
    def create_log_metric(self) -> Callable[
            [logging_metrics.CreateLogMetricRequest],
            Awaitable[logging_metrics.LogMetric]]:
        r"""Return a callable for the create log metric method over gRPC.

        Creates a logs-based metric.

        Returns:
            Callable[[~.CreateLogMetricRequest],
                    Awaitable[~.LogMetric]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_log_metric' not in self._stubs:
            self._stubs['create_log_metric'] = self.grpc_channel.unary_unary(
                '/google.logging.v2.MetricsServiceV2/CreateLogMetric',
                request_serializer=logging_metrics.CreateLogMetricRequest.serialize,
                response_deserializer=logging_metrics.LogMetric.deserialize,
            )
        return self._stubs['create_log_metric']

    @property
    def update_log_metric(self) -> Callable[
            [logging_metrics.UpdateLogMetricRequest],
            Awaitable[logging_metrics.LogMetric]]:
        r"""Return a callable for the update log metric method over gRPC.

        Creates or updates a logs-based metric.

        Returns:
            Callable[[~.UpdateLogMetricRequest],
                    Awaitable[~.LogMetric]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_log_metric' not in self._stubs:
            self._stubs['update_log_metric'] = self.grpc_channel.unary_unary(
                '/google.logging.v2.MetricsServiceV2/UpdateLogMetric',
                request_serializer=logging_metrics.UpdateLogMetricRequest.serialize,
                response_deserializer=logging_metrics.LogMetric.deserialize,
            )
        return self._stubs['update_log_metric']

    @property
    def delete_log_metric(self) -> Callable[
            [logging_metrics.DeleteLogMetricRequest],
            Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the delete log metric method over gRPC.

        Deletes a logs-based metric.

        Returns:
            Callable[[~.DeleteLogMetricRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_log_metric' not in self._stubs:
            self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary(
                '/google.logging.v2.MetricsServiceV2/DeleteLogMetric',
                request_serializer=logging_metrics.DeleteLogMetricRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['delete_log_metric']
# Public export surface of this module.
__all__ = (
    'MetricsServiceV2GrpcAsyncIOTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
]
| bazel-bot-development[bot]@users.noreply.github.com |
469ad333f4179cbdcbf8ce66fba436b6172c4ab3 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /netex/models/fare_structure_element_price_ref.py | 739a4aabbe862e4a51739db6b69106122466c992 | []
| no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 333 | py | from dataclasses import dataclass
from .fare_structure_element_price_ref_structure import FareStructureElementPriceRefStructure
# Default XML namespace for types generated from the NeTEx schema.
__NAMESPACE__ = "http://www.netex.org.uk/netex"


@dataclass
class FareStructureElementPriceRef(FareStructureElementPriceRefStructure):
    # Concrete NeTEx element: binds the generated ref structure to its
    # XML element namespace via the xsdata Meta convention.
    class Meta:
        namespace = "http://www.netex.org.uk/netex"
| [
"[email protected]"
]
| |
5f48983e3802341541423c341cc6f5d54e73071f | b26d11147736cae7a1b10e7deaef08c339cb1e4e | /profiles/views.py | 0173cad0d63ba9afa27f6df0fd76eb4ed69d0cdc | []
| no_license | Komilcoder/Art-blog-website | bcc72e6b18925c141efd28a101c321c82fe7e534 | 8c88e4245a3e5f36593ceade2ab242f331bf3121 | refs/heads/master | 2022-12-15T14:06:57.094660 | 2020-09-02T06:58:00 | 2020-09-02T06:58:00 | 291,511,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,498 | py | from django.shortcuts import render, redirect , get_object_or_404
from .models import Profile,Relationship,RelationshipManager
from .forms import ProfileModelForm, CreateUserForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate,login
from django.contrib import messages
from django.views.generic import ListView
from django.contrib.auth.models import User
from django.db.models import Q
@login_required(login_url='/accounts/login/')
def my_profile_view(request):
    """Render the logged-in user's own profile page with its edit form."""
    profile = Profile.objects.get(user=request.user)
    form = ProfileModelForm(
        request.POST or None,
        request.FILES or None,
        instance=profile,
    )
    context = {'profile': profile, 'form': form, 'confirm': False}
    return render(request, 'profiles/myprofile.html', context)
def invites_received(request):
    """List the friendship invitations the current user has received."""
    profile = Profile.objects.get(user=request.user)
    invites = Relationship.objects.invatiotion_recieved(profile)  # sic: manager method name
    senders = [invite.sender for invite in invites]
    context = {
        'qs': senders,
        'is_empty': len(senders) == 0,
    }
    return render(request, 'profiles/my_invites.html', context)
def profiles_list_view(request):
    """Function-based view: render every profile visible to this user."""
    profiles = Profile.objects.get_all_profile(request.user)
    return render(request, 'profiles/profile_list.html', {'qs': profiles})
# Profiles the current user can still invite (not yet friends).
def invite_profile_list(request):
    """Render the list of profiles available to receive an invitation."""
    candidates = Profile.objects.get_all_profiles_invites(request.user)
    return render(request, 'profiles/invite_list.html', {'qs': candidates})
def Loginpage(request):
    """Authenticate the POSTed credentials and log the user in.

    Redirects home on success; re-renders the login page on failure.
    """
    username = request.POST['username']
    password = request.POST['password']
    user = authenticate(request, username=username, password=password)
    if user is not None:
        # Bug fix: this previously called login(request, login), passing the
        # login *function* instead of the authenticated user.
        login(request, user)
        return redirect('home')
    else:
        return render(request, 'registration/login.html')
def logout_view(request):
    """Log the current user out and return to the home page."""
    # Bug fix: `logout` was never imported at module level (only
    # authenticate/login are), so this view raised NameError.
    from django.contrib.auth import logout
    logout(request)
    return redirect('home')
def Registration(request):
    """Render the sign-up form and create a user account on a valid POST."""
    form = CreateUserForm()
    if request.method == 'POST':
        form = CreateUserForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            messages.success(request, 'Account was created for ' + username)
            return redirect('login')
    # On GET, or an invalid POST, fall through and re-render with this form
    # (invalid POSTs keep their validation errors). The original re-created
    # a blank form in a redundant else branch.
    return render(request, 'registration/signup.html', {'form': form})
# for seeing profile on browser
class ProfileListView(ListView):
    """All profiles visible to the requester, annotated with the users they
    have sent invitations to (rel_receiver) and received them from (rel_sender)."""
    model = Profile
    template_name = 'profiles/profile_list.html'
    context_object_name = 'object_list'

    def get_queryset(self):
        return Profile.objects.get_all_profile(self.request.user)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        user = User.objects.get(username__iexact=self.request.user)
        profile = Profile.objects.get(user=user)
        # Users this profile has sent invitations to / received them from.
        rel_receiver = [rel.receiver.user
                        for rel in Relationship.objects.filter(sender=profile)]
        rel_sender = [rel.sender.user
                      for rel in Relationship.objects.filter(receiver=profile)]
        context['rel_receiver'] = rel_receiver
        context['rel_sender'] = rel_sender
        # Bug fix: the original wrote `context['is_empty'] == True` — a no-op
        # comparison — so is_empty was always False even for an empty list.
        context['is_empty'] = len(self.get_queryset()) == 0
        return context
# this is for sending freindship each other
def send_invatiation(request):
    """Create a pending ('send') Relationship from the current user to the
    POSTed profile, then return to the page the request came from."""
    if request.method == "POST":
        receiver_pk = request.POST.get('profile_pk')
        sender = Profile.objects.get(user=request.user)
        receiver = Profile.objects.get(pk=receiver_pk)
        Relationship.objects.create(sender=sender, receiver=receiver, status='send')
        return redirect(request.META.get('HTTP_REFERER'))
    return redirect('profiles:my_profile')
# this is deleting freindship
def remove_from_friends(request):
    """Delete the Relationship between the current user and the POSTed
    profile, whichever side originally sent the invitation."""
    if request.method == 'POST':
        other_pk = request.POST.get('profile_pk')
        me = Profile.objects.get(user=request.user)
        other = Profile.objects.get(pk=other_pk)
        # Match the relationship in either direction before deleting.
        Relationship.objects.filter(
            (Q(sender=me) & Q(receiver=other)) | (Q(sender=other) & Q(receiver=me))
        ).delete()
        return redirect(request.META.get('HTTP_REFERER'))
    return redirect('profiles:my_profile')
def accept_invatition(request):
    """Accept a pending friendship invitation sent to the current user."""
    if request.method == 'POST':
        pk = request.POST.get('profile_pk')
        sender = Profile.objects.get(pk=pk)
        receiver = Profile.objects.get(user=request.user)
        # Bug fixes: the lookup kwarg was misspelled ('sende'), the status
        # check compared against 'sender' instead of 'send' (the value set
        # by send_invatiation), and the update used '==' (comparison)
        # instead of '=' (assignment), so nothing was ever saved.
        rel = get_object_or_404(Relationship, sender=sender, receiver=receiver)
        if rel.status == 'send':
            rel.status = 'accepted'
            rel.save()
    return redirect('profiles:my_invite')
def reject_invatition(request):
    """POST handler: reject (delete) a pending invitation.

    Bug fix: the POST key was misspelled ``'profile+_pk'``, so ``pk`` was
    always None; use 'profile_pk' like every other view in this module.
    Also always returns a response, even on non-POST requests.
    """
    if request.method == 'POST':
        pk = request.POST.get('profile_pk')
        receiver = Profile.objects.get(user=request.user)
        sender = Profile.objects.get(pk=pk)
        rel = get_object_or_404(Relationship, sender=sender, receiver=receiver)
        rel.delete()
    return redirect('profiles:my_invite')
"[email protected]"
]
| |
b6441bd419e7b43c5518e361d1ff550fe25ded57 | 70628500b7bdfa5fc548c39cbc8e6df449952a98 | /baseball_API/stats/migrations/0017_auto_20161026_2209.py | a95a83c41aeceea5bfe0f1e070e0a6d70a7f17ae | []
| no_license | cjredmond/baseball_API | 7cd4c1bd07560287d135ceb17f93821234a4fd1d | 0bbe8b4573b34915ebe6eae0ec9b1de62ef42d13 | refs/heads/master | 2021-01-16T23:06:33.866358 | 2016-10-27T04:01:21 | 2016-10-27T04:01:21 | 72,024,468 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,220 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-26 22:09
from __future__ import unicode_literals
from django.db import migrations
import csv
def add_master_data(apps, schema_editor):
    """Load Master.csv rows into the stats.Master table.

    Bug fix: ``finalGame`` and ``retroID`` were assigned the literal lists
    ``['finalGame']`` / ``['retroID']`` instead of the CSV row values.
    """
    Master = apps.get_model("stats", "Master")
    with open('Master.csv') as infile:
        for row in csv.DictReader(infile):
            Master.objects.create(
                player=row["playerID"], birthYear=row["birthYear"],
                birthMonth=row["birthMonth"], birthDay=row["birthDay"],
                birthCountry=row["birthCountry"], birthState=row["birthState"],
                birthCity=row["birthCity"], deathYear=row['deathYear'],
                deathMonth=row['deathMonth'], deathDay=row['deathDay'],
                deathState=row['deathState'], deathCountry=row['deathCountry'],
                deathCity=row['deathCity'], nameFirst=row['nameFirst'],
                nameLast=row['nameLast'], nameGiven=row['nameGiven'],
                weight=row['weight'], height=row['height'], bats=row['bats'],
                throws=row['throws'], debut=row['debut'],
                finalGame=row['finalGame'], retroID=row['retroID'],
                bbrefID=row['bbrefID'])
def add_batting_data(apps, schema_editor):
    """Load Batting.csv rows into stats.Batting, linked to Master by playerID.

    Bug fix: ``sac_flies`` was assigned the literal list ``['SF']`` instead
    of the CSV row value ``row['SF']``.
    """
    Master = apps.get_model("stats", "Master")
    Batting = apps.get_model("stats", "Batting")
    with open('Batting.csv') as infile:
        for row in csv.DictReader(infile):
            player = Master.objects.get(player=row["playerID"])
            Batting.objects.create(
                player=player, year=row["yearID"], stint=row['stint'],
                team=row["teamID"], league=row['lgID'], games=row['G'],
                at_bats=row['AB'], runs=row['R'], hits=row['H'],
                doubles=row["2B"], triples=row['3B'], home_runs=row['HR'],
                rbi=row['RBI'], stolen_bases=row['SB'],
                caught_stealing=row['CS'], walks=row['BB'],
                strikeouts=row['SO'], intentional_walks=row['IBB'],
                hit_by_pitch=row['HBP'], sac_hits=row['SH'],
                sac_flies=row['SF'], double_plays=row['GIDP'])
def add_fielding_data(apps, schema_editor):
    """Load Fielding.csv rows into stats.Fielding, linked to Master."""
    Master = apps.get_model("stats", "Master")
    Fielding = apps.get_model("stats", "Fielding")
    with open('Fielding.csv') as infile:
        for row in csv.DictReader(infile):
            # each fielding line references an existing Master player
            player = Master.objects.get(player=row["playerID"])
            Fielding.objects.create(
                player=player, year=row["yearID"], stint=row['stint'],
                team=row["teamID"], league=row['lgID'], position=row['POS'],
                games=row['G'], games_started=row['GS'],
                innOuts=row['InnOuts'], put_outs=row['PO'],
                assists=row['A'], double_plays=row['DP'],
                passed_balls=row['PB'], wild_pitches=row['WP'],
                stolen_bases=row['SB'], caught_stealing=row['CS'],
                zone=row['ZR'])
def add_pitcher_data(apps, schema_editor):
    """Load Pitching.csv rows into stats.Pitcher, linked to Master by playerID.

    NOTE(review): the kwarg ``games_finised`` looks like a typo but must
    match the field name declared on the Pitcher model — confirm before
    renaming either side.
    """
    Master = apps.get_model("stats", "Master")
    Pitcher = apps.get_model("stats", "Pitcher")
    with open('Pitching.csv') as infile:
        reader = csv.DictReader(infile)
        for row in reader:
            #print(row)
            # each pitching line references an existing Master player
            player = Master.objects.get(player=row["playerID"])
            Pitcher.objects.create(player=player, year=row["yearID"], stint=row['stint'], team=row["teamID"],
            league=row['lgID'], wins=row['W'], losses=row['L'], games=row['G'], games_started=row['GS'],
            complete_games=row['CG'], shutouts=row['SHO'], saves=row['SV'], outs_pitched=row['IPouts'],
            hits=row['H'], earned_runs=row['ER'], home_runs=row['HR'], walks=row['BB'], strikeouts=row['SO'],
            opponent_avg=row['BAOpp'], era=row['ERA'], intentional_bb=row['IBB'],
            wild_pitches=row['WP'],hit_by_pitch=row['HBP'], balks=row['BK'],
            batters_faced=row['BFP'], games_finised=row['GF'], runs=row['R'],
            sac_against=row['SH'], sac_flies=row['SF'], ground_dp_against=row['GIDP'])
class Migration(migrations.Migration):
    """Data migration: seed Master/Batting/Fielding/Pitcher from CSV files."""

    dependencies = [
        ('stats', '0016_auto_20161026_2223'),
    ]

    operations = [
        # Master must load first; the other loaders look up Master rows.
        migrations.RunPython(add_master_data),
        migrations.RunPython(add_batting_data),
        migrations.RunPython(add_fielding_data),
        migrations.RunPython(add_pitcher_data)
    ]
| [
"[email protected]"
]
| |
d579d9d7481591148299eedcc255a8d1d8a8cb21 | 7bd15f37ffd26f9f0470cae2b4c1ef491c35c5c1 | /python/dirigible/sheet/tests/test_rewrite_formula_offset_cell_references.py | 3ec2ce88b387f1e57dbbe6c330c75e45665fdf06 | [
"MIT"
]
| permissive | bwhmather/dirigible-spreadsheet | 0063aba1cec7df1dc4fc0d5dbbcfaeeb1dad932f | ff0414912110553a5d0f317495cdba39a077a044 | refs/heads/master | 2020-12-28T22:53:10.312409 | 2014-09-14T16:15:59 | 2014-09-14T16:15:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,396 | py | # Copyright (c) 2010 Resolver Systems Ltd, PythonAnywhere LLP
# See LICENSE.md
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from dirigible.sheet.worksheet import Worksheet
from dirigible.sheet.rewrite_formula_offset_cell_references import (
rewrite_formula, rewrite_source_sheet_formulae_for_cut,
)
class TestRewriteFormulaOffsetCellReferences(unittest.TestCase):
    """Tests for rewrite_formula / rewrite_source_sheet_formulae_for_cut.

    rewrite_formula(formula, col_offset, row_offset, is_cut, region) adjusts
    cell and cell-range references in a formula after a cut/copy-paste by
    (col_offset, row_offset); ``region`` is (left, top, right, bottom) of the
    cut/copied area.  The cases below pin the intended rules: cut only moves
    references into the cut region, copy shifts all relative references,
    absolute ($-prefixed) references never move on copy, and references
    pushed off the grid become '#Invalid!'.
    """

    def test_dont_rewrite_constants(self):
        result = rewrite_formula(
            "B3", 3, 5, False, (1, 2, 3, 4)
        )
        self.assertEquals(result, 'B3')

    def test_safely_handle_none(self):
        self.assertIsNone( rewrite_formula(None, 3, 5, False, (1, 2, 3, 4)) )

    def test_safely_handle_nonsense(self):
        unparseable_nonsense = '=!:booA1:A2'
        self.assertEquals(
            rewrite_formula(unparseable_nonsense, 3, 5, False, (1, 2, 3, 4)),
            unparseable_nonsense
        )

    def test_cut_cell_reference_to_cut_cell_is_rewritten(self):
        result = rewrite_formula(
            "=A2", 2, 1, True, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=C3')

    def test_cut_cell_reference_to_uncut_cell_is_not_rewritten(self):
        result = rewrite_formula(
            "=B3", 2, 1, True, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=B3')

    def test_absolute_cut_cell_reference_to_uncut_cell_is_not_rewritten(self):
        result = rewrite_formula(
            "=$B$3", 2, 1, True, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=$B$3')

    def test_absolute_cut_cell_reference_to_cut_cell_is_rewritten(self):
        result = rewrite_formula(
            "=$A$2", 2, 1, True, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=$C$3')

    def test_copied_cell_reference_to_copied_cell_is_rewritten(self):
        result = rewrite_formula(
            "=A2", 2, 1, False, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=C3')

    def test_copied_cell_reference_to_uncopied_cell_is_rewritten(self):
        result = rewrite_formula(
            "=B3", 2, 1, False, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=D4')

    def test_absolute_copied_cell_reference_to_copied_cell_is_not_rewritten(self):
        result = rewrite_formula(
            "=$A$2", 2, 1, False, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=$A$2')

    def test_absolute_copied_cell_reference_to_uncopied_cell_is_not_rewritten(self):
        result = rewrite_formula(
            "=$B$3", 2, 1, False, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=$B$3')

    def test_copied_cell_reference_that_moves_off_grid_marked_invalid(self):
        result = rewrite_formula(
            "=A1", 1, -1, False, (1, 2, 1, 2)
        )
        self.assertEquals(result, '=#Invalid!')

    def test_cut_cellrange_reference_to_completely_cut_cellrange_is_rewritten(self):
        result = rewrite_formula(
            "=A2:A3", 2, 1, True, (1, 1, 1, 3)
        )
        self.assertEquals(result, '=C3:C4')

    def test_cut_cellrange_reference_to_partially_cut_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=A2:A3", 2, 1, True, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=A2:A3')

    def test_cut_absolute_cellrange_reference_to_completely_cut_cellrange_is_rewritten(self):
        result = rewrite_formula(
            "=$A$2:$A$3", 2, 1, True, (1, 1, 1, 3)
        )
        self.assertEquals(result, '=$C$3:$C$4')

    def test_cut_absolute_cellrange_reference_to_partially_cut_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=$A$2:$A$3", 2, 1, True, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=$A$2:$A$3')

    def test_cut_cellrange_reference_to_partially_cut_cellrange_is_not_rewritten_even_if_its_not_obviously_overlapping(self):
        # the range's corners are outside the cut region but the two
        # rectangles still intersect, so the reference must stay put
        cut_region_left = 2
        cut_region_right = 3
        cut_region_top = 1
        cut_region_bottom = 2
        cell_range_topleft = "A2"
        cell_range_bottomright = "B3"
        result = rewrite_formula(
            "=%s:%s" % (cell_range_topleft, cell_range_bottomright),
            2, 1,
            True,
            (cut_region_left, cut_region_top, cut_region_right, cut_region_bottom)
        )
        self.assertEquals(result, '=A2:B3')

    def test_cut_absolute_cellrange_reference_to_partially_cut_cellrange_is_not_rewritten_even_if_its_not_obviously_overlapping(self):
        cut_region_left = 2
        cut_region_right = 3
        cut_region_top = 1
        cut_region_bottom = 2
        cell_range_topleft = "$A$2"
        cell_range_bottomright = "$B$3"
        result = rewrite_formula(
            "=%s:%s" % (cell_range_topleft, cell_range_bottomright),
            2, 1,
            True,
            (cut_region_left, cut_region_top, cut_region_right, cut_region_bottom)
        )
        self.assertEquals(result, '=$A$2:$B$3')

    def test_cut_cellrange_reference_to_uncut_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=A2:A3", 2, 1, True, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=A2:A3')

    def test_cut_absolute_cellrange_reference_to_uncut_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=$A$2:$A$3", 2, 1, True, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=$A$2:$A$3')

    def test_copied_cellrange_reference_to_completely_copied_cellrange_is_rewritten(self):
        result = rewrite_formula(
            "=A2:A3", 2, 1, False, (1, 1, 1, 3)
        )
        self.assertEquals(result, '=C3:C4')

    def test_copied_absolute_cellrange_reference_to_completely_copied_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=$A$2:$A$3", 2, 1, False, (1, 1, 1, 3)
        )
        self.assertEquals(result, '=$A$2:$A$3')

    def test_copied_cellrange_reference_to_partially_copied_cellrange_is_rewritten(self):
        result = rewrite_formula(
            "=A2:A3", 2, 1, False, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=C3:C4')

    def test_copied_absolute_cellrange_reference_to_partially_copied_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=$A$2:$A$3", 2, 1, False, (1, 1, 1, 2)
        )
        self.assertEquals(result, '=$A$2:$A$3')

    def test_copied_cellrange_reference_to_uncopied_cellrange_is_rewritten(self):
        result = rewrite_formula(
            "=A2:A3", 2, 1, False, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=C3:C4')

    def test_copied_absolute_cellrange_reference_to_uncopied_cellrange_is_not_rewritten(self):
        result = rewrite_formula(
            "=$A$2:$A$3", 2, 1, False, (1, 1, 1, 1)
        )
        self.assertEquals(result, '=$A$2:$A$3')

    def test_copied_cellrange_reference_that_moves_off_grid_marked_invalid(self):
        result = rewrite_formula(
            "=A1:A2", 1, -1, False, (1, 3, 1, 3)
        )
        self.assertEquals(result, '=#Invalid!:B1')

    def test_source_sheet_cell_references_to_cut_range_are_rewritten(self):
        # cells left behind on the source sheet must follow the moved cells
        worksheet = Worksheet()
        worksheet.A1.formula = '=B1'
        worksheet.A2.formula = '=B2'
        worksheet.A3.formula = '=B3'
        worksheet.A4.formula = 'B1'
        worksheet.A5.formula = '=$B$1'
        rewrite_source_sheet_formulae_for_cut(worksheet, (2, 1, 2, 2), 3, 4)
        self.assertEquals(worksheet.A1.formula, '=C4')
        self.assertEquals(worksheet.A2.formula, '=C5')
        self.assertEquals(worksheet.A3.formula, '=B3')
        self.assertEquals(worksheet.A4.formula, 'B1')
        self.assertEquals(worksheet.A5.formula, '=$C$4')

    def test_source_sheet_cell_ranges_inside_cut_range_are_rewritten(self):
        worksheet = Worksheet()
        worksheet.A1.formula = '=B1:B2'
        worksheet.A2.formula = '=sum(B1:B2)'
        worksheet.A3.formula = '=B3:B4'
        worksheet.A4.formula = 'B1:B2'
        worksheet.A5.formula = '=$B$1:$B$2'
        rewrite_source_sheet_formulae_for_cut(worksheet, (2, 1, 2, 2), 3, 4)
        self.assertEquals(worksheet.A1.formula, '=C4:C5')
        self.assertEquals(worksheet.A2.formula, '=sum(C4:C5)')
        self.assertEquals(worksheet.A3.formula, '=B3:B4')
        self.assertEquals(worksheet.A4.formula, 'B1:B2')
        self.assertEquals(worksheet.A5.formula, '=$C$4:$C$5')
| [
"[email protected]"
]
| |
304f5b58c3d48bcabde5d01bcb1635415e7c3590 | 9bdeffc12343cd5c5e7bf1f4cb8969c72d81c56b | /mpesa_api/urls.py | 70e4b9988c53b2601dbe91606de11fb2948a7016 | []
| no_license | johngaitho05/Mpesa-API-Python | 5fe90d60261e9913d6adfa6bc9fc3028fe6c79e5 | 49314ac3d37be297783a7c6da7a1875ece24e1d0 | refs/heads/master | 2022-02-08T07:44:46.910257 | 2022-01-31T11:05:30 | 2022-01-31T11:05:30 | 222,941,616 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | from django.urls import path, include
from . import views
urlpatterns = [
    # M-Pesa OAuth access-token endpoint
    path('access/token', views.getAccessToken, name='get_mpesa_access_token'),
    # Lipa na M-Pesa Online (STK push) endpoint
    path('online/lipa', views.lipa_na_mpesa_online, name='lipa_na_mpesa'),
    # register, confirmation, validation and callback urls
    path('c2b/register', views.register_urls, name="register_mpesa_validation"),
    path('c2b/confirmation', views.confirmation, name="confirmation"),
    path('c2b/validation', views.validation, name="validation"),
    path('c2b/callback', views.call_back, name="call_back"),
]
"[email protected]"
]
| |
9b4a4205e03cccfbdc33ac81bc959da4c660fb3b | 7e4ca815fa4776d41b2b46cdcada077149d72899 | /course4/week4/graph.py | bf67b3634a527b2d80808c968688486839d57ed2 | []
| no_license | kcollett1/Stanford_Algorithms | 1a95e0ec12737f50926c23aede08fb246f719935 | cdab3757ebb6c6a85ee4f9c630c00ad0b3fa24aa | refs/heads/master | 2022-04-21T05:55:55.988759 | 2020-04-20T14:57:53 | 2020-04-20T14:57:53 | 257,314,127 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,161 | py |
'''
this is my implementation of a DIRECTED graph as an adjacency list. vertices are added
to the graph from input containing the vertex num and a list of vertices connected to it.
also implemented is Kosaraju's 2 pass algorithm to compute the strongly connected
components (SCC) of a directed graph, using a depth-first-search strategy (iteratively
rather than recursively) twice on the reverse of the graph first, and then on the graph
itself, keeping track of key variables (namely, finishing time and leader) as we pass through.
'''
from stack import Stack
from queue import Queue
class Graph:
    """Directed graph stored as an adjacency structure.

    ``vertices`` maps vertex -> [outgoing-edge-id set, incoming-edge-id set];
    ``edges`` maps edge-id -> [tail, head] (the edge points tail -> head).

    Bug fixes vs. the original:
    - BFS called ``verts.enqeue`` (typo) which raised AttributeError as soon
      as an unexplored neighbour was found.
    - BFS with forwards=False iterated the vertex's *outgoing* edge set but
      then read endpoint 0 of each edge, i.e. the current vertex itself; the
      edge-set index now mirrors DFS so the reverse graph is walked.
    """

    def __init__(self):
        # dict of vertices, mapped to [set of outgoing edges, set of incoming edges]
        self.vertices = {}
        # dict of edges, mapped to the two endpoints in order of direction,
        # e.g. {3: [3, 2]} means edge #3 points FROM vertex 3 TO vertex 2
        self.edges = {}
        self.num_edges = 0
        self.num_verts = 0
        self.max_vert = 0  # track verts that exist on graph without incident edges

    def __update_vert__(self, vert, ind):
        '''Helper for add_edge: record the current edge id on vertex ``vert``.

        ``ind`` is 0 for the tail (outgoing) endpoint, 1 for the head
        (incoming) endpoint.
        '''
        if vert not in self.vertices:
            self.num_verts += 1
            if vert > self.max_vert:
                self.max_vert = vert
            self.vertices[vert] = [set(), set()]
        self.vertices[vert][ind].add(self.num_edges)

    def add_edge(self, vert1: int, vert2: int):
        '''Add a new edge to the graph pointing from vert1 to vert2.'''
        self.num_edges += 1
        self.edges[self.num_edges] = [vert1, vert2]
        # register the edge on both endpoints (creating vertices as needed)
        self.__update_vert__(vert1, 0)
        self.__update_vert__(vert2, 1)

    def add_vert(self, vert):
        '''Add a vertex to the graph not connected to any edges.'''
        if vert not in self.vertices:
            self.num_verts += 1
            if vert > self.max_vert:
                self.max_vert = vert
            self.vertices[vert] = [set(), set()]

    def BFS(self, start: int, forwards=True):
        '''Breadth first search from start vertex.

        Search the reverse graph with forwards=False.
        '''
        explored = set()
        explored.add(start)
        verts = Queue()
        verts.enqueue(start)
        while not verts.is_empty():
            vert = verts.dequeue()
            # outgoing edges on the forward pass, incoming on the reverse pass
            for edge in self.vertices[vert][(int(forwards) + 1) % 2]:
                # forward pass reads the head (index 1); reverse reads the tail
                next_vert = self.edges[edge][int(forwards)]
                if next_vert in explored:
                    continue
                explored.add(next_vert)
                verts.enqueue(next_vert)

    def DFS(self, start, forwards=True):
        '''
        Iterative depth first search from start vertex; helper for
        compute_scc.  Search the reverse graph with forwards=False.

        An iterative search is more memory efficient than recursion for
        large graphs, but finishing times become trickier: a vertex may sit
        on the stack more than once, so we only assign its finishing time
        the first time we pop it after it has been marked explored.
        '''
        global leaders, leader, finish_times, finish_time, explored
        verts = Stack()
        verts.push(start)
        if forwards:  # leaders are only tracked on the forward pass
            leaders[leader] = {start}
        while not verts.is_empty():
            vert = verts.top()
            if vert not in explored:
                # first visit: mark explored and push unexplored neighbours
                # (a vertex can be pushed twice; the else-branch handles it)
                explored.add(vert)
                for edge in self.vertices[vert][(int(forwards) + 1) % 2]:
                    next_vert = self.edges[edge][int(forwards)]
                    if next_vert not in explored:
                        if forwards:
                            leaders[leader].add(next_vert)
                        verts.push(next_vert)
            else:
                # all neighbours handled: pop and stamp the finishing time
                # (only set on the reverse pass, where it is still unset)
                verts.pop()
                if vert not in finish_times:
                    finish_time += 1
                    finish_times[vert] = finish_time

    def compute_scc(self):
        '''
        Compute the strongly connected components with Kosaraju's two-pass
        algorithm.  Returns a dict mapping each component's (arbitrary)
        leader vertex to the set of vertices in that component.
        '''
        global leaders, leader, finish_times, finish_time, explored
        leaders = {}
        leader = 0
        finish_times = {}
        finish_time = 0
        explored = set()
        # pass 1: DFS the reverse graph to compute finishing times
        for vert in self.vertices:
            if vert not in explored:
                self.DFS(start=vert, forwards=False)
        explored = set()
        # pass 2: DFS the forward graph from largest finish time to smallest
        for vert in sorted([[t, v] for v, t in finish_times.items()], reverse=True):
            if vert[1] not in explored:
                leader = vert[1]
                self.DFS(start=vert[1])  # forward pass tracks leaders
        return leaders
| [
"[email protected]"
]
| |
0af1b00428e976ba359b1a7ffb194f8eae839390 | be50b4dd0b5b8c3813b8c3158332b1154fe8fe62 | /StacksAndQueues/Python/NearestSmallerElements.py | 3d77893e6f926f45de256ee34a8b88f67e31f45a | []
| no_license | Zimmermann25/InterviewBit | a8d89e090068d9644e28085625963c8ce75d3dff | 6d2138e740bd5ba8eab992d9bf090977e077bfc5 | refs/heads/main | 2023-03-24T18:12:48.244950 | 2021-03-24T14:36:48 | 2021-03-24T14:36:48 | 350,835,917 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,238 | py | class Solution:
# @param A : list of integers
# @return a list of integers
def prevSmaller(self, A):
G = [-1] * len(A) # -1, bo to ułatwi trochę
curMin = A[0]
stack = []
for i in range(len(A)-1):
#print(stack)
if stack:
# dodawaj na stos tylko te elementy, które mogą powodować zmianę
if A[i] < A[i+1]:
'''for k in range(len(stack)):
if len(stack) and stack[-k-1] > A[i]:
stack.pop()'''
stack.append(A[i])
# znajdz w stosie pierwszy element spełniający ten warunek(mniejszy niz A[i])
for j in range(len(stack)):
if stack[-j-1] < A[i]:
G[i] = stack[-j-1]
break
else: stack.append(A[i])
#print("stack: ", stack)
# dla ostatniego elementu edge case
for j in range(len(stack)):
if stack[-j-1] < A[-1]:
G[-1] = stack[-j-1]
break
return G | [
"[email protected]"
]
| |
683d36009adb3aca93e808e943434e2bc04fe516 | 92993cff825da80a8ff601572a0c52b0b7d3cbde | /algorithms/Svm/ADMM/L1/ADMM_L1_m34.py | 9d9fc819c9303a58aef992c0309a13f479a8c4cf | []
| no_license | yingzhuoy/MRs-of-linear-models | 06e8b1f84b08c6aa77553813824cf35c1806c5a7 | c3df8299e039a12613f2022b370b8c3e9c2dd822 | refs/heads/master | 2023-04-07T23:09:37.736952 | 2021-04-04T05:33:37 | 2021-04-04T05:33:37 | 265,124,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,148 | py | import numpy as np
from numpy import linalg
#import cvxopt
#from cvxopt import matrix,solvers
#import scipy.sparse.linalg
from algorithms.clf import Clf
"""
Preconditioned Conjugate Gradient Method
"""
def precond(M, r):
    """Apply the preconditioner M to the residual r (matrix product)."""
    return M * r
def inner_prod(A, B):
    """Frobenius inner product <A, B>, returned as a 1x1 matrix."""
    col_a = np.matrix(A).reshape(-1, 1)
    col_b = np.matrix(B).reshape(-1, 1)
    return col_a.T * col_b
def cg(A, b, x=None, tol=1.0e-6, max_iter=128):
    """Preconditioned conjugate-gradient solve of A X = b (matrix RHS).

    NOTE(review): this file appears to belong to a mutation-testing corpus
    (class name ADMM_L1_m34, explicit ``#----bug----`` marker below): the
    ``M - 0.5506771960356653`` perturbation of the preconditioner looks like
    a deliberately seeded fault and is intentionally left in place.
    """
    # precondition
    A = np.matrix(A)
    b = np.matrix(b)
    normb = np.linalg.norm(b, 'fro')
    m = b.shape[0]
    M = np.eye(m)  # identity preconditioner
    x = np.zeros((m, m))
    Aq = (A*x)
    r = b - Aq # m x m residual
    q = precond(M, r) # m x m preconditioned residual
    tau_old = np.linalg.norm(q, 'fro')
    rho_old = inner_prod(r, q)
    theta_old = 0
    Ad = np.zeros((m, m))
    d = np.zeros((m, m))
    res = r.reshape(m, m)
    tiny = 1e-30  # guard against division by a vanishing curvature term
    for i in range(max_iter):
        Aq = A * q
        sigma = inner_prod(q, Aq)
        if abs(sigma.item()) < tiny:
            break
        else:
            alpha = rho_old / sigma;
            alpha = alpha.item()
            r = r - alpha * Aq
            r = r.reshape(m, m)
            #----bug----
            #u = precond(M, r)
            u = precond(M-0.5506771960356653, r)
            theta = np.linalg.norm(u,'fro')/tau_old
            c = 1 / np.sqrt(1+theta*theta)
            tau = tau_old * theta * c
            gam = c*c*theta_old*theta_old
            eta = c*c*alpha
            d = gam * d + eta * q
            x = x + d
            # stopping test on the recursively updated residual
            Ad = gam*Ad+eta*Aq
            res = res - Ad
            if np.linalg.norm(res, 'fro') < tol*normb:
                break
            else:
                rho = inner_prod(r, u)
                beta = rho / rho_old
                beta = beta.item()
                q = u + beta * q
                rho_old = rho
                tau_old = tau
                theta_old = theta
    return x
def admm(X, y, max_iter=3000):
    """Train an L1-hinge-loss linear SVM dual via ADMM.

    X: (m, n) sample matrix; y: labels in {-1, +1}.  Returns (w, b) where w
    has length n and b is the bias (absorbed as an extra all-ones column).
    Stops when the relative duality gap drops below 1e-12.
    """
    # solve by inner point method
    m, n = X.shape
    # append a constant-1 column so the bias is learned inside w
    X = np.column_stack((X, np.ones((m, 1))))
    y = y.astype(np.float64)
    data_num = len(y)
    C = 1.0  # box constraint on the dual variables
    kernel = np.dot(X, np.transpose(X))
    p = np.matrix(np.multiply(kernel,np.outer(y, y)))
    e = np.matrix(np.ones([data_num, 1], np.float64))
    bounds = (0, C)
    low, up = bounds
    x = np.ones((m,1))
    tau = 1.618  # over-relaxation step for the multiplier update
    sigma = 1    # ADMM penalty parameter
    # initial
    u = np.ones((m, 1))
    t = x
    A = p + sigma * np.eye(m)
    I = np.eye(m)
    # pre-factor: solve (p + sigma I) invA = I once via conjugate gradients
    invA = cg(A, I)
    for it in range(max_iter):
        # x-update: unconstrained quadratic minimisation
        b = e + u + sigma * t
        x = invA * b
        # t-update: project onto the box [low, up]
        t = x - (1/sigma)*u
        t[t < low] = low
        t[t > up] = up
        # dual (multiplier) update
        u = u - tau*sigma*(x-t)
        dual = -(0.5*x.T*(p*x) - e.T*x)
        dual = dual.item()
        y1 = np.reshape(y, (-1, 1))
        lambda1 = np.multiply(x, y1)
        w = np.dot(X.T, lambda1)
        w = np.matrix(w).reshape(-1, 1)
        tmp = np.maximum(1-np.multiply(y1, X*w),0)
        primal = 0.5*np.linalg.norm(w)**2 + 1 * np.sum(tmp)
        primal = primal.item()
        # stop criteria: relative duality gap
        if np.abs(dual-primal)/(1+np.abs(dual)+np.abs(primal)) < 1e-12:
            break
        # print(t, np.linalg.norm(gradient))
        # print(np.min(x), np.max(x))
        # print(np.sum(x < -1e-4), np.sum(x>1+1e-4))
        # print(np.abs(dual-primal)/(1+np.abs(dual)+np.abs(primal)))
    # recover the primal weights from the final dual solution
    y1 = np.reshape(y, (-1, 1))
    alpha1 = x
    lambda1 = np.multiply(y1,alpha1)
    w = np.dot(X.T, lambda1)
    w = np.array(w).reshape(-1)
    b = w[n]       # last component is the bias
    w = w[0:n]
    return w, b
# L1-svm
class ADMM_L1_m34():
    """L1-hinge-loss linear SVM fitted with the ADMM solver above
    (mutant variant m34)."""

    def fit(self, X, y):
        """Train on (X, y) with labels in {0, 1}; returns a Clf(w, b)."""
        # relabel the negative class in place so y is in {-1, +1}
        y[y == 0] = -1
        weights, bias = admm(X, y)
        return Clf(weights, bias)
"[email protected]"
]
| |
a70b34ec60feceb1a77c2fd378dbb3a87121abd9 | fc210e56f3d20947f84039a8ef07107bb11e6b5a | /main_prepare_tfrecords.py | def142e0fa4506356f5c7542938f2e953fb58cde | []
| no_license | ZhouYzzz/RecurrentTracking | 344b5fcb73f04a749f9822ae0b18f8de83ee6308 | 9dfaf2b383b2a0f67272ec090b2a40bb5d1adee4 | refs/heads/master | 2021-09-11T20:47:31.566421 | 2018-04-12T06:50:05 | 2018-04-12T06:50:05 | 112,092,396 | 0 | 1 | null | 2018-03-22T11:39:37 | 2017-11-26T15:07:42 | Python | UTF-8 | Python | false | false | 2,126 | py | """Create TFRecords files from ILSVRC2015"""
import tensorflow as tf
import tempfile, os, argparse
from multiprocessing import Pool
from tqdm import tqdm
from ilsvrc2015 import ILSVRC2015, PHASE
from annotations import parse_annotation_folder
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--dataset_dir', default='/home/zhouyz/ILSVRC2015/', type=str, help='ILSVRC2015 root directory')
parser.add_argument('--output_dir', default=tempfile.mkdtemp(), type=str)
parser.add_argument('--records_prefix', default='ilsvrc2015.', type=str)
FLAGS, _ = parser.parse_known_args()
def create_tfrecords(annotation_folder):
    """Serialize every stream in one annotation folder into a fresh
    .tfrecords file; returns the number of streams written."""
    record_path = tempfile.mktemp(
        suffix='.tfrecords', prefix=FLAGS.records_prefix, dir=FLAGS.output_dir)
    writer = tf.python_io.TFRecordWriter(path=record_path)
    streams = parse_annotation_folder(annotation_folder)
    for stream in streams:
        writer.write(stream.serializeToTFSequenceExample().SerializeToString())
    writer.close()
    return len(streams)
def create_fixed_lengthed_tfrecords(annotation_folder, length=32):
    """Split each stream into fixed-length pieces and serialize them into a
    fresh .tfrecords file; returns the number of pieces written."""
    record_path = tempfile.mktemp(
        suffix='.tfrecords', prefix=FLAGS.records_prefix, dir=FLAGS.output_dir)
    writer = tf.python_io.TFRecordWriter(path=record_path)
    pieces = []
    for stream in parse_annotation_folder(annotation_folder):
        # ceil-ish split: one extra piece covers any remainder
        pieces.extend(stream.splitIntoStreams(n=stream.length // length + 1, l=length))
    for piece in pieces:
        writer.write(piece.serializeToTFSequenceExample().SerializeToString())
    writer.close()
    return len(pieces)
def main():
    """Convert every ILSVRC2015 training snippet into fixed-length
    TFRecords files, reporting a running record count via tqdm."""
    print('FLAGS:', FLAGS)
    dataset = ILSVRC2015(FLAGS.dataset_dir)
    snippet_ids = dataset.GetSnippetIDs(phase=PHASE.TRAIN)
    ## Using multiprocessing
    # with Pool(8) as p:
    #     r = list(tqdm(
    #         p.imap(create_tfrecords, map(lambda i: os.path.join(dataset.annotations_dir, i), snippet_ids)),
    #         total=len(snippet_ids)
    #     ))
    count = 0
    t = tqdm(snippet_ids)
    for id in t:
        # one records file per snippet; count accumulates written streams
        count += create_fixed_lengthed_tfrecords(os.path.join(dataset.annotations_dir, id))
        t.set_description(desc='Total records {}'.format(count))
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
bcdd85a3ed8af68cb0db7988caed6866557c1a53 | aaec91eb381f7a565457644ae3decf394f01c5ed | /pubwork/demo/learner/demo_webdriver.py | 0d3a8b4a14febaff89051cba92b80bb3a6acfba7 | []
| no_license | plutoese/pubwork | c5dd6f908599dcb05ae5b50435a34d269391ff61 | 0649e0ab098c1f7099d98b7fd981425b24fddf3e | refs/heads/master | 2021-01-12T01:07:03.096016 | 2017-12-24T05:05:19 | 2017-12-24T05:05:19 | 78,340,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | import webbrowser
webbrowser.open('http://docs.python.org/lib/module-webbrowser.html') | [
"[email protected]"
]
| |
413da12fae267071b1dbfa9ca3c2fc0495b14b9e | 65d844f57c93b97459ba58a7f8d31fcddaef2c57 | /example/train_multi_task.py | 8ca016de299fe5216290169295f8f39fb5af5857 | [
"Apache-2.0"
]
| permissive | tonywenuon/keras_dialogue_generation_toolkit | 797411838e8213422cce4f5ac94f4e98e56cc912 | 75d82e7a281cd17a70bd9905fcebf2b906a6deec | refs/heads/master | 2023-04-04T14:39:10.117825 | 2021-03-29T11:25:23 | 2021-03-29T11:25:23 | 215,433,666 | 24 | 2 | null | 2023-03-24T22:45:14 | 2019-10-16T01:55:53 | Python | UTF-8 | Python | false | false | 16,211 | py | import os, sys, time, math
project_path = os.path.sep.join(os.path.abspath(__file__).split(os.path.sep)[:-2])
if project_path not in sys.path:
sys.path.append(project_path)
import tensorflow as tf
import keras
import argparse
import numpy as np
from copy import deepcopy
from keras.callbacks import ModelCheckpoint, EarlyStopping, LearningRateScheduler, ReduceLROnPlateau
from keras.utils import plot_model
from keras.models import load_model
from keras.utils import get_custom_objects
from models.multi_task import MultiTaskModel
from commonly_used_code.helper_fn import Hypothesis
from commonly_used_code import helper_fn, config
from run_script.args_parser import multi_task_add_arguments
from vspgt_data_reader import DataSet
import keras.backend.tensorflow_backend as KTF
#KTF.set_session(tf.Session(config=tf.ConfigProto(device_count={'cpu':0})))
os.environ["CUDA_VISIBLE_DEVICES"] = "3"
class MultiTask:
def __init__(self, args):
# real Transformer model architecture
self.multi_task_model= MultiTaskModel(args=args)
self.args = args
exp_name = args.data_set + '_' + args.exp_name
# create experiment dir
self.exp_dir= os.path.join(args.checkpoints_dir, exp_name)
helper_fn.makedirs(self.exp_dir)
hist_name = exp_name + '.hist'
model_name = exp_name + '_final_model.h5'
self.history_path = os.path.join(self.exp_dir, hist_name)
self.model_path = os.path.join(self.exp_dir, model_name)
outputs_dir = args.outputs_dir
helper_fn.makedirs(outputs_dir)
self.src_out_name = exp_name + '.src'
self.src_out_path = os.path.join(outputs_dir, self.src_out_name)
self.pred_out_name = exp_name + '.pred'
self.pred_out_path = os.path.join(outputs_dir, self.pred_out_name)
self.tar_out_name = exp_name + '.tgt'
self.tar_out_path = os.path.join(outputs_dir, self.tar_out_name)
    def train(self):
        """Train the multi-task model with early stopping, best-checkpoint
        saving and LR decay; writes the Keras history and final weights."""
        ds = DataSet(self.args)
        print('*' * 100)
        print('train sample number: ', ds.train_sample_num)
        print('valid sample number: ', ds.valid_sample_num)
        print('test sample number: ', ds.test_sample_num)
        print('*' * 100)
        train_generator = ds.data_generator('train', 'multi_task')
        valid_generator = ds.data_generator('valid', 'multi_task')

        def compile_new_model():
            # three equally weighted decoder heads, all trained with
            # sparse categorical cross-entropy
            _model = self.multi_task_model.get_model()
            _model.compile(
                optimizer=keras.optimizers.Adam(lr=self.args.lr),
                loss = {
                    'od1': 'sparse_categorical_crossentropy', 
                    'od2': 'sparse_categorical_crossentropy', 
                    'od3': 'sparse_categorical_crossentropy', 
                },
                loss_weights={
                    'od1': 1.,
                    'od2': 1.,
                    'od3': 1.,
                }
            )
            return _model

        if os.path.exists(self.model_path):
            # weights-only files cannot be restored with load_model
            raise ValueError('Current model just saves weights. Please re-train the model.')
            #print('Loading model from: %s' % self.model_path)
            #custom_dict = get_custom_objects()
            #model = load_model(self.model_path, custom_objects=custom_dict)
        else:
            print('Compile new model...')
            model = compile_new_model()
        model.summary()
        #plot_model(model, to_file='model_structure.png',show_shapes=True)

        verbose = 1
        earlystopper = EarlyStopping(monitor='val_loss', patience=self.args.early_stop_patience, verbose=verbose)
        ckpt_name = 'model-ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5'
        ckpt_path = os.path.join(self.exp_dir, ckpt_name)
        #checkpoint = ModelCheckpoint(ckpt_path, monitor='val_loss', verbose=verbose, save_weights_only=True, save_best_only=True, mode='min')
        checkpoint = ModelCheckpoint(ckpt_path, monitor='val_loss', verbose=verbose, save_best_only=True, mode='min')
        # halve the LR whenever val_loss plateaus, down to lr_min
        lrate = keras.callbacks.ReduceLROnPlateau(
            monitor='val_loss', 
            factor=0.5, 
            patience=self.args.lr_decay_patience, 
            verbose=verbose, 
            mode='auto', 
            min_delta=0.0001, 
            cooldown=0, 
            min_lr=self.args.lr_min,
            )
        callback_list = [earlystopper, checkpoint, lrate]
        #callback_list = [earlystopper, lrate]

        hist = model.fit_generator(
            generator=train_generator,
            steps_per_epoch=(ds.train_sample_num//self.args.batch_size),
            epochs=self.args.epochs,
            callbacks=callback_list,
            validation_data=valid_generator,
            validation_steps=(ds.valid_sample_num//self.args.batch_size),
            )

        with open(self.history_path,'w') as f:
            f.write(str(hist.history))
        #model.save(self.model_path)
        model.save_weights(self.model_path)
        #plot_model(model, to_file='model_structure.png',show_shapes=True)
def test(self):
    """Greedy decoding over the test split.

    Rebuilds the model (only weights were saved), loads the trained
    weights, then for every batch feeds the decoder its own previous
    predictions one timestep at a time, and writes the source,
    predicted and target token sequences to their output files.
    """
    # NOTE(review): uses the module-level `args`, not `self.args` — looks
    # unintentional but is preserved; confirm both refer to the same namespace.
    ds = DataSet(args)
    test_generator = ds.data_generator('test', 'multi_task')

    def compile_new_model():
        # The checkpoint stores weights only, so the architecture has to be
        # rebuilt and compiled before the weights can be loaded.
        _model = self.multi_task_model.get_model()
        _model.compile(
            optimizer=keras.optimizers.Adam(lr=self.args.lr),
            loss = {
                'od1': 'sparse_categorical_crossentropy',
                'od2': 'sparse_categorical_crossentropy',
                'od3': 'sparse_categorical_crossentropy',
            },
            loss_weights={
                'od1': 1.,
                'od2': 1.,
                'od3': 1.,
            }
        )
        return _model

    # load_model
    print('Loading model from: %s' % self.model_path)
    #custom_dict = get_custom_objects()
    #model = load_model(self.model_path, custom_objects=custom_dict)
    model = compile_new_model()
    model.load_weights(self.model_path)

    src_outobj = open(self.src_out_path, 'w')
    pred_outobj = open(self.pred_out_path, 'w')
    tar_outobj = open(self.tar_out_path, 'w')

    for batch_index, ([src_input, tar_input, fact_tar_input, facts_input], \
            [_, _, _]) in enumerate(test_generator):
        if batch_index > (ds.test_sample_num // self.args.batch_size):
            # finish all of the prediction
            break
        print('Current batch: {}/{}. '.format(batch_index, ds.test_sample_num // self.args.batch_size))
        cur_batch_size = tar_input.shape[0]
        tar_length = tar_input.shape[1]

        # Seed the decoder input with <start> followed by padding; each
        # timestep of the loop below fills in one more predicted token.
        results = np.zeros_like(tar_input)
        results[:, 0] = ds.start_id
        for i in range(1, tar_length):
            results[:, i] = ds.pad_id

        for t in range(1, tar_length):
            # Re-run the full decoder each step and take the argmax of the
            # distribution at the previous position (greedy search).
            preds, _, _ = model.predict([src_input, np.asarray(results), fact_tar_input, facts_input])
            pred_id = np.argmax(preds, axis=-1)
            results[:, t] = np.asarray(pred_id[:, t-1])

        def output_results(tag, outputs, outobj):
            # Convert each id sequence back to tokens: stop at <end>, skip
            # <pad>/<start>, one whitespace-joined line per sample.
            # (`tag` is currently unused; kept for the call sites below.)
            for out_index, result in enumerate(outputs):
                seq = []
                for _id in result:
                    _id = int(_id)
                    if _id == ds.end_id:
                        break
                    if _id != ds.pad_id and _id != ds.start_id:
                        token = ds.tar_id_tokens.get(_id, config.UNK_TOKEN)
                        seq.append(token)
                write_line = ' '.join(seq)
                write_line = write_line + '\n'
                outobj.write(write_line)
                outobj.flush()

        output_results('result', results, pred_outobj)
        output_results('src', src_input, src_outobj)
        output_results('tar', tar_input, tar_outobj)

    src_outobj.close()
    pred_outobj.close()
    tar_outobj.close()
    print(self.pred_out_path)
def beam_search_test(self):
    """Beam-search decoding over the test split.

    Rebuilds the model (only weights were saved), loads the trained
    weights, then decodes each test batch with a beam of width
    ``self.args.beam_size`` and writes the source, predicted and target
    token sequences to their respective output files.
    """
    beam_size = self.args.beam_size
    # NOTE(review): uses the module-level `args`, not `self.args`;
    # preserved as-is — confirm both refer to the same namespace.
    ds = DataSet(args)
    test_generator = ds.data_generator('test', 'multi_task')

    def sort_for_each_hyp(hyps, rank_index):
        """Return hypotheses sorted by descending average log probability
        of the sample at position ``rank_index``."""
        return sorted(hyps, key=lambda h: h.avg_prob[rank_index], reverse=True)

    def get_new_hyps(all_hyps):
        """Per sample, keep the best ``beam_size`` of the expanded hypotheses."""
        template = all_hyps[0]
        batch_size = template.batch_size
        # Use the hypothesis' own target length rather than a variable
        # leaked from the enclosing batch loop (original relied on the
        # leaked `tar_length`; the two are equal, this is just safer).
        tar_len = template.tar_len
        new_hyps = [Hypothesis(batch_size, tar_len, ds.start_id, ds.end_id)
                    for _ in range(beam_size)]
        for sample_idx in range(batch_size):
            # Rank candidates independently for every sample in the batch.
            ranked = sort_for_each_hyp(all_hyps, sample_idx)
            for beam_idx in range(beam_size):
                src = ranked[beam_idx]
                new_hyps[beam_idx].res_ids[sample_idx] = src.res_ids[sample_idx]
                new_hyps[beam_idx].pred_ids[sample_idx] = src.pred_ids[sample_idx]
                new_hyps[beam_idx].probs[sample_idx] = src.probs[sample_idx]
        return new_hyps

    def update_hyps(all_hyps):
        # all_hyps holds up to beam_size * beam_size candidates for this step.
        return get_new_hyps(all_hyps)

    def get_final_results(hyps):
        """Pick, per sample, the single best hypothesis and return its
        id matrix as a numpy array of shape (batch, tar_len)."""
        template = hyps[0]
        batch_size = template.batch_size
        tar_len = template.tar_len
        final_hyp = Hypothesis(batch_size, tar_len, ds.start_id, ds.end_id)
        for sample_idx in range(batch_size):
            best = sort_for_each_hyp(hyps, sample_idx)[0]
            final_hyp.res_ids[sample_idx] = best.res_ids[sample_idx]
            final_hyp.pred_ids[sample_idx] = best.pred_ids[sample_idx]
            final_hyp.probs[sample_idx] = best.probs[sample_idx]
        return np.asarray(final_hyp.res_ids)

    def compile_new_model():
        # The checkpoint stores weights only, so the architecture has to
        # be rebuilt and compiled before loading.
        _model = self.multi_task_model.get_model()
        _model.compile(
            optimizer=keras.optimizers.Adam(lr=self.args.lr),
            loss={
                'od1': 'sparse_categorical_crossentropy',
                'od2': 'sparse_categorical_crossentropy',
                'od3': 'sparse_categorical_crossentropy',
            },
            loss_weights={
                'od1': 1.,
                'od2': 1.,
                'od3': 1.,
            }
        )
        return _model

    # load_model
    print('Loading model from: %s' % self.model_path)
    model = compile_new_model()
    model.load_weights(self.model_path)

    src_outobj = open(self.src_out_path, 'w')
    pred_outobj = open(self.pred_out_path, 'w')
    tar_outobj = open(self.tar_out_path, 'w')

    for batch_index, ([src_input, tar_input, fact_tar_input, facts_input],
                      [_, _, _]) in enumerate(test_generator):
        if batch_index > (ds.test_sample_num // self.args.batch_size):
            # finish all of the prediction
            break
        print('Current batch: {}/{}. '.format(batch_index, ds.test_sample_num // self.args.batch_size))
        cur_batch_size = tar_input.shape[0]
        tar_length = tar_input.shape[1]

        # One hypothesis per beam; all start from <start>.
        hyps = [Hypothesis(cur_batch_size, tar_length, ds.start_id, ds.end_id)
                for _ in range(beam_size)]

        for t in range(1, tar_length):
            all_hyps = []  # expanded candidates for this step
            for i in range(beam_size):
                cur_hyp = hyps[i]
                results = cur_hyp.get_predictable_vars(ds.pad_id)
                # preds: (batch, tar_len, vocab)
                preds, _, _ = model.predict([src_input, np.asarray(results), fact_tar_input, facts_input])
                # Distribution for the token being decided at this step.
                cur_preds = preds[:, t - 1]
                top_indices = np.argsort(cur_preds)
                top_indices = top_indices[:, -beam_size:]  # largest is at the end
                top_logits = []
                for sample_index, sample_logits in enumerate(cur_preds):
                    logits = [sample_logits[top_indices[sample_index][beam_index]]
                              for beam_index in range(beam_size)]
                    top_logits.append(logits)
                top_logits = np.asarray(top_logits)

                # Expand this hypothesis with each of its top-k tokens,
                # best candidate first (columns are sorted ascending).
                for j in range(beam_size - 1, -1, -1):
                    next_hyp = deepcopy(cur_hyp)
                    top_index = top_indices[:, j]  # (batch,)
                    top_logit = top_logits[:, j]
                    for bs_idx, _id in enumerate(top_index):
                        next_hyp.res_ids[bs_idx].append(_id)
                        next_hyp.probs[bs_idx].append(top_logit[bs_idx])
                        # Map OOV tokens back to <unk> for the decoder input.
                        token = ds.tar_id_tokens.get(int(_id), config.UNK_TOKEN)
                        if token == config.UNK_TOKEN:
                            cur_pred_id = ds.unk_id
                        else:
                            cur_pred_id = _id
                        next_hyp.pred_ids[bs_idx].append(cur_pred_id)
                    all_hyps.append(next_hyp)

                # At the first step all beams are identical, so expand once.
                if t == 1:
                    break
            hyps = update_hyps(all_hyps)

        final_results = get_final_results(hyps)

        def output_results(outputs, outobj):
            # Ids -> tokens: stop at <end>, drop <pad>/<start>, one
            # whitespace-joined line per sample.
            for result in outputs:
                seq = []
                for _id in result:
                    _id = int(_id)
                    if _id == ds.end_id:
                        break
                    if _id != ds.pad_id and _id != ds.start_id:
                        seq.append(ds.tar_id_tokens.get(_id, config.UNK_TOKEN))
                outobj.write(' '.join(seq) + '\n')
                outobj.flush()

        # BUG FIX: the original wrote `results` (the last beam's raw decoder
        # input, leaked from the loop above) instead of the beam-search
        # output `final_results`, discarding the whole search.
        output_results(final_results, pred_outobj)
        output_results(src_input, src_outobj)
        output_results(tar_input, tar_outobj)

    src_outobj.close()
    pred_outobj.close()
    tar_outobj.close()
    print(self.pred_out_path)
if __name__ == '__main__':
    # CLI entry point: parse the multi-task hyper-parameters, then run
    # exactly one of the three stages below (uncomment as needed).
    parser = argparse.ArgumentParser()
    multi_task_add_arguments(parser)
    args = parser.parse_args()
    print(args)
    trans = MultiTask(args)
    #trans.train()
    trans.test()
    # trans.beam_search_test()
| [
"[email protected]"
]
| |
4bc55c6b685bc80a4aae415b71b7fb0645f10a5a | 380a47268c5975473a2e7c38c747bc3bdbd981b1 | /benchmark/third_party/transformers/examples/research_projects/adversarial/utils_hans.py | e54792ad2f82b91a560f56cdc19020ad25b1b2c2 | [
"Apache-2.0"
]
| permissive | FMInference/FlexGen | 07aa9b1918c19b02077e13ad07e76840843810dd | d34f7b4b43ed87a374f394b0535ed685af66197b | refs/heads/main | 2023-07-24T02:29:51.179817 | 2023-07-21T22:38:31 | 2023-07-21T22:38:31 | 602,270,517 | 6,821 | 411 | Apache-2.0 | 2023-07-07T22:59:24 | 2023-02-15T21:18:53 | Python | UTF-8 | Python | false | false | 11,761 | py | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from dataclasses import dataclass
from typing import List, Optional, Union
import tqdm
from filelock import FileLock
from transformers import (
BartTokenizer,
BartTokenizerFast,
DataProcessor,
PreTrainedTokenizer,
RobertaTokenizer,
RobertaTokenizerFast,
XLMRobertaTokenizer,
is_tf_available,
is_torch_available,
)
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class InputExample:
    """One sequence(-pair) classification example.

    Attributes:
        guid: Unique identifier for the example.
        text_a: Untokenized text of the first sequence; always required.
        text_b: Untokenized text of the second sequence; only for pair tasks.
        label: Gold label, set for train/dev examples but not for test.
        pairID: Unique identifier for the sentence pair, when available.
    """

    guid: str
    text_a: str
    text_b: Optional[str] = None
    label: Optional[str] = None
    pairID: Optional[str] = None
@dataclass(frozen=True)
class InputFeatures:
    """Tokenized features for one example; field names mirror model inputs.

    Attributes:
        input_ids: Token indices in the vocabulary.
        attention_mask: 1 for real tokens, 0 for padding (optional).
        token_type_ids: Segment ids for the two sequences; only some
            models use them.
        label: Classification index (int) or regression target (float).
        pairID: Unique identifier for the sentence pair.
    """

    input_ids: List[int]
    attention_mask: Optional[List[int]] = None
    token_type_ids: Optional[List[int]] = None
    label: Optional[Union[int, float]] = None
    pairID: Optional[int] = None
if is_torch_available():
    import torch
    from torch.utils.data import Dataset

    class HansDataset(Dataset):
        """
        This will be superseded by a framework-agnostic approach
        soon.
        """

        # Tokenized examples, materialized once in __init__.
        features: List[InputFeatures]

        def __init__(
            self,
            data_dir: str,
            tokenizer: PreTrainedTokenizer,
            task: str,
            max_seq_length: Optional[int] = None,
            overwrite_cache=False,
            evaluate: bool = False,
        ):
            processor = hans_processors[task]()

            # Cache name encodes split, tokenizer class, max length and task
            # so that incompatible caches are never reused.
            cached_features_file = os.path.join(
                data_dir,
                "cached_{}_{}_{}_{}".format(
                    "dev" if evaluate else "train",
                    tokenizer.__class__.__name__,
                    str(max_seq_length),
                    task,
                ),
            )
            label_list = processor.get_labels()
            if tokenizer.__class__ in (
                RobertaTokenizer,
                RobertaTokenizerFast,
                XLMRobertaTokenizer,
                BartTokenizer,
                BartTokenizerFast,
            ):
                # HACK(label indices are swapped in RoBERTa pretrained model)
                label_list[1], label_list[2] = label_list[2], label_list[1]
            self.label_list = label_list

            # Make sure only the first process in distributed training processes the dataset,
            # and the others will use the cache.
            lock_path = cached_features_file + ".lock"
            with FileLock(lock_path):
                if os.path.exists(cached_features_file) and not overwrite_cache:
                    logger.info(f"Loading features from cached file {cached_features_file}")
                    self.features = torch.load(cached_features_file)
                else:
                    logger.info(f"Creating features from dataset file at {data_dir}")

                    examples = (
                        processor.get_dev_examples(data_dir) if evaluate else processor.get_train_examples(data_dir)
                    )

                    logger.info("Training examples: %s", len(examples))
                    self.features = hans_convert_examples_to_features(examples, label_list, max_seq_length, tokenizer)
                    logger.info("Saving features into cached file %s", cached_features_file)
                    torch.save(self.features, cached_features_file)

        def __len__(self):
            return len(self.features)

        def __getitem__(self, i) -> InputFeatures:
            return self.features[i]

        def get_labels(self):
            # Label order after the optional RoBERTa swap above.
            return self.label_list
if is_tf_available():
    import tensorflow as tf

    class TFHansDataset:
        """
        This will be superseded by a framework-agnostic approach
        soon.
        """

        # Tokenized examples, materialized once in __init__.
        features: List[InputFeatures]

        def __init__(
            self,
            data_dir: str,
            tokenizer: PreTrainedTokenizer,
            task: str,
            max_seq_length: Optional[int] = 128,
            overwrite_cache=False,
            evaluate: bool = False,
        ):
            processor = hans_processors[task]()
            label_list = processor.get_labels()
            if tokenizer.__class__ in (
                RobertaTokenizer,
                RobertaTokenizerFast,
                XLMRobertaTokenizer,
                BartTokenizer,
                BartTokenizerFast,
            ):
                # HACK(label indices are swapped in RoBERTa pretrained model)
                label_list[1], label_list[2] = label_list[2], label_list[1]
            self.label_list = label_list

            examples = processor.get_dev_examples(data_dir) if evaluate else processor.get_train_examples(data_dir)
            self.features = hans_convert_examples_to_features(examples, label_list, max_seq_length, tokenizer)

            def gen():
                # Yield one (inputs-dict, label) pair per feature for tf.data.
                for ex_index, ex in tqdm.tqdm(enumerate(self.features), desc="convert examples to features"):
                    if ex_index % 10000 == 0:
                        logger.info("Writing example %d of %d" % (ex_index, len(examples)))

                    yield (
                        {
                            "example_id": 0,
                            "input_ids": ex.input_ids,
                            "attention_mask": ex.attention_mask,
                            "token_type_ids": ex.token_type_ids,
                        },
                        ex.label,
                    )

            # Output types/shapes below must mirror the dicts yielded by gen().
            self.dataset = tf.data.Dataset.from_generator(
                gen,
                (
                    {
                        "example_id": tf.int32,
                        "input_ids": tf.int32,
                        "attention_mask": tf.int32,
                        "token_type_ids": tf.int32,
                    },
                    tf.int64,
                ),
                (
                    {
                        "example_id": tf.TensorShape([]),
                        "input_ids": tf.TensorShape([None, None]),
                        "attention_mask": tf.TensorShape([None, None]),
                        "token_type_ids": tf.TensorShape([None, None]),
                    },
                    tf.TensorShape([]),
                ),
            )

        def get_dataset(self):
            return self.dataset

        def __len__(self):
            return len(self.features)

        def __getitem__(self, i) -> InputFeatures:
            return self.features[i]

        def get_labels(self):
            # Label order after the optional RoBERTa swap above.
            return self.label_list
class HansProcessor(DataProcessor):
    """Processor for the HANS data set."""

    def get_train_examples(self, data_dir):
        """See base class."""
        path = os.path.join(data_dir, "heuristics_train_set.txt")
        return self._create_examples(self._read_tsv(path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        path = os.path.join(data_dir, "heuristics_evaluation_set.txt")
        return self._create_examples(self._read_tsv(path), "dev")

    def get_labels(self):
        """See base class.
        Note that we follow the standard three labels for MNLI
        (see :class:`~transformers.data.processors.utils.MnliProcessor`)
        but the HANS evaluation groups `contradiction` and `neutral` into `non-entailment` (label 0) while
        `entailment` is label 1."""
        return ["contradiction", "entailment", "neutral"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for line in lines[1:]:  # the first row is the TSV header
            guid = "%s-%s" % (set_type, line[0])
            # pairIDs are stored either bare or with an "ex" prefix.
            pair_id = line[7][2:] if line[7].startswith("ex") else line[7]
            examples.append(
                InputExample(guid=guid, text_a=line[5], text_b=line[6], label=line[0], pairID=pair_id)
            )
        return examples
def hans_convert_examples_to_features(
    examples: List[InputExample],
    label_list: List[str],
    max_length: int,
    tokenizer: PreTrainedTokenizer,
):
    """
    Loads a data file into a list of ``InputFeatures``

    Args:
        examples: List of ``InputExamples`` containing the examples.
        label_list: List of labels. Can be obtained from the processor using the ``processor.get_labels()`` method.
        max_length: Maximum example length.
        tokenizer: Instance of a tokenizer that will tokenize the examples.

    Returns:
        A list of task-specific ``InputFeatures`` which can be fed to the model.
    """
    label_map = {label: i for i, label in enumerate(label_list)}

    features = []
    for ex_index, example in tqdm.tqdm(enumerate(examples), desc="convert examples to features"):
        if ex_index % 10000 == 0:
            logger.info("Writing example %d" % (ex_index))

        encoded = tokenizer(
            example.text_a,
            example.text_b,
            add_special_tokens=True,
            max_length=max_length,
            padding="max_length",
            truncation=True,
            return_overflowing_tokens=True,
        )

        # Labels missing from the map (e.g. unseen labels) fall back to 0.
        label = label_map.get(example.label, 0)

        features.append(InputFeatures(**encoded, label=label, pairID=int(example.pairID)))

    # Log a handful of examples for manual inspection.
    for example, feature in zip(examples[:5], features[:5]):
        logger.info("*** Example ***")
        logger.info(f"guid: {example}")
        logger.info(f"features: {feature}")

    return features
# Number of classification labels for each HANS task.
hans_tasks_num_labels = {
    "hans": 3,
}

# Registry mapping task name -> processor class.
hans_processors = {
    "hans": HansProcessor,
}
| [
"[email protected]"
]
| |
2f4ad34593d619afe4392bde5ef7782179948d56 | fd69d76dcfe60b97ca02eb853e3f2cd2b68d990e | /tree/serialize_deserialize.py | 34c503596f178063464a9402d8208b4a6238f7eb | []
| no_license | Levalife/DSA | f3204946c9225f0472ec8470c0fbe29357559f35 | 4e5a94ba94fa5be01f4760a2651001426b3ef973 | refs/heads/master | 2023-01-23T03:51:48.864888 | 2020-11-27T13:58:04 | 2020-11-27T13:58:04 | 298,612,309 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,766 | py | # -*- coding: utf-8 -*-
class Tree:
    """Thin container holding a reference to the root node of a binary tree."""

    def __init__(self, root=None):
        # An empty tree is represented by root=None.
        self.root = root
class Node:
    """Binary-tree node that also keeps a back-link to its parent."""

    def __init__(self, value, parent=None, left=None, right=None):
        self.value = value
        self.parent = parent
        self.left = left
        self.right = right
'''
Sample tree built below:

            10
        7        11
      6   8         20
    1       9     14    22
'''
# Hand-build the sample tree drawn above, wiring parent links as we go.
tree = Tree()
tree.root = Node(10)
tree.root.left = Node(7, tree.root)
tree.root.right = Node(11, tree.root)
tree.root.left.left = Node(6, tree.root.left)
tree.root.left.right = Node(8, tree.root.left)
tree.root.right.right = Node(20, tree.root.right)
tree.root.left.left.left = Node(1, tree.root.left.left)
tree.root.left.right.right = Node(9, tree.root.left.right)
tree.root.right.right.left = Node(14, tree.root.right.right)
tree.root.right.right.right = Node(22, tree.root.right.right)
def serialize(node):
    """Encode a binary tree as a comma-separated preorder string.

    Absent children are written as 'X,' so the shape is fully recoverable.
    """
    if not node:
        return "X,"
    left_part = serialize(node.left)
    right_part = serialize(node.right)
    return "{},{}{}".format(node.value, left_part, right_part)
# Demo: flatten the sample tree and show the resulting string.
serialized_tree = serialize(tree.root)
print(serialized_tree)
def deserialize(tree_str):
    """Inverse of serialize(): rebuild the tree and return its root Node."""
    tokens = tree_str.split(',')
    return deserialize_helper(tokens)
def deserialize_helper(tree_list):
    """Consume tokens from the front of `tree_list` and rebuild one subtree.

    Note: node values remain strings, exactly as produced by split().
    """
    if not tree_list:
        return None
    head = tree_list.pop(0)
    if head == 'X':
        # 'X' marks an absent child; the token is consumed either way.
        return None
    node = Node(value=head)
    node.left = deserialize_helper(tree_list)
    node.right = deserialize_helper(tree_list)
    return node
# Round-trip: rebuild the tree from the string produced above.
deserialized_tree = deserialize(serialized_tree)
def preorder(node):
    """Print node values in root-left-right order, one per line."""
    print(node.value)
    for child in (node.left, node.right):
        if child:
            preorder(child)
preorder(deserialized_tree) | [
"[email protected]"
]
| |
4a2f8d6c9ed2d00e8ed94eef8b4bce6ebb50a686 | 4518ce1ee32ffbd4004df6865f557c5a3909c135 | /awards/migrations/0004_reviews.py | 6c70e3944ef1fceffaca5ddef335e41ee17a2d17 | [
"MIT"
]
| permissive | petermirithu/Grant_py | d9a04dee7fc0ae80e55a15b073e6b24108b23555 | 0e2e8d2a01c361583853e4d06fc4ede45e3741f8 | refs/heads/master | 2022-12-14T19:04:42.503002 | 2020-01-09T17:45:00 | 2020-01-09T17:45:00 | 231,231,593 | 1 | 0 | MIT | 2022-12-08T03:22:31 | 2020-01-01T15:20:26 | Python | UTF-8 | Python | false | false | 962 | py | # Generated by Django 2.2.8 on 2020-01-03 15:11
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration that creates the `reviews` model.

    A review stores its text, creation timestamp, the posting user and
    the project post it belongs to (both foreign keys cascade on delete).
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('awards', '0003_auto_20200102_1411'),
    ]

    operations = [
        migrations.CreateModel(
            name='reviews',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.CharField(max_length=1000)),
                ('posted_on', models.DateTimeField(auto_now_add=True)),
                ('posted_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('projo_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='awards.projo_post')),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
5c064fb27f4e1f5959994430e93a3a4ee5d91147 | 72fcc9b617014484a1c021fa90af57b457aba5ba | /06.BinarySearchTree/01.ConstructionConversion/3_construct_bst_from_preorder.py | a4d96db9559efa2c78f1ee3b4a74ff5d091c6804 | []
| no_license | shindesharad71/Data-Structures | 249cb89fc3b54a3d8a67e4e9db832e256d072ee6 | a7cd247228a723e880bccd3aa24c072722785f6d | refs/heads/main | 2023-07-24T21:01:08.070082 | 2021-09-03T04:02:05 | 2021-09-03T04:02:05 | 370,706,713 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,468 | py | # Construct BST from given preorder traversal
# https://www.geeksforgeeks.org/construct-bst-from-given-preorder-traversa/
# A O(n^2) Python3 program for
# construction of BST from preorder traversal
# A node of the binary search tree being reconstructed.
class Node:
    """BST node with a payload and two (initially absent) child links."""

    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None
# constructTreeUtil.preIndex is a "static" cursor stored as an attribute
# on the constructTreeUtil function itself; it tracks the next unread
# position in the preorder array.

# Return the current value of the shared cursor.
def getPreIndex():
    return constructTreeUtil.preIndex
# Advance the shared preorder cursor by one position.
def incrementPreIndex():
    constructTreeUtil.preIndex += 1
# A recursive function to construct a BST from pre[].
# The shared cursor constructTreeUtil.preIndex (see getPreIndex /
# incrementPreIndex) keeps track of the next unread index in pre[].
def constructTreeUtil(pre, low, high):
    """Build the subtree covering pre[low..high]; returns its root Node."""
    # Base Case
    if low > high:
        return None

    # The first node in preorder traversal is root. So take
    # the node at preIndex from pre[] and make it root,
    # and increment preIndex
    root = Node(pre[getPreIndex()])
    incrementPreIndex()

    # If the current subarray has only one element,
    # no need to recur
    if low == high:
        return root

    r_root = -1

    # Search for the first element greater than root; everything before it
    # belongs to the left subtree, everything from it onward to the right.
    for i in range(low, high + 1):
        if pre[i] > root.data:
            r_root = i
            break

    # If no elements are greater than the current root,
    # all elements are left children
    # so assign root appropriately
    if r_root == -1:
        r_root = getPreIndex() + (high - low)

    # Use the index of the element found in preorder to divide the
    # preorder array into two parts: left subtree and right subtree.
    root.left = constructTreeUtil(pre, getPreIndex(), r_root - 1)
    root.right = constructTreeUtil(pre, r_root, high)

    return root
# Public entry point: build a BST from its preorder traversal.
# Mainly resets the shared cursor and delegates to constructTreeUtil().
def construct_tree(pre):
    """Return the root of the BST whose preorder traversal is `pre`."""
    constructTreeUtil.preIndex = 0  # reset the shared cursor for a fresh run
    return constructTreeUtil(pre, 0, len(pre) - 1)
def inorder(root):
    """Print the tree's values in sorted (left-root-right) order."""
    if not root:
        return
    inorder(root.left)
    print(root.data, end=" ")
    inorder(root.right)
# Driver Code: build the BST from a sample preorder sequence and print
# its inorder traversal (which should come out sorted).
if __name__ == "__main__":
    pre = [10, 5, 1, 7, 40, 50]
    root = construct_tree(pre)
    print("Inorder traversal of constructed tree")
    inorder(root)
| [
"[email protected]"
]
| |
f22dc825cf7dbb473a0783088cb661e8971a18f1 | e7569ef74265f999fbeac9c2ffcf07e0b3a40e31 | /backend/manage.py | 1075829ba9e9ee42d33260f3b64a0950aae35882 | []
| no_license | crowdbotics-apps/now-prediction-25146 | 2d1999c58cbba61e719475f7c0c61d3222f484f4 | 562f8e2892c11e982a24231e6831d1b7df2ce43c | refs/heads/master | 2023-03-27T00:05:11.957207 | 2021-03-20T15:04:59 | 2021-03-20T15:04:59 | 349,755,728 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 640 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Django administrative entry point.

    Points Django at this project's settings module, then hands the
    command line over to django.core.management.
    """
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'now_prediction_25146.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.