blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8be72a52068001cc66bd59da148af82ea5b224a8 | db575f3401a5e25494e30d98ec915158dd7e529b | /BIO_Stocks/PMD.py | f9d9498e20f4a6d77b53ce8653cbb90641628f67 | []
| no_license | andisc/StockWebScraping | b10453295b4b16f065064db6a1e3bbcba0d62bad | 41db75e941cfccaa7043a53b0e23ba6e5daa958a | refs/heads/main | 2023-08-08T01:33:33.495541 | 2023-07-22T21:41:08 | 2023-07-22T21:41:08 | 355,332,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,134 | py | import requests
from lxml import html
from bs4 import BeautifulSoup
import os
from datetime import date, datetime
from ValidationTools import validateday
from Database_Connections import InsertData, Insert_Logging
def main(id_control):
    """Scrape the newest Psychemedics (PMD) press release and store today's article.

    id_control -- run identifier, used only when logging an error.

    Fetches the investor-relations news page, takes the most recent
    article and, when it is dated today, inserts
    (ticker, description, url, date) via InsertData.
    """
    try:
        url = 'https://investors.psychemedics.com/sec-filings-and-press-releases/news-releases/default.aspx'
        # Browser-like User-Agent so the IR site serves the normal page.
        headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
        result = requests.get(url, headers=headers)
        #print(result.content.decode())
        html_content = result.content.decode()
        soup = BeautifulSoup(html_content, 'html.parser')
        #print(soup)
        # Each press release is rendered as one div.irwTableRowItem row.
        articles = soup.findAll('div', attrs={'class':'irwTableRowItem'})
        # Rows are newest-first; only the most recent article is considered.
        FIRST_ARTICLE = articles[0]
        article_date = FIRST_ARTICLE.find('div', attrs={'class':'irwPRDate'})
        article_desc = FIRST_ARTICLE.find('h4')
        v_article_date = article_date.text.lstrip().rstrip()
        # Only store the article when it is dated today.
        istoday, v_art_date = validateday(v_article_date)
        if (istoday == True):
            # Ticker symbol comes from this file's name (PMD.py -> "PMD").
            v_ticker = os.path.basename(__file__).replace(".py", "")
            v_url = article_desc.a.get('href')
            v_description = article_desc.text.lstrip().rstrip()
            now = datetime.now()
            print("URL: " + v_url)
            print("DESCRIPTION: " + v_description)
            print("ARTICLE_DATE: " + str(now))
            # Relative article links fall back to the listing page URL.
            if "https://" in v_url:
                InsertData(v_ticker, v_description, v_url, v_art_date)
            else:
                InsertData(v_ticker, v_description, url, v_art_date)
    except Exception:
        # Best-effort scraper: log the failure and keep the caller running.
        error_message = "Entrou na excepção ao tratar " + os.path.basename(__file__) + "..."
        print(error_message)
        Insert_Logging(id_control, 'Detail', error_message)
        pass
#InsertData()
if __name__ == "__main__":
    # NOTE(review): main() requires an id_control argument, so running this
    # module directly raises TypeError here.  Confirm the value intended for
    # standalone runs (e.g. main(0)) before relying on this entry point.
    main()
| [
"[email protected]"
]
| |
018b2906e7a41541d957764ddd1c47e355d03386 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_2464487_0/Python/CuteCube/ra1.py | dbc146df38875aae8ae187eac50411365e303fb4 | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | #!/usr/bin/env python
import math
def main():
    """For each "r t" case in input.txt, print the largest k with need(k, r) <= t.

    Input format: first line is the number of cases, then one "r t" pair
    per line.  (Python 2 source: print statement, xrange, long.)
    """
    f = open('input.txt', 'r')
    total_T = int(f.readline())
    #print total_T
    for T in xrange(1,total_T+1):
        r,t = f.readline().split()
        r = long(r)
        t=long(t)
        # Seed k from the positive root of 2k^2 + (2r - 1)k - t = 0 via the
        # quadratic formula; the float sqrt may be slightly off for huge
        # inputs, so the loop below corrects it.
        # 2k^2 + (2r - 1)k - t = 0
        b = 2*r -1.0
        a = 2.0
        c = -t
        k = (-b + math.sqrt(b*b - 4*a*c))/2/a
        # k = 1
        k = long(k)
        # Walk k up or down until need(k) <= t < need(k+1), i.e. k is the
        # largest value whose cost still fits within t.
        while not (need(k ,r) <= t and need(k+1, r) > t):
            if need(k, r) < t:
                k += 1
            else:
                #k = max(long(k/2)+1, long(k*0.75))
                k -= 1
        print "Case #{}: {}".format(T, long(k))
def need(k, r):
    """Cost of k rings when the first ring starts at radius r.

    Closed form 2*k**2 + (2*r - 1)*k, written factored.
    """
    return k * (2 * k + (2 * r - 1))
if __name__ == '__main__':
main() | [
"[email protected]"
]
| |
36675db792eaa04c9b5c9732126b47ebda3a154f | 43cdd7cb26fe44b1ed7de6a46f8b5e680c9b1372 | /openpeerpower/generated/config_flows.py | 244c7e0f950d8f44b848d46e9680ed38ab8aaabb | [
"Apache-2.0"
]
| permissive | OpenPeerPower/Open-Peer-Power | 02ec5c133564b47c6f72f669e844a666643cacd6 | 940a04a88e8f78e2d010dc912ad6905ae363503c | refs/heads/master | 2022-08-16T09:38:49.994009 | 2021-05-29T03:54:13 | 2021-05-29T03:54:13 | 183,174,237 | 1 | 0 | Apache-2.0 | 2022-07-15T18:43:02 | 2019-04-24T07:35:47 | Python | UTF-8 | Python | false | false | 246 | py | """Automatically generated by oppfest.
To update, run python3 -m script.oppfest
"""
# fmt: off
# Integration domains that register a config flow.
# This list is generated -- regenerate with: python3 -m script.oppfest
FLOWS = [
    "almond",
    "daikin",
    "dialogflow",
    "homekit_controller",
    "met",
    "mobile_app",
    "mqtt",
    "zha",
    "zwave"
]
| [
"[email protected]"
]
| |
20adba546311eb8ef3f505a79525f18a05e924ff | 4fd65dc15ed0e5849c440a41d81036d1ff47ea96 | /tests/integration/test_deploy_and_evaluate_model_auth_on.py | 56f92793bb30c984b1b9583ee2c3e49b30cd861f | [
"MIT"
]
| permissive | tableau/TabPy | 20ae3dacb958bf2d0e48fc36220366cb3db412bb | 96aa26252b6115bd2788f9526680ec1b34f1c86f | refs/heads/master | 2023-08-29T13:47:21.507211 | 2023-06-21T21:30:40 | 2023-06-21T21:30:40 | 69,400,040 | 1,527 | 633 | MIT | 2023-06-21T21:30:42 | 2016-09-27T21:26:03 | Python | UTF-8 | Python | false | false | 1,233 | py | from . import integ_test_base
class TestDeployAndEvaluateModelAuthOn(integ_test_base.IntegTestBase):
    """Integration test: deploy models, then query /evaluate with basic auth enabled."""

    def _get_config_file_name(self) -> str:
        """Config file that turns authentication on for this scenario."""
        return "./tests/integration/resources/deploy_and_evaluate_model_auth.conf"

    def _get_port(self) -> str:
        """Port TabPy listens on during this test."""
        return "9009"

    def test_deploy_and_evaluate_model(self):
        """Deploy the models and POST an authenticated /evaluate request."""
        # To keep this test's output and other files (config, state, etc.)
        # in the system temp folder, call: self.set_delete_temp_folder(False)
        self.deploy_models(self._get_username(), self._get_password())

        request_headers = {
            "Content-Type": "application/json",
            "Authorization": "Basic dXNlcjE6UEBzc3cwcmQ=",
            "Host": "localhost:9009",
        }
        request_body = """{
                "data": { "_arg1": ["happy", "sad", "neutral"] },
                "script":
                "return tabpy.query('Sentiment Analysis',_arg1)['response']"
            }"""
        connection = self._get_connection()
        connection.request("POST", "/evaluate", request_body, request_headers)
        evaluate_response = connection.getresponse()
        self.assertEqual(200, evaluate_response.status)
        evaluate_response.read()
| [
"[email protected]"
]
| |
1685d2a9cf7e5dc726fffb430a61ba17869e53f8 | 4cce3b466591f7f8b9d58c1f8cae4dd0b6425b09 | /classes dealing.py | 09e2606008d31426022cdef988fb9cec1726491e | []
| no_license | adityamangal1/hackerRank-solutions | 4e5fc66785215688449f58176b0260e05fb0c404 | 102ee32f5984240939bf14e799a458d99388774b | refs/heads/master | 2023-04-18T15:35:36.998087 | 2021-04-22T07:16:38 | 2021-04-22T07:16:38 | 297,935,486 | 13 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,770 | py | import math
class Complex(object):
    """Complex-number arithmetic with fixed two-decimal string output."""

    def __init__(self, real, imaginary):
        self.real = real
        self.imaginary = imaginary

    def _as_builtin(self):
        # Bridge to Python's native complex type for +, -, *.
        return complex(self.real, self.imaginary)

    @classmethod
    def _from_builtin(cls, z):
        return cls(z.real, z.imag)

    def __add__(self, no):
        return self._from_builtin(self._as_builtin() + no._as_builtin())

    def __sub__(self, no):
        return self._from_builtin(self._as_builtin() - no._as_builtin())

    def __mul__(self, no):
        return self._from_builtin(self._as_builtin() * no._as_builtin())

    def __truediv__(self, no):
        # Rationalise manually: multiply by the conjugate of the divisor.
        denom = no.real ** 2 + no.imaginary ** 2
        re = (self.real * no.real + self.imaginary * no.imaginary) / denom
        im = (self.imaginary * no.real - self.real * no.imaginary) / denom
        return Complex(re, im)

    def mod(self):
        """Return the modulus as a Complex with zero imaginary part."""
        return Complex((self.real ** 2 + self.imaginary ** 2) ** 0.5, 0)

    def __str__(self):
        re, im = self.real, self.imaginary
        if im == 0:
            return "%.2f+0.00i" % re
        if re == 0:
            return "0.00%s%.2fi" % ("+" if im >= 0 else "-", abs(im))
        return "%.2f%s%.2fi" % (re, "+" if im > 0 else "-", abs(im))
if __name__ == '__main__':
    # Read two complex numbers ("re im" on one line each) and print every
    # arithmetic result plus both moduli, one per line.
    x = Complex(*map(float, input().split()))
    y = Complex(*map(float, input().split()))
    outputs = (x + y, x - y, x * y, x / y, x.mod(), y.mod())
    print('\n'.join(str(value) for value in outputs))
| [
"[email protected]"
]
| |
fe247491ad250b60cfc470e63187bc32eacfeb9c | 8ed215ee731bc8c55eabdc66ee028a43771510bc | /tasks/nooisee/flask/ids_and_flags.py | ab033ca249b1daac30bde9c7af9c03d3eb3c21b4 | [
"MIT"
]
| permissive | irdkwmnsb/lkshl-ctf | c6c0b0ae58653d3d7c427073221043d2adea212c | e5c0200ddc8ba73df5f321b87b9763fb1bbaba57 | refs/heads/master | 2020-03-23T22:22:23.499985 | 2019-02-22T13:29:51 | 2019-02-22T13:29:51 | 142,172,055 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,415 | py | ids = ['i2yctSSMGde1TjBIor4T', 'DZrIgOypvPqMctJS06O9', 'lH4uWfzCg1kFeULgUIVC', 'N9mUMmG8maidbCD0s4mV', 'PuqvlpXgaa5s42mZKE4j', 'CoRVyJtEoNE87xUoRqw0', '8uKC7RXAOBlOXdP7KVGl', '2cVvsSZY3QLRmoAYRFkL', '22enKTGqQwxga5RFQdus', 'Sw1vkSnYDQdEtOjTnfH0', 'S7mUlJwhAJhCFgxCOoAW', 'JkTjzY6JUEgo9G7tHciV', 'xn8irINr3sDQLIAb2CkI', 'WPnNc5oYE4s3IPkL0fGg', 'dVqlZVLNgCCm1Sy4qfOW', 'l5cEE0hLMVCt2TjTit5c', '7BcmXXs7kbip5gfT853u', 'QZrfCklvGpdwt4vB6oh8', 'hAiHcE7oRy0zXqpoNeWh', 'LbSDhksl1vNujaAmhVhJ', 'oZZ1TyF3Ysg9KxtBGVs8', 'FKerevwgUjdoWUGyZ652', 'jvutz2HwlzcUGbSoZkgu', 'oV28KmKrYyvSdosdRck1', 'tuacfxYJA1CE54Ixao5F', 'q2B8TWtgwVD2rsGEeehx', '5bO4XLG9OyswG01jVleq', 'rZBIgEB002nWqVjMIBzg', '5ojT7jrimtbZP6mp7MAh', 'Z31bdvkrb3NtMIC3MenW', 'bbH9tpTiZz7V8a7i848m', '9xWkjKVCX91TzD933bMD', '3Jq5yRa0S0DKpIi96kjH', '2h3bhgxJ3ohZeNuG5Noq', 'o2YpZKg7619CB6yzN5SB', '1JZsemZRho77QrN0skkl', 'K9ySRqaGklft8OAY4l0G', 'r4wgNCzZbxtMhx8SHZlv', 'zLUWPrq0JEvsXn0yb2c5', 'fa87JWxShRCUK1xAV611', 'Kcr65dOqJVpTDU2cuUP1', 'aidLGpK1oiHQ2lv7gC12', 'Ttxgw2BqBD1jrm5l7hYc', 'YbQgYJDKArj2kOh95j86', 'tF82UdE6QhLaNfxbJ5PB', '3lN47CbPRR22TsFVdP6i', '2msPQ0ruOJ5rvOzvi0Gz', 'MUdVTNaq9i2k85AqIlVl', 'AiPFQJJBnJGTKtU6Ifrc', 'a7patLnLlBvWKjzfoPUy', 'sAsApUGUvINTq68IVRXX', 'Ne2J72wYP6LAnrckWZIh', '0D774XxxX56yC2WM7qkr', 'oggJm9bOxLAEEb1FswRZ', 'DXonEXUOBi61oAAj24MG', 'udpI7111q5lofsEmVkZN', 'lSoDYeb6RnTtOUZCG0y5', 'foE9zcv1E3n4VlnbnMGO', 'H69Fr2lKjto9PoNhbGdO', 'rKPI7ONeLkxt6p0bkCnC', '0gXXuhUYpAfF5TWuouUu', 'R4AMaHKEDnMx2fxdFR4v', 'p8HtmlwqQFbRvmlWj09t', 'Gv8gN89ZvU32kvteW3Kw', 'xOtLyoBFxgTskjoHCA74', 'zWesr92l1uJKm9RGGFwt', 'iCkYhQIGrM46gZyOeivp', 'R6BRGvq6uPZ7cRHdx5iq', 'UiIfbhnbY0J7SmnPOiHI', 'tgzeloib826aSzWzJRqr', 
'2XNmQ1h3uOXxvBY5cf45', 'TronVfZ9KWuimzkX7o56', 'iN2I21k3xQ1x4slI1q81', 'Tg0LMsdZFyyuv5spDpcM', '5Tg0M84vOTpM5jee3xCj', 'xS5dAwdL8ZLCUBCRZnEf', 'r9lyMRAjrWp3BxpT9NB5', 'hhsJYamIQskGD4BkLPUK', 'Du0ryLhVJFhfoDDOquON', 'yJkQ3y6kv93edSSoPcXl', 'ax6V8yGruNZMWmQJW8s8', 'U7GWGaCUjYSJQROeJNPl', '0Kr0BkEkkbj0fHi0HVt9', 'TvX8REHZmZGFWchiFFff', 'unBi2qvY43oHIf67DpJk', 'f70Yf2D6U2cokFrE4M89', 'UHR1IIsj0RqR0tCMWjaY', 'k7noV2tQkX0mXSwbRTeA', 'G2RCr57Ur0r8Bd0msCnh', 'uEs94rT8dvyMSK3tgUmb', 'Cn86UI8t4tEU5LslZLhT', 'GasDs9W7O7zE5UOLlUyU', '8yBxaH7rrliGXrE9iN6l', '2ib88yqt6IMAhZLbP5eR', 'OqZnGIsjGmw2MGsS4Y7F', 'JRj9gpEn7JmdErJthkDN', 'OHoTjcphFL1tWxfdmwfb', 'CmLSRN8HL8CpH7Je56lw', 'BQPKERa1jaW7lv99ETYL', 'tshNpN967rtSld8NYCgt', 'MWVDlAwG0z6KSFKDVdxy', 'Aj6Wbseon4e6CM2a8N1s', 'ZtDlWQnQaURZqkWVxPSc', 'KdtXIlqS604uHDKoqenD', 'QO0sLYHHSnzPUKHjZgiy', 'C1Tn5OZCJfSewPe9qxnP', 'plyCyxg03GwFY0kc9Mh7', 'BV5FLHzyksnrHn4qgb7Z', 'uz2OONdBpGpWREOFn2Dp', 'nRafhWiq9ady7YMjMfmD', 'jbKC4rnCJqnSWNybL0ho', 'v15v2YSAdmmwFsbJzv17', 'wrA9JCI3vQ17wKRznfmT', 'Wrppq31UosvJyjTECnOB', 'MxloukUZxGkyVaDtGyG1', 'CoeFnlaucvloF9dGCV1K', 'q0z4Vqf8uVOJjXMooAd0', 'tEu9sRK1yGBC8kH7sDUb', 'lgMRRPCNwMKSjb82hJN0', 'UhYGSPsJx6dNUCzuVaEw', '0lzOfTipjEFqzJsLiyYI', 'hbTF34f4iv7a981bHuV7', 'DYvTEJqsIk8pPuQS5Hai', 'mG9i1Q9oOtimP7Xpge4R', 'fI4oQBFO67nD6GeFxPjb', 'TkZRLa98jQEvEk5xiBi2', 'gJdNhYVuNHsXemBEU0XX', 'TxyeoCCGL80nX6hv7OcE', '5SayX4207FIvIIjqBANn', 'Kt1CH39vMZQO8mVVbPVG', 'sZInNDG5pKcfzsL1GeJk', 'lSoiTyLc5Pm2g8bS25fO', '8RygrlGoU9tVd20nB6pU', 'QjpdFah2GgN73iMOcKgu', '5uqRz6u3P0kcLddR0X1Q', 'JlYTWI9NtbINkqA9Gcx8', 'O48qYJt8hOoOeIRFZiZ5', 'HKhGZkv4v4SCwVNgowvF', 'fV4BZGFvUS2PUNCHQJoQ', 'kqSSIrlgFzfrYzW7LeC6', 'Yf2waPjPfokxDnTkbKR0', '41fFikB0JwoZ2d3bbT2N', 'XzITKmVvLkUGANuolqjs', 'WeL0qrrg3VUSvce4eOqH', 'bnC1DabRzDnbLOHlVs95', 'iNVSQXEm7uamgR2uM8ub', 'Us0NNywPFasbh18rhKJL', '0UJ9ZTXiVSsXjpJCXYw5', 'MAZtrsxoVBMmh1KxOXIC', 'XMHVLcPDSFEL4PBgO5Uw', '57aBZrcavR7OsV8mFfdL', 'SDYrvSy5HNxZvCHpdPBa', 'qZjSvBWwtiV16tprHaJZ', 
'cwYVhgQzRGa1UZa6wVXJ', 'ZurZjCN8DEZ87clrXlr0', 'sLTnm7u7ZtN9JfT29Vxg', 'sYdSmELU7dTZzi9tPmkG', 's0JdJwu3TsxmUDccEpKQ', 'jZrz2x2pItkIAFD2oyWF', 'ooOzVV9Dd1POoGwBiWsd', 'MuFsWSaRIWDWxUtMtHdH', 'mYEILDLnIaMBAg4LJZYA', 'w402UedR0qSN03uxFKro', 'Xh4tAImtQ11tnq25JIwt', 'Q6YOeR6OYFXpRc2vaqp4', '6EaLzsqaq7s0qRavwFOR', 'KipsQDR4RI6fgYkiYQeu', '5TByalXqgofiGuUFQ8ga', '6WSjCzum30MOuHJHI25r', '8o9mxMvUtlKMAIFxuQkY', 'x2N5Jp1uJPsIeLAknqrk', 'fn1DYGLRxayGv91i3ico', '1vpNXERfEuuvKG1yt6Es', 'YDgM6cyCeZ3WMbKtnZRA', 'VXcpNpWmcOD4ZuH0vvqE', '67bfDWVAqymhAV8xoow2', 'shuxt8SQoWuiSjmNCrq6', 'SUAwETaPiK5yZWwWgzLe', 'v8c6KsbsY0O7r20NcTc9', 'Z3I5tZoUE9Sl80IPDio7', 'erz0CZLp38LLQtw5CEyE', 'qiV6CQW3Np8fLUi4aUx1', 'UMxKLOtyDTZsD89IVXn5', '6Ue63hlYvUd2vHbNQTSZ', 'zLwT4gUVggNYF1Qz3eJK', 'EcfEf5UUER30630SJtcM', 'd1GbTz3UiUdCZAtOiSfH', '8I6JNrQL7zXkoMLQ14AI', '9oylE0h4WnWRlJJJ81RO', 'nSMZmbS7vIdnKGym2NOB', 'CJCVx5gq2zEVFZSsHlUi', '1okbUDCHJuIZJ4c4r0cN', 'rc8HONSCGpF0WTct384T', 'EcrLmnCC47uM5uNzapU7', 'BKcxCqu6kH2eB5tvqbp8', 'zxcpVWFMGRo96KdhAWC4', 'pNAbg6kLWHvgWU18GSDR', 'rXsOIcfQbrObgjhKFD1y', 'gng3koJU2ngLBOMBkn09', '6eDv9WvCunSJ3rbR7P41']
flags = ['LKL{g00D_N0isss3_M0VwcT}', 'LKL{g00D_N0isss3_fIh2JH}', 'LKL{g00D_N0isss3_oD1gJ7}', 'LKL{g00D_N0isss3_SfipqG}', 'LKL{g00D_N0isss3_oS5Nnz}', 'LKL{g00D_N0isss3_Btipdn}', 'LKL{g00D_N0isss3_Mo2isN}', 'LKL{g00D_N0isss3_gfjVax}', 'LKL{g00D_N0isss3_89DjDR}', 'LKL{g00D_N0isss3_U9rTxu}', 'LKL{g00D_N0isss3_zkT5Ks}', 'LKL{g00D_N0isss3_vVa7nj}', 'LKL{g00D_N0isss3_6PTYIO}', 'LKL{g00D_N0isss3_yXAKpI}', 'LKL{g00D_N0isss3_UXYisz}', 'LKL{g00D_N0isss3_485o6m}', 'LKL{g00D_N0isss3_IAfQoF}', 'LKL{g00D_N0isss3_u7jwOR}', 'LKL{g00D_N0isss3_0eVf9D}', 'LKL{g00D_N0isss3_cJEXvX}', 'LKL{g00D_N0isss3_r8yGte}', 'LKL{g00D_N0isss3_0Wg6vG}', 'LKL{g00D_N0isss3_2yxorP}', 'LKL{g00D_N0isss3_4F6Syl}', 'LKL{g00D_N0isss3_Sfy6NZ}', 'LKL{g00D_N0isss3_MHIZ0f}', 'LKL{g00D_N0isss3_besNuI}', 'LKL{g00D_N0isss3_3Ofy6n}', 'LKL{g00D_N0isss3_bU4Enb}', 'LKL{g00D_N0isss3_jTy3F5}', 'LKL{g00D_N0isss3_ZeCN3f}', 'LKL{g00D_N0isss3_qJE6fK}', 'LKL{g00D_N0isss3_86VxMN}', 'LKL{g00D_N0isss3_VXRzes}', 'LKL{g00D_N0isss3_JyPPq5}', 'LKL{g00D_N0isss3_JGYTE9}', 'LKL{g00D_N0isss3_NcaQzt}', 'LKL{g00D_N0isss3_Py2Jbl}', 'LKL{g00D_N0isss3_yepRkv}', 'LKL{g00D_N0isss3_2SsIXv}', 'LKL{g00D_N0isss3_O1Hz6r}', 'LKL{g00D_N0isss3_H6n4Z9}', 'LKL{g00D_N0isss3_Ncw3Z8}', 'LKL{g00D_N0isss3_KUcuzK}', 'LKL{g00D_N0isss3_qIY0i2}', 'LKL{g00D_N0isss3_084rcz}', 'LKL{g00D_N0isss3_CSOVie}', 'LKL{g00D_N0isss3_Tx304O}', 'LKL{g00D_N0isss3_NQHYem}', 'LKL{g00D_N0isss3_j2yrJp}', 'LKL{g00D_N0isss3_fYETyb}', 'LKL{g00D_N0isss3_KFKGph}', 'LKL{g00D_N0isss3_Y67kzX}', 'LKL{g00D_N0isss3_DFaPLi}', 'LKL{g00D_N0isss3_pH9R0C}', 'LKL{g00D_N0isss3_Jz9TY7}', 'LKL{g00D_N0isss3_JGxdKo}', 'LKL{g00D_N0isss3_EEUsf3}', 'LKL{g00D_N0isss3_tffJEU}', 'LKL{g00D_N0isss3_mCsaLE}', 'LKL{g00D_N0isss3_F8J0OW}', 'LKL{g00D_N0isss3_9l20a6}', 'LKL{g00D_N0isss3_bZHXxr}', 'LKL{g00D_N0isss3_WXInmT}', 'LKL{g00D_N0isss3_giBP9c}', 'LKL{g00D_N0isss3_S3Oxlh}', 'LKL{g00D_N0isss3_fVRZxk}', 'LKL{g00D_N0isss3_OePWlp}', 'LKL{g00D_N0isss3_VrqnRw}', 'LKL{g00D_N0isss3_IoLWv0}', 'LKL{g00D_N0isss3_IyM6fA}', 
'LKL{g00D_N0isss3_auHrW6}', 'LKL{g00D_N0isss3_oK579V}', 'LKL{g00D_N0isss3_RVElQC}', 'LKL{g00D_N0isss3_oR9Aqc}', 'LKL{g00D_N0isss3_zPD9Za}', 'LKL{g00D_N0isss3_5khQWk}', 'LKL{g00D_N0isss3_wydJs2}', 'LKL{g00D_N0isss3_ttNaud}', 'LKL{g00D_N0isss3_kIMIU7}', 'LKL{g00D_N0isss3_SNahdB}', 'LKL{g00D_N0isss3_kBCPmL}', 'LKL{g00D_N0isss3_BpNCv3}', 'LKL{g00D_N0isss3_IZPzC4}', 'LKL{g00D_N0isss3_s6kihA}', 'LKL{g00D_N0isss3_KX4A5L}', 'LKL{g00D_N0isss3_uQUZzA}', 'LKL{g00D_N0isss3_632Y2A}', 'LKL{g00D_N0isss3_W135ft}', 'LKL{g00D_N0isss3_LE6N7W}', 'LKL{g00D_N0isss3_KyICZe}', 'LKL{g00D_N0isss3_zkD0rf}', 'LKL{g00D_N0isss3_9buyIv}', 'LKL{g00D_N0isss3_kGEOoy}', 'LKL{g00D_N0isss3_ZfBib1}', 'LKL{g00D_N0isss3_z0slZ2}', 'LKL{g00D_N0isss3_88A01U}', 'LKL{g00D_N0isss3_oUNEDP}', 'LKL{g00D_N0isss3_Cnyscg}', 'LKL{g00D_N0isss3_7IkYG0}', 'LKL{g00D_N0isss3_gF0wmI}', 'LKL{g00D_N0isss3_yMF2cR}', 'LKL{g00D_N0isss3_TXzhcc}', 'LKL{g00D_N0isss3_3vUVPT}', 'LKL{g00D_N0isss3_75g5Wu}', 'LKL{g00D_N0isss3_ZGkNWN}', 'LKL{g00D_N0isss3_9baV51}', 'LKL{g00D_N0isss3_emoXAO}', 'LKL{g00D_N0isss3_pVghGT}', 'LKL{g00D_N0isss3_tQFOWQ}', 'LKL{g00D_N0isss3_jd4Zue}', 'LKL{g00D_N0isss3_kcVj6F}', 'LKL{g00D_N0isss3_XBIDjP}', 'LKL{g00D_N0isss3_hCVw6C}', 'LKL{g00D_N0isss3_tkYVgw}', 'LKL{g00D_N0isss3_t7tZkx}', 'LKL{g00D_N0isss3_6xlFZ6}', 'LKL{g00D_N0isss3_HSWb9c}', 'LKL{g00D_N0isss3_sLOi9l}', 'LKL{g00D_N0isss3_YXkZdr}', 'LKL{g00D_N0isss3_K5w8aU}', 'LKL{g00D_N0isss3_mv8ziu}', 'LKL{g00D_N0isss3_vxVAEt}', 'LKL{g00D_N0isss3_azgJlU}', 'LKL{g00D_N0isss3_Z2NJdp}', 'LKL{g00D_N0isss3_JaF5vV}', 'LKL{g00D_N0isss3_KxSi7R}', 'LKL{g00D_N0isss3_OI6SRb}', 'LKL{g00D_N0isss3_4R6m2i}', 'LKL{g00D_N0isss3_xtOTsi}', 'LKL{g00D_N0isss3_8ulVa0}', 'LKL{g00D_N0isss3_HkjTle}', 'LKL{g00D_N0isss3_FcrnrL}', 'LKL{g00D_N0isss3_zIDDbw}', 'LKL{g00D_N0isss3_wh2Fh6}', 'LKL{g00D_N0isss3_pkrF9v}', 'LKL{g00D_N0isss3_1Lq22A}', 'LKL{g00D_N0isss3_Vyf8vW}', 'LKL{g00D_N0isss3_VZ9rR0}', 'LKL{g00D_N0isss3_aeVraB}', 'LKL{g00D_N0isss3_hSoDcd}', 'LKL{g00D_N0isss3_RkTNkY}', 
'LKL{g00D_N0isss3_2jRJ44}', 'LKL{g00D_N0isss3_p6PYM7}', 'LKL{g00D_N0isss3_nODrjr}', 'LKL{g00D_N0isss3_Btlsll}', 'LKL{g00D_N0isss3_48wYnO}', 'LKL{g00D_N0isss3_TBcmal}', 'LKL{g00D_N0isss3_lErmPs}', 'LKL{g00D_N0isss3_fEHtQe}', 'LKL{g00D_N0isss3_gjShxr}', 'LKL{g00D_N0isss3_Daj3S7}', 'LKL{g00D_N0isss3_CfIRqC}', 'LKL{g00D_N0isss3_pXUtMd}', 'LKL{g00D_N0isss3_rhVZVx}', 'LKL{g00D_N0isss3_CqsRWp}', 'LKL{g00D_N0isss3_yNBCA6}', 'LKL{g00D_N0isss3_vw6ySl}', 'LKL{g00D_N0isss3_JzxHxq}', 'LKL{g00D_N0isss3_Wcjjdr}', 'LKL{g00D_N0isss3_AKedWk}', 'LKL{g00D_N0isss3_hs10Sa}', 'LKL{g00D_N0isss3_5WBLqq}', 'LKL{g00D_N0isss3_1riPbD}', 'LKL{g00D_N0isss3_dV1wxO}', 'LKL{g00D_N0isss3_or6wJE}', 'LKL{g00D_N0isss3_bfr8E6}', 'LKL{g00D_N0isss3_Jlgc1D}', 'LKL{g00D_N0isss3_t1J8ZG}', 'LKL{g00D_N0isss3_8m9ery}', 'LKL{g00D_N0isss3_hiVkBd}', 'LKL{g00D_N0isss3_vIrWAD}', 'LKL{g00D_N0isss3_Mn9K3B}', 'LKL{g00D_N0isss3_pgjdiB}', 'LKL{g00D_N0isss3_azAstf}', 'LKL{g00D_N0isss3_wwURNX}', 'LKL{g00D_N0isss3_dtXquC}', 'LKL{g00D_N0isss3_qYuvXY}', 'LKL{g00D_N0isss3_rIkruu}', 'LKL{g00D_N0isss3_ATULAI}', 'LKL{g00D_N0isss3_wernRd}', 'LKL{g00D_N0isss3_pvziV6}', 'LKL{g00D_N0isss3_WPIIJQ}', 'LKL{g00D_N0isss3_yJPisd}', 'LKL{g00D_N0isss3_xrXPrQ}', 'LKL{g00D_N0isss3_j0IkqH}', 'LKL{g00D_N0isss3_wXBlZx}', 'LKL{g00D_N0isss3_DKBsw5}', 'LKL{g00D_N0isss3_l9JeSM}', 'LKL{g00D_N0isss3_jPVEqw}', 'LKL{g00D_N0isss3_BuGWtj}', 'LKL{g00D_N0isss3_mJWPmx}', 'LKL{g00D_N0isss3_2zAryd}', 'LKL{g00D_N0isss3_rP5bah}', 'LKL{g00D_N0isss3_Z86HGm}', 'LKL{g00D_N0isss3_m08J5V}', 'LKL{g00D_N0isss3_hukANs}', 'LKL{g00D_N0isss3_P2KSOO}', 'LKL{g00D_N0isss3_aauXbW}', 'LKL{g00D_N0isss3_kZ6TBv}'] | [
"[email protected]"
]
| |
ce505ca0ceaa5e400375f9fc5ee87089d635e977 | 325fde42058b2b82f8a4020048ff910cfdf737d7 | /src/databox/azext_databox/vendored_sdks/databox/__init__.py | 1c85885ae27c33da1710d57cd105b2ea74f26605 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | ebencarek/azure-cli-extensions | 46b0d18fe536fe5884b00d7ffa30f54c7d6887d1 | 42491b284e38f8853712a5af01836f83b04a1aa8 | refs/heads/master | 2023-04-12T00:28:44.828652 | 2021-03-30T22:34:13 | 2021-03-30T22:34:13 | 261,621,934 | 2 | 5 | MIT | 2020-10-09T18:21:52 | 2020-05-06T01:25:58 | Python | UTF-8 | Python | false | false | 736 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from ._configuration import DataBoxManagementClientConfiguration
from ._data_box_management_client import DataBoxManagementClient
__all__ = ['DataBoxManagementClient', 'DataBoxManagementClientConfiguration']
from .version import VERSION
__version__ = VERSION
| [
"[email protected]"
]
| |
517e4b682e6b12974385b9c23201af4bebefd1d0 | 5679731cee36c537615d285ed72810f4c6b17380 | /513_FindBottomLeftTreeValue.py | 0de079fbf90fd9385df6647f65a7e451a7aa108a | []
| no_license | manofmountain/LeetCode | 6b76105190a9b62df65a7b56b6def4120498b9fa | 718f688b3d316e8c10ef680d9c21ecd518d062f8 | refs/heads/master | 2021-01-12T03:41:48.318116 | 2017-07-18T12:35:58 | 2017-07-18T12:35:58 | 78,252,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | # 40.9%
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#from collections import deque
class Solution(object):
    """Two BFS strategies for finding the bottom-left value of a binary tree."""

    def findBottomLeftValue(self, root):
        """Level-order walk remembering the first value seen on each level.

        :type root: TreeNode
        :rtype: int (0 for an empty tree)
        """
        if not root:
            return 0
        level = [root]
        leftmost = root.val
        while level:
            leftmost = level[0].val
            next_level = []
            for node in level:
                if node.left:
                    next_level.append(node.left)
                if node.right:
                    next_level.append(node.right)
            level = next_level
        return leftmost

    def findLeftMostNode(self, root):
        """BFS pushing right before left: the last node visited is bottom-left."""
        queue = [root]
        for node in queue:
            if node.right:
                queue.append(node.right)
            if node.left:
                queue.append(node.left)
        return node.val
| [
"[email protected]"
]
| |
540a90e5ada5365bcdd02cc93f075cf3bbcc7940 | aba9b00edec394f1389a7ecf88a290112303414d | /semestr_8/analiza_obrazu/projekt/image_anal.py | 27ef23203928b6d0aa4647a3b8b98e00f6ab0559 | []
| no_license | torgiren/szkola | 2aca12807f0030f8e2ae2dfcb808bf7cae5e2e27 | 5ed18bed273ab25b8e52a488e28af239b8beb89c | refs/heads/master | 2020-12-25T18:18:36.317496 | 2014-04-27T23:43:21 | 2014-04-27T23:43:21 | 3,892,030 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 33,859 | py | #*-* coding: utf8 *-*
"""Moduł zawierający klasy używane do przetwarzania obrazu"""
import numpy as np
from scipy import misc
import itertools
from pprint import pprint
from glob import glob
import os
class NoImageError(Exception):
    """Raised when an operation is attempted before any image is loaded."""
class NoSuchMethodError(Exception):
    """Raised when an unknown method name is passed to an operation."""
class FilterSizeError(Exception):
    """Raised when a convolution kernel has the wrong shape."""
def gen_filename(down, left, up, right):
    """Build the crop filename: four zero-padded coordinates joined by 'x', plus .png."""
    return "x".join("%05d" % value for value in (down, left, up, right)) + ".png"
def find_left(tab, point, factor=1):
    """Boxes from *tab* lying left of *point* that overlap it vertically.

    tab    -- iterable of boxes [b0, b1, b2, b3]; b1/b3 are used as
              left/right edges and b0/b2 as lower/upper rows (inferred
              from usage -- TODO confirm the layout).
    point  -- reference box in the same format.
    factor -- widens the vertical acceptance band (default 1).

    Returns the matches sorted by left edge, rightmost first, or
    np.array(None) when nothing qualifies.

    Fixes: removed leftover debug prints (Python 2 syntax) and
    materialised the filter results as lists so the indexing below also
    works on Python 3, where filter() returns a lazy iterator.
    """
    height = point[0] - point[2]
    lo = point[0] - height * factor
    hi = point[2] + height * factor
    # Candidates whose left edge lies left of the reference box.
    cands = [box for box in tab if box[1] < point[1]]
    # Keep boxes straddling either acceptance row.
    cands = np.array([box for box in cands
                      if (box[0] > lo and box[2] < lo)
                      or (box[0] > hi and box[2] < hi)])
    if not len(cands):
        return np.array(None)
    order = cands[:, 1].argsort()[::-1]  # rightmost (largest left edge) first
    return cands[order]
def find_down(tab, point, factor=1):
    """Boxes from *tab* lying below *point* that overlap it horizontally.

    Same box layout as find_left; *factor* widens the horizontal
    acceptance band.  Returns matches sorted by their first coordinate
    (top-down), or np.array(None) when nothing qualifies.

    Fix: filter results are materialised as lists so the indexing below
    also works on Python 3, where filter() returns a lazy iterator.
    """
    width = point[3] - point[1]
    # Candidates whose upper row is below the reference box.
    cands = [box for box in tab if box[2] > point[0]]
    # Keep boxes whose widened horizontal span covers either edge of point.
    cands = np.array([box for box in cands
                      if (box[1] - width * factor < point[1]
                          and box[3] + width * factor > point[1])
                      or (box[1] - width * factor < point[3]
                          and box[3] + width * factor > point[3])])
    if not len(cands):
        return np.array(None)
    return cands[cands[:, 0].argsort()]
class ImageAnal:
"""Klasa przetwarzająca obrazy"""
def image_loaded(fn):
"""dekorator.
Sprawdza czy został załadowany obraz"""
def wrapped(self, *args, **kwargs):
if self.__image is None:
raise NoImageError()
return fn(self, *args, **kwargs)
return wrapped
def __init__(self, path=None):
"""Konstruktor obiektu ImageAnal"""
self.__image = None
if path:
self.load_image(path)
    def load_image(self, path):
        """Read the image file at *path* into the internal buffer via scipy.misc.imread."""
        self.__image = misc.imread(path)
    def open(self, path):
        """Alias for load_image: read the image file at *path*."""
        self.load_image(path)
    @image_loaded
    def negative(self):
        """Invert the image in place (each channel value becomes 255 - value)."""
        self.__image = 255 - self.__image
@image_loaded
def grayscale(self, method=1):
"""Konwertuje do odcieni szarości.
method:
1 (default) wykorzystuje metodę wartości średniej kolorów
2 wykorzystuje wzór 0.3*R+0.59*G+0.11*B
Obsługa tylko formatu RGB"""
if method == 1:
self.__grayscale1()
elif method == 2:
self.__grayscale2()
else:
raise NoSuchMethodError()
# @image_loaded
# def convert(self, fmt):
# self.__image = self.__image.convert(fmt)
# """Konwertuje obraz do zadanego formatu"""
@image_loaded
def normalize(self):
data = self.__image
R = data[:, 0]
G = data[:, 1]
B = data[:, 2]
R = (R - R.min()) * 255 / R.max()
G = (G - G.min()) * 255 / G.max()
B = (B - B.min()) * 255 / B.max()
data[:, 0] = R
data[:, 1] = G
data[:, 2] = B
self.__image = data
@image_loaded
def scale(self, factor):
if factor < 1:
self.__scale_down(factor)
else:
self.__scale_up(factor)
@image_loaded
def progowanie(self, method="global", otoczenie=5, odchylenie=15):
"""Przeprowadza progowanie obrazka.
metody:
global - progowanie globalne
local - progowanie lokalne
mixed - progowanie mieszane
parametry:
otoczenie = rozmiar otoczenia pixela
odchylenie - stopień ochylenia od średniej"""
self.__grayscale1()
if method == "global":
self.__progowanie_globalne()
elif method == "local":
self.__progowanie_lokalne(otoczenie=otoczenie)
elif method == "mixed":
self.__progowanie_mieszane(
otoczenie=otoczenie, odchylenie=odchylenie)
@image_loaded
def splot(self, filter):
filter = np.array(filter, dtype=np.int8)
if filter.shape != (3, 3):
raise(FilterSizeError)
data = self.__image
new = self.__expand(data, 1)
new = np.array(new, dtype=np.int32)
# new = np.array(new, dtype=np.uint8)
# print (filter[0,0] * new[:-2,:-2])[160,130]
# print (filter[0,1] * new[:-2,1:-1])[160,130]
# print (filter[0,2] * new[:-2,2:])[160,130]
# print (filter[1,0] * new[1:-1,:-2])[160,130]
# print (filter[1,1] * new[1:-1,1:-1])[160,130]
# print (filter[1,2] * new[1:-1,2:])[160,130]
# print (filter[2,0] * new[2:,:-2])[160,130]
# print (filter[2,1] * new[2:,1:-1])[160,130]
# print (filter[2,2] * new[2:,2:])[160,130]
new = (filter[0, 0] * new[:-2, :-2] + filter[0, 1] * new[:-2, 1:-1] +
filter[0, 2] * new[:-2, 2:] + filter[1, 0] * new[1:-1, :-2] +
filter[1, 1] * new[1:-1, 1:-1] + filter[1, 2] * new[1:-1, 2:] +
filter[2, 0] * new[2:, :-2] + filter[2, 1] * new[2:, 1:-1] +
filter[2, 2] * new[2:, 2:])
new = new / (filter.sum())
new -= 255
new = new * (new < 0)
new += 255
new = new * (new > 0)
data = np.array(new, dtype=np.uint8)
self.__image = data
# self.normalize()
@image_loaded
def brightness(self, val):
data = self.__image
new = np.array(data[:, :, :3], dtype=np.int32)
new += val
new = self.__shrink_values(new)
self.__image[:, :, :3] = new
@image_loaded
def roberts(self):
data = self.__image
new = self.__expand(np.array(data, np.int32), 1)
data[:, :] = self.__shrink_values(abs(new[1:-1, 1:-1] - new[2:, 2:]) +
abs(new[2:, 1:-1] - new[1:-1, 2:]))
self.__image = data
@image_loaded
def sobel(self):
data = self.__image
new = self.__expand(np.array(data, np.int32), 1)
new[1:-1, 1:-1] = (((new[2:, :-2] + 2 * new[2:, 1:-1] + new[2:, 2:]) -
(new[:-2, :-2] + 2 * new[:-2, 1:-1] + new[:-2, 2:])) ** 2 +
((new[:-2, 2:] + 2 * new[1:-1, :-2] + new[2:, 2:]) -
(new[:-2, :-2] + 2 * new[1:-1, :-2] + new[2:, :-2])) ** 2) ** (0.5)
new = self.__shrink_values(new)
data = new[1:-1, 1:-1]
self.__image = data
    @image_loaded
    def rotate(self, angle):
        """Rotate the image by *angle* degrees about its centre.

        Forward-maps every source pixel onto a 3x-larger canvas, then
        crops the canvas to the non-zero bounding box.

        NOTE(review): the destination indices are floats (cos/sin
        products) -- this relies on old NumPy truncating float indices,
        which modern NumPy rejects; forward mapping can also leave
        unfilled holes.  Confirm against the NumPy version in use.
        """
        angle = np.deg2rad(angle)
        data = self.__image
        # Rotation pivot (image centre); Python 2 integer division.
        px = data.shape[0] / 2
        py = data.shape[1] / 2
        # Oversized canvas so any rotation of the image fits.
        new = np.zeros(
            (data.shape[0] * 3, data.shape[1] * 3, data.shape[2]), np.uint8)
        for i, j in itertools.product(np.arange(0, data.shape[0]), np.arange(0, data.shape[1]), repeat=1):
            new[np.cos(angle) * i - np.sin(angle) * j + px, np.sin(
                angle) * i + np.cos(angle) * j + py] = data[i, j]
        # Crop to the columns/rows that received at least one pixel.
        horiz = np.nonzero(new.sum(axis=0) != 0)[0]
        vert = np.nonzero(new.sum(axis=1) != 0)[0]
        new = new[vert[0]:vert[-1], horiz[0]:horiz[-1]]
        self.__image = new
    @image_loaded
    def szum(self, prop, method):
        """Add noise ("szum") of the given kind with proportion *prop*.

        method: 'solpieprz' (salt & pepper), 'rownomierny1' or
        'rownomierny2' (uniform noise variants).

        NOTE(review): the 'rownomierny2' branch calls
        __szum_rownomierny1 -- likely a copy-paste bug (expected
        __szum_rownomierny2); also, unknown methods are silently
        ignored here, unlike odszumianie() which raises.
        """
        if method == 'solpieprz':
            self.__szum_solpieprz(prop)
        elif method == 'rownomierny1':
            self.__szum_rownomierny1(prop)
        elif method == 'rownomierny2':
            self.__szum_rownomierny1(prop)
    @image_loaded
    def odszumianie(self, method):
        """Denoise ("odszumianie") the image.

        method: 'srednia' (mean filter), 'mediana' (median filter) or
        'mediana2' (second median variant).  Raises NoSuchMethodError
        for anything else.

        NOTE(review): each helper is a bound method yet is called with
        an extra positional `self` argument -- verify the helper
        signatures actually accept it; '__odszymianie_medianowe2' also
        looks misspelled relative to the other helper names.
        """
        if method == 'srednia':
            self.__odszumianie_srednie(self)
        elif method == 'mediana':
            self.__odszumianie_medianowe(self)
        elif method == 'mediana2':
            self.__odszymianie_medianowe2(self)
        else:
            raise NoSuchMethodError()
    @image_loaded
    def maska(self):
        """Binarise channel 0 (threshold 125) and mark pixels matching 4 neighbour masks.

        The result is written back as a 3-channel float array of 0/1
        values (not 0/255 uint8).

        NOTE(review): the second and third OR-terms of the mask are
        byte-identical -- one of them is probably meant to test a
        different neighbour direction; confirm the intended mask set.
        """
        data = self.__image
        data = data[:, :, 0]
        # 1 = "dark" pixel (below the fixed threshold of 125).
        data = (data < 125) * 1
        tmp = np.zeros(data.shape)
        tmp[1:-1, 1:-1] = ((data[1:-1, :-2] == 0) & (data[1:-1, 1:-1] == 1) & (data[1:-1, 2:] == 1) & (data[:-2, 2:] == 1) & (data[2:, 2:] == 1) |
                           (data[2:, 1:-1] == 0) & (data[1:-1, 1:-1] == 1) & (data[:-2, 2:] == 1) & (data[:-2, 1:-1] == 1) & (data[:-2, :-2] == 1) |
                           (data[2:, 1:-1] == 0) & (data[1:-1, 1:-1] == 1) & (data[:-2, 2:] == 1) & (data[:-2, 1:-1] == 1) & (data[:-2, :-2] == 1) |
                           (data[:-2, 1:-1] == 0) & (data[1:-1, 1:-1] == 1) & (data[2:, :-2] == 1) & (data[2:, 1:-1] == 1) & (data[2:, 2:] == 1))
        self.__image = np.zeros((data.shape[0], data.shape[1], 3))
        self.__image[:, :, 0] = tmp
        self.__image[:, :, 1] = tmp
        self.__image[:, :, 2] = tmp
    @image_loaded
    def KKM(self):
        """Skeletonize the binarized image with a KMM-style thinning pass.

        Iterates to a fixed point.  Per iteration: mark foreground pixels that
        touch background edge-wise (value 2) or corner-wise (+2 -> 3), promote
        2-pixels whose weighted 8-neighbourhood code is in `czworki` to 4,
        then delete (or keep) 4-, 2- and 3-pixels according to the `wyciecia`
        lookup table.  Works on channel 0 of the image (threshold < 125) and
        writes the skeleton back as a 3-channel 0/255 image.
        """
        # np.set_printoptions(linewidth=504, threshold='nan')
        # Neighbourhood codes after which a contour pixel becomes a "corner" (4).
        czworki = [3, 6, 7, 12, 14, 15, 24, 28, 30, 48, 56, 60, 96,
                   112, 120, 129, 131, 135, 192, 193, 195, 224, 225, 240]
        # Neighbourhood codes for which a marked pixel is deleted.
        wyciecia = [3, 5, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31, 48, 52, 53, 54, 55, 56, 60, 61, 62, 63, 65, 67, 69, 71, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 95, 97, 99, 101, 103, 109, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 131, 133, 135, 141, 143, 149, 151, 157, 159, 181, 183, 189, 191, 192, 193, 195, 197, 199, 205, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 227, 229, 231, 237, 239, 240, 241, 243, 244, 245, 246, 247, 248, 249, 251, 252, 253, 254, 255]
        data = self.__image
        print data.shape
        data = data[:, :, 0]
        data = (data < 125) * 1
        # data[2,2] = 1
        # data = np.array([[0,0,0,0,0,0,0,0,0],
        #                 [0,0,0,0,0,1,0,0,0],
        #                 [0,1,0,0,0,1,1,0,0],
        #                 [0,1,0,0,0,1,1,0,0],
        #                 [1,1,1,0,0,1,1,0,0],
        #                 [0,1,0,0,0,1,1,0,0],
        #                 [0,0,0,0,0,0,0,1,0],
        #                 [0,0,0,0,0,0,0,0,0]])
        old = np.zeros(data.shape)
        iter = 0
        verb = False
        # Repeat until one full pass changes nothing (fixed point).
        while not np.array_equal(old, data):
            print "iteracja: ", iter
            iter += 1
            old = data.copy()
            if verb:
                print "Poczatkowe"
                print data
            # step I (krok I): mark pixels with a 4-connected background neighbour
            pion = np.zeros(data.shape)
            pion[1:-1, 1:-1] = (data[:-2, 1:-1] == 0) | (data[2:, 1:-1] == 0) |\
                (data[1:-1, :-2] == 0) | (data[1:-1, 2:] == 0)
            # pion = pion < 4
            pion = (data == 1) * pion
            # data = (data * (-pion)) + (pion * 2)
            data = data + pion
            if verb:
                print "Po kroku I"
                print data
            # step II (krok II): mark pixels with a diagonal background neighbour
            pion = np.zeros(data.shape)
            pion[1:-1, 1:-1] = (data[:-2, :-2] == 0) | (data[:-2, 2:] == 0) |\
                (data[2:, :-2] == 0) | (data[2:, 2:] == 0)
            # pion = pion < 4
            pion = (data == 1) * pion
            # data = (data * (-pion)) + (pion * 3)
            data = data + pion * 2
            if verb:
                print "Po kroku II"
                print data
            # step III (krok III): weighted 8-neighbourhood code of every pixel;
            # 2-pixels whose code is in `czworki` are promoted to 4.
            tmp = np.zeros(data.shape)
            tmp[1:-1, 1:-1] = 1 * (data[:-2, :-2] > 0) +\
                2 * (data[1:-1, :-2] > 0) +\
                4 * (data[2:, :-2] > 0) +\
                128 * (data[:-2, 1:-1] > 0) +\
                8 * (data[2:, 1:-1] > 0) +\
                64 * (data[:-2, 2:] > 0) +\
                32 * (data[1:-1, 2:] > 0) +\
                16 * (data[2:, 2:] > 0)
            tmp = (data == 2) * tmp
            tmp2 = np.zeros(tmp.shape, dtype=np.bool)
            for i in czworki:
                tmp2 |= (tmp == i)
            data += (tmp2 * 2)
            if verb:
                print "Po kroku III"
                print data
            # step IV (krok IV): 4-pixels -> deleted if code in `wyciecia`, else back to 1
            tmp = np.zeros(data.shape)
            tmp[1:-1, 1:-1] = 1 * (data[:-2, :-2] > 0) +\
                2 * (data[1:-1, :-2] > 0) +\
                4 * (data[2:, :-2] > 0) +\
                128 * (data[:-2, 1:-1] > 0) +\
                8 * (data[2:, 1:-1] > 0) +\
                64 * (data[:-2, 2:] > 0) +\
                32 * (data[1:-1, 2:] > 0) +\
                16 * (data[2:, 2:] > 0)
            tmp = (data == 4) * tmp
            tmp2 = np.zeros(tmp.shape, dtype=np.bool)
            for i in wyciecia:
                tmp2 |= (tmp == i)
            tmp = (tmp > 0) - tmp2
            data = data * (data != 4) + tmp * 1 + tmp2 * 0
            if verb:
                print "Po kroku IV"
                print data
            # step V (krok V): same treatment for the remaining 2-pixels
            tmp = np.zeros(data.shape)
            tmp[1:-1, 1:-1] = 1 * (data[:-2, :-2] > 0) +\
                2 * (data[1:-1, :-2] > 0) +\
                4 * (data[2:, :-2] > 0) +\
                128 * (data[:-2, 1:-1] > 0) +\
                8 * (data[2:, 1:-1] > 0) +\
                64 * (data[:-2, 2:] > 0) +\
                32 * (data[1:-1, 2:] > 0) +\
                16 * (data[2:, 2:] > 0)
            tmp = (data == 2) * tmp
            tmp2 = np.zeros(tmp.shape, dtype=np.bool)
            for i in wyciecia:
                tmp2 |= (tmp == i)
            tmp = (tmp > 0) - tmp2
            data = data * (data != 2) + tmp * 1 + tmp2 * 0
            if verb:
                print "Po kroku V"
                print data
            # step VI (krok VI): same treatment for the 3-pixels
            tmp = np.zeros(data.shape)
            tmp[1:-1, 1:-1] = 1 * (data[:-2, :-2] > 0) +\
                2 * (data[1:-1, :-2] > 0) +\
                4 * (data[2:, :-2] > 0) +\
                128 * (data[:-2, 1:-1] > 0) +\
                8 * (data[2:, 1:-1] > 0) +\
                64 * (data[:-2, 2:] > 0) +\
                32 * (data[1:-1, 2:] > 0) +\
                16 * (data[2:, 2:] > 0)
            tmp = (data == 3) * tmp
            tmp2 = np.zeros(tmp.shape, dtype=np.bool)
            for i in wyciecia:
                tmp2 |= (tmp == i)
            tmp = (tmp > 0) - tmp2
            data = data * (data != 3) + tmp * 1 + tmp2 * 0
            if verb:
                print "Po kroku VI"
                print data
        # Rescale the 0/1 skeleton to 0/255 and duplicate it into 3 channels.
        data = data * 255
        print data.shape
        self.__image = np.zeros((data.shape[0], data.shape[1], 3))
        print self.__image.shape
        self.__image[:, :, 0] = data
        self.__image[:, :, 1] = data
        self.__image[:, :, 2] = data
        print self.__image.shape
        # self.__image = data
    @image_loaded
    def save(self, path):
        """Save the image to *path* (the alpha channel, if any, is forced opaque first)."""
        self.__clear_alpha()
        misc.imsave(path, self.__image)
def __grayscale1(self):
"""Konwersja do skali szarości"""
data = self.__image
# data[:,:] = 3 * (data[:,:].mean())
# x = [4 * (int(x.mean()),) for x in data]
new = np.array(data, dtype=np.uint32)
new[:, :, 0] += data[:, :, 1]
new[:, :, 0] += data[:, :, 2]
new[:, :, 0] /= 3
data[:, :, 1] = data[:, :, 2] = data[:, :, 0] = new[:, :, 0]
self.__image = data
def __scale_down(self, factor):
factor = (int)(factor ** (-1))
data = self.__image
data = np.array(data[::factor, ::factor, :])
self.__image = data
def __scale_up(self, factor):
data = self.__image
new = np.zeros(
(data.shape[0] * factor, data.shape[1] * factor, data.shape[2]))
for x in xrange(data.shape[0]):
for y in xrange(data.shape[1]):
new[x * factor:(x + 1) * factor, y *
factor:(y + 1) * factor, :] = data[x, y, :]
self.__image = new
def __progowanie_globalne(self, *args, **kwargs):
data = self.__image
mean = self.__prog_globalny()
# mean = data[:, :, 0].mean()
data = (data > mean) * 255.
self.__image = data
def __progowanie_lokalne(self, otoczenie=5, *argx, **kwargs):
data = self.__image
prog = self.__prog_lokalny(otoczenie)
data = (data > prog) * 255
self.__image = data
def __progowanie_mieszane(self, otoczenie, odchylenie):
data = self.__image
prog = self.__prog_mieszany(otoczenie, odchylenie)
data = (data > prog) * 255
self.__image = data
def __prog_globalny(self):
data = self.__image
return data[:, :, 0].mean()
    def __prog_lokalny(self, otoczenie):
        """Per-pixel threshold: mean over a (2*otoczenie+1)^2 neighbourhood.

        The image is first edge-padded by `otoczenie` pixels (__expand); the
        window sum is then accumulated one shifted slice at a time, keeping
        the whole computation vectorized (the commented loop below is the
        naive equivalent).  Returns an array shaped like the image.
        """
        data = self.__image
        new = self.__expand(data, otoczenie)
        prog = np.zeros(data.shape)
        # for x in xrange(otoczenie, new.shape[0] - otoczenie):
        #     for y in xrange(otoczenie, new.shape[1] - otoczenie):
        #         prog[x - otoczenie, y - otoczenie] = new[x - otoczenie: x + otoczenie, y - otoczenie:y + otoczenie, :3].mean()
        # Each (d[0], d[1]) offset adds one shifted copy of the padded image.
        for d in itertools.product(np.arange(0, 2 * otoczenie + 1), repeat=2):
            prog[:, :] += new[d[0]:new.shape[0] - 2 * otoczenie + d[0],
                              d[1]:new.shape[1] - 2 * otoczenie + d[1]]
        prog /= (2 * otoczenie + 1) ** 2
        # print prog
        return prog
def __prog_mieszany(self, otoczenie, odchylenie):
globa = self.__prog_globaalny()
prog = self.__prog_lokalny(otoczenie)
prog -= (globa + odchylenie)
prog = prog * (prog > 0)
prog -= 2 * odchylenie
prog = prog * (prog < 0)
prog += (globa + odchylenie)
return prog
def __expand(self, src, otoczenie):
data = src.copy()
left = data[:, 0, :]
right = data[:, -1, :]
for i in xrange(otoczenie - 1):
left = np.column_stack((left, data[:, 0, :]))
right = np.column_stack((right, data[:, -1, :]))
left = left.reshape((data.shape[0], -1, data.shape[2]))
right = right.reshape((data.shape[0], -1, data.shape[2]))
data = np.column_stack((left, data, right))
top = data[0, :, :]
bottom = data[-1, :, :]
for i in xrange(otoczenie - 1):
top = np.column_stack((top, data[0, :, :]))
bottom = np.column_stack((bottom, data[-1, :, :]))
top = top.reshape((-1, data.shape[1], data.shape[2]))
bottom = bottom.reshape((-1, data.shape[1], data.shape[2]))
data = np.vstack((top, data, bottom))
return data
def __clear_alpha(self):
# print "clear alpha"
if len(self.__image.shape) > 2:
if self.__image.shape[2] == 4:
self.__image[:, :, 3] = 255
pass
def __shrink_values(self, src):
data = src.copy()
data = data * (data > 0)
data -= 255
data = data * (data < 0)
data += 255
return data
    def __szum_solpieprz(self, prop):
        """Apply salt-and-pepper noise to roughly a *prop* fraction of pixels.

        NOTE(review): `r` is drawn from [0, 100) but never thresholded -- the
        commented-out ``R = r < prop`` strongly suggests it should be.  As
        written, ``(1 - r)`` is negative for r > 1, scaling pixel values far
        outside [0, 255]; verify before relying on this method.
        The ``.repeat(4)`` calls require a 4-channel (RGBA) image.
        """
        data = self.__image
        prop *= 100
        s = data.shape[0] * data.shape[1]
        s2 = (data.shape[0], data.shape[1])
        r = np.random.randint(100, size=s).reshape(s2)
        # R = r < prop
        r2 = np.random.randint(2, size=s).reshape(s2)
        data = data * (1 - r).repeat(
            4).reshape(data.shape) + r2.repeat(4).reshape(data.shape)
        self.__image = data
    def __szum_rownomierny1(self, prop):
        """Add uniform noise (offset in +/-[10, 30)) to ~*prop* of the pixels.

        The same offset is applied to all channels of a selected pixel; the
        ``.repeat(4)`` requires a 4-channel (RGBA) image.  The result is
        clamped to [0, 255] with the same mask-multiply trick as
        __shrink_values.
        """
        data = self.__image
        prop *= 100
        s2 = (data.shape[0], data.shape[1])
        r = np.random.randint(100, size=s2).reshape(s2)
        # Boolean mask selecting the ~prop fraction of noisy pixels.
        r = r < prop
        tmp = np.array(data, dtype=np.int64)
        r2 = np.random.randint(20, size=s2).reshape(s2) - 10
        # Push offsets away from zero: range becomes +/-[10, 30).
        r2 = r2 + (r2 > 0) * 20 - (r2 < 0) * 20
        r2 = r2 * r
        r2 = r2.repeat(4).reshape(data.shape)
        tmp += r2
        # Inline clamp to [0, 255].
        tmp = tmp * (tmp > 0)
        tmp -= 255
        tmp = tmp * (tmp < 0)
        tmp += 255
        self.__image = tmp
    def __szum_rownomierny2(self, prop):
        """Per-channel variant of __szum_rownomierny1.

        Noise is drawn independently for every array element (every channel
        of every pixel, alpha included if present).  Uses the Python 2
        builtin ``reduce`` to compute the element count.
        """
        data = self.__image
        prop *= 100
        s = reduce(lambda x, y: x * y, data.shape)
        r = np.random.randint(100, size=s).reshape(s)
        r = r < prop
        tmp = np.array(data, dtype=np.int64)
        r2 = np.random.randint(20, size=s) - 10
        r2 = r2 * r
        # Push the surviving offsets away from zero (masked-out entries stay 0).
        r2 = r2 + (r2 > 0) * 20 - (r2 < 0) * 20
        r2 = r2.reshape(data.shape)
        tmp += r2
        # Inline clamp to [0, 255].
        tmp = tmp * (tmp > 0)
        tmp -= 255
        tmp = tmp * (tmp < 0)
        tmp += 255
        self.__image = tmp
    def segment1(self, directory):
        """Segment the binarized image into characters via projection profiles.

        Text lines are found as runs of nonzero row sums; each line is split
        into characters by column sums, trimmed vertically, resized to
        100x100 and written to *directory* as a numbered 3-channel .png.
        """
        def ranges(val):
            # Return [start, end) runs of nonzero values.
            # NOTE(review): despite taking `val`, this reads the *enclosing*
            # `hist` variable -- callers must assign `hist` before calling.
            lines = []
            tmp = 0
            combo = False
            for (i, j) in enumerate(hist):
                if j > 0 and not combo:
                    combo = True
                    tmp = i
                elif not j and combo:
                    combo = False
                    lines.append([tmp, i])
            if combo:
                lines.append([tmp, i])
            return lines
        # print type(self.__image)
        # print self.__image.shape
        data = (self.__image[:, :, 0] < 127) * 1
        misc.imsave('binary.png', data)
        hist = data.sum(axis=1)
        lines = ranges(hist)
        # print lines
        num = 0
        for l in lines:
            line = data[l[0]:l[1], :]
            hist = line.sum(axis=0)
            chars = ranges(hist)
            for c in chars:
                path = directory + '/%05d.png' % num
                # print path
                c1 = data[l[0]:l[1], c[0]:c[1]]
                hist = c1.sum(axis=1)
                lines2 = ranges(hist)
                # print lines2
                # if lines2:
                litera = misc.imresize(data[l[0] + lines2[0][0]:l[0] + lines2[
                    -1][1], c[0]:c[1]], size=(100, 100))
                litera = [litera, litera, litera]
                # misc.imsave(path, data[l[0]+lines2[0][0]:l[0]+lines2[-1][1], c[0]:c[1]])
                misc.imsave(path, litera)
                # else:
                #     misc.imsave(path, data[l[0]:l[1], c[0]:c[1]])
                num += 1
    def segment2(self, directory):
        """Segment the image into glyphs by connected-component flood fill,
        then re-attach detached dots to their 'i'/'j' letter bodies.

        Each 8-connected component is cropped and saved to *directory* under
        a filename encoding its bounding box (via gen_filename).  Components
        much smaller than the average height/width are treated as dot
        candidates; candidates not classified as sentence-ending periods are
        merged with the component found below them (find_down) into a single
        image file.
        """
        print "Segment2"
        def neighbour(data, p):
            # 8-connected nonzero neighbours of p (clamped away from the border).
            p = list(p)
            if p[0] == 0:
                p[0] = 1
            if p[1] == 0:
                p[1] = 1
            return set([tuple(i + p - (1, 1)) for i in np.transpose(data[p[0] - 1:p[0] + 2, p[1] - 1:p[1] + 2].nonzero())])
        # self.kkm2()
        # print "po kkm"
        # print self.__image.shape
        all_chars = []
        pprint(self.__image[:, :, 0])
        data = (self.__image[:, :, 0] < 130) * 1
        misc.imsave('binary.png', data)
        buf = set()
        checked = set()
        num = 0
        pprint(data)
        licznik = 1
        # Flood-fill one component per outer iteration, erasing it from `data`.
        while data.sum():
            checked = set()
            buf.add(tuple(np.transpose(data.nonzero())[0]))
            while buf:
                # print "buf",buf
                p = buf.pop()
                # print "point",p
                n = neighbour(data, p)
                # print "neighbour", n
                checked.add(p)
                # print "checked", checked
                buf = buf.union(n - checked)
                # print "buf", buf
                # print "**********"
            print licznik
            licznik += 1
            checked = np.array(list(checked))
            minx = checked[:, 0].min()
            miny = checked[:, 1].min()
            maxx = checked[:, 0].max() + 1
            maxy = checked[:, 1].max() + 1
            tmp = np.zeros((1 + maxx - minx, 1 + maxy - miny))
            #path = directory + '/%05dx%05dx%05dx%05d.png'%(minx, maxy, maxx, miny)
            #path = directory + '/%05dx%05dx%05dx%05d.png'%(maxx, miny, minx, maxy)
            filename = gen_filename(maxx, miny, minx, maxy)
            path = directory + '/' + filename
            all_chars.append(
                np.array(filename.split('.')[0].split('x'), dtype=int))
            for i in checked:
                data[i[0], i[1]] = 0
                tmp[i[0] - minx, i[1] - miny] = 1
            misc.imsave(path, tmp)
            num += 1
        # Merge detached dots with the bodies of 'i' and 'j' letters.
        files = glob(directory + "/*.png")
        print "szukam kandydatów na kropki"
        i = files[4]
        # a = ".".join(i.split('/')[-1].split('.')[:-1]).split('x')
        # Parse each bounding box back out of its filename.
        poz = np.array([".".join(i.split(
            '/')[-1].split('.')[:-1]).split('x') for i in files], dtype=int)
        # poz = [(int(i[0]), int(i[1]), int(i[2]), int(i[3])) for i in poz]
        print poz
        # Append height (i[0]-i[2]) and width (i[3]-i[1]) columns.
        poz = np.array([i.tolist() + [i[0] - i[2], i[3] - i[1]] for i in poz])
        # print poz
        poz.tofile("/tmp/poz.txt", sep="&")
        # Dot candidates: height AND width more than half a std below the mean height.
        kropki = [tuple(i) for i in poz if i[4] < (poz[:, 4].mean() - 0.5 * poz[:, 4].std()) and i[5] < (poz[:, 4].mean() - 0.5 * poz[:, 4].std())]
        # print poz[:, 4].mean() - 2 * poz[:, 4].std()
        print kropki
        kropki = set(kropki)
        kropki_iter = kropki.copy()
        # print "all chars"
        # pprint(all_chars)
        # First pass: drop candidates that are sentence-ending periods
        # (judged by the component immediately to their left).
        for k in kropki_iter:
            found = False
            print "Sprawdzam kropke:", k
            lista = find_left(poz, k)
            if not lista.shape:
                found = True
            while not found:
                if not len(lista):
                    found = True
                    break
                tmp = lista[0]
                lista = lista[1:]
                #pprint(kropki)
                # tmp = np.array(filter(lambda x: x[1] < k[1], poz))
                # tmp = filter(lambda x: x[1] == tmp[:, 1].max(), tmp)[0]
                print "literka na lewo: ", tmp
                if (tmp[0] > (k[2] - k[4])) and (tmp[0] < k[0] + k[4]):
                    if tuple(tmp) in kropki_iter:
                        print "warunek mówi że na końcu, ale jest koło innej kropki więc to jest kropka!!!"
                    else:
                        print "kropka na końcu"
                        found = True
                        kropki.remove(k)
                else:
                    mid = (float(tmp[0]) + tmp[2]) / 2.0
                    top = float(tmp[2])
                    print "mid i top oraz k[0]:", mid, top, k[0]
                    print "mid - k[0], top - k[0]", mid - k[0], top - k[0]
                    if abs(mid - k[0]) < abs(top - k[0]):
                        print "Kropka na końcu. drugi warunek"
                        kropki.remove(k)
                        found = True
                    else:
                        print "Kropka do doklejenia", k
                        mid = float(k[1] + k[3]) / 2.0
                        print filter(lambda x: x[1] <= mid and x[3] >= mid, all_chars)
                        found = True
            print ""
        print "Kropki nad literami: ", kropki
        # Second pass: paste each surviving dot and the component below it
        # into one combined image, deleting the two originals.
        for i in kropki:
            print "Sklejam kropke", i
            doklejka = find_down(poz, i)
            if not doklejka.shape:
                continue
            doklejka = doklejka[0]
            print "doklejka: ", doklejka
            print doklejka[0]
            maxy = doklejka[0]
            miny = i[2]
            if doklejka[1] < i[1]:
                minx = doklejka[1]
            else:
                minx = i[1]
            if doklejka[3] > i[3]:
                maxx = doklejka[3]
            else:
                maxx = i[3]
            sizex = maxx - minx + 1
            sizey = maxy - miny + 1
            new = np.zeros((sizex , sizey )).T
            dx = i[1] - minx
            dy = i[2] - miny
            filename = gen_filename(i[0], i[1], i[2], i[3])
            path = directory + '/' + filename
            img = misc.imread(path)
            print filename
            os.remove(directory + '/' + filename)
            odx = dy
            dox = dy + i[0] - i[2] + 1
            ody = dx
            doy = dx + i[3] - i[1] + 1
            print "minx=%d, maxx=%d, miny=%d, maxy=%d"%(minx, maxx, miny, maxy)
            print "sizex=%d, sizey=%d"%(sizex, sizey)
            print "new.shape", new.shape
            print "img.shape", img.shape
            print ody,":", doy, ", ",odx,":", dox
            print "..."
            new[odx:dox, ody:doy] = img
            dx = doklejka[1] - minx
            dy = doklejka[2] - miny
            filename = gen_filename(doklejka[0], doklejka[1], doklejka[2], doklejka[3])
            path = directory + '/' + filename
            img = misc.imread(path)
            print filename
            os.remove(directory + '/' + filename)
            odx = dy
            dox = dy + doklejka[0] - doklejka[2] + 1
            ody = dx
            doy = dx + doklejka[3] - doklejka[1] + 1
            print "minx=%d, maxx=%d, miny=%d, maxy=%d"%(minx, maxx, miny, maxy)
            print "sizex=%d, sizey=%d"%(sizex, sizey)
            print "new.shape", new.shape
            print "img.shape", img.shape
            print ody,":", doy, ", ",odx,":", dox
            print "..."
            new[odx:dox, ody:doy] = img
            filename = gen_filename(maxy, minx, miny, maxx)
            misc.imsave(directory + '/' + filename, new)
def resize2(self, size):
self.__image = misc.imresize(self.__image__, size)
return self.__image__
    def kkm2(self):
        """Scalar (per-pixel) variant of the KMM-style thinning in KKM.

        Uses the same `czworki` / `wyciecia` lookup tables, but steps 3-6
        visit the marked pixels one at a time via `sprawdzarka`, which
        encodes a pixel's 8-neighbourhood as a weighted sum in [0, 255].
        The image is padded by one pixel (via __expand) so 3x3 windows never
        fall off the array; the pad is stripped again at the end and the
        result is written back as a 3-channel image, then inverted.
        """
        czworki = [3, 6, 7, 12, 14, 15, 24, 28, 30, 48, 56, 60, 96,
                   112, 120, 129, 131, 135, 192, 193, 195, 224, 225, 240]
        wyciecia = [3, 5, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31, 48, 52, 53, 54, 55, 56, 60, 61, 62, 63, 65, 67, 69, 71, 77, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 95, 97, 99, 101, 103, 109, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 123, 124, 125, 126, 127, 131, 133, 135, 141, 143, 149, 151, 157, 159, 181, 183, 189, 191, 192, 193, 195, 197, 199, 205, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 219, 220, 221, 222, 223, 224, 225, 227, 229, 231, 237, 239, 240, 241, 243, 244, 245, 246, 247, 248, 249, 251, 252, 253, 254, 255]
        #sprawdzarka = [[128, 64, 32], [1, 0, 16], [2, 4, 8]]
        def sprawdzarka(obj, p):
            # Weighted 8-neighbourhood code of pixel p (reads the enclosing `data`).
            tmp = 1 * ((data[p[0] - 1:p[0] + 2, p[1] - 1:p[1] + 2]) > 0)
            macierz = np.array([[128, 64, 32], [1, 0, 16], [2, 4, 8]])
            #macierz = np.array([[128, 1, 2], [64,0,4], [32,16,8]])
            suma = (tmp * macierz).sum()
            # print "DEBUG"
            # print p
            # pprint(data[p[0]-1:p[0]+2,p[1]-1:p[1]+2])
            # pprint(tmp)
            # print suma
            return suma
        data = self.__expand(self.__image, 1)[:, :, 0]
        data = 1 * (data < 127)
        # Zero the one-pixel border so the 3x3 windows stay in bounds.
        data[0, :] = 0
        data[-1, :] = 0
        data[:, 0] = 0
        data[:, -1] = 0
        old = np.zeros(data.shape)
        DEBUG = True
        # Iterate the six thinning steps until a fixed point.
        while not (old == data).all():
            print "iteracja"
            old = data.copy()
            # step 1 (krok 1): mark pixels with a 4-connected background neighbour
            sasiedzi = 1 * (data[1:-1, :-2] == 0) + 1 * (data[1:-1, 2:] == 0) +\
                1 * (data[:-2, 1:-1] == 0) + 1 * (data[2:, 1:-1] == 0)
            sasiedzi = (sasiedzi > 0)
            sasiedzi = (data[1:-1, 1:-1] == 1) * sasiedzi
            data[1:-1, 1:-1] = data[1:-1, 1:-1] + sasiedzi
            if DEBUG:
                print "Krok 1"
                pprint(data)
            # step 2 (krok 2): mark pixels with a diagonal background neighbour
            sasiedzi = 1 * (data[:-2, :-2] == 0) + 1 * (data[2:, 2:] == 0) +\
                1 * (data[:-2, 2:] == 0) + 1 * (data[2:, :-2] == 0)
            sasiedzi = (sasiedzi > 0)
            sasiedzi = (data[1:-1, 1:-1] == 1) * sasiedzi
            data[1:-1, 1:-1] = data[1:-1, 1:-1] + sasiedzi * 2.0
            if DEBUG:
                print "Krok 2"
                pprint(data)
            # step 3 (krok 3): 2-pixels whose neighbourhood code is in `czworki` become 4
            # data2 = data.copy()
            tmp = np.transpose((data == 2).nonzero())
            for d in tmp:
                if sprawdzarka(self, d) in czworki:
                    data[d[0], d[1]] = 4
            if DEBUG:
                print "Krok 3"
                pprint(data)
            # step 4 (krok 4): delete 4-pixels whose code is in `wyciecia`, else demote to 1
            #data2 = data.copy()
            tmp = np.transpose((data == 4).nonzero())
            for c in tmp:
                if sprawdzarka(self, c) not in wyciecia:
                    data[c[0], c[1]] = 1
                else:
                    data[c[0], c[1]] = 0
            if DEBUG:
                print "Krok 4"
                pprint(data)
            # step 5 (krok 5): same treatment for the remaining 2-pixels
            #data2 = data.copy()
            tmp = np.transpose((data == 2).nonzero())
            for c in tmp:
                if sprawdzarka(self, c) not in wyciecia:
                    data[c[0], c[1]] = 1
                else:
                    data[c[0], c[1]] = 0
            if DEBUG:
                print "Krok 5"
                pprint(data)
            # step 6 (krok 6): same treatment for the 3-pixels
            #data2 = data.copy()
            tmp = np.transpose((data == 3).nonzero())
            for c in tmp:
                if sprawdzarka(self, c) not in wyciecia:
                    data[c[0], c[1]] = 1
                else:
                    data[c[0], c[1]] = 0
            if DEBUG:
                print "Krok 6"
                pprint(data)
        # print type(data)
        # print "Po kkm2"
        # Strip the pad, rescale to 0/255 and duplicate into 3 channels.
        data = data[1:-1, 1:-1] * 255
        wynik = []
        for i in data:
            tmp = []
            for j in i:
                tmp.append([j, j, j])
            wynik.append(tmp)
        self.__image = np.array(wynik)
        self.negative()
        print "A"
        pprint(data)
        pprint(self.__image)
        print "B"
    def shape(self):
        """Return the shape tuple of the backing image array."""
        return self.__image.shape
| [
"[email protected]"
]
| |
4e0fe6547a110182f76e9ab8ad0eb89cb972a754 | e9ffc75f2de77f76bcd10c28195e7b8dcc01db4b | /config/settings/test.py | 95f766b07155f793e1fc376098106f9cf3549397 | [
"MIT"
]
| permissive | Parkyes90/pystagram | c2d4b9e38a2065e5d1a8cb4eaa330640efe60a4e | 54a497f4acb70eb4d4288816d9ae460ec5722640 | refs/heads/master | 2023-08-04T06:20:40.536836 | 2023-07-27T10:12:01 | 2023-07-27T10:12:01 | 133,020,662 | 0 | 0 | MIT | 2023-07-27T10:12:02 | 2018-05-11T09:38:24 | JavaScript | UTF-8 | Python | false | false | 2,024 | py | """
With these settings, tests run faster.
"""
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env("DJANGO_SECRET_KEY", default="NxsCski0lpOnFecEQq1YyJgwECIifBOjpuA0ftEf6UOKcGY4z88okutqp0T5rRZF")
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache", "LOCATION": ""
}
}
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]["OPTIONS"]["debug"] = DEBUG # noqa F405
TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = "localhost"
# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 1025
# Your stuff...
# ------------------------------------------------------------------------------
| [
"[email protected]"
]
| |
ec0bfed2e04944f6a53b48dd4438719b1733cb75 | 699ff10c347dc9b6d5af7f531a1c941dbfecd558 | /leetcode/python/232-implement-queue-using-stacks.py | cfbd49aa1d50363b1d16e3ac48c0bcd623bf7032 | []
| no_license | iampkuhz/OnlineJudge_cpp | 71a7637c54d81be2aa066a6132aab31b798bbe6b | 737b9bac5a73c319e46cda8c3e9d729f734d7792 | refs/heads/master | 2021-01-10T10:16:37.589855 | 2017-03-06T12:45:20 | 2017-03-06T12:45:20 | 24,891,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,886 | py | #!/usr/bin/env python
# encoding: utf-8
"""
Implement the following operations of a queue using stacks.
push(x) -- Push element x to the back of queue.
pop() -- Removes the element from in front of queue.
peek() -- Get the front element.
empty() -- Return whether the queue is empty.
Notes:
You must use only standard operations of a stack -- which means only push to top, peek/pop from top, size, and is empty operations are valid.
Depending on your language, stack may not be supported natively. You may simulate a stack by using a list or deque (double-ended queue), as long as you use only standard operations of a stack.
You may assume that all operations are valid (for example, no pop or peek operations will be called on an empty queue).
"""
# Accepted on the 2nd submission; roughly the same speed, 40-44 ms
class Queue(object):
    """FIFO queue backed by a single list.

    Bug fix: the original popped and peeked at the *tail* of the list
    (``self.ls.pop()`` / ``self.ls[-1]``), which is LIFO stack behavior.
    A queue must serve the element pushed first, i.e. the head of the list.
    (Note the single-list approach already bends the "stack ops only"
    constraint of the exercise; the two-stack class below respects it.)
    """
    def __init__(self):
        self.ls = []
    def push(self, x):
        """Push element x to the back of the queue."""
        self.ls.append(x)
    def pop(self):
        """Remove and return the front element (O(n) due to the list shift)."""
        return self.ls.pop(0)
    def peek(self):
        """Return the front element without removing it."""
        return self.ls[0]
    def empty(self):
        """Return whether the queue holds no elements."""
        return len(self.ls) == 0
# Accepted on the 3rd submission, 36-44 ms
class Queue(object):
    """Amortized O(1) FIFO queue built from two LIFO stacks.

    Pushes land on the inbox stack; pops and peeks are served from the
    outbox stack, which is refilled (reversing order exactly once per
    element) whenever it runs dry.
    """
    def __init__(self):
        """initialize your data structure here."""
        self.ins = []
        self.out = []
    def conv(self):
        # Drain the inbox onto the outbox; each element is reversed once.
        while self.ins:
            self.out.append(self.ins.pop())
    def push(self, x):
        """Push element x onto the back of the queue."""
        self.ins.append(x)
    def pop(self):
        """Remove and return the front element."""
        if not self.out:
            self.conv()
        return self.out.pop()
    def peek(self):
        """Return the front element without removing it."""
        if not self.out:
            self.conv()
        return self.out[-1]
    def empty(self):
        """Return whether the queue holds no elements."""
        return not self.ins and not self.out
| [
"[email protected]"
]
| |
168a729a213cb05a64c5b3b4dc1ab8aa2155d254 | ac9e892c02af18cea990bb0a3f60071b34a03194 | /pytorch_pfn_extras/training/triggers/manual_schedule_trigger.py | fc2db995b809735e7cefe6fc0d8df2ffd185d4ee | [
"MIT"
]
| permissive | limsijie93/pytorch-pfn-extras | 1323e796d59fe113ee86f631cc65ad44c7914a77 | 4b675fce8f4a420d87f1685423a9e62dbe598700 | refs/heads/master | 2022-09-18T09:18:25.459126 | 2020-06-04T04:43:47 | 2020-06-04T04:43:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,131 | py | class ManualScheduleTrigger:
"""Trigger invoked at specified point(s) of iterations or epochs.
This trigger accepts iterations or epochs indicated by given point(s).
There are two ways to specify the point(s): iteration and epoch.
``iteration`` means the number of updates, while ``epoch`` means the number
of sweeps over the training dataset. Fractional values are allowed
if the point is a number of epochs; the trigger uses the ``iteration``
and ``epoch_detail`` attributes defined by the updater.
Args:
points (int, float, or list of int or float): time of the trigger.
Must be an integer or list of integer if unit is ``'iteration'``.
unit (str): Unit of the time specified by ``points``. It must be
either ``'iteration'`` or ``'epoch'``.
Attributes:
finished (bool): Flag that indicates whether or not this trigger will
fire in the future. This flag is used to determine if the extension
should be initialized after resume.
"""
def __init__(self, points, unit):
if unit not in ('epoch', 'iteration'):
raise ValueError(
'Trigger unit must be either \'epoch\' or \'iteration\'.')
self.points = (points if isinstance(points, list) else [points])
self.unit = unit
self.finished = False
self._previous_iteration = 0
self._previous_epoch_detail = 0.
def __call__(self, manager):
"""Decides whether the extension should be called on this iteration.
Args:
manager (~pytorch_pfn_extras.training.ExtensionsManager):
Manager object that this trigger is associated with.
The updater associated with this manager is used to
determine if the trigger should fire.
Returns:
bool: True if the corresponding extension should be invoked in this
iteration.
"""
updater = manager.updater
if self.unit == 'epoch':
epoch_detail = updater.epoch_detail
previous_epoch_detail = self._previous_epoch_detail
# if previous_epoch_detail is invalid value,
# use the value of updater.
if previous_epoch_detail < 0:
previous_epoch_detail = updater.previous_epoch_detail
fire = any(
previous_epoch_detail < p <= epoch_detail
for p in self.points)
if hasattr(self, '_finished_is_tmp'):
del self._finished_is_tmp
if epoch_detail >= max(self.points):
self.finished = True
if fire and epoch_detail >= max(self.points):
self.finished = True
else:
iteration = updater.iteration
previous_iteration = self._previous_iteration
# if previous_iteration is invalid value,
# guess it from current iteration.
if previous_iteration < 0:
previous_iteration = iteration - 1
fire = any(
previous_iteration < p <= iteration
for p in self.points)
if hasattr(self, '_finished_is_tmp'):
del self._finished_is_tmp
if iteration >= max(self.points):
self.finished = True
if fire and iteration >= max(self.points):
self.finished = True
# save current values
self._previous_iteration = updater.iteration
if hasattr(updater, 'epoch_detail'):
self._previous_epoch_detail = updater.epoch_detail
return fire
def state_dict(self):
state = {}
state['_previous_iteration'] = self._previous_iteration
state['_previous_epoch_detail'] = self._previous_epoch_detail
state['finished'] = self.finished
return state
def load_state_dict(self, to_load):
self._previous_iteration = to_load['_previous_iteration']
self._previous_epoch_detail = to_load['_previous_epoch_detail']
self.finished = to_load['finished']
| [
"[email protected]"
]
| |
640acd474ccc2667449fec3953056cfc3acb5173 | 3e74c0b272bfd7981454953aeef96ab2f5c59c69 | /benchmarking/timeIt.py | 8d8650898c5cef602fc4840308c61e368cda7614 | []
| no_license | LokeshKD/DSPython | 09e2e086182d1d0e73f85cc88611b7aa446d1253 | f657678ac2cc1855c4d13bdc66d790a1022b6640 | refs/heads/master | 2023-04-16T13:58:02.500681 | 2021-04-17T17:04:51 | 2021-04-17T17:04:51 | 357,611,322 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | #
def my_function():
    """Benchmark body: raise and immediately swallow a ZeroDivisionError."""
    try:
        1 / 0
    except ZeroDivisionError:
        return None
if __name__ == "__main__":
    import timeit
    # timeit exec's the statement in a fresh namespace, so the target must be
    # imported by name in the setup snippet.
    setup = "from __main__ import my_function"
    print(timeit.timeit("my_function()", setup=setup))
| [
"[email protected]"
]
| |
2718b4a553b211d4a9237d21b069590a78c1b9fc | df5d82456b26461643fe0f3c0d7f4b34a521afae | /volt/controllers.py | c8331417109714ac41cc8880e0b87eeefd6562ad | []
| no_license | ali96343/ombott-test | b5bfbc4e079ca3a50b40b210438405acdba65765 | 7d7c46d5cd5c73a92fae08247917ac988b83b9c7 | refs/heads/master | 2023-08-21T15:02:41.614957 | 2021-10-07T06:28:59 | 2021-10-07T06:28:59 | 380,330,616 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,870 | py | #
# py4web app, AI-biorex ported 26.04.2021 14:45:45 UTC+3, src: https://github.com/themesberg/volt-bootstrap-5-dashboard
# https://github.com/ali96343/facep4w
#
import os, json, uuid
import ombott as bottle
from py4web import action, request, response, abort, redirect, URL, Field
from py4web.utils.form import Form, FormStyleBulma
from py4web.utils.grid import Grid
from py4web.utils.publisher import Publisher, ALLOW_ALL_POLICY
from pydal.validators import IS_NOT_EMPTY, IS_INT_IN_RANGE, IS_IN_SET, IS_IN_DB
from py4web.core import Template, Reloader
from py4web.utils.dbstore import DBStore
from py4web import Session, Cache, Translator, Flash, DAL
from py4web.utils.url_signer import URLSigner
from yatl.helpers import INPUT, H1, HTML, BODY, A, DIV, SPAN, P
from .common import db, session, T, cache, authenticated, unauthenticated, auth
from .settings import APP_NAME
# ---------------------- Global -----------------------------------------------------
# exposes services necessary to access the db.thing via ajax
publisher = Publisher(db, policy=ALLOW_ALL_POLICY)
url_signer = URLSigner(session)
# App-wide constants: debug flag, app name, and static asset path prefix.
Glb= {'debug': True , 'my_app_name': APP_NAME, 'tte_path': '/static/tte' }
# ---------------------- Utils -------------------------------------------------------
def insert_form_vars(myform, mytable):
    """Insert an accepted form's fields into *mytable*.

    A row is inserted only when the form's 'f0' field is present and
    non-empty.  Debug tracing is controlled by ``Glb['debug']``.

    Returns the new row id, or None when nothing was inserted.
    """
    row_id, table_row = None, None
    if Glb['debug']:
        print("app:", Glb['my_app_name'])
        # Plain loop instead of the original side-effect list comprehension.
        for k, v in myform.vars.items():
            if k != '_formkey':
                print(f' {k}: {v}')
    f0_fld = myform.vars.get('f0')
    if f0_fld:  # not None and non-empty
        row_id = mytable.insert(**mytable._filter_fields(myform.vars))
        db.commit()
    if row_id is not None:
        table_row = mytable(row_id)
    if Glb['debug']:
        if table_row is not None:
            print(f' inserted: \"{f0_fld}\" into {mytable.f0}, id = {row_id}')
            print(f" select : \"{table_row.f0}\" from {mytable.f0}, id = {row_id}")
            print()
        else:
            print(f" no entry inserted: (f0_fld is None) or (len(f0_fld) == 0)")
            print()
    return row_id
@action('callback', method="GET")
# Note that we do not use a template. This is a JSON API, not a "web page".
@action.uses(url_signer.verify())
def callback():
    """Signed JSON endpoint: echo back the 'echo' query parameter."""
    params = dict(request.params)
    print("Called with:", params)
    return dict(messages=request.params.echo)
#
def json2user(mess='mymess', icon_type='warning', js_alert='sweet2'):
    """Serialize a user-facing notification as a JSON response body."""
    response.headers["Content-Type"] = "application/json"
    payload = {'messages': f'{mess}', 'icon_type': icon_type, 'js_alert': js_alert}
    return json.dumps(payload)
# ---------------------- Controllers ------------------------------------------------
@action('X404', method=["GET", "POST"] )
@action.uses(db, session, T, Template('404.html', delimiters='[%[ ]]',))
def X404():
    """Render the 404 error page.

    NOTE: ``return locals()`` exposes every local name to the template, so
    variable names here are part of the template contract -- do not rename.
    """
    ctrl_info= { 'c':'X404', 'v':'404.html' }
    messages = ['X404', '404.html']
    #
    ctrl_template_url = "\'" + URL('X404' ) + "\'"
    return locals()
@action('X500', method=["GET", "POST"] )
@action.uses(db, session, T, Template('500.html', delimiters='[%[ ]]',))
def X500():
    """Render the 500 error page.

    NOTE: ``return locals()`` exposes every local name to the template, so
    variable names here are part of the template contract -- do not rename.
    """
    ctrl_info= { 'c':'X500', 'v':'500.html' }
    messages = ['X500', '500.html']
    #
    ctrl_template_url = "\'" + URL('X500' ) + "\'"
    return locals()
@action('lock', method=["GET", "POST"] )
@action.uses(db, session, T, Template('lock.html', delimiters='[%[ ]]',))
def lock():
    """Serve lock.html; validate and insert its AJAX-posted form.

    Accepted forms reply with a JSON notification; otherwise the template is
    rendered from ``locals()``, so local names here are part of the template
    contract -- do not rename.
    """
    ctrl_info= { 'c':'lock', 'v':'lock.html' }
    messages = ['lock', 'lock.html']
    #
    ctrl_template_url = "\'" + URL('lock' ) + "\'"
    #
    flock0= Form(db.dflock0, dbio=False, formstyle=FormStyleBulma)
    if flock0.accepted:
        icon_type ='success' if insert_form_vars(flock0, db.dflock0) else 'info'
        return json2user(mess = str( flock0.form_name ), icon_type=icon_type )
    elif flock0.errors:
        print("flock0 has errors: %s" % (flock0.errors))
        return json2user(mess = str(flock0.form_name), icon_type = 'error')
    return locals()
@action('index', method=["GET", "POST"] )
@action.uses(db, session, T, Template('index.html', delimiters='[%[ ]]',))
def index():
    """Serve index.html; validate and insert its AJAX-posted form.

    Accepted forms reply with a JSON notification; otherwise the template is
    rendered from ``locals()``, so local names here are part of the template
    contract -- do not rename.
    """
    ctrl_info= { 'c':'index', 'v':'index.html' }
    messages = ['index', 'index.html']
    #
    ctrl_template_url = "\'" + URL('index' ) + "\'"
    #
    findex0= Form(db.dfindex0, dbio=False, formstyle=FormStyleBulma)
    if findex0.accepted:
        icon_type ='success' if insert_form_vars(findex0, db.dfindex0) else 'info'
        return json2user(mess = str( findex0.form_name ), icon_type=icon_type )
    elif findex0.errors:
        print("findex0 has errors: %s" % (findex0.errors))
        return json2user(mess = str(findex0.form_name), icon_type = 'error')
    return locals()
@action('forms', method=["GET", "POST"] )
@action.uses(db, session, T, Template('forms.html', delimiters='[%[ ]]',))
def forms():
    """Serve forms.html; validate and insert its AJAX-posted form.

    Accepted forms reply with a JSON notification; otherwise the template is
    rendered from ``locals()``, so local names here are part of the template
    contract -- do not rename.
    """
    ctrl_info= { 'c':'forms', 'v':'forms.html' }
    messages = ['forms', 'forms.html']
    #
    ctrl_template_url = "\'" + URL('forms' ) + "\'"
    #
    fforms0= Form(db.dfforms0, dbio=False, formstyle=FormStyleBulma)
    if fforms0.accepted:
        icon_type ='success' if insert_form_vars(fforms0, db.dfforms0) else 'info'
        return json2user(mess = str( fforms0.form_name ), icon_type=icon_type )
    elif fforms0.errors:
        print("fforms0 has errors: %s" % (fforms0.errors))
        return json2user(mess = str(fforms0.form_name), icon_type = 'error')
    return locals()
@action('modals', method=["GET", "POST"] )
@action.uses(db, session, T, Template('modals.html', delimiters='[%[ ]]',))
def modals():
    """Serve modals.html; validate and insert any of its three AJAX-posted forms.

    Each accepted form replies with a JSON notification; otherwise the
    template is rendered from ``locals()``, so local names here are part of
    the template contract -- do not rename.
    """
    ctrl_info= { 'c':'modals', 'v':'modals.html' }
    messages = ['modals', 'modals.html']
    #
    ctrl_template_url = "\'" + URL('modals' ) + "\'"
    #
    fmodals0= Form(db.dfmodals0, dbio=False, formstyle=FormStyleBulma)
    if fmodals0.accepted:
        icon_type ='success' if insert_form_vars(fmodals0, db.dfmodals0) else 'info'
        return json2user(mess = str( fmodals0.form_name ), icon_type=icon_type )
    elif fmodals0.errors:
        print("fmodals0 has errors: %s" % (fmodals0.errors))
        return json2user(mess = str(fmodals0.form_name), icon_type = 'error')
    #
    fmodals1= Form(db.dfmodals1, dbio=False, formstyle=FormStyleBulma)
    if fmodals1.accepted:
        icon_type ='success' if insert_form_vars(fmodals1, db.dfmodals1) else 'info'
        return json2user(mess = str( fmodals1.form_name ), icon_type=icon_type )
    elif fmodals1.errors:
        print("fmodals1 has errors: %s" % (fmodals1.errors))
        return json2user(mess = str(fmodals1.form_name), icon_type = 'error')
    #
    fmodals2= Form(db.dfmodals2, dbio=False, formstyle=FormStyleBulma)
    if fmodals2.accepted:
        icon_type ='success' if insert_form_vars(fmodals2, db.dfmodals2) else 'info'
        return json2user(mess = str( fmodals2.form_name ), icon_type=icon_type )
    elif fmodals2.errors:
        print("fmodals2 has errors: %s" % (fmodals2.errors))
        return json2user(mess = str(fmodals2.form_name), icon_type = 'error')
    return locals()
@action('buttons', method=["GET", "POST"] )
@action.uses(db, session, T, Template('buttons.html', delimiters='[%[ ]]',))
def buttons():
    """Serve buttons.html; validate and insert its AJAX-posted form.

    Accepted forms reply with a JSON notification; otherwise the template is
    rendered from ``locals()``, so local names here are part of the template
    contract -- do not rename.
    """
    ctrl_info= { 'c':'buttons', 'v':'buttons.html' }
    messages = ['buttons', 'buttons.html']
    #
    ctrl_template_url = "\'" + URL('buttons' ) + "\'"
    #
    fbuttons0= Form(db.dfbuttons0, dbio=False, formstyle=FormStyleBulma)
    if fbuttons0.accepted:
        icon_type ='success' if insert_form_vars(fbuttons0, db.dfbuttons0) else 'info'
        return json2user(mess = str( fbuttons0.form_name ), icon_type=icon_type )
    elif fbuttons0.errors:
        print("fbuttons0 has errors: %s" % (fbuttons0.errors))
        return json2user(mess = str(fbuttons0.form_name), icon_type = 'error')
    return locals()
@action('signXin', method=["GET", "POST"] )
@action.uses(db, session, T, Template('sign-in.html', delimiters='[%[ ]]',))
def signXin():
    """Serve sign-in.html and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'signXin', 'v':'sign-in.html' }
    messages = ['signXin', 'sign-in.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('signXin' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fsignXin0= Form(db.dfsignXin0, dbio=False, formstyle=FormStyleBulma)
    if fsignXin0.accepted:
        icon_type ='success' if insert_form_vars(fsignXin0, db.dfsignXin0) else 'info'
        return json2user(mess = str( fsignXin0.form_name ), icon_type=icon_type )
    elif fsignXin0.errors:
        print("fsignXin0 has errors: %s" % (fsignXin0.errors))
        return json2user(mess = str(fsignXin0.form_name), icon_type = 'error')
    return locals()
@action('signXup', method=["GET", "POST"] )
@action.uses(db, session, T, Template('sign-up.html', delimiters='[%[ ]]',))
def signXup():
    """Serve sign-up.html and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'signXup', 'v':'sign-up.html' }
    messages = ['signXup', 'sign-up.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('signXup' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fsignXup0= Form(db.dfsignXup0, dbio=False, formstyle=FormStyleBulma)
    if fsignXup0.accepted:
        icon_type ='success' if insert_form_vars(fsignXup0, db.dfsignXup0) else 'info'
        return json2user(mess = str( fsignXup0.form_name ), icon_type=icon_type )
    elif fsignXup0.errors:
        print("fsignXup0 has errors: %s" % (fsignXup0.errors))
        return json2user(mess = str(fsignXup0.form_name), icon_type = 'error')
    return locals()
@action('settings', method=["GET", "POST"] )
@action.uses(db, session, T, Template('settings.html', delimiters='[%[ ]]',))
def settings():
    """Serve settings.html and process its two forms.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'settings', 'v':'settings.html' }
    messages = ['settings', 'settings.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('settings' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fsettings0= Form(db.dfsettings0, dbio=False, formstyle=FormStyleBulma)
    if fsettings0.accepted:
        icon_type ='success' if insert_form_vars(fsettings0, db.dfsettings0) else 'info'
        return json2user(mess = str( fsettings0.form_name ), icon_type=icon_type )
    elif fsettings0.errors:
        print("fsettings0 has errors: %s" % (fsettings0.errors))
        return json2user(mess = str(fsettings0.form_name), icon_type = 'error')
    #
    fsettings1= Form(db.dfsettings1, dbio=False, formstyle=FormStyleBulma)
    if fsettings1.accepted:
        icon_type ='success' if insert_form_vars(fsettings1, db.dfsettings1) else 'info'
        return json2user(mess = str( fsettings1.form_name ), icon_type=icon_type )
    elif fsettings1.errors:
        print("fsettings1 has errors: %s" % (fsettings1.errors))
        return json2user(mess = str(fsettings1.form_name), icon_type = 'error')
    return locals()
@action('dashboard', method=["GET", "POST"] )
@action.uses(db, session, T, Template('dashboard.html', delimiters='[%[ ]]',))
def dashboard():
    """Serve dashboard.html: expose table rows and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'dashboard', 'v':'dashboard.html' }
    messages = ['dashboard', 'dashboard.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('dashboard' ) + "\'"
    # all rows of tdashboard0, exposed to the template via locals()
    rows_tdashboard0= db(db.tdashboard0).select()
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fdashboard0= Form(db.dfdashboard0, dbio=False, formstyle=FormStyleBulma)
    if fdashboard0.accepted:
        icon_type ='success' if insert_form_vars(fdashboard0, db.dfdashboard0) else 'info'
        return json2user(mess = str( fdashboard0.form_name ), icon_type=icon_type )
    elif fdashboard0.errors:
        print("fdashboard0 has errors: %s" % (fdashboard0.errors))
        return json2user(mess = str(fdashboard0.form_name), icon_type = 'error')
    return locals()
@action('typography', method=["GET", "POST"] )
@action.uses(db, session, T, Template('typography.html', delimiters='[%[ ]]',))
def typography():
    """Serve typography.html and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'typography', 'v':'typography.html' }
    messages = ['typography', 'typography.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('typography' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    ftypography0= Form(db.dftypography0, dbio=False, formstyle=FormStyleBulma)
    if ftypography0.accepted:
        icon_type ='success' if insert_form_vars(ftypography0, db.dftypography0) else 'info'
        return json2user(mess = str( ftypography0.form_name ), icon_type=icon_type )
    elif ftypography0.errors:
        print("ftypography0 has errors: %s" % (ftypography0.errors))
        return json2user(mess = str(ftypography0.form_name), icon_type = 'error')
    return locals()
@action('transactions', method=["GET", "POST"] )
@action.uses(db, session, T, Template('transactions.html', delimiters='[%[ ]]',))
def transactions():
    """Serve transactions.html: expose table rows and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'transactions', 'v':'transactions.html' }
    messages = ['transactions', 'transactions.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('transactions' ) + "\'"
    # all rows of ttransactions0, exposed to the template via locals()
    rows_ttransactions0= db(db.ttransactions0).select()
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    ftransactions0= Form(db.dftransactions0, dbio=False, formstyle=FormStyleBulma)
    if ftransactions0.accepted:
        icon_type ='success' if insert_form_vars(ftransactions0, db.dftransactions0) else 'info'
        return json2user(mess = str( ftransactions0.form_name ), icon_type=icon_type )
    elif ftransactions0.errors:
        print("ftransactions0 has errors: %s" % (ftransactions0.errors))
        return json2user(mess = str(ftransactions0.form_name), icon_type = 'error')
    return locals()
@action('notifications', method=["GET", "POST"] )
@action.uses(db, session, T, Template('notifications.html', delimiters='[%[ ]]',))
def notifications():
    """Serve notifications.html and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'notifications', 'v':'notifications.html' }
    messages = ['notifications', 'notifications.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('notifications' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fnotifications0= Form(db.dfnotifications0, dbio=False, formstyle=FormStyleBulma)
    if fnotifications0.accepted:
        icon_type ='success' if insert_form_vars(fnotifications0, db.dfnotifications0) else 'info'
        return json2user(mess = str( fnotifications0.form_name ), icon_type=icon_type )
    elif fnotifications0.errors:
        print("fnotifications0 has errors: %s" % (fnotifications0.errors))
        return json2user(mess = str(fnotifications0.form_name), icon_type = 'error')
    return locals()
@action('upgradeXtoXpro', method=["GET", "POST"] )
@action.uses(db, session, T, Template('upgrade-to-pro.html', delimiters='[%[ ]]',))
def upgradeXtoXpro():
    """Serve upgrade-to-pro.html (read-only page: no forms).

    Exposes all rows of tupgradeXtoXpro0 to the template via locals().
    """
    ctrl_info= { 'c':'upgradeXtoXpro', 'v':'upgrade-to-pro.html' }
    messages = ['upgradeXtoXpro', 'upgrade-to-pro.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('upgradeXtoXpro' ) + "\'"
    rows_tupgradeXtoXpro0= db(db.tupgradeXtoXpro0).select()
    return locals()
@action('resetXpassword', method=["GET", "POST"] )
@action.uses(db, session, T, Template('reset-password.html', delimiters='[%[ ]]',))
def resetXpassword():
    """Serve reset-password.html and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'resetXpassword', 'v':'reset-password.html' }
    messages = ['resetXpassword', 'reset-password.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('resetXpassword' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fresetXpassword0= Form(db.dfresetXpassword0, dbio=False, formstyle=FormStyleBulma)
    if fresetXpassword0.accepted:
        icon_type ='success' if insert_form_vars(fresetXpassword0, db.dfresetXpassword0) else 'info'
        return json2user(mess = str( fresetXpassword0.form_name ), icon_type=icon_type )
    elif fresetXpassword0.errors:
        print("fresetXpassword0 has errors: %s" % (fresetXpassword0.errors))
        return json2user(mess = str(fresetXpassword0.form_name), icon_type = 'error')
    return locals()
@action('forgotXpassword', method=["GET", "POST"] )
@action.uses(db, session, T, Template('forgot-password.html', delimiters='[%[ ]]',))
def forgotXpassword():
    """Serve forgot-password.html and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'forgotXpassword', 'v':'forgot-password.html' }
    messages = ['forgotXpassword', 'forgot-password.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('forgotXpassword' ) + "\'"
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fforgotXpassword0= Form(db.dfforgotXpassword0, dbio=False, formstyle=FormStyleBulma)
    if fforgotXpassword0.accepted:
        icon_type ='success' if insert_form_vars(fforgotXpassword0, db.dfforgotXpassword0) else 'info'
        return json2user(mess = str( fforgotXpassword0.form_name ), icon_type=icon_type )
    elif fforgotXpassword0.errors:
        print("fforgotXpassword0 has errors: %s" % (fforgotXpassword0.errors))
        return json2user(mess = str(fforgotXpassword0.form_name), icon_type = 'error')
    return locals()
@action('bootstrapXtables', method=["GET", "POST"] )
@action.uses(db, session, T, Template('bootstrap-tables.html', delimiters='[%[ ]]',))
def bootstrapXtables():
    """Serve bootstrap-tables.html: expose two row sets and process its form.

    Accepted POSTs reply via json2user() ('success' when
    insert_form_vars() returns truthy, else 'info'); validation errors
    reply 'error'.  Otherwise returns locals() for the template.
    """
    ctrl_info= { 'c':'bootstrapXtables', 'v':'bootstrap-tables.html' }
    messages = ['bootstrapXtables', 'bootstrap-tables.html']
    #
    # this controller's URL wrapped in literal quotes for direct use in the template
    ctrl_template_url = "\'" + URL('bootstrapXtables' ) + "\'"
    # all rows of both demo tables, exposed to the template via locals()
    rows_tbootstrapXtables0= db(db.tbootstrapXtables0).select()
    rows_tbootstrapXtables1= db(db.tbootstrapXtables1).select()
    #
    # dbio=False: the Form only validates; insert_form_vars() performs the write
    fbootstrapXtables0= Form(db.dfbootstrapXtables0, dbio=False, formstyle=FormStyleBulma)
    if fbootstrapXtables0.accepted:
        icon_type ='success' if insert_form_vars(fbootstrapXtables0, db.dfbootstrapXtables0) else 'info'
        return json2user(mess = str( fbootstrapXtables0.form_name ), icon_type=icon_type )
    elif fbootstrapXtables0.errors:
        print("fbootstrapXtables0 has errors: %s" % (fbootstrapXtables0.errors))
        return json2user(mess = str(fbootstrapXtables0.form_name), icon_type = 'error')
    return locals()
from pydal.restapi import RestAPI, Policy

# One permissive policy shared by every table: GET is authorized for any
# pattern, and PUT/POST/DELETE are likewise authorized.
policy = Policy()
policy.set('*', 'GET', authorize=True, allowed_patterns=['*'])
policy.set('*', 'PUT', authorize=True)
policy.set('*', 'POST', authorize=True)
policy.set('*', 'DELETE', authorize=True)


@action('api/<tablename>/', method=["GET", "POST", "PUT", "DELETE"])
@action('api/<tablename>/<rec_id>', method=["GET", "POST", "PUT", "DELETE"])
def api(tablename, rec_id=None):
    """Generic REST endpoint: delegate CRUD on `tablename` to pydal's RestAPI."""
    rest = RestAPI(db, policy)
    return rest(request.method, tablename, rec_id, request.GET, request.POST)
#
# curl -X GET http://localhost:8000/volt/api/test_table/
# curl -X GET http://localhost:8000/volt/api/test_table/9
# curl -X DELETE http://localhost:8000/volt/api/test_table/2
# curl -X POST -d 'f0=1111111&f1=2222222222&f2=33333333333' http://localhost:8000/volt/api/test_table/
# curl -X PUT -d 'f0=1111111&f1=2222222222&f2=33333333333' http://localhost:8000/volt/api/test_table/9
# curl -X POST -d f0=1111111 -d f1=2222222222 -d f2=8888888888 http://localhost:8000/volt/api/test_table/
#
# pip install httpie
# http localhost:8000/volt/api/test_table/
# http localhost:8000/volt/api/test_table/9
# http -f POST localhost:8000/volt/api/test_table/ f0=111111 f1=2222222 f2=333333
# http -f DELETE localhost:8000/volt/api/test_table/2
# http -f PUT localhost:8000/volt/api/test_table/2 f0=111111 f1=2222222 f2=333333
#------------------------------------------------------------------------------------
#curl -i -X POST -H 'Content-Type: application/json' -d '{"name": "New item", "year": "2009"}' http://rest-api.io/items
#curl -i -X PUT -H 'Content-Type: application/json' -d '{"name": "Updated item", "year": "2010"}' http://rest-api.io/items/5069b47aa892630aae059584
@bottle.error(404)
def error404(error):
    """Bottle 404 handler: answer 303 See Other instead of a plain 404.

    Target selection: if the request path's first segment matches a rule
    registered in Reloader.ROUTES, redirect there; otherwise redirect to
    this app's root ("/" + Glb["my_app_name"]).  Debug chatter is
    collected and printed only when Glb["debug"] is truthy.
    """
    func_mess = []

    def check_rule(maybe_app_root):
        # True when `maybe_app_root` equals a registered route rule.
        for e in Reloader.ROUTES:
            if ('rule' in e ) and ( e["rule"] == maybe_app_root) :
                Glb["debug"] and func_mess.append(f" found_rule: {e['rule']}")
                return True
        return False

    # default redirect target: this application's root
    location = "/" + Glb["my_app_name"]
    lx = bottle.request.path.split("/", 2)
    if len(lx) >= 2 and check_rule("/" + lx[1]):
        location = "/" + lx[1]
    # this code is not necessary for modern py4web
    #
    # files_prefix = location + Glb["tte_path"]
    #
    # location_2x = location + location + "/"
    # files_prefix_2x = files_prefix + files_prefix + "/"
    #
    # def rm_bad_prefix(bad_prefix):
    #     new_location = bottle.request.path.replace(bad_prefix, "", 1)
    #     Glb["debug"] and func_mess.append(f" rm_bad_prefix: {bad_prefix}")
    #     return new_location
    #
    # if bottle.request.path.startswith(files_prefix_2x):
    #     if len(bottle.request.path) > len(files_prefix_2x):
    #         location = rm_bad_prefix(files_prefix)
    #
    # elif bottle.request.path.startswith(location_2x):
    #     if len(bottle.request.path) > len(location_2x):
    #         location = rm_bad_prefix(location)
    if Glb["debug"]:
        debug_mess = [ f"404 app=/{Glb['my_app_name']}, err_path={bottle.request.path}",
                       f" info: {error}", ]
        if len(func_mess):
            debug_mess += func_mess
        debug_mess.append(f" goto_new_path: {location}\n")
        print("\n".join(debug_mess))
    # redirect the client to the computed location
    bottle.response.status = 303
    bottle.response.set_header("Location", location)
# -------------------- tabinfo: my backend ------------------------------------
#
#from .atab_utils import mytab_grid
#from .images_utils import ima_grid
#from .upload_utils import p4wupload_file
#from .tlist_utils import tlist
#
#@unauthenticated("tabinfo", "tabinfo.html")
#def tabinfo():
# user = auth.get_user()
# message = T("Hello {first_name}".format(**user) if user else "Hello")
# menu = DIV(
# P( "test-demo for sql2table ( SQLTABLE from web2py)"),
# A( "sql2table", _role="button", _href=URL('mytab_grid', ),) ,
# A( "p4wupload_file", _role="button", _href=URL('p4wupload_file', ),) ,
# A( "tlist", _role="button", _href=URL('tlist', ),) ,
# A( "app_images", _role="button", _href=URL('ima_grid', ),) ,
# )
# return dict(message=message, menu=menu)
#
| [
"[email protected]"
]
| |
e760becc3c1eb5c190c95e6eb021d1db26b75b93 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/1/dcx.py | 6b975b5f2f2c98bbfca63125607d2e2c1d79986e | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    """Print the tokens wrapped between standalone '"' tokens.

    `lineRemaining` is a list of whitespace-split tokens.  When it starts
    and ends with a '"' token, the inner tokens are printed joined by a
    single space; a bare pair of quote tokens prints an empty line.
    Anything else -- including an empty list -- prints nothing.

    Fixes vs. original: Python-2-only `print` statements replaced with
    forms that behave identically under both Python 2 and 3, and an
    IndexError on an empty token list is now guarded against.
    """
    if not lineRemaining:
        # nothing after the keyword: nothing to print (was an IndexError)
        return
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # data to print: drop the surrounding quote tokens
            lineRemaining = lineRemaining[1:-1]
            print(' '.join(lineRemaining))
        else:
            print('')
def main(fileName):
    """Interpret `fileName` line by line.

    Every line must begin with the keyword 'dCX'; its remaining tokens
    are handed to printFunction().  Blank lines are skipped.  The first
    line with any other keyword prints 'ERROR' and stops processing.

    Fixes vs. original: Python-2-only `print` statement replaced with a
    call valid under both Python 2 and 3, and blank lines no longer
    raise IndexError.
    """
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if not data:
                # tolerate blank lines instead of crashing on data[0]
                continue
            if data[0] == 'dCX':
                printFunction(data[1:])
            else:
                print('ERROR')
                return


if __name__ == '__main__':
    main(sys.argv[1])
"[email protected]"
]
| |
466502916f65ec970df5c90a6f2d448e9050d8b0 | 09efb7c148e82c22ce6cc7a17b5140aa03aa6e55 | /env/lib/python3.6/site-packages/plotly/graph_objs/ohlc/__init__.py | 6045b7202af831a93026f1550f8e714430892557 | [
"MIT"
]
| permissive | harryturr/harryturr_garmin_dashboard | 53071a23b267116e1945ae93d36e2a978c411261 | 734e04f8257f9f84f2553efeb7e73920e35aadc9 | refs/heads/master | 2023-01-19T22:10:57.374029 | 2020-01-29T10:47:56 | 2020-01-29T10:47:56 | 235,609,069 | 4 | 0 | MIT | 2023-01-05T05:51:27 | 2020-01-22T16:00:13 | Python | UTF-8 | Python | false | false | 34,074 | py | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Stream(_BaseTraceHierarchyType):
    """Generated graph object for `ohlc.stream`: the per-trace streaming
    configuration (`maxpoints`, `token`).  Values are routed through
    plotly's validator machinery by the base class."""

    # maxpoints
    # ---------
    @property
    def maxpoints(self):
        """
        Sets the maximum number of points to keep on the plots from an
        incoming stream. If `maxpoints` is set to 50, only the newest
        50 points will be displayed on the plot.

        The 'maxpoints' property is a number and may be specified as:
          - An int or float in the interval [0, 10000]

        Returns
        -------
        int|float
        """
        return self["maxpoints"]

    @maxpoints.setter
    def maxpoints(self, val):
        self["maxpoints"] = val

    # token
    # -----
    @property
    def token(self):
        """
        The stream id number links a data trace on a plot with a
        stream. See https://plot.ly/settings for more details.

        The 'token' property is a string and must be specified as:
          - A non-empty string

        Returns
        -------
        str
        """
        return self["token"]

    @token.setter
    def token(self, val):
        self["token"] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        return "ohlc"

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.
        """

    def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
        """
        Construct a new Stream object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.ohlc.Stream
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.

        Returns
        -------
        Stream
        """
        super(Stream, self).__init__("stream")

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.ohlc.Stream
constructor must be a dict or
an instance of plotly.graph_objs.ohlc.Stream"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        # Import validators
        # -----------------
        from plotly.validators.ohlc import stream as v_stream

        # Initialize validators
        # ---------------------
        self._validators["maxpoints"] = v_stream.MaxpointsValidator()
        self._validators["token"] = v_stream.TokenValidator()

        # Populate data dict with properties
        # ----------------------------------
        _v = arg.pop("maxpoints", None)
        self["maxpoints"] = maxpoints if maxpoints is not None else _v
        _v = arg.pop("token", None)
        self["token"] = token if token is not None else _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
    """Generated graph object for `ohlc.line`: trace-wide line styling
    (`dash`, `width`), overridable per direction via the increasing /
    decreasing sub-objects."""

    # dash
    # ----
    @property
    def dash(self):
        """
        Sets the dash style of lines. Set to a dash type string
        ("solid", "dot", "dash", "longdash", "dashdot", or
        "longdashdot") or a dash length list in px (eg
        "5px,10px,2px,2px"). Note that this style setting can also be
        set per direction via `increasing.line.dash` and
        `decreasing.line.dash`.

        The 'dash' property is an enumeration that may be specified as:
          - One of the following dash styles:
                ['solid', 'dot', 'dash', 'longdash', 'dashdot', 'longdashdot']
          - A string containing a dash length list in pixels or percentages
                (e.g. '5px 10px 2px 2px', '5, 10, 2, 2', '10% 20% 40%', etc.)

        Returns
        -------
        str
        """
        return self["dash"]

    @dash.setter
    def dash(self, val):
        self["dash"] = val

    # width
    # -----
    @property
    def width(self):
        """
        [object Object] Note that this style setting can also be set
        per direction via `increasing.line.width` and
        `decreasing.line.width`.

        The 'width' property is a number and may be specified as:
          - An int or float in the interval [0, inf]

        Returns
        -------
        int|float
        """
        return self["width"]

    @width.setter
    def width(self, val):
        self["width"] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        return "ohlc"

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        dash
            Sets the dash style of lines. Set to a dash type string
            ("solid", "dot", "dash", "longdash", "dashdot", or
            "longdashdot") or a dash length list in px (eg
            "5px,10px,2px,2px"). Note that this style setting can
            also be set per direction via `increasing.line.dash`
            and `decreasing.line.dash`.
        width
            [object Object] Note that this style setting can also
            be set per direction via `increasing.line.width` and
            `decreasing.line.width`.
        """

    def __init__(self, arg=None, dash=None, width=None, **kwargs):
        """
        Construct a new Line object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.ohlc.Line
        dash
            Sets the dash style of lines. Set to a dash type string
            ("solid", "dot", "dash", "longdash", "dashdot", or
            "longdashdot") or a dash length list in px (eg
            "5px,10px,2px,2px"). Note that this style setting can
            also be set per direction via `increasing.line.dash`
            and `decreasing.line.dash`.
        width
            [object Object] Note that this style setting can also
            be set per direction via `increasing.line.width` and
            `decreasing.line.width`.

        Returns
        -------
        Line
        """
        super(Line, self).__init__("line")

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.ohlc.Line
constructor must be a dict or
an instance of plotly.graph_objs.ohlc.Line"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        # Import validators
        # -----------------
        from plotly.validators.ohlc import line as v_line

        # Initialize validators
        # ---------------------
        self._validators["dash"] = v_line.DashValidator()
        self._validators["width"] = v_line.WidthValidator()

        # Populate data dict with properties
        # ----------------------------------
        _v = arg.pop("dash", None)
        self["dash"] = dash if dash is not None else _v
        _v = arg.pop("width", None)
        self["width"] = width if width is not None else _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Increasing(_BaseTraceHierarchyType):
    """Generated graph object for `ohlc.increasing`: styling applied to
    increasing (close >= open) OHLC segments; currently just a `line`
    sub-object."""

    # line
    # ----
    @property
    def line(self):
        """
        The 'line' property is an instance of Line
        that may be specified as:
          - An instance of plotly.graph_objs.ohlc.increasing.Line
          - A dict of string/value properties that will be passed
            to the Line constructor

            Supported dict properties:

                color
                    Sets the line color.
                dash
                    Sets the dash style of lines. Set to a dash
                    type string ("solid", "dot", "dash",
                    "longdash", "dashdot", or "longdashdot") or a
                    dash length list in px (eg "5px,10px,2px,2px").
                width
                    Sets the line width (in px).

        Returns
        -------
        plotly.graph_objs.ohlc.increasing.Line
        """
        return self["line"]

    @line.setter
    def line(self, val):
        self["line"] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        return "ohlc"

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        line
            plotly.graph_objects.ohlc.increasing.Line instance or
            dict with compatible properties
        """

    def __init__(self, arg=None, line=None, **kwargs):
        """
        Construct a new Increasing object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.ohlc.Increasing
        line
            plotly.graph_objects.ohlc.increasing.Line instance or
            dict with compatible properties

        Returns
        -------
        Increasing
        """
        super(Increasing, self).__init__("increasing")

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.ohlc.Increasing
constructor must be a dict or
an instance of plotly.graph_objs.ohlc.Increasing"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        # Import validators
        # -----------------
        from plotly.validators.ohlc import increasing as v_increasing

        # Initialize validators
        # ---------------------
        self._validators["line"] = v_increasing.LineValidator()

        # Populate data dict with properties
        # ----------------------------------
        _v = arg.pop("line", None)
        self["line"] = line if line is not None else _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Hoverlabel(_BaseTraceHierarchyType):
# align
# -----
@property
def align(self):
"""
Sets the horizontal alignment of the text content within hover
label box. Has an effect only if the hover label text spans
more two or more lines
The 'align' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'right', 'auto']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["align"]
@align.setter
def align(self, val):
self["align"] = val
# alignsrc
# --------
@property
def alignsrc(self):
"""
Sets the source reference on plot.ly for align .
The 'alignsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["alignsrc"]
@alignsrc.setter
def alignsrc(self, val):
self["alignsrc"] = val
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the background color of the hover labels for this trace
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
# bgcolorsrc
# ----------
@property
def bgcolorsrc(self):
"""
Sets the source reference on plot.ly for bgcolor .
The 'bgcolorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["bgcolorsrc"]
@bgcolorsrc.setter
def bgcolorsrc(self, val):
self["bgcolorsrc"] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the border color of the hover labels for this trace.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
# bordercolorsrc
# --------------
@property
def bordercolorsrc(self):
"""
Sets the source reference on plot.ly for bordercolor .
The 'bordercolorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["bordercolorsrc"]
@bordercolorsrc.setter
def bordercolorsrc(self, val):
self["bordercolorsrc"] = val
# font
# ----
@property
def font(self):
"""
Sets the font used in hover labels.
The 'font' property is an instance of Font
that may be specified as:
- An instance of plotly.graph_objs.ohlc.hoverlabel.Font
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
colorsrc
Sets the source reference on plot.ly for color
.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for
family .
size
sizesrc
Sets the source reference on plot.ly for size
.
Returns
-------
plotly.graph_objs.ohlc.hoverlabel.Font
"""
return self["font"]
@font.setter
def font(self, val):
self["font"] = val
# namelength
# ----------
@property
def namelength(self):
"""
Sets the default length (in number of characters) of the trace
name in the hover labels for all traces. -1 shows the whole
name regardless of length. 0-3 shows the first 0-3 characters,
and an integer >3 will show the whole name if it is less than
that many characters, but if it is longer, will truncate to
`namelength - 3` characters and add an ellipsis.
The 'namelength' property is a integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [-1, 9223372036854775807]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|numpy.ndarray
"""
return self["namelength"]
@namelength.setter
def namelength(self, val):
self["namelength"] = val
# namelengthsrc
# -------------
@property
def namelengthsrc(self):
"""
Sets the source reference on plot.ly for namelength .
The 'namelengthsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["namelengthsrc"]
@namelengthsrc.setter
def namelengthsrc(self, val):
self["namelengthsrc"] = val
# split
# -----
@property
def split(self):
"""
Show hover information (open, close, high, low) in separate
labels.
The 'split' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["split"]
@split.setter
def split(self, val):
self["split"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "ohlc"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
align
Sets the horizontal alignment of the text content
within hover label box. Has an effect only if the hover
label text spans more two or more lines
alignsrc
Sets the source reference on plot.ly for align .
bgcolor
Sets the background color of the hover labels for this
trace
bgcolorsrc
Sets the source reference on plot.ly for bgcolor .
bordercolor
Sets the border color of the hover labels for this
trace.
bordercolorsrc
Sets the source reference on plot.ly for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of characters) of
the trace name in the hover labels for all traces. -1
shows the whole name regardless of length. 0-3 shows
the first 0-3 characters, and an integer >3 will show
the whole name if it is less than that many characters,
but if it is longer, will truncate to `namelength - 3`
characters and add an ellipsis.
namelengthsrc
Sets the source reference on plot.ly for namelength .
split
Show hover information (open, close, high, low) in
separate labels.
"""
def __init__(
self,
arg=None,
align=None,
alignsrc=None,
bgcolor=None,
bgcolorsrc=None,
bordercolor=None,
bordercolorsrc=None,
font=None,
namelength=None,
namelengthsrc=None,
split=None,
**kwargs
):
"""
Construct a new Hoverlabel object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.ohlc.Hoverlabel
align
Sets the horizontal alignment of the text content
within hover label box. Has an effect only if the hover
label text spans more two or more lines
alignsrc
Sets the source reference on plot.ly for align .
bgcolor
Sets the background color of the hover labels for this
trace
bgcolorsrc
Sets the source reference on plot.ly for bgcolor .
bordercolor
Sets the border color of the hover labels for this
trace.
bordercolorsrc
Sets the source reference on plot.ly for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of characters) of
the trace name in the hover labels for all traces. -1
shows the whole name regardless of length. 0-3 shows
the first 0-3 characters, and an integer >3 will show
the whole name if it is less than that many characters,
but if it is longer, will truncate to `namelength - 3`
characters and add an ellipsis.
namelengthsrc
Sets the source reference on plot.ly for namelength .
split
Show hover information (open, close, high, low) in
separate labels.
Returns
-------
Hoverlabel
"""
super(Hoverlabel, self).__init__("hoverlabel")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.ohlc.Hoverlabel
constructor must be a dict or
an instance of plotly.graph_objs.ohlc.Hoverlabel"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.ohlc import hoverlabel as v_hoverlabel
# Initialize validators
# ---------------------
self._validators["align"] = v_hoverlabel.AlignValidator()
self._validators["alignsrc"] = v_hoverlabel.AlignsrcValidator()
self._validators["bgcolor"] = v_hoverlabel.BgcolorValidator()
self._validators["bgcolorsrc"] = v_hoverlabel.BgcolorsrcValidator()
self._validators["bordercolor"] = v_hoverlabel.BordercolorValidator()
self._validators["bordercolorsrc"] = v_hoverlabel.BordercolorsrcValidator()
self._validators["font"] = v_hoverlabel.FontValidator()
self._validators["namelength"] = v_hoverlabel.NamelengthValidator()
self._validators["namelengthsrc"] = v_hoverlabel.NamelengthsrcValidator()
self._validators["split"] = v_hoverlabel.SplitValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("align", None)
self["align"] = align if align is not None else _v
_v = arg.pop("alignsrc", None)
self["alignsrc"] = alignsrc if alignsrc is not None else _v
_v = arg.pop("bgcolor", None)
self["bgcolor"] = bgcolor if bgcolor is not None else _v
_v = arg.pop("bgcolorsrc", None)
self["bgcolorsrc"] = bgcolorsrc if bgcolorsrc is not None else _v
_v = arg.pop("bordercolor", None)
self["bordercolor"] = bordercolor if bordercolor is not None else _v
_v = arg.pop("bordercolorsrc", None)
self["bordercolorsrc"] = bordercolorsrc if bordercolorsrc is not None else _v
_v = arg.pop("font", None)
self["font"] = font if font is not None else _v
_v = arg.pop("namelength", None)
self["namelength"] = namelength if namelength is not None else _v
_v = arg.pop("namelengthsrc", None)
self["namelengthsrc"] = namelengthsrc if namelengthsrc is not None else _v
_v = arg.pop("split", None)
self["split"] = split if split is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Decreasing(_BaseTraceHierarchyType):
# line
# ----
@property
def line(self):
"""
The 'line' property is an instance of Line
that may be specified as:
- An instance of plotly.graph_objs.ohlc.decreasing.Line
- A dict of string/value properties that will be passed
to the Line constructor
Supported dict properties:
color
Sets the line color.
dash
Sets the dash style of lines. Set to a dash
type string ("solid", "dot", "dash",
"longdash", "dashdot", or "longdashdot") or a
dash length list in px (eg "5px,10px,2px,2px").
width
Sets the line width (in px).
Returns
-------
plotly.graph_objs.ohlc.decreasing.Line
"""
return self["line"]
@line.setter
def line(self, val):
self["line"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "ohlc"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
line
plotly.graph_objects.ohlc.decreasing.Line instance or
dict with compatible properties
"""
def __init__(self, arg=None, line=None, **kwargs):
"""
Construct a new Decreasing object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.ohlc.Decreasing
line
plotly.graph_objects.ohlc.decreasing.Line instance or
dict with compatible properties
Returns
-------
Decreasing
"""
super(Decreasing, self).__init__("decreasing")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.ohlc.Decreasing
constructor must be a dict or
an instance of plotly.graph_objs.ohlc.Decreasing"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.ohlc import decreasing as v_decreasing
# Initialize validators
# ---------------------
self._validators["line"] = v_decreasing.LineValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("line", None)
self["line"] = line if line is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
__all__ = [
"Decreasing",
"Hoverlabel",
"Increasing",
"Line",
"Stream",
"decreasing",
"hoverlabel",
"increasing",
]
from plotly.graph_objs.ohlc import increasing
from plotly.graph_objs.ohlc import hoverlabel
from plotly.graph_objs.ohlc import decreasing
| [
"[email protected]"
]
| |
c40f6b94961010096fa1e43f69e3c26d32368c2c | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_229/ch57_2020_04_10_21_47_51_592464.py | 1b83ba556f170dae510b6bab0604f9e0d9a59eca | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,127 | py | def verifica_progressao(lista):
r = lista[1] - lista[0]
rn = lista[2] - lista[1]
continua = True
continua2 = True
i = 0
if lista[0] != 0 and lista[1] != 0:
q = lista[1]/lista[0]
qn = lista[2]/lista[1]
if qn == q:
while continua == True:
while i < len(lista):
if lista[i] != 0:
qn = lista[i+1]/lista[i]
if qn != q:
continua = False
break
else:
i += 1
break
else:
continua = False
break
if rn == r:
i = 0
while continua2 == True:
while i < len(lista):
rn = lista[i+1] - lista[i]
if rn != r:
continua2 = False
break
else:
i += 1
break
return "AG"
return "PG"
else:
return "PG"
elif rn == r:
i = 0
while continua2 == True:
while i < len(lista):
rn = lista[i+1] - lista[i]
if rn != r:
continua2 = False
break
else:
i += 1
break
return "PA"
else:
return "NA"
else:
if rn == r:
while continua2 == True:
while i < len(lista):
rn = lista[i+1] - lista[i]
if rn != r:
continua = False
break
else:
i += 1
break
return "PA"
else:
return "NA" | [
"[email protected]"
]
| |
7436c12c9b17ab4e53a8e623b20b1a24fc082352 | dfaf6f7ac83185c361c81e2e1efc09081bd9c891 | /k8sdeployment/k8sstat/python/kubernetes/test/test_v1beta1_subject_access_review_status.py | 86ad671f95cfc1388e0b498d3971b2a7c14d6e90 | [
"Apache-2.0",
"MIT"
]
| permissive | JeffYFHuang/gpuaccounting | d754efac2dffe108b591ea8722c831d979b68cda | 2c63a63c571240561725847daf1a7f23f67e2088 | refs/heads/master | 2022-08-09T03:10:28.185083 | 2022-07-20T00:50:06 | 2022-07-20T00:50:06 | 245,053,008 | 0 | 0 | MIT | 2021-03-25T23:44:50 | 2020-03-05T02:44:15 | JavaScript | UTF-8 | Python | false | false | 1,076 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.15.6
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import kubernetes.client
from kubernetes.client.models.v1beta1_subject_access_review_status import V1beta1SubjectAccessReviewStatus # noqa: E501
from kubernetes.client.rest import ApiException
class TestV1beta1SubjectAccessReviewStatus(unittest.TestCase):
"""V1beta1SubjectAccessReviewStatus unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta1SubjectAccessReviewStatus(self):
"""Test V1beta1SubjectAccessReviewStatus"""
# FIXME: construct object with mandatory attributes with example values
# model = kubernetes.client.models.v1beta1_subject_access_review_status.V1beta1SubjectAccessReviewStatus() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
49ddfd050e02c9a29ad478cd2401367cf761db46 | 9adc810b07f7172a7d0341f0b38088b4f5829cf4 | /experiments/ashvin/icml2020/murtaza/pusher/demo_state_td3.py | 1bb88eaceb172d7677d3cb4f22eca88400bb1641 | [
"MIT"
]
| permissive | Asap7772/railrl_evalsawyer | 7ee9358b5277b9ddf2468f0c6d28beb92a5a0879 | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | refs/heads/main | 2023-05-29T10:00:50.126508 | 2021-06-18T03:08:12 | 2021-06-18T03:08:12 | 375,810,557 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,221 | py | from multiworld.envs.mujoco.cameras import sawyer_init_camera_zoomed_in
from rlkit.launchers.launcher_util import run_experiment
import rlkit.misc.hyperparameter as hyp
from rlkit.launchers.experiments.murtaza.rfeatures_rl import state_td3bc_experiment
from rlkit.launchers.arglauncher import run_variants
if __name__ == "__main__":
variant = dict(
env_id='SawyerPushNIPSEasy-v0',
algo_kwargs=dict(
batch_size=1024,
num_epochs=300,
num_eval_steps_per_epoch=500,
num_expl_steps_per_train_loop=500,
num_trains_per_train_loop=1000,
min_num_steps_before_training=10000,
max_path_length=50,
),
td3_trainer_kwargs=dict(
discount=0.99,
),
td3_bc_trainer_kwargs=dict(
discount=0.99,
demo_path=["demos/icml2020/pusher/demos_action_noise_1000.npy"],
demo_off_policy_path=None,
bc_num_pretrain_steps=10000,
q_num_pretrain_steps=10000,
rl_weight=1.0,
bc_weight=0,
reward_scale=1.0,
target_update_period=2,
policy_update_period=2,
obs_key='state_observation',
env_info_key='puck_distance',
max_path_length=50,
),
replay_buffer_kwargs=dict(
max_size=int(1e6),
fraction_goals_rollout_goals=0.2,
fraction_goals_env_goals=0.5,
),
qf_kwargs=dict(
hidden_sizes=[400, 300],
),
policy_kwargs=dict(
hidden_sizes=[400, 300],
),
exploration_noise=.8,
load_demos=True,
pretrain_rl=False,
pretrain_policy=False,
es='ou',
td3_bc=True,
save_video=True,
image_env_kwargs=dict(
imsize=48,
init_camera=sawyer_init_camera_zoomed_in,
transpose=True,
normalize=True,
),
num_exps_per_instance=1,
region='us-west-2',
logger_variant=dict(
tensorboard=True,
),
)
search_space = {
'td3_bc_trainer_kwargs.use_awr': [False],
# 'td3_bc_trainer_kwargs.demo_beta':[1, 10],
'td3_bc_trainer_kwargs.bc_weight': [1, 0],
'td3_bc_trainer_kwargs.rl_weight': [1],
'algo_kwargs.num_epochs': [1000],
'algo_kwargs.num_eval_steps_per_epoch': [100],
'algo_kwargs.num_expl_steps_per_train_loop': [100],
'algo_kwargs.min_num_steps_before_training': [0],
# 'td3_bc_trainer_kwargs.add_demos_to_replay_buffer':[True, False],
# 'td3_bc_trainer_kwargs.num_trains_per_train_loop':[1000, 2000, 4000, 10000, 16000],
# 'exploration_noise':[0.1, .3, .5],
# 'pretrain_rl':[True],
# 'pretrain_policy':[False],
'pretrain_rl': [False],
'pretrain_policy': [False],
'seedid': range(5),
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
variants = []
for variant in sweeper.iterate_hyperparameters():
variants.append(variant)
run_variants(state_td3bc_experiment, variants, run_id=0)
| [
"[email protected]"
]
| |
de575d64908dac2ae371562a98245e061498181d | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/X733GROUP-MIB.py | 2e3597342dc315db7cc0b26e65652659f42fc2f8 | [
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 18,926 | py | #
# PySNMP MIB module X733GROUP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/X733GROUP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:36:00 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, Counter32, IpAddress, ModuleIdentity, Integer32, NotificationType, Bits, MibIdentifier, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, Gauge32, ObjectIdentity, Unsigned32, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "Counter32", "IpAddress", "ModuleIdentity", "Integer32", "NotificationType", "Bits", "MibIdentifier", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "Gauge32", "ObjectIdentity", "Unsigned32", "TimeTicks")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
sni = MibIdentifier((1, 3, 6, 1, 4, 1, 231))
siemensUnits = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7))
oenProductMibs = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1))
nms = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3))
ncProxy = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1))
ewsdAlarms = ModuleIdentity((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1))
if mibBuilder.loadTexts: ewsdAlarms.setLastUpdated('200110150000Z')
if mibBuilder.loadTexts: ewsdAlarms.setOrganization('Siemens AG Osterreich')
commonGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 1))
controlGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2))
summaryGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 3))
miscGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 4))
x733Group = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5))
q3Group = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6))
osGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 7))
neName = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 1, 1), DisplayString())
if mibBuilder.loadTexts: neName.setStatus('current')
managedObjectClass = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 1, 2), DisplayString())
if mibBuilder.loadTexts: managedObjectClass.setStatus('current')
notificationId = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 1, 3), DisplayString())
if mibBuilder.loadTexts: notificationId.setStatus('current')
globalAlarmIds = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8192)))
if mibBuilder.loadTexts: globalAlarmIds.setStatus('current')
setPeriod = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 60)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: setPeriod.setStatus('current')
sendSummary = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sendSummary.setStatus('current')
resendAlarm = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: resendAlarm.setStatus('current')
sendAllAlarms = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sendAllAlarms.setStatus('current')
alarmSpontan = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2))).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmSpontan.setStatus('current')
countAlarmPeriod = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: countAlarmPeriod.setStatus('current')
countAlarmSpontan = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2))).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: countAlarmSpontan.setStatus('current')
numberOfAlarms = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 3, 1), Integer32())
if mibBuilder.loadTexts: numberOfAlarms.setStatus('current')
connectionReliable = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2))))
if mibBuilder.loadTexts: connectionReliable.setStatus('current')
critical = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 3, 3), Integer32())
if mibBuilder.loadTexts: critical.setStatus('current')
major = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 3, 4), Integer32())
if mibBuilder.loadTexts: major.setStatus('current')
minor = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 3, 5), Integer32())
if mibBuilder.loadTexts: minor.setStatus('current')
timePeriod = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 4, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 60)).clone(1))
if mibBuilder.loadTexts: timePeriod.setStatus('current')
q3AlarmNumber = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 4, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: q3AlarmNumber.setStatus('current')
eventType = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 8, 9, 10, 11, 13, 15))).clone(namedValues=NamedValues(("indeterminate", 0), ("communicationsAlarm", 2), ("enviromentalAlarm", 3), ("equipmentAlarm", 4), ("integrityViolation", 5), ("operationalViolation", 8), ("physicalViolation", 9), ("processingErrorAlarm", 10), ("qualityOfServiceAlarm", 11), ("securityServiceOrMechanismViolation", 13), ("timeDomainViolation", 15))))
if mibBuilder.loadTexts: eventType.setStatus('current')
severity = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("indeterminate", 0), ("critical", 1), ("major", 2), ("minor", 3), ("warning", 4), ("cleared", 5))))
if mibBuilder.loadTexts: severity.setStatus('current')
probableCause = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 3), DisplayString())
if mibBuilder.loadTexts: probableCause.setStatus('current')
originalAlarm = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8192)))
if mibBuilder.loadTexts: originalAlarm.setStatus('current')
processingStatus = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("not-processed", 0), ("in-process", 1), ("under-repair", 2), ("deferred", 3), ("cleared", 4))))
if mibBuilder.loadTexts: processingStatus.setStatus('current')
alarmClass = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 6), DisplayString())
if mibBuilder.loadTexts: alarmClass.setStatus('current')
managedObjectInstance = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 7), DisplayString())
if mibBuilder.loadTexts: managedObjectInstance.setStatus('current')
rack = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: rack.setStatus('current')
shelf = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: shelf.setStatus('current')
fromCard = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: fromCard.setStatus('current')
toCard = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: toCard.setStatus('current')
fromPort = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: fromPort.setStatus('current')
toPort = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 9999)))
if mibBuilder.loadTexts: toPort.setStatus('current')
eventTime = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 14), DisplayString())
if mibBuilder.loadTexts: eventTime.setStatus('current')
ipAddress = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 15), DisplayString())
if mibBuilder.loadTexts: ipAddress.setStatus('current')
trapName = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 16), DisplayString())
if mibBuilder.loadTexts: trapName.setStatus('current')
specificProblems = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 17), DisplayString())
if mibBuilder.loadTexts: specificProblems.setStatus('current')
additionalText = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 2048)))
if mibBuilder.loadTexts: additionalText.setStatus('current')
additionalInformation = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8192)))
if mibBuilder.loadTexts: additionalInformation.setStatus('current')
backupStatus = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2))))
if mibBuilder.loadTexts: backupStatus.setStatus('current')
backupObject = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 2), DisplayString())
if mibBuilder.loadTexts: backupObject.setStatus('current')
trendIndication = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("moresevere", 1), ("nochange", 2), ("lesssevere", 3))))
if mibBuilder.loadTexts: trendIndication.setStatus('current')
thresholdInformation = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 4), DisplayString())
if mibBuilder.loadTexts: thresholdInformation.setStatus('current')
correlatedEvents = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8192)))
if mibBuilder.loadTexts: correlatedEvents.setStatus('current')
stateChanges = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 6), DisplayString())
if mibBuilder.loadTexts: stateChanges.setStatus('current')
monitoredAttributes = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8192)))
if mibBuilder.loadTexts: monitoredAttributes.setStatus('current')
securityAlarmDetector = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 8), DisplayString())
if mibBuilder.loadTexts: securityAlarmDetector.setStatus('current')
serviceUser = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 9), DisplayString())
if mibBuilder.loadTexts: serviceUser.setStatus('current')
serviceProvider = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 10), DisplayString())
if mibBuilder.loadTexts: serviceProvider.setStatus('current')
listOfFaultyBoards = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 6, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8192)))
if mibBuilder.loadTexts: listOfFaultyBoards.setStatus('current')
mmnKey = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 7, 1), DisplayString())
if mibBuilder.loadTexts: mmnKey.setStatus('current')
thresholdValue = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 7, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)))
if mibBuilder.loadTexts: thresholdValue.setStatus('current')
currentValue = MibScalar((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 7, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)))
if mibBuilder.loadTexts: currentValue.setStatus('current')
summaryAlarms = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 201)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "numberOfAlarms"), ("X733GROUP-MIB", "connectionReliable"), ("X733GROUP-MIB", "globalAlarmIds"))
if mibBuilder.loadTexts: summaryAlarms.setStatus('current')
spontaneousAlarms = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 202)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "managedObjectClass"), ("X733GROUP-MIB", "notificationId"), ("X733GROUP-MIB", "severity"), ("X733GROUP-MIB", "eventType"), ("X733GROUP-MIB", "eventTime"), ("X733GROUP-MIB", "probableCause"), ("X733GROUP-MIB", "processingStatus"), ("X733GROUP-MIB", "alarmClass"), ("X733GROUP-MIB", "managedObjectInstance"), ("X733GROUP-MIB", "rack"), ("X733GROUP-MIB", "shelf"), ("X733GROUP-MIB", "fromCard"), ("X733GROUP-MIB", "toCard"), ("X733GROUP-MIB", "fromPort"), ("X733GROUP-MIB", "toPort"), ("X733GROUP-MIB", "originalAlarm"))
if mibBuilder.loadTexts: spontaneousAlarms.setStatus('current')
snmpAlarm = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 203)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "notificationId"), ("X733GROUP-MIB", "severity"), ("X733GROUP-MIB", "eventType"), ("X733GROUP-MIB", "eventTime"), ("X733GROUP-MIB", "probableCause"), ("X733GROUP-MIB", "specificProblems"), ("X733GROUP-MIB", "managedObjectClass"), ("X733GROUP-MIB", "managedObjectInstance"), ("X733GROUP-MIB", "ipAddress"), ("X733GROUP-MIB", "trapName"), ("X733GROUP-MIB", "originalAlarm"))
if mibBuilder.loadTexts: snmpAlarm.setStatus('current')
q3Alarm = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 204)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "notificationId"), ("X733GROUP-MIB", "q3AlarmNumber"), ("X733GROUP-MIB", "severity"), ("X733GROUP-MIB", "eventType"), ("X733GROUP-MIB", "eventTime"), ("X733GROUP-MIB", "probableCause"), ("X733GROUP-MIB", "specificProblems"), ("X733GROUP-MIB", "managedObjectClass"), ("X733GROUP-MIB", "managedObjectInstance"), ("X733GROUP-MIB", "additionalText"), ("X733GROUP-MIB", "additionalInformation"), ("X733GROUP-MIB", "backupStatus"), ("X733GROUP-MIB", "backupObject"), ("X733GROUP-MIB", "trendIndication"), ("X733GROUP-MIB", "thresholdInformation"), ("X733GROUP-MIB", "correlatedEvents"), ("X733GROUP-MIB", "stateChanges"), ("X733GROUP-MIB", "monitoredAttributes"), ("X733GROUP-MIB", "securityAlarmDetector"), ("X733GROUP-MIB", "serviceUser"), ("X733GROUP-MIB", "serviceProvider"), ("X733GROUP-MIB", "listOfFaultyBoards"), ("X733GROUP-MIB", "originalAlarm"))
if mibBuilder.loadTexts: q3Alarm.setStatus('current')
q3contAlarm = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 205)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "notificationId"), ("X733GROUP-MIB", "q3AlarmNumber"), ("X733GROUP-MIB", "correlatedEvents"), ("X733GROUP-MIB", "monitoredAttributes"), ("X733GROUP-MIB", "listOfFaultyBoards"), ("X733GROUP-MIB", "originalAlarm"))
if mibBuilder.loadTexts: q3contAlarm.setStatus('current')
timeAckAlarms = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 206)).setObjects(("X733GROUP-MIB", "timePeriod"))
if mibBuilder.loadTexts: timeAckAlarms.setStatus('current')
proxyStartUp = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 207))
if mibBuilder.loadTexts: proxyStartUp.setStatus('current')
countAlarm = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 208)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "critical"), ("X733GROUP-MIB", "major"), ("X733GROUP-MIB", "minor"))
if mibBuilder.loadTexts: countAlarm.setStatus('current')
osAlarm = NotificationType((1, 3, 6, 1, 4, 1, 231, 7, 1, 3, 1, 1, 5, 209)).setObjects(("X733GROUP-MIB", "neName"), ("X733GROUP-MIB", "notificationId"), ("X733GROUP-MIB", "severity"), ("X733GROUP-MIB", "eventType"), ("X733GROUP-MIB", "eventTime"), ("X733GROUP-MIB", "probableCause"), ("X733GROUP-MIB", "managedObjectClass"), ("X733GROUP-MIB", "managedObjectInstance"), ("X733GROUP-MIB", "mmnKey"), ("X733GROUP-MIB", "additionalText"), ("X733GROUP-MIB", "thresholdValue"), ("X733GROUP-MIB", "currentValue"), ("X733GROUP-MIB", "securityAlarmDetector"), ("X733GROUP-MIB", "serviceUser"), ("X733GROUP-MIB", "serviceProvider"))
if mibBuilder.loadTexts: osAlarm.setStatus('current')
mibBuilder.exportSymbols("X733GROUP-MIB", fromPort=fromPort, listOfFaultyBoards=listOfFaultyBoards, major=major, eventTime=eventTime, alarmClass=alarmClass, securityAlarmDetector=securityAlarmDetector, resendAlarm=resendAlarm, spontaneousAlarms=spontaneousAlarms, timePeriod=timePeriod, snmpAlarm=snmpAlarm, rack=rack, correlatedEvents=correlatedEvents, currentValue=currentValue, trendIndication=trendIndication, x733Group=x733Group, eventType=eventType, sni=sni, monitoredAttributes=monitoredAttributes, severity=severity, serviceProvider=serviceProvider, PYSNMP_MODULE_ID=ewsdAlarms, timeAckAlarms=timeAckAlarms, critical=critical, backupObject=backupObject, serviceUser=serviceUser, q3contAlarm=q3contAlarm, shelf=shelf, fromCard=fromCard, siemensUnits=siemensUnits, probableCause=probableCause, ewsdAlarms=ewsdAlarms, countAlarmSpontan=countAlarmSpontan, q3AlarmNumber=q3AlarmNumber, miscGroup=miscGroup, trapName=trapName, summaryAlarms=summaryAlarms, globalAlarmIds=globalAlarmIds, minor=minor, neName=neName, originalAlarm=originalAlarm, additionalInformation=additionalInformation, sendAllAlarms=sendAllAlarms, oenProductMibs=oenProductMibs, processingStatus=processingStatus, backupStatus=backupStatus, proxyStartUp=proxyStartUp, commonGroup=commonGroup, controlGroup=controlGroup, alarmSpontan=alarmSpontan, notificationId=notificationId, toCard=toCard, osAlarm=osAlarm, q3Group=q3Group, thresholdValue=thresholdValue, ipAddress=ipAddress, countAlarmPeriod=countAlarmPeriod, stateChanges=stateChanges, numberOfAlarms=numberOfAlarms, additionalText=additionalText, countAlarm=countAlarm, nms=nms, sendSummary=sendSummary, managedObjectInstance=managedObjectInstance, q3Alarm=q3Alarm, summaryGroup=summaryGroup, thresholdInformation=thresholdInformation, specificProblems=specificProblems, mmnKey=mmnKey, managedObjectClass=managedObjectClass, ncProxy=ncProxy, setPeriod=setPeriod, toPort=toPort, osGroup=osGroup, connectionReliable=connectionReliable)
| [
"[email protected]"
]
| |
075d717759921834a2a8c9622dbb53790cf0228a | b198ab1d3faf79d34b1745236daa5eb02a37e18e | /yggdrasil/metaschema/properties/tests/test_JSONArrayMetaschemaProperties.py | ed812677d1d5d9df256fbc5b8f6903ae12c185fa | [
"BSD-3-Clause"
]
| permissive | leighmatth/yggdrasil | 688f13aa0d274217daec9f412269fbbaf5f10aef | dcc4d75a4d2c6aaa7e50e75095a16df1df6b2b0a | refs/heads/master | 2021-07-09T10:39:25.422978 | 2021-04-14T16:40:04 | 2021-04-14T16:40:04 | 245,011,886 | 0 | 0 | NOASSERTION | 2020-03-04T21:54:25 | 2020-03-04T21:54:24 | null | UTF-8 | Python | false | false | 1,645 | py | from yggdrasil.metaschema.properties.tests import (
test_MetaschemaProperty as parent)
class TestItemsMetaschemaProperty(parent.TestMetaschemaProperty):
    r"""Test class for ItemsMetaschemaProperty class."""
    _mod = 'JSONArrayMetaschemaProperties'
    _cls = 'ItemsMetaschemaProperty'

    def __init__(self, *args, **kwargs):
        super(TestItemsMetaschemaProperty, self).__init__(*args, **kwargs)
        n = 3
        # A valid value is a list of ints; schemas may be given either as a
        # single schema applied to every item, or one schema per item.
        value = [int(i) for i in range(n)]
        schema_one = {'type': 'int'}
        schema_many = [{'type': 'int'} for _ in range(n)]
        bad_one = {'type': 'float'}
        bad_many = [{'type': 'float'} for _ in range(n)]
        self._valid = [(value, schema_one),
                       (value, schema_many),
                       ([int(i) for i in range(n - 1)], schema_one)]
        self._invalid = [([float(i) for i in range(n)], schema_one),
                         ([float(i) for i in range(n)], schema_many)]
        self._valid_compare = [(schema_one, schema_one),
                               (schema_one, schema_many),
                               (schema_many, schema_one),
                               (schema_many, schema_many)]
        self._invalid_compare = [(schema_one, bad_one),
                                 (schema_one, bad_many),
                                 (schema_many, bad_one),
                                 (schema_many, bad_many),
                                 (1, 1),
                                 (schema_many, schema_many[:-1])]
| [
"[email protected]"
]
| |
b08f3840e780e082aad97256d99c215839e1e058 | 1012f61f46ff7aaf37cd3ce0ead64e035ec201dc | /coding-challange/codewars/8kyu/~2021-07-25/capitalization-and-mutability/capitalization-and-mutability.py | 70ab2ba79b4d13199ed131fb83a863ae49274dcb | []
| no_license | polyglotm/coding-dojo | 89efe22f5a34088e94c9e3a4e25cad510b04172a | 43da9c75e3125f5cb1ac317d275475f1c0ea6727 | refs/heads/develop | 2023-08-17T11:59:30.945061 | 2023-08-16T14:13:45 | 2023-08-16T14:13:45 | 188,733,115 | 2 | 0 | null | 2023-03-04T05:49:21 | 2019-05-26T21:26:25 | JavaScript | UTF-8 | Python | false | false | 386 | py | """
capitalization-and-mutability
codewars/8kyu/Capitalization and Mutability
Difficulty: 8kyu
URL: https://www.codewars.com/kata/595970246c9b8fa0a8000086/
"""
def capitalize_word(word):
    """Return a copy of *word* with the first character capitalized
    and the remainder lower-cased (same contract as str.capitalize)."""
    return word.capitalize()
def test_capitalize_word():
    """Exercise capitalize_word over a table of (input, expected) pairs."""
    cases = (('word', 'Word'), ('i', 'I'), ('glasswear', 'Glasswear'))
    for given, expected in cases:
        assert capitalize_word(given) == expected
| [
"[email protected]"
]
| |
4b10fa53b97294463e20ad06343f2dd982acc650 | afebbb07b2b4eada17a5853c1ce63b4075d280df | /marketsim/gen/_intrinsic/orderbook/of_trader.py | 804ce5709645171b35783b2eb31d41c8a145e2c1 | []
| no_license | peter1000/marketsimulator | 8c0a55fc6408b880311d3ad49defc55e9af57824 | 1b677200a9d5323f2970c83f076c2b83d39d4fe6 | refs/heads/master | 2021-01-18T01:39:04.869755 | 2015-03-29T17:47:24 | 2015-03-29T17:47:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,224 | py | from marketsim import types
from marketsim.gen._out.trader._singleproxy import SingleProxy
from marketsim import getLabel
from marketsim.gen._out._intrinsic_base.orderbook.of_trader import OfTrader_Base, Proxy_Base
class Base(object):
    """Mixin that forwards attribute access to a wrapped ``_impl`` object
    (the concrete order book), rendering as its label when present."""
    _properties = {}

    def __getattr__(self, name):
        # Delegate everything except dunder lookups to the proxied
        # implementation; fall through to AttributeError otherwise.
        if not name.startswith('__') and self._impl:
            return getattr(self._impl, name)
        raise AttributeError

    def __str__(self):
        impl = self._impl
        return getLabel(impl) if impl else ''

    def __repr__(self):
        return self.__str__()
class OfTrader_Impl(Base, OfTrader_Base):
    """Order book resolved lazily from ``self.Trader``."""

    def __init__(self):
        # A SingleProxy trader gets a templated alias; anything else a
        # plain 'OfTrader' label.  (Exact type check kept on purpose.)
        if type(self.Trader) == SingleProxy:
            self._alias = ["$(TraderAsset)"]
        else:
            self._alias = ['OfTrader']
        Base.__init__(self)

    @property
    def _impl(self):
        # The trader may not expose an order book yet.
        try:
            return self.Trader.orderBook
        except AttributeError:
            return None
class Proxy_Impl(Base, Proxy_Base):
    """Order book proxy that is bound to a concrete book at bind time."""

    def __init__(self):
        self._impl = None
        Base.__init__(self)

    @property
    def label(self):
        # Placeholder label until an implementation is bound.
        if self._impl:
            return self._impl.label
        return '$(OrderBook)'

    def bind_impl(self, ctx):
        # Bind only once; later calls keep the first binding.
        if self._impl is None:
            self._impl = ctx.orderbook
| [
"[email protected]"
]
| |
1fa173f6bfa99361c4de753688e6de4aa025f83f | ea378480ba678eb123ef826e3ca0c3eb8f4e538f | /paused/05. bk old/bk future includes/candidates/06.misc from nodebox/context.py | 1d6b06a511c7ed2c494ad636d23c4867059aa457 | []
| no_license | msarch/py | 67235643666b1ed762d418263f7eed3966d3f522 | dcd25e633a87cdb3710e90224e5387d3516c1cd3 | refs/heads/master | 2021-01-01T05:21:58.175043 | 2017-05-25T08:15:26 | 2017-05-25T08:15:26 | 87,453,820 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155,609 | py | #=== CONTEXT =========================================================================================
# 2D NodeBox API in OpenGL.
# Authors: Tom De Smedt, Frederik De Bleser
# License: BSD (see LICENSE.txt for details).
# Copyright (c) 2008 City In A Bottle (cityinabottle.org)
# http://cityinabottle.org/nodebox
# All graphics are drawn directly to the screen.
# No scenegraph is kept for obvious performance reasons (therefore, no canvas._grobs as in NodeBox).
# Debugging must be switched on or of before other modules are imported.
import pyglet
from pyglet.gl import *
from pyglet.image import Texture
from math import cos, sin, radians, pi, floor
from time import time
from random import seed, choice, shuffle, random as rnd
from new import instancemethod
from glob import glob
from os import path, remove
from sys import getrefcount
from StringIO import StringIO
from hashlib import md5
from types import FunctionType
from datetime import datetime
#=====================================================================================================
#--- COLORS -------------------------------------------------------------------
#--- COLORS -------------------------------------------------------------------
# Fix: `namedtuple` was used below without ever being imported in this file's
# import block, so this module raised NameError at import time.
from collections import namedtuple

# Simple immutable RGBA record; channel values here are 0-255 ints.
Color = namedtuple('Color', 'r g b a')

orange = Color(255, 127, 0, 255)
white = Color(255, 255, 255, 255)
black = Color( 0, 0, 0, 255)
yellow = Color(255, 255, 0, 255)
red = Color(255, 0, 0, 255)
blue = Color(127, 127, 255, 255)
blue50 = Color(127, 127, 255, 127)
pink = Color(255, 187, 187, 255)
very_light_grey = Color(242, 242, 242, 0)

# kapla_colors
r_k = Color(255, 69, 0, 255) # red kapla
b_k = Color( 0, 0, 140, 255) # blue kapla
g_k = Color( 0, 99, 0, 255) # green kapla
y_k = Color(255, 214, 0, 255) # yellow kapla
kapla_colors=(r_k, g_k, b_k, y_k, b_k) # addded 1 color for pb w 4 kaplas TODO
def set_background_color(color=white):
    """Set the OpenGL clear color.

    Fix: the ``color`` argument was previously ignored — the body always
    passed ``blue`` to glClearColor, so callers could never change the
    background.
    NOTE(review): glClearColor expects floats in 0.0-1.0 while these palette
    colors are 0-255 ints — confirm the intended channel scale.
    """
    glClearColor(*color)
#--- COLOR -------------------------------------------------------------------------------------------
# Color-space identifiers accepted by Color(..., colorspace=...) and Color.map().
RGB = "RGB"
HSB = "HSB"
XYZ = "XYZ"
LAB = "LAB"

_background = None # Current state background color.
_fill = None # Current state fill color.
_stroke = None # Current state stroke color.
_strokewidth = 1 # Current state strokewidth.
_strokestyle = "solid" # Current state strokestyle.
_alpha = 1 # Current state alpha transparency.

class Color(list):

    def __init__(self, *args, **kwargs):
        """ A color with R,G,B,A channels, with channel values ranging between 0.0-1.0.
            Either takes four parameters (R,G,B,A), three parameters (R,G,B),
            two parameters (grayscale and alpha) or one parameter (grayscale or Color object).
            An optional base=1.0 parameter defines the range of the given parameters.
            An optional colorspace=RGB defines the color space of the given parameters.
            Calling Color() with no parameters yields transparent black.
        """
        # Values are supplied as a tuple.
        # (A Color argument also lands here, since Color subclasses list.)
        if len(args) == 1 and isinstance(args[0], (list, tuple)):
            args = args[0]
        # R, G, B and A.
        if len(args) == 4:
            r, g, b, a = args[0], args[1], args[2], args[3]
        # R, G and B.
        elif len(args) == 3:
            r, g, b, a = args[0], args[1], args[2], 1
        # Two values, grayscale and alpha.
        elif len(args) == 2:
            r, g, b, a = args[0], args[0], args[0], args[1]
        # One value, another color object.
        elif len(args) == 1 and isinstance(args[0], Color):
            r, g, b, a = args[0].r, args[0].g, args[0].b, args[0].a
        # One value, None.
        elif len(args) == 1 and args[0] is None:
            r, g, b, a = 0, 0, 0, 0
        # One value, grayscale.
        elif len(args) == 1:
            r, g, b, a = args[0], args[0], args[0], 1
        # No value (or more than four values): transparent black.
        # Fix: the original guard `elif len(args):` left the channels unbound
        # for zero arguments, so Color() raised NameError.
        else:
            r, g, b, a = 0, 0, 0, 0
        # Transform to base 1:
        base = float(kwargs.get("base", 1.0))
        if base != 1:
            r, g, b, a = [ch/base for ch in (r, g, b, a)]
        # Transform to color space RGB:
        colorspace = kwargs.get("colorspace")
        if colorspace and colorspace != RGB:
            if colorspace == HSB: r, g, b = hsb_to_rgb(r, g, b)
            if colorspace == XYZ: r, g, b = xyz_to_rgb(r, g, b)
            if colorspace == LAB: r, g, b = lab_to_rgb(r, g, b)
        list.__init__(self, [r, g, b, a])
        self._dirty = False

    def __setitem__(self, i, v):
        # Any channel write marks the color dirty (used by cached drawing code).
        list.__setitem__(self, i, v)
        self._dirty = True

    def _get_r(self): return self[0]
    def _get_g(self): return self[1]
    def _get_b(self): return self[2]
    def _get_a(self): return self[3]

    def _set_r(self, v): self[0] = v
    def _set_g(self, v): self[1] = v
    def _set_b(self, v): self[2] = v
    def _set_a(self, v): self[3] = v

    r = red = property(_get_r, _set_r)
    g = green = property(_get_g, _set_g)
    b = blue = property(_get_b, _set_b)
    a = alpha = property(_get_a, _set_a)

    def _get_rgb(self):
        return self[0], self[1], self[2]
    def _set_rgb(self, v):
        # Note: tuple-parameter unpacking (py2-only syntax) replaced with an
        # explicit unpack; behavior is identical.
        self[0], self[1], self[2] = v

    rgb = property(_get_rgb, _set_rgb)

    def _get_rgba(self):
        return self[0], self[1], self[2], self[3]
    def _set_rgba(self, v):
        self[0], self[1], self[2], self[3] = v

    rgba = property(_get_rgba, _set_rgba)

    def copy(self):
        return Color(self)

    def _apply(self):
        # Multiply by the global state alpha before handing the color to GL.
        glColor4f(self[0], self[1], self[2], self[3] * _alpha)

    def __repr__(self):
        return "Color(%.3f, %.3f, %.3f, %.3f)" % \
            (self[0], self[1], self[2], self[3])

    def __eq__(self, clr):
        if not isinstance(clr, Color): return False
        return self[0] == clr[0] \
           and self[1] == clr[1] \
           and self[2] == clr[2] \
           and self[3] == clr[3]

    def __ne__(self, clr):
        return not self.__eq__(clr)

    def map(self, base=1.0, colorspace=RGB):
        """ Returns a list of R,G,B,A values mapped to the given base,
            e.g. from 0-255 instead of 0.0-1.0 which is useful for setting image pixels.
            Other values than RGBA can be obtained by setting the colorspace (RGB/HSB/XYZ/LAB).
        """
        r, g, b, a = self
        if colorspace != RGB:
            if colorspace == HSB: r, g, b = rgb_to_hsb(r, g, b)
            if colorspace == XYZ: r, g, b = rgb_to_xyz(r, g, b)
            if colorspace == LAB: r, g, b = rgb_to_lab(r, g, b)
        if base != 1:
            r, g, b, a = [ch*base for ch in (r, g, b, a)]
        if base != 1 and isinstance(base, int):
            # An integer base yields integer channels.
            r, g, b, a = [int(ch) for ch in (r, g, b, a)]
        return r, g, b, a

    def blend(self, clr, t=0.5, colorspace=RGB):
        """ Returns a new color between the two colors.
            Parameter t is the amount to interpolate between the two colors
            (0.0 equals the first color, 0.5 is half-way in between, etc.)
            Blending in CIE-LAB colorspace avoids "muddy" colors in the middle of the blend.
        """
        ch = zip(self.map(1, colorspace)[:3], clr.map(1, colorspace)[:3])
        r, g, b = [geometry.lerp(a, b, t) for a, b in ch]
        a = geometry.lerp(self.a, len(clr)==4 and clr[3] or 1, t)
        return Color(r, g, b, a, colorspace=colorspace)

    def rotate(self, angle):
        """ Returns a new color with it's hue rotated on the RYB color wheel.
        """
        h, s, b = rgb_to_hsb(*self[:3])
        h, s, b = rotate_ryb(h, s, b, angle)
        return Color(h, s, b, self.a, colorspace=HSB)

color = Color
def background(*args, **kwargs):
    """ Sets (when arguments are given) and returns the current background color.
        Note: the background rectangle is redrawn on every call, even
        argument-less queries.
    """
    global _background
    if args:
        _background = Color(*args, **kwargs)
    viewport = (GLint*4)()
    glGetIntegerv(GL_VIEWPORT, viewport)
    vx, vy, vw, vh = viewport
    rect(vx, vy, vw, vh, fill=_background, stroke=None)
    return _background
def fill(*args, **kwargs):
    """ Sets (when arguments are given) and returns the current fill color
        used by drawing primitives and paths.
    """
    global _fill
    if args:
        _fill = Color(*args, **kwargs)
    return _fill

fill(0) # The default fill is black.
def stroke(*args, **kwargs):
    """ Sets (when arguments are given) and returns the current stroke color.
    """
    global _stroke
    if args:
        _stroke = Color(*args, **kwargs)
    return _stroke
def nofill():
    """ Disables the current fill color.
    """
    global _fill
    _fill = None
def nostroke():
    """ Disables the current stroke color.
    """
    global _stroke
    _stroke = None
def strokewidth(width=None):
    """ Sets (when given) and returns the current outline stroke width.
    """
    # GL clamps fractional widths to integers (e.g. 0.2 => 1); visually
    # finer lines can be faked with a transparent stroke.  Thick widths
    # have ugly (i.e. no) line caps.
    global _strokewidth
    if width is not None:
        _strokewidth = width
        glLineWidth(width)
    return _strokewidth
# Stroke style names accepted by strokestyle() and glLineDash().
SOLID = "solid"
DOTTED = "dotted"
DASHED = "dashed"
def strokestyle(style=None):
    """ Sets (when given) and returns the current stroke style
        (SOLID / DOTTED / DASHED).
    """
    global _strokestyle
    # Only touch GL state when the style actually changes.
    if style is not None and style != _strokestyle:
        _strokestyle = style
        glLineDash(style)
    return _strokestyle
def glLineDash(style):
    """ Applies the OpenGL line-stipple pattern matching the given stroke style.
    """
    if style == SOLID:
        glDisable(GL_LINE_STIPPLE)
        return
    if style == DOTTED:
        glEnable(GL_LINE_STIPPLE)
        glLineStipple(0, 0x0101)
    elif style == DASHED:
        glEnable(GL_LINE_STIPPLE)
        glLineStipple(1, 0x000F)
def outputmode(mode=None):
    """ Output modes are not supported by the OpenGL backend.
    """
    raise NotImplementedError
def colormode(mode=None, range=1.0):
    """ Color modes are not supported by the OpenGL backend.
    """
    raise NotImplementedError
#--- COLOR SPACE -------------------------------------------------------------------------------------
# Transformations between RGB, HSB, CIE XYZ and CIE LAB color spaces.
# http://www.easyrgb.com/math.php
def rgb_to_hsb(r, g, b):
    """ Converts the given R,G,B values to H,S,B (between 0.0-1.0).
    """
    mx = max(r, g, b)
    d = mx - min(r, g, b)
    h = 0
    s = 0 if mx == 0 else d / float(mx)
    if s != 0:
        # Hue sector depends on which channel is dominant.
        if r == mx:
            h = 0 + (g-b) / d
        elif g == mx:
            h = 2 + (b-r) / d
        else:
            h = 4 + (r-g) / d
        h = h / 6.0 % 1
    return h, s, mx
def hsb_to_rgb(h, s, v):
    """ Converts the given H,S,B color values to R,G,B (between 0.0-1.0).
    """
    if s == 0:
        # Zero saturation is grayscale: all channels equal the brightness.
        return v, v, v
    h = h % 1 * 6.0
    i = floor(h)
    f = h - i
    p = v * (1-s)
    q = v * (1-s * f)
    t = v * (1-s * (1-f))
    if i > 4:
        return v, p, q
    sector = [(v, t, p), (q, v, p), (p, v, t), (p, q, v), (t, p, v)]
    return sector[int(i)]
def rgb_to_xyz(r, g, b):
    """ Converts the given R,G,B values to CIE X,Y,Z (between 0.0-1.0).

    Fix: the py2-only `for ch in r, g, b` comprehension syntax is
    parenthesized so the function also parses under Python 3; behavior
    is unchanged.
    """
    # Inverse sRGB companding, then scale channels to 0-100.
    r, g, b = [((ch+0.055) / 1.055) ** 2.4 if ch > 0.04045 else ch / 12.92 for ch in (r, g, b)]
    r, g, b = [ch * 100.0 for ch in (r, g, b)]
    r, g, b = ( # Observer = 2, Illuminant = D65
        r * 0.4124 + g * 0.3576 + b * 0.1805,
        r * 0.2126 + g * 0.7152 + b * 0.0722,
        r * 0.0193 + g * 0.1192 + b * 0.9505)
    return r/95.047, g/100.0, b/108.883
def xyz_to_rgb(x, y, z):
""" Converts the given CIE X,Y,Z color values to R,G,B (between 0.0-1.0).
"""
x, y, z = x*95.047, y*100.0, z*108.883
x, y, z = [ch / 100.0 for ch in x, y, z]
r = x * 3.2406 + y * -1.5372 + z * -0.4986
g = x * -0.9689 + y * 1.8758 + z * 0.0415
b = x * -0.0557 + y * -0.2040 + z * 1.0570
r, g, b = [ch > 0.0031308 and 1.055 * ch**(1/2.4) - 0.055 or ch * 12.92 for ch in r, g, b]
return r, g, b
def rgb_to_lab(r, g, b):
    """ Converts the given R,G,B values to CIE L,A,B (between 0.0-1.0).

    Fix: the py2-only `for ch in x, y, z` comprehension syntax is
    parenthesized so the function also parses under Python 3; behavior
    is unchanged.
    """
    x, y, z = rgb_to_xyz(r, g, b)
    x, y, z = [ch**(1/3.0) if ch > 0.008856 else (ch*7.787) + (16/116.0) for ch in (x, y, z)]
    l, a, b = y*116-16, 500*(x-y), 200*(y-z)
    # Normalize L,A,B into 0.0-1.0 ranges.
    l, a, b = l/100.0, (a+86)/(86+98), (b+108)/(108+94)
    return l, a, b
def lab_to_rgb(l, a, b):
    """ Converts the given CIE L,A,B color values to R,G,B (between 0.0-1.0).

    Fix: the py2-only `for ch in x, y, z` comprehension syntax is
    parenthesized so the function also parses under Python 3; behavior
    is unchanged.
    """
    l, a, b = l*100, a*(86+98)-86, b*(108+94)-108
    y = (l+16)/116.0
    x = y + a/500.0
    z = y - b/200.0
    x, y, z = [ch**3 if ch**3 > 0.008856 else (ch-16/116.0)/7.787 for ch in (x, y, z)]
    return xyz_to_rgb(x, y, z)
def luminance(r, g, b):
    """ Returns an indication (0.0-1.0) of how bright the color appears.

    Fix: the blue term read `b+0.0721` (addition) instead of `b*0.0721`,
    which skewed every result upward by ~0.036 + half the raw blue channel.
    """
    return (r*0.2125 + g*0.7154 + b*0.0721) * 0.5
def darker(clr, step=0.2):
    """ Returns a copy of the color with its brightness reduced by `step`.
    """
    h, s, v = rgb_to_hsb(clr.r, clr.g, clr.b)
    r, g, b = hsb_to_rgb(h, s, max(0, v-step))
    # NOTE(review): the and/or idiom makes an alpha of exactly 0 fall back
    # to 1 -- confirm intended.
    return Color(r, g, b, len(clr)==4 and clr[3] or 1)
def lighter(clr, step=0.2):
    """ Returns a copy of the color with its brightness increased by `step`.
    """
    h, s, v = rgb_to_hsb(clr.r, clr.g, clr.b)
    r, g, b = hsb_to_rgb(h, s, min(1, v+step))
    # NOTE(review): the and/or idiom makes an alpha of exactly 0 fall back
    # to 1 -- confirm intended.
    return Color(r, g, b, len(clr)==4 and clr[3] or 1)

darken, lighten = darker, lighter
#--- COLOR ROTATION ----------------------------------------------------------------------------------
# Approximation of the RYB color wheel.
# In HSB, colors hues range from 0 to 360,
# but on the color wheel these values are not evenly distributed.
# The second tuple value contains the actual value on the wheel (angle).
# (hue, wheel-angle) pairs, both in degrees 0-360; consecutive pairs are
# interpolated by rotate_ryb() below.
_colorwheel = [
    ( 0, 0), ( 15, 8), ( 30, 17), ( 45, 26),
    ( 60, 34), ( 75, 41), ( 90, 48), (105, 54),
    (120, 60), (135, 81), (150, 103), (165, 123),
    (180, 138), (195, 155), (210, 171), (225, 187),
    (240, 204), (255, 219), (270, 234), (285, 251),
    (300, 267), (315, 282), (330, 298), (345, 329), (360, 360)
]
def rotate_ryb(h, s, b, angle=180):
    """ Rotates the given H,S,B color (0.0-1.0) on the RYB color wheel.
        The RYB colorwheel is not mathematically precise,
        but focuses on aesthetically pleasing complementary colors.
    """
    h = h*360 % 360
    # Locate the hue's angular position on the RYB wheel.
    for (x0, y0), (x1, y1) in zip(_colorwheel, _colorwheel[1:]):
        if y0 <= h <= y1:
            a = geometry.lerp(x0, x1, t=(h-y0)/(y1-y0))
            break
    # Rotate the angle, then map it back to a hue.
    a = (a+angle) % 360
    for (x0, y0), (x1, y1) in zip(_colorwheel, _colorwheel[1:]):
        if x0 <= a <= x1:
            h = geometry.lerp(y0, y1, t=(a-x0)/(x1-x0))
            break
    return h/360.0, s, b
def complement(clr):
    """ Returns the color opposite on the color wheel.
        The complementary color contrasts with the given color.
    """
    c = clr if isinstance(clr, Color) else Color(clr)
    return c.rotate(180)
def analog(clr, angle=20, d=0.1):
    """ Returns a random adjacent color on the color wheel.
        Analogous color schemes can often be found in nature.
    """
    # `random(lo,hi)` here is presumably the canvas helper defined elsewhere
    # in this file, not the stdlib module (imported as `rnd` above).
    h, s, b = rgb_to_hsb(*clr[:3])
    h, s, b = rotate_ryb(h, s, b, angle=random(-angle,angle))
    s *= 1 - random(-d,d)
    b *= 1 - random(-d,d)
    return Color(h, s, b, len(clr)==4 and clr[3] or 1, colorspace=HSB)
#--- COLOR MIXIN -------------------------------------------------------------------------------------
# Drawing commands like rect() have optional parameters fill and stroke to set the color directly.
def color_mixin(**kwargs):
    """ Resolves fill / stroke / strokewidth / strokestyle from the given
        keyword arguments, falling back to the current global state.
    """
    return (kwargs.get("fill", _fill),
            kwargs.get("stroke", _stroke),
            kwargs.get("strokewidth", _strokewidth),
            kwargs.get("strokestyle", _strokestyle))
#--- COLOR PLANE -------------------------------------------------------------------------------------
# Not part of the standard API but too convenient to leave out.
def colorplane(x, y, width, height, *a):
    """ Draws a rectangle that emits a different fill color from each corner.
        An optional number of colors can be given:
        - four colors define top left, top right, bottom right and bottom left,
        - three colors define top left, top right and bottom,
        - two colors define top and bottom,
        - no colors assumes black top and white bottom gradient.
    """
    # NOTE(review): 1 or more than 4 colors leaves the corners unbound
    # (NameError below) -- confirm intended.
    if len(a) == 2:
        # Top and bottom colors.
        clr1, clr2, clr3, clr4 = a[0], a[0], a[1], a[1]
    elif len(a) == 4:
        # Top left, top right, bottom right, bottom left.
        clr1, clr2, clr3, clr4 = a
    elif len(a) == 3:
        # Top left, top right, bottom.
        clr1, clr2, clr3, clr4 = a[0], a[1], a[2], a[2]
    elif len(a) == 0:
        # Black top, white bottom.
        clr1 = clr2 = (0,0,0,1)
        clr3 = clr4 = (1,1,1,1)
    glPushMatrix()
    glTranslatef(x, y, 0)
    glScalef(width, height, 1)
    glBegin(GL_QUADS)
    for clr, vx, vy in ((clr1, -0.0, 1.0), (clr2, 1.0, 1.0),
                        (clr3, 1.0, -0.0), (clr4, -0.0, -0.0)):
        glColor4f(clr[0], clr[1], clr[2], clr[3] * _alpha)
        glVertex2f(vx, vy)
    glEnd()
    glPopMatrix()
#=====================================================================================================
#--- TRANSFORMATIONS ---------------------------------------------------------------------------------
# Unlike NodeBox, all transformations are CORNER-mode and originate from the bottom-left corner.
# Example: using Transform to get a transformed path.
# t = Transform()
# t.rotate(45)
# p = BezierPath()
# p.rect(10,10,100,70)
# p = t.transform_path(p)
# p.contains(x,y) # now we can check if the mouse is in the transformed shape.
# 2D affine transformation matrix, provided by the geometry module.
Transform = geometry.AffineTransform
def push():
    """ Saves the current transformation state.
        Subsequent translate/rotate/scale calls stay in effect until pop().
    """
    glPushMatrix()

def pop():
    """ Restores the transformation state saved by the last push().
    """
    glPopMatrix()

def translate(x, y, z=0):
    """ Moves the origin point (bottom left by default) of the layer or
        canvas by (x,y) pixels.  Coordinates are rounded to whole pixels.
    """
    glTranslatef(round(x), round(y), round(z))

def rotate(degrees, axis=(0,0,1)):
    """ Rotates the transformation state; rotations are cumulative
        (rotate(60) followed by rotate(30) gives 90).
    """
    glRotatef(degrees, *axis)

def scale(x, y=None, z=None):
    """ Scales the transformation state.
        A missing y defaults to x (uniform scale); a missing z defaults to 1.
    """
    glScalef(x, x if y is None else y, 1 if z is None else z)

def reset():
    """ Resets the transform state of the layer or canvas.
    """
    glLoadIdentity()
# Transform modes; only CORNER (origin at the bottom left) is supported.
CORNER = "corner"
CENTER = "center"

def transform(mode=None):
    """ Returns the current transform mode (always CORNER).

    Raises NotImplementedError when CENTER mode is requested.
    Fix: uses the parenthesized raise form — the original py2-only
    `raise NotImplementedError, "msg"` is a syntax error under Python 3.
    """
    if mode == CENTER:
        raise NotImplementedError("no center-mode transform")
    return CORNER
def skew(x, y):
    """ Skew transforms are not supported by the OpenGL backend.
    """
    raise NotImplementedError
#=====================================================================================================
#--- DRAWING PRIMITIVES ------------------------------------------------------------------------------
# Drawing primitives: Point, line, rect, ellipse, arrow. star.
# The fill and stroke are two different shapes put on top of each other.
# 2D point from the geometry module; also the base class for PathPoint below.
Point = geometry.Point
def line(x0, y0, x1, y1, **kwargs):
    """ Draws a straight line from (x0,y0) to (x1,y1) with the current
        stroke color and strokewidth.
    """
    fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
    if stroke is None or strokewidth <= 0:
        return
    glColor4f(stroke[0], stroke[1], stroke[2], stroke[3] * _alpha)
    glLineWidth(strokewidth)
    glLineDash(strokestyle)
    glBegin(GL_LINE_LOOP)
    glVertex2f(x0, y0)
    glVertex2f(x1, y1)
    glEnd()
def rect(x, y, width, height, **kwargs):
    """ Draws a rectangle with the bottom left corner at x, y.
        The current stroke, strokewidth and fill color are applied.
    """
    fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
    # Fill first, then the outline on top.
    for clr, mode, is_stroke in ((fill, GL_POLYGON, False),
                                 (stroke, GL_LINE_LOOP, True)):
        if clr is None or (is_stroke and strokewidth <= 0):
            continue
        if is_stroke:
            glLineWidth(strokewidth)
            glLineDash(strokestyle)
        glColor4f(clr[0], clr[1], clr[2], clr[3] * _alpha)
        glBegin(mode)
        glVertex2f(x, y)
        glVertex2f(x+width, y)
        glVertex2f(x+width, y+height)
        glVertex2f(x, y+height)
        glEnd()
def triangle(x1, y1, x2, y2, x3, y3, **kwargs):
    """ Draws the triangle created by connecting the three given points.
        The current stroke, strokewidth and fill color are applied.
    """
    fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
    for clr, mode, is_stroke in ((fill, GL_POLYGON, False),
                                 (stroke, GL_LINE_LOOP, True)):
        if clr is None or (is_stroke and strokewidth <= 0):
            continue
        if is_stroke:
            glLineWidth(strokewidth)
            glLineDash(strokestyle)
        glColor4f(clr[0], clr[1], clr[2], clr[3] * _alpha)
        glBegin(mode)
        glVertex2f(x1, y1)
        glVertex2f(x2, y2)
        glVertex2f(x3, y3)
        glEnd()
# Cache of unit-ellipse display lists keyed by segment count:
# _ellipses[n] == [fill_list, stroke_list].
_ellipses = {}
ELLIPSE_SEGMENTS = 50
def ellipse(x, y, width, height, segments=ELLIPSE_SEGMENTS, **kwargs):
    """ Draws an ellipse with the center located at x, y.
        The current stroke, strokewidth and fill color are applied.
    """
    if not segments in _ellipses:
        # For the given amount of line segments, calculate the ellipse once.
        # Then reuse the cached ellipse by scaling it to the desired size.
        # precompile() is defined elsewhere in this file; presumably it
        # records the calls into a GL display list -- TODO confirm.
        _ellipses[segments] = []
        for mode in (GL_POLYGON, GL_LINE_LOOP):
            _ellipses[segments].append(precompile(lambda:(
                glBegin(mode),
                [glVertex2f(cos(t)/2, sin(t)/2) for t in [2*pi*i/segments for i in range(segments)]],
                glEnd()
            )))
    fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
    # Index 0 draws the fill, index 1 the outline.
    for i, clr in enumerate((fill, stroke)):
        if clr is not None and (i==0 or strokewidth > 0):
            if i == 1:
                glLineWidth(strokewidth)
                glLineDash(strokestyle)
            glColor4f(clr[0], clr[1], clr[2], clr[3] * _alpha)
            glPushMatrix()
            glTranslatef(x, y, 0)
            glScalef(width, height, 1)
            glCallList(_ellipses[segments][i])
            glPopMatrix()

oval = ellipse # Backwards compatibility.
def arrow(x, y, width, **kwargs):
    """ Draws an arrow with its tip located at x, y.
        The current stroke, strokewidth and fill color are applied.
    """
    head = width * 0.4
    tail = width * 0.2
    fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
    outline = (
        (x,       y),
        (x-head,  y+head),
        (x-head,  y+tail),
        (x-width, y+tail),
        (x-width, y-tail),
        (x-head,  y-tail),
        (x-head,  y-head),
        (x,       y))
    for clr, mode, is_stroke in ((fill, GL_POLYGON, False),
                                 (stroke, GL_LINE_LOOP, True)):
        if clr is None or (is_stroke and strokewidth <= 0):
            continue
        if is_stroke:
            glLineWidth(strokewidth)
            glLineDash(strokestyle)
        glColor4f(clr[0], clr[1], clr[2], clr[3] * _alpha)
        glBegin(mode)
        for vx, vy in outline:
            glVertex2f(vx, vy)
        glEnd()
def star(x, y, points=20, outer=100, inner=50, **kwargs):
    """ Draws a star with the given points, outer radius and inner radius.
        The current stroke, strokewidth and fill color are applied.
    """
    # GL_POLYGON only handles convex polygons, so the star is built as a
    # BezierPath (which tessellates concave fills).
    p = BezierPath(**kwargs)
    p.moveto(x, y+outer)
    for i in range(0, int(2*points)+1):
        radius = inner if i % 2 else outer
        t = pi*i/points
        p.lineto(x + radius*sin(t), y + radius*cos(t))
    p.closepath()
    if kwargs.get("draw", True):
        p.draw(**kwargs)
    return p
#=====================================================================================================
#--- BEZIER PATH -------------------------------------------------------------------------------------
# A BezierPath class with lineto(), curveto() and moveto() commands.
# It has all the path math functionality from NodeBox and a ray casting algorithm for contains().
# A number of caching mechanisms are used for performance:
# drawn vertices, segment lengths, path bounds, and a hit test area for BezierPath.contains().
# For optimal performance, the path should be created once (not every frame) and left unmodified.
# When points in the path are added, removed or modified, a _dirty flag is set.
# When dirty, the cache will be cleared and the new path recalculated.
# If the path is being drawn with a fill color, this means doing tessellation
# (i.e. additional math for finding out if parts overlap and punch a hole in the shape).
# Path command identifiers used by PathElement.cmd.
MOVETO = "moveto"
LINETO = "lineto"
CURVETO = "curveto"
CLOSE = "close"

RELATIVE = "relative" # Number of straight lines to represent a curve = 20% of curve length.
RELATIVE_PRECISION = 0.2
class PathError(Exception):
    """Generic error raised for invalid path operations."""
    pass

class NoCurrentPointForPath(Exception):
    """Raised when a path command needs a current point and none exists."""
    pass

class NoCurrentPath(Exception):
    """Raised when path commands are issued while no path is being built."""
    pass
class PathPoint(Point):

    def __init__(self, x=0, y=0):
        """ A control handle for PathElement.
        """
        self._x = x
        self._y = y
        self._dirty = False

    # Writing either coordinate flags the handle dirty, which PathElement
    # and BezierPath use to invalidate their caches.
    def _get_x(self):
        return self._x
    def _set_x(self, v):
        self._x = v
        self._dirty = True
    x = property(_get_x, _set_x)

    def _get_y(self):
        return self._y
    def _set_y(self, v):
        self._y = v
        self._dirty = True
    y = property(_get_y, _set_y)

    def copy(self, parent=None):
        # `parent` is accepted for API compatibility and ignored.
        return PathPoint(self._x, self._y)
class PathElement(object):

    def __init__(self, cmd=None, pts=None):
        """ A point in the path, optionally with control handles:
            - MOVETO : the list of points contains a single (x,y)-tuple.
            - LINETO : the list of points contains a single (x,y)-tuple.
            - CURVETO : the list of points contains (vx1,vy1), (vx2,vy2), (x,y) tuples.
            - CLOSETO : no points.
        """
        if cmd == MOVETO \
        or cmd == LINETO:
            pt, h1, h2 = pts[0], pts[0], pts[0]
        elif cmd == CURVETO:
            pt, h1, h2 = pts[2], pts[0], pts[1]
        else:
            pt, h1, h2 = (0,0), (0,0), (0,0)
        self._cmd    = cmd
        self._x      = pt[0]
        self._y      = pt[1]
        self._ctrl1  = PathPoint(h1[0], h1[1])
        self._ctrl2  = PathPoint(h2[0], h2[1])
        # Name-mangled to _PathElement__dirty; the public _dirty property
        # below also folds in the handles' dirty flags.
        self.__dirty = False

    # An element is dirty when the point itself or either handle changed.
    def _get_dirty(self):
        return self.__dirty \
            or self.ctrl1._dirty \
            or self.ctrl2._dirty

    def _set_dirty(self, b):
        self.__dirty = b
        self.ctrl1._dirty = b
        self.ctrl2._dirty = b

    _dirty = property(_get_dirty, _set_dirty)

    @property
    def cmd(self):
        # The path command (MOVETO / LINETO / CURVETO / CLOSE) is read-only.
        return self._cmd

    def _get_x(self): return self._x
    def _set_x(self, v):
        self._x = v
        self.__dirty = True

    def _get_y(self): return self._y
    def _set_y(self, v):
        self._y = v
        self.__dirty = True

    x = property(_get_x, _set_x)
    y = property(_get_y, _set_y)

    def _get_xy(self):
        return (self.x, self.y)
    def _set_xy(self, (x,y)):
        # NOTE(review): py2-only tuple-parameter syntax; would need rewriting
        # for Python 3.
        self.x = x
        self.y = y

    xy = property(_get_xy, _set_xy)

    # Handle 1 describes now the curve from the previous point started.
    def _get_ctrl1(self): return self._ctrl1
    def _set_ctrl1(self, v):
        self._ctrl1 = PathPoint(v.x, v.y)
        self.__dirty = True

    # Handle 2 describes how the curve from the previous point arrives in this point.
    def _get_ctrl2(self): return self._ctrl2
    def _set_ctrl2(self, v):
        self._ctrl2 = PathPoint(v.x, v.y)
        self.__dirty = True

    ctrl1 = property(_get_ctrl1, _set_ctrl1)
    ctrl2 = property(_get_ctrl2, _set_ctrl2)

    def __eq__(self, pt):
        if not isinstance(pt, PathElement): return False
        return self.cmd == pt.cmd \
           and self.x == pt.x \
           and self.y == pt.y \
           and self.ctrl1 == pt.ctrl1 \
           and self.ctrl2 == pt.ctrl2

    def __ne__(self, pt):
        return not self.__eq__(pt)

    def __repr__(self):
        return "%s(cmd='%s', x=%.1f, y=%.1f, ctrl1=(%.1f, %.1f), ctrl2=(%.1f, %.1f))" % (
            self.__class__.__name__, self.cmd, self.x, self.y,
            self.ctrl1.x, self.ctrl1.y,
            self.ctrl2.x, self.ctrl2.y)

    def copy(self):
        # Rebuild the constructor's `pts` argument from the current state.
        if self.cmd == MOVETO \
        or self.cmd == LINETO:
            pts = ((self.x, self.y),)
        elif self.cmd == CURVETO:
            pts = ((self.ctrl1.x, self.ctrl1.y), (self.ctrl2.x, self.ctrl2.y), (self.x, self.y))
        else:
            pts = None
        return PathElement(self.cmd, pts)
class BezierPath(list):
    def __init__(self, path=None, **kwargs):
        """ A list of PathElements describing the curves and lines that make up the path.
            `path` may be another BezierPath (or a list/tuple of PathElements),
            whose elements are deep-copied into this one.
        """
        if isinstance(path, (BezierPath, list, tuple)):
            self.extend([pt.copy() for pt in path])
        self._kwargs = kwargs
        self._cache = None # Cached vertices for drawing.
        self._segments = None # Cached segment lengths.
        self._bounds = None # Cached bounding rectangle.
        self._polygon = None # Cached polygon hit test area.
        self._dirty = False
        self._index = {} # Cached PathElement -> list index lookups.

    def copy(self):
        # Deep copy: the constructor copies each PathElement.
        return BezierPath(self, **self._kwargs)
def append(self, pt):
self._dirty = True; list.append(self, pt)
def extend(self, points):
self._dirty = True; list.extend(self, points)
def insert(self, i, pt):
self._dirty = True; self._index={}; list.insert(self, i, pt)
def remove(self, pt):
self._dirty = True; self._index={}; list.remove(self, pt)
def pop(self, i):
self._dirty = True; self._index={}; list.pop(self, i)
def __setitem__(self, i, pt):
self._dirty = True; self._index={}; list.__setitem__(self, i, pt)
def __delitem__(self, i):
self._dirty = True; self._index={}; list.__delitem__(self, i)
def sort(self):
self._dirty = True; self._index={}; list.sort(self)
def reverse(self):
self._dirty = True; self._index={}; list.reverse(self)
def index(self, pt):
return self._index.setdefault(pt, list.index(self, pt))
    def _update(self):
        # Called from BezierPath.draw().
        # If points were added or removed, clear the cache.
        # Folds in (and resets) the per-element dirty flags.
        b = self._dirty
        for pt in self: b = b or pt._dirty; pt._dirty = False
        if b:
            if self._cache is not None:
                # flush() is defined elsewhere in this file; presumably it
                # releases the cached GL resources -- TODO confirm.
                if self._cache[0]: flush(self._cache[0])
                if self._cache[1]: flush(self._cache[1])
            self._cache = self._segments = self._bounds = self._polygon = None
            self._dirty = False
def moveto(self, x, y):
""" Adds a new point to the path at x, y.
"""
self.append(PathElement(MOVETO, ((x, y),)))
def lineto(self, x, y):
""" Adds a line from the previous point to x, y.
"""
self.append(PathElement(LINETO, ((x, y),)))
def curveto(self, x1, y1, x2, y2, x3, y3):
""" Adds a Bezier-curve from the previous point to x3, y3.
The curvature is determined by control handles x1, y1 and x2, y2.
"""
self.append(PathElement(CURVETO, ((x1, y1), (x2, y2), (x3, y3))))
    def arcto(self, x, y, radius=1, clockwise=True, short=False):
        """ Adds a number of Bezier-curves that draw an arc with the given radius to (x,y).
            The short parameter selects either the "long way" around or the "shortcut".
        """
        x0, y0 = self[-1].x, self[-1].y
        phi = geometry.angle(x0,y0,x,y)
        for p in bezier.arcto(x0, y0, radius, radius, phi, short, not clockwise, x, y):
            # bezier.arcto yields 2-tuples (line segments) or 6-tuples
            # (curve segments); dispatch accordingly (py2 and/or idiom).
            f = len(p) == 2 and self.lineto or self.curveto
            f(*p)
def closepath(self):
""" Adds a line from the previous point to the last MOVETO.
"""
self.append(PathElement(CLOSE))
    def rect(self, x, y, width, height, roundness=0.0):
        """ Adds a (rounded) rectangle to the path.
            Corner roundness can be given as a relative float or absolute int.
        """
        if roundness <= 0:
            self.moveto(x, y)
            self.lineto(x+width, y)
            self.lineto(x+width, y+height)
            self.lineto(x, y+height)
            self.lineto(x, y)
        else:
            if isinstance(roundness, int):
                # Absolute corner radius, clamped to half the rectangle size.
                r = min(roundness, width/2, height/2)
            else:
                # Relative roundness: a fraction (0.0-1.0) of the shortest side.
                r = min(width, height)
                r = min(roundness, 1) * r * 0.5
            # Trace the sides clockwise, rounding each corner with an arc.
            self.moveto(x+r, y)
            self.lineto(x+width-r, y)
            self.arcto(x+width, y+r, radius=r, clockwise=False)
            self.lineto(x+width, y+height-r)
            self.arcto(x+width-r, y+height, radius=r, clockwise=False)
            self.lineto(x+r, y+height)
            self.arcto(x, y+height-r, radius=r, clockwise=False)
            self.lineto(x, y+r)
            self.arcto(x+r, y, radius=r, clockwise=False)
    def ellipse(self, x, y, width, height):
        """ Adds an ellipse to the path.
            (x, y) is the center of the ellipse; width and height its diameters.
        """
        # Approximates the ellipse with four symmetric cubic curves.
        w, h = width*0.5, height*0.5
        k = 0.5522847498 # kappa: (-1 + sqrt(2)) / 3 * 4
        self.moveto(x, y-h) # http://www.whizkidtech.redprince.net/bezier/circle/
        self.curveto(x+w*k, y-h, x+w, y-h*k, x+w, y, )
        self.curveto(x+w, y+h*k, x+w*k, y+h, x, y+h)
        self.curveto(x-w*k, y+h, x-w, y+h*k, x-w, y, )
        self.curveto(x-w, y-h*k, x-w*k, y-h, x, y-h)
        self.closepath()
    oval = ellipse # Alias for ellipse.
    def arc(self, x, y, width, height, start=0, stop=90):
        """ Adds an arc to the path.
            The arc follows the ellipse defined by (x, y, width, height),
            with start and stop specifying what angle range to draw.
        """
        w, h = width*0.5, height*0.5
        for i, p in enumerate(bezier.arc(x-w, y-h, x+w, y+h, start, stop)):
            if i == 0:
                # Move to the arc's starting point first (p[:2]); every yielded
                # segment then contributes one curve (p[2:]).
                self.moveto(*p[:2])
            self.curveto(*p[2:])
    def smooth(self, *args, **kwargs):
        """ Smooths the path by making the curve handles colinear.
            With mode=EQUIDISTANT, the curve handles will be of equal (average) length.
        """
        e = BezierEditor(self)
        for i, pt in enumerate(self):
            # Pre-fill the point index memo so BezierEditor's index() lookups are O(1).
            self._index[pt] = i
            e.smooth(pt, *args, **kwargs)
    def flatten(self, precision=RELATIVE):
        """ Returns a list of contours, in which each contour is a list of (x,y)-tuples.
            The precision determines the number of straight lines to use as a substition for a curve.
            It can be a fixed number (int) or relative to the curve length (float or RELATIVE).
        """
        if precision == RELATIVE:
            precision = RELATIVE_PRECISION
        contours = [[]]
        x0, y0 = None, None # The current point (endpoint of the previous command).
        closeto = None      # The last MOVETO, used as the endpoint of a CLOSE.
        for pt in self:
            # Note: "x0 == y0 is None" is a chained comparison, i.e.
            # "x0 == y0 and y0 is None" - true only before any MOVETO.
            if (pt.cmd == LINETO or pt.cmd == CURVETO) and x0 == y0 is None:
                raise NoCurrentPointForPath
            elif pt.cmd == LINETO:
                contours[-1].append((x0, y0))
                contours[-1].append((pt.x, pt.y))
            elif pt.cmd == CURVETO:
                # Curves are interpolated from a number of straight line segments.
                # With relative precision, we use the (rough) curve length to determine the number of lines.
                x1, y1, x2, y2, x3, y3 = pt.ctrl1.x, pt.ctrl1.y, pt.ctrl2.x, pt.ctrl2.y, pt.x, pt.y
                if isinstance(precision, float):
                    n = int(max(0, precision) * bezier.curvelength(x0, y0, x1, y1, x2, y2, x3, y3, 3))
                else:
                    n = int(max(0, precision))
                if n > 0:
                    xi, yi = x0, y0
                    for i in range(n+1):
                        xj, yj, vx1, vy1, vx2, vy2 = bezier.curvepoint(float(i)/n, x0, y0, x1, y1, x2, y2, x3, y3)
                        contours[-1].append((xi, yi))
                        contours[-1].append((xj, yj))
                        xi, yi = xj, yj
            elif pt.cmd == MOVETO:
                contours.append([]) # Start a new contour.
                closeto = pt
            elif pt.cmd == CLOSE and closeto is not None:
                contours[-1].append((x0, y0))
                contours[-1].append((closeto.x, closeto.y))
            # Remember the endpoint for the next command.
            x0, y0 = pt.x, pt.y
        return contours
    def draw(self, precision=RELATIVE, **kwargs):
        """ Draws the path.
            The precision determines the number of straight lines to use as a substition for a curve.
            It can be a fixed number (int) or relative to the curve length (float or RELATIVE).
        """
        if len(kwargs) > 0:
            # Optional parameters in draw() overrule those set during initialization.
            kw = dict(self._kwargs)
            kw.update(kwargs)
            fill, stroke, strokewidth, strokestyle = color_mixin(**kw)
        else:
            fill, stroke, strokewidth, strokestyle = color_mixin(**self._kwargs)
        def _draw_fill(contours):
            # Drawing commands for the path fill (as triangles by tessellating the contours).
            v = geometry.tesselate(contours)
            glBegin(GL_TRIANGLES)
            for x, y in v:
                glVertex3f(x, y, 0)
            glEnd()
        def _draw_stroke(contours):
            # Drawing commands for the path stroke.
            for path in contours:
                glBegin(GL_LINE_STRIP)
                for x, y in path:
                    glVertex2f(x, y)
                glEnd()
        self._update() # Remove the cache if points were modified.
        # Cache layout: self._cache = [fill Display List, stroke Display List, precision].
        if self._cache is None \
        or self._cache[0] is None and fill \
        or self._cache[1] is None and stroke \
        or self._cache[-1] != precision:
            # Calculate and cache the vertices as Display Lists.
            # If the path requires a fill color, it will have to be tessellated.
            if self._cache is not None:
                if self._cache[0]: flush(self._cache[0])
                if self._cache[1]: flush(self._cache[1])
            contours = self.flatten(precision)
            self._cache = [None, None, precision]
            if fill : self._cache[0] = precompile(_draw_fill, contours)
            if stroke : self._cache[1] = precompile(_draw_stroke, contours)
        if fill is not None:
            glColor4f(fill[0], fill[1], fill[2], fill[3] * _alpha)
            glCallList(self._cache[0])
        if stroke is not None and strokewidth > 0:
            glColor4f(stroke[0], stroke[1], stroke[2], stroke[3] * _alpha)
            glLineWidth(strokewidth)
            glLineDash(strokestyle)
            glCallList(self._cache[1])
def angle(self, t):
""" Returns the directional angle at time t (0.0-1.0) on the path.
"""
# The directed() enumerator is much faster but less precise.
pt0, pt1 = t==0 and (self.point(t), self.point(t+0.001)) or (self.point(t-0.001), self.point(t))
return geometry.angle(pt0.x, pt0.y, pt1.x, pt1.y)
    def point(self, t):
        """ Returns the PathElement at time t (0.0-1.0) on the path.
            See the linear interpolation math in bezier.py.
        """
        # Segment lengths are cached; mutations reset _segments
        # (see _update() and addpoint()).
        if self._segments is None:
            self._segments = bezier.length(self, segmented=True, n=10)
        return bezier.point(self, t, segments=self._segments)
    def points(self, amount=2, start=0.0, end=1.0):
        """ Returns a list of PathElements along the path.
            To omit the last point on closed paths: end=1-1.0/amount
        """
        # Reuses the same cached segment lengths as point().
        if self._segments is None:
            self._segments = bezier.length(self, segmented=True, n=10)
        return bezier.points(self, amount, start, end, segments=self._segments)
    def addpoint(self, t):
        """ Inserts a new PathElement at time t (0.0-1.0) on the path.
        """
        # Inserting shifts indices and changes segment lengths, so drop both caches.
        self._segments = None
        self._index = {}
        return bezier.insert_point(self, t)
    split = addpoint # Alias.
@property
def length(self, precision=10):
""" Returns an approximation of the total length of the path.
"""
return bezier.length(self, segmented=False, n=precision)
    @property
    def contours(self):
        """ Returns a list of contours (i.e. segments separated by a MOVETO) in the path.
            Each contour is a BezierPath object.
        """
        return bezier.contours(self)
@property
def bounds(self, precision=100):
""" Returns a (x, y, width, height)-tuple of the approximate path dimensions.
"""
# In _update(), traverse all the points and check if they have changed.
# If so, the bounds must be recalculated.
self._update()
if self._bounds is None:
l = t = float( "inf")
r = b = float("-inf")
for pt in self.points(precision):
if pt.x < l: l = pt.x
if pt.y < t: t = pt.y
if pt.x > r: r = pt.x
if pt.y > b: b = pt.y
self._bounds = (l, t, r-l, b-t)
return self._bounds
    def contains(self, x, y, precision=100):
        """ Returns True when point (x,y) falls within the contours of the path.
            The precision is the number of points sampled to build the test polygon.
        """
        # Quick reject: the point must at least be inside the bounding box.
        bx, by, bw, bh = self.bounds
        if bx <= x <= bx+bw and \
           by <= y <= by+bh:
            # The flattened polygon is cached together with the precision it
            # was built at, so repeated queries are cheap.
            if self._polygon is None \
            or self._polygon[1] != precision:
                self._polygon = [(pt.x,pt.y) for pt in self.points(precision)], precision
            # Ray casting algorithm:
            return geometry.point_in_polygon(self._polygon[0], x, y)
        return False
def hash(self, state=None, decimal=1):
""" Returns the path id, based on the position and handles of its PathElements.
Two distinct BezierPath objects that draw the same path therefore have the same id.
"""
f = lambda x: int(x*10**decimal) # Format floats as strings with given decimal precision.
id = [state]
for pt in self: id.extend((
pt.cmd, f(pt.x), f(pt.y), f(pt.ctrl1.x), f(pt.ctrl1.y), f(pt.ctrl2.x), f(pt.ctrl2.y)))
id = str(id)
id = md5(id).hexdigest()
return id
def __repr__(self):
return "BezierPath(%s)" % repr(list(self))
    def __del__(self):
        # Note: it is important that __del__() is called since it unloads the cache from GPU.
        # BezierPath and PathElement should contain no circular references, e.g. no PathElement.parent.
        # The hasattr/flush guards protect against partially constructed objects
        # and against module globals already torn down at interpreter exit.
        if hasattr(self, "_cache") and self._cache is not None and flush:
            if self._cache[0]: flush(self._cache[0])
            if self._cache[1]: flush(self._cache[1])
def drawpath(path, **kwargs):
    """ Draws the given BezierPath (or list of PathElements).
        The current stroke, strokewidth and fill color are applied.
    """
    p = path if isinstance(path, BezierPath) else BezierPath(path)
    p.draw(**kwargs)
_autoclosepath = True # Whether endpath() automatically closes the path.
def autoclosepath(close=False):
    """ Paths constructed with beginpath() and endpath() are automatically closed.
    """
    global _autoclosepath
    _autoclosepath = close
_path = None # The path under construction between beginpath() and endpath().
def beginpath(x, y):
    """ Starts a new path at (x,y).
        The commands moveto(), lineto(), curveto() and closepath()
        can then be used between beginpath() and endpath() calls.
    """
    global _path
    p = BezierPath()
    p.moveto(x, y)
    _path = p
def moveto(x, y):
    """ Moves the current point in the current path to (x,y).
        Raises NoCurrentPath outside a beginpath()/endpath() block.
    """
    # Note: "is None", not truthiness - an empty BezierPath is a falsy list.
    if _path is None:
        raise NoCurrentPath
    _path.moveto(x, y)
def lineto(x, y):
    """ Draws a line from the current point in the current path to (x,y).
        Raises NoCurrentPath outside a beginpath()/endpath() block.
    """
    # Note: "is None", not truthiness - an empty BezierPath is a falsy list.
    if _path is None:
        raise NoCurrentPath
    _path.lineto(x, y)
def curveto(x1, y1, x2, y2, x3, y3):
    """ Draws a curve from the current point in the current path to (x3,y3).
        The curvature is determined by control handles x1, y1 and x2, y2.
        Raises NoCurrentPath outside a beginpath()/endpath() block.
    """
    if _path is None:
        raise NoCurrentPath
    _path.curveto(x1, y1, x2, y2, x3, y3)
def closepath():
    """ Closes the current path with a straight line to the last MOVETO.
        Raises NoCurrentPath outside a beginpath()/endpath() block.
    """
    if _path is None:
        raise NoCurrentPath
    _path.closepath()
def endpath(draw=True, **kwargs):
    """ Draws and returns the current path.
        With draw=False, only returns the path so it can be manipulated and drawn with drawpath().
    """
    # _autoclosepath is only read here, so it needs no global declaration.
    global _path
    if _path is None:
        raise NoCurrentPath
    if _autoclosepath is True:
        _path.closepath()
    if draw:
        _path.draw(**kwargs)
    p = _path
    _path = None
    return p
def findpath(points, curvature=1.0):
    """ Returns a smooth BezierPath from the given list of (x,y)-tuples.
        Curvature (0.0-1.0) is passed through to the bezier module's path fitting.
    """
    return bezier.findpath(list(points), curvature)
Path = BezierPath # Alias.
#--- BEZIER EDITOR -----------------------------------------------------------------------------------
EQUIDISTANT = "equidistant"
IN, OUT, BOTH = "in", "out", "both" # Drag pt1.ctrl2, pt2.ctrl1 or both simultaneously?
class BezierEditor:
    """ Utility that manipulates the control handles of points in a BezierPath.
    """
    def __init__(self, path):
        self.path = path
    def _nextpoint(self, pt):
        # Returns the point after pt in the path, or None if pt is the last one.
        i = self.path.index(pt) # BezierPath caches this operation.
        return i < len(self.path)-1 and self.path[i+1] or None
    def translate(self, pt, x=0, y=0, h1=(0,0), h2=(0,0)):
        """ Translates the point and its control handles by (x,y).
            Translates the incoming handle by h1 and the outgoing handle by h2.
        """
        pt1, pt2 = pt, self._nextpoint(pt)
        pt1.x += x
        pt1.y += y
        pt1.ctrl2.x += x + h1[0]
        pt1.ctrl2.y += y + h1[1]
        if pt2 is not None:
            # The outgoing handle lives on the next point; h2 only applies to curves.
            pt2.ctrl1.x += x + (pt2.cmd == CURVETO and h2[0] or 0)
            pt2.ctrl1.y += y + (pt2.cmd == CURVETO and h2[1] or 0)
    def rotate(self, pt, angle, handle=BOTH):
        """ Rotates the point control handles by the given angle.
        """
        pt1, pt2 = pt, self._nextpoint(pt)
        if handle == BOTH or handle == IN:
            pt1.ctrl2.x, pt1.ctrl2.y = geometry.rotate(pt1.ctrl2.x, pt1.ctrl2.y, pt1.x, pt1.y, angle)
        # Bug fix: the condition used to read "BOTH or OUT and pt2 is not None and ...",
        # so handle=BOTH bypassed the guards and crashed on the last point (pt2 is None).
        if (handle == BOTH or handle == OUT) and pt2 is not None and pt2.cmd == CURVETO:
            pt2.ctrl1.x, pt2.ctrl1.y = geometry.rotate(pt2.ctrl1.x, pt2.ctrl1.y, pt1.x, pt1.y, angle)
    def scale(self, pt, v, handle=BOTH):
        """ Scales the point control handles by the given factor.
        """
        pt1, pt2 = pt, self._nextpoint(pt)
        if handle == BOTH or handle == IN:
            pt1.ctrl2.x, pt1.ctrl2.y = bezier.linepoint(v, pt1.x, pt1.y, pt1.ctrl2.x, pt1.ctrl2.y)
        # Same precedence fix as in rotate().
        if (handle == BOTH or handle == OUT) and pt2 is not None and pt2.cmd == CURVETO:
            pt2.ctrl1.x, pt2.ctrl1.y = bezier.linepoint(v, pt1.x, pt1.y, pt2.ctrl1.x, pt2.ctrl1.y)
    def smooth(self, pt, mode=None, handle=BOTH):
        """ Makes the handles around pt colinear, smoothing the curvature.
            With mode=EQUIDISTANT, both handles also get the same (average) length.
        """
        pt1, pt2, i = pt, self._nextpoint(pt), self.path.index(pt)
        if pt2 is None:
            return
        if pt1.cmd == pt2.cmd == CURVETO:
            if mode == EQUIDISTANT:
                d1 = d2 = 0.5 * (
                    geometry.distance(pt1.x, pt1.y, pt1.ctrl2.x, pt1.ctrl2.y) + \
                    geometry.distance(pt1.x, pt1.y, pt2.ctrl1.x, pt2.ctrl1.y))
            else:
                d1 = geometry.distance(pt1.x, pt1.y, pt1.ctrl2.x, pt1.ctrl2.y)
                d2 = geometry.distance(pt1.x, pt1.y, pt2.ctrl1.x, pt2.ctrl1.y)
            # Pick the alignment angle from the handle(s) being dragged.
            if handle == IN:
                a = geometry.angle(pt1.x, pt1.y, pt1.ctrl2.x, pt1.ctrl2.y)
            if handle == OUT:
                a = geometry.angle(pt2.ctrl1.x, pt2.ctrl1.y, pt1.x, pt1.y)
            if handle == BOTH:
                a = geometry.angle(pt2.ctrl1.x, pt2.ctrl1.y, pt1.ctrl2.x, pt1.ctrl2.y)
            pt1.ctrl2.x, pt1.ctrl2.y = geometry.coordinates(pt1.x, pt1.y, d1, a)
            pt2.ctrl1.x, pt2.ctrl1.y = geometry.coordinates(pt1.x, pt1.y, d2, a-180)
        elif pt1.cmd == CURVETO and pt2.cmd == LINETO:
            # Align the incoming handle with the following straight segment.
            d = mode == EQUIDISTANT and \
                geometry.distance(pt1.x, pt1.y, pt2.x, pt2.y) or \
                geometry.distance(pt1.x, pt1.y, pt1.ctrl2.x, pt1.ctrl2.y)
            a = geometry.angle(pt1.x, pt1.y, pt2.x, pt2.y)
            pt1.ctrl2.x, pt1.ctrl2.y = geometry.coordinates(pt1.x, pt1.y, d, a-180)
        elif pt1.cmd == LINETO and pt2.cmd == CURVETO and i > 0:
            # Align the outgoing handle with the preceding straight segment.
            d = mode == EQUIDISTANT and \
                geometry.distance(pt1.x, pt1.y, self.path[i-1].x, self.path[i-1].y) or \
                geometry.distance(pt1.x, pt1.y, pt2.ctrl1.x, pt2.ctrl1.y)
            a = geometry.angle(self.path[i-1].x, self.path[i-1].y, pt1.x, pt1.y)
            pt2.ctrl1.x, pt2.ctrl1.y = geometry.coordinates(pt1.x, pt1.y, d, a)
#--- POINT ANGLES ------------------------------------------------------------------------------------
def directed(points):
    """ Returns an iterator that yields (angle, point)-tuples for the given list of points.
        The angle represents the direction of the point on the path.
        This works with BezierPath, Bezierpath.points, [pt1, pt2, pt2, ...]
        For example:
        for a, pt in directed(path.points(30)):
            push()
            translate(pt.x, pt.y)
            rotate(a)
            arrow(0, 0, 10)
            pop()
        This is useful if you want to have shapes following a path.
        To put text on a path, rotate the angle by +-90 to get the normal (i.e. perpendicular).
    """
    p = list(points)
    n = len(p)
    for i, pt in enumerate(p):
        if 0 < i < n-1 and pt.__dict__.get("_cmd") == CURVETO:
            # For a point on a curve, the control handle gives the best direction.
            # For PathElement (fixed point in BezierPath), ctrl2 tells us how the curve arrives.
            # For DynamicPathElement (returned from BezierPath.point()), ctrl1 tells how the curve arrives.
            ctrl = isinstance(pt, bezier.DynamicPathElement) and pt.ctrl1 or pt.ctrl2
            angle = geometry.angle(ctrl.x, ctrl.y, pt.x, pt.y)
        elif 0 < i < n-1 and pt.__dict__.get("_cmd") == LINETO and p[i-1].__dict__.get("_cmd") == CURVETO:
            # For a point on a line preceded by a curve, look ahead gives better results.
            angle = geometry.angle(pt.x, pt.y, p[i+1].x, p[i+1].y)
        elif i == 0 and isinstance(points, BezierPath):
            # For the first point in a BezierPath, we can calculate a next point very close by.
            pt1 = points.point(0.001)
            angle = geometry.angle(pt.x, pt.y, pt1.x, pt1.y)
        elif i == n-1 and isinstance(points, BezierPath):
            # For the last point in a BezierPath, we can calculate a previous point very close by.
            pt0 = points.point(0.999)
            angle = geometry.angle(pt0.x, pt0.y, pt.x, pt.y)
        elif i == n-1 and isinstance(pt, bezier.DynamicPathElement) and (pt.ctrl1.x != pt.x or pt.ctrl1.y != pt.y):
            # For the last point in BezierPath.points(), use incoming handle (ctrl1) for curves.
            # Bug fix: the inequality pair is now parenthesized; the old
            # "a and b and c or d" form made this branch fire for points that
            # are NOT last, and evaluated pt.ctrl1 on objects without handles.
            angle = geometry.angle(pt.ctrl1.x, pt.ctrl1.y, pt.x, pt.y)
        elif 0 < i:
            # For any point, look back gives a good result, if enough points are given.
            angle = geometry.angle(p[i-1].x, p[i-1].y, pt.x, pt.y)
        elif i < n-1:
            # For the first point, the best (only) guess is the location of the next point.
            angle = geometry.angle(pt.x, pt.y, p[i+1].x, p[i+1].y)
        else:
            angle = 0
        yield angle, pt
#--- CLIPPING PATH -----------------------------------------------------------------------------------
class ClippingMask:
    # A stand-in drawable for beginclip(): any object with a
    # draw(fill, stroke) method can serve as a clipping mask.
    def draw(self, fill=(0,0,0,1), stroke=None):
        pass
def beginclip(path):
    """ Enables the given BezierPath (or ClippingMask) as a clipping mask.
        Drawing commands between beginclip() and endclip() are constrained to the shape of the path.
    """
    # Enable the stencil buffer to limit the area of rendering (stenciling).
    glClear(GL_STENCIL_BUFFER_BIT)
    glEnable(GL_STENCIL_TEST)
    glStencilFunc(GL_NOTEQUAL, 0, 0)
    glStencilOp(GL_INCR, GL_INCR, GL_INCR)
    # Shouldn't depth testing be disabled when stencilling?
    # In any case, if it is, transparency doesn't work.
    #glDisable(GL_DEPTH_TEST)
    path.draw(fill=(0,0,0,1), stroke=None) # Disregard color settings; always use a black mask.
    #glEnable(GL_DEPTH_TEST)
    # From here on, only render where the stencil buffer equals 1 (inside the mask).
    glStencilFunc(GL_EQUAL, 1, 1)
    glStencilOp(GL_KEEP, GL_KEEP, GL_KEEP)
def endclip():
    # Disables the stencil test set up by beginclip().
    glDisable(GL_STENCIL_TEST)
#--- SUPERSHAPE --------------------------------------------------------------------------------------
def supershape(x, y, width, height, m, n1, n2, n3, points=100, percentage=1.0, range=2*pi, **kwargs):
    """ Returns a BezierPath constructed using the superformula,
        which can be used to describe many complex shapes and curves that are found in nature.
    """
    path = BezierPath()
    # Only the first (points * percentage) samples are drawn.
    cutoff = points * percentage
    for i in xrange(points):
        if i > cutoff:
            continue
        dx, dy = geometry.superformula(m, n1, n2, n3, i*range/points)
        dx, dy = dx*width/2 + x, dy*height/2 + y
        if len(path) == 0:
            # The very first sample starts the contour.
            path.moveto(dx, dy)
        else:
            path.lineto(dx, dy)
    path.closepath()
    if kwargs.get("draw", True):
        path.draw(**kwargs)
    return path
#=====================================================================================================
#--- IMAGE -------------------------------------------------------------------------------------------
# Textures and quad vertices are cached for performance.
# Textures remain in cache for the duration of the program.
# Quad vertices are cached as Display Lists and destroyed when the Image object is deleted.
# For optimal performance, images should be created once (not every frame) and its quads left unmodified.
# Performance should be comparable to (moving) pyglet.Sprites drawn in a batch.
pow2 = [2**n for n in range(20)] # [1, 2, 4, 8, 16, 32, 64, ...]
def ceil2(x):
    """ Returns the nearest power of 2 that is >= x, e.g. 700 => 1024.
        Values of x <= 1 yield 1.
    """
    # Generalized: the old lookup in pow2 silently returned None for
    # x > 2**19; doubling until we reach x works for any size and returns
    # identical results for all previously supported inputs.
    y = 1
    while y < x:
        y <<= 1
    return y
class ImageError(Exception):
    # Raised when an image can't be loaded, or an unknown image type is given.
    pass
_texture_cache = {} # pyglet.Texture referenced by filename.
_texture_cached = {} # pyglet.Texture.id is in keys once the image has been cached.
def texture(img, data=None):
    """ Returns a (cached) texture from the given image filename or byte data.
        When a Image or Pixels object is given, returns the associated texture.
        The optional data parameter holds raw image bytes (used when img is not
        a recognized type).
    """
    # Image texture stored in cache, referenced by file path (or a custom id defined with cache()).
    if isinstance(img, (basestring, int)) and img in _texture_cache:
        return _texture_cache[img]
    # Image file path, load it, cache it, return texture.
    if isinstance(img, basestring):
        try: cache(img, pyglet.image.load(img).get_texture())
        except IOError:
            raise ImageError, "can't load image from %s" % repr(img)
        return _texture_cache[img]
    # Image texture, return original.
    if isinstance(img, pyglet.image.Texture):
        return img
    # Image object, return image texture.
    # (if you use this to create a new image, the new image will do expensive caching as well).
    if isinstance(img, Image):
        return img.texture
    # Pixels object, return pixel texture.
    if isinstance(img, Pixels):
        return img.texture
    # Pyglet image data.
    if isinstance(img, pyglet.image.ImageData):
        return img.texture
    # Image data as byte string, load it, return texture.
    if isinstance(data, basestring):
        return pyglet.image.load("", file=StringIO(data)).get_texture()
    # Don't know how to handle this image.
    raise ImageError, "unknown image type: %s" % repr(img.__class__)
def cache(id, texture):
    """ Store the given texture in cache, referenced by id (which can then be passed to image()).
        This is useful for procedurally rendered images (which are not stored in cache by default).
    """
    if isinstance(texture, (Image, Pixels)):
        texture = texture.texture
    if not isinstance(texture, pyglet.image.Texture):
        raise ValueError, "can only cache texture, not %s" % repr(texture.__class__.__name__)
    # Keep a reverse mapping (texture id => cache id) for cached() lookups.
    _texture_cache[id] = texture
    _texture_cached[_texture_cache[id].id] = id
def cached(texture):
    """ Returns the cache id if the texture has been cached (None otherwise).
        Accepts an Image, Pixels, pyglet Texture, or a cache id (str/int).
    """
    if isinstance(texture, (Image, Pixels)):
        texture = texture.texture
    if isinstance(texture, pyglet.image.Texture):
        # Bug fix: the lookup used "texture.texture.id", but cache() stores the
        # reverse mapping keyed on Texture.id, so the extra attribute hop is wrong.
        return _texture_cached.get(texture.id)
    if isinstance(texture, (basestring, int)):
        return texture in _texture_cache and texture or None
    return None
def _render(texture, quad=(0,0,0,0,0,0,0,0)):
    """ Renders the texture on the canvas inside a quadtriliteral (i.e. rectangle).
        The quadriliteral can be distorted by giving corner offset coordinates.
    """
    t = texture.tex_coords # power-2 dimensions
    w = texture.width # See Pyglet programming guide -> OpenGL imaging.
    h = texture.height
    dx1, dy1, dx2, dy2, dx3, dy3, dx4, dy4 = quad or (0,0,0,0,0,0,0,0)
    glEnable(texture.target)
    glBindTexture(texture.target, texture.id)
    glBegin(GL_QUADS)
    # One vertex per corner of the w x h rectangle; the dx/dy values are
    # per-corner offsets (see the Quad class). Vertices are emitted in the
    # order 4, 3, 2, 1 relative to the offset numbering.
    glTexCoord3f(t[0], t[1], t[2] ); glVertex3f(dx4, dy4, 0)
    glTexCoord3f(t[3], t[4], t[5] ); glVertex3f(dx3+w, dy3, 0)
    glTexCoord3f(t[6], t[7], t[8] ); glVertex3f(dx2+w, dy2+h, 0)
    glTexCoord3f(t[9], t[10], t[11]); glVertex3f(dx1, dy1+h, 0)
    glEnd()
    glDisable(texture.target)
class Quad(list):
    def __init__(self, dx1=0, dy1=0, dx2=0, dy2=0, dx3=0, dy3=0, dx4=0, dy4=0):
        """ Describes the four-sided polygon on which an image texture is "mounted".
            This is a quadrilateral (four sides) of which the vertices do not necessarily
            have a straight angle (i.e. the corners can be distorted).
            Stored as a flat list of 8 corner offsets: dx1, dy1, ..., dx4, dy4.
        """
        list.__init__(self, (dx1, dy1, dx2, dy2, dx3, dy3, dx4, dy4))
        self._dirty = True # Image objects poll Quad._dirty to check if the image cache is outdated.
    def copy(self):
        return Quad(*self)
    def reset(self):
        # Back to an undistorted rectangle (all corner offsets zero).
        list.__init__(self, (0,0,0,0,0,0,0,0))
        self._dirty = True
    def __setitem__(self, i, v):
        list.__setitem__(self, i, v)
        self._dirty = True
    # Corner offsets exposed as dx1..dy4 properties.
    # The setters route through __setitem__ so that the dirty flag is raised.
    def _get_dx1(self): return self[0]
    def _get_dy1(self): return self[1]
    def _get_dx2(self): return self[2]
    def _get_dy2(self): return self[3]
    def _get_dx3(self): return self[4]
    def _get_dy3(self): return self[5]
    def _get_dx4(self): return self[6]
    def _get_dy4(self): return self[7]
    def _set_dx1(self, v): self[0] = v
    def _set_dy1(self, v): self[1] = v
    def _set_dx2(self, v): self[2] = v
    def _set_dy2(self, v): self[3] = v
    def _set_dx3(self, v): self[4] = v
    def _set_dy3(self, v): self[5] = v
    def _set_dx4(self, v): self[6] = v
    def _set_dy4(self, v): self[7] = v
    dx1 = property(_get_dx1, _set_dx1)
    dy1 = property(_get_dy1, _set_dy1)
    dx2 = property(_get_dx2, _set_dx2)
    dy2 = property(_get_dy2, _set_dy2)
    dx3 = property(_get_dx3, _set_dx3)
    dy3 = property(_get_dy3, _set_dy3)
    dx4 = property(_get_dx4, _set_dx4)
    dy4 = property(_get_dy4, _set_dy4)
class Image(object):
    def __init__(self, path, x=0, y=0, width=None, height=None, alpha=1.0, data=None):
        """ A texture that can be drawn at a given position.
            The quadrilateral in which the texture is drawn can be distorted (slow, image cache is flushed).
            The image can be resized, colorized and its opacity can be set.
        """
        self._src = (path, data) # Original source arguments, reused by copy().
        self._texture = texture(path, data=data)
        self._cache = None # Display List with the precompiled quad vertices.
        self.x = x
        self.y = y
        self.width = width or self._texture.width # Scaled width, Image.texture.width yields original width.
        self.height = height or self._texture.height # Scaled height, Image.texture.height yields original height.
        self.quad = Quad()
        self.color = Color(1.0, 1.0, 1.0, alpha)
    def copy(self, texture=None, width=None, height=None):
        """ Returns a copy of the image, optionally with another texture and/or size.
        """
        img = texture is None \
          and self.__class__(self._src[0], data=self._src[1]) \
           or self.__class__(texture)
        img.x = self.x
        img.y = self.y
        img.width = self.width
        img.height = self.height
        img.quad = self.quad.copy()
        img.color = self.color.copy()
        if width is not None:
            img.width = width
        if height is not None:
            img.height = height
        return img
    @property
    def id(self):
        # The id of the underlying OpenGL texture.
        return self._texture.id
    @property
    def texture(self):
        return self._texture
    def _get_xy(self):
        return (self.x, self.y)
    def _set_xy(self, xy):
        # Changed from a Python 2-only tuple parameter "(x,y)" to explicit
        # unpacking; behavior is identical.
        self.x, self.y = xy
    xy = property(_get_xy, _set_xy)
    def _get_size(self):
        return (self.width, self.height)
    def _set_size(self, wh):
        self.width, self.height = wh
    size = property(_get_size, _set_size)
    def _get_alpha(self):
        return self.color[3]
    def _set_alpha(self, v):
        self.color[3] = v
    alpha = property(_get_alpha, _set_alpha)
    def distort(self, dx1=0, dy1=0, dx2=0, dy2=0, dx3=0, dy3=0, dx4=0, dy4=0):
        """ Adjusts the four-sided polygon on which an image texture is "mounted",
            by incrementing the corner coordinates with the given values.
        """
        for i, v in enumerate((dx1, dy1, dx2, dy2, dx3, dy3, dx4, dy4)):
            if v != 0:
                self.quad[i] += v
    def adjust(self, r=1.0, g=1.0, b=1.0, a=1.0):
        """ Adjusts the image color by multiplying R,G,B,A channels with the given values.
        """
        # Bug fix: "self" was missing from the signature, so img.adjust(...)
        # bound the instance to r and raised a NameError on self.color.
        self.color[0] *= r
        self.color[1] *= g
        self.color[2] *= b
        self.color[3] *= a
    def draw(self, x=None, y=None, width=None, height=None, alpha=None, color=None, filter=None):
        """ Draws the image.
            The given parameters (if any) override the image's attributes.
        """
        # Calculate and cache the quad vertices as a Display List.
        # If the quad has changed, update the cache.
        if self._cache is None or self.quad._dirty:
            flush(self._cache)
            self._cache = precompile(_render, self._texture, self.quad)
            self.quad._dirty = False
        # Given parameters override Image attributes.
        if x is None:
            x = self.x
        if y is None:
            y = self.y
        if width is None:
            width = self.width
        if height is None:
            height = self.height
        # An RGB color gets an opaque alpha channel appended.
        if color and len(color) < 4:
            color = color[0], color[1], color[2], 1.0
        if color is None:
            color = self.color
        if alpha is not None:
            color = color[0], color[1], color[2], alpha
        if filter:
            filter.texture = self._texture # Register the current texture with the filter.
            filter.push()
        # Round position (x,y) to nearest integer to avoid sub-pixel rendering.
        # This ensures there are no visual artefacts on transparent borders (e.g. the "white halo").
        # Halo can also be avoided by overpainting in the source image, but this requires some work:
        # http://technology.blurst.com/remove-white-borders-in-transparent-textures/
        x = round(x)
        y = round(y)
        w = float(width) / self._texture.width
        h = float(height) / self._texture.height
        # Transform and draw the quads.
        glPushMatrix()
        glTranslatef(x, y, 0)
        glScalef(w, h, 0)
        glColor4f(color[0], color[1], color[2], color[3] * _alpha)
        glCallList(self._cache)
        glPopMatrix()
        if filter:
            filter.pop()
    def save(self, path):
        """ Exports the image as a PNG-file.
        """
        self._texture.save(path)
    def __repr__(self):
        return "%s(x=%.1f, y=%.1f, width=%.1f, height=%.1f, alpha=%.2f)" % (
            self.__class__.__name__, self.x, self.y, self.width, self.height, self.alpha)
    def __del__(self):
        # Free the cached Display List from the GPU.
        # The broad guard is deliberate: module globals (e.g. flush) may be
        # gone already at interpreter shutdown.
        try:
            if hasattr(self, "_cache") and self._cache is not None and flush:
                flush(self._cache)
        except:
            pass
_IMAGE_CACHE = 200 # Maximum number of on-the-fly Image objects kept alive.
_image_cache = {} # Image object referenced by Image.texture.id.
_image_queue = [] # Most recent id's are at the front of the list.
def image(img, x=None, y=None, width=None, height=None,
          alpha=None, color=None, filter=None, data=None, draw=True):
    """ Draws the image at (x,y), scaling it to the given width and height.
        The image's transparency can be set with alpha (0.0-1.0).
        Applies the given color adjustment, quad distortion and filter (one filter can be specified).
        Note: with a filter enabled, alpha and color will not be applied.
        This is because the filter overrides the default drawing behavior with its own.
    """
    if not isinstance(img, Image):
        # If the given image is not an Image object, create one on the fly.
        # This object is cached for reuse.
        # The cache has a limited size (200), so the oldest Image objects are deleted.
        t = texture(img, data=data)
        if t.id in _image_cache:
            img = _image_cache[t.id]
        else:
            img = Image(img, data=data)
            _image_cache[img.texture.id] = img
            _image_queue.insert(0, img.texture.id)
            # Evict entries beyond the limit: oldest ids sit at the back of the queue.
            for id in reversed(_image_queue[_IMAGE_CACHE:]):
                del _image_cache[id]
                del _image_queue[-1]
    # Draw the image.
    if draw:
        img.draw(x, y, width, height, alpha, color, filter)
    return img
def imagesize(img):
    """ Returns a (width, height)-tuple with the image dimensions.
    """
    tex = texture(img)
    return (tex.width, tex.height)
def crop(img, x=0, y=0, width=None, height=None):
    """ Returns the given (x, y, width, height)-region from the image.
        Use this to pass cropped image files to image().
    """
    t = texture(img)
    if width is None: width = t.width
    if height is None: height = t.height
    # Clamp the region to the texture borders.
    t = t.get_region(x, y, min(t.width-x, width), min(t.height-y, height))
    if isinstance(img, Image):
        # Cleanup: an intermediate img.copy(texture=t) used to be built and
        # thrown away; a single copy with the new texture and size produces
        # the exact same result object.
        return img.copy(texture=t, width=t.width, height=t.height)
    if isinstance(img, Pixels):
        return Pixels(t)
    if isinstance(img, pyglet.image.Texture):
        return t
    return Image(t)
#--- PIXELS ------------------------------------------------------------------------------------------
class Pixels(list):
    def __init__(self, img):
        """ A list of RGBA color values (0-255) for each pixel in the given image.
            The Pixels object can be passed to the image() command.
        """
        self._img = texture(img).get_image_data()
        # A negative pitch means the pixels are stored top-to-bottom row.
        self._flipped = self._img.pitch >= 0
        # Data yields a byte array if no conversion (e.g. BGRA => RGBA) was necessary,
        # or a byte string otherwise - which needs to be converted to a list of ints.
        data = self._img.get_data("RGBA", self._img.width*4 * (-1,1)[self._flipped])
        if isinstance(data, str):
            data = map(ord, list(data))
        # Some formats seem to store values from -1 to -256.
        data = [(256+v)%256 for v in data]
        self.array = data # Flat list: 4 channel ints (R,G,B,A) per pixel.
        self._texture = None # Rebuilt lazily by update() / the texture property.
    @property
    def width(self):
        return self._img.width
    @property
    def height(self):
        return self._img.height
    @property
    def size(self):
        return (self.width, self.height)
    def __len__(self):
        # Number of pixels (the flat array holds 4 channel values per pixel).
        return len(self.array) / 4
    def __iter__(self):
        for i in xrange(len(self)):
            yield self[i]
    def __getitem__(self, i):
        """ Returns a list of R,G,B,A channel values between 0-255 from pixel i.
            Users need to wrap the list in a Color themselves for performance.
            - r,g,b,a = Pixels[i]
            - clr = color(Pixels[i], base=255)
        """
        return self.array[i*4:i*4+4]
    def __setitem__(self, i, v):
        """ Sets pixel i to the given R,G,B,A values.
            Users need to unpack a Color themselves for performance,
            and are responsible for keeping channel values between 0 and 255
            (otherwise an error will occur when Pixels.update() is called),
            - Pixels[i] = r,g,b,a
            - Pixels[i] = clr.map(base=255)
        """
        for j in range(4):
            self.array[i*4+j] = v[j]
    def __getslice__(self, i, j):
        return [self[i+n] for n in xrange(j-i)]
    def __setslice__(self, i, j, seq):
        for n in xrange(j-i):
            self[i+n] = seq[n]
    def map(self, function):
        """ Applies a function to each pixel.
            Function takes a list of R,G,B,A channel values and must return a similar list.
        """
        for i in xrange(len(self)):
            self[i] = function(self[i])
    def get(self, i, j):
        """ Returns the pixel at column i, row j as a Color object
            (the pixel index used is i + j * width).
        """
        if 0 <= i < self.width and 0 <= j < self.height:
            return color(self[i+j*self.width], base=255)
    def set(self, i, j, clr):
        """ Sets the pixel at column i, row j from a Color object.
        """
        if 0 <= i < self.width and 0 <= j < self.height:
            self[i+j*self.width] = clr.map(base=255)
    def update(self):
        """ Pixels.update() must be called to refresh the image.
        """
        # Serialize the channel values back into a byte string and rebuild the texture.
        data = self.array
        data = "".join(map(chr, data))
        self._img.set_data("RGBA", self._img.width*4*(-1,1)[self._flipped], data)
        self._texture = self._img.get_texture()
    @property
    def texture(self):
        if self._texture is None:
            self.update()
        return self._texture
    def copy(self):
        return Pixels(self.texture)
    def __repr__(self):
        return "%s(width=%.1f, height=%.1f)" % (
            self.__class__.__name__, self.width, self.height)
pixels = Pixels # Lowercase alias, used as a command: pixels(img).
#--- ANIMATION ---------------------------------------------------------------------------------------
# A sequence of images displayed in a loop.
# Useful for storing pre-rendered effect frames like explosions etc.
class Animation(list):
    def __init__(self, images=[], duration=None, loop=False, **kwargs):
        """ Constructs an animation loop from the given image frames.
            The duration specifies the time for the entire animation to run.
            Animations are useful to cache effects like explosions,
            that have for example been prepared in an offscreen buffer.
        """
        list.__init__(self, list(images))
        self.duration = duration # Duration of the entire animation.
        self.loop = loop # Loop from last frame to first frame?
        self._i = -1 # Frame counter.
        self._t = Transition(0, interpolation=kwargs.get("interpolation", LINEAR))
    def copy(self, **kwargs):
        """ Returns a copy of the animation; duration, loop and interpolation
            can be overridden with keyword arguments.
        """
        return Animation(self,
            duration = kwargs.get("duration", self.duration),
            loop = kwargs.get("loop", self.loop),
            interpolation = self._t._interpolation)
    def update(self):
        """ Advances the frame counter (restarting at the first frame when looping).
        """
        if self.duration is not None:
            # With a duration,
            # skip to a next frame so that the entire animation takes the given time.
            if self._i < 0 or self.loop and self._i == len(self)-1:
                # (Re)start a transition from frame 0 to the last frame.
                self._t.set(0, 0)
                self._t.update()
                self._t.set(len(self)-1, self.duration)
                self._t.update()
            self._i = int(self._t.current)
        else:
            # Without a duration,
            # Animation.update() simply moves to the next frame.
            if self._i < 0 or self.loop and self._i == len(self)-1:
                self._i = -1
            self._i = min(self._i+1, len(self)-1)
    @property
    def frames(self):
        # The animation itself is the list of frames.
        return self
    @property
    def frame(self):
        # The current frame Image, or None for an empty animation.
        # Bug fix: the bare "except:" also hid unrelated errors;
        # only IndexError (empty list) is expected here.
        try:
            return self[self._i]
        except IndexError:
            return None
    @property
    def done(self):
        # Yields True when the animation has stopped (or hasn't started).
        return self.loop is False and self._i == len(self)-1
    def draw(self, *args, **kwargs):
        """ Draws the current frame (if any) with image().
        """
        if not self.done:
            image(self.frame, *args, **kwargs)
    def __repr__(self):
        return "%s(frames=%i, duration=%s)" % (
            self.__class__.__name__, len(self), repr(self.duration))
animation = Animation # Lowercase alias, used as a command.
#--- OFFSCREEN RENDERING -----------------------------------------------------------------------------
# Offscreen buffers can be used to render images from paths etc.
# or to apply filters on images before drawing them to the screen.
# There are several ways to draw offscreen:
# - render(img, filter): applies the given filter to the image and returns it.
# - procedural(function, width, height): execute the drawing commands in function inside an image.
# - Create your own subclass of OffscreenBuffer with a draw() method:
# class MyBuffer(OffscreenBuffer):
# def draw(self): pass
# - Define drawing commands between OffscreenBuffer.push() and pop():
# b = MyBuffer()
# b.push()
# # drawing commands
# b.pop()
# img = Image(b.render())
#
# The shader.py module already defines several filters that use an offscreen buffer, for example:
# blur(), adjust(), multiply(), twirl(), ...
#
# The less you change about an offscreen buffer, the faster it runs.
# This includes switching it on and off and changing its size.
from shader import *
#=====================================================================================================
#--- FONT --------------------------------------------------------------------------------------------
def install_font(ttf):
    """ Loads the given TrueType font file into pyglet; returns True on success.
    """
    try:
        pyglet.font.add_file(ttf)
    except:
        # This might fail with Carbon on 64-bit Mac systems.
        # Fonts can be installed on the system manually if this is the case.
        return False
    return True
# Load the platform-independent fonts shipped with NodeBox.
# The default font is Droid (licensed under Apache 2.0).
# Install every font bundled in nodebox/font (the Droid family);
# fall back to Arial when the folder is missing or installation fails.
try:
    for f in glob(path.join(path.dirname(__file__), "..", "font", "*")):
        install_font(f)
    DEFAULT_FONT = "Droid Sans"
except:
    DEFAULT_FONT = "Arial"
# Font weight constants.
NORMAL = "normal"
BOLD = "bold"
ITALIC = "italic"
# Text alignment constants.
LEFT = "left"
RIGHT = "right"
CENTER = "center"
# Module-level text state, read by font_mixin() when text() is called.
_fonts = [] # Custom fonts loaded from file.
_fontname = DEFAULT_FONT # Current state font name.
_fontsize = 12 # Current state font size.
_fontweight = [False, False] # Current state font weight (bold, italic).
_lineheight = 1.0 # Current state text lineheight.
_align = LEFT # Current state text alignment (LEFT/RIGHT/CENTER).
def font(fontname=None, fontsize=None, fontweight=None, file=None):
    """ Sets the current font and/or fontsize, and returns the current font name.
        If a filename is given, that font file is loaded first.
        The fontweight argument is passed through to fontweight().
    """
    global _fontname, _fontsize
    if file is not None and file not in _fonts:
        # Register the font file once; install_font() loads it into pyglet.
        _fonts.append(file)
        install_font(file)
    if fontname is not None:
        _fontname = fontname
    if fontsize is not None:
        _fontsize = fontsize
    if fontweight is not None:
        # _fontweight_() is just an alias for fontweight().
        _fontweight_(fontweight)
    return _fontname
def fontname(name=None):
    """ Sets the current font used when drawing text.
        Returns the current font name.
    """
    global _fontname
    if name is None:
        return _fontname
    _fontname = name
    return _fontname
def fontsize(size=None):
    """ Sets the current fontsize in points.
        Returns the current fontsize.
    """
    global _fontsize
    if size is None:
        return _fontsize
    _fontsize = size
    return _fontsize
def fontweight(*args, **kwargs):
    """ Sets the current font weight.
        You can supply NORMAL, BOLD and/or ITALIC or set named parameters bold=True and/or italic=True.
        Returns the current [bold, italic] state.
    """
    global _fontweight
    # NOTE(review): args is always a tuple here, so the isinstance() test is
    # always true and the check reduces to len(args) == 1 — a single argument
    # is unwrapped, after which `BOLD in args` becomes a *substring* test when
    # that argument is a string (e.g. "bold italic" sets both flags).
    # Confirm this is relied upon before tightening the check.
    if len(args) == 1 and isinstance(args, (list, tuple)):
        args = args[0]
    if NORMAL in args:
        _fontweight = [False, False]
    if BOLD in args or kwargs.get(BOLD):
        _fontweight[0] = True
    if ITALIC in args or kwargs.get(ITALIC):
        _fontweight[1] = True
    return _fontweight
_fontweight_ = fontweight # Alias used inside font(), where "fontweight" is a parameter name.
def lineheight(size=None):
    """ Sets the vertical spacing between lines of text.
        The given size is a relative value: lineheight 1.2 for fontsize 10 means 12.
    """
    global _lineheight
    if size is None:
        return _lineheight
    _lineheight = size
    return _lineheight
def align(mode=None):
    """ Sets the alignment of text paragraphs (LEFT, RIGHT or CENTER).
    """
    global _align
    if mode is None:
        return _align
    _align = mode
    return _align
#--- FONT MIXIN --------------------------------------------------------------------------------------
# The text() command has optional parameters font, fontsize, fontweight, bold, italic, lineheight and align.
def font_mixin(**kwargs):
    """ Returns a (fontname, fontsize, bold, italic, lineheight, align)-tuple,
        taking each value from kwargs and falling back on the current state.
    """
    weight = kwargs.get("fontweight", "")
    return (
        kwargs.get("fontname", kwargs.get("font", _fontname)),
        kwargs.get("fontsize", _fontsize),
        kwargs.get("bold", BOLD in weight or _fontweight[0]),
        kwargs.get("italic", ITALIC in weight or _fontweight[1]),
        kwargs.get("lineheight", _lineheight),
        kwargs.get("align", _align))
#--- TEXT --------------------------------------------------------------------------------------------
# Text is cached for performance.
# For optimal performance, texts should be created once (not every frame) and left unmodified.
# Dynamic texts use a cache of recycled Text objects.
# pyglet.text.Label leaks memory when deleted, because its old batch continues to reference
# loaded font/fontsize/bold/italic glyphs.
# Adding all labels to our own batch remedies this.
_label_batch = pyglet.graphics.Batch()
def label(str="", width=None, height=None, **kwargs):
    """ Returns a drawable pyglet.text.Label object from the given string.
        Optional arguments include: font, fontsize, bold, italic, align, lineheight, fill.
        If these are omitted the current state is used.
    """
    fontname, fontsize, bold, italic, lineheight, align = font_mixin(**kwargs)
    fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
    # Default to transparent black when no fill is given.
    fill = fill is None and (0,0,0,0) or fill
    # We use begin_update() so that the TextLayout doesn't refresh on each update.
    # FormattedDocument allows individual styling of characters - see Text.style().
    label = pyglet.text.Label(batch=_label_batch)
    label.begin_update()
    label.document = pyglet.text.document.FormattedDocument(str or " ")
    label.width = width
    label.height = height
    label.font_name = fontname
    label.font_size = fontsize
    label.bold = bold
    label.italic = italic
    label.multiline = True
    label.anchor_y = "bottom"
    label.set_style("align", align)
    label.set_style("line_spacing", lineheight * fontsize)
    # Fill channels are 0.0-1.0 floats; pyglet expects 0-255 ints.
    label.color = [int(ch*255) for ch in fill]
    if str == "":
        # Empty string "" does not set properties so we used " " first.
        label.text = str
    label.end_update()
    return label
class Text(object):
    """ A formatted string of text that can be drawn at a given position.
        Wraps a cached pyglet label; see __init__ for the available properties.
    """
    def __init__(self, str, x=0, y=0, width=None, height=None, **kwargs):
        """ A formatted string of text that can be drawn at a given position.
            Text has the following properties:
            text, x, y, width, height, font, fontsize, bold, italic, lineheight, align, fill.
            Individual character ranges can be styled with Text.style().
        """
        if width is None:
            # Supplying a string with "\n" characters will crash if no width is given.
            # On the outside it appears as None but inside we use a very large number.
            width = geometry.INFINITE
            a, kwargs["align"] = kwargs.get("align", _align), LEFT
        else:
            a = None
        self.__dict__["x"] = x
        self.__dict__["y"] = y
        self.__dict__["_label"] = label(str, width, height, **kwargs)
        self.__dict__["_dirty"] = False
        self.__dict__["_align"] = a
        self.__dict__["_fill"] = None
    def _get_xy(self):
        return (self.x, self.y)
    def _set_xy(self, (x,y)):
        self.x = x
        self.y = y
    xy = property(_get_xy, _set_xy)
    def _get_size(self):
        return (self.width, self.height)
    def _set_size(self, (w,h)):
        self.width = w
        self.height = h
    size = property(_get_size, _set_size)
    def __getattr__(self, k):
        # Most attribute access is proxied to the underlying pyglet label.
        if k in self.__dict__:
            return self.__dict__[k]
        elif k in ("text", "height", "bold", "italic"):
            return getattr(self._label, k)
        elif k == "string":
            return self._label.text
        elif k == "width":
            # An infinite width (no width given) appears as None on the outside.
            if self._label.width != geometry.INFINITE: return self._label.width
        elif k in ("font", "fontname"):
            return self._label.font_name
        elif k == "fontsize":
            return self._label.font_size
        elif k == "fontweight":
            return ((None, BOLD)[self._label.bold], (None, ITALIC)[self._label.italic])
        elif k == "lineheight":
            return self._label.get_style("line_spacing") / (self.fontsize or 1)
        elif k == "align":
            if not self._align: self._align = self._label.get_style(k)
            return self._align
        elif k == "fill":
            if not self._fill: self._fill = Color([ch/255.0 for ch in self._label.color])
            return self._fill
        else:
            raise AttributeError, "'Text' object has no attribute '%s'" % k
    def __setattr__(self, k, v):
        if k in self.__dict__:
            self.__dict__[k] = v; return
        # Setting properties other than x and y requires the label's layout to be updated.
        self.__dict__["_dirty"] = True
        self._label.begin_update()
        if k in ("text", "height", "bold", "italic"):
            setattr(self._label, k, v)
        elif k == "string":
            self._label.text = v
        elif k == "width":
            self._label.width = v is None and geometry.INFINITE or v
        elif k in ("font", "fontname"):
            self._label.font_name = v
        elif k == "fontsize":
            self._label.font_size = v
        elif k == "fontweight":
            self._label.bold, self._label.italic = BOLD in v, ITALIC in v
        elif k == "lineheight":
            self._label.set_style("line_spacing", v * (self.fontsize or 1))
        elif k == "align":
            self._align = v
            self._label.set_style(k, self._label.width == geometry.INFINITE and LEFT or v)
        elif k == "fill":
            self._fill = v
            self._label.color = [int(255*ch) for ch in self._fill or (0,0,0,0)]
        else:
            raise AttributeError, "'Text' object has no attribute '%s'" % k
    def _update(self):
        # Called from Text.draw(), Text.copy() and Text.metrics.
        # Ensures that all the color changes have been reflected in Text._label.
        # If necessary, recalculates the label's layout (happens in end_update()).
        if hasattr(self._fill, "_dirty") and self._fill._dirty:
            self.fill = self._fill
            self._fill._dirty = False
        if self._dirty:
            self._label.end_update()
            self._dirty = False
    @property
    def path(self):
        raise NotImplementedError
    @property
    def metrics(self):
        """ Yields a (width, height)-tuple of the actual text content.
        """
        self._update()
        return self._label.content_width, self._label.content_height
    def draw(self, x=None, y=None):
        """ Draws the text.
        """
        # Given parameters override Text attributes.
        if x is None:
            x = self.x
        if y is None:
            y = self.y
        # Fontsize is rounded, and fontsize 0 will output a default font.
        # Therefore, we don't draw text with a fontsize smaller than 0.5.
        if self._label.font_size >= 0.5:
            glPushMatrix()
            glTranslatef(x, y, 0)
            self._update()
            self._label.draw()
            glPopMatrix()
    def copy(self):
        """ Returns a copy of the text, including per-character styling. """
        self._update()
        txt = Text(self.text, self.x, self.y, self.width, self.height,
            fontname = self.fontname,
            fontsize = self.fontsize,
                bold = self.bold,
              italic = self.italic,
          lineheight = self.lineheight,
               align = self.align,
                fill = self.fill
        )
        # The individual character styling is retrieved from Label.document._style_runs.
        # Traverse it and set the styles in the new text.
        txt._label.begin_update()
        for k in self._label.document._style_runs:
            for i, j, v in self._label.document._style_runs[k]:
                txt.style(i,j, **{k:v})
        txt._label.end_update()
        return txt
    def style(self, i, j, **kwargs):
        """ Defines the styling for a range of characters in the text.
            Valid arguments can include: font, fontsize, bold, italic, lineheight, align, fill.
            For example: text.style(0, 10, bold=True, fill=color(1,0,0))
        """
        attributes = {}
        for k,v in kwargs.items():
            if k in ("font", "fontname"):
                attributes["font_name"] = v
            elif k == "fontsize":
                attributes["font_size"] = v
            elif k in ("bold", "italic", "align"):
                attributes[k] = v
            elif k == "fontweight":
                attributes.setdefault("bold", BOLD in v)
                attributes.setdefault("italic", ITALIC in v)
            elif k == "lineheight":
                attributes["line_spacing"] = v * self._label.font_size
            elif k == "fill":
                attributes["color"] = [int(ch*255) for ch in v]
            else:
                attributes[k] = v
        self._dirty = True
        self._label.begin_update()
        self._label.document.set_style(i, j, attributes)
    def __len__(self):
        return len(self.text)
    def __del__(self):
        # Remove the label from the shared batch when the text is garbage-collected.
        if hasattr(self, "_label") and self._label:
            self._label.delete()
_TEXT_CACHE = 200 # Maximum number of cached Text objects (see text() below).
_text_cache = {}  # (fontname, fontsize, bold, italic) => [Text, ...]
_text_queue = []  # Cache ids, most recently created first (eviction order).
def text(str, x=None, y=None, width=None, height=None, draw=True, **kwargs):
    """ Draws the string at the given position, with the current font().
        Lines of text will span the given width before breaking to the next line.
        The text will be displayed with the current state font(), fontsize(), fontweight(), etc.
        When the given text is a Text object, the state will not be applied.
        Returns the (possibly recycled) Text object, drawn unless draw=False.
    """
    if isinstance(str, Text) and width is None and height is None and len(kwargs) == 0:
        txt = str
    else:
        # If the given text is not a Text object, create one on the fly.
        # Dynamic Text objects are cached by (font, fontsize, bold, italic),
        # and those that are no longer referenced by the user are recycled.
        # Changing Text properties is still faster than creating a new Text.
        # The cache has a limited size (200), so the oldest Text objects are deleted.
        fontname, fontsize, bold, italic, lineheight, align = font_mixin(**kwargs)
        fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)
        id = (fontname, int(fontsize), bold, italic)
        recycled = False
        if id in _text_cache:
            for txt in _text_cache[id]:
                # Reference count 3 => Python, _text_cache[id], txt.
                # No other variables are referencing the text, so we can recycle it.
                # NOTE(review): sys.getrefcount() is a CPython detail — verify
                # before running on another interpreter.
                if getrefcount(txt) == 3:
                    txt.text = str
                    txt.x = x or 0
                    txt.y = y or 0
                    txt.width = width
                    txt.height = height
                    txt.lineheight = lineheight
                    txt.align = align
                    txt.fill = fill
                    recycled = True
                    break
        if not recycled:
            txt = Text(str, x or 0, y or 0, width, height, **kwargs)
            _text_cache.setdefault(id, [])
            _text_cache[id].append(txt)
            _text_queue.insert(0, id)
            # Evict the oldest cached Text objects beyond the cache size.
            for id in reversed(_text_queue[_TEXT_CACHE:]):
                del _text_cache[id][0]
                del _text_queue[-1]
    if draw:
        txt.draw(x, y)
    return txt
def textwidth(txt, **kwargs):
    """ Returns the width of the given text.
    """
    if isinstance(txt, Text) and not kwargs:
        return txt.metrics[0]
    kwargs["draw"] = False
    return text(txt, 0, 0, **kwargs).metrics[0]
def textheight(txt, width=None, **kwargs):
    """ Returns the height of the given text.
    """
    if isinstance(txt, Text) and not kwargs and width == txt.width:
        return txt.metrics[1]
    kwargs["draw"] = False
    return text(txt, 0, 0, width=width, **kwargs).metrics[1]
def textmetrics(txt, width=None, **kwargs):
    """ Returns a (width, height)-tuple for the given text.
    """
    if isinstance(txt, Text) and not kwargs and width == txt.width:
        return txt.metrics
    kwargs["draw"] = False
    return text(txt, 0, 0, width=width, **kwargs).metrics
#--- TEXTPATH ----------------------------------------------------------------------------------------
class GlyphPathError(Exception):
    """ Raised by textpath() when no outline data is available for a glyph. """
    pass
import cPickle
glyphs = {}
try:
    # Load cached font glyph path information from nodebox/font/glyph.p.
    # By default, it has glyph path info for Droid Sans, Droid Sans Mono, Droid Serif.
    glyphs = path.join(path.dirname(__file__), "..", "font", "glyph.p")
    glyphs = cPickle.load(open(glyphs))
except:
    # Best-effort: textpath() raises GlyphPathError for glyphs missing here.
    pass
def textpath(string, x=0, y=0, **kwargs):
    """ Returns a BezierPath from the given text string.
        The fontname, fontsize and fontweight can be given as optional parameters,
        width, height, lineheight and align are ignored.
        Only works with ASCII characters in the default fonts (Droid Sans, Droid Sans Mono, Droid Serif, Arial).
        See nodebox/font/glyph.py on how to activate other fonts.
    """
    fontname, fontsize, bold, italic, lineheight, align = font_mixin(**kwargs)
    # Weight key into the glyphs dict: "bold italic", "bold", "italic" or "normal".
    w = bold and italic and "bold italic" or bold and "bold" or italic and "italic" or "normal"
    p = BezierPath()
    # Scale factor: glyph coordinates appear to be stored at a 1000-unit em size.
    f = fontsize / 1000.0
    for ch in string:
        try: glyph = glyphs[fontname][w][ch]
        except:
            raise GlyphPathError, "no glyph path information for %s %s '%s'" % (w, fontname, ch)
        # Replay the stored path commands; y-coordinates are subtracted
        # (glyph data is presumably stored with a flipped y-axis).
        for pt in glyph:
            if pt[0] == MOVETO:
                p.moveto(x+pt[1]*f, y-pt[2]*f)
            elif pt[0] == LINETO:
                p.lineto(x+pt[1]*f, y-pt[2]*f)
            elif pt[0] == CURVETO:
                p.curveto(x+pt[3]*f, y-pt[4]*f, x+pt[5]*f, y-pt[6]*f, x+pt[1]*f, y-pt[2]*f)
            elif pt[0] == CLOSE:
                p.closepath()
        # Advance the pen by the rendered width of the character.
        x += textwidth(ch, font=fontname, fontsize=fontsize, bold=bold, italic=italic)
    return p
#=====================================================================================================
#--- UTILITIES ---------------------------------------------------------------------------------------
_RANDOM_MAP = [90.0, 9.00, 4.00, 2.33, 1.50, 1.00, 0.66, 0.43, 0.25, 0.11, 0.01]
def _rnd_exp(bias=0.5):
    """ Returns the exponent for rnd()**exponent so that results average the given bias.
        A bias of 0.5 yields exponent 1.0 (a uniform distribution).
    """
    clamped = max(0, min(bias, 1)) * 10
    i = int(floor(clamped))   # Index into the _RANDOM_MAP curve.
    exponent = _RANDOM_MAP[i] # If bias is 0.3, rnd()**2.33 will average 0.3.
    if clamped < 10:
        # Interpolate linearly between the two nearest curve samples.
        exponent += (_RANDOM_MAP[i+1] - exponent) * (clamped - i)
    return exponent
def random(v1=1.0, v2=None, bias=None):
    """ Returns a number between v1 and v2, including v1 but not v2.
        The bias (0.0-1.0) represents preference towards lower or higher numbers.
    """
    if v2 is None:
        v1, v2 = 0, v1
    r = rnd() if bias is None else rnd() ** _rnd_exp(bias)
    x = v1 + r * (v2 - v1)
    # Two int bounds yield an int result.
    if isinstance(v1, int) and isinstance(v2, int):
        return int(x)
    return x
def grid(cols, rows, colwidth=1, rowheight=1, shuffled=False):
    """ Yields (x,y)-tuples for the given number of rows and columns.
        The space between each point is determined by colwidth and rowheight.
        With shuffled=True, rows and columns are visited in random order.
    """
    # Materialize as lists: random.shuffle() needs a mutable sequence.
    # Python 2's range() returned a list, but shuffle would fail on a
    # Python 3 range object. Behavior is otherwise unchanged.
    rows = list(range(int(rows)))
    cols = list(range(int(cols)))
    if shuffled:
        shuffle(rows)
        shuffle(cols)
    for y in rows:
        for x in cols:
            yield (x*colwidth, y*rowheight)
def files(path="*"):
    """ Returns a list of files found at the given path.
        The path may contain wildcards, e.g. "images/*.png".
    """
    matches = glob(path)
    return matches
#=====================================================================================================
#--- PROTOTYPE ----------------------------------------------------------------------------------------
class Prototype(object):
    def __init__(self):
        """ A base class that allows on-the-fly extension.
            This means that external functions can be bound to it as methods,
            and properties set at runtime are copied correctly.
            Prototype can handle:
            - functions (these become class methods),
            - immutable types (str, unicode, int, long, float, bool),
            - lists, tuples and dictionaries of immutable types,
            - objects with a copy() method.
        """
        # key => the value originally passed to _bind(), used by inherit().
        self._dynamic = {}
    def _deepcopy(self, value):
        """ Returns a copy of the value; functions are bound to self as instance methods. """
        if isinstance(value, FunctionType):
            return instancemethod(value, self)
        elif hasattr(value, "copy"):
            return value.copy()
        elif isinstance(value, (list, tuple)):
            return [self._deepcopy(x) for x in value]
        elif isinstance(value, dict):
            return dict([(k, self._deepcopy(v)) for k,v in value.items()])
        elif isinstance(value, (str, unicode, int, long, float, bool)):
            return value
        else:
            # Biggest problem here is how to find/relink circular references.
            raise TypeError, "Prototype can't bind %s." % str(value.__class__)
    def _bind(self, key, value):
        """ Adds a new method or property to the prototype.
            For methods, the given function is expected to take the object (i.e. self) as first parameter.
            For properties, values can be: list, tuple, dict, str, unicode, int, long, float, bool,
            or an object with a copy() method.
            For example, we can define a Layer's custom draw() method in two ways:
            - By subclassing:
                class MyLayer(Layer):
                    def draw(layer):
                        pass
                layer = MyLayer()
                layer.draw()
            - By function binding:
                def my_draw(layer):
                    pass
                layer = Layer()
                layer._bind("draw", my_draw)
                layer.draw()
        """
        self._dynamic[key] = value
        object.__setattr__(self, key, self._deepcopy(value))
    def set_method(self, function, name=None):
        """ Creates a dynamic method (with the given name) from the given function.
        """
        if not name:
            # Default to the function's own name.
            name = function.__name__
        self._bind(name, function)
    def set_property(self, key, value):
        """ Adds a property to the prototype.
            Using this method ensures that dynamic properties are copied correctly - see inherit().
        """
        self._bind(key, value)
    def inherit(self, prototype):
        """ Inherit all the dynamic properties and methods of another prototype.
        """
        for k,v in prototype._dynamic.items():
            self._bind(k,v)
#=====================================================================================================
#--- EVENT HANDLER ------------------------------------------------------------------------------------
class EventHandler:
    """ Mixin with overridable mouse/keyboard event hooks and a simple event queue. """
    def __init__(self):
        # Use __dict__ directly so we can do multiple inheritance in combination with Prototype:
        self.__dict__["enabled"] = True  # Receive events from the canvas?
        self.__dict__["focus"]   = False # True when this object receives the focus.
        self.__dict__["pressed"] = False # True when the mouse is pressed on this object.
        self.__dict__["dragged"] = False # True when the mouse is dragged on this object.
        self.__dict__["_queue"]  = []
    # Default no-op hooks; subclasses override the events they care about.
    def on_mouse_enter(self, mouse):
        pass
    def on_mouse_leave(self, mouse):
        pass
    def on_mouse_motion(self, mouse):
        pass
    def on_mouse_press(self, mouse):
        pass
    def on_mouse_release(self, mouse):
        pass
    def on_mouse_drag(self, mouse):
        pass
    def on_mouse_scroll(self, mouse):
        pass
    def on_key_press(self, keys):
        pass
    def on_key_release(self, keys):
        pass
    # Instead of calling an event directly it could be queued,
    # e.g. layer.queue_event(layer.on_mouse_press, canvas.mouse).
    # layer.process_events() can then be called whenever desired,
    # e.g. after the canvas has been drawn so that events can contain drawing commands.
    def queue_event(self, event, *args):
        """ Schedules event(*args) to be fired on the next process_events(). """
        self._queue.append((event, args))
    def process_events(self):
        """ Fires all queued events in order, then empties the queue. """
        for event, args in self._queue:
            event(*args)
        self._queue = []
# Note: there is no event propagation.
# Event propagation means that, for example, if a layer is pressed
# all its child (or parent) layers receive an on_mouse_press() event as well.
# If this kind of behavior is desired, it is the responsibility of custom subclasses of Layer.
#=====================================================================================================
#--- TRANSITION --------------------------------------------------------------------------------------
# Transition.update() will tween from the last value to transition.set() new value in the given time.
# Transitions are used as attributes (e.g. position, rotation) for the Layer class.
TIME = 0 # The current time in this frame; changes when the canvas is updated.
LINEAR = "linear" # Even interpolation over the duration.
SMOOTH = "smooth" # Slower interpolation at the beginning and end.
class Transition(object):
    """ A value that tweens from its previous setting to a new one over time.
        Used to back animatable Layer attributes (position, rotation, opacity, ...).
    """
    def __init__(self, value, interpolation=SMOOTH):
        self._v0 = value # Previous value => Transition.start.
        self._vi = value # Current value => Transition.current.
        self._v1 = value # Desired value => Transition.stop.
        self._t0 = TIME  # Start time.
        self._t1 = TIME  # End time.
        self._interpolation = interpolation
    def copy(self):
        """ Returns a new Transition with identical state. """
        duplicate = Transition(None)
        duplicate._v0 = self._v0
        duplicate._vi = self._vi
        duplicate._v1 = self._v1
        duplicate._t0 = self._t0
        duplicate._t1 = self._t1
        duplicate._interpolation = self._interpolation
        return duplicate
    def get(self):
        """ Returns the transition stop value.
        """
        return self._v1
    def set(self, value, duration=1.0):
        """ Sets the transition stop value, which will be reached in the given duration (seconds).
            Calling Transition.update() moves the Transition.current value toward Transition.stop.
        """
        if duration == 0:
            # With no duration, jump the current value straight to the target.
            self._vi = value
        self._v1 = value
        self._v0 = self._vi
        self._t0 = TIME # Now.
        self._t1 = TIME + duration
    @property
    def start(self):
        return self._v0
    @property
    def stop(self):
        return self._v1
    @property
    def current(self):
        return self._vi
    @property
    def done(self):
        return TIME >= self._t1
    def update(self):
        """ Calculates the new current value. Returns True when done.
            The transition approaches the desired value according to the interpolation:
            - LINEAR: even transition over the given duration time,
            - SMOOTH: transition goes slower at the beginning and end.
        """
        if TIME >= self._t1 or self._vi is None:
            self._vi = self._v1
            return True
        # The elapsed time as a number between 0.0 and 1.0.
        t = (TIME - self._t0) / (self._t1 - self._t0)
        if self._interpolation != LINEAR:
            t = geometry.smoothstep(0.0, 1.0, t)
        self._vi = self._v0 + (self._v1 - self._v0) * t
        return False
#--- LAYER -------------------------------------------------------------------------------------------
# The Layer class is responsible for the following:
# - it has a draw() method to override; all sorts of NodeBox drawing commands can be put here,
# - it has a transformation origin point and rotates/scales its drawn items as a group,
# - it has child layers that transform relative to this layer,
# - when its attributes (position, scale, angle, ...) change, they will tween smoothly over time.
_UID = 0
def _uid():
    """ Returns a new unique integer id (used to identify layers). """
    global _UID
    _UID += 1
    return _UID
# Storage modes for a layer's transformation origin point:
RELATIVE = "relative" # Origin point is stored as float, e.g. (0.5, 0.5).
ABSOLUTE = "absolute" # Origin point is stored as int, e.g. (100, 100).
class LayerRenderError(Exception):
    """ Error raised during layer rendering. """
    pass
# When Layer.clipped=True, children are clipped to the bounds of the layer.
# The layer clipping masks lazily changes size with the layer.
class LayerClippingMask(ClippingMask):
    """ Clips a layer's children to the layer's bounds (used when Layer.clipped=True).
        The mask lazily follows the layer's size.
    """
    def __init__(self, layer):
        self.layer = layer
    def draw(self, fill=(0,0,0,1), stroke=None):
        # An undefined (None or 0) width/height counts as an infinite bound.
        w = self.layer.width or geometry.INFINITE
        h = self.layer.height or geometry.INFINITE
        rect(0, 0, w, h, fill=fill, stroke=stroke)
class Layer(list, Prototype, EventHandler):
    def __init__(self, x=0, y=0, width=None, height=None, origin=(0,0),
                 scale=1.0, rotation=0, opacity=1.0, duration=0.0, name=None,
                 parent=None, **kwargs):
        """ Creates a new drawing layer that can be appended to the canvas.
            The duration defines the time (seconds) it takes to animate transformations or opacity.
            When the animation has terminated, layer.done=True.
        """
        # CENTER or a float pair means the origin is stored relatively
        # (as a fraction of width/height); otherwise it is absolute pixels.
        if origin == CENTER:
            origin = (0.5,0.5)
            origin_mode = RELATIVE
        elif isinstance(origin[0], float) \
         and isinstance(origin[1], float):
            origin_mode = RELATIVE
        else:
            origin_mode = ABSOLUTE
        Prototype.__init__(self) # Facilitates extension on the fly.
        EventHandler.__init__(self)
        self._id = _uid()
        self.name = name # Layer name. Layers are accessible as ParentLayer.[name]
        self.canvas = None # The canvas this layer is drawn to.
        self.parent = parent # The layer this layer is a child of.
        self._x = Transition(x) # Layer horizontal position in pixels, from the left.
        self._y = Transition(y) # Layer vertical position in pixels, from the bottom.
        self._width = Transition(width) # Layer width in pixels.
        self._height = Transition(height) # Layer height in pixels.
        self._dx = Transition(origin[0]) # Transformation origin point.
        self._dy = Transition(origin[1]) # Transformation origin point.
        self._origin = origin_mode # Origin point as RELATIVE or ABSOLUTE coordinates?
        self._scale = Transition(scale) # Layer width and height scale.
        self._rotation = Transition(rotation) # Layer rotation.
        self._opacity = Transition(opacity) # Layer opacity.
        self.duration = duration # The time it takes to animate transformations.
        self.top = True # Draw on top of or beneath parent?
        self.flipped = False # Flip the layer horizontally?
        self.clipped = False # Clip child layers to bounds?
        self.hidden = False # Hide the layer?
        self._transform_cache = None # Cache of the local transformation matrix.
        self._transform_stack = None # Cache of the cumulative transformation matrix.
        self._clipping_mask = LayerClippingMask(self)
@classmethod
def from_image(self, img, *args, **kwargs):
""" Returns a new layer that renders the given image, and with the same size as the image.
The layer's draw() method and an additional image property are set.
"""
if not isinstance(img, Image):
img = Image(img, data=kwargs.get("data"))
kwargs.setdefault("width", img.width)
kwargs.setdefault("height", img.height)
def draw(layer):
image(layer.image)
layer = self(*args, **kwargs)
layer.set_method(draw)
layer.set_property("image", img)
return layer
@classmethod
def from_function(self, function, *args, **kwargs):
""" Returns a new layer that renders the drawing commands in the given function.
The layer's draw() method is set.
"""
def draw(layer):
function(layer)
layer = self(*args, **kwargs)
layer.set_method(draw)
return layer
    def copy(self, parent=None, canvas=None):
        """ Returns a copy of the layer.
            All Layer properties will be copied, except for the new parent and canvas,
            which you need to define as optional parameters.
            This means that copies are not automatically appended to the parent layer or canvas.
        """
        layer = self.__class__() # Create instance of the derived class, not Layer.
        layer.duration = 0 # Copy all transitions instantly.
        layer.canvas = canvas
        layer.parent = parent
        layer.name = self.name
        layer._x = self._x.copy()
        layer._y = self._y.copy()
        layer._width = self._width.copy()
        layer._height = self._height.copy()
        layer._origin = self._origin
        layer._dx = self._dx.copy()
        layer._dy = self._dy.copy()
        layer._scale = self._scale.copy()
        layer._rotation = self._rotation.copy()
        layer._opacity = self._opacity.copy()
        # Restore the real duration now that all transitions have been copied.
        layer.duration = self.duration
        layer.top = self.top
        layer.flipped = self.flipped
        layer.clipped = self.clipped
        layer.hidden = self.hidden
        layer.enabled = self.enabled
        # Use base Layer.extend(), we don't care about what subclass.extend() does.
        Layer.extend(layer, [child.copy() for child in self])
        # Inherit all the dynamic properties and methods.
        Prototype.inherit(layer, self)
        return layer
def __getattr__(self, key):
""" Returns the given property, or the layer with the given name.
"""
if key in self.__dict__:
return self.__dict__[key]
for layer in self:
if layer.name == key:
return layer
raise AttributeError, "%s instance has no attribute '%s'" % (self.__class__.__name__, key)
    def _set_container(self, key, value):
        """ Moves the layer between containers (key is "canvas" or "parent"):
            removes it from the old container's child list and appends it to the new one.
        """
        # If Layer.canvas is set to None, the canvas should no longer contain the layer.
        # If Layer.canvas is set to Canvas, this canvas should contain the layer.
        # Remove the layer from the old canvas/parent.
        # Append the layer to the new container.
        if self in (self.__dict__.get(key) or ()):
            self.__dict__[key].remove(self)
        if isinstance(value, list) and self not in value:
            list.append(value, self)
        self.__dict__[key] = value
    # Canvas and parent are assigned through _set_container()
    # so that container membership stays in sync with the attribute.
    def _get_canvas(self):
        return self.__dict__.get("canvas")
    def _get_parent(self):
        return self.__dict__.get("parent")
    def _set_canvas(self, canvas):
        self._set_container("canvas", canvas)
    def _set_parent(self, layer):
        self._set_container("parent", layer)
    canvas = property(_get_canvas, _set_canvas)
    parent = property(_get_parent, _set_parent)
@property
def root(self):
return self.parent and self.parent.root or self
@property
def layers(self):
return self
def insert(self, index, layer):
list.insert(self, index, layer)
layer.__dict__["parent"] = self
def append(self, layer):
list.append(self, layer)
layer.__dict__["parent"] = self
def extend(self, layers):
for layer in layers:
Layer.append(self, layer)
def remove(self, layer):
list.remove(self, layer)
layer.__dict__["parent"] = None
def pop(self, index):
layer = list.pop(self, index)
layer.__dict__["parent"] = None
return layer
    # x, y, width, height, scaling, rotation and opacity are animated:
    # each is backed by a transition object, so assigning a new value
    # eases towards it over Layer.duration seconds.
    def _get_x(self):
        return self._x.get()
    def _get_y(self):
        return self._y.get()
    def _get_width(self):
        return self._width.get()
    def _get_height(self):
        return self._height.get()
    def _get_scale(self):
        return self._scale.get()
    def _get_rotation(self):
        return self._rotation.get()
    def _get_opacity(self):
        return self._opacity.get()
    def _set_x(self, x):
        # Any geometry change invalidates the cached transformation matrix.
        self._transform_cache = None
        self._x.set(x, self.duration)
    def _set_y(self, y):
        self._transform_cache = None
        self._y.set(y, self.duration)
    def _set_width(self, width):
        self._transform_cache = None
        self._width.set(width, self.duration)
    def _set_height(self, height):
        self._transform_cache = None
        self._height.set(height, self.duration)
    def _set_scale(self, scale):
        self._transform_cache = None
        self._scale.set(scale, self.duration)
    def _set_rotation(self, rotation):
        self._transform_cache = None
        self._rotation.set(rotation, self.duration)
    def _set_opacity(self, opacity):
        # Opacity does not affect the transformation matrix: no cache flush.
        self._opacity.set(opacity, self.duration)
    x = property(_get_x, _set_x)
    y = property(_get_y, _set_y)
    width = property(_get_width, _set_width)
    height = property(_get_height, _set_height)
    scaling = property(_get_scale, _set_scale)
    rotation = property(_get_rotation, _set_rotation)
    opacity = property(_get_opacity, _set_opacity)
    def _get_xy(self):
        return (self.x, self.y)
    def _set_xy(self, (x,y)):
        self.x = x
        self.y = y
    xy = property(_get_xy, _set_xy)
    def _get_origin(self, relative=False):
        """ Returns the point (x,y) from which all layer transformations originate.
            When relative=True, x and y are defined percentually (0.0-1.0) in terms of width and height.
            In some cases x=0 or y=0 is returned:
            - For an infinite layer (width=None or height=None), we can't deduct the absolute origin
              from coordinates stored relatively (e.g. what is infinity*0.5?).
            - Vice versa, for an infinite layer we can't deduct the relative origin from coordinates
              stored absolute (e.g. what is 200/infinity?).
        """
        dx = self._dx.current
        dy = self._dy.current
        w = self._width.current
        h = self._height.current
        # Origin is stored as absolute coordinates and we want it relative.
        if self._origin == ABSOLUTE and relative:
            if w is None: w = 0
            if h is None: h = 0
            dx = w!=0 and dx/w or 0
            dy = h!=0 and dy/h or 0
        # Origin is stored as relative coordinates and we want it absolute.
        elif self._origin == RELATIVE and not relative:
            dx = w is not None and dx*w or 0
            dy = h is not None and dy*h or 0
        return dx, dy
    def _set_origin(self, x, y, relative=False):
        """ Sets the transformation origin point in either absolute or relative coordinates.
            For example, if a layer is 400x200 pixels, setting the origin point to (200,100)
            all transformations (translate, rotate, scale) originate from the center.
        """
        self._transform_cache = None
        self._dx.set(x, self.duration)
        self._dy.set(y, self.duration)
        self._origin = relative and RELATIVE or ABSOLUTE
    def origin(self, x=None, y=None, relative=False):
        """ Sets or returns the point (x,y) from which all layer transformations originate.
            Passing x=CENTER is shorthand for origin(0.5, 0.5, relative=True).
        """
        if x is not None:
            if x == CENTER:
                x, y, relative = 0.5, 0.5, True
            if y is not None:
                self._set_origin(x, y, relative)
        return self._get_origin(relative)
    def _get_relative_origin(self):
        return self.origin(relative=True)
    def _set_relative_origin(self, xy):
        self._set_origin(xy[0], xy[1], relative=True)
    relative_origin = property(_get_relative_origin, _set_relative_origin)
    def _get_absolute_origin(self):
        return self.origin(relative=False)
    def _set_absolute_origin(self, xy):
        self._set_origin(xy[0], xy[1], relative=False)
    absolute_origin = property(_get_absolute_origin, _set_absolute_origin)
    # visible is simply the inverse view of Layer.hidden.
    def _get_visible(self):
        return not self.hidden
    def _set_visible(self, b):
        self.hidden = not b
    visible = property(_get_visible, _set_visible)
def translate(self, x, y):
self.x += x
self.y += y
def rotate(self, angle):
self.rotation += angle
def scale(self, f):
self.scaling *= f
def flip(self):
self.flipped = not self.flipped
    def _update(self):
        """ Called each frame from canvas._update() to update the layer transitions.
        """
        # Each transition's update() returns True when it has settled;
        # the &= chain is True only when every animated property is done.
        done = self._x.update()
        done &= self._y.update()
        done &= self._width.update()
        done &= self._height.update()
        done &= self._dx.update()
        done &= self._dy.update()
        done &= self._scale.update()
        done &= self._rotation.update()
        if not done: # i.e. the layer is being transformed
            self._transform_cache = None
        # Opacity is animated too, but does not affect the transform cache.
        self._opacity.update()
        self.update()
        for layer in self:
            layer._update()
    def update(self):
        """Override this method to provide custom updating code.
        """
        pass
@property
def done(self):
""" Returns True when all transitions have finished.
"""
return self._x.done \
and self._y.done \
and self._width.done \
and self._height.done \
and self._dx.done \
and self._dy.done \
and self._scale.done \
and self._rotation.done \
and self._opacity.done
    def _draw(self):
        """ Draws the transformed layer and all of its children.
        """
        if self.hidden:
            return
        glPushMatrix()
        # Be careful that the transformations happen in the same order in Layer._transform().
        # translate => flip => rotate => scale => origin.
        # Center the contents around the origin point.
        dx, dy = self.origin(relative=False)
        glTranslatef(round(self._x.current), round(self._y.current), 0)
        if self.flipped:
            glScalef(-1, 1, 1)
        glRotatef(self._rotation.current, 0, 0, 1)
        glScalef(self._scale.current, self._scale.current, 1)
        # Enable clipping mask if Layer.clipped=True.
        if self.clipped:
            beginclip(self._clipping_mask)
        # Draw child layers below.
        for layer in self:
            if layer.top is False:
                layer._draw()
        # Draw layer.
        global _alpha
        _alpha = self._opacity.current # XXX should also affect child layers?
        glPushMatrix()
        glTranslatef(-round(dx), -round(dy), 0) # Layers are drawn relative from parent origin.
        self.draw()
        glPopMatrix()
        _alpha = 1
        # Draw child layers on top.
        for layer in self:
            if layer.top is True:
                layer._draw()
        if self.clipped:
            endclip()
        glPopMatrix()
    def draw(self):
        """Override this method to provide custom drawing code for this layer.
           At this point, the layer is correctly transformed.
        """
        pass
    def render(self):
        """ Returns the layer as a flattened image.
            The layer and all of its children need to have width and height set.
        """
        b = self.bounds
        if geometry.INFINITE in (b.x, b.y, b.width, b.height):
            raise LayerRenderError, "can't render layer of infinite size"
        # Draw into an offscreen buffer, shifted so the bounds start at (0,0).
        return render(lambda: (translate(-b.x,-b.y), self._draw()), b.width, b.height)
    def layer_at(self, x, y, clipped=False, enabled=False, transformed=True, _covered=False):
        """ Returns the topmost layer containing the mouse position, None otherwise.
            With clipped=True, no parts of child layers outside the parent's bounds are checked.
            With enabled=True, only enabled layers are checked (useful for events).
            _covered is internal: True once an ancestor is known to block this layer.
        """
        if self.hidden:
            # Don't do costly operations on layers the user can't see.
            return None
        if enabled and not self.enabled:
            # Skip disabled layers during event propagation.
            return None
        if _covered:
            # An ancestor is blocking this layer, so we can't select it.
            return None
        hit = self.contains(x, y, transformed)
        if clipped:
            # If (x,y) is not inside the clipped bounds, return None.
            # If children protruding beyond the layer's bounds are clipped,
            # we only need to look at children on top of the layer.
            # Each child is drawn on top of the previous child,
            # so we hit test them in reverse order (highest-first).
            if not hit:
                return None
            children = [layer for layer in reversed(self) if layer.top is True]
        else:
            # Otherwise, traverse all children in on-top-first order to avoid
            # selecting a child underneath the layer that is in reality
            # covered by a peer on top of the layer, further down the list.
            children = sorted(reversed(self), key=lambda layer: not layer.top)
        for child in children:
            # An ancestor (e.g. grandparent) may be covering the child.
            # This happens when it hit tested and is somewhere on top of the child.
            # We keep a recursive covered-state to verify visibility.
            # The covered-state starts as False, but stays True once it switches.
            _covered = _covered or (hit and not child.top)
            child = child.layer_at(x, y, clipped, enabled, transformed, _covered)
            if child is not None:
                # Note: "if child:" won't work because it can be an empty list (no children).
                # Should be improved by not having Layer inherit from list.
                return child
        if hit:
            return self
        else:
            return None
    def _transform(self, local=True):
        """ Returns the transformation matrix of the layer:
            a calculated state of its translation, rotation and scaling.
            If local=False, prepends all transformations of the parent layers,
            i.e. you get the absolute transformation state of a nested layer.
        """
        if self._transform_cache is None:
            # Calculate the local transformation matrix.
            # Be careful that the transformations happen in the same order in Layer._draw().
            # translate => flip => rotate => scale => origin.
            tf = Transform()
            dx, dy = self.origin(relative=False)
            tf.translate(round(self._x.current), round(self._y.current))
            if self.flipped:
                tf.scale(-1, 1)
            tf.rotate(self._rotation.current)
            tf.scale(self._scale.current, self._scale.current)
            tf.translate(-round(dx), -round(dy))
            self._transform_cache = tf
            # Flush the cumulative transformation cache of all children,
            # since their absolute state depends on this (changed) layer.
            def _flush(layer):
                layer._transform_stack = None
            self.traverse(_flush)
        if not local:
            # Return the cumulative transformation matrix.
            # All of the parent transformation states need to be up to date.
            # If not, we need to recalculate the whole chain.
            if self._transform_stack is None:
                if self.parent is None:
                    self._transform_stack = self._transform_cache.copy()
                else:
                    # Accumulate all the parent layer transformations.
                    # In the process, we update the transformation state of any outdated parent.
                    dx, dy = self.parent.origin(relative=False)
                    # Layers are drawn relative from parent origin.
                    tf = self.parent._transform(local=False).copy()
                    tf.translate(round(dx), round(dy))
                    self._transform_stack = self._transform_cache.copy()
                    self._transform_stack.prepend(tf)
            return self._transform_stack
        return self._transform_cache
    @property
    def transform(self):
        # The absolute (cumulative) transformation matrix of the layer.
        return self._transform(local=False)
    def _bounds(self, local=True):
        """ Returns the rectangle that encompasses the transformed layer and its children.
            If one of the children has width=None or height=None, bounds will be infinite.
        """
        w = self._width.current; w = w is None and geometry.INFINITE or w
        h = self._height.current; h = h is None and geometry.INFINITE or h
        # Find the transformed bounds of the layer:
        # map the four corners through the transform and take the min/max extents.
        p = self.transform.map([(0,0), (w,0), (w,h), (0,h)])
        x = min(p[0][0], p[1][0], p[2][0], p[3][0])
        y = min(p[0][1], p[1][1], p[2][1], p[3][1])
        w = max(p[0][0], p[1][0], p[2][0], p[3][0]) - x
        h = max(p[0][1], p[1][1], p[2][1], p[3][1]) - y
        b = geometry.Bounds(x, y, w, h)
        if not local:
            # Expand the rectangle with each child's (recursive) bounds.
            for child in self:
                b = b.union(child.bounds)
        return b
    @property
    def bounds(self):
        return self._bounds(local=False)
    def contains(self, x, y, transformed=True):
        """ Returns True if (x,y) falls within the layer's rectangular area.
            Useful for GUI elements: with transformed=False the calculations are much faster;
            and it will report correctly as long as the layer (or parent layer)
            is not rotated or scaled, and has its origin at (0,0).
        """
        w = self._width.current; w = w is None and geometry.INFINITE or w
        h = self._height.current; h = h is None and geometry.INFINITE or h
        if not transformed:
            x0, y0 = self.absolute_position()
            return x0 <= x <= x0+w \
               and y0 <= y <= y0+h
        # Find the transformed bounds of the layer:
        p = self.transform.map([(0,0), (w,0), (w,h), (0,h)])
        return geometry.point_in_polygon(p, x, y)
    hit_test = contains
def absolute_position(self, root=None):
""" Returns the absolute (x,y) position (i.e. cumulative with parent position).
"""
x = 0
y = 0
layer = self
while layer is not None and layer != root:
x += layer.x
y += layer.y
layer = layer.parent
return x, y
def traverse(self, visit=lambda layer: None):
""" Recurses the layer structure and calls visit() on each child layer.
"""
visit(self)
[layer.traverse(visit) for layer in self]
    def __repr__(self):
        return "Layer(%sx=%.2f, y=%.2f, scale=%.2f, rotation=%.2f, opacity=%.2f, duration=%.2f)" % (
            self.name is not None and "name='%s', " % self.name or "",
            self.x,
            self.y,
            self.scaling,
            self.rotation,
            self.opacity,
            self.duration
        )
    def __eq__(self, other):
        # Layers compare by identity (unique _id), not by list contents.
        return isinstance(other, Layer) and self._id == other._id
    def __ne__(self, other):
        return not self.__eq__(other)
# Lowercase alias, consistent with NodeBox's command naming style.
layer = Layer
#--- GROUP -------------------------------------------------------------------------------------------
class Group(Layer):
    def __init__(self, *args, **kwargs):
        """ A layer that serves as a container for other layers.
            It has no width or height and doesn't draw anything.
        """
        Layer.__init__(self, *args, **kwargs)
        self._set_width(0)
        self._set_height(0)
    @classmethod
    def from_image(*args, **kwargs):
        # A Group has no visual content of its own.
        raise NotImplementedError
    @classmethod
    def from_function(*args, **kwargs):
        raise NotImplementedError
    @property
    def width(self):
        return 0
    @property
    def height(self):
        return 0
    def layer_at(self, x, y, clipped=False, enabled=False, transformed=True, _covered=False):
        """ Returns the topmost child layer containing (x,y), None otherwise.
        """
        # Ignores clipped=True for Group (since it has no width or height).
        for child in reversed(self):
            layer = child.layer_at(x, y, clipped, enabled, transformed, _covered)
            # Fix: compare against None instead of truth-testing.
            # A hit Layer with no children is an empty list (i.e. falsy),
            # and "if layer:" would incorrectly skip it (see Layer.layer_at()).
            if layer is not None:
                return layer
        return None
group = Group
#=====================================================================================================
#--- MOUSE -------------------------------------------------------------------------------------------
# Mouse cursors:
DEFAULT = "default"
HIDDEN = "hidden"
CROSS = pyglet.window.Window.CURSOR_CROSSHAIR
HAND = pyglet.window.Window.CURSOR_HAND
TEXT = pyglet.window.Window.CURSOR_TEXT
WAIT = pyglet.window.Window.CURSOR_WAIT
# Mouse buttons:
LEFT = "left"
RIGHT = "right"
MIDDLE = "middle"
class Mouse(Point):
    def __init__(self, canvas, x=0, y=0):
        """ Keeps track of the mouse position on the canvas, buttons pressed and the cursor icon.
        """
        Point.__init__(self, x, y)
        self._canvas = canvas
        self._cursor = DEFAULT # Mouse cursor: CROSS, HAND, HIDDEN, TEXT, WAIT.
        self._button = None # Mouse button pressed: LEFT, RIGHT, MIDDLE.
        self.modifiers = [] # Mouse button modifiers: CTRL, SHIFT, OPTION.
        self.pressed = False # True if the mouse button is pressed.
        self.dragged = False # True if the mouse is dragged.
        self.scroll = Point(0,0) # Scroll offset.
        self.dx = 0 # Relative offset from previous horizontal position.
        self.dy = 0 # Relative offset from previous vertical position.
    # Backwards compatibility due to an old typo:
    @property
    def vx(self):
        return self.dx
    @property
    def vy(self):
        return self.dy
    # Position as a 0.0-1.0 fraction of the canvas size.
    @property
    def relative_x(self):
        try: return float(self.x) / self._canvas.width
        except ZeroDivisionError:
            return 0
    @property
    def relative_y(self):
        try: return float(self.y) / self._canvas.height
        except ZeroDivisionError:
            return 0
    def _get_cursor(self):
        return self._cursor
    def _set_cursor(self, mode):
        # DEFAULT maps to None, which makes pyglet restore the system cursor.
        self._cursor = mode != DEFAULT and mode or None
        if mode == HIDDEN:
            self._canvas._window.set_mouse_visible(False); return
        self._canvas._window.set_mouse_cursor(
            self._canvas._window.get_system_mouse_cursor(
                self._cursor))
    cursor = property(_get_cursor, _set_cursor)
    def _get_button(self):
        return self._button
    def _set_button(self, button):
        # Translate pyglet button constants to our string constants.
        self._button = \
            button == pyglet.window.mouse.LEFT and LEFT or \
            button == pyglet.window.mouse.RIGHT and RIGHT or \
            button == pyglet.window.mouse.MIDDLE and MIDDLE or None
    button = property(_get_button, _set_button)
    def __repr__(self):
        return "Mouse(x=%.1f, y=%.1f, pressed=%s, dragged=%s)" % (
            self.x, self.y, repr(self.pressed), repr(self.dragged))
#--- KEYBOARD ----------------------------------------------------------------------------------------
# Key codes:
BACKSPACE = "backspace"
DELETE = "delete"
TAB = "tab"
ENTER = "enter"
SPACE = "space"
ESCAPE = "escape"
UP = "up"
DOWN = "down"
# Note: these shadow the mouse button constants above, but with equal values.
LEFT = "left"
RIGHT = "right"
# Key modifiers:
OPTION = ALT = "option"
CTRL = "ctrl"
SHIFT = "shift"
COMMAND = "command"
MODIFIERS = (OPTION, CTRL, SHIFT, COMMAND)
class Keys(list):
    def __init__(self, canvas):
        """ Keeps track of the keys pressed and any modifiers (e.g. shift or control key).
        """
        self._canvas = canvas
        self.code = None # Last key pressed
        self.char = "" # Last key character representation (i.e., SHIFT + "a" = "A").
        self.modifiers = [] # Modifier keys pressed (OPTION, CTRL, SHIFT, COMMAND).
        self.pressed = False
    def append(self, code):
        # code can be a pyglet key symbol (int) or an already-decoded string.
        code = self._decode(code)
        if code in MODIFIERS:
            self.modifiers.append(code)
        list.append(self, code)
        self.code = self[-1]
    def remove(self, code):
        code = self._decode(code)
        if code in MODIFIERS:
            self.modifiers.remove(code)
        # code is already decoded (no need to decode twice; _decode is idempotent).
        list.remove(self, code)
        self.code = len(self) > 0 and self[-1] or None
    def _decode(self, code):
        """ Normalizes a pyglet key symbol (or string) to a lowercase key name.
        """
        if not isinstance(code, int):
            s = code
        else:
            s = pyglet.window.key.symbol_string(code) # 65288 => "BACKSPACE"
        s = s.lower() # "BACKSPACE" => "backspace"
        s = s.lstrip("_") # "_1" => "1"
        s = s.replace("return", ENTER) # "return" => "enter"
        s = s.replace("num_", "") # "num_space" => "space"
        s = s.endswith(MODIFIERS) and s.lstrip("lr") or s # "lshift" => "shift"
        return s
    def __repr__(self):
        # Fix: show the list of pressed key codes instead of repr(iter(self)),
        # which only printed a useless "<listiterator object ...>" address.
        return "Keys(char=%s, code=%s, modifiers=%s, pressed=%s)" % (
            repr(self.char), repr(list(self)), repr(self.modifiers), repr(self.pressed))
#=====================================================================================================
#--- CANVAS ------------------------------------------------------------------------------------------
# Background clear color component (greyscale) used in Canvas._setup().
VERY_LIGHT_GREY = 0.95
FRAME = 0
# Window styles.
WINDOW_DEFAULT = pyglet.window.Window.WINDOW_STYLE_DEFAULT
WINDOW_BORDERLESS = pyglet.window.Window.WINDOW_STYLE_BORDERLESS
# Configuration settings for the canvas.
# http://www.pyglet.org/doc/programming_guide/opengl_configuration_options.html
# The stencil buffer is enabled (we need it to do clipping masks).
# Multisampling will be enabled (if possible) to do anti-aliasing.
settings = OPTIMAL = dict(
# buffer_size = 32, # Let Pyglet decide automatically.
# red_size = 8,
# green_size = 8,
# blue_size = 8,
      depth_size = 24,
    stencil_size = 1,
      alpha_size = 8,
   double_buffer = 1,
  sample_buffers = 1,
         samples = 4
)
def _configure(settings):
    """ Returns a pyglet.gl.Config object from the given dictionary of settings.
        If the settings are not supported, returns the default settings.
    """
    screen = pyglet.window.get_platform().get_default_display().get_default_screen()
    c = pyglet.gl.Config(**settings)
    try:
        c = screen.get_best_config(c)
    except pyglet.window.NoSuchConfigException:
        # Probably the hardware doesn't support multisampling.
        # We can still do some anti-aliasing by turning on GL_LINE_SMOOTH.
        c = pyglet.gl.Config()
        c = screen.get_best_config(c)
    return c
class Canvas(list, Prototype, EventHandler):
    def __init__(self, width=640, height=480, name="NodeBox for OpenGL", resizable=False, border=True, settings=OPTIMAL, vsync=True):
        """ The main application window containing the drawing canvas.
            It is opened when Canvas.run() is called.
            It is a collection of drawable Layer objects, and it has its own draw() method.
            This method must be overridden with your own drawing commands, which will be executed each frame.
            Event handlers for keyboard and mouse interaction can also be overriden.
            Events will be passed to layers that have been appended to the canvas.
        """
        window = dict(
            caption = name,
            visible = False,
            width = width,
            height = height,
            resizable = resizable,
            style = border is False and WINDOW_BORDERLESS or WINDOW_DEFAULT,
            config = _configure(settings),
            vsync = vsync
        )
        Prototype.__init__(self)
        EventHandler.__init__(self)
        self.profiler = Profiler(self)
        self._window = pyglet.window.Window(**window)
        self._fps = 60 # Frames per second.
        self._frame = 0 # The current frame.
        self._elapsed = 0 # dt = time elapsed since last frame.
        self._active = False # Application is running?
        self.paused = False # Pause animation?
        self._mouse = Mouse(self) # The mouse cursor location.
        self._keys = Keys(self) # The keys pressed on the keyboard.
        self._focus = None # The layer being focused by the mouse.
        # Mouse and keyboard events:
        self._window.on_mouse_enter = self._on_mouse_enter
        self._window.on_mouse_leave = self._on_mouse_leave
        self._window.on_mouse_motion = self._on_mouse_motion
        self._window.on_mouse_press = self._on_mouse_press
        self._window.on_mouse_release = self._on_mouse_release
        self._window.on_mouse_drag = self._on_mouse_drag
        self._window.on_mouse_scroll = self._on_mouse_scroll
        # on_key_pressed is a custom flag (not a pyglet event): the low-level
        # key/text handlers set it, and _update() consumes it once per frame.
        self._window.on_key_pressed = False
        self._window.on_key_press = self._on_key_press
        self._window.on_key_release = self._on_key_release
        self._window.on_text = self._on_text
        self._window.on_text_motion = self._on_text_motion
        self._window.on_move = self._on_move
        self._window.on_resize = self._on_resize
        self._window.on_close = self.stop
    # Window caption, exposed as Canvas.name.
    def _get_name(self):
        return self._window.caption
    def _set_name(self, str):
        self._window.set_caption(str)
    name = property(_get_name, _set_name)
    # Vertical synchronisation of buffer swaps, exposed as Canvas.vsync.
    def _get_vsync(self):
        return self._window.vsync
    def _set_vsync(self, bool):
        self._window.set_vsync(bool)
    vsync = property(_get_vsync, _set_vsync)
@property
def layers(self):
return self
def insert(self, index, layer):
list.insert(self, index, layer)
layer.__dict__["canvas"] = self
def append(self, layer):
list.append(self, layer)
layer.__dict__["canvas"] = self
def extend(self, layers):
for layer in layers:
self.append(layer)
def remove(self, layer):
list.remove(self, layer)
layer.__dict__["canvas"] = None
def pop(self, index):
layer = list.pop(index)
layer.__dict__["canvas"] = None
return layer
    # Window position on screen, in pixels (delegated to pyglet).
    def _get_x(self):
        return self._window.get_location()[0]
    def _set_x(self, v):
        self._window.set_location(v, self.y)
    def _get_y(self):
        return self._window.get_location()[1]
    def _set_y(self, v):
        self._window.set_location(self.x, v)
    def _get_xy(self):
        return (self.x, self.y)
    def _set_xy(self, (x,y)):
        self.x = x
        self.y = y
    # Window size in pixels.
    def _get_width(self):
        return self._window.width
    def _get_height(self):
        return self._window.height
    def _get_size(self):
        return (self.width, self.height)
    def _set_width(self, v):
        self._window.width = v
    def _set_height(self, v):
        self._window.height = v
    def _set_size(self, (w,h)):
        self.width = w
        self.height = h
    x = property(_get_x, _set_x)
    y = property(_get_y, _set_y)
    xy = property(_get_xy, _set_xy)
    width = property(_get_width, _set_width)
    height = property(_get_height, _set_height)
    size = property(_get_size, _set_size)
    def _get_fullscreen(self):
        return self._window.fullscreen
    def _set_fullscreen(self, mode=True):
        self._window.set_fullscreen(mode)
    fullscreen = property(_get_fullscreen, _set_fullscreen)
    @property
    def screen(self):
        # The default screen of the default display (pyglet).
        return pyglet.window.get_platform().get_default_display().get_default_screen()
    @property
    def frame(self):
        """ Yields the current frame number.
        """
        return self._frame
    @property
    def elapsed(self):
        """ Yields the elapsed time since last frame.
        """
        return self._elapsed
    dt = elapsed
    @property
    def mouse(self):
        """ Yields a Point(x, y) with the mouse position on the canvas.
        """
        return self._mouse
    @property
    def keys(self):
        return self._keys
    @property # Backwards compatibility.
    def key(self):
        return self._keys
    @property
    def focus(self):
        # The layer the mouse is currently hovering over (see _on_mouse_motion).
        return self._focus
#--- Event dispatchers ------------------------------
# First events are dispatched, then update() and draw() are called.
def layer_at(self, x, y, **kwargs):
""" Find the topmost layer at the specified coordinates.
This method returns None if no layer was found.
"""
for layer in reversed(self):
layer = layer.layer_at(x, y, **kwargs)
if layer is not None:
return layer
return None
    def _on_mouse_enter(self, x, y):
        self._mouse.x = x
        self._mouse.y = y
        self.on_mouse_enter(self._mouse)
    def _on_mouse_leave(self, x, y):
        self._mouse.x = x
        self._mouse.y = y
        self.on_mouse_leave(self._mouse)
        # When the mouse leaves the canvas, no layer has the focus.
        if self._focus is not None:
            self._focus.on_mouse_leave(self._mouse)
            self._focus.focus = False
            self._focus.pressed = False
            self._focus.dragged = False
            self._focus = None
    def _on_mouse_motion(self, x, y, dx, dy):
        self._mouse.x = x
        self._mouse.y = y
        self._mouse.dx = int(dx)
        self._mouse.dy = int(dy)
        self.on_mouse_motion(self._mouse)
        # Get the topmost layer over which the mouse is hovering.
        layer = self.layer_at(x, y, enabled=True)
        # If the layer differs from the layer which currently has the focus,
        # or the mouse is not over any layer, remove the current focus.
        if self._focus is not None and (self._focus != layer or not self._focus.contains(x,y)):
            self._focus.on_mouse_leave(self._mouse)
            self._focus.focus = False
            self._focus = None
        # Set the focus.
        if self.focus != layer and layer is not None:
            self._focus = layer
            self._focus.focus = True
            self._focus.on_mouse_enter(self._mouse)
        # Propagate mouse motion to layer with the focus.
        if self._focus is not None:
            self._focus.on_mouse_motion(self._mouse)
    def _on_mouse_press(self, x, y, button, modifiers):
        self._mouse.pressed = True
        self._mouse.button = button
        # Translate pyglet's modifier bitmask to our string constants.
        self._mouse.modifiers = [a for (a,b) in (
            (CTRL, pyglet.window.key.MOD_CTRL),
            (SHIFT, pyglet.window.key.MOD_SHIFT),
            (OPTION, pyglet.window.key.MOD_OPTION)) if modifiers & b]
        self.on_mouse_press(self._mouse)
        # Propagate mouse clicking to the layer with the focus.
        if self._focus is not None:
            self._focus.pressed = True
            self._focus.on_mouse_press(self._mouse)
    def _on_mouse_release(self, x, y, button, modifiers):
        if self._focus is not None:
            self._focus.on_mouse_release(self._mouse)
            self._focus.pressed = False
            self._focus.dragged = False
        self.on_mouse_release(self._mouse)
        self._mouse.button = None
        self._mouse.modifiers = []
        self._mouse.pressed = False
        self._mouse.dragged = False
        if self._focus is not None:
            # Get the topmost layer over which the mouse is hovering.
            layer = self.layer_at(x, y, enabled=True)
            # If the mouse is no longer over the layer with the focus
            # (this can happen after dragging), remove the focus.
            if self._focus != layer or not self._focus.contains(x,y):
                self._focus.on_mouse_leave(self._mouse)
                self._focus.focus = False
                self._focus = None
            # Propagate mouse to the layer with the focus.
            if self._focus != layer and layer is not None:
                layer.focus = True
                layer.on_mouse_enter(self._mouse)
            self._focus = layer
    def _on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        self._mouse.dragged = True
        self._mouse.x = x
        self._mouse.y = y
        self._mouse.dx = int(dx)
        self._mouse.dy = int(dy)
        self._mouse.modifiers = [a for (a,b) in (
            (CTRL, pyglet.window.key.MOD_CTRL),
            (SHIFT, pyglet.window.key.MOD_SHIFT),
            (OPTION, pyglet.window.key.MOD_OPTION)) if modifiers & b]
        # XXX also needs to log buttons.
        self.on_mouse_drag(self._mouse)
        # Propagate mouse dragging to the layer with the focus.
        if self._focus is not None:
            self._focus.dragged = True
            self._focus.on_mouse_drag(self._mouse)
    def _on_mouse_scroll(self, x, y, scroll_x, scroll_y):
        self._mouse.scroll.x = scroll_x
        self._mouse.scroll.y = scroll_y
        self.on_mouse_scroll(self._mouse)
        # Propagate mouse scrolling to the layer with the focus.
        if self._focus is not None:
            self._focus.on_mouse_scroll(self._mouse)
    def _on_key_press(self, keycode, modifiers):
        self._keys.pressed = True
        self._keys.append(keycode)
        # TAB produces no on_text event, so set the character here.
        if self._keys.code == TAB:
            self._keys.char = "\t"
        # The event is delegated in _update():
        self._window.on_key_pressed = True
    def _on_key_release(self, keycode, modifiers):
        for layer in self:
            layer.on_key_release(self.key)
        self.on_key_release(self.key)
        self._keys.char = ""
        self._keys.remove(keycode)
        self._keys.pressed = False
    def _on_text(self, text):
        self._keys.char = text
        # The event is delegated in _update():
        self._window.on_key_pressed = True
    def _on_text_motion(self, keycode):
        # Arrow keys, home/end, etc.: no printable character.
        self._keys.char = ""
        # The event is delegated in _update():
        self._window.on_key_pressed = True
    def _on_move(self, x, y):
        self.on_move()
    def _on_resize(self, width, height):
        # Let pyglet update the GL viewport first, then notify user code.
        pyglet.window.Window.on_resize(self._window, width, height)
        self.on_resize()
    # Event methods are meant to be overridden or patched with Prototype.set_method().
    def on_key_press(self, keys):
        """ The default behavior of the canvas:
            - ESC exits the application,
            - CTRL-P pauses the animation,
            - CTRL-S saves a screenshot.
        """
        if keys.code == ESCAPE:
            self.stop()
        if keys.code == "p" and CTRL in keys.modifiers:
            self.paused = not self.paused
        if keys.code == "s" and CTRL in keys.modifiers:
            self.save("nodebox-%s.png" % str(datetime.now()).split(".")[0].replace(" ","-").replace(":","-"))
    def on_move(self):
        pass
    def on_resize(self):
        pass
    #--- Main loop --------------------------------------
    def setup(self):
        # Called once before the first frame; override with initialization code.
        pass
    def update(self):
        # Called each frame before draw(); override with animation state updates.
        pass
    def draw(self):
        self.clear()
    def draw_overlay(self):
        """ Override this method to draw once all the layers have been drawn.
        """
        pass
    draw_over = draw_overlay
    def _setup(self):
        # Set the window color, this will be transparent in saved images.
        glClearColor(VERY_LIGHT_GREY, VERY_LIGHT_GREY, VERY_LIGHT_GREY, 0)
        # Reset the transformation state.
        # Most of this is already taken care of in Pyglet.
        #glMatrixMode(GL_PROJECTION)
        #glLoadIdentity()
        #glOrtho(0, self.width, 0, self.height, -1, 1)
        #glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        # Enable line anti-aliasing.
        glEnable(GL_LINE_SMOOTH)
        # Enable alpha transparency.
        glEnable(GL_BLEND)
        glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ONE, GL_ONE_MINUS_SRC_ALPHA)
        #glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        # Start the application (if not already running).
        if not self._active:
            self._window.switch_to()
            self._window.dispatch_events()
            self._window.set_visible()
            self._active = True
        self.clear()
        self.setup()
    def _draw(self, lapse=0):
        """ Draws the canvas and its layers.
            This method gives the same result each time it gets drawn; only _update() advances state.
        """
        if self.paused:
            return
        self._window.switch_to()
        # Each drawing stage gets its own matrix scope so that transformations
        # in user code don't leak into the next stage.
        glPushMatrix()
        self.draw()
        glPopMatrix()
        glPushMatrix()
        for layer in self:
            layer._draw()
        glPopMatrix()
        glPushMatrix()
        self.draw_overlay()
        glPopMatrix()
    def _update(self, lapse=0):
        """ Updates the canvas and its layers.
            This method does not actually draw anything, it only updates the state.
        """
        self._elapsed = lapse
        if not self.paused:
            # Advance the animation by updating all layers.
            # This is only done when the canvas is not paused.
            # Events will still be propagated during pause.
            global TIME; TIME = time()
            self._frame += 1
            self.update()
            for layer in self:
                layer._update()
        if self._window.on_key_pressed is True:
            # Fire on_key_press() event,
            # which combines _on_key_press(), _on_text() and _on_text_motion().
            self._window.on_key_pressed = False
            self.on_key_press(self._keys)
            for layer in self:
                layer.on_key_press(self._keys)
    def stop(self):
        # If you override this method, don't forget to call Canvas.stop() to exit the app.
        # Any user-defined stop method, added with canvas.set_method() or canvas.run(stop=stop),
        # is called first.
        # NOTE(review): the bare except also hides errors raised *inside* a user-defined
        # stop handler, not just the missing-attribute case -- consider narrowing.
        try: self._user_defined_stop()
        except:
            pass
        # Unschedule the clock callbacks before closing the window.
        for f in (self._update, self._draw):
            pyglet.clock.unschedule(f)
        self._window.close()
        self._active = False
        pyglet.app.exit()
def clear(self):
""" Clears the previous frame from the canvas.
"""
glClear(GL_COLOR_BUFFER_BIT)
glClear(GL_DEPTH_BUFFER_BIT)
glClear(GL_STENCIL_BUFFER_BIT)
    def run(self, draw=None, setup=None, update=None, stop=None):
        """ Opens the application windows and starts drawing the canvas.
            Canvas.setup() will be called once during initialization.
            Canvas.draw() and Canvas.update() will be called each frame.
            Canvas.clear() needs to be called explicitly to clear the previous frame drawing.
            Canvas.stop() closes the application window.
            If the given setup, draw or update parameter is a function,
            it overrides that canvas method.
        """
        if isinstance(setup, FunctionType):
            self.set_method(setup, name="setup")
        if isinstance(draw, FunctionType):
            self.set_method(draw, name="draw")
        if isinstance(update, FunctionType):
            self.set_method(update, name="update")
        if isinstance(stop, FunctionType):
            self.set_method(stop, name="stop")
        self._setup()
        self.fps = self._fps # Schedule the _update and _draw events.
        pyglet.app.run()
    @property
    def active(self):
        return self._active
    def _get_fps(self):
        return self._fps
    def _set_fps(self, v):
        # Use pyglet.clock to schedule _update() and _draw() events.
        # The clock will then take care of calling them enough times.
        # Note: frames per second is related to vsync.
        # If the vertical refresh rate is about 30Hz you'll get top speed of around 33fps.
        # It's probably a good idea to leave vsync=True if you don't want to fry the GPU.
        for f in (self._update, self._draw):
            pyglet.clock.unschedule(f)
            if v is None:
                # No interval given: run as often as possible.
                pyglet.clock.schedule(f)
            if v > 0:
                pyglet.clock.schedule_interval(f, 1.0/v)
        self._fps = v
    fps = property(_get_fps, _set_fps)
#--- Frame export -----------------------------------
def render(self):
""" Returns a screenshot of the current frame as a texture.
This texture can be passed to the image() command.
"""
return pyglet.image.get_buffer_manager().get_color_buffer().get_texture()
buffer = screenshot = render
@property
def texture(self):
return pyglet.image.get_buffer_manager().get_color_buffer().get_texture()
def save(self, path):
""" Exports the current frame as a PNG-file.
"""
pyglet.image.get_buffer_manager().get_color_buffer().save(path)
#--- Prototype --------------------------------------
def __setattr__(self, k, v):
# Canvas is a Prototype, so Canvas.draw() can be overridden
# but it can also be patched with Canvas.set_method(draw).
# Specific methods (setup, draw, mouse and keyboard events) can also be set directly
# (e.g. canvas.on_mouse_press = my_mouse_handler).
# This way we don't have to explain set_method() to beginning users..
if isinstance(v, FunctionType) and (k in ("setup", "draw", "update", "stop") \
or k.startswith("on_") and k in (
"on_mouse_enter",
"on_mouse_leave",
"on_mouse_motion",
"on_mouse_press",
"on_mouse_release",
"on_mouse_drag",
"on_mouse_scroll",
"on_key_press",
"on_key_release",
"on_move",
"on_resize")):
self.set_method(v, name=k)
else:
object.__setattr__(self, k, v)
def set_method(self, function, name=None):
if name == "stop" \
or name is None and function.__name__ == "stop":
Prototype.set_method(self, function, name="_user_defined_stop") # Called from Canvas.stop().
else:
Prototype.set_method(self, function, name)
def __repr__(self):
return "Canvas(name='%s', size='%s', layers=%s)" % (self.name, self.size, repr(list(self)))
#--- PROFILER ----------------------------------------------------------------------------------------
CUMULATIVE = "cumulative"
SLOWEST = "slowest"
_profile_canvas = None
_profile_frames = 100
def profile_run():
for i in range(_profile_frames):
_profile_canvas._update()
_profile_canvas._draw()
class Profiler:
def __init__(self, canvas):
self.canvas = canvas
@property
def framerate(self):
return pyglet.clock.get_fps()
def run(self, draw=None, setup=None, update=None, frames=100, sort=CUMULATIVE, top=30):
""" Runs cProfile on the canvas for the given number of frames.
The performance statistics are returned as a string, sorted by SLOWEST or CUMULATIVE.
For example, instead of doing canvas.run(draw):
print canvas.profiler.run(draw, frames=100)
"""
# Register the setup, draw, update functions with the canvas (if given).
if isinstance(setup, FunctionType):
self.canvas.set_method(setup, name="setup")
if isinstance(draw, FunctionType):
self.canvas.set_method(draw, name="draw")
if isinstance(update, FunctionType):
self.canvas.set_method(update, name="update")
# If enabled, turn Psyco off.
psyco_stopped = False
try:
psyco.stop()
psyco_stopped = True
except:
pass
# Set the current canvas and the number of frames to profile.
# The profiler will then repeatedly execute canvas._update() and canvas._draw().
# Statistics are redirected from stdout to a temporary file.
global _profile_canvas, _profile_frames
_profile_canvas = self.canvas
_profile_frames = frames
import cProfile
import pstats
cProfile.run("profile_run()", "_profile")
p = pstats.Stats("_profile")
p.stream = open("_profile", "w")
p.sort_stats(sort==SLOWEST and "time" or sort).print_stats(top)
p.stream.close()
s = open("_profile").read()
remove("_profile")
# Restart Psyco if we stopped it.
if psyco_stopped:
psyco.profile()
return s
#--- LIBRARIES ---------------------------------------------------------------------------------------
# Import the library and assign it a _ctx variable containing the current context.
# This mimics the behavior in NodeBox for Mac OS X.
def ximport(library):
from sys import modules
library = __import__(library)
library._ctx = modules[__name__]
return library
#-----------------------------------------------------------------------------------------------------
# Linear interpolation math for BezierPath.point() etc.
import bezier
| [
"[email protected]"
]
| |
7cf435eb848c7d03f3f9aad53f457dca59045ba8 | fb91c53b311cf191bc0f3a4efe5b1a0ba197944e | /play/tmp/0003_auto__add_field_coupon_coupons_released.py | e83fcdb8627bc6416c6a929aaadb00a6125eb43e | [
"MIT"
]
| permissive | fraferra/PlayCity | e0ba878c52a321afbdbba68d25717b29a5dd3109 | 2bf97c30599b686ca0e642d1ebaf73fc99705291 | refs/heads/master | 2021-01-19T16:26:24.955470 | 2014-08-08T21:56:08 | 2014-08-08T21:56:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,807 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Coupon.coupons_released'
db.add_column(u'play_coupon', 'coupons_released',
self.gf('django.db.models.fields.DecimalField')(default=10, max_digits=4, decimal_places=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Coupon.coupons_released'
db.delete_column(u'play_coupon', 'coupons_released')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'play.challenge': {
'Meta': {'object_name': 'Challenge'},
'challenge_type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['play.Player']", 'symmetrical': 'False'}),
'points': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
u'play.coupon': {
'Meta': {'object_name': 'Coupon'},
'buyers': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'to': u"orm['play.Player']", 'null': 'True', 'symmetrical': 'False'}),
'coupons_released': ('django.db.models.fields.DecimalField', [], {'default': '10', 'max_digits': '4', 'decimal_places': '0'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['play.Shop']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
u'play.couponhistory': {
'Meta': {'object_name': 'CouponHistory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'player': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['play.Player']"}),
'shop': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
u'play.event': {
'Meta': {'object_name': 'Event'},
'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True'}),
'event_type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50'}),
'experience': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '5', 'decimal_places': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'organizer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['play.Organization']"}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'to': u"orm['play.Player']", 'null': 'True', 'symmetrical': 'False'}),
'points': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
u'play.eventhistory': {
'Meta': {'object_name': 'EventHistory'},
'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'player': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['play.Player']"}),
'points': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
u'play.idea': {
'Meta': {'object_name': 'Idea'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True'}),
'experience': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '5', 'decimal_places': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
u'play.organization': {
'Meta': {'object_name': 'Organization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "'Super Duper!'", 'max_length': '100', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'play.player': {
'Meta': {'object_name': 'Player'},
'experience': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '5', 'decimal_places': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '4', 'decimal_places': '0'}),
'picture_url': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
'score': ('django.db.models.fields.DecimalField', [], {'default': '20', 'null': 'True', 'max_digits': '4', 'decimal_places': '0'}),
'token': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'play.shop': {
'Meta': {'object_name': 'Shop'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "'Super shop!'", 'max_length': '100', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
}
}
complete_apps = ['play'] | [
"[email protected]"
]
| |
e4f7d50b81def02a4fc5c109097676d372a8b5c3 | fbb12b2b7dcf7f2a33235f6766b4176c083a0c8e | /ARsyntax/workflow/rules/pseudoReplicates.smk | 66d10732db24581cccd7e5b362ac228197b0e3d1 | []
| no_license | birkiy/TermProjectCOMP541 | b76c8fa3a01e48dc302dc040a2c499c2c9f1b8ba | 400a81765889a21d0590b599c4ba0e529a56e3ca | refs/heads/main | 2023-01-19T21:36:55.085293 | 2020-11-30T12:59:14 | 2020-11-30T12:59:14 | 306,048,866 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,596 | smk |
folder = "results/mapping/processed"
rule pseudoReplicates:
input:
"results/mapping/processed/{raw}.merged.final.bam"
output:
header=temp("results/mapping/processed/{raw}.merged.header.final.sam"),
pseudo1="results/mapping/processed/{raw}.pseudo1.final.bam",
pseudo2="results/mapping/processed/{raw}.pseudo2.final.bam"
message:
"Executing pseudoReplicates rule for {wildcards.raw}"
shell:
"""
samtools view -H {input} > {output.header}
#Split merged treatments
nlines=$(samtools view {input} | wc -l )
nlines=$(( (nlines + 1) / 2 )) # half that number
samtools view {input} | shuf - | split -d -l $nlines - "{folder}/{wildcards.raw}"
cat {output.header} {folder}/{wildcards.raw}00 | \
samtools view -bS - > {output.pseudo1}
cat {output.header} {folder}/{wildcards.raw}01 | \
samtools view -bS - > {output.pseudo2}
"""
rule pool:
input:
expand("results/mapping/processed/{{raw}}.{rep}.final.bam", rep=["rep1", "rep2"])
output:
"results/mapping/processed/{raw}.merged.final.bam"
message:
"Executing pool rule for {wildcards.raw}"
threads:
16
shell:
"""
#Merge treatment BAMS
samtools merge -@ {threads} -u {output} {input}
"""
| [
"[email protected]"
]
| |
b7345219fb5ba716b3fed095337bf4ff6b1df307 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_2/nwxtho001/question2.py | e5b2fd67ffe16d5f456ab603de434f28d2291d9f | []
| no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,398 | py | print ("Welcome to the 30 Second Rule Expert\n------------------------------------\nAnswer the following questions by selecting from among the options.")
seen = input ('Did anyone see you? (yes/no)\n')
if seen == 'yes' :
seen_type = input ('Was it a boss/lover/parent? (yes/no)\n')
if seen_type == 'no' :
print ('Decision: Eat it.')
else :
exp = input ('Was it expensive? (yes/no)\n')
if exp == 'yes' :
cut = input ('Can you cut off the part that touched the floor? (yes/no)\n')
if cut == 'yes' :
print ('Decision: Eat it.')
else :
print ('Decision: Your call.')
else :
choc = input ('Is it chocolate? (yes/no)\n')
if choc == 'yes' :
print ('Decision: Eat it.')
else :
print ('Decision: Don\'t eat it.')
else :
sticky = input ('Was it sticky? (yes/no)\n')
if sticky == 'yes' :
steak = input ('Is it a raw steak? (yes/no)\n')
if steak == 'yes' :
puma = input ('Are you a puma? (yes/no)\n')
if puma == 'yes' :
print ('Decision: Eat it.')
else :
print ('Decision: Don\'t eat it.')
else :
cat = input ('Did the cat lick it? (yes/no)\n')
if cat == 'yes' :
health = input ('Is your cat healthy? (yes/no)\n')
if health == 'yes' :
print ('Decision: Eat it.')
else :
print ('Decision: Your call.')
else :
print ('Decision: Eat it.')
else :
emau = input ('Is it an Emausaurus? (yes/no)\n')
if emau == 'yes':
mega = input ('Are you a Megalosaurus? (yes/no)\n')
if mega == 'yes' :
print ('Decision: Eat it.')
else :
print ('Decision: Don\'t eat it.')
else :
cat = input ('Did the cat lick it? (yes/no)\n')
if cat == 'yes' :
health = input ('Is your cat healthy? (yes/no)\n')
if health == 'yes' :
print ('Decision: Eat it.')
else :
print ('Decision: Your call.')
else :
print ('Decision: Eat it.') | [
"[email protected]"
]
| |
f486e9a0a0c4bfa8648db2f3ab716096708a8df8 | 4b7e282fe480415f5d52c0fc0429f144156190fe | /google/ads/googleads/v8/common/types/feed_common.py | 12888a33eb9f184c2402a3337e503e869b2be75f | [
"Apache-2.0"
]
| permissive | Z2Xsoft/google-ads-python | c4750357bb19da91bb3b6bf2fa84bef9d2df36d3 | 1779d52a0446c8afb2437b0a9e103dcb849f5590 | refs/heads/main | 2023-08-18T15:22:17.840364 | 2021-09-26T04:08:53 | 2021-09-26T04:08:53 | 410,444,398 | 0 | 0 | Apache-2.0 | 2021-09-26T04:08:53 | 2021-09-26T03:55:38 | null | UTF-8 | Python | false | false | 1,263 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v8.common",
marshal="google.ads.googleads.v8",
manifest={"Money",},
)
class Money(proto.Message):
r"""Represents a price in a particular currency.
Attributes:
currency_code (str):
Three-character ISO 4217 currency code.
amount_micros (int):
Amount in micros. One million is equivalent
to one unit.
"""
currency_code = proto.Field(proto.STRING, number=3, optional=True,)
amount_micros = proto.Field(proto.INT64, number=4, optional=True,)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"[email protected]"
]
| |
d5655f14e27d61edfb7d6882009fe9f0ad295296 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2755/60793/267817.py | 8e0ef8cec59b834a9a8d68728452208db38b0567 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | for test in range(0, int(input())):
input()
ls1 = list(map(int, input().split()))
ls2 = list(map(int, input().split()))
l1, l2 = len(ls1), len(ls2)
ls3 = [0 for x in range(0, l1 + l2 - 1)]
for i in range(0, l1):
for j in range(0, l2):
ls3[i + j] += ls1[i] * ls2[j]
for i in ls3:
print(i, end=" ")
print(ls3[-1]) | [
"[email protected]"
]
| |
086a9a37c222334524b2121455b685678a95f665 | 63c7060562ec5d1a9153f0454ea6886b0a62a28e | /tb/axi_cdma/test_axi_cdma.py | 6b7ce9326dc3e25a24752ed080d6e17b2cf42064 | [
"MIT"
]
| permissive | alexforencich/verilog-axi | 666e6dfbd14fd124bdcbc2798b4f557347fb8261 | 38915fb5330cb8270b454afc0140a94489dc56db | refs/heads/master | 2023-03-30T07:34:17.721579 | 2023-03-30T07:12:13 | 2023-03-30T07:12:13 | 142,810,315 | 1,042 | 342 | MIT | 2023-03-05T19:52:57 | 2018-07-30T01:36:26 | Verilog | UTF-8 | Python | false | false | 6,800 | py | """
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import cocotb_test.simulator
import pytest
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory
from cocotbext.axi import AxiBus, AxiRam
from cocotbext.axi.stream import define_stream
DescBus, DescTransaction, DescSource, DescSink, DescMonitor = define_stream("Desc",
signals=["read_addr", "write_addr", "len", "tag", "valid", "ready"]
)
DescStatusBus, DescStatusTransaction, DescStatusSource, DescStatusSink, DescStatusMonitor = define_stream("DescStatus",
signals=["tag", "error", "valid"]
)
class TB(object):
def __init__(self, dut):
self.dut = dut
self.log = logging.getLogger("cocotb.tb")
self.log.setLevel(logging.DEBUG)
cocotb.start_soon(Clock(dut.clk, 10, units="ns").start())
# control interface
self.desc_source = DescSource(DescBus.from_prefix(dut, "s_axis_desc"), dut.clk, dut.rst)
self.desc_status_sink = DescStatusSink(DescStatusBus.from_prefix(dut, "m_axis_desc_status"), dut.clk, dut.rst)
# AXI interface
self.axi_ram = AxiRam(AxiBus.from_prefix(dut, "m_axi"), dut.clk, dut.rst, size=2**16)
dut.enable.setimmediatevalue(0)
def set_idle_generator(self, generator=None):
if generator:
self.desc_source.set_pause_generator(generator())
self.axi_ram.write_if.b_channel.set_pause_generator(generator())
self.axi_ram.read_if.r_channel.set_pause_generator(generator())
def set_backpressure_generator(self, generator=None):
if generator:
self.axi_ram.write_if.aw_channel.set_pause_generator(generator())
self.axi_ram.write_if.w_channel.set_pause_generator(generator())
self.axi_ram.read_if.ar_channel.set_pause_generator(generator())
async def cycle_reset(self):
self.dut.rst.setimmediatevalue(0)
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst.value = 1
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst.value = 0
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
async def run_test(dut, data_in=None, idle_inserter=None, backpressure_inserter=None):
tb = TB(dut)
byte_lanes = tb.axi_ram.write_if.byte_lanes
step_size = 1 if int(os.getenv("PARAM_ENABLE_UNALIGNED")) else byte_lanes
tag_count = 2**len(tb.desc_source.bus.tag)
cur_tag = 1
await tb.cycle_reset()
tb.set_idle_generator(idle_inserter)
tb.set_backpressure_generator(backpressure_inserter)
dut.enable.value = 1
for length in list(range(1, byte_lanes*4+1))+[128]:
for read_offset in list(range(8, 8+byte_lanes*2, step_size))+list(range(4096-byte_lanes*2, 4096, step_size)):
for write_offset in list(range(8, 8+byte_lanes*2, step_size))+list(range(4096-byte_lanes*2, 4096, step_size)):
tb.log.info("length %d, read_offset %d, write_offset %d", length, read_offset, write_offset)
read_addr = read_offset+0x1000
write_addr = 0x00008000+write_offset+0x1000
test_data = bytearray([x % 256 for x in range(length)])
tb.axi_ram.write(read_addr, test_data)
tb.axi_ram.write(write_addr & 0xffff80, b'\xaa'*(len(test_data)+256))
desc = DescTransaction(read_addr=read_addr, write_addr=write_addr, len=len(test_data), tag=cur_tag)
await tb.desc_source.send(desc)
status = await tb.desc_status_sink.recv()
tb.log.info("status: %s", status)
assert int(status.tag) == cur_tag
assert int(status.error) == 0
tb.log.debug("%s", tb.axi_ram.hexdump_str((write_addr & ~0xf)-16, (((write_addr & 0xf)+length-1) & ~0xf)+48))
assert tb.axi_ram.read(write_addr-8, len(test_data)+16) == b'\xaa'*8+test_data+b'\xaa'*8
cur_tag = (cur_tag + 1) % tag_count
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
def cycle_pause():
return itertools.cycle([1, 1, 1, 0])
if cocotb.SIM_NAME:
for test in [run_test]:
factory = TestFactory(test)
factory.add_option("idle_inserter", [None, cycle_pause])
factory.add_option("backpressure_inserter", [None, cycle_pause])
factory.generate_tests()
# cocotb-test
tests_dir = os.path.abspath(os.path.dirname(__file__))
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
@pytest.mark.parametrize("unaligned", [0, 1])
@pytest.mark.parametrize("axi_data_width", [8, 16, 32])
def test_axi_cdma(request, axi_data_width, unaligned):
dut = "axi_cdma"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = dut
verilog_sources = [
os.path.join(rtl_dir, f"{dut}.v"),
]
parameters = {}
parameters['AXI_DATA_WIDTH'] = axi_data_width
parameters['AXI_ADDR_WIDTH'] = 16
parameters['AXI_STRB_WIDTH'] = parameters['AXI_DATA_WIDTH'] // 8
parameters['AXI_ID_WIDTH'] = 8
parameters['AXI_MAX_BURST_LEN'] = 16
parameters['LEN_WIDTH'] = 20
parameters['TAG_WIDTH'] = 8
parameters['ENABLE_UNALIGNED'] = unaligned
extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
parameters=parameters,
sim_build=sim_build,
extra_env=extra_env,
)
| [
"[email protected]"
]
| |
570e791962616acf2b90d808f402aaea2ee15533 | e1834bce67d20e73d10eb4533584d635f2840782 | /onir/datasets/nyt.py | 14b837a35362df7067ee673b9743e46b16f78994 | [
"MIT"
]
| permissive | tgeral68/OpenNIR | f1d8361c1543fda401386ee5d87ecb14766c16da | 225b26185bd67fdc00f24de3ef70d35768e22243 | refs/heads/master | 2023-02-27T02:06:28.357884 | 2021-02-08T16:22:09 | 2021-02-08T16:22:09 | 327,644,600 | 0 | 0 | MIT | 2021-02-08T15:55:28 | 2021-01-07T15:03:22 | null | UTF-8 | Python | false | false | 16,788 | py | import os
import tarfile
import contextlib
import functools
from glob import glob
from multiprocessing import Pool
from pytools import memoize_method
from bs4 import BeautifulSoup
import onir
from onir import datasets, util, indices, log, config
from onir.interfaces import trec, plaintext
logger = log.easy()
_HELD_OUT_IDS = {'1206388', '46335', '1223589', '1642970', '144845', '420493', '1186325', '564166', '1092844', '1232733', '243508', '946470', '1147459', '84957', '87385', '1298633', '1327402', '1482333', '1069716', '1575477', '1110091', '655579', '1562062', '541298', '1571257', '639395', '1341710', '663400', '1174700', '1406944', '1368755', '1315376', '1609162', '1746895', '1447812', '193348', '882027', '213652', '126658', '799474', '1677212', '1254313', '43743', '250901', '426439', '1803638', '1111630', '1220244', '1142672', '944176', '860862', '342011', '1556809', '1574691', '292048', '855559', '1473717', '157893', '252570', '305646', '198014', '1444467', '1842149', '161276', '455333', '146910', '1414339', '1413851', '1352725', '509114', '563685', '1738087', '1115555', '639541', '427073', '1435887', '862324', '476212', '870108', '315852', '144389', '684154', '845724', '117999', '35935', '716125', '1818546', '551762', '687923', '1817616', '135841', '618338', '1597113', '1549790', '1292666', '147051', '1778945', '1347630', '1337511', '299371', '1384273', '388274', '938995', '263847', '195638', '303927', '646946', '1620311', '1455534', '325463', '1380230', '1038853', '1040633', '1831119', '363686', '260491', '1611855', '147526', '542544', '581106', '1766627', '899656', '236785', '1408409', '300748', '742732', '986023', '1662861', '1083296', '152722', '1458233', '1203328', '1810235', '996231', '1226680', '427277', '517560', '1230947', '185677', '1524891', '492603', '1023515', '334223', '1219069', '1021319', '152336', '1227959', '1501876', '765819', '395940', '524179', '1494335', '66871', '105130', '1660760', '744794', '1616161', '876120', '714837', '35529', '42617', '198139', '1811671', '147293', '1041065', '841417', '1346509', '200467', '850536', '1235945', '184078', '1269259', '1314141', '1368414', '387436', '896464', '84650', '375608', '423014', '1201696', '883245', '137547', '1376881', '1207160', '280170', '968570', '1438840', '626732', '1085071', '632127', 
'1206647', '399973', '1316303', '1187122', '805546', '1727291', '570037', '1178896', '555992', '977573', '1340396', '632958', '63542', '1280664', '977205', '1567169', '783676', '814977', '1668678', '1735184', '1074278', '1652858', '1108702', '955404', '1784962', '1185130', '250831', '818408', '623624', '134405', '104342', '965709', '956076', '1260229', '27255', '1500603', '1127679', '1722973', '1734641', '309555', '1681934', '695555', '48767', '433808', '995051', '180797', '123367', '378006', '1216681', '324683', '1711346', '211935', '1801492', '103678', '446767', '594334', '860460', '660793', '1393998', '266826', '876460', '994066', '1282229', '1587147', '815344', '1103826', '343997', '1200405', '179480', '742314', '1780439', '1066709', '1330760', '1368900', '1549318', '1110897', '619788', '188464', '173770', '34154', '578909', '645650', '1157537', '62836', '700552', '1388063', '408649', '848686', '1694615', '1617883', '1765655', '1466678', '155464', '1445513', '1303273', '231804', '581627', '742052', '1212886', '1405769', '481040', '1855639', '54259', '111905', '1313586', '387001', '1185491', '1670617', '906527', '69825', '499522', '1819890', '164762', '970999', '1179216', '993221', '372699', '296270', '1185999', '792835', '1037962', '1740374', '1624046', '954664', '368818', '1087747', '1026355', '812422', '1544110', '1226870', '155570', '1190376', '869921', '296349', '595907', '614301', '1241703', '442373', '995807', '1369864', '1709789', '114305', '184927', '1120202', '584073', '828184', '1473187', '1521230', '440704', '1013610', '1830313', '721770', '1658974', '313921', '692325', '368461', '985252', '290240', '1251117', '1538562', '422046', '1630032', '1181653', '125066', '1837263', '1656997', '441', '490006', '1643057', '165954', '69049', '1199388', '1507218', '1329673', '509136', '1466695', '16687', '508419', '268880', '969961', '340902', '253378', '256155', '863620', '1683671', '1560798', '675553', '1748098', '458865', '1665924', '1055150', '66385', 
'215071', '13148', '986080', '236365', '517825', '873311', '441741', '720189', '572737', '1225926', '624119', '997868', '515426', '691257', '419206', '1130476', '100471', '6461', '1807548', '1544601', '407787', '380030', '1152266', '1065150', '694778', '811554', '1854529', '444117', '1099590', '922315', '1217477', '1779802', '369061', '775743', '72992', '144419', '552889', '1181556', '1292830', '1778514', '1489202', '914269', '1706337', '1196929', '184181', '314027', '1227737', '559948', '784834', '1704396', '1256508', '1508836', '317087', '96486', '747998', '1632274', '950708', '1649807', '446890', '593993', '814566', '1292672', '560408', '1077779', '978883', '393982', '844217', '398230', '183055', '53060', '1210135', '916178', '1532407', '1139738', '1518821', '728959', '1304148', '491724', '1568275', '712403', '1728481', '660217', '821176', '1222683', '1778005', '1195123', '1817074', '974513', '426701', '1111638', '1240027', '1664639', '1464379', '521007', '1199739', '578456', '1439699', '284928', '494919', '491912', '232568', '923474', '99386', '1643092', '1790124', '1061993', '621986', '1122877', '100662', '1473138', '1030173', '71586', '1096287', '1138157', '262640', '602945', '1300130', '1338721', '1270177', '39801', '1692635', '56624', '211659', '1646283', '324374', '255385', '1255526', '1786203', '1406143', '1788514', '289251', '672936', '452286', '137862', '185683', '1430', '1380422', '845912', '775802', '647375', '145796', '355527', '146542', '1410218', '345442', '190717', '371036', '1797336', '120994', '1718571', '1054043', '4558', '428059', '1396897', '1201117', '1158485', '1089656', '519981', '43015', '520964', '1494349', '1094063', '1392684', '978574', '1052143', '1118795', '1687088', '1314160', '162771', '911024', '1820168', '1192318', '91766', '143489', '1004985', '518421', '166275', '370104', '974150', '546915', '1323563', '1798085', '938123', '182313', '1364401', '9506', '557187', '112370', '611777', '1159485', '1403348', '683930', '797900', 
'1383582', '114608', '350383', '1604331', '568871', '1047323', '394651', '165898', '283949', '810556', '105425', '1013875', '1464119', '1312394', '1695169', '58536', '1169598', '1125874', '1665958', '769476', '594319', '683707', '882361', '1302321', '450679', '254550', '1033539', '1301128', '1320428', '41154', '1657029', '1227578', '171871', '1792745', '288902', '453868', '271254', '409591', '143722', '535764', '1830350', '578047', '230266', '111402', '773754', '1245031', '1350576', '1624207', '1807992', '1015799', '1794740', '511024', '789525', '319777', '1132669', '1327710', '1272568', '1390168', '1533260', '617767', '638910', '496086', '1205039', '1626665', '191596', '1810513', '1556267', '1100153', '207238', '1501543', '834402', '279588', '568816', '1632682', '822260', '343317', '430137', '1768788', '545282', '279954', '165473', '828347', '1470816', '1327112', '1529515', '1016007', '270386', '1702078', '286404', '1088273', '1322387', '1643857', '489043', '380855', '1083556', '1619528', '583350', '132853', '546862', '1253587', '535138', '264437', '943235', '1620828', '1006607', '553760', '828792', '1624460', '1434951', '833541', '212690', '200229', '1064862', '220330', '1579543', '363926', '1258350', '1184051', '720391', '1459592', '457690', '38548', '81369', '1679222', '390074', '286007', '378270', '816642', '283001', '372084', '411601', '910971', '1590440', '135775', '1112005', '75424', '213834', '689492', '1005355', '1139329', '808335', '720425', '1267233', '263546', '1222854', '258056', '837513', '940506', '1103175', '1378900', '1385626', '237112', '730612', '301649', '273771', '497029', '736059', '1193481', '797044', '1144902', '1030001', '719277', '1119289', '1337197', '942773', '982474', '584235', '1707268', '1754255', '1104478', '1534921', '128481', '470969', '347013', '509587', '408644', '772685', '1733430', '1317735', '848134', '404829', '267884', '953680', '1303696', '884333', '968388', '1201708', '1112434', '303328', '1304264', '1133757', '1724836', 
'1334405', '1829066', '925761', '946016', '552534', '943383', '1100246', '1846843', '1088146', '544438', '1753939', '74810', '1807078', '100915', '1236323', '803592', '429972', '393687', '1378937', '456043', '1613185', '613184', '417913', '1563559', '1339387', '1502489', '656071', '365604', '1151482', '1259752', '277596', '673808', '161493', '873580', '832327', '260612', '924572', '1064547', '1125330', '1641045', '1151695', '256879', '394244', '556588', '1305678', '1263185', '136826', '1399892', '557148', '1358190', '1776190', '249236', '1492533', '1303288', '521017', '1066272', '541133', '1623539', '137859', '687241', '237814', '1369332', '371264', '24081', '1552898', '1502059', '1047404', '1023221', '177279', '1267817', '1411135', '191656', '980600', '951516', '499404', '1695509', '811244', '238763', '1284303', '585143', '1033260', '942257', '1349353', '1429932', '140492', '1044892', '418808', '698145', '1796223', '59227', '194957', '269275', '730734', '1145222', '253742', '581098', '45351', '66070', '426605', '1050966', '529688', '1801056', '1718077', '1266182', '129555', '1531233', '74473', '302447', '215843', '792070', '1104761', '1573381', '202553', '60314', '1503921', '280964', '711987', '136821', '832921', '1419515', '1662966', '1819530', '716942', '219736', '436016', '1735969', '713752', '60858', '121707', '689812', '193395', '1624062', '1330056', '563645', '1492653', '1449544', '376209', '1750188', '1478352', '410699', '777880', '1029514', '108914', '720269', '1448513', '74549', '972109', '215002', '404357', '1647764', '550693', '1255375', '1293865', '1264570', '896848', '789563', '826347', '903589', '1018558', '277290', '1683375', '1496790', '1112399', '860557', '127350', '1015623', '312660', '233953', '1565217', '1639977', '1607902', '397905', '490534', '1513419', '174443', '1215224', '66269', '275494', '209655', '516500', '1675849', '836893', '947869', '789401', '1553981', '155710', '496679', '821652', '1139493', '286234', '128146', '1207153', 
'1199733', '1778364', '1704065', '326315', '317132', '1824346', '319345', '1219375', '99297', '1850878', '755324', '1737932', '1556261', '1389561', '128767', '24850', '1105008', '1046487', '390245', '899371', '623036', '1190883', '1218126', '334762', '1496567', '1228970', '540795', '689403', '1465965', '1585171', '734591', '1257610', '685476', '784313', '1178416', '1468942', '883627', '1000719', '952670', '51709', '933442'}
@datasets.register('nyt')
class NytDataset(datasets.IndexBackedDataset):
    """
    Interface to the New York Times (NYT) dataset, useful for content-based weak supervision.
    > Sean MacAvaney, Andrew Yates, Kai Hui, Ophir Frieder. Content-Based Weak Supervision for
    > Ad-Hoc Re-Ranking. SIGIR 2019.
    """
    DUA = """Will begin downloading Robust04 dataset.
Copy or link NYT source file directory (contains data/1987/01.tgz, data/1987/02.tgz, ...) to:
{ds_path}/nyt_corpus
Please confirm you agree to the authors' data usage stipulations found at
https://catalog.ldc.upenn.edu/LDC2008T19"""

    @staticmethod
    def default_config():
        """Default configuration: adds the 'main'/'heldout' subset choice."""
        result = datasets.IndexBackedDataset.default_config()
        result.update({
            'subset': config.Choices(['main', 'heldout']),
        })
        return result

    def __init__(self, config, vocab):
        super().__init__(config, logger, vocab)
        base_path = util.path_dataset(self)
        # Unstemmed index (per-record lookups), Porter-stemmed index (batch
        # search), and a sqlite-backed raw document store.
        self.index = indices.AnseriniIndex(os.path.join(base_path, 'anserini'), stemmer='none')
        self.index_stem = indices.AnseriniIndex(os.path.join(base_path, 'anserini.porter'), stemmer='porter')
        self.doc_store = indices.SqliteDocstore(os.path.join(base_path, 'docs.sqllite'))

    def _get_index(self, record):
        return self.index

    def _get_docstore(self):
        return self.doc_store

    def _get_index_for_batchsearch(self):
        return self.index_stem

    def qrels(self, fmt='dict'):
        return self._load_qrels(self.config['subset'], fmt=fmt)

    def _load_run_base(self, index, subset, rankfn, ranktopk, fmt='dict', fscache=False, memcache=True):
        # Same as the base implementation, but pins fscache/memcache defaults.
        return super()._load_run_base(index, subset, rankfn, ranktopk, fmt, fscache, memcache)

    @memoize_method
    def _load_qrels(self, subset, fmt):
        """Load (and memoize) the qrels file for the given subset."""
        with logger.duration('loading qrels'):
            base_path = util.path_dataset(self)
            path = os.path.join(base_path, f'{subset}.qrels')
            return trec.read_qrels_fmt(path, fmt)

    def load_queries(self) -> dict:
        return self._load_queries_base(self.config['subset'])

    @memoize_method
    def _load_queries_base(self, subset):
        """Load (and memoize) qid -> query text for the given subset."""
        with logger.duration('loading queries'):
            base_path = util.path_dataset(self)
            path = os.path.join(base_path, f'{subset}.queries')
            return dict(plaintext.read_tsv(path))

    def pair_iter_pos_candidates_intersect(self, qrels_fn, run_fn, pos_minrel):
        # overrides
        # simply removes anything that doesn't retrieve itself in the ranktopk results
        run = run_fn()
        return run[run['qid'] == run['did']]

    def pair_iter_neg_candidates_run(self, qrels_fn, run_fn, unjudged_rel):
        # overrides
        return run_fn()

    def pair_iter_neg_candidates_union(self, qrels_fn, run_fn, unjudged_rel):
        # overrides
        raise ValueError('unsupported operation')

    def pair_iter_neg_candidates_qrels(self, qrels_fn, run_fn, unjudged_rel):
        # overrides
        raise ValueError('unsupported operation')

    def init(self, force=False):
        """Build any missing indices/doc stores and query/qrels files.

        The corpus is iterated only once: the document stream is tee'd to
        every consumer that still needs building, each on its own thread.
        """
        path = util.path_dataset(self)
        needs_collection = []
        for index in [self.index, self.index_stem, self.doc_store]:
            if force or not index.built():
                needs_collection.append(index.build)
        for subset in ['main', 'heldout']:
            is_heldout = (subset == 'heldout')
            query_file = os.path.join(path, f'{subset}.queries')
            if force or not os.path.exists(query_file):
                needs_collection.append(self._init_build_queryfile(query_file, is_heldout))
            qrels_file = os.path.join(path, f'{subset}.qrels')
            # BUG FIX: this condition previously re-tested query_file, so a
            # missing qrels file was never (re)built once the queries existed.
            if force or not os.path.exists(qrels_file):
                needs_collection.append(self._init_build_qrels(qrels_file, is_heldout))
        if needs_collection and self._confirm_dua():
            with contextlib.ExitStack() as stack:
                collection_iter = logger.pbar(self._init_iter_corpus(), desc='collection')
                sub_iters = util.blocking_tee(collection_iter, len(needs_collection))
                for fn, it in zip(needs_collection, sub_iters):
                    stack.enter_context(util.CtxtThread(functools.partial(fn, it)))

    def _init_iter_corpus(self):
        """Yield parsed documents from every monthly .tgz in the NYT corpus."""
        nyt_corpus_dir = os.path.join(util.path_dataset(self), 'nyt_corpus')
        with Pool(onir.util.safe_thread_count()) as pool:
            for tgz_file in sorted(glob(f'{nyt_corpus_dir}/*/*.tgz')):
                logger.debug(f'reading file {tgz_file}')
                # Parse the XML articles in parallel; skip unparsable ones.
                for doc in pool.imap(_parse_file, _read_tarfile(tgz_file)):
                    if doc:
                        yield doc

    def _init_build_queryfile(self, file, is_heldout):
        """Return a stream consumer that writes did<TAB>headline for this subset."""
        def wrapped(it):
            with util.finialized_file(file, 'wt') as f:
                for doc in it:
                    if is_heldout == (doc.did in _HELD_OUT_IDS):
                        plaintext.write_tsv(f, [(doc.did, doc.data['headline'])])
        return wrapped

    def _init_build_qrels(self, file, is_heldout):
        """Return a stream consumer that marks each document relevant to itself."""
        def wrapped(it):
            with util.finialized_file(file, 'wt') as f:
                for doc in it:
                    if is_heldout == (doc.did in _HELD_OUT_IDS):
                        trec.write_qrels_dict(f, {doc.did: {doc.did: 1}})
        return wrapped
def _parse_file(text):
    """Parse one NYT article (NITF XML bytes) into a RawDoc, or None if unusable."""
    soup = BeautifulSoup(text, 'lxml-xml')
    did = soup.find('doc-id')
    if did is None:
        # No document id element: not a usable article record.
        return None
    did = did['id-string']
    content = soup.find_all('block')
    headline = soup.find('hl1')  # 'headline' element can contain multiple (e.g. hl2 for online)
    if content and headline:
        # The last <block> appears to hold the full article text — the
        # earlier blocks are lead paragraphs (TODO confirm against NITF spec).
        content = content[-1].get_text()
        headline = headline.get_text()
        return indices.misc.RawDoc(did, text=content, headline=headline.strip())
    return None
def _read_tarfile(tgz_fn):
    """Yield the raw bytes of every regular file stored in the given tarball."""
    with tarfile.open(tgz_fn, 'r') as archive:
        regular_members = (m for m in archive.getmembers() if m.isfile())
        for entry in regular_members:
            yield archive.extractfile(entry).read()
| [
"[email protected]"
]
| |
3611831f18561cfa5af0f745acdf03a946f45c97 | d3762b1b4d908b2b43f6e0ae362daa7136c6c7a4 | /elections/management/commands/migrate_data.py | d427572608b5a937a16039325feb542271465cab | []
| no_license | pbahle/elections-api | c58cdf2b05f1560c8d6a69f8bc07e878458585c1 | 60cc06610ab7a279102018078f29f38d31e8bd26 | refs/heads/master | 2020-09-02T10:44:03.663386 | 2019-11-02T19:26:30 | 2019-11-02T19:26:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,838 | py | # pylint: disable=no-self-use
import sys
from datetime import timedelta
from pathlib import Path
from django.core.management.base import BaseCommand
from django.utils import timezone
import log
from elections import defaults
from elections.helpers import normalize_jurisdiction
from elections.models import District, DistrictCategory, Election, Party, Position
class Command(BaseCommand):
    help = "Initialize contants and migrate data between existing models"

    def handle(self, verbosity: int, **_kwargs):
        """Entry point: seed constants, then run each migration step in order."""
        # Verbose logging only when -v was passed explicitly on the CLI;
        # otherwise force level 2.
        log.init(verbosity=verbosity if '-v' in sys.argv else 2)

        defaults.initialize_parties()
        defaults.initialize_districts()

        self.update_elections()
        self.update_jurisdictions()
        self.import_descriptions()
        self.export_descriptions()

    def update_elections(self):
        """Deactivate any active election more than three weeks past its date."""
        for election in Election.objects.filter(active=True):
            age = timezone.now() - timedelta(weeks=3)
            if election.date < age.date():
                log.info(f'Deactivating election: {election}')
                election.active = False
                election.save()

    def update_jurisdictions(self):
        """Normalize jurisdiction district names, merging duplicates."""
        jurisdiction = DistrictCategory.objects.get(name="Jurisdiction")
        for district in District.objects.filter(category=jurisdiction):
            old = district.name
            new = normalize_jurisdiction(district.name)
            if new != old:
                # If a district with the normalized name already exists,
                # drop this one rather than creating a duplicate.
                if District.objects.filter(category=jurisdiction, name=new):
                    log.warning(f'Deleting district {old!r} in favor of {new!r}')
                    district.delete()
                else:
                    log.info(f'Renaming district {old!r} to {new!r}')
                    district.name = new
                    district.save()

    def import_descriptions(self):
        # Placeholder: descriptions are currently only exported (below).
        pass

    def export_descriptions(self):
        """Dump name/description pairs for each model to content/<name>.txt."""
        elections = {}
        for election in Election.objects.all():
            elections[election.name] = election.description
        self._write('elections', elections)

        districts = {}
        for category in DistrictCategory.objects.all():
            districts[category.name] = category.description
        self._write('districts', districts)

        parties = {}
        for party in Party.objects.all():
            parties[party.name] = party.description
        self._write('parties', parties)

        positions = {}
        for position in Position.objects.all():
            positions[position.name] = position.description
        self._write('positions', positions)

    def _write(self, name, data):
        """Write sorted name/description records to content/<name>.txt."""
        with Path(f'content/{name}.txt').open('w') as f:
            for key, value in sorted(data.items()):
                f.write(f'name: {key}\n')
                f.write(f'description: {value}\n')
                f.write('\n')
| [
"[email protected]"
]
| |
c1fb632462fb073565ae995962ae392db45905b3 | a411a55762de11dc2c9d913ff33d2f1477ac02cf | /lte/gateway/python/magma/mobilityd/subscriberdb_client.py | 1fec443db1956ef872a11cfbc3a1d98d7a4c2e0f | [
"BSD-3-Clause"
]
| permissive | magma/magma | 0dc48c1513d9968bd05fb7589f302c192b7c0f94 | 0e1d895dfe625681229e181fbc2dbad83e13c5cb | refs/heads/master | 2023-09-04T09:31:56.140395 | 2023-08-29T13:54:49 | 2023-08-29T13:54:49 | 170,803,235 | 1,219 | 525 | NOASSERTION | 2023-09-07T17:45:42 | 2019-02-15T04:46:24 | C++ | UTF-8 | Python | false | false | 6,221 | py | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import ipaddress
import logging
from typing import Optional
import grpc
from lte.protos.apn_pb2 import APNConfiguration
from magma.mobilityd.utils import log_error_and_raise
from magma.subscriberdb.sid import SIDUtils
class NetworkInfo:
    """Gateway networking details (IP, MAC, VLAN) used to build AGW config."""

    def __init__(
        self, gw_ip: Optional[str] = None, gw_mac: Optional[str] = None,
        vlan: int = 0,
    ):
        # Keep None when the gateway IP is absent or does not parse.
        try:
            parsed_gw_ip = ipaddress.ip_address(gw_ip)  # type: ignore
        except ValueError:
            logging.debug("invalid internet gw ip: %s", gw_ip)
            parsed_gw_ip = None
        self.gw_ip = parsed_gw_ip
        self.gw_mac = gw_mac
        self.vlan = vlan

    def __str__(self):
        return "GW-IP: {} GW-MAC: {} VLAN: {}".format(
            self.gw_ip, self.gw_mac, self.vlan,
        )
class StaticIPInfo:
    """
    Operator-configured static IP plus gateway (IP/MAC/VLAN) details.
    Consumed by AGW services when generating networking configuration.
    """

    def __init__(
        self, ip: Optional[str],
        gw_ip: Optional[str],
        gw_mac: Optional[str],
        vlan: int,
    ):
        # An empty/None ip means no static address was assigned; an invalid
        # one raises ValueError to the caller.
        self.ip = ipaddress.ip_address(ip) if ip else None
        self.net_info = NetworkInfo(gw_ip, gw_mac, vlan)

    def __str__(self):
        return "IP: {} NETWORK: {}".format(self.ip, self.net_info)
class SubscriberDbClient:
    """Thin wrapper over the local SubscriberDB gRPC stub for IP/APN lookups."""

    def __init__(self, subscriberdb_rpc_stub):
        # May be None when subscriberdb is not configured; lookups then no-op.
        self.subscriber_client = subscriberdb_rpc_stub

    def get_subscriber_ip(self, sid: str) -> Optional[StaticIPInfo]:
        """
        Make RPC call to 'GetSubscriberData' method of local SubscriberDB
        service to get assigned IP address if any.

        Raises SubscriberDBStaticIPValueError on malformed subscriber data
        and SubscriberDBConnectionError on RPC failure.
        """
        if self.subscriber_client is None:
            return None

        try:
            apn_config = self._find_ip_and_apn_config(sid)
            logging.debug("ip: Got APN: %s", apn_config)
            if apn_config and apn_config.assigned_static_ip:
                return StaticIPInfo(
                    ip=apn_config.assigned_static_ip,
                    gw_ip=apn_config.resource.gateway_ip,
                    gw_mac=apn_config.resource.gateway_mac,
                    vlan=apn_config.resource.vlan_id,
                )
        except ValueError as ex:
            # Raised for a malformed SID or an unparsable static IP value.
            logging.warning(
                "static Ip: Invalid or missing data for sid %s: ", sid,
            )
            logging.debug(ex)
            raise SubscriberDBStaticIPValueError(sid)
        except grpc.RpcError as err:
            log_error_and_raise(
                SubscriberDBConnectionError,
                "GetSubscriberData: while reading vlan-id error[%s] %s",
                err.code(),
                err.details(),
            )
        return None

    def get_subscriber_apn_network_info(self, sid: str) -> NetworkInfo:
        """
        Make RPC call to 'GetSubscriberData' method of local SubscriberDB
        service to get assigned IP address if any.

        Returns an empty NetworkInfo when no vlan is configured or the
        client stub is unavailable.
        TODO: Move this API to separate APN configuration service.
        """
        if self.subscriber_client:
            try:
                apn_config = self._find_ip_and_apn_config(sid)
                logging.debug("vlan: Got APN: %s", apn_config)
                if apn_config and apn_config.resource.vlan_id:
                    return NetworkInfo(
                        gw_ip=apn_config.resource.gateway_ip,
                        gw_mac=apn_config.resource.gateway_mac,
                        vlan=apn_config.resource.vlan_id,
                    )
            except ValueError as ex:
                logging.warning(
                    "vlan: Invalid or missing data for sid %s", sid,
                )
                logging.debug(ex)
                raise SubscriberDBMultiAPNValueError(sid)
            except grpc.RpcError as err:
                log_error_and_raise(
                    SubscriberDBConnectionError,
                    "GetSubscriberData: while reading vlan-id error[%s] %s",
                    err.code(),
                    err.details(),
                )
        return NetworkInfo()

    # use same API to retrieve IP address and related config.
    def _find_ip_and_apn_config(
        self, sid: str,
    ) -> Optional[APNConfiguration]:
        """Select the subscriber's APN config matching the SID's APN name.

        A config with service_selection '*' acts as a wildcard fallback; an
        exact APN-name match wins. Entries with unparsable static IPs are
        skipped.
        """
        # SID format appears to be "IMSI[.apn_name],extra" — the part before
        # the first '.' is the IMSI (TODO confirm against callers).
        if '.' in sid:
            imsi, apn_name_part = sid.split('.', maxsplit=1)
            apn_name, _ = apn_name_part.split(',', maxsplit=1)
        else:
            imsi, _ = sid.split(',', maxsplit=1)
            apn_name = ''

        logging.debug("Find APN config for: %s", sid)
        data = self.subscriber_client.GetSubscriberData(SIDUtils.to_pb(imsi))
        if data and data.non_3gpp and data.non_3gpp.apn_config:
            selected_apn_conf = None
            for apn_config in data.non_3gpp.apn_config:
                logging.debug("APN config: %s", apn_config)
                try:
                    # Validate the static IP; skip entries that don't parse.
                    if apn_config.assigned_static_ip:
                        ipaddress.ip_address(apn_config.assigned_static_ip)
                except ValueError:
                    continue
                if apn_config.service_selection == '*':
                    selected_apn_conf = apn_config
                elif apn_config.service_selection == apn_name:
                    selected_apn_conf = apn_config
                    break
            return selected_apn_conf
        return None
class SubscriberDBConnectionError(Exception):
    """Raised when the subscriber DB service cannot be reached."""


class SubscriberDBStaticIPValueError(Exception):
    """Raised when the subscriber DB holds an invalid static IP for the subscriber."""


class SubscriberDBMultiAPNValueError(Exception):
    """Raised when the subscriber DB holds an invalid Multi-APN vlan value for the subscriber."""
| [
"[email protected]"
]
| |
536e8eda7de1c4a381f2c709fa56729cfbf19ee7 | 04b1803adb6653ecb7cb827c4f4aa616afacf629 | /native_client_sdk/src/build_tools/tests/verify_filelist_test.py | 2e01da1c93e9b3e5b6743a0e4d6f71f712de429d | [
"BSD-3-Clause"
]
| permissive | Samsung/Castanets | 240d9338e097b75b3f669604315b06f7cf129d64 | 4896f732fc747dfdcfcbac3d442f2d2d42df264a | refs/heads/castanets_76_dev | 2023-08-31T09:01:04.744346 | 2021-07-30T04:56:25 | 2021-08-11T05:45:21 | 125,484,161 | 58 | 49 | BSD-3-Clause | 2022-10-16T19:31:26 | 2018-03-16T08:07:37 | null | UTF-8 | Python | false | false | 3,854 | py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(BUILD_TOOLS_DIR)
import verify_filelist
def Verify(platform, rules_contents, directory_list):
    """Parse |rules_contents| for |platform| and validate |directory_list| against it."""
    verify_filelist.Rules('test', platform, rules_contents).VerifyDirectoryList(
        directory_list)
class VerifyFilelistTestCase(unittest.TestCase):
    """Unit tests for verify_filelist rule parsing and directory verification."""

    def testBasic(self):
        # One path per rule line; every listed path must be present.
        rules = """\
foo/file1
foo/file2
foo/file3
bar/baz/other
"""
        dirlist = ['foo/file1', 'foo/file2', 'foo/file3', 'bar/baz/other']
        Verify('linux', rules, dirlist)

    def testGlob(self):
        # A trailing glob matches files and nested subdirectories.
        rules = 'foo/*'
        dirlist = ['foo/file1', 'foo/file2', 'foo/file3/and/subdir']
        Verify('linux', rules, dirlist)

    def testPlatformVar(self):
        # ${PLATFORM} expands to the platform being verified.
        rules = 'dir/${PLATFORM}/blah'
        dirlist = ['dir/linux/blah']
        Verify('linux', rules, dirlist)

    def testPlatformVarGlob(self):
        rules = 'dir/${PLATFORM}/*'
        dirlist = ['dir/linux/file1', 'dir/linux/file2']
        Verify('linux', rules, dirlist)

    def testPlatformRule(self):
        # A [platform] prefix restricts the rule to that platform only.
        rules = """\
[linux]dir/linux/only
all/platforms
"""
        linux_dirlist = ['dir/linux/only', 'all/platforms']
        other_dirlist = ['all/platforms']
        Verify('linux', rules, linux_dirlist)
        Verify('mac', rules, other_dirlist)

    def testMultiPlatformRule(self):
        # Comma-separated platform lists apply the rule to each of them.
        rules = """\
[linux,win]dir/no/macs
all/platforms
"""
        nonmac_dirlist = ['dir/no/macs', 'all/platforms']
        mac_dirlist = ['all/platforms']
        Verify('linux', rules, nonmac_dirlist)
        Verify('win', rules, nonmac_dirlist)
        Verify('mac', rules, mac_dirlist)

    def testPlatformRuleBadPlatform(self):
        # Unknown platform names in a rule are a parse error.
        rules = '[frob]bad/platform'
        self.assertRaises(verify_filelist.ParseException, Verify,
                          'linux', rules, [])

    def testMissingFile(self):
        rules = """\
foo/file1
foo/missing
"""
        dirlist = ['foo/file1']
        self.assertRaises(verify_filelist.VerifyException, Verify,
                          'linux', rules, dirlist)

    def testExtraFile(self):
        # Files not covered by any rule are also an error.
        rules = 'foo/file1'
        dirlist = ['foo/file1', 'foo/extra_file']
        self.assertRaises(verify_filelist.VerifyException, Verify,
                          'linux', rules, dirlist)

    def testEmptyGlob(self):
        rules = 'foo/*'
        dirlist = ['foo']  # Directory existing is not enough!
        self.assertRaises(verify_filelist.VerifyException, Verify,
                          'linux', rules, dirlist)

    def testBadGlob(self):
        # Globs are only allowed at the end of a rule path.
        rules = '*/foo/bar'
        dirlist = []
        self.assertRaises(verify_filelist.ParseException, Verify,
                          'linux', rules, dirlist)

    def testUnknownPlatform(self):
        # Only linux/mac/win are valid platforms for verification itself.
        rules = 'foo'
        dirlist = ['foo']
        for platform in ('linux', 'mac', 'win'):
            Verify(platform, rules, dirlist)
        self.assertRaises(verify_filelist.ParseException, Verify,
                          'foobar', rules, dirlist)

    def testUnexpectedPlatformFile(self):
        # A file restricted to other platforms must not appear on this one.
        rules = '[mac,win]foo/file1'
        dirlist = ['foo/file1']
        self.assertRaises(verify_filelist.VerifyException, Verify,
                          'linux', rules, dirlist)

    def testWindowsPaths(self):
        # Separator handling depends on the host OS the test runs on.
        if os.path.sep != '/':
            rules = 'foo/bar/baz'
            dirlist = ['foo\\bar\\baz']
            Verify('win', rules, dirlist)
        else:
            rules = 'foo/bar/baz\\foo'
            dirlist = ['foo/bar/baz\\foo']
            Verify('linux', rules, dirlist)

    def testNestedGlobs(self):
        # Overlapping globs are accepted in either order.
        rules = """\
foo/*
foo/bar/*"""
        dirlist = ['foo/file', 'foo/bar/file']
        Verify('linux', rules, dirlist)

        rules = """\
foo/bar/*
foo/*"""
        dirlist = ['foo/file', 'foo/bar/file']
        Verify('linux', rules, dirlist)


if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
ca1b707109866827e14056049f57c913b474171f | 4229a406a83a573dc357c1144cae7c5aad6f673b | /trestle/tasks/__init__.py | 86a3ca67891864e7f9daa7aafdae8b49ba9a8a8d | [
"Apache-2.0"
]
| permissive | xee5ch/compliance-trestle | dbc0647fe18e1164a75bcfdc4d38687df14e3247 | 969c10eceb73202d2b7856bac598f9b11afc696e | refs/heads/main | 2023-09-02T17:21:35.659432 | 2021-11-17T00:01:27 | 2021-11-17T00:01:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | # -*- mode:python; coding:utf-8 -*-
# Copyright (c) 2020 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trestle tasks module. Designed for arbitrary actions that are not editing to be flexible for multiple usecases."""
| [
"[email protected]"
]
| |
9a6669dbb8aa1d8739a39c14d383548d2e889676 | 557d75e6dfb42c881d4df73950c41935635f2162 | /preprocessing/recon_all.py | d9bf4632fb59ca40f6606a9db0ddc41864471963 | []
| no_license | sssilvar/multiple-sclerosis | e6139558249f00a882ffeb9d4b82ac323a50ec96 | a2e1e97e1297d45c2b84c5c57b372eee26047941 | refs/heads/master | 2020-06-05T00:09:04.781033 | 2019-07-13T23:15:00 | 2019-07-13T23:15:00 | 192,245,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,045 | py | #!/bin/env python3
import os
import glob
from multiprocessing.pool import Pool
from os.path import join, isdir, basename
def recon_all(vol_file):
    """Run FreeSurfer recon-all for one subject's registered T1/T2 volumes."""
    # Subject ID is the leading token of the filename (e.g. "patient12_...").
    subject_id = basename(vol_file).split('_')[0]
    t2_file = vol_file.replace('T1Wreg.nii.gz', 'T2Wreg.nii.gz')
    # NOTE(review): out_folder is a module-level global assigned in __main__;
    # worker processes inherit it via fork-based multiprocessing.
    cmd = (
        f'recon-all -i {vol_file} -T2 {t2_file} '
        f'-s {subject_id} -sd {out_folder} -all'
    )
    print(cmd)
    os.system(cmd)
if __name__ == "__main__":
    # Set dataset folder
    dataset_folder = '/home/jullygh/Downloads/MS/extracted/*'
    pattern = join(dataset_folder, 'patient*_study1_T1Wreg.nii.gz')
    # BUG FIX: corrected "Finging" -> "Finding" in the status message.
    print(f'Finding pattern: {pattern}')

    # Output Folder (FreeSurfer SUBJECTS_DIR used by recon_all workers)
    out_folder = '/home/jullygh/Downloads/MS/processed_fs/'

    # Find files in folder
    files = glob.glob(pattern, recursive=True)
    print(f'Total files found: {len(files)}')

    confirm = input('Start [y/n]:')
    if confirm == 'y':
        # Process subjects in parallel (up to 20 concurrent recon-all runs).
        pool = Pool(20)
        pool.map(recon_all, files)
        pool.close()
    else:
        print('No process started')
    print('Done')
| [
"[email protected]"
]
| |
d04ae994a53ff06417f846f19c0403d3bc065f10 | e5d83ede8521027b05d9b91c43be8cab168610e6 | /0x0B-python-input_output/1-number_of_lines.py | 1dfc5fcc64012fcf583f7f599a0cd5e13d80cbb1 | []
| no_license | Danielo814/holbertonschool-higher_level_programming | 8918c3a6a9c136137761d47c5162b650708dd5cd | 832b692529198bbee44d2733464aedfe650bff7e | refs/heads/master | 2020-03-28T11:09:00.343055 | 2019-02-22T03:33:54 | 2019-02-22T03:33:54 | 148,181,433 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | #!/usr/bin/python3
"""
1-number_of_lines module
"""
def number_of_lines(filename=""):
    """
    returns the number of lines of a text file
    """
    with open(filename, 'r', encoding='utf-8') as f:
        # Count by streaming the file's line iterator; 0 for an empty file.
        return sum(1 for _ in f)
| [
"[email protected]"
]
| |
0d6361a1c0ab589a30c8857539292b0ea2ba6f17 | 43dabf77afd5c44d55b465c1b88bf9a5e7c4c9be | /drawing_random_circles.py | be298cbf90b23e67ea008144b485fca1b94b056c | []
| no_license | geegatomar/OpenCV-Computer-Vision-Adrian-Rosebrock | cc81a990a481b5e4347dd97369b38479b46e55bc | daa579309010e6e7fefb004b878ffb26374401d0 | refs/heads/master | 2022-11-18T13:07:08.040483 | 2020-07-20T01:55:39 | 2020-07-20T01:55:39 | 280,987,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | import cv2
import numpy as np
# drawing 25 random circles
canvas = np.zeros((400, 400, 3), dtype="uint8")
for i in range(25):
radius = np.random.randint(180) # will generate random radius value between 0 and 100
centre = np.random.randint(0, 400, size=(2, ))
color = np.random.randint(0, 255, size=(3, ))
color = (int(color[0]), int(color[1]), int(color[2]))
cv2.circle(canvas, tuple(centre), radius, tuple(color), 2)
cv2.imshow("MyCanvas", canvas)
cv2.waitKey(0)
| [
"[email protected]"
]
| |
5b2adb99ac1c7f639cd70f0a78682b1b33699973 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/292/85489/submittedfiles/testes.py | 8470e2323f37fdef8789f886efdda325a1056e93 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
import datetime

# BUG FIX: only the `datetime` module was imported, so the bare name
# `date` raised NameError at runtime; qualify today() with the module.
n = datetime.date.today()
print(n)
"[email protected]"
]
| |
a0cb28bad70fcd7e7477f099e1ce87cedae8050d | f33b30743110532ddae286ba1b34993e61669ab7 | /比赛/力扣杯2020春季全国编程大赛/1.py | dff7e2ce6da1326a6dd9870c6a3b18e8dfb798d9 | []
| no_license | c940606/leetcode | fe9dcee7a5daa4d52999d5f53253dd6dd33c348b | 631df2ce6892a6fbb3e435f57e90d85f8200d125 | refs/heads/master | 2021-07-10T14:01:26.164966 | 2020-08-16T10:46:16 | 2020-08-16T10:46:16 | 186,588,449 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | from typing import List
import collections
a = Solution()
print()
| [
"[email protected]"
]
| |
c3e597348ecd704038d52109bd25c04c2baf9da0 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/abc003/D/4547211.py | 6a0acb9b36e11c97948531a48a505d78d41e9f86 | []
| no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 998 | py | mod = 10**9 + 7
def powmod(x, n):
ret = 1
while n > 0:
if n & 1:
ret *= x; ret %= mod; n -= 1
else:
x *= x; x %= mod; n >>= 1
return ret
fact = [1 for _ in range(1000)]
revfact = [1 for _ in range(1000)]
def setfact(n):
for i in range(n):
fact[i+1] = fact[i] * (i+1); fact[i+1] %= mod
revfact[n] = powmod(fact[n], mod-2)
for i in range(n):
revfact[n-i-1] = revfact[n-i] * (n-i); revfact[i] %= mod
return
def getC(n, r):
if n < r: return 0
return fact[n] * revfact[r] % mod * revfact[n-r] % mod
r, c = map(int, input().split())
x, y = map(int, input().split())
d, l = map(int, input().split())
setfact(x*y)
num = 0
for i in range(1, 2**4):
txy = [x, y]
cnt = 0
for j in range(4):
if (i>>j)&1:
txy[j%2] -= 1
cnt += 1
if txy[0] > 0 and txy[1] > 0:
num += (cnt%2*2-1) * getC(txy[0]*txy[1], d+l) % mod
print((r-x+1) * (c-y+1) % mod * (getC(x*y, d+l) - num) % mod * getC(d+l, d) % mod) | [
"[email protected]"
]
| |
873fd33b792017d4797bb0d1acbb046e82beacde | 26f8a8782a03693905a2d1eef69a5b9f37a07cce | /test/test_destiny_historical_stats_destiny_historical_stats_period_group.py | 54f3aa5d3731b9a1cb0a50764667212af0aef180 | []
| no_license | roscroft/openapi3-swagger | 60975db806095fe9eba6d9d800b96f2feee99a5b | d1c659c7f301dcfee97ab30ba9db0f2506f4e95d | refs/heads/master | 2021-06-27T13:20:53.767130 | 2017-08-31T17:09:40 | 2017-08-31T17:09:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,391 | py | # coding: utf-8
"""
Bungie.Net API
These endpoints constitute the functionality exposed by Bungie.net, both for more traditional website functionality and for connectivity to Bungie video games and their related functionality.
OpenAPI spec version: 2.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.destiny_historical_stats_destiny_historical_stats_period_group import DestinyHistoricalStatsDestinyHistoricalStatsPeriodGroup
class TestDestinyHistoricalStatsDestinyHistoricalStatsPeriodGroup(unittest.TestCase):
""" DestinyHistoricalStatsDestinyHistoricalStatsPeriodGroup unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testDestinyHistoricalStatsDestinyHistoricalStatsPeriodGroup(self):
"""
Test DestinyHistoricalStatsDestinyHistoricalStatsPeriodGroup
"""
# FIXME: construct object with mandatory attributes with example values
#model = swagger_client.models.destiny_historical_stats_destiny_historical_stats_period_group.DestinyHistoricalStatsDestinyHistoricalStatsPeriodGroup()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
c29ff1701a3bfbca5682d464670a0183a3517f7b | 8882bfe78b3a6e5d022f81c86512b22f851d9dc8 | /tgflow/TgFlow.py | ffb115f18b4e9744371863b4ed3007956ddc5bbd | [
"MIT"
]
| permissive | inexcode/tgflow | 5600fa4040d30157daf6d2ad5fe8d625ac64789d | e7bbd7df87e7a711c1b2924f3f2ae909fb2086c5 | refs/heads/master | 2020-03-29T10:19:39.405683 | 2018-09-21T18:13:30 | 2018-09-21T18:13:30 | 149,799,442 | 0 | 0 | null | 2018-09-21T17:59:23 | 2018-09-21T17:59:23 | null | UTF-8 | Python | false | false | 6,673 | py | #import telebot
import hashlib
from enum import Enum
from . import handles
from . import render
import pickle,time
from .api.tg import telegramAPI
import pprint
pp = pprint.PrettyPrinter(indent=4)  # debug pretty-printer
action = handles.action  # re-exported so callers can write tgflow.action(...)
# Filled in by configure(): the backing API wrapper and the bot token.
api,key = None,None
def_state = None  # state assigned to chats seen for the first time
def_data= None    # data dict assigned to new chats (configure() replaces this)
# Per-chat runtime tables, keyed by chat id (States/Data are reloaded
# from pickle files just below).
States = {}
UI = {}           # state -> screen definition mapping, set by start()
Data = {}
Actions = {}      # registered button/callback actions
Keyboards = {}    # currently unused in this module
Reaction_triggers = {}  # chat id -> [(message attribute, action), ...]
def read_sd(sf, df):
    """Load the persisted states and data dicts from pickle files.

    Args:
        sf: path to the pickled states file.
        df: path to the pickled data file.

    Returns:
        (states, data); either value falls back to {} when its file is
        empty or corrupt.  FileNotFoundError still propagates (the
        module-level caller uses it to know the files must be created).
    """
    def _load(path):
        with open(path, 'rb') as f:
            try:
                return pickle.load(f)
            # BUG FIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; limit it to real errors.
            except Exception:
                return {}
    return _load(sf), _load(df)
def save_sd(states, data, sf='states.p', df='data.p'):
    """Persist the states and data dicts to pickle files.

    The target paths default to the historical hard-coded
    'states.p'/'data.p' (existing callers are unaffected) but can now be
    overridden, mirroring read_sd().  Pickling failures are reported,
    not raised, preserving the original best-effort behaviour.
    """
    try:
        with open(sf, 'wb+') as f:
            pickle.dump(states, f)
        with open(df, 'wb+') as f:
            pickle.dump(data, f)
    except Exception as e:
        # unpicklable user data must not crash the bot loop
        print('Non-picklable', str(e))
# Load previously persisted per-chat state at import time; on the very
# first run the files do not exist yet and will be created by the
# save_sd() calls issued from gen_state_msg().
try:
    States,Data = read_sd('states.p','data.p')
except FileNotFoundError:
    print("tgflow: creating data.p and states.p files")
def configure(token=None, state=None,
              apiModel=telegramAPI, data={},
              group_id=None
              ):
    """Initialise the tgflow runtime.

    Stores the bot token and the default state/data for new chats,
    builds the API wrapper and wires the message/callback handlers.
    """
    global def_state,def_data
    global api,key

    # Guard clauses: both a token and an initial state are mandatory.
    if not token:
        raise Exception("tgflow needs your bot token")
    if not state:
        raise Exception("tgflow needs a default state for new users")

    key, def_state, def_data = token, state, data

    # Build the backing API object (group_id is ignored by telegram)
    # and register our two dispatch entry points on it.
    api = apiModel(key, group_id=group_id)
    api.set_message_handler(message_handler)
    api.set_callback_handler(callback_handler)
def start(ui):
    """Store the UI definition and begin polling (blocks the caller)."""
    global api,UI
    UI = ui
    print("tgflow: listening")
    try:
        api.start(none_stop=True)
    except Exception as err:
        # surface polling failures without killing the process
        print("tgflow:polling error", err)
def get_file_link(file_id):
    """Build a direct download URL for a telegram file id.

    NOTE(review): `bot` is not defined anywhere in this module (only
    `api` is), so calling this raises NameError — presumably it should
    go through the underlying telebot instance; confirm before use.
    """
    # TODO: implement this in api
    finfo = bot.get_file(file_id)
    l='https://api.telegram.org/file/bot%s/%s'%(
            key,finfo.file_path)
    return l
def message_handler(messages):
    """Dispatch incoming text messages.

    For each message: resolve a keyboard action registered under
    'kb_'+text, else fall back to this chat's reaction triggers, then
    run the flow for the (possibly missing) action and send the result.
    """
    global States,UI
    for msg in messages:
        s = States.get(msg.chat.id,def_state)
        print('tgflow: got message. State:'+str(s))
        # for security reasons need to hash. user can call every action in this state
        # key format: kb_+ButtonName
        a = Actions.get('kb_'+str(msg.text))
        if not a:
            # No button matched: a trigger fires when the message carries
            # the named attribute, or unconditionally for 'all'.
            if Reaction_triggers.get(msg.chat.id):
                for r,a_ in Reaction_triggers[msg.chat.id]:
                    if msg.__dict__.get(r):
                        a = a_
                    if r=='all':
                        a = a_
        d = Data.get(msg.chat.id,def_data)
        # following restriction is dictaded by telegram api
        # NOTE(review): rebinding the `messages` parameter here is safe
        # only because the for-loop keeps its own reference to the
        # original list; a distinct local name would be clearer.
        messages = flow(a,s,d,msg,msg.chat.id)
        send(messages,msg.chat.id)
def callback_handler(call):
    """Dispatch an inline-button callback.

    The callback payload (call.data) is the action's registered key;
    the rendered result either edits the original message in place
    (when the action's `update` flag is set) or is sent as a new one.
    """
    s = States.get(call.message.chat.id,def_state)
    a = Actions.get(call.data)
    d = Data.get(call.message.chat.id,def_data)
    print("tgflow: got callback. State:",s)
    messages = flow(a,s,d,call,call.message.chat.id)
    if a:
        if not a.update:
            send(messages,call.message.chat.id)
        else:
            # edit the triggering message instead of posting a new one
            update(messages, call.message)
    else:
        print("tgflow: Warning: no action found but should")
        send(messages,call.message.chat.id)
def gen_state_msg(i,ns,nd,_id,state_upd=True):
    """Render state `ns` for chat `_id` with data `nd` and update all
    module-level tables as a side effect.

    Runs the state's optional 'prepare' hook, persists States/Data,
    registers button and keyboard actions plus reaction triggers, and
    returns the rendered (text, markup) message list.  `i` is the
    incoming update (message/callback) that caused the transition, or
    None when pushed via send_state().
    """
    pre_a = UI.get(ns).get('prepare')
    if pre_a:
        # call user-defined data perparations.
        print("tgflow: found a prep function, calling...")
        nd = pre_a(i,ns,**nd)
    args = {'s':ns,'d':nd}
    ui = render.prep(UI.get(ns),args)
    # saving data and state
    Data[_id] = nd
    if state_upd: States[_id] = ns
    save_sd(States,Data)
    # registering callback triggers on buttons
    save_iactions(ui.get('b'))
    save_kactions(ns,ui.get('kb'),ns,_id)
    print("tgflow: actions registered:\n",Actions)
    # registering reaction triggers
    rc = ui.get('react') or ui.get('react_to')
    if rc:
        trigs = Reaction_triggers.get(_id)
        if trigs:
            Reaction_triggers[_id].append((rc.react_to,rc))
        else:
            Reaction_triggers.update({_id:[(rc.react_to,rc)]})
        print("tgflow: reaction tgigger for %s registrated %s"%(str(_id),str(rc)))
    # clearing reaction triggers if needed
    rc = ui.get('clear_trig')
    if rc:
        print("tgflow: reaction trigger clear",rc)
        if Reaction_triggers.get(_id):
            # NOTE(review): removing items from the list while iterating
            # it can skip the element after each removal — iterate a copy
            # if multiple matching triggers must be cleared.
            for r,a_ in Reaction_triggers[_id]:
                #TODO: handle arrays of triggers
                if rc == r:
                    Reaction_triggers[_id].remove((r,a_))
        else:
            print("tgflow:WARN removing unset trigger",rc)
    # rendering message and buttons
    messages = render.render(ui)
    return messages
def send_state(ns, tg_id):
    """Render state `ns` for chat `tg_id` (using its stored data, or the
    defaults) and push it as a fresh message."""
    stored = Data.get(tg_id, def_data)
    send(gen_state_msg(None, ns, stored, tg_id), tg_id)
def flow(a, s, d, i, _id):
    """Run action `a` (if any) for update `i`, then render the resulting
    state.  When no action matched, state and data pass through as-is."""
    if not a:
        print('tgflow: no action found for message. %s unchanged' % s)
        return gen_state_msg(i, s, d, _id)

    ns, nd = a.call(i, s, **d)
    print('tgflow: called action:' + str(a))
    if isinstance(s, Enum) and isinstance(ns, Enum):
        print('tgflow: states change %s --> %s' % (s.name, ns.name))
    else:
        print('tgflow: states change %s --> %s' % (s, ns))
    return gen_state_msg(i, ns, nd, _id)
def get_state(id,s):
    """Placeholder — not implemented; always returns None.

    NOTE(review): the parameter `id` shadows the builtin of the same
    name; intended semantics are not derivable from this file.
    """
    pass
def save_iactions(ui):
    """Recursively walk a rendered UI subtree and register every inline
    (callback) button action in the module-level Actions table, keyed by
    action.get_register_key(), so callback_handler can dispatch it."""
    if isinstance(ui, action):
        # TODO: assign actions to every user distinctly, as with butons
        # (also dropped the local `key` that shadowed the module global)
        Actions[ui.get_register_key()] = ui
    if isinstance(ui, dict):
        for child in ui.values():
            save_iactions(child)
    elif isinstance(ui, list):
        # BUG FIX (minor): the old code built and discarded a list of
        # None results; a plain loop makes the side effect explicit.
        for child in ui:
            save_iactions(child)
# TODO: remove s argument
def save_kactions(k, ui, s, _id):
    """Register reply-keyboard actions and per-chat reaction triggers.

    `k` is the button caption, `ui` the attached value (an action, or a
    dict/list subtree to recurse into), `s` is unused (see TODO above)
    and `_id` is the chat the triggers belong to.
    """
    if isinstance(ui, action):
        if ui.react_to:
            # Trigger actions fire on a matching message attribute
            # rather than on a button press.
            trigs = Reaction_triggers.get(_id)
            if trigs:
                Reaction_triggers[_id].append((ui.react_to, ui))
            else:
                Reaction_triggers.update({_id: [(ui.react_to, ui)]})
            print("tgflow: reaction tgigger for %s registrated %s"%(str(_id),str(ui)))
        else:
            # key format: State+ButtonName
            Actions['kb_'+str(k)] = ui
    if isinstance(ui, dict):
        for caption, child in ui.items():
            save_kactions(caption, child, s, _id)
    elif isinstance(ui, list):
        # BUG FIX (minor): the old code rebound `ui` to a discarded list
        # of None results; a plain loop keeps only the side effects.
        for child in ui:
            save_kactions(k, child, s, _id)
def send(message, id):
    """Deliver each rendered (text, markup) pair to chat `id`."""
    print("tgflow: sending message")
    for item in message:
        body, keyboard = item
        api.send(id, text=body, markup=keyboard)
def update(messages, msg):
    """Edit the existing telegram message `msg` in place with each
    rendered (text, markup) pair."""
    for body, keyboard in messages:
        print("tgflow: updating message")
        api.update(msg, text=body, markup=keyboard)
| [
"[email protected]"
]
| |
28eb9cf5f13dc05100ba9264f00df18331a9e5ba | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p04030/s999899727.py | d1c7afb12be8ed4228e960b1a6e2e0f7fc222ea5 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | s=input()
fin=""
# Replay the keystroke string s: '1' and '0' are typed digits, any
# other character (presumably the backspace key in this problem)
# deletes the most recently typed digit, if any.
for c in s:
    if c=='1':
        fin+="1"
    elif c=='0':
        fin+="0"
    else:
        # backspace: drop the last character when the buffer is non-empty
        if len(fin)>0:
            fin=fin[:-1]
print(fin)
| [
"[email protected]"
]
| |
5fe4c7ed46fc6342f89f21baa980a8b8f0c9a22a | a814debee728e59a7a10d8c12b92c1f3ee97e19d | /Cadeias/Questao01.py | 5a06773ddc2a07e94da38507662ab3bf4ae50ea1 | []
| no_license | PedroVitor1995/Algoritmo-ADS-2016.1 | 0ee034d2f03b29d3c8177fb3402f7aeae08d07cf | 8e3b6dfb0db188b9f5d68dcb8619f6636883ab89 | refs/heads/master | 2021-01-01T15:51:56.636502 | 2017-07-19T13:47:36 | 2017-07-19T13:47:36 | 81,328,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | #__*__ encoding:utf8 __*__
"""1. Faça a criptografia de uma frase digitada pelo usuário. Na criptografia, a frase deverá ser invertida e as
consoantes deverão ser substituídas pelo caractere #."""
def main():
    # Python 2 code (raw_input / print statement).  Spec (module
    # docstring, Portuguese): read a phrase and "encrypt" it by
    # reversing it and replacing every consonant with '#'.
    frase = raw_input('Digite uma frase: ')
    consoantes = 'BCDFGHJKLMNPQRSTVXYWZbcdfghjklmnpqrstvxywz'
    for letra in consoantes:
        if letra in frase:
            # NOTE(review): this reverses the phrase again for EVERY
            # matched consonant, so the final orientation depends on how
            # many distinct consonants occur (an even count leaves it
            # un-reversed).  Reversing once before the loop looks like
            # the intent — confirm against the exercise statement.
            frase = frase[::-1].replace(letra,'#')
    print frase
if __name__ == '__main__':
    main()
"[email protected]"
]
| |
b350f1f0416822ef956cae7c7a8e285fdeae380a | 2d1649a7a00d49b72ed7e53afa4abb3c9281ce03 | /.history/ParticleFilter/go_to_goal_20190421181756.py | fa5c4dea237b41fd8aea882ecec9f2e1f521c0ff | []
| no_license | joshzhang5/CS3630Lab6 | 9547dc6c89198e9bb4aebd8359d4feb974082d20 | 69e6df12829e18a211ae850236d74b4d728046ef | refs/heads/master | 2020-05-15T13:59:51.906195 | 2019-04-22T18:21:42 | 2019-04-22T18:21:42 | 182,317,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,240 | py | # Jiaxi Zhang
# George McAlear
# If you run into an "[NSApplication _setup] unrecognized selector" problem on macOS,
# try uncommenting the following snippet
try:
import matplotlib
matplotlib.use('TkAgg')
except ImportError:
pass
from skimage import color
import numpy as np
from numpy.linalg import inv
import threading
import time
import sys
import asyncio
from PIL import Image
from markers import detect, annotator
from grid import CozGrid
from gui import GUIWindow
from particle import Particle, Robot
from setting import *
from particle_filter import *
from utils import *
from time import sleep
import time
import cozmo
from cozmo.util import distance_mm, degrees, speed_mmps, Pose, Angle
#particle filter functionality
class ParticleFilter:
    """Monte-Carlo localization filter over the arena grid."""

    def __init__(self, grid):
        # Start from a uniform random particle cloud over the grid.
        self.particles = Particle.create_random(PARTICLE_COUNT, grid)
        self.grid = grid

    def update(self, odom, r_marker_list):
        """Advance the filter one step: motion update with `odom`, then
        a measurement update with the observed markers; returns the mean
        pose estimate as (x, y, heading, confident)."""
        moved = motion_update(self.particles, odom)
        self.particles = measurement_update(moved, r_marker_list, self.grid)
        est_x, est_y, est_h, confident = compute_mean_pose(self.particles)
        return (est_x, est_y, est_h, confident)
async def run(robot: cozmo.robot.Robot):
    """Mission coroutine: localize, drive to the pickup zone, then loop
    picking up cubes and delivering them along drop_off_directions.

    NOTE(review): this WIP snapshot references names not defined in this
    file (look_around_until_converge, last_pose, goal_pose,
    execute_directions, convertPoseToInches, convertInchesToPose,
    drop_off_directions) — confirm they come from a companion module.
    """
    await look_around_until_converge(robot)
    # move robot to pickup zone once localized
    print("LAST POSE IS:", last_pose)
    directions = goal_pose - last_pose
    current_pose = last_pose
    last_robot_pose = robot.pose
    print("SETTING LAST ROBOT POSE TO: ", last_robot_pose)
    print("SO WE GOING TO FOLLOW THIS TO PICKUP ZONE:", directions)
    await execute_directions(robot, directions)
    await robot.turn_in_place(angle=Angle(degrees=45)).wait_for_completed()
    print("LAST ROBOT POSE IS: ", last_robot_pose)
    print("CURRENT POSE IS:", robot.pose)
    print("WE THINK WE MOVED THIS MUCH TO GO TO PICKUP ZONE: ", convertPoseToInches(robot.pose - last_robot_pose))
    # BUG FIX: this line had unbalanced parentheses (a half-finished
    # rotate_point(...) edit), a SyntaxError; restored the same
    # pose-delta accumulation used everywhere else in this coroutine.
    current_pose = current_pose + convertPoseToInches(robot.pose - last_robot_pose)
    last_robot_pose = robot.pose
    print("COZMO THINKS IT IS AT AFTER DRIVING TO PICKUPZONE: ", current_pose)
    while True:
        # wait for a cube, grab it, and keep the dead-reckoned estimate up to date
        cube = await robot.world.wait_for_observed_light_cube(timeout=30)
        print("Found cube: %s" % cube)
        await robot.pickup_object(cube, num_retries=5).wait_for_completed()
        current_pose = current_pose + convertPoseToInches(robot.pose - last_robot_pose)
        print("WE THINK WE MOVED THIS MUCH TO PICK UP CUBE: ", convertPoseToInches(robot.pose - last_robot_pose))
        last_robot_pose = robot.pose
        print("COZMO THINKS IT IS AT AFTER PICKING UP CUBE: ", current_pose)
        # drive the drop-off waypoint list, accumulating the pose estimate
        for destination in drop_off_directions:
            directions = convertInchesToPose(destination) - current_pose
            await execute_directions(robot,directions)
            current_pose = current_pose + convertPoseToInches(robot.pose - last_robot_pose)
            print("WE THINK WE MOVED THIS MUCH TO FOLLOW DIRECTIONS: ", convertPoseToInches(robot.pose - last_robot_pose))
            last_robot_pose = robot.pose
            print("COZMO THINKS IT IS AT AFTER FOLLOWING DIRECTIONS: ", current_pose)
        # release the cube at the drop-off zone
        await robot.set_lift_height(0.0).wait_for_completed()
class CozmoWarehouseWorker:
    """Drives a Cozmo robot between the warehouse pickup and drop-off zones.

    BUG FIX: this was declared with `def` instead of `class` (a
    SyntaxError) and ran `await` expressions inside a plain __init__;
    the awaitable part of initialisation now lives in setup().
    """

    def __init__(self, robot: cozmo.robot.Robot, current_arena_pose):
        self.current_arena_pose = current_arena_pose
        self.last_robot_pose = robot.pose
        self.robot = robot

        # Obtain the camera intrinsics matrix
        fx, fy = robot.camera.config.focal_length.x_y
        cx, cy = robot.camera.config.center.x_y
        self.camera_settings = np.array([
            [fx,  0, cx],
            [ 0, fy, cy],
            [ 0,  0,  1]
        ], dtype=float)  # BUG FIX: np.float is deprecated/removed in modern numpy

        self.grid = CozGrid("map_arena.json")
        self.pf = ParticleFilter(self.grid)
        self.gui = GUIWindow(self.grid, show_camera=True)

        # BUG FIX: Pose(x=..., y=..., 0, ...) placed a positional argument
        # after keywords (SyntaxError); the bare 0 is presumably the z
        # coordinate — confirm against cozmo.util.Pose.
        self.drop_off_directions = [Pose(x=3, y=4.5, z=0, angle_z=degrees(0)),
                                    Pose(x=21.75, y=4.5, z=0, angle_z=degrees(90)),
                                    Pose(x=21.75, y=13.75, z=0, angle_z=degrees(90))]
        self.pick_up_directions = [Pose(x=21.75, y=4.5, z=0, angle_z=degrees(90)),
                                   Pose(x=3, y=4.5, z=0, angle_z=degrees(0)),
                                   Pose(x=4.5, y=20, z=0, angle_z=degrees(90))]

    async def setup(self):
        """Awaitable initialisation (was illegally awaited in __init__):
        point the head down and start streaming camera frames."""
        await self.robot.set_head_angle(degrees(3)).wait_for_completed()
        self.robot.camera.image_stream_enabled = True
        self.robot.camera.color_image_enabled = False
        self.robot.camera.enable_auto_exposure()

    async def execute_directions(self, directions):
        """Drive the x component, then the y component, of `directions`
        (a relative Pose in grid units).  BUG FIX: `self` was missing
        from the signature although the body used it, and `grid` lacked
        its `self.` prefix."""
        print("Robot is at: ", self.robot.pose)
        await self.robot.turn_in_place(angle=directions.rotation.angle_z).wait_for_completed()
        print("ROBOT is at AFTER TURNING to be parallel to X: ", self.robot.pose)
        await self.robot.drive_straight(distance=distance_mm(directions.position.x * self.grid.scale), speed=speed_mmps(80)).wait_for_completed()
        print("ROBOT is at AFTER DRIVING in the X direction: ", self.robot.pose)
        await self.robot.turn_in_place(angle=degrees(90)).wait_for_completed()
        print("ROBOT is at AFTER TURNING to be parallel to Y: ", self.robot.pose)
        await self.robot.drive_straight(distance=distance_mm(directions.position.y * self.grid.scale), speed=speed_mmps(80)).wait_for_completed()
        print("ROBOT is at AFTER DRIVING in the Y direction: ", self.robot.pose)

    async def localize(self):
        """Spin in place until the particle filter converges, then store
        the mean estimate in self.current_arena_pose."""
        # reset our location estimates
        conf = False
        self.current_arena_pose = Pose(0, 0, 0, angle_z=degrees(0))
        self.pf = ParticleFilter(self.grid)  # BUG FIX: was bare `grid`

        # reset lift and head
        await self.robot.set_lift_height(0.0).wait_for_completed()
        await self.robot.set_head_angle(degrees(3)).wait_for_completed()

        while not conf:
            # move a little
            self.last_robot_pose = self.robot.pose
            await self.robot.turn_in_place(angle=degrees(20)).wait_for_completed()
            # BUG FIX: compute_odometry / marker_processing / pf are
            # attributes of self; the bare names raised NameError.
            odometry = self.compute_odometry()
            detected_markers, camera_image = await self.marker_processing()

            # update, motion, and measurment with the odometry and marker data
            curr_x, curr_y, curr_h, conf = self.pf.update(odometry, detected_markers)

            # update gui
            self.gui.show_particles(self.pf.particles)
            self.gui.show_mean(curr_x, curr_y, curr_h)
            self.gui.show_camera_image(camera_image)
            self.gui.updated.set()

        self.current_arena_pose = Pose(curr_x, curr_y, 0, angle_z=Angle(degrees=curr_h))

    def compute_odometry(self, cvt_inch=True):
        '''
        Compute the odometry given the current pose of the robot (use robot.pose)

        Input:
            - cvt_inch: converts the odometry into grid units

        Returns:
            - 3-tuple (dx, dy, dh) representing the odometry
        '''
        last_x, last_y, last_h = self.last_robot_pose.position.x, self.last_robot_pose.position.y, \
            self.last_robot_pose.rotation.angle_z.degrees
        curr_x, curr_y, curr_h = self.robot.pose.position.x, self.robot.pose.position.y, \
            self.robot.pose.rotation.angle_z.degrees
        dx, dy = rotate_point(curr_x-last_x, curr_y-last_y, -last_h)
        if cvt_inch:
            dx, dy = dx / self.grid.scale, dy / self.grid.scale  # BUG FIX: was bare `grid`
        return (dx, dy, diff_heading_deg(curr_h, last_h))

    async def marker_processing(self, show_diagnostic_image=False):
        '''
        Obtain the visible markers from the current frame from Cozmo's camera.
        Since this is an async function, it must be called using await.

        Input:
            - show_diagnostic_image: if True, shows what the marker detector sees after processing

        Returns:
            - a list of detected markers, each being a 3-tuple (rx, ry, rh)
              (as expected by the particle filter's measurement update)
            - a PIL Image of what Cozmo's camera sees with marker annotations
        '''
        # Wait for the latest image from Cozmo
        image_event = await self.robot.world.wait_for(cozmo.camera.EvtNewRawCameraImage, timeout=30)

        # Convert the image to grayscale
        image = np.array(image_event.image)
        image = color.rgb2gray(image)

        # Detect the markers
        markers, diag = detect.detect_markers(image, self.camera_settings, include_diagnostics=True)

        # Measured marker list for the particle filter, scaled by the grid scale
        marker_list = [marker['xyh'] for marker in markers]
        marker_list = [(x/self.grid.scale, y/self.grid.scale, h) for x, y, h in marker_list]

        # Annotate the camera image with the markers
        if not show_diagnostic_image:
            annotated_image = image_event.image.resize((image.shape[1] * 2, image.shape[0] * 2))
            annotator.annotate_markers(annotated_image, markers, scale=2)
        else:
            diag_image = color.gray2rgb(diag['filtered_image'])
            diag_image = Image.fromarray(np.uint8(diag_image * 255)).resize((image.shape[1] * 2, image.shape[0] * 2))
            annotator.annotate_markers(diag_image, markers, scale=2)
            annotated_image = diag_image

        return marker_list, annotated_image
class CozmoThread(threading.Thread):
    """Worker thread that hosts the cozmo program loop."""

    def __init__(self):
        super().__init__(daemon=False)

    def run(self):
        # Cozmo can stay on his charger
        cozmo.robot.Robot.drive_off_charger_on_connect = False
        cozmo.run_program(run, use_viewer=False)
if __name__ == '__main__':
    # cozmo thread
    cozmo_thread = CozmoThread()
    cozmo_thread.start()
    # init
    # NOTE(review): in this snapshot `gui` and `pf` are not module-level
    # names (the filter/GUI live on CozmoWarehouseWorker), so these
    # calls raise NameError — confirm the intended entry point.
    gui.show_particles(pf.particles)
    gui.show_mean(0, 0, 0)
    gui.start()
"[email protected]"
]
| |
f78ec480786556e08f9a2cddea0271a0013e24e1 | 9ff1d0d5049dfe1c14528e098bdd8c934fb2274a | /tests/test3/test_port7_unittest.py | 6465c5d08297ccf489943439d23ab7e7aca49cfa | []
| no_license | 486dx/utility_Python | 43e06b3f74dac140396643d0e5c132fb874d2467 | 598117f7e9fd416f4bc7f1ccea931048a977a0bc | refs/heads/master | 2022-04-23T06:36:36.220406 | 2020-04-06T08:59:35 | 2020-04-06T08:59:35 | 264,519,552 | 1 | 0 | null | 2020-05-16T20:17:30 | 2020-05-16T20:17:30 | null | UTF-8 | Python | false | false | 1,690 | py | # test_port7_unittest.py
import unittest
from portfolio3 import Portfolio
class PortfolioTest(unittest.TestCase):
    """Unit tests for Portfolio.cost() and buy() input validation."""

    def test_empty(self):
        """A new portfolio costs nothing."""
        portfolio = Portfolio()
        self.assertEqual(portfolio.cost(), 0.0)

    def test_buy_one_stock(self):
        """Cost equals shares times price for a single holding."""
        portfolio = Portfolio()
        portfolio.buy("IBM", 100, 176.48)
        self.assertEqual(portfolio.cost(), 17648.0)

    def test_buy_two_stocks(self):
        """Cost sums across holdings."""
        portfolio = Portfolio()
        portfolio.buy("IBM", 100, 176.48)
        portfolio.buy("HPQ", 100, 36.15)
        self.assertEqual(portfolio.cost(), 21263.0)

    def test_bad_input(self):
        """buy() without shares/price is a TypeError."""
        portfolio = Portfolio()
        with self.assertRaises(TypeError):
            portfolio.buy("IBM")
class PortfolioSellTest(unittest.TestCase):
    """Tests for Portfolio.sell(), sharing a three-stock fixture."""

    def setUp(self):
        self.portfolio = Portfolio()
        self.portfolio.buy("MSFT", 100, 27.0)
        self.portfolio.buy("DELL", 100, 17.0)
        self.portfolio.buy("ORCL", 100, 34.0)

    def test_sell(self):
        """Selling part of a holding reduces the cost accordingly."""
        self.portfolio.sell("MSFT", 50)
        self.assertEqual(self.portfolio.cost(), 6450)

    def test_not_enough(self):
        """Selling more shares than owned raises ValueError."""
        with self.assertRaises(ValueError):
            self.portfolio.sell("MSFT", 200)

    def test_dont_own_it(self):
        """Selling a stock we never bought raises ValueError."""
        with self.assertRaises(ValueError):
            self.portfolio.sell("IBM", 1)
# Replace Portfolio.current_prices with a stub implementation.
# This avoids the web, but also skips all our current_prices
# code.
class PortfolioValueTest(unittest.TestCase):
    """Tests Portfolio.value() with current_prices stubbed (no web)."""

    def fake_current_prices(self):
        # fixed quotes so value() is deterministic
        return {'IBM': 140.0, 'HPQ': 32.0}

    def setUp(self):
        self.portfolio = Portfolio()
        self.portfolio.buy("IBM", 100, 120.0)
        self.portfolio.buy("HPQ", 100, 30.0)
        self.portfolio.current_prices = self.fake_current_prices

    def test_value(self):
        # 100 * 140.0 + 100 * 32.0
        self.assertEqual(self.portfolio.value(), 17200)
| [
"[email protected]"
]
| |
374eb12b1ec6126e692a94315444e4a7bcf0621b | 4eaab9327d25f851f9e9b2cf4e9687d5e16833f7 | /problems/search_suggestions_system/solution.py | 47a2ff3a14f8b27c1b8af6d2a0b73ebff62b06d6 | []
| no_license | kadhirash/leetcode | 42e372d5e77d7b3281e287189dcc1cd7ba820bc0 | 72aea7d43471e529ee757ff912b0267ca0ce015d | refs/heads/master | 2023-01-21T19:05:15.123012 | 2020-11-28T13:53:11 | 2020-11-28T13:53:11 | 250,115,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | class Solution:
def suggestedProducts(self, products: List[str], searchWord: str) -> List[List[str]]:
products.sort() # time O(nlogn)
array_len = len(products)
ans = []
input_char = ""
for chr in searchWord:
tmp = []
input_char += chr
insertion_index = self.binary_search(products, input_char) # find where input_char can be inserted in-order in the products array
for word_ind in range(insertion_index, min(array_len, insertion_index+3)): # check the following 3 words, if valid
if products[word_ind].startswith(input_char):
tmp.append(products[word_ind])
ans.append(tmp)
return ans
def binary_search(self, array, target): # bisect.bisect_left implementation
lo = 0
hi = len(array)
while lo < hi:
mid = (lo + hi) //2
if array[mid] < target: lo = mid + 1
else: hi = mid
return lo
| [
"[email protected]"
]
| |
fc63244cd75a39edbf500b6fa6de7db12118a2b9 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/auth/application_default/login.py | cd8f38b6c453bd00a3fae5c98e366c42c46a414a | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 9,042 | py | # -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A command to install Application Default Credentials using a user account."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import textwrap
from googlecloudsdk.api_lib.auth import util as auth_util
from googlecloudsdk.calliope import actions
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as c_exc
from googlecloudsdk.command_lib.auth import auth_util as command_auth_util
from googlecloudsdk.command_lib.auth import flags
from googlecloudsdk.command_lib.auth import workforce_login_config as workforce_login_config_util
from googlecloudsdk.core import config
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.credentials import gce as c_gce
from googlecloudsdk.core.credentials import store as c_store
class Login(base.Command):
  r"""Acquire new user credentials to use for Application Default Credentials.
  Obtains user access credentials via a web flow and puts them in the
  well-known location for Application Default Credentials (ADC).
  This command is useful when you are developing code that would normally
  use a service account but need to run the code in a local development
  environment where it's easier to provide user credentials. The credentials
  will apply to all API calls that make use of the Application Default
  Credentials client library. Do not set the `GOOGLE_APPLICATION_CREDENTIALS`
  environment variable if you want to use the credentials generated by this
  command in your local development. This command tries to find a quota
  project from gcloud's context and write it to ADC so that Google client
  libraries can use it for billing and quota. Alternatively, you can use
  the `--client-id-file` flag. In this case, the project owning the client ID
  will be used for billing and quota. You can create the client ID file
  at https://console.cloud.google.com/apis/credentials.
  This command has no effect on the user account(s) set up by the
  `gcloud auth login` command.
  Any credentials previously generated by
  `gcloud auth application-default login` will be overwritten.
  """
  detailed_help = {
      'EXAMPLES':
          """\
          If you want your local application to temporarily use your own user
          credentials for API access, run:
            $ {command}
          If you'd like to login by passing in a file containing your own client
          id, run:
            $ {command} --client-id-file=clientid.json
          """
  }
  @staticmethod
  def Args(parser):
    """Set args for gcloud auth application-default login."""
    parser.add_argument(
        '--client-id-file',
        help='A file containing your own client id to use to login. If '
        '--client-id-file is specified, the quota project will not be '
        'written to ADC.')
    parser.add_argument(
        '--scopes',
        type=arg_parsers.ArgList(min_length=1),
        metavar='SCOPE',
        help='The names of the scopes to authorize for. By default '
        '{0} scopes are used. '
        'The list of possible scopes can be found at: '
        '[](https://developers.google.com/identity/protocols/googlescopes).'
        .format(', '.join(auth_util.DEFAULT_SCOPES)))
    parser.add_argument(
        '--login-config',
        help='Path to the login configuration file (workforce pool, '
        'generated by the Cloud Console or '
        '`gcloud iam workforce-pools create-login-config`)',
        action=actions.StoreProperty(properties.VALUES.auth.login_config_file))
    flags.AddQuotaProjectFlags(parser)
    flags.AddRemoteLoginFlags(parser, for_adc=True)
    parser.display_info.AddFormat('none')
  def Run(self, args):
    """Run the authentication command."""
    # Refuse to run when a fixed access token is configured: the flows
    # below mint fresh credentials and would silently conflict with it.
    # TODO(b/203102970): Remove this condition check after the bug is resolved
    if properties.VALUES.auth.access_token_file.Get():
      raise c_store.FlowError(
          'auth/access_token_file or --access-token-file was set which is not '
          'compatible with this command. Please unset the property and rerun '
          'this command.'
      )
    # On GCE the VM's service credentials already back ADC, so warn (and
    # abort when unattended) before overriding them with a user account.
    if c_gce.Metadata().connected:
      message = textwrap.dedent("""
          You are running on a Google Compute Engine virtual machine.
          The service credentials associated with this virtual machine
          will automatically be used by Application Default
          Credentials, so it is not necessary to use this command.
          If you decide to proceed anyway, your user credentials may be visible
          to others with access to this virtual machine. Are you sure you want
          to authenticate with your personal account?
          """)
      console_io.PromptContinue(
          message=message, throw_if_unattended=True, cancel_on_no=True)
    command_auth_util.PromptIfADCEnvVarIsSet()
    if args.client_id_file and not args.launch_browser:
      raise c_exc.InvalidArgumentException(
          '--no-launch-browser',
          '`--no-launch-browser` flow no longer works with the '
          '`--client-id-file`. Please replace `--no-launch-browser` with '
          '`--no-browser`.'
      )
    scopes = args.scopes or auth_util.DEFAULT_SCOPES
    # Common keyword arguments shared by all three login flows below.
    flow_params = dict(
        no_launch_browser=not args.launch_browser,
        no_browser=args.no_browser,
        remote_bootstrap=args.remote_bootstrap)
    # 1. Try the 3PI web flow with --no-browser:
    # This could be a 3PI flow initiated via --no-browser.
    # If provider_name is present, then this is the 3PI flow.
    # We can start the flow as is as the remote_bootstrap value will be used.
    if args.remote_bootstrap and 'provider_name' in args.remote_bootstrap:
      auth_util.DoInstalledAppBrowserFlowGoogleAuth(
          config.CLOUDSDK_EXTERNAL_ACCOUNT_SCOPES,
          auth_proxy_redirect_uri=(
              'https://sdk.cloud.google/applicationdefaultauthcode.html'
          ),
          **flow_params
      )
      return
    # 2. Try the 3PI web flow with a login configuration file.
    login_config_file = workforce_login_config_util.GetWorkforceLoginConfig()
    if login_config_file:
      if args.client_id_file:
        raise c_exc.ConflictingArgumentsException(
            '--client-id-file is not currently supported for third party login '
            'flows. ')
      if args.scopes:
        raise c_exc.ConflictingArgumentsException(
            '--scopes is not currently supported for third party login flows.')
      # Redirect URI must be sdk.cloud.google for 3PI.
      creds = workforce_login_config_util.DoWorkforceHeadfulLogin(
          login_config_file,
          True,
          auth_proxy_redirect_uri=(
              'https://sdk.cloud.google/applicationdefaultauthcode.html'
          ),
          **flow_params
      )
    else:
      # 3. Try the 1P web flow.
      properties.VALUES.auth.client_id.Set(
          auth_util.DEFAULT_CREDENTIALS_DEFAULT_CLIENT_ID)
      properties.VALUES.auth.client_secret.Set(
          auth_util.DEFAULT_CREDENTIALS_DEFAULT_CLIENT_SECRET)
      creds = auth_util.DoInstalledAppBrowserFlowGoogleAuth(
          scopes,
          client_id_file=args.client_id_file,
          auth_proxy_redirect_uri=(
              'https://sdk.cloud.google.com/applicationdefaultauthcode.html'
          ),
          **flow_params
      )
    if not creds:
      return
    # Optionally wrap the user credentials in service-account
    # impersonation before writing them out as ADC.
    target_impersonation_principal, delegates = None, None
    impersonation_service_accounts = (
        properties.VALUES.auth.impersonate_service_account.Get()
    )
    if impersonation_service_accounts:
      (target_impersonation_principal, delegates
      ) = c_store.ParseImpersonationAccounts(impersonation_service_accounts)
    if not target_impersonation_principal:
      if args.IsSpecified('client_id_file'):
        command_auth_util.DumpADC(creds, quota_project_disabled=False)
      elif args.disable_quota_project:
        command_auth_util.DumpADC(creds, quota_project_disabled=True)
      else:
        command_auth_util.DumpADCOptionalQuotaProject(creds)
    else:
      # TODO(b/184049366): Supports quota project with impersonated creds.
      command_auth_util.DumpImpersonatedServiceAccountToADC(
          creds,
          target_principal=target_impersonation_principal,
          delegates=delegates)
    return creds
| [
"[email protected]"
]
| |
8cdd5f52e919892a5acf7fabc7f846d69d487956 | 5491f4b600f7ecd1d0848d60d7b017e5e407d4c7 | /inventario/migrations/0005_ventamodel.py | 79ad0c9268a28f2a5951adb94199d7fd065bfa48 | []
| no_license | GustavoPMex/web-inventario | 409456dd356bbfcadd735cc9b8e2aae7605a0e37 | d0ac36ee791ff0262f9390497da1dd990581a4fd | refs/heads/master | 2023-06-10T10:08:39.029666 | 2021-06-30T23:40:19 | 2021-06-30T23:40:19 | 296,677,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | # Generated by Django 3.0.8 on 2020-09-29 03:40
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the VentaModel (sales
    # record) table for the "inventario" app.

    # Must run after the historical-articulo migration it builds on.
    dependencies = [
        ('inventario', '0004_historicalarticulomodel'),
    ]
    operations = [
        migrations.CreateModel(
            name='VentaModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=100)),
                ('proveedor', models.CharField(max_length=100)),
                ('vendidos', models.IntegerField()),
                ('precio', models.CharField(max_length=100)),
                # Stamped automatically when the row is first created.
                ('fecha_venta', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                # Human-readable names shown in the Django admin.
                'verbose_name': 'Venta',
                'verbose_name_plural': 'Ventas',
            },
        ),
    ]
| [
"[email protected]"
]
| |
0ae646e5fd55b65b3f924b29c97b5843b2eca062 | bd1362c60313784c90013dfc9f0169e64389bf27 | /scripts/ingestors/soilm_ingest.py | a3a1ef7cc473f3149593d222b9f47ed4891c86b8 | []
| no_license | ForceCry/iem | 391aa9daf796591909cb9d4e60e27375adfb0eab | 4b0390d89e6570b99ca83a5fa9b042226e17c1ad | refs/heads/master | 2020-12-24T19:04:55.517409 | 2013-04-09T14:25:36 | 2013-04-09T14:25:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,725 | py | """
Ingest ISU SOILM data!
DailySI
"TIMESTAMP",
"RECORD",
"TAir_C_Avg",
"TAir_C_Max",
"TAir_C_TMx",
"TAir_C_Min",
"TAir_C_TMn",
"SlrMJ_Tot",
"Rain_mm_Tot",
"WS_mps_S_WVT",
"WindDir_D1_WVT",
"WindDir_SD1_WVT",
"WS_mps_Max",
"WS_mps_TMx",
"DailyET",
"TSoil_C_Avg",
"VWC_12_Avg",
"VWC_24_Avg",
"VWC_50_Avg",
"EC12",
"EC24",
"EC50",
"T12_C_Avg",
"T24_C_Avg",
"T50_C_Avg",
"PA",
"PA_2",
"PA_3"
HrlySI
"TIMESTAMP",
"RECORD",
"TAir_C_Avg",
"RH",
"SlrkW_Avg",
"SlrMJ_Tot",
"Rain_mm_Tot",
"WS_mps_S_WVT",
"WindDir_D1_WVT",
"WindDir_SD1_WVT",
"ETAlfalfa",
"SolarRadCalc",
"TSoil_C_Avg",
"VWC_12_Avg",
"VWC_24_Avg",
"VWC_50_Avg",
"EC12",
"EC24",
"EC50",
"T12_C_Avg",
"T24_C_Avg",
"T50_C_Avg",
"PA",
"PA_2",
"PA_3"
"""
import os
import iemdb
import iemtz
import datetime
ISUAG = iemdb.connect('isuag')
icursor = ISUAG.cursor()
STATIONS = {'CAMI4': dict(daily='/mnt/home/mesonet/sm/Calumet/Calumet_DailySI.dat',
hourly='/mnt/home/mesonet/sm/Calumet/Calumet_HrlySI.dat'),
}
def hourly_process(nwsli, maxts):
    """Ingest new rows from the station's HrlySI datalogger file.

    Walks the CSV-style file backwards (newest rows are at the end),
    stops at the first timestamp at or before `maxts`, and INSERTs every
    newer row into the sm_hourly table via the module-level cursor.

    :param nwsli: station identifier, a key of the STATIONS dict
    :param maxts: tz-aware datetime of the newest sm_hourly row already stored
    """
    fn = STATIONS[nwsli]['hourly']
    if not os.path.isfile(fn):
        return
    lines = open(fn).readlines()
    if len(lines) < 6:
        return
    # Read header....
    # Line 2 of the file holds the quoted column names.
    headers = []
    for col in lines[1].strip().replace('"', '').split(","):
        headers.append(col)
    # Read data
    # Iterate newest-to-oldest; the first 4 lines are file metadata/header.
    for i in range(len(lines)-1,3,-1):
        tokens = lines[i].strip().replace('"','').split(",")
        if len(tokens) != len(headers):
            continue
        valid = datetime.datetime.strptime(tokens[ headers.index('TIMESTAMP')],
                                '%Y-%m-%d %H:%M:%S')
        valid = valid.replace(tzinfo=iemtz.CentralStandard)
        if valid <= maxts:
            break
        # We are ready for dbinserting!
        # The file's first two columns (TIMESTAMP, RECORD) are replaced by
        # station + valid; remaining columns map 1:1 onto table columns.
        dbcols = "station,valid," + ",".join(headers[2:])
        dbvals = "'%s','%s-06'," % (nwsli, valid.strftime("%Y-%m-%d %H:%M:%S"))
        for v in tokens[2:]:
            dbvals += "%s," % (formatter(v),)
        # NOTE(review): SQL built by string interpolation; values come from an
        # on-disk datalogger file, but parameterized queries would be safer.
        sql = "INSERT into sm_hourly (%s) values (%s)" % (dbcols, dbvals[:-1])
        icursor.execute(sql)
def formatter(v):
    """Render one raw datalogger token as a SQL value fragment.

    Tokens containing "NAN" become SQL NULL; tokens containing a space
    (only timestamps do) are quoted with a "-06" UTC offset appended;
    every other token is passed through unchanged.
    """
    if "NAN" in v:
        return 'Null'
    if " " in v:
        # Timestamp value: quote it and pin the zone to CST (-06).
        return "'{0}-06'".format(v)
    return v
def daily_process(nwsli, maxts):
    """Ingest new rows from the station's DailySI datalogger file.

    Each file row is attributed to the *previous* calendar day (the logger
    writes the daily summary just after midnight).  Rows newer than
    `maxts` are inserted into sm_daily; a row exactly equal to `maxts` is
    deleted first and re-inserted (reprocessed).

    :param nwsli: station identifier, a key of the STATIONS dict
    :param maxts: date of the newest sm_daily row already stored
    """
    fn = STATIONS[nwsli]['daily']
    if not os.path.isfile(fn):
        return
    lines = open(fn).readlines()
    if len(lines) < 6:
        return
    # Read header....
    # Line 2 of the file holds the quoted column names.
    headers = []
    for col in lines[1].strip().replace('"', '').split(","):
        headers.append(col)
    # Read data
    # Iterate newest-to-oldest; the first 4 lines are file metadata/header.
    for i in range(len(lines)-1,3,-1):
        tokens = lines[i].strip().replace('"','').split(",")
        if len(tokens) != len(headers):
            continue
        valid = datetime.datetime.strptime(tokens[ headers.index('TIMESTAMP')][:10],
                                '%Y-%m-%d')
        # Attribute the summary row to the day before the file timestamp.
        valid = valid.date() - datetime.timedelta(days=1)
        if valid < maxts:
            break
        if valid == maxts: # Reprocess
            icursor.execute("""DELETE from sm_daily WHERE valid = '%s' and
            station = '%s' """ % (valid.strftime("%Y-%m-%d") ,nwsli))
        # We are ready for dbinserting!
        # Skip the file's TIMESTAMP and RECORD columns; use station + valid.
        dbcols = "station,valid," + ",".join(headers[2:])
        dbvals = "'%s','%s'," % (nwsli, valid.strftime("%Y-%m-%d"))
        for v in tokens[2:]:
            dbvals += "%s," % (formatter(v),)
        # NOTE(review): SQL built by string interpolation -- see hourly path.
        sql = "INSERT into sm_daily (%s) values (%s)" % (dbcols, dbvals[:-1])
        icursor.execute(sql)
def get_max_timestamps(nwsli):
    """Return the newest stored timestamps for a station.

    :param nwsli: station identifier
    :return: dict with 'hourly' (tz-aware datetime) and 'daily' (date),
        defaulting to 1 Jan 2012 when the station has no rows yet
    """
    data = {'hourly': datetime.datetime(2012,1,1, tzinfo=iemtz.CentralStandard),
            'daily': datetime.date(2012,1,1)}
    icursor.execute("""SELECT max(valid) from sm_daily WHERE station = '%s'""" % (
                                                    nwsli,))
    row = icursor.fetchone()
    if row[0] is not None:
        data['daily'] = row[0]
    icursor.execute("""SELECT max(valid) from sm_hourly WHERE station = '%s'""" % (
                                                    nwsli,))
    row = icursor.fetchone()
    if row[0] is not None:
        data['hourly'] = row[0]
    return data
def main():
    """Ingest hourly and daily files for every configured station, then
    commit once and close the database connection."""
    for nwsli in STATIONS.keys():
        maxobs = get_max_timestamps(nwsli)
        hourly_process(nwsli, maxobs['hourly'])
        daily_process(nwsli, maxobs['daily'])
    icursor.close()
    ISUAG.commit()
    ISUAG.close()
if __name__ == '__main__':
main() | [
"[email protected]"
]
| |
de8f1e1f2f085838464375d1849719293a936020 | 0af30c2e3ddcc80a19ea9cfaad9d7e1fedf8b876 | /210311-210314/백)2579 계단 오르기/이동재.py | b27bbc2081db13195ca37f930e92c97bac44a0d8 | []
| no_license | winterash2/algorithm_study_2021_1 | d1cd6077f71f68e7fc3eb6dfae7b2cc220885e4c | c1fee62c7e5e560c3bf7ae5e6166866d0147f23f | refs/heads/master | 2023-04-02T20:11:04.169856 | 2021-04-05T11:18:22 | 2021-04-05T11:18:22 | 327,563,535 | 1 | 2 | null | 2021-01-24T14:17:40 | 2021-01-07T09:28:08 | Python | UTF-8 | Python | false | false | 512 | py | import sys
input = sys.stdin.readline  # fast input; deliberately shadows the builtin
# Baekjoon 2579 "climbing stairs": read N stair scores from stdin and
# print the best total reachable score ending on the last stair.
N = int(input())
scores = []
for _ in range(N):
    scores.append(int(input()))
# dp1[i]: best score ending on stair i where stair i-1 was skipped
# dp2[i]: best score ending on stair i where stair i-1 was also stepped on
dp1 = [0 for _ in range(N)]
dp2 = [0 for _ in range(N)]
# initialize stair 0
dp1[0] = scores[0]
if N == 1:
    print(scores[0])
else: # N >= 2
    # initialize stair 1
    dp1[1] = scores[1]
    dp2[1] = scores[1] + dp1[0]
    for i in range(2, N):
        # arrive with a 2-step jump from i-2 ...
        dp1[i] = scores[i] + max(dp1[i-2], dp2[i-2])
        # ... or a 1-step from i-1 (which itself must have been a jump)
        dp2[i] = scores[i] + dp1[i-1]
    print(max(dp1[N-1], dp2[N-1]))
"[email protected]"
]
| |
64b1ff60158655b97b826b8467eb04fc9536b67f | c264153f9188d3af187905d846fa20296a0af85d | /Python/Python3网络爬虫开发实战/《Python3网络爬虫开发实战》随书源代码/urllib/error/demo3.py | 6928b02a18d8a9762b9a281c84c97d5aa162f9c4 | []
| no_license | IS-OSCAR-YU/ebooks | 5cd3c1089a221759793524df647e231a582b19ba | b125204c4fe69b9ca9ff774c7bc166d3cb2a875b | refs/heads/master | 2023-05-23T02:46:58.718636 | 2021-06-16T12:15:13 | 2021-06-16T12:15:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | from urllib import request, error
try:
    # Network call; NOTE(review): the ".htm" suffix looks intended to
    # provoke an HTTP error for demo purposes -- confirm.
    response = request.urlopen('http://cuiqingcai.com/index.htm')
except error.HTTPError as e:
    # HTTPError is a subclass of URLError, so it must be caught first.
    print(e.reason, e.code, e.headers, sep='\n')
except error.URLError as e:
    # Non-HTTP failures (DNS, connection refused, ...) land here.
    print(e.reason)
else:
    print('Request Successfully')
"[email protected]"
]
| |
c5c570c5d072a814ff270e276deaef84ad277e35 | 56255c15702f4f4a01b7f785f956cee7290d0097 | /segmentation_pytorch/utils/train.py | ef5089b869ed248028f04a015305e45cdec34d74 | []
| no_license | devhliu/PyTorch_UNOdeMSegNet | d2561606aac34ace4664c48bc000d4c4a915699a | 3a446ca71ddd74e612bf2c2acc43e7b210366e5b | refs/heads/master | 2020-12-24T04:41:23.674029 | 2019-11-13T08:07:40 | 2019-11-13T08:07:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,142 | py | import sys
import torch
import pdb
from tqdm import tqdm as tqdm
from torchnet.meter import AverageValueMeter
# from ..models.CRF import dense_crf
class Epoch:
    """Base class for one pass (train or valid) over a dataloader.

    Subclasses implement batch_update() (per-batch forward/backward) and
    may override on_epoch_start() (e.g. to toggle train/eval mode).
    """
    def __init__(self, model, loss, metrics, stage_name, device='cpu', verbose=True):
        # `loss` and every metric must expose a __name__ attribute, which
        # is used as the key in the logs dict returned by run().
        self.model = model
        self.loss = loss
        self.metrics = metrics
        self.stage_name = stage_name
        self.verbose = verbose
        self.device = device
        self._to_device()
    def _to_device(self):
        """Move the model, loss and all metrics onto the configured device."""
        self.model.to(self.device)
        self.loss.to(self.device)
        for metric in self.metrics:
            metric.to(self.device)
    def _format_logs(self, logs):
        """Render the logs dict as 'name - value, ...' for the tqdm postfix."""
        str_logs = ['{} - {:.4}'.format(k, v) for k, v in logs.items()]
        s = ', '.join(str_logs)
        return s
    def batch_update(self, x, y):
        """Process one batch and return (loss, prediction); subclass hook."""
        raise NotImplementedError
    def on_epoch_start(self):
        """Hook called once before iterating the dataloader."""
        pass
    def run(self, dataloader):
        """Iterate `dataloader` once; return a dict of running-mean loss/metrics."""
        self.on_epoch_start()
        logs = {}
        loss_meter = AverageValueMeter()
        metrics_meters = {metric.__name__: AverageValueMeter() for metric in self.metrics}
        with tqdm(dataloader, desc=self.stage_name, file=sys.stdout, disable=not (self.verbose)) as iterator:
            for x, y in iterator:
                # x, y = x.to(self.device), y.to(self.device)
                x = x.to(self.device)
                # The target may be a single tensor or a list of tensors;
                # move every element to the device either way.
                if isinstance(y, list):
                    y = [i.to(self.device) for i in y]
                else:
                    y = y.to(self.device)
                loss, y_pred = self.batch_update(x, y)
                # update loss logs
                loss_value = loss.cpu().detach().numpy()
                loss_meter.add(loss_value)
                loss_logs = {self.loss.__name__: loss_meter.mean}
                logs.update(loss_logs)
                # update metrics logs
                # When y is a list, metrics are computed against its last element.
                y = y[-1] if isinstance(y, list) else y
                for metric_fn in self.metrics:
                    metric_value = metric_fn(y_pred, y).cpu().detach().numpy()
                    metrics_meters[metric_fn.__name__].add(metric_value)
                metrics_logs = {k: v.mean for k, v in metrics_meters.items()}
                logs.update(metrics_logs)
                if self.verbose:
                    s = self._format_logs(logs)
                    iterator.set_postfix_str(s)
        return logs
class TrainEpoch(Epoch):
    """Training pass: runs an optimizer step for every batch."""
    def __init__(self, model, loss, metrics, optimizer, device='cpu', verbose=True, crf=False):
        super().__init__(
            model=model,
            loss=loss,
            metrics=metrics,
            stage_name='train',
            device=device,
            verbose=verbose,
        )
        self.crf = crf
        self.optimizer = optimizer
    def on_epoch_start(self):
        # Put the model in training mode before the epoch begins.
        self.model.train()
    def batch_update(self, x, y):
        """Forward, loss, backward, optimizer step; return (loss, prediction)."""
        self.optimizer.zero_grad()
        prediction = self.model.forward(x)
        if self.crf:
            # NOTE(review): the dense_crf import at the top of this module is
            # commented out, so crf=True would raise NameError -- confirm
            # before enabling this path.
            prediction = dense_crf(img=prediction, output_probs=y)
        loss = self.loss(prediction, y)
        loss.backward()
        self.optimizer.step()
        # Multi-output models return a list; report only the last prediction.
        if isinstance(prediction, list):
            return loss, prediction[-1]
        return loss, prediction
class ValidEpoch(Epoch):
    """Validation pass: no gradient tracking, model in eval mode."""
    def __init__(self, model, loss, metrics, device='cpu', verbose=True):
        super().__init__(
            model=model,
            loss=loss,
            metrics=metrics,
            stage_name='valid',
            device=device,
            verbose=verbose,
        )
    def on_epoch_start(self):
        # Switch to inference behaviour before the epoch begins.
        self.model.eval()
    def batch_update(self, x, y):
        """Forward pass without gradients; return (loss, prediction)."""
        with torch.no_grad():
            prediction = self.model.forward(x)
            if isinstance(prediction, list):
                prediction = prediction[-1]
            # NOTE(review): loss is called with a third argument
            # (self.model.training) here, unlike TrainEpoch's two-argument
            # call -- confirm the loss object's signature supports both.
            loss = self.loss(prediction, y, self.model.training)
        return loss, prediction
| [
"[email protected]"
]
| |
af3c13b0b6d71fc197d85e36c8e32fa818a832f2 | b72c37e3ccda507b231649cddd5c7845c6c34ba1 | /PythonBasic/Day15/exec5_enumate.py | bcde4f16b170aa836494556ff4f435dfe5176b43 | []
| no_license | ljrdemail/AID1810 | 51c61c255b5c5efc1dc642b46691a614daedd85e | b417bd831bc1550ab953ce7ca23f54e34b8b2692 | refs/heads/master | 2020-04-24T09:45:14.781612 | 2019-02-21T11:26:49 | 2019-02-21T11:26:49 | 171,866,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | # -*- coding:utf-8 -*-
def myenumerate(iterable, start=0):
    """Yield (index, item) pairs for `iterable`, counting from `start`.

    A hand-rolled generator equivalent of the builtin enumerate().
    """
    index = start  # running index for the next item
    for item in iterable:
        yield (index, item)  # emit one (index, item) tuple
        index += 1
# Demo: print (index, letter) pairs for "ABCDE", indices starting at 1.
d = myenumerate("ABCDE", 1)
for i in d:
    print(i)  # i is an (index, character) tuple
| [
"root"
]
| root |
fed79b9a386ddab376d7acd6d52191fc5ec5f846 | 23fb5b1fb275892b0a27657685c062360630889e | /Week 7/django/src/bookstore/settings.py | ad6bf63e9bc7c5c3b7fdb61d360525456c224875 | [
"MIT"
]
| permissive | carlosal1015/python2017 | 2b596fa1e4cad4de06537ffc99fb0af0dfa4563d | c1eed0201039c6b4daf857dd1f08c47a7b1e3f45 | refs/heads/master | 2020-09-13T17:15:50.419142 | 2018-05-24T12:44:40 | 2018-05-24T12:44:40 | 222,850,901 | 1 | 2 | MIT | 2019-11-20T04:32:23 | 2019-11-20T04:30:54 | null | UTF-8 | Python | false | false | 3,161 | py | """
Django settings for bookstore project.
Generated by 'django-admin startproject' using Django 1.11.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=4*-6vzd*%j--m+ki)mhd+rpdw2v#t@_&r8z8k8typl8292#te'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'main',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bookstore.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bookstore.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
855033498433fc4b023163b8a1e030790481cc8e | 102d09ef1d6effe166ad703ba4472c45dfb03263 | /py/Unique Binary Search Trees.py | ff810735f7dccf5e13975b50685aee50ae48a74b | []
| no_license | bitcsdby/Codes-for-leetcode | 5693100d4b66de65d7f135bbdd81b32650aed7d0 | 9e24e621cfb9e7fd46f9f02dfc40a18a702d4990 | refs/heads/master | 2016-09-05T08:43:31.656437 | 2014-08-02T15:14:53 | 2014-08-02T15:14:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | class Solution:
# @return an integer
def numTrees(self, n):
if n == 0 or n == 1:
return 1;
l = [];
l.append(1);
l.append(1);
count = 2;
while count <= n:
c = 0;
tmp = 0;
while c < count:
tmp += l[count-c-1] * l[c];
c += 1;
l.append(tmp);
count += 1;
return l.pop(); | [
"[email protected]"
]
| |
184ebeb33592af81e788e14c06df93a03090abd8 | 5f27bc1a0460a078f6fe33a544f494a5dff7f452 | /script/old/O_0703_arm_move_jaco.py | 1fa6247e4c4c661ef79584d37b0acde343aed2be | []
| no_license | A-Why-not-fork-repositories-Good-Luck/arm_move | 3e381f0310265f47da14beaac136c358fb318f92 | e2e6182cfd93df1935bd3b8e9158134964dc44fa | refs/heads/master | 2023-03-15T18:37:17.337770 | 2020-11-18T06:46:06 | 2020-11-18T06:46:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,159 | py | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2013, SRI International
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of SRI International nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: Acorn Pooley, Mike Lautman
## BEGIN_SUB_TUTORIAL imports
##
## To use the Python MoveIt! interfaces, we will import the `moveit_commander`_ namespace.
## This namespace provides us with a `MoveGroupCommander`_ class, a `PlanningSceneInterface`_ class,
## and a `RobotCommander`_ class. More on these below. We also import `rospy`_ and some messages that we will use:
##
import time
import sys
import copy
import rospy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
import sensor_msgs.msg
from arm_move.msg._arm_move_msg import arm_move_msg
from arm_move.msg._box_info_msg import box_info_msg
from arm_move.msg._attach_hand_box import attach_hand_box
from arm_move.srv._box_info_srv import *
from arm_move.srv._att_hand_box_srv import *
from arm_move.srv._arm_move_srv import *
from arm_move.srv._work_start_srv import *
from arm_move.srv._arm_goalJoint_srv import *
from math import pi
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
ROBOT_ARM_GROUP = 'arm'
ROBOT_EE_GROUP = 'hand'
def all_close(goal, actual, tolerance):
    """Test whether `actual` matches `goal` within `tolerance`, element-wise.

    Pose and PoseStamped messages are unwrapped (recursively) to flat
    float lists before comparison.

    @param: goal A list of floats, a Pose or a PoseStamped
    @param: actual A value of the same type as `goal`
    @param: tolerance Maximum allowed absolute per-element difference
    @returns: bool
    """
    if type(goal) is list:
        # Compare element by element; bail out on the first mismatch.
        for index, want in enumerate(goal):
            if abs(actual[index] - want) > tolerance:
                return False
        return True
    if type(goal) is geometry_msgs.msg.PoseStamped:
        return all_close(goal.pose, actual.pose, tolerance)
    if type(goal) is geometry_msgs.msg.Pose:
        return all_close(pose_to_list(goal), pose_to_list(actual), tolerance)
    return True
class MoveGroupPythonIntefaceTutorial(object):
"""MoveGroupPythonIntefaceTutorial"""
def __init__(self):
super(MoveGroupPythonIntefaceTutorial, self).__init__()
robot = moveit_commander.RobotCommander()
scene = moveit_commander.PlanningSceneInterface()
self.group_name = 'arm' # this is just for the initialization
move_group = moveit_commander.MoveGroupCommander(self.group_name)
display_trajectory_publisher = rospy.Publisher('/move_group/display_planned_path',
moveit_msgs.msg.DisplayTrajectory,
queue_size=20)
traj_arm_publisher = rospy.Publisher('/traj_arm', moveit_msgs.msg.RobotTrajectory, queue_size=100)
feasibility_flag = rospy.Publisher('/arm_feasibility', String, queue_size=20)
planning_frame = move_group.get_planning_frame()
# eef_link = move_group.get_end_effector_link()
eef_link = 'j2n6s300_end_effector'
print "end eef link list\n", eef_link
group_names = robot.get_group_names()
# Misc variables
self.box_name = ''
self.robot = robot
self.scene = scene
self.move_group = move_group
self.display_trajectory_publisher = display_trajectory_publisher
self.traj_arm_publisher = traj_arm_publisher
self.feasibility_flag = feasibility_flag
self.planning_frame = planning_frame
self.eef_link = eef_link
self.group_names = group_names
self.object_list = []
def go_to_pose_goal_m(self, data):
move_group = moveit_commander.MoveGroupCommander(data.arm_name[0])
# move_group = self.move_group
pose_goal = geometry_msgs.msg.Pose()
pose_goal.position = data.goal_position
pose_goal.orientation = data.goal_orientation
# move_group.set_planner_id('SPARStwo')
# move_group.set_planner_id('RRTstar')
# move_group.set_planner_id('BiTRRT')
move_group.set_num_planning_attempts(10000)
move_group.set_planning_time(5)
move_group.set_goal_position_tolerance(0.01)
move_group.set_goal_orientation_tolerance(0.01)
move_group.set_pose_target(pose_goal)
print "goal pose:", pose_goal
plan = move_group.plan()
move_group.execute(plan, wait=True)
traj_arm_pub = self.traj_arm_publisher
traj_arm_pub.publish(plan)
move_group.clear_pose_targets()
current_pose = self.move_group.get_current_pose().pose
return all_close(pose_goal, current_pose, 0.01)
def goalPose_feasibility_check_m(self, data):
# move_group = self.move_group
move_group = moveit_commander.MoveGroupCommander(data.arm_name[0])
pose_goal = geometry_msgs.msg.Pose()
pose_goal.position = data.position
pose_goal.orientation = data.orientation
# move_group.set_planner_id('SPARStwo')
# move_group.set_planner_id('RRTstar')
# move_group.set_planner_id('BiTRRT')
move_group.set_num_planning_attempts(10000)
move_group.set_planning_time(5)
move_group.set_goal_position_tolerance(0.01)
move_group.set_goal_orientation_tolerance(0.01)
move_group.set_pose_target(pose_goal)
plan = move_group.plan()
# print plan
print "plan.joint_trajectory.joint_names :", plan.joint_trajectory.joint_names
feasibility_flag_pub = self.feasibility_flag
feasible_flag_msg = String()
if len(plan.joint_trajectory.joint_names) == 0:
print "no plan found"
feasible_flag_msg = '0'
feasibility_flag_pub.publish(feasible_flag_msg)
elif len(plan.joint_trajectory.joint_names) > 0:
print "plan found"
feasible_flag_msg = '1'
feasibility_flag_pub.publish(feasible_flag_msg)
time.sleep(3)
traj_arm_pub = self.traj_arm_publisher
traj_arm_pub.publish(plan)
time.sleep(2)
move_group.stop()
move_group.clear_pose_targets()
current_pose = self.move_group.get_current_pose().pose
return all_close(pose_goal, current_pose, 0.01)
def pickup(self, upCM, scale=1):
move_group = self.move_group
waypoints = []
wpose = move_group.get_current_pose().pose
wpose.position.z -= -0.05 # First move up (z)
waypoints.append(copy.deepcopy(wpose))
(plan, fraction) = move_group.compute_cartesian_path(
waypoints, # waypoints to follow
0.01, # eef_step
0.0) # jump_threshold
move_group.execute(plan, wait=True)
return plan, fraction
def display_trajectory(self, plan):
robot = self.robot
display_trajectory_publisher = self.display_trajectory_publisher
display_trajectory = moveit_msgs.msg.DisplayTrajectory()
display_trajectory.trajectory_start = robot.get_current_state()
display_trajectory.trajectory.append(plan)
# Publish
display_trajectory_publisher.publish(display_trajectory);
def execute_plan(self, plan):
move_group = self.move_group
move_group.execute(plan, wait=True)
def wait_for_state_update(self, box_is_known=False, box_is_attached=False, timeout=4):
box_name = self.box_name
scene = self.scene
start = rospy.get_time()
seconds = rospy.get_time()
while (seconds - start < timeout) and not rospy.is_shutdown():
# Test if the box is in attached objects
attached_objects = scene.get_attached_objects([box_name])
is_attached = len(attached_objects.keys()) > 0
is_known = box_name in scene.get_known_object_names()
if (box_is_attached == is_attached) and (box_is_known == is_known):
return True
rospy.sleep(0.1)
seconds = rospy.get_time()
return False
## END_SUB_TUTORIAL
def add_box(self, timeout=4):
box_name = self.box_name
scene = self.scene
box_pose = geometry_msgs.msg.PoseStamped()
box_pose.header.frame_id = "j2n6s300_link_base"
box_pose.pose.orientation.w = 1.0
box_pose.pose.position.z = 0.07 # slightly above the end effector
box_name = "box"
scene.add_box(box_name, box_pose, size=(0.1, 0.1, 0.1))
self.box_name = box_name
return self.wait_for_state_update(box_is_known=True, timeout=timeout)
def add_box_m(self, data, timeout=4):
print "Start 'add box_m'", data.object_name[0]
box_name = data.object_name[0]
box_pose = geometry_msgs.msg.PoseStamped()
box_pose.header.frame_id = "j2n6s300_link_base"
box_pose.pose.position = data.object_position
box_pose.pose.orientation = data.object_orientation
box_scale = (data.object_scale.x, data.object_scale.y, data.object_scale.z)
self.scene.add_box(box_name, box_pose, box_scale)
self.box_name = box_name
self.object_list.append(box_name)
return self.wait_for_state_update(box_is_known=True, timeout=timeout)
def attach_box(self, timeout=4):
box_name = self.box_name
robot = self.robot
scene = self.scene
eef_link = self.eef_link
group_names = self.group_names
grasping_group = 'hand'
touch_links = robot.get_link_names(group=grasping_group)
scene.attach_box(eef_link, box_name, touch_links=touch_links)
return self.wait_for_state_update(box_is_attached=True, box_is_known=False, timeout=timeout)
def attach_box_m(self, data, timeout=4):
robot = self.robot
scene = self.scene
eef_link = self.eef_link
group_names = self.group_names
grasping_group = data.hand_name[0]
touch_links = robot.get_link_names(group=grasping_group)
print "touch links list\n", touch_links
scene.attach_box(eef_link, data.box_name[0], touch_links=touch_links)
return self.wait_for_state_update(box_is_attached=True, box_is_known=False, timeout=timeout)
def detach_box_m(self, data, timeout=4):
scene = self.scene
eef_link = self.eef_link
scene.remove_attached_object(eef_link, name=data.box_name[0])
return self.wait_for_state_update(box_is_known=True, box_is_attached=False, timeout=timeout)
def detach_box(self, timeout=4):
box_name = self.box_name
scene = self.scene
eef_link = self.eef_link
scene.remove_attached_object(eef_link, name=box_name)
return self.wait_for_state_update(box_is_known=True, box_is_attached=False, timeout=timeout)
def remove_box_m(self, data, timeout=4):
scene = moveit_commander.PlanningSceneInterface()
self.scene = scene
self.box_name = data.object_name[0]
scene.remove_world_object(self.box_name)
return self.wait_for_state_update(box_is_attached=False, box_is_known=False, timeout=timeout)
def remove_box(self, timeout=4):
box_name = self.box_name
scene = self.scene
scene.remove_world_object(box_name)
return self.wait_for_state_update(box_is_attached=False, box_is_known=False, timeout=timeout)
def setjoint_m(self, data):
print "go to initial pose"
self.group_name = 'arm' # this is just for the initialization
move_group = moveit_commander.MoveGroupCommander(self.group_name)
joint_goal = move_group.get_current_joint_values()
joint_goal[0] = 0.60
joint_goal[1] = +0.3
joint_goal[2] = -0.054
joint_goal[3] = -2.25
joint_goal[4] = -1.59
joint_goal[5] = -0.3
joint_goal[6] = 0.01
# The go command can be called with joint values, poses, or without any
# parameters if you have already set the pose or joint target for the group
move_group.go(joint_goal, wait=True)
move_group.stop()
current_joints = move_group.get_current_joint_values()
return all_close(joint_goal, current_joints, 0.01)
def move_joints_m(self, data):
self.group_name = data.name[0] # this is just for the initialization
print self.group_name, "planning group!!!!!!!!!!1"
move_group = moveit_commander.MoveGroupCommander(self.group_name)
joint_goal = move_group.get_current_joint_values()
joint_goal[0] = data.position[0]
joint_goal[1] = data.position[1]
joint_goal[2] = data.position[2]
joint_goal[3] = data.position[3]
joint_goal[4] = data.position[4]
joint_goal[5] = data.position[5]
# joint_goal[6] = data.position[6]
move_group.go(joint_goal, wait=True)
move_group.stop()
current_joints = move_group.get_current_joint_values()
return all_close(joint_goal, current_joints, 0.01)
def remove_all_obj_m(self, data):
print "remove all objects_m if 1, data:", data, type(data)
if data.data == '1':
print "remove all start"
for i in tutorial.object_list:
scene = moveit_commander.PlanningSceneInterface()
self.scene = scene
self.box_name = i
scene.remove_world_object(self.box_name)
'''
(function)_s means that it is an server which gets ROS service messages.
'''
def add_box_s(self, data, timeout=4):
print "Start 'add box_s'", data.object_name[0]
box_name = data.object_name[0]
box_pose = geometry_msgs.msg.PoseStamped()
box_pose.header.frame_id = "j2n6s300_link_base"
box_pose.pose.position = data.object_position
box_pose.pose.orientation = data.object_orientation
box_scale = (data.object_scale.x, data.object_scale.y, data.object_scale.z)
self.scene.add_box(box_name, box_pose, box_scale)
self.box_name = box_name
self.object_list.append(box_name)
print "add_box_s ends"
return box_info_srvResponse(
w_flag=1
)
def del_box_s(self, data, timeout=4):
print "delete ", data.object_name[0]
scene = moveit_commander.PlanningSceneInterface()
self.scene = scene
self.box_name = data.object_name[0]
scene.remove_world_object(self.box_name)
print "del_box_s ends"
return box_info_srvResponse(
w_flag=1
)
def att_box_s(self, data, timeout=4):
print "attach ", data.object_name[0]
robot = self.robot
scene = self.scene
eef_link = self.eef_link
group_names = self.group_names
grasping_group = data.hand_name[0]
touch_links = robot.get_link_names(group=grasping_group)
print "touch links list\n", touch_links
scene.attach_box(eef_link, data.object_name[0], touch_links=touch_links)
print "att_box_s ends"
return att_hand_box_srvResponse(
w_flag=1
)
def det_box_s(self, data, timeout=4):
print "dettach ", data.object_name[0]
scene = self.scene
eef_link = self.eef_link
scene.remove_attached_object(eef_link, name=data.object_name[0])
print "det_box_s ends"
return box_info_srvResponse(
w_flag=1
)
def goalPose_feasibility_check_s(self, data):
# move_group = self.move_group
move_group = moveit_commander.MoveGroupCommander(data.arm_name[0])
pose_goal = geometry_msgs.msg.Pose()
pose_goal.position = data.goal_position
pose_goal.orientation = data.goal_orientation
# move_group.set_planner_id('SPARStwo')
# move_group.set_planner_id('RRTstar')
# move_group.set_planner_id('BiTRRT')
move_group.set_num_planning_attempts(10000)
move_group.set_planning_time(5)
move_group.set_goal_position_tolerance(0.01)
move_group.set_goal_orientation_tolerance(0.01)
move_group.set_pose_target(pose_goal)
plan = move_group.plan()
# print plan
print "plan.joint_trajectory.joint_names :", plan.joint_trajectory.joint_names
if len(plan.joint_trajectory.joint_names) == 0:
print "no plan found"
move_group.stop()
move_group.clear_pose_targets()
return arm_move_srvResponse(
w_flag=1,
feasibility=0,
r_trj=plan
)
elif len(plan.joint_trajectory.joint_names) > 0:
print "plan found"
move_group.stop()
move_group.clear_pose_targets()
return arm_move_srvResponse(
w_flag=1,
feasibility=1,
r_trj=plan
)
def move_goal_pose_s(self, data):
# move_group = self.move_group
move_group = moveit_commander.MoveGroupCommander(data.arm_name[0])
pose_goal = geometry_msgs.msg.Pose()
pose_goal.position = data.goal_position
pose_goal.orientation = data.goal_orientation
# move_group.set_planner_id('SPARStwo')
# move_group.set_planner_id('RRTstar')
# move_group.set_planner_id('BiTRRT')
move_group.set_num_planning_attempts(10000)
move_group.set_planning_time(5)
move_group.set_goal_position_tolerance(0.01)
move_group.set_goal_orientation_tolerance(0.01)
move_group.set_pose_target(pose_goal)
plan = move_group.plan()
move_group.execute(plan, wait=True)
move_group.clear_pose_targets()
# print plan
print "plan.joint_trajectory.joint_names :", plan.joint_trajectory.joint_names
if len(plan.joint_trajectory.joint_names) == 0:
print "no plan found"
move_group.stop()
move_group.clear_pose_targets()
return arm_move_srvResponse(
w_flag=1,
feasibility=0,
r_trj=plan
)
elif len(plan.joint_trajectory.joint_names) > 0:
print "plan found"
move_group.stop()
move_group.clear_pose_targets()
return arm_move_srvResponse(
w_flag=1,
feasibility=1,
r_trj=plan
)
def init_joints_s(self, data):
if data.w_start == 1:
print "go to initial pose"
self.group_name = 'arm' # this is just for the initialization
move_group = moveit_commander.MoveGroupCommander(self.group_name)
joint_goal = move_group.get_current_joint_values()
joint_goal[0] = 0.60
joint_goal[1] = +0.3
joint_goal[2] = -0.054
joint_goal[3] = -2.25
joint_goal[4] = -1.59
joint_goal[5] = -0.3
# The go command can be called with joint values, poses, or without any
# parameters if you have already set the pose or joint target for the group
move_group.go(joint_goal, wait=True)
move_group.stop()
print "init_joint_s ends"
current_joints = move_group.get_current_joint_values()
return work_start_srvResponse(
w_flag=1
)
def remove_all_s(self, data):
print "data:", data
if data.w_start == 1:
print "remove all objects"
for i in tutorial.object_list:
scene = moveit_commander.PlanningSceneInterface()
self.scene = scene
self.box_name = i
scene.remove_world_object(self.box_name)
print "remove_all_s ends"
return work_start_srvResponse(
w_flag=1
)
def move_joints_s(self, data):
self.group_name = data.goalPose.name[0] # this is just for the initialization
print self.group_name,"planning group!!!!!!!!!!1"
move_group = moveit_commander.MoveGroupCommander(self.group_name)
joint_goal = move_group.get_current_joint_values()
joint_goal[0] = data.goalPose.position[0]
joint_goal[1] = data.goalPose.position[1]
joint_goal[2] = data.goalPose.position[2]
joint_goal[3] = data.goalPose.position[3]
joint_goal[4] = data.goalPose.position[4]
joint_goal[5] = data.goalPose.position[5]
move_group.go(joint_goal, wait=True)
move_group.stop()
current_joints = move_group.get_current_joint_values()
print "move_joint_s ends"
return arm_goalJoint_srvResponse(
w_flag=1
)
def listener():
    """Start the MoveIt arm-controller ROS node.

    Initializes moveit_commander and rospy, registers every topic
    subscriber and service this node offers (all handled by the
    module-level ``tutorial`` instance), then blocks in rospy.spin()
    until shutdown.
    """
    moveit_commander.roscpp_initialize(sys.argv)
    rospy.init_node('moveit_arm_controller', anonymous=True)
    # =================== message!! ======================
    rospy.Subscriber('arm_goalPose', arm_move_msg, tutorial.go_to_pose_goal_m)
    rospy.Subscriber('feasibility_check', arm_move_msg, tutorial.goalPose_feasibility_check_m)
    rospy.Subscriber('arm_initJoint', String, tutorial.setjoint_m)
    rospy.Subscriber('remove_all_objects', String, tutorial.remove_all_obj_m)
    rospy.Subscriber('arm_goalJoint', sensor_msgs.msg.JointState, tutorial.move_joints_m)
    rospy.Subscriber('add_box_info', box_info_msg, tutorial.add_box_m)
    rospy.Subscriber('del_box_info', box_info_msg, tutorial.remove_box_m)
    rospy.Subscriber('det_box_info', box_info_msg, tutorial.detach_box_m)
    rospy.Subscriber('att_box_info', attach_hand_box, tutorial.attach_box_m)
    # =================== service!! =======================
    rospy.Service('feasibile_check_srv', arm_move_srv, tutorial.goalPose_feasibility_check_s)
    rospy.Service('move_goalpose_srv', arm_move_srv, tutorial.move_goal_pose_s)
    rospy.Service('arm_goalJoint_srv', arm_goalJoint_srv, tutorial.move_joints_s)
    rospy.Service('arm_initJoint_srv', work_start_srv, tutorial.init_joints_s)
    rospy.Service('remove_all_srv', work_start_srv, tutorial.remove_all_s)
    rospy.Service('add_box_srv', box_info_srv, tutorial.add_box_s)
    rospy.Service('del_box_srv', box_info_srv, tutorial.del_box_s)
    rospy.Service('det_box_srv', box_info_srv, tutorial.det_box_s)
    rospy.Service('att_box_srv', att_hand_box_srv, tutorial.att_box_s)
    # Block here servicing callbacks until the node is shut down.
    rospy.spin()
if __name__ == '__main__':
    print "------------------------------"
    print "Arm trajectory NODE starts!!!!"
    print "------------------------------"
    print "Press Ctrl-D to exit at any time"
    # Module-level instance shared by all subscriber/service callbacks in listener().
    tutorial = MoveGroupPythonIntefaceTutorial()
    # NOTE(review): this module-level list is distinct from the
    # ``tutorial.object_list`` attribute read by remove_all_s() —
    # verify which one is actually intended to hold the scene objects.
    object_list = []
    listener()
    print "end node!!"
| [
"[email protected]"
]
| |
8e83913f251d402a25e5c30c18a1ccbc9ca7bc1b | bfbe642d689b5595fc7a8e8ae97462c863ba267a | /bin/Python27/Lib/site-packages/openmdao.lib-0.8.1-py2.7.egg/openmdao/lib/drivers/newsumtdriver.py | 1fe4b338c4195f801abf288f9fbdec1e2569630d | [
"LicenseRef-scancode-other-permissive",
"MIT"
]
| permissive | mcanthony/meta-core | 0c0a8cde1669f749a4880aca6f816d28742a9c68 | 3844cce391c1e6be053572810bad2b8405a9839b | refs/heads/master | 2020-12-26T03:11:11.338182 | 2015-11-04T22:58:13 | 2015-11-04T22:58:13 | 45,806,011 | 1 | 0 | null | 2015-11-09T00:34:22 | 2015-11-09T00:34:22 | null | UTF-8 | Python | false | false | 19,803 | py | """
``newsumtdriver.py`` - Driver for the NEWSUMT optimizer.
"""
# disable complaints about Module 'numpy' has no 'array' member
# pylint: disable-msg=E1101
# Disable complaints Invalid name "setUp" (should match [a-z_][a-z0-9_]{2,30}$)
# pylint: disable-msg=C0103
# Disable complaints about not being able to import modules that Python
# really can import
# pylint: disable-msg=F0401,E0611
# Disable complaints about Too many arguments (%s/%s)
# pylint: disable-msg=R0913
# Disable complaints about Too many local variables (%s/%s) Used
# pylint: disable-msg=R0914
#public symbols
__all__ = ['NEWSUMTdriver']
import logging
try:
from numpy import zeros, ones
from numpy import int as numpy_int
except ImportError as err:
logging.warn("In %s: %r" % (__file__, err))
from openmdao.lib.datatypes.api import Array, Float, Int
from openmdao.main.api import Case, ExprEvaluator
from openmdao.main.exceptions import RunStopped
from openmdao.main.hasparameters import HasParameters
from openmdao.main.hasconstraints import HasIneqConstraints
from openmdao.main.hasobjective import HasObjective
from openmdao.main.driver_uses_derivatives import DriverUsesDerivatives
from openmdao.util.decorators import add_delegate, stub_if_missing_deps
from openmdao.main.interfaces import IHasParameters, IHasIneqConstraints, \
IHasObjective, implements, IOptimizer
import newsumt.newsumtinterruptible as newsumtinterruptible
# code for redirecting unit stderr and stdout
# output from newsumt Fortran code
# Not using it now
# save = None
# null_fds = None
# def redirect_fortran_stdout_to_null():
# '''
# capture the output intended for
# stdout and just send it to dev/null
# '''
# global save, null_fds
# sys.stdout.flush()
# #sys.stdout = open(os.devnull, 'w')
# #sys.stdout = WritableObject()
# # open 2 fds
# null_fds = [os.open(os.devnull, os.O_RDWR), os.open(os.devnull, os.O_RDWR)]
# # save the current file descriptors to a tuple
# save = os.dup(1), os.dup(2)
# # put /dev/null fds on 1 and 2
# os.dup2(null_fds[0], 1)
# os.dup2(null_fds[1], 2)
# def restore_fortran_stdout():
# '''
# restore stdout to the
# value it has before the call to
# redirect_fortran_stdout_to_null
# '''
# global save, null_fds
# sys.stdout.flush()
# #sys.stdout == sys.__stdout__
# # restore file descriptors so I can print the results
# os.dup2(save[0], 1)
# os.dup2(save[1], 2)
# # close the temporary fds
# os.close(null_fds[0])
# os.close(null_fds[1])
# Disable complaints about Unused argument
# pylint: disable-msg=W0613
def user_function(info, x, obj, dobj, ddobj, g, dg, n2, n3, n4, imode, driver):
    """
    Calculate the objective functions, constraints,
    and gradients of those. Call back to the driver
    to get the values that were plugged
    in.

    ``info`` selects what NEWSUMT is asking for:
    1 - objective value, 2 - constraint values,
    3 - first/second derivatives of the objective,
    4/5 - gradients of the nonlinear/linear constraints.
    ``imode == 1`` marks a NEWSUMT-driven finite-difference step.
    ``n2``/``n3``/``n4`` are array-size arguments required by the Fortran
    calling convention and are unused here.

    Note, there is some evidence of loss of precision on the output of
    this function.
    """
    # evaluate objective function or constraint function
    if info in [1, 2]:
        if imode == 1:
            # We are in a finite difference step drive by NEWSUMT
            # However, we still take advantage of a component's
            # user-defined gradients via Fake Finite Difference.
            # Note, NEWSUMT estimates 2nd-order derivatives from
            # the first order differences.
            # Save baseline states and calculate derivatives
            if driver.baseline_point:
                driver.calc_derivatives(first=True, savebase=True)
                driver.baseline_point = False
            # update the parameters in the model
            driver.set_parameters(x)
            # Run model under Fake Finite Difference
            driver.ffd_order = 1
            super(NEWSUMTdriver, driver).run_iteration()
            driver.ffd_order = 0
        else:
            # Optimization step
            driver.set_parameters(x)
            super(NEWSUMTdriver, driver).run_iteration()
            driver.baseline_point = True
        # evaluate objectives
        if info == 1:
            obj = driver.eval_objective()
        # evaluate constraint functions
        if info == 2:
            for i, v in enumerate(driver.get_ineq_constraints().values()):
                val = v.evaluate(driver.parent)
                # Normalize every inequality to the "g <= 0 is feasible" form
                # expected by NEWSUMT, regardless of '>' or '<' in the source.
                if '>' in val[2]:
                    g[i] = val[0]-val[1]
                else:
                    g[i] = val[1]-val[0]
            # save constraint values in driver if this isn't a finite difference
            if imode != 1:
                driver.constraint_vals = g
    elif info == 3 :
        # evaluate the first and second order derivatives
        # of the objective function
        # NEWSUMT bug: sometimes we end up here when ifd=-4
        if not driver.differentiator:
            return obj, dobj, ddobj, g, dg
        driver.ffd_order = 1
        driver.differentiator.calc_gradient()
        driver.ffd_order = 2
        driver.differentiator.calc_hessian(reuse_first=True)
        driver.ffd_order = 0
        obj_name = driver.get_objectives().keys()[0]
        dobj = driver.differentiator.get_gradient(obj_name)
        # Fill ddobj in packed lower-triangular order (row-major, columns 0..row).
        i_current = 0
        for row, name1 in enumerate(driver.get_parameters().keys()):
            for name2 in driver.get_parameters().keys()[0:row+1]:
                ddobj[i_current] = driver.differentiator.get_2nd_derivative(obj_name, wrt=(name1, name2))
                i_current += 1
    elif info in [4, 5]:
        # evaluate gradient of nonlinear or linear constraints.
        # Linear gradients are only called once, at startup
        if info == 5:
            # NEWSUMT bug - During initial run, NEWSUMT will ask for analytic
            # derivatives of the linear constraints even when ifd=-4. The only
            # thing we can do is return zero.
            if not driver.differentiator:
                return obj, dobj, ddobj, g, dg
        driver.ffd_order = 1
        driver.differentiator.calc_gradient()
        driver.ffd_order = 0
        # dg is flattened parameter-major: all constraints for parameter 0,
        # then all constraints for parameter 1, and so on.  The sign flip
        # matches the "g <= 0" convention used above.
        i_current = 0
        for param_name in driver.get_parameters().keys():
            for con_name in driver.get_ineq_constraints().keys():
                dg[i_current] = -driver.differentiator.get_derivative(con_name, wrt=param_name)
                i_current += 1
    return obj, dobj, ddobj, g, dg
# pylint: enable-msg=W0613
class _contrl(object):
"""Just a primitive data structure for storing contrl common block data.
We save the common blocks to prevent collision in the case where there are
multiple instances of NEWSUMT running in our model."""
def __init__(self):
self.clear()
def clear(self):
""" Clear values. """
# pylint: disable-msg=W0201
self.c = 0.0
self.epsgsn = 0.0
self.epsodm = 0.0
self.epsrsf = 0.0
self.fdch = 0.0
self.g0 = 0.0
self.ifd = 0
self.iflapp = 0
self.iprint = 0
self.jsigng = 0
self.lobj = 0
self.maxgsn = 0
self.maxodm = 0
self.maxrsf = 0
self.mflag = 0
self.ndv = 0
self.ntce = 0
self.p = 0.0
self.ra = 0.0
self.racut = 0.0
self.ramin = 0.0
self.stepmx = 0.0
self.tftn = 0.0
# pylint: enable-msg=W0201
class _countr(object):
"""Just a primitive data structure for storing countr common block data.
We save the common blocks to prevent collision in the case where there are
multiple instances of NEWSUMT running in our model."""
def __init__(self):
self.clear()
def clear(self):
""" Clear values. """
# pylint: disable-msg=W0201
self.iobjct = 0
self.iobapr = 0
self.iobgrd = 0
self.iconst = 0
self.icongr = 0
self.inlcgr = 0
self.icgapr = 0
# pylint: enable-msg=W0201
# pylint: disable-msg=R0913,R0902
@stub_if_missing_deps('numpy')
@add_delegate(HasParameters, HasIneqConstraints, HasObjective)
class NEWSUMTdriver(DriverUsesDerivatives):
    """ Driver wrapper of Fortran version of NEWSUMT.
    .. todo:: Check to see if this itmax variable is needed.
              NEWSUMT might handle it for us.
    """
    implements(IHasParameters, IHasIneqConstraints, IHasObjective, IOptimizer)
    # ---- user-facing driver options (forwarded to NEWSUMT) ----
    itmax = Int(10, iotype='in', desc='Maximum number of iterations before \
                        termination.')
    default_fd_stepsize = Float(0.01, iotype='in', desc='Default finite ' \
                              'difference stepsize. Parameters with ' \
                              'specified values override this.')
    ilin = Array(dtype=numpy_int, default_value=zeros(0,'i4'), iotype='in',
                 desc='Array designating whether each constraint is linear.')
    # Control parameters for NEWSUMT.
    # NEWSUMT has quite a few parameters to give the user control over aspects
    # of the solution.
    epsgsn = Float(0.001, iotype='in', desc='Convergence criteria \
                      of the golden section algorithm used for the \
                      one dimensional minimization.')
    epsodm = Float(0.001, iotype='in', desc='Convergence criteria \
                      of the unconstrained minimization.')
    epsrsf = Float(0.001, iotype='in', desc='Convergence criteria \
                      for the overall process.')
    g0 = Float(0.1, iotype='in', desc='Initial value of the transition \
                      parameter.')
    ra = Float(1.0, iotype='in', desc='Penalty multiplier. Required if mflag=1')
    racut = Float(0.1, iotype='in', desc='Penalty multiplier decrease ratio. \
                      Required if mflag=1.')
    ramin = Float(1.0e-13, iotype='in', desc='Lower bound of \
                      penalty multiplier. \
                      Required if mflag=1.')
    stepmx = Float(2.0, iotype='in', desc='Maximum bound imposed on the \
                      initial step size of the one-dimensional \
                      minimization.')
    iprint = Int(0, iotype='in', desc='Print information during NEWSUMT \
                      solution. Higher values are more verbose. If 0,\
                      print initial and final designs only.', high=4, low=0)
    lobj = Int(0, iotype='in', desc='Set to 1 if linear objective function.')
    maxgsn = Int(20, iotype='in', desc='Maximum allowable number of golden \
                      section iterations used for 1D minimization.')
    maxodm = Int(6, iotype='in', desc='Maximum allowable number of one \
                      dimensional minimizations.')
    maxrsf = Int(15, iotype='in', desc='Maximum allowable number of \
                      unconstrained minimizations.')
    mflag = Int(0, iotype='in', desc='Flag for penalty multiplier. \
                      If 0, initial value computed by NEWSUMT. \
                      If 1, initial value set by ra.')
    def __init__(self):
        super(NEWSUMTdriver, self).__init__()
        self.iter_count = 0
        # Save data from common blocks into the driver
        self.contrl = _contrl()
        self.countr = _countr()
        # define the NEWSUMTdriver's private variables
        # note, these are all resized in config_newsumt
        # basic stuff
        self.design_vals = zeros(0, 'd')
        self.constraint_vals = []
        # temp storage
        self.__design_vals_tmp = zeros(0, 'd')
        self._ddobj = zeros(0)
        self._dg = zeros(0)
        self._dh = zeros(0)
        self._dobj = zeros(0)
        self._g = zeros(0)
        self._gb = zeros(0)
        self._g1 = zeros(0)
        self._g2 = zeros(0)
        self._g3 = zeros(0)
        self._s = zeros(0)
        self._sn = zeros(0)
        self._x = zeros(0)
        self._iik = zeros(0, dtype=int)
        self._lower_bounds = zeros(0)
        self._upper_bounds = zeros(0)
        self._iside = zeros(0)
        self.fdcv = zeros(0)
        # Just defined here. Set elsewhere
        self.n1 = self.n2 = self.n3 = self.n4 = 0
        # Ready inputs for NEWSUMT
        self._obj = 0.0
        self._objmin = 0.0
        self.isdone = False
        self.resume = False
        self.uses_Hessians = False
    def start_iteration(self):
        """Perform the optimization.

        Sizes the NEWSUMT work arrays, seeds the design-variable vector
        from the current parameter values, and resets the iteration
        state; the SUMT iterations themselves run in run_iteration().
        """
        # Flag used to figure out if we are starting a new finite difference
        self.baseline_point = True
        # set newsumt array sizes and more...
        self._config_newsumt()
        self.iter_count = 0
        # get the values of the parameters
        # check if any min/max constraints are violated by initial values
        # NOTE(review): despite the comment above, no bounds check is
        # actually performed here — the values are only copied.
        for i, val in enumerate(self.get_parameters().values()):
            value = val.evaluate(self.parent)
            self.design_vals[i] = value
            # next line is specific to NEWSUMT
            self.__design_vals_tmp[i] = value
        # Call the interruptible version of SUMT in a loop that we manage
        self.isdone = False
        self.resume = False
    def continue_iteration(self):
        """Returns True if iteration should continue."""
        return not self.isdone and self.iter_count < self.itmax
    def pre_iteration(self):
        """Checks or RunStopped and evaluates objective."""
        super(NEWSUMTdriver, self).pre_iteration()
        if self._stop:
            self.raise_exception('Stop requested', RunStopped)
    def run_iteration(self):
        """ The NEWSUMT driver iteration."""
        # Restore this instance's copy of the Fortran common blocks before
        # calling into the shared library (other driver instances may have
        # overwritten them).
        self._load_common_blocks()
        try:
            ( fmin, self._obj, self._objmin, self.design_vals,
              self.__design_vals_tmp, self.isdone, self.resume) = \
              newsumtinterruptible.newsuminterruptible(user_function,
                             self._lower_bounds, self._upper_bounds,
                             self._ddobj, self._dg, self._dh, self._dobj,
                             self.fdcv, self._g,
                             self._gb, self._g1, self._g2, self._g3,
                             self._obj, self._objmin,
                             self._s, self._sn, self.design_vals, self.__design_vals_tmp,
                             self._iik, self.ilin, self._iside,
                             self.n1, self.n2, self.n3, self.n4,
                             self.isdone, self.resume, analys_extra_args = (self,))
        except Exception, err:
            self._logger.error(str(err))
            raise
        self._save_common_blocks()
        self.iter_count += 1
        # Update the parameters and run one final time with what it gave us.
        # This update is needed because I obeserved that the last callback to
        # user_function is the final leg of a finite difference, so the model
        # is not in sync with the final design variables.
        if not self.continue_iteration():
            dvals = [float(val) for val in self.design_vals]
            self.set_parameters(dvals)
            super(NEWSUMTdriver, self).run_iteration()
        self.record_case()
    def _config_newsumt(self):
        """Set up arrays for the Fortran newsumt routine, and perform some
        validation and make sure that array sizes are consistent.
        """
        params = self.get_parameters().values()
        ndv = len( params )
        if ndv < 1:
            self.raise_exception('no parameters specified', RuntimeError)
        # Create some information arrays using our Parameter data
        self._lower_bounds = zeros(ndv)
        self._upper_bounds = zeros(ndv)
        self._iside = zeros(ndv)
        # Per-parameter finite-difference steps, falling back to the default.
        self.fdcv = ones(ndv)*self.default_fd_stepsize
        for i, param in enumerate(params):
            self._lower_bounds[i] = param.low
            self._upper_bounds[i] = param.high
            # The way Parameters presently work, we always specify an
            # upper and lower bound
            self._iside[i] = 3
            if param.fd_step:
                self.fdcv[i] = param.fd_step
        # ifd selects NEWSUMT's derivative mode: 0 when we supply gradients
        # through our differentiator, -4 otherwise — presumably meaning
        # NEWSUMT finite-differences internally (confirm against the
        # NEWSUMT manual).
        if self.differentiator:
            ifd = 0
        else:
            ifd = -4
        # n1..n4 are the Fortran array dimensions: number of design vars,
        # number of constraints (min 1), packed-triangular Hessian length,
        # and gradient-matrix length (min 1).
        self.n1 = ndv
        ncon = len( self.get_ineq_constraints() )
        if ncon > 0:
            self.n2 = ncon
        else:
            self.n2 = 1
        self.n3 = ( ndv * ( ndv + 1 )) / 2
        if ncon > 0:
            self.n4 = ndv * ncon
        else:
            self.n4 = 1
        self.design_vals = zeros(ndv)
        self.constraint_vals = zeros(ncon)
        # Linear constraint setting
        if len(self.ilin) == 0 :
            if ncon > 0:
                self.ilin = zeros(ncon, dtype=int)
            else:
                self.ilin = zeros(1, dtype=int)
        elif len(self.ilin) != ncon:
            msg = "Dimension of NEWSUMT setting 'ilin' should be equal to " + \
                  "the number of constraints."
            self.raise_exception(msg, RuntimeError)
        # Set initial values in the common blocks
        self.countr.clear()
        self.contrl.clear()
        self.contrl.c = 0.2
        self.contrl.epsgsn = self.epsgsn
        self.contrl.epsodm = self.epsodm
        self.contrl.epsrsf = self.epsrsf
        self.contrl.fdch = 0.05
        self.contrl.g0 = self.g0
        self.contrl.ifd = ifd
        self.contrl.iflapp = 0
        self.contrl.jprint = self.iprint - 1
        self.contrl.jsigng = 1
        self.contrl.lobj = self.lobj
        self.contrl.maxgsn = self.maxgsn
        self.contrl.maxodm = self.maxodm
        self.contrl.maxrsf = self.maxrsf
        self.contrl.mflag = self.mflag
        self.contrl.ndv = ndv
        self.contrl.ntce = ncon
        self.contrl.p = 0.5
        self.contrl.ra = self.ra
        self.contrl.racut = self.racut
        self.contrl.ramin = self.ramin
        self.contrl.stepmx = self.stepmx
        self.contrl.tftn = 0.0
        # work arrays
        self.__design_vals_tmp = zeros(self.n1,'d')
        self._ddobj = zeros( self.n3 )
        self._dg = zeros( self.n4 )
        self._dh = zeros( self.n1 )
        self._dobj = zeros( self.n1 )
        self._g = zeros( self.n2 )
        self._gb = zeros( self.n2 )
        self._g1 = zeros( self.n2 )
        self._g2 = zeros( self.n2 )
        self._g3 = zeros( self.n2 )
        self._s = zeros( self.n1 )
        self._sn = zeros( self.n1 )
        self._iik = zeros( self.n1, dtype=int )
    def _load_common_blocks(self):
        """ Reloads the common blocks using the intermediate info saved in the
        class.
        """
        for name, value in self.contrl.__dict__.items():
            setattr( newsumtinterruptible.contrl, name, value )
        for name, value in self.countr.__dict__.items():
            setattr( newsumtinterruptible.countr, name, value )
    def _save_common_blocks(self):
        """ Saves the common block data to the class to prevent trampling by
        other instances of NEWSUMT.
        """
        common = self.contrl
        for name, value in common.__dict__.items():
            # Cast back to the saved attribute's Python type.
            setattr(common, name, \
                    type(value)(getattr(newsumtinterruptible.contrl, name)))
        common = self.countr
        for name, value in common.__dict__.items():
            setattr(common, name, \
                    type(value)(getattr(newsumtinterruptible.countr, name)))
| [
"[email protected]"
]
| |
163d7c44a7e018cae6d6ff4a03b364723f15cc08 | 487c45df5fcbe7fdf6df5a348f6fe163bbb22033 | /leetcode/875_koko_eating_bananas.py | 20b4f0c350be2d1c309eb1f272a208f5b384aa40 | [
"Unlicense"
]
| permissive | leetcode-notes/daily-algorithms-practice | dba03ac1c55262f6bae7d5aa4dac590c3c067e75 | 2a03499ed0b403d79f6c8451c9a839991b23e188 | refs/heads/master | 2023-06-18T14:14:58.770797 | 2021-07-12T05:27:32 | 2021-07-12T05:27:32 | 264,057,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | class Solution:
def minEatingSpeed(self, piles, H: int) -> int:
low, high = 1, max(piles)
def cannot_finish(k, piles):
total = 0
for p in piles:
total += p//k
if p % k:
total += 1
return total > H
while low < high:
mid = low + (high-low)//2
if cannot_finish(mid, piles):
low = mid + 1
else:
high = mid
return low
"""
Success
Details
Runtime: 500 ms, faster than 53.72% of Python3 online
submissions for Koko Eating Bananas.
Memory Usage: 15.4 MB, less than 76.05% of Python3 online
submissions for Koko Eating Bananas.
Next challenges:
Minimize Max Distance to Gas Station
"""
| [
"[email protected]"
]
| |
eb8bd2bd90dfe1850bd04800fbf208772c98a519 | 8e07b5b7a8dd38e0ef2c7ffc97d0392d886f32e6 | /venv/Lib/site-packages/mypy/typeshed/third_party/2and3/paramiko/server.pyi | f43bc83b05520072133af6f1a6c7ad7944981cc9 | []
| no_license | RodrigoNeto/cursopythonyt | fc064a2e6106324e22a23c54bdb9c31040ac9eb6 | 279dad531e21a9c7121b73d84fcbdd714f435e7e | refs/heads/master | 2023-07-03T00:54:09.795054 | 2021-08-13T12:42:24 | 2021-08-13T12:42:24 | 395,646,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,113 | pyi | import threading
from typing import Any, List, Optional, Tuple, Union
from paramiko.channel import Channel
from paramiko.message import Message
from paramiko.pkey import PKey
from paramiko.transport import Transport
class ServerInterface:
    """Type stub for paramiko's server-side callback interface.

    Subclasses override these hooks to accept or deny authentication,
    channel, and forwarding requests; bodies are ``...`` because this
    is a .pyi stub.
    """
    # --- channel / auth hooks ---
    def check_channel_request(self, kind: str, chanid: int) -> int: ...
    def get_allowed_auths(self, username: str) -> str: ...
    def check_auth_none(self, username: str) -> int: ...
    def check_auth_password(self, username: str, password: str) -> int: ...
    def check_auth_publickey(self, username: str, key: PKey) -> int: ...
    def check_auth_interactive(self, username: str, submethods: str) -> Union[int, InteractiveQuery]: ...
    def check_auth_interactive_response(self, responses: List[str]) -> Union[int, InteractiveQuery]: ...
    def check_auth_gssapi_with_mic(self, username: str, gss_authenticated: int = ..., cc_file: Optional[str] = ...) -> int: ...
    def check_auth_gssapi_keyex(self, username: str, gss_authenticated: int = ..., cc_file: Optional[str] = ...) -> int: ...
    def enable_auth_gssapi(self) -> bool: ...
    # --- port forwarding / global requests ---
    def check_port_forward_request(self, address: str, port: int) -> int: ...
    def cancel_port_forward_request(self, address: str, port: int) -> None: ...
    def check_global_request(self, kind: str, msg: Message) -> Union[bool, Tuple[Any, ...]]: ...
    # --- per-channel feature requests ---
    def check_channel_pty_request(
        self, channel: Channel, term: str, width: int, height: int, pixelwidth: int, pixelheight: int, modes: str
    ) -> bool: ...
    def check_channel_shell_request(self, channel: Channel) -> bool: ...
    def check_channel_exec_request(self, channel: Channel, command: bytes) -> bool: ...
    def check_channel_subsystem_request(self, channel: Channel, name: str) -> bool: ...
    def check_channel_window_change_request(
        self, channel: Channel, width: int, height: int, pixelwidth: int, pixelheight: int
    ) -> bool: ...
    def check_channel_x11_request(
        self, channel: Channel, single_connection: bool, auth_protocol: str, auth_cookie: bytes, screen_number: int
    ) -> bool: ...
    def check_channel_forward_agent_request(self, channel: Channel) -> bool: ...
    def check_channel_direct_tcpip_request(self, chanid: int, origin: Tuple[str, int], destination: Tuple[str, int]) -> int: ...
    def check_channel_env_request(self, channel: Channel, name: str, value: str) -> bool: ...
    def get_banner(self) -> Tuple[Optional[str], Optional[str]]: ...
class InteractiveQuery:
    """Stub for a keyboard-interactive authentication query."""
    name: str  # title of the query
    instructions: str  # user-facing instruction text
    prompts: List[Tuple[str, bool]]  # (prompt text, echo input?) pairs
    def __init__(self, name: str = ..., instructions: str = ..., *prompts: Union[str, Tuple[str, bool]]) -> None: ...
    def add_prompt(self, prompt: str, echo: bool = ...) -> None: ...
class SubsystemHandler(threading.Thread):
    """Stub for the thread that services a single SSH subsystem request."""
    def __init__(self, channel: Channel, name: str, server: ServerInterface) -> None: ...
    def get_server(self) -> ServerInterface: ...
    def start_subsystem(self, name: str, transport: Transport, channel: Channel) -> None: ...
    def finish_subsystem(self) -> None: ...
| [
"[email protected]"
]
| |
87285319b453d6b779837ac5d96b87d989629dbd | 1277c0d30434133a7ce6f4d1db6c04d65b0a49c9 | /backend/findme_20524/wsgi.py | 8403d67f78f9ff859caa0a7e2ffa509f5e7f5195 | []
| no_license | crowdbotics-apps/findme-20524 | aef86f49038e1e06967c3d22fee0968ec769c3b4 | da959e3a82c81a93ce2e6d3388ad610ebc7be7f5 | refs/heads/master | 2022-12-23T10:47:01.480756 | 2020-09-21T19:34:35 | 2020-09-21T19:34:35 | 297,441,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for findme_20524 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default to the project's settings module when DJANGO_SETTINGS_MODULE is unset.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'findme_20524.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) look up.
application = get_wsgi_application()
| [
"[email protected]"
]
| |
8faa8b56120958de0b6f1135e29aabb4e6389a29 | ea4e3ac0966fe7b69f42eaa5a32980caa2248957 | /download/unzip/pyobjc/pyobjc-14/pyobjc/stable/PyOpenGL-2.0.2.01/src/shadow/GL.KTX.buffer_region.0100.py | ac8f5465112ac5e17f3261bbe25ef82d3803a274 | []
| no_license | hyl946/opensource_apple | 36b49deda8b2f241437ed45113d624ad45aa6d5f | e0f41fa0d9d535d57bfe56a264b4b27b8f93d86a | refs/heads/master | 2023-02-26T16:27:25.343636 | 2020-03-29T08:50:45 | 2020-03-29T08:50:45 | 249,169,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,082 | py | # This file was created automatically by SWIG.
# Don't modify this file, modify the SWIG interface instead.
# This file is compatible with both classic and new-style classes.
import _buffer_region
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    """SWIG helper: set an attribute, honoring SWIG-generated setters.

    With static=1 this refuses to create brand-new attributes (emulating
    a non-dynamic class); with static=0 it behaves like plain setattr.
    (Auto-generated by SWIG — code kept byte-identical.)
    """
    if (name == "this"):
        # Adopting the low-level "this" pointer from another wrapper object.
        if isinstance(value, class_type):
            self.__dict__[name] = value.this
            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
            del value.thisown
            return
    # Prefer the SWIG-generated property setter when one exists.
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static) or hasattr(self,name) or (name == "thisown"):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: always allows creating new attributes (static=0).
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # Look up a SWIG-generated property getter; AttributeError otherwise.
    # (Python 2 raise syntax preserved — auto-generated file.)
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError,name
# Pick a base class: new-style 'object' when available, otherwise fall
# back to a classic class (legacy Python compatibility shim from SWIG).
import types
try:
    _object = types.ObjectType
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
del types
# Re-export metadata, wrapper functions, and constants from the C module.
__version__ = _buffer_region.__version__
__date__ = _buffer_region.__date__
__api_version__ = _buffer_region.__api_version__
__author__ = _buffer_region.__author__
__doc__ = _buffer_region.__doc__
glBufferRegionEnabled = _buffer_region.glBufferRegionEnabled
glNewBufferRegion = _buffer_region.glNewBufferRegion
glDeleteBufferRegion = _buffer_region.glDeleteBufferRegion
glReadBufferRegion = _buffer_region.glReadBufferRegion
glDrawBufferRegion = _buffer_region.glDrawBufferRegion
glInitBufferRegionKTX = _buffer_region.glInitBufferRegionKTX
__info = _buffer_region.__info
GL_KTX_FRONT_REGION = _buffer_region.GL_KTX_FRONT_REGION
GL_KTX_BACK_REGION = _buffer_region.GL_KTX_BACK_REGION
GL_KTX_Z_REGION = _buffer_region.GL_KTX_Z_REGION
GL_KTX_STENCIL_REGION = _buffer_region.GL_KTX_STENCIL_REGION
| [
"[email protected]"
]
| |
fbab5560e9894901c5617e613add83c277d25710 | 8e8acc57b63a66cb1450fa4d015d4ddcd74cce85 | /liaoxuefengLessons/ObjectOrientedProgramming/ENUM.py | 5e50eaa8c9becb7d3b84f7e8a321feb1a34f2cb0 | []
| no_license | indeyo/PythonStudy | fc2241db7cec8075a59a307ff47c9de83494844b | 099feb4e4c8dec9e68887cedd95705d831e51b0f | refs/heads/master | 2021-03-29T19:04:24.553848 | 2020-06-05T15:07:33 | 2020-06-05T15:07:33 | 247,978,205 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,062 | py | #!/usr/bin/env python3
#-*- coding: utf-8 -*-
"""
@Project : StudyPython0-100
@File : ENUM.py
@Time : 2019-08-05 22:57:52
@Author : indeyo_lin
@Version :
@Remark :
"""
"""
练习:
把Student的gender属性改造为枚举类型,可以避免使用字符串:
"""
# from enum import Enum, unique
#
# class Gender(Enum):
# Male = 0
# Female = 1
#
# class Student():
#
# def __init__(self, name, gender):
# self.name = name
# self.gender = gender
#
# # 测试:
# # 这道题完全不需要改嘛!!!直接通过
# bart = Student('Bart', Gender.Male)
# if bart.gender == Gender.Male:
# print('测试通过!')
# else:
# print('测试失败!')
from enum import Enum
# Functional-API enum: members are auto-numbered starting at 1 (Jan=1 .. Dec=12).
Month = Enum('Month', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'))
# Month defines no aliases, so iterating the enum visits exactly the
# same members (in the same order) as __members__.items().
for member in Month:
    print(member.name, '=>', member, ',', member.value)
from enum import unique  # bug fix: @unique is used below but was never imported above

@unique  # reject duplicate values among the members
class Weekday(Enum):
    """Days of the week, numbered 0 (Sunday) through 6 (Saturday)."""
    Sun = 0  # Sun's value is set to 0
    Mon = 1
    Tue = 2
    Wed = 3
    Thu = 4
    Fri = 5
    Sat = 6
"[email protected]"
]
| |
3b6e664d5482c257c0400170a43bf6b823377024 | 5cf7f81791a9d66ba495512f0b1d2c8b6cccbd3d | /python/test/test_rhomb_H_and_R.py | ac268c603a510df1fc1881d48b3b0bc262075ef6 | [
"BSD-3-Clause"
]
| permissive | odidev/spglib | 9e0eecbb77b20e09f1affec42af48dc6a1c60e82 | e807f1193ad57af8b916245fc397e4667baaaf92 | refs/heads/develop | 2023-07-03T03:03:37.592891 | 2021-05-17T08:45:07 | 2021-05-17T08:45:07 | 390,248,634 | 0 | 0 | BSD-3-Clause | 2021-07-28T11:44:17 | 2021-07-28T07:01:36 | null | UTF-8 | Python | false | false | 2,280 | py | import unittest
import numpy as np
from spglib import get_symmetry_dataset, find_primitive
from vasp import read_vasp
import os
# Directory containing this test file; data files live under <here>/data/.
data_dir = os.path.dirname(os.path.abspath(__file__))
dirnames = ('trigonal', )
# Space-group numbers of the rhombohedral (R-centered) trigonal groups
# exercised by this test.
rhomb_numbers = (146, 148, 155, 160, 161, 166, 167)
# 3x3 matrix applied to std_lattice in the test below — presumably the
# hexagonal -> rhombohedral basis change; TODO confirm against spglib docs.
tmat = [[0.6666666666666666, -0.3333333333333333, -0.3333333333333333],
        [0.3333333333333333, 0.3333333333333333, -0.6666666666666666],
        [0.3333333333333333, 0.3333333333333333, 0.3333333333333333]]
class TestRhombSettingHR(unittest.TestCase):
    """Primitive lattices of rhombohedral cells must agree between the
    H (hexagonal) and R (rhombohedral) settings."""

    def setUp(self):
        """Collect data files whose names contain a rhombohedral group number."""
        self._filenames = []
        for subdir in dirnames:
            dirname = os.path.join(data_dir, "data", subdir)
            candidates = os.listdir(dirname)
            matches = [name
                       for number in rhomb_numbers
                       for name in candidates
                       if str(number) in name]
            self._filenames.extend(os.path.join(dirname, name)
                                   for name in matches)

    def tearDown(self):
        pass

    def test_rhomb_prim_agreement_over_settings(self):
        """find_primitive and both settings must yield the same lattice."""
        symprec = 1e-5
        for fname in self._filenames:
            cell = read_vasp(fname)
            dataset_H = get_symmetry_dataset(cell, symprec=symprec)
            # The R setting immediately follows the H setting in the
            # hall-number table.
            hall_number_R = dataset_H['hall_number'] + 1
            dataset_R = get_symmetry_dataset(cell,
                                             hall_number=hall_number_R,
                                             symprec=symprec)
            plat, _, _ = find_primitive(cell)
            plat_H = np.dot(dataset_H['std_lattice'].T, tmat).T
            plat_R = dataset_R['std_lattice']
            for left, right in ((plat, plat_H), (plat_R, plat_H), (plat_R, plat)):
                np.testing.assert_allclose(left, right,
                                           atol=1e-5, err_msg="%s" % fname)
if __name__ == '__main__':
    # Run only this test case, with verbose per-test output.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestRhombSettingHR)
    unittest.TextTestRunner(verbosity=2).run(suite)
    # unittest.main()
| [
"[email protected]"
]
| |
61bbe9c6a03dc155f5c1f6a09c732284f2f3acdf | 0d9c964fd7644395a3f0763f484e485fcc67f762 | /new/src/21.03.2020/list_of_business.py | 2e49049cc01217aba3b71d33a8cc65d4af44bb18 | [
"Apache-2.0"
]
| permissive | VladBaryliuk/my_start_tasks | eaa2e6ff031f2f504be11f0f64f5d99bd1a68a0e | bf387543e6fa3ee303cbef04d2af48d558011ed9 | refs/heads/main | 2023-04-14T14:00:08.415787 | 2021-04-24T13:47:38 | 2021-04-24T13:47:38 | 354,538,499 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 291 | py | from tkinter import *
root = Tk()
root.geometry('300x400')

# NOTE(review): this button has no command bound, so clicking it does nothing;
# a save handler was apparently never implemented.
save_button = Button(text='save')
save_button.pack()

# Single-line entry where the user types a new to-do item.
entry = Entry()
entry.pack()

# Multi-line widget that accumulates the to-do items.
# Renamed from ``list`` so the builtin type is no longer shadowed.
todo_box = Text()
todo_box.pack()

def add():
    """Append the entry's current text (plus a newline) to the to-do list."""
    todo = entry.get() + '\n'
    todo_box.insert(END, todo)

add_button = Button(text='enter', command=add)
add_button.pack()
root.mainloop()
| [
"[email protected]"
]
| |
c1f0f56f1f31047cfc5c943b9b8cb27094c83a27 | 69bb1d0e824625876207d492722adfdb9d959ad1 | /Codeforces/antonAndDanik.py | c059ac795188e2be373516cbb3ff30f3a2ece7af | []
| no_license | domiee13/dungcaythuattoan | 8e2859264515e0fac3e9f0642a8b79ce5d966fff | 7e95d037d47d6e4777e9cf56b9827c3e42f556b3 | refs/heads/master | 2023-03-28T03:58:44.225136 | 2021-03-29T10:32:52 | 2021-03-29T10:32:52 | 277,798,242 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,413 | py | # A. Anton and Danik
# time limit per test1 second
# memory limit per test256 megabytes
# inputstandard input
# outputstandard output
# Anton likes to play chess, and so does his friend Danik.
# Once they have played n games in a row. For each game it's known who was the winner — Anton or Danik. None of the games ended with a tie.
# Now Anton wonders, who won more games, he or Danik? Help him determine this.
# Input
# The first line of the input contains a single integer n (1 ≤ n ≤ 100 000) — the number of games played.
# The second line contains a string s, consisting of n uppercase English letters 'A' and 'D' — the outcome of each of the games. The i-th character of the string is equal to 'A' if the Anton won the i-th game and 'D' if Danik won the i-th game.
# Output
# If Anton won more games than Danik, print "Anton" (without quotes) in the only line of the output.
# If Danik won more games than Anton, print "Danik" (without quotes) in the only line of the output.
# If Anton and Danik won the same number of games, print "Friendship" (without quotes).
# Examples
# inputCopy
# 6
# ADAAAA
# outputCopy
# Anton
# inputCopy
# 7
# DDDAADA
# outputCopy
# Danik
# inputCopy
# 6
# DADADA
# outputCopy
# Friendship
# The first input line holds the number of games; it must be consumed even
# though counting letters in the outcome string is sufficient on its own.
n = int(input())
outcomes = input()
anton_wins = outcomes.count('A')
danik_wins = outcomes.count('D')
if anton_wins > danik_wins:
    print("Anton")
elif danik_wins > anton_wins:
    print("Danik")
else:
    print("Friendship")
"[email protected]"
]
| |
4d7886f416baba1c84d182a66f20391da7c27df2 | 0d5c77661f9d1e6783b1c047d2c9cdd0160699d1 | /python/paddle/fluid/tests/unittests/test_row_conv_op.py | 07dcd108689ae6069e30fe22029258d192215549 | [
"Apache-2.0"
]
| permissive | xiaoyichao/anyq_paddle | ae68fabf1f1b02ffbc287a37eb6c0bcfbf738e7f | 6f48b8f06f722e3bc5e81f4a439968c0296027fb | refs/heads/master | 2022-10-05T16:52:28.768335 | 2020-03-03T03:28:50 | 2020-03-03T03:28:50 | 244,155,581 | 1 | 0 | Apache-2.0 | 2022-09-23T22:37:13 | 2020-03-01T13:36:58 | C++ | UTF-8 | Python | false | false | 3,441 | py | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from op_test import OpTest
def row_conv_forward(x, lod, wt):
    """NumPy reference implementation of the row (look-ahead) convolution.

    x:   (T, D) input with all sequences stacked along the first axis.
    lod: level-of-detail list; ``lod[0]`` holds per-sequence lengths.
    wt:  (context_length, D) filter weights.

    Returns a (T, D) array where each timestep mixes up to
    ``context_length`` future timesteps of its own sequence.
    """
    out = np.zeros_like(x)
    # Prefix sums of the sequence lengths give [start, end) boundaries.
    offsets = [0]
    for seq_len in lod[0]:
        offsets.append(offsets[-1] + seq_len)
    context_length = wt.shape[0]
    for start, end in zip(offsets[:-1], offsets[1:]):
        steps = end - start
        for j in range(steps):
            # Clamp the context so we never read past the sequence end.
            for k in range(min(context_length, steps - j)):
                out[start + j, :] += x[start + j + k, :] * wt[k, :]
    return out
class TestRowConvOp1(OpTest):
    """Row-conv forward/backward checks on a small three-sequence batch."""

    def setUp(self):
        self.op_type = "row_conv"
        lod = [[2, 3, 2]]
        total_steps = sum(lod[0])
        feature_size = 16
        context_length = 2
        x = np.random.random((total_steps, feature_size)).astype("float32")
        wt = np.random.random((context_length, feature_size)).astype("float32")
        self.inputs = {'X': (x, lod), 'Filter': wt}
        # Expected output comes from the NumPy reference implementation.
        self.outputs = {'Out': (row_conv_forward(x, lod, wt), lod)}

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Filter'], 'Out', max_relative_error=0.05)

    def test_check_grad_ignore_x(self):
        self.check_grad(
            ['Filter'], 'Out', max_relative_error=0.05, no_grad_set=set('X'))

    def test_check_grad_ignore_wt(self):
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.05, no_grad_set=set('Filter'))
class TestRowConvOp2(OpTest):
    # Larger variant of TestRowConvOp1: longer sequences (20/30/50 steps)
    # and a 35-step context window, to exercise higher-dimensional input.
    def setUp(self):
        self.op_type = "row_conv"
        lod = [[20, 30, 50]]
        T = sum(lod[0])
        D = 35
        context_length = 35
        x = np.random.random((T, D)).astype("float32")
        wt = np.random.random((context_length, D)).astype("float32")
        self.inputs = {'X': (x, lod), 'Filter': wt}
        # Expected output comes from the NumPy reference implementation.
        out = row_conv_forward(x, lod, wt)
        self.outputs = {'Out': (out, lod)}
    def test_check_output(self):
        self.check_output()
    #max_relative_error is increased from 0.05 to 0.06 as for higher
    #dimensional input, the dX on CPU for some values has max_rel_error
    #slightly more than 0.05
    def test_check_grad_normal(self):
        self.check_grad(['X', 'Filter'], 'Out', max_relative_error=0.06)
    def test_check_grad_ignore_x(self):
        self.check_grad(
            ['Filter'], 'Out', max_relative_error=0.06, no_grad_set=set('X'))
    def test_check_grad_ignore_wt(self):
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.06, no_grad_set=set('Filter'))
if __name__ == '__main__':
    # Standard unittest entry point: discovers and runs both test cases.
    unittest.main()
| [
"[email protected]"
]
| |
5f5c03bcd52eb2348ea2bfae56c4eb554064760a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_210/263.py | 07aad036673e87dff6e60957731771366d880485 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,406 | py | import operator
fin = open('B-small-attempt2.in', 'r')
fout = open('output.out', 'w')
tcs = int(fin.readline())
for tc in range(0, tcs):
inptemp = fin.readline().split(' ')
ac = int(inptemp[0])
aj = int(inptemp[1])
acs = list()
ajs = list()
for i in range(0, ac):
acinp = fin.readline().split(' ')
acs.append([int(acinp[0]), int(acinp[1])])
for i in range(0, aj):
ajinp = fin.readline().split(' ')
ajs.append([int(ajinp[0]), int(ajinp[1])])
acs.sort(key=operator.itemgetter(0))
ajs.sort(key=operator.itemgetter(0))
result = -1
if ac == 2 and aj == 0:
time1 = acs[1][1] - acs[0][0]
time2 = acs[1][0] - acs[0][1]
print("time1, 2",time1, time2)
if time1 <= 720 or time2 >= 720:
result = 2
else:
result = 4
if ac == 0 and aj == 2:
time1 = ajs[1][1] - ajs[0][0]
time2 = ajs[1][0] - ajs[0][1]
print("time1, 2", time1, time2)
if time1 <= 720 or time2 >= 720:
result = 2
else:
result = 4
if ac == 1 and aj == 0:
result = 2
if ac == 0 and aj == 1:
result = 2
if ac == 1 and aj == 1:
result = 2
print("Case #%d: %d" %(tc+1, result))
fout.write("Case #%d: %d\n" %(tc+1, result))
fin.close()
fout.close()
| [
"[email protected]"
]
| |
e7846284c7e134592127b48bc185fe593b0949ec | fe7b700cfe3c06d89d18ffad3eeeb3b8220c1759 | /pipeline/feature-classification/exp-3/selection-extraction/pca/pipeline_classifier_mrsi.py | 30793e60571a5f7f0342ae4b772cf21d2691ce80 | [
"MIT"
]
| permissive | DivyaRavindran007007/mp-mri-prostate | 928684a607cf03a2d76ea3e3e5b971bbd3a1dd01 | bd420534b4b5c464e5bbb4a07eabdc8724831f8a | refs/heads/master | 2021-06-08T21:09:15.850708 | 2016-10-20T16:08:57 | 2016-10-20T16:08:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,823 | py | """This pipeline is intended to make the classification of MRSI modality
features."""
from __future__ import division
import os
import numpy as np
from sklearn.externals import joblib
from sklearn.preprocessing import label_binarize
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from protoclass.data_management import GTModality
# Define the path where the patients are stored
path_patients = '/data/prostate/experiments'
# Define the path where the features have been extracted
path_features = '/data/prostate/extraction/mp-mri-prostate'
# Define a list of the path where the feature are kept
mrsi_features = ['mrsi-spectra']
ext_features = ['_spectra_mrsi.npy']
# Define the path of the balanced data
path_balanced = '/data/prostate/balanced/mp-mri-prostate/exp-3/smote'
ext_balanced = '_mrsi.npz'
# Define the path of the ground for the prostate
path_gt = ['GT_inv/prostate', 'GT_inv/pz', 'GT_inv/cg', 'GT_inv/cap']
# Define the label of the ground-truth which will be provided
label_gt = ['prostate', 'pz', 'cg', 'cap']
# Generate the different path to be later treated
path_patients_list_gt = []
# Create the generator
id_patient_list = [name for name in os.listdir(path_patients)
if os.path.isdir(os.path.join(path_patients, name))]
id_patient_list = sorted(id_patient_list)
for id_patient in id_patient_list:
# Append for the GT data - Note that we need a list of gt path
path_patients_list_gt.append([os.path.join(path_patients, id_patient, gt)
for gt in path_gt])
# Load all the data once. Splitting into training and testing will be done at
# the cross-validation time
data = []
data_bal = []
label = []
label_bal = []
for idx_pat in range(len(id_patient_list)):
print 'Read patient {}'.format(id_patient_list[idx_pat])
# For each patient we nee to load the different feature
patient_data = []
for idx_feat in range(len(mrsi_features)):
# Create the path to the patient file
filename_feature = (id_patient_list[idx_pat].lower().replace(' ', '_') +
ext_features[idx_feat])
path_data = os.path.join(path_features, mrsi_features[idx_feat],
filename_feature)
single_feature_data = np.load(path_data)
# Check if this is only one dimension data
if len(single_feature_data.shape) == 1:
single_feature_data = np.atleast_2d(single_feature_data).T
patient_data.append(single_feature_data)
# Concatenate the data in a single array
patient_data = np.concatenate(patient_data, axis=1)
print 'Imbalanced feature loaded ...'
# Load the dataset from each balancing method
data_bal_meth = []
label_bal_meth = []
pat_chg = (id_patient_list[idx_pat].lower().replace(' ', '_') +
ext_balanced)
filename = os.path.join(path_balanced, pat_chg)
npz_file = np.load(filename)
data_bal.append(npz_file['data_resampled'])
label_bal.append(npz_file['label_resampled'])
print 'Balanced data loaded ...'
# Create the corresponding ground-truth
gt_mod = GTModality()
gt_mod.read_data_from_path(label_gt,
path_patients_list_gt[idx_pat])
print 'Read the GT data for the current patient ...'
# Concatenate the training data
data.append(patient_data)
# Extract the corresponding ground-truth for the testing data
# Get the index corresponding to the ground-truth
roi_prostate = gt_mod.extract_gt_data('prostate', output_type='index')
# Get the label of the gt only for the prostate ROI
gt_cap = gt_mod.extract_gt_data('cap', output_type='data')
label.append(gt_cap[roi_prostate])
print 'Data and label extracted for the current patient ...'
# Define the different level of sparsity
sparsity_level = [2, 4, 8, 16, 24, 32, 36]
results_sp = []
for sp in sparsity_level:
result_cv = []
# Go for LOPO cross-validation
for idx_lopo_cv in range(len(id_patient_list)):
# Display some information about the LOPO-CV
print 'Round #{} of the LOPO-CV'.format(idx_lopo_cv + 1)
# Get the testing data
testing_data = data[idx_lopo_cv]
testing_label = np.ravel(label_binarize(label[idx_lopo_cv], [0, 255]))
print 'Create the testing set ...'
# Create the training data and label
# We need to take the balanced data
training_data = [arr for idx_arr, arr in enumerate(data_bal)
if idx_arr != idx_lopo_cv]
training_label = [arr for idx_arr, arr in enumerate(label_bal)
if idx_arr != idx_lopo_cv]
# Concatenate the data
training_data = np.vstack(training_data)
training_label = np.ravel(label_binarize(
np.hstack(training_label).astype(int), [0, 255]))
print 'Create the training set ...'
# Learn the PCA projection
pca = PCA(n_components=sp, whiten=True)
training_data = pca.fit_transform(training_data)
testing_data = pca.transform(testing_data)
# Perform the classification for the current cv and the
# given configuration
crf = RandomForestClassifier(n_estimators=100, n_jobs=-1)
pred_prob = crf.fit(training_data,
np.ravel(training_label)).predict_proba(
testing_data)
result_cv.append([pred_prob, crf.classes_])
results_sp.append(result_cv)
# Save the information
path_store = '/data/prostate/results/mp-mri-prostate/exp-3/selection-extraction/pca/mrsi'
if not os.path.exists(path_store):
os.makedirs(path_store)
joblib.dump(results_sp, os.path.join(path_store,
'results.pkl'))
| [
"[email protected]"
]
| |
06e9af435b48d5945c4ae92e1b4270ba096357cc | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /iBqJcagS56wmDpe4x_7.py | 3acaa1ddc25b89eab9db4328cabbfff41f70461d | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | """
The volume of a spherical shell is the difference between the enclosed volume
of the outer sphere and the enclosed volume of the inner sphere:

Create a function that takes `r1` and `r2` as arguments and returns the volume
of a spherical shell rounded to the nearest thousandth.

### Examples
vol_shell(3, 3) ➞ 0
vol_shell(7, 2) ➞ 1403.245
vol_shell(3, 800) ➞ 2144660471.753
### Notes
The inputs are always positive numbers. `r1` could be the inner radius or the
outer radius, don't return a negative number.
"""
from math import pi
def vol_shell(r1, r2):
    """Volume of the spherical shell between radii r1 and r2.

    The difference of the two enclosed sphere volumes, rounded to the
    nearest thousandth.  Argument order does not matter; the result is
    never negative.
    """
    inner = min(r1, r2) ** 3
    outer = max(r1, r2) ** 3
    return round(4 / 3 * pi * (outer - inner), 3)
| [
"[email protected]"
]
| |
344734125bb7c7899ca6cc6c2558fd173da78d68 | 279ed7207ac2c407487416b595e12f573049dd72 | /pybvk/bvk/bvkmodels/ni_676.py | 8e2c8f20a537ec5b2eaa574c6f66b29f2b1de7de | []
| no_license | danse-inelastic/pybvk | 30388455e211fec69130930f2925fe16abe455bd | 922c8c0a8c50a9fabd619fa06e005cacc2d13a15 | refs/heads/master | 2016-09-15T22:21:13.131688 | 2014-06-25T17:12:34 | 2014-06-25T17:12:34 | 34,995,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | # ni_676.py
# BvK force constants
# Born-von Karman interatomic force-constant parameters for fcc nickel,
# fitted to neutron scattering data at 676 K.
element = "Ni"
lattice_type = "fcc"
temperature = 676 # Units: K
reference = "De Wit, G.A., Brockhouse, B.N.: J. Appl. Phys. 39 (1968) 451"
details = "All fits use the measured elastic constants. This fit uses general force up to fourth neighbour, axially symmetric force for fifth neighbour."
a = 3.52 # lattice parameters in angstroms
# Units: N m^-1
# Keys are neighbour-shell direction labels ("110" = first neighbour, "200"
# = second, etc.); values give the independent Cartesian components of each
# shell's force-constant tensor (components not listed follow by symmetry).
force_constants = { "110": { "xx": 16.250,
                             "zz": -0.970,
                             "xy": 19.390 },
                    "200": { "xx": 1.070,
                             "yy": 0.056 },
                    "211": { "xx": 0.963,
                             "yy": 0.449,
                             "yz": -0.391,
                             "xz": 0.458 },
                    "220": { "xx": 0.115,
                             "zz": -0.457,
                             "xy": 0.222 },
                    "310": { "xx": -0.256,
                             "yy": -0.063,
                             "zz": -0.040,
                             "xy": -0.072 } }
| [
"[email protected]"
]
| |
8ff8c60155eca0198afd7158b8f4dbb5f00a51d5 | 163cb8cae7d364a090565710ee9f347e5cdbf38f | /new_deeplab/utils/get_dataset_colormap_test.py | 90005ebbf542c89e44a7dd4783811474cc59853d | [
"CC-BY-4.0",
"CC-BY-3.0"
]
| permissive | abhineet123/river_ice_segmentation | 2b671f7950aac6ab2b1185e3288490bc5e079bc1 | df694107be5ad6509206e409f5cde4428a715654 | refs/heads/master | 2023-05-01T11:52:10.897922 | 2023-04-25T22:55:04 | 2023-04-25T22:55:04 | 179,993,952 | 15 | 8 | null | null | null | null | UTF-8 | Python | false | false | 3,955 | py | # Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for get_dataset_colormap.py."""
import numpy as np
import tensorflow as tf
from new_deeplab.utils import get_dataset_colormap
class VisualizationUtilTest(tf.test.TestCase):
    """Unit tests for the dataset colormap helpers."""

    def testBitGet(self):
        """bit_get must return the requested bit of the integer."""
        # 9 == 0b1001: bits 0..3 are 1, 0, 0, 1.
        for bit_index, expected in enumerate((1, 0, 0, 1)):
            self.assertEqual(expected,
                             get_dataset_colormap.bit_get(9, bit_index))

    def testPASCALLabelColorMapValue(self):
        """Spot-check entries of the generated PASCAL colormap."""
        cmap = get_dataset_colormap.create_pascal_label_colormap()
        expected_rows = {
            5: [128., 0., 128.],
            23: [128., 192., 128.],
            37: [128., 0., 192.],
            127: [224., 192., 192.],
            175: [192., 160., 192.],
        }
        for row, rgb in expected_rows.items():
            self.assertTrue(np.array_equal(rgb, cmap[row, :]))

    def testLabelToPASCALColorImage(self):
        """Label values must map to the expected RGB triples."""
        label = np.array([[0, 16, 16], [52, 7, 52]])
        expected = np.array([
            [[0, 0, 0], [0, 64, 0], [0, 64, 0]],
            [[0, 64, 192], [128, 128, 128], [0, 64, 192]]
        ])
        colored = get_dataset_colormap.label_to_color_image(
            label, get_dataset_colormap.get_pascal_name())
        self.assertTrue(np.array_equal(expected, colored))

    def testUnExpectedLabelValueForLabelToPASCALColorImage(self):
        """Values outside the colormap range must raise ValueError."""
        with self.assertRaises(ValueError):
            get_dataset_colormap.label_to_color_image(
                np.array([[120], [300]]),
                get_dataset_colormap.get_pascal_name())

    def testUnExpectedLabelDimensionForLabelToPASCALColorImage(self):
        """A 1-D label array must raise ValueError."""
        with self.assertRaises(ValueError):
            get_dataset_colormap.label_to_color_image(
                np.array([120]), get_dataset_colormap.get_pascal_name())

    def testGetColormapForUnsupportedDataset(self):
        """Unknown dataset names must raise ValueError."""
        with self.assertRaises(ValueError):
            get_dataset_colormap.create_label_colormap('unsupported_dataset')

    def testUnExpectedLabelDimensionForLabelToADE20KColorImage(self):
        """A 1-D label array must raise ValueError for ADE20K as well."""
        with self.assertRaises(ValueError):
            get_dataset_colormap.label_to_color_image(
                np.array([250]), get_dataset_colormap.get_ade20k_name())

    def testFirstColorInADE20KColorMap(self):
        """ADE20K labels must map to their catalogue colors."""
        label = np.array([[1, 3], [10, 20]])
        expected = np.array([
            [[120, 120, 120], [6, 230, 230]],
            [[4, 250, 7], [204, 70, 3]]
        ])
        colored = get_dataset_colormap.label_to_color_image(
            label, get_dataset_colormap.get_ade20k_name())
        self.assertTrue(np.array_equal(colored, expected))

    def testMapillaryVistasColorMapValue(self):
        """Spot-check entries of the Mapillary Vistas colormap."""
        cmap = get_dataset_colormap.create_mapillary_vistas_label_colormap()
        self.assertTrue(np.array_equal([190, 153, 153], cmap[3, :]))
        self.assertTrue(np.array_equal([102, 102, 156], cmap[6, :]))
if __name__ == '__main__':
    # Delegate to the TensorFlow test runner (wraps unittest discovery).
    tf.test.main()
| [
"[email protected]"
]
| |
ddfdd2f0efe461b056235acb80be18b8c1228721 | 34165333483426832b19830b927a955649199003 | /publish/library_app/reports/library_book_report.py | f1b54fc8a5cc254109a6f75a8fa9f9b3ecad1cee | []
| no_license | terroristhouse/Odoo12 | 2d4315293ac8ca489d9238f464d64bde4968a0a9 | d266943d7affdff110479656543521889b4855c9 | refs/heads/master | 2022-12-02T01:09:20.160285 | 2020-08-16T13:56:07 | 2020-08-16T13:56:07 | 283,630,459 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | from odoo import fields,models
class BookReport(models.Model):
    """Read-only reporting model backed by a SQL view over library_book.

    ``_auto = False`` stops Odoo from creating a physical table for this
    model; the backing view is (re)created in :meth:`init` on module
    install/upgrade.
    """
    _name = 'library.book.report'
    _description = 'Book Report'
    _auto = False
    # Mirrors of the corresponding library_book columns exposed by the view.
    name = fields.Char('Title')
    publisher_id = fields.Many2one('res.partner')
    date_published = fields.Date()
    def init(self):
        # Rebuild the SQL view; only active books are exposed to the report.
        self.env.cr.execute("""
            CREATE OR REPLACE VIEW library_book_report AS
            (SELECT * FROM library_book WHERE active=True)
            """)
| [
"[email protected]"
]
| |
99831b86797def2356ed377f20ea20834b08bcec | 94d4ccd443a37c8090a84d730d006edef869b816 | /recursion_and_backtracking/rat_in_maze.py | 8fde6e9bcdff326a7021e497c7d29b0c638e1f3d | []
| no_license | sudo-hemant/CP_CipherSchools | e0fc51d6f77f80709a88a7711ef6360f1fdd13e3 | 4f741f5f6fbbb182bd03135fb3180f5a40acbb1e | refs/heads/master | 2023-03-09T20:59:59.704014 | 2021-02-19T15:27:15 | 2021-02-19T15:27:15 | 338,825,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py |
# https://practice.geeksforgeeks.org/problems/rat-maze-with-multiple-jumps-1587115621/1/?track=DSASP-Backtracking&batchId=154
from collections import deque
def solve(n, maze):
    """Search for a rat-maze path with jumps; print the path matrix or -1."""
    path_marks = [[0] * n for _ in range(n)]
    if not is_path(0, 0, path_marks, n, maze):
        print(-1)
    else:
        print_sol(n, path_marks)
def is_path(i, j, res, n, maze):
    """Depth-first search from (i, j), marking the path with 1s in ``res``.

    From a cell the rat may jump 1..maze[i][j] cells to the right or down.
    Returns True once the bottom-right corner is reached; unwinds the mark
    on dead ends (backtracking).
    """
    if i == n - 1 and j == n - 1:
        res[i][j] = 1
        return True
    if not is_safe(i, j, n, maze):
        return False
    res[i][j] = 1
    # Jumps are capped at n - 1: anything longer leaves the grid anyway.
    max_jump = min(maze[i][j], n - 1)
    for jump in range(1, max_jump + 1):
        # Try moving right first, then down, mirroring the original order.
        if is_path(i, j + jump, res, n, maze):
            return True
        if is_path(i + jump, j, res, n, maze):
            return True
    res[i][j] = 0
    return False
def is_safe(i, j, n, maze):
    """True when (i, j) lies inside the n x n grid and the cell is nonzero."""
    return 0 <= i < n and 0 <= j < n and bool(maze[i][j])
def print_sol(n, sol):
    """Print the n x n solution matrix, one space-separated row per line."""
    for row in sol[:n]:
        # sep=" " between cells plus the trailing " " matches the original
        # per-cell ``print(v, end=" ")`` output exactly.
        print(*row[:n], end=" \n")
if __name__ == "__main__":
t = int(input())
while(t>0):
n = int(input())
maze = [[0 for i in range(n)] for j in range(n)]
for i in range(n):
maze[i] = [int(x) for x in input().strip().split()]
solve(n, maze)
t=t-1 | [
"[email protected]"
]
| |
80edbd1d65d545f84f4122c4822dc23a4c57785d | 70d39e4ee19154a62e8c82467ef75b601e584738 | /pyth3/mysql/just_mysql_pandas_things_.py | 84e264411f2cd3f02839fd45febb7a3e86ce9f2e | []
| no_license | babywyrm/sysadmin | 6f2724be13ae7e5b9372278856a8c072073beffb | 2a5f3d29c7529bc917d4ff9be03af30ec23948a5 | refs/heads/master | 2023-08-16T03:50:38.717442 | 2023-08-16T03:05:55 | 2023-08-16T03:05:55 | 210,228,940 | 10 | 5 | null | 2023-05-01T23:15:31 | 2019-09-22T23:42:50 | PowerShell | UTF-8 | Python | false | false | 6,634 | py | # MySQL Querying Using Pandas
# Author: Elena Adlaf
# Version 1.2, 10/16/17
# This Python file shows how to query results from table, 't', in database, 'af', stored on a local MySQL server while
# importing the values directly into a Pandas dataframe.
# The table lists details about pieces created by the custom furniture business, Artfully Functional,
# with fields for ID, size type, year built, labor hours, materials cost, sale prices (wholesale or retail,
# before or after sales tax) and potential profits. A second table, 'a', contains additional information and is
# used to demonstrate queries indexing or joining multiple tables.
# Import modules.
import mysql.connector
import pandas as pd
# Create variables for 1) a connector to the local database with user and password and 2) the read-to-pandas command
# NOTE(review): the password is a placeholder; in real use read credentials
# from the environment or a config file, never hard-code them.
cnx = mysql.connector.connect(user='root', password='...', database='af')
g = pd.read_sql_query
# To import the entire table, 't', into a Pandas dataframe:
df = g('SELECT * FROM t', cnx)
# Look at the shape of the dataframe and index the first five records for all of the fields.
print(df.shape)
print(df.iloc[0:5, 0:14])
print(df.iloc[0:5, 14:])
# Most tables will likely be too large to import in full, so we can import only the data of interest by
# querying the database through Pandas.
# All queries below are static strings (no user input interpolated), so SQL
# injection is not a concern here; use parameterized queries if that changes.
# Return the column names and column info of the table, 't'.
col_names = g('SHOW COLUMNS FROM t', cnx)
print(col_names)
# Select only Name and Retail_High columns and limit the number of records returned.
namehighretail_firstten = g('SELECT Name, Retail_High FROM t LIMIT 10', cnx)
print(namehighretail_firstten)
# Select all unique values from the Yr column.
years = g('SELECT DISTINCT Yr FROM t', cnx)
print(years)
# Return the number of records in the table.
num_tablerows = g('SELECT COUNT(*) FROM t', cnx)
print(num_tablerows)
# Return the number of non-missing values in the Labor column.
num_laborvalues = g('SELECT COUNT(Labor) FROM t', cnx)
print(num_laborvalues)
# Return the number of distinct values in Yr column.
num_years = g('SELECT COUNT(DISTINCT Yr) FROM t', cnx)
print(num_years)
# Select names of all pieces with a Retail_Low value greater than or equal to $500
over500usd = g('SELECT Name FROM t WHERE Retail_Low >= 500', cnx)
print(over500usd)
# Select the ID number of all pieces whose Sale value is null.
idprofitnull = g('SELECT ID FROM t WHERE Sale IS NULL', cnx)
print(idprofitnull)
# Return the number of items whose build year is not 2017.
num_not2017 = g('SELECT COUNT(*) FROM t WHERE Yr <> 2017', cnx)
print(num_not2017)
# Select name and location (disposition) of items with a low retail price over 100 or a low wholesale price over 50.
nameloc_price = g('SELECT Name, Disposition FROM t WHERE Retail_Low > 100 OR Wholesale_Low > 50', cnx)
print(nameloc_price)
# Select the labor hours of items built in 2015 or 2017 and located at Holloway or Art Show
laborhours_notforsale = g("SELECT Labor FROM t WHERE (Yr = 2015 OR Yr = 2017) AND (Disposition = 'Holloway' OR "
                          "Disposition = 'Art Show')", cnx)
print(laborhours_notforsale)
# Select the class of items whose potential profit (retail high) is between 10 and 50.
class_ptlprofit = g('SELECT Class_version FROM t WHERE Ptnlprofit_rtl_High BETWEEN 10 AND 50', cnx)
print(class_ptlprofit)
# Select the disposition, class, and potential high wholesale profit for the items with disposition as Classic Tres,
# Art Show or For Sale. Calculate the sum of the returned potential profits.
ptlprofit_forsale = g("SELECT Disposition, Class_version, Ptnlprofit_whsle_High FROM t WHERE Disposition IN "
                      "('Classic Tres', 'Art Show', 'For Sale') AND Ptnlprofit_whsle_High > 0", cnx)
print(ptlprofit_forsale)
print(ptlprofit_forsale.sum(axis=0, numeric_only=True))
# Select the ID, name and class_version designation of all C-class items.
c_class_items = g("SELECT ID, Name, Class_version FROM t WHERE Class_version LIKE 'C%'", cnx)
print(c_class_items)
# Select name and retail prices of all tables. Calculate the lowest and highest table prices.
tables_retail = g("SELECT Name, Retail_Low, Retail_High FROM t WHERE Name LIKE '% Table' AND Retail_Low <> 0", cnx)
print(tables_retail)
print(tables_retail.agg({'Retail_Low' : ['min'], 'Retail_High' : ['max']}))
# Select names and labor hours of tables that don't include side tables.
noside = g("SELECT Name, Labor FROM t WHERE Name LIKE '% Table' AND Name NOT LIKE '%_ide %'", cnx)
print(noside)
# Return the average retail high price.
ave_rtlhigh = g('SELECT AVG(Retail_High) FROM t', cnx)
print(ave_rtlhigh)
# Return the sum of the retail low prices minus the sum of the Materials_Base column aliased as est_profit.
rtllow_minuscost = g('SELECT SUM(Retail_Low) - SUM(Materials_Base) AS est_profit FROM t', cnx)
print(rtllow_minuscost)
# Return the maximum materials base value increased by 20% aliased as max_material.
max_material = g('SELECT MAX(Materials_Base)*1.2 AS max_material FROM t', cnx)
print(max_material)
# Select the name and price of the lowest wholesale priced cabinet that is for sale, aliased as cabinet_low.
cabinet_low = g("SELECT Name, MIN(Wholesale_Low) AS cabinet_low FROM t WHERE Name LIKE '% Cabinet' AND Disposition = "
                "'For Sale'", cnx)
print(cabinet_low)
# Select names of pieces built in 2017 in descending order by retail_high price.
high_to_low_priced = g('SELECT Name FROM t WHERE Yr = 2017 ORDER BY Retail_High DESC', cnx)
print(high_to_low_priced)
# Select number of items and years built grouped by year in descending order by count.
groupyear_sortcount = g('SELECT COUNT(*), Yr FROM t GROUP BY Yr ORDER BY COUNT(*) DESC', cnx)
print(groupyear_sortcount)
# Select Class_version categories (A1, B1, C1) aliased as Size and average wholesale low price grouped by Size.
size_aveprice = g("SELECT Class_version AS Size, AVG(Wholesale_Low) FROM t WHERE Class_version IN ('A1', 'B1', "
                  "'C1') GROUP BY Size", cnx)
print(size_aveprice)
# The items in tables 't' and 'a' have the same ID column, so information can be queried from both simultaneously with
# the JOIN command.
# Return the column names and column info of the table, 'a'.
table_a_colnames = g('SHOW COLUMNS FROM a', cnx)
print(table_a_colnames)
# Select the ID and disposition from table 't' and the corresponding number of website photos for those items from
# table 'a'.
webphotos = g('SELECT ID, Class_version, Disposition, Website FROM t JOIN a ON ID = ID2 WHERE Website > 0', cnx)
print(webphotos)
# After querying is complete, cnx.close() closes the connection to the database.
cnx.close()
| [
"[email protected]"
]
| |
a54dba4d3ebcdf78eb1020f011bb1570ffd11720 | 3595d51ff2499bb990b87a25b17516edf6907696 | /boards/models.py | a9fc0f9597a2654f291a202c6c60a21410fac535 | [
"Apache-2.0"
]
| permissive | maxlipsky/infomate.club | 01fa55b3dfd318212b0c328dd48019b585d3ef9d | 9729b568622120f8cba3d22fefdcfba81d1b5b88 | refs/heads/master | 2020-12-08T19:53:37.231403 | 2020-01-16T19:04:14 | 2020-01-16T19:04:14 | 233,079,581 | 1 | 0 | Apache-2.0 | 2020-01-10T15:49:08 | 2020-01-10T15:49:07 | null | UTF-8 | Python | false | false | 6,435 | py | import uuid
from datetime import datetime, timedelta
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.db import models
from slugify import slugify
from boards.icons import DOMAIN_ICONS
class Board(models.Model):
    """A curated link board with curator profile and layout metadata."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    slug = models.SlugField(unique=True)
    name = models.CharField(max_length=120, db_index=True)
    avatar = models.URLField(max_length=512, null=True)
    # Curator (the person maintaining the board) presentation fields.
    curator_name = models.CharField(max_length=120)
    curator_title = models.CharField(max_length=120)
    curator_url = models.URLField(null=True)
    curator_bio = models.CharField(max_length=120, null=True)
    curator_footer = models.TextField(null=True)
    # Raw board definition; presumably the YAML/JSON the board is built
    # from -- TODO confirm against the code that writes it.
    schema = models.TextField(null=True)
    created_at = models.DateTimeField(db_index=True)
    updated_at = models.DateTimeField()
    refreshed_at = models.DateTimeField(null=True)
    is_visible = models.BooleanField(default=True)
    is_private = models.BooleanField(default=True)
    index = models.PositiveIntegerField(default=0)
    class Meta:
        db_table = "boards"
        ordering = ["index", "name"]
    def save(self, *args, **kwargs):
        # Stamp created_at on first save, derive the slug from the name
        # once, and always bump updated_at.
        # NOTE(review): uses naive datetime.utcnow(); Django's
        # timezone.now() would be the timezone-aware equivalent.
        if not self.created_at:
            self.created_at = datetime.utcnow()
        if not self.slug:
            self.slug = slugify(self.name).lower()
        self.updated_at = datetime.utcnow()
        return super().save(*args, **kwargs)
    def board_name(self):
        # Prefer the explicit board name, fall back to the curator's name.
        return self.name or self.curator_name
    def natural_refreshed_at(self):
        # "now..." signals a board that has never been refreshed yet.
        if not self.refreshed_at:
            return "now..."
        return naturaltime(self.refreshed_at)
class BoardBlock(models.Model):
    """A named section of a board; groups a set of feeds."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    board = models.ForeignKey(Board, related_name="blocks", on_delete=models.CASCADE, db_index=True)
    name = models.CharField(max_length=512, null=True)
    slug = models.SlugField()
    created_at = models.DateTimeField(db_index=True)
    updated_at = models.DateTimeField()
    index = models.PositiveIntegerField(default=0)
    class Meta:
        db_table = "board_blocks"
        ordering = ["index"]
    def save(self, *args, **kwargs):
        # Same bookkeeping as Board.save: stamp created_at once, derive
        # the slug from the name once, always bump updated_at (naive UTC).
        if not self.created_at:
            self.created_at = datetime.utcnow()
        if not self.slug:
            self.slug = slugify(self.name).lower()
        self.updated_at = datetime.utcnow()
        return super().save(*args, **kwargs)
class BoardFeed(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
board = models.ForeignKey(Board, related_name="feeds", on_delete=models.CASCADE, db_index=True)
block = models.ForeignKey(BoardBlock, related_name="feeds", on_delete=models.CASCADE, db_index=True)
name = models.CharField(max_length=512, null=True)
comment = models.TextField(null=True)
url = models.URLField(max_length=512)
icon = models.URLField(max_length=512, null=True)
rss = models.URLField(max_length=512, null=True)
created_at = models.DateTimeField(db_index=True)
last_article_at = models.DateTimeField(null=True)
refreshed_at = models.DateTimeField(null=True)
frequency = models.FloatField(default=0.0) # per week
columns = models.SmallIntegerField(default=1)
articles_per_column = models.SmallIntegerField(default=15)
index = models.PositiveIntegerField(default=0)
class Meta:
db_table = "board_feeds"
ordering = ["index"]
def save(self, *args, **kwargs):
if not self.created_at:
self.created_at = datetime.utcnow()
self.updated_at = datetime.utcnow()
return super().save(*args, **kwargs)
def last_articles(self):
return self.articles.all()[:15 * self.columns]
def articles_by_column(self):
articles = self.articles.all()[:self.articles_per_column * self.columns]
return [
(column, articles[column * self.articles_per_column:self.articles_per_column * (column + 1)])
for column in range(self.columns)
]
def natural_last_article_at(self):
if not self.last_article_at:
return None
return naturaltime(self.last_article_at)
class Article(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
uniq_id = models.TextField(db_index=True)
board = models.ForeignKey(Board, related_name="articles", on_delete=models.CASCADE, db_index=True)
feed = models.ForeignKey(BoardFeed, related_name="articles", on_delete=models.CASCADE, db_index=True)
url = models.URLField(max_length=2048)
type = models.CharField(max_length=16)
domain = models.CharField(max_length=256, null=True)
title = models.CharField(max_length=256)
image = models.URLField(max_length=512, null=True)
description = models.TextField(null=True)
summary = models.TextField(null=True)
created_at = models.DateTimeField(db_index=True)
updated_at = models.DateTimeField()
class Meta:
db_table = "articles"
ordering = ["-created_at"]
def save(self, *args, **kwargs):
if not self.created_at:
self.created_at = datetime.utcnow()
self.updated_at = datetime.utcnow()
return super().save(*args, **kwargs)
def icon(self):
article_icon = DOMAIN_ICONS.get(self.domain)
if not article_icon:
return ""
if article_icon.startswith("fa:"):
return f"""<i class="{article_icon[3:]}"></i> """
return f"""<img src="{article_icon}" alt="{self.domain}" class="icon"> """
def natural_created_at(self):
if not self.created_at:
return None
return naturaltime(self.created_at)
def is_fresh(self):
frequency = self.feed.frequency
now = datetime.utcnow()
if frequency <= 1:
# low frequency feed — any post this week is new
return self.created_at > now - timedelta(days=7)
elif frequency <= 20:
# average frequency — mark today posts
return self.created_at > now - timedelta(days=1)
elif frequency >= 100:
# extra high frequency — mark newest posts
return self.created_at > now - timedelta(hours=3)
# normal frequency - mark 6-hour old posts
return self.created_at > now - timedelta(hours=6)
| [
"[email protected]"
]
| |
b8241865b3838ea090162eb428a1c8b48348b10e | e692a9074d21c456dbdcb938ce7c74d8254f6ad9 | /Module 010/module 010.py | 815c2efdfd0e3789bf37330b86d323acc2079d0b | []
| no_license | Marius-Juston/ECE-110 | e29b08d5a305a315467635a138ef62a1a638e4ed | 962761b9bb23b02cc2a687bc691d568db8c82b31 | refs/heads/master | 2022-05-29T08:17:58.809019 | 2020-05-05T16:48:45 | 2020-05-05T16:48:45 | 261,530,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | import matplotlib.pyplot as plt
import numpy as np
from matplotlib.axes import Axes
from matplotlib.figure import Figure
if __name__ == '__main__':
with open('data-20200502-1713.circuitjs.txt', 'r') as file:
time_step = float(file.readline().split(' ')[4])
lines = tuple(map(int, file.readlines()))
x = np.arange(0, len(lines)) * time_step
fig: Figure = plt.figure(figsize=(11.69, 8.27))
ax: Axes = fig.gca()
ax.plot(x, lines)
ax.set_xlabel("Time (s)")
ax.set_ylabel("Voltage (V)")
ax.set_title("Audio Output (mjuston2)")
fig.tight_layout()
fig.savefig("figure.png")
# plt.show()
| [
"[email protected]"
]
| |
2c4e9748a4fe10c33bdca30bdba1637018100b86 | 9ec4bc3cdba9e46fe05712daeec3e35f5b6bb704 | /hallicrafter2/device/ics.py | ce1fe1212d2c1fc2085fe53bbaeb7981d6a1c3fd | []
| no_license | derekmerck/hallicrafter | 04d7d31017079fcc0c9c9361ad7e653f6e0e6418 | b9439bb9f9b311ca1f8a224ce25c64c836901381 | refs/heads/master | 2020-06-02T03:23:23.086094 | 2019-09-03T02:06:03 | 2019-09-03T02:06:03 | 191,018,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,220 | py | from .device import Device
class SirenIC(Device):
# Control a UM3561 ic
# See https://www.instructables.com/id/Siren-Generation-using-IC-UM3561/ for pinout
#
# 1. sel1
# 2. gnd
# 3. out -> 10k ohm -> NPN transistor that drives speaker gnd line
# 4. not connected (testing)
# 5. active (3-5vin)
# 6. sel2
# 7. osc1
# 8. osc2 bridge -> osc1 with a 220k ohm resistor
#
# S1 S2 Sound
# --------------------
# NC NC Police (default)
# 5v NC Fire brigade
# Gnd NC Ambulance
# Any 5v Machine gun
class AlarmProfile(object):
POLICE = "police"
FIRE = "fire"
AMBULANCE = "ambulance"
MACHINE_GUN = "machine gun"
def __init__(self, pin_active, pin_sel1, pin_sel2, name="ic_srn0", interval=0.1, *args, **kwargs):
Device.__init__(self, name=name, interval=interval, *args, **kwargs)
import digitalio
self.pin_active = digitalio.DigitalInOut(pin_active)
self.pin_active.direction = digitalio.Direction.OUTPUT
self.pin_sel1 = digitalio.DigitalInOut(pin_sel1)
self.pin_sel1.direction = digitalio.Direction.OUTPUT
self.pin_sel2 = digitalio.DigitalInOut(pin_sel2)
self.pin_sel2.direction = digitalio.Direction.OUTPUT
self.data["active"] = False
self.data["profile"] = SirenIC.AlarmProfile.POLICE
def write(self):
if self.data["profile"] == SirenIC.AlarmProfile.POLICE:
self.pin_sel1.value = False
self.pin_sel2.value = False
elif self.data["profile"] == SirenIC.AlarmProfile.FIRE:
self.pin_sel1.value = True
self.pin_sel2.value = False
elif self.data["profile"] == SirenIC.AlarmProfile.AMBULANCE:
self.pin_sel1.value = False
self.pin_sel2.value = True
elif self.data["profile"] == SirenIC.AlarmProfile.MACHINE_GUN:
self.pin_sel1.value = True
self.pin_sel2.value = True
else:
raise ValueError("Unknown alarm profile {}".format(self.data["profile"]))
self.pin_active.value = self.data["active"]
# print("Siren is {}".format(self.pin_active.value))
| [
"[email protected]"
]
| |
20965d57bf76e26a205182ffc8240ddad375cf2b | 0db97db08743783019efe022190f409d22ff95bd | /aliyun/api/rest/Rds20140815DescribeModifyParameterLogRequest.py | a738e55de39ed911b27aa2c242f097a771646719 | [
"Apache-2.0"
]
| permissive | snowyxx/aliyun-python-demo | 8052e2a165f1b869affe632dda484d6ca203bd9b | ed40887ddff440b85b77f9b2a1fcda11cca55c8b | refs/heads/master | 2021-01-10T03:37:31.657793 | 2016-01-21T02:03:14 | 2016-01-21T02:03:14 | 49,921,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | '''
Created by auto_sdk on 2015.06.02
'''
from aliyun.api.base import RestApi
class Rds20140815DescribeModifyParameterLogRequest(RestApi):
def __init__(self,domain='rds.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DBInstanceId = None
self.EndTime = None
self.PageNumber = None
self.PageSize = None
self.StartTime = None
def getapiname(self):
return 'rds.aliyuncs.com.DescribeModifyParameterLog.2014-08-15'
| [
"[email protected]"
]
| |
1cf471f736f9047d1985610fbf89b38dffb9bb5d | aeeaf40350a652d96a392010071df8a486c6e79f | /archive/python/Python/binary_search/374.guess-number-higher-or-lower.py | 1f51d98818399ede0698e42d7d7bd0cde96a1879 | [
"MIT"
]
| permissive | linfengzhou/LeetCode | 11e6c12ce43cf0053d86437b369a2337e6009be3 | cb2ed3524431aea2b204fe66797f9850bbe506a9 | refs/heads/master | 2021-01-23T19:34:37.016755 | 2018-04-30T20:44:40 | 2018-04-30T20:44:40 | 53,916,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | # The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num):
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
| [
"[email protected]"
]
| |
4ae49460f06822543fc2ff34e14d8fef115016f7 | 2af6a5c2d33e2046a1d25ae9dd66d349d3833940 | /res_bw/scripts/common/lib/abc.py | 37d2a8d88679def4d589700c441407cc6fa1a0d0 | []
| no_license | webiumsk/WOT-0.9.12-CT | e6c8b5bb106fad71b5c3056ada59fb1aebc5f2b2 | 2506e34bd6634ad500b6501f4ed4f04af3f43fa0 | refs/heads/master | 2021-01-10T01:38:38.080814 | 2015-11-11T00:08:04 | 2015-11-11T00:08:04 | 45,803,240 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 6,294 | py | # 2015.11.10 21:32:36 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/abc.py
"""Abstract Base Classes (ABCs) according to PEP 3119."""
import types
from _weakrefset import WeakSet
class _C:
pass
_InstanceType = type(_C())
def abstractmethod(funcobj):
"""A decorator indicating abstract methods.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract methods are overridden.
The abstract methods can be called using any of the normal
'super' call mechanisms.
Usage:
class C:
__metaclass__ = ABCMeta
@abstractmethod
def my_abstract_method(self, ...):
...
"""
funcobj.__isabstractmethod__ = True
return funcobj
class abstractproperty(property):
"""A decorator indicating abstract properties.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract properties are overridden.
The abstract properties can be called using any of the normal
'super' call mechanisms.
Usage:
class C:
__metaclass__ = ABCMeta
@abstractproperty
def my_abstract_property(self):
...
This defines a read-only property; you can also define a read-write
abstract property using the 'long' form of property declaration:
class C:
__metaclass__ = ABCMeta
def getx(self): ...
def setx(self, value): ...
x = abstractproperty(getx, setx)
"""
__isabstractmethod__ = True
class ABCMeta(type):
"""Metaclass for defining Abstract Base Classes (ABCs).
Use this metaclass to create an ABC. An ABC can be subclassed
directly, and then acts as a mix-in class. You can also register
unrelated concrete classes (even built-in classes) and unrelated
ABCs as 'virtual subclasses' -- these and their descendants will
be considered subclasses of the registering ABC by the built-in
issubclass() function, but the registering ABC won't show up in
their MRO (Method Resolution Order) nor will method
implementations defined by the registering ABC be callable (not
even via super()).
"""
_abc_invalidation_counter = 0
def __new__(mcls, name, bases, namespace):
cls = super(ABCMeta, mcls).__new__(mcls, name, bases, namespace)
abstracts = set((name for name, value in namespace.items() if getattr(value, '__isabstractmethod__', False)))
for base in bases:
for name in getattr(base, '__abstractmethods__', set()):
value = getattr(cls, name, None)
if getattr(value, '__isabstractmethod__', False):
abstracts.add(name)
cls.__abstractmethods__ = frozenset(abstracts)
cls._abc_registry = WeakSet()
cls._abc_cache = WeakSet()
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
return cls
def register(cls, subclass):
"""Register a virtual subclass of an ABC."""
if not isinstance(subclass, (type, types.ClassType)):
raise TypeError('Can only register classes')
if issubclass(subclass, cls):
return
if issubclass(cls, subclass):
raise RuntimeError('Refusing to create an inheritance cycle')
cls._abc_registry.add(subclass)
ABCMeta._abc_invalidation_counter += 1
def _dump_registry(cls, file = None):
"""Debug helper to print the ABC registry."""
print >> file, 'Class: %s.%s' % (cls.__module__, cls.__name__)
print >> file, 'Inv.counter: %s' % ABCMeta._abc_invalidation_counter
for name in sorted(cls.__dict__.keys()):
if name.startswith('_abc_'):
value = getattr(cls, name)
print >> file, '%s: %r' % (name, value)
def __instancecheck__(cls, instance):
"""Override for isinstance(instance, cls)."""
subclass = getattr(instance, '__class__', None)
if subclass is not None and subclass in cls._abc_cache:
return True
subtype = type(instance)
if subtype is _InstanceType:
subtype = subclass
if subtype is subclass or subclass is None:
if cls._abc_negative_cache_version == ABCMeta._abc_invalidation_counter and subtype in cls._abc_negative_cache:
return False
return cls.__subclasscheck__(subtype)
else:
return cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)
def __subclasscheck__(cls, subclass):
"""Override for issubclass(subclass, cls)."""
if subclass in cls._abc_cache:
return True
if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
elif subclass in cls._abc_negative_cache:
return False
ok = cls.__subclasshook__(subclass)
if ok is not NotImplemented:
if not isinstance(ok, bool):
raise AssertionError
if ok:
cls._abc_cache.add(subclass)
else:
cls._abc_negative_cache.add(subclass)
return ok
cls in getattr(subclass, '__mro__', ()) and cls._abc_cache.add(subclass)
return True
for rcls in cls._abc_registry:
if issubclass(subclass, rcls):
cls._abc_cache.add(subclass)
return True
for scls in cls.__subclasses__():
if issubclass(subclass, scls):
cls._abc_cache.add(subclass)
return True
cls._abc_negative_cache.add(subclass)
return False
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\abc.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.10 21:32:36 Střední Evropa (běžný čas)
| [
"[email protected]"
]
| |
96e13ce85156c34b4c16aa46eb26cb5774458507 | b9e5aebb49734ad47825130529bd64e59f690ecf | /chapter_9/die.py | 6c02c0f3e4e49e928c96c1881f050c10ddd4aaf1 | []
| no_license | mikegirenko/python-learning | dab0f67d990d95035f93720986c84aaf422f7a9f | db9e3f0e3897caf703169d1f14b15a9aa1901161 | refs/heads/master | 2021-07-09T08:03:40.535653 | 2020-08-05T00:13:41 | 2020-08-05T00:13:41 | 169,983,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | from random import randint
class Die:
def __init__(self, sides=6):
self. sides = sides
def roll_die(self):
print('\t', randint(1, self.sides))
print('Printing 6 sided roll:')
six_sided_roll = Die()
for i in range(1, 11):
six_sided_roll.roll_die()
print('Printing 10 sided roll:')
ten_sided_roll = Die(10)
i = 1
while i <= 10:
ten_sided_roll.roll_die()
i += 1
print('Printing 20 sided roll:')
twenty_sided_roll = Die(20)
i = 1
while True:
if i == 11:
break
else:
twenty_sided_roll.roll_die()
i += 1
| [
"[email protected]"
]
| |
dd422b4ebe4b9e6aeb1fc219d30133cd31641577 | 296287f05a1efed570b8da9ce56d3f6492126d73 | /snippets/draw_text_in_image.py | fcbb7d055099100cada3113b7ce8812f110ddacb | []
| no_license | formazione/python_book | 145f8a2598b6b75736a7c33a796b9fdd8cff668e | d7822b312c1db028bb70e25385a74b227a9a2609 | refs/heads/main | 2023-07-05T20:15:18.166771 | 2021-08-12T14:14:25 | 2021-08-12T14:14:25 | 320,499,187 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,167 | py | from PIL import Image,ImageDraw,ImageFont
import tkinter as tk
def create_img_with_text(text=""):
if text == "":
text = "Pythonprogramming.altervista.org"
# sample text and font
unicode_text = u"Pythonprogramming.altervista.org"
font = ImageFont.truetype(
"C:\\Program Files\\Android\\Android Studio\\jre\\jre\\lib\\fonts\\DroidSans.ttf",
24,
encoding="unic")
# get the line size
text_width, text_height = font.getsize(unicode_text)
# create a blank canvas with extra space between lines
canvas2 = Image.new('RGB', (text_width + 10, text_height + 10), "orange")
# draw the text onto the text canvas2, and use black as the text color
draw = ImageDraw.Draw(canvas2)
draw.text((5,5), text, 'blue', font)
canvas2.save("mytext.png", "PNG")
canvas2.show()
def win_with_image():
root = tk.Tk()
root.title("Animation")
root.state("zoomed")
canvas = tk.Canvas(root, width=400, height=500)
print(canvas['width'])
canvas.pack()
img = tk.PhotoImage(file="mytext.png")
canvas.create_image(int(canvas['width']) // 2,int(canvas['height']) // 2, image=img, anchor=tk.W)
root.mainloop()
create_img_with_text("This is cool")
# win_with_image()
| [
"[email protected]"
]
| |
ee3dbda8b19a10b1e5348fd84e2fbaa94ac30ee0 | 07504838d12c6328da093dce3726e8ed096cecdb | /pylon/resources/properties/minPressureSetpoint.py | 37a8d2471523d2fe28bceff3606f5ef910265dfe | []
| no_license | lcoppa/fiat-lux | 9caaa7f3105e692a149fdd384ec590676f06bf00 | 7c166bcc08768da67c241078b397570de159e240 | refs/heads/master | 2020-04-04T02:47:19.917668 | 2013-10-10T10:22:51 | 2013-10-10T10:22:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,283 | py | """minPressureSetpoint standard property type, originally defined in resource
file set standard 00:00:00:00:00:00:00:00-0."""
# Copyright (C) 2013 Echelon Corporation. All Rights Reserved.
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software" to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# This file is generated from device resource files using an automated
# database to source code conversion process. Grammar and punctuation within
# the embedded documentation may not be correct, as this data is gathered and
# combined from several sources. The machine-generated code may not meet
# compliance with PEP-8 and PEP-257 recommendations at all times.
# Generated at 23-Sep-2013 09:14.
import pylon.resources.datapoints.press
from pylon.resources.standard import standard
class minPressureSetpoint(pylon.resources.datapoints.press.press):
"""minPressureSetpoint standard property type. Minimum pressure.
Setpoint for the operational low pressure limit."""
def __init__(self):
super().__init__(
)
self._default_bytes = b'\x00\x00'
self._original_name = 'SCPTminPressureSetpoint'
self._property_scope, self._property_key = 0, 234
self._definition = standard.add(self)
if __name__ == '__main__':
# unit test code.
item = minPressureSetpoint()
pass
| [
"[email protected]"
]
| |
078c40258e6bf4fcda2fc2317f847dddfb2bce21 | 83292e8ee5b14a30f61dcaf3067129e161832366 | /douban_film.py | a798c36cfe2919cb8fa74d911c62c1883780d1e7 | []
| no_license | A620-Work-Exchange/Application-Integration | 19197513f1aef67f27b4b984a736cd28ff9c8ac1 | baada55dd1b988112afd6bd4dc781670983337b8 | refs/heads/master | 2020-05-20T20:03:38.842375 | 2019-05-18T09:11:20 | 2019-05-18T09:11:20 | 185,736,359 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | from urllib.request import urlopen
from time import sleep
import requests
from json_util import JsonUtil
import json
def get_top_film():
url = 'https://api.douban.com/v2/movie/top250'
for start in range(0, 250, 50):
req = requests.get(url, params={'start': start, 'count': 50})
data = req.json()
for movie in data['subjects']:
print(movie)
movie_str = json.dumps(movie)
JsonUtil.write_file('1.json', movie_str)
sleep(0.3)
get_top_film() | [
"[email protected]"
]
| |
55cf1f4ce50995a9d07d8447d478a6db0d5bb5be | bb198232df12a1adb9e8a6164ff2a403bf3107cf | /wifi-dump-parser-3/template-parsing.py | 5ebdb33d76793457a8eace4b4066f6e2e0ee8ee9 | []
| no_license | vanello/wifi-arsenal | 9eb79a43dfdd73d3ead1ccd5d2caf9bad9e327ee | 1ca4c5a472687f8f017222893f09a970652e9a51 | refs/heads/master | 2021-01-16T22:00:37.657041 | 2015-09-03T03:40:43 | 2015-09-03T03:40:43 | 42,060,303 | 1 | 0 | null | 2015-09-07T15:24:11 | 2015-09-07T15:24:11 | null | UTF-8 | Python | false | false | 21,786 | py | #Author : Abhinav Narain
#Date : May 6, 2013
#Purpose : To read the binary files with data from BISmark deployment in homes
# Gives the frames: transmitted and received by the Access point in human readable form
# To test the output of the files with the dumps on clients; and understanding the trace
#
# Contention delay
#
import os,sys,re
import gzip
import struct
from header import *
from mac_parser import *
from stats import *
from rate import *
try:
import cPickle as pickle
except ImportError:
import pickle
missing_files=[]
tx_time_data_series=[]
tx_time_mgmt_series=[]
tx_time_ctrl_series=[]
rx_time_data_series=[]
rx_time_mgmt_series=[]
rx_time_ctrl_series=[]
if len(sys.argv) !=5 :
print len(sys.argv)
print "Usage : python station-process.py data/<data.gz> mgmt/<mgmt.gz> ctrl/<ctrl.gz> <outputfile> "
sys.exit(1)
#compare regular expression for filenameif argv[1] for the lexicographic /time ordering so that we load them in order in the first place
data_f_dir=sys.argv[1]
mgmt_f_dir=sys.argv[2]
ctrl_f_dir=sys.argv[3]
output_type=sys.argv[4]
data_fs=os.listdir(data_f_dir)
ctrl_fs=os.listdir(ctrl_f_dir)
data_file_header_byte_count=0
ctrl_file_header_byte_count=0
mgmt_file_header_byte_count=0
file_counter=0
file_timestamp=0
filename_list=[]
unix_time=set()
for data_f_n in data_fs :
filename_list.append(data_f_n.split('-'))
unix_time.add(int(data_f_n.split('-')[1]))
if not (data_f_n.split('-')[2]=='d'):
print "its not a data file ; skip "
continue
filename_list.sort(key=lambda x : int(x[3]))
filename_list.sort(key=lambda x : int(x[1]))
for data_f_name_list in filename_list : #data_fs :
data_f_name="-".join(data_f_name_list)
data_f= gzip.open(data_f_dir+data_f_name,'rb')
data_file_content=data_f.read()
data_f.close()
data_file_current_timestamp=0
data_file_seq_n=0
bismark_id_data_file=0
start_64_timestamp_data_file=0
for i in xrange(len(data_file_content )):
if data_file_content[i]=='\n':
bismark_data_file_header = str(data_file_content[0:i])
ents= bismark_data_file_header.split(' ')
bismark_id_data_file=ents[0]
start_64_timestamp_data_file= int(ents[1])
data_file_seq_no= int(ents[2])
data_file_current_timestamp=int(ents[3])
data_file_header_byte_count =i
break
data_contents=data_file_content.split('\n----\n')
header_and_correct_data_frames = data_contents[0]
err_data_frames = data_contents[1]
correct_data_frames_missed=data_contents[2]
err_data_frames_missed=data_contents[3]
ctrl_f_name = data_f_name
ctrl_f_name =re.sub("-d-","-c-",ctrl_f_name)
try :
ctrl_f= gzip.open(ctrl_f_dir+ctrl_f_name,'rb')
ctrl_file_content=ctrl_f.read()
except :
print "CTRL file not present ", ctrl_f_name
missing_files.append([ctrl_f_name,data_file_current_timestamp])
continue
ctrl_f.close()
mgmt_f_name = data_f_name
mgmt_f_name = re.sub("-d-","-m-",mgmt_f_name)
try :
mgmt_f= gzip.open(mgmt_f_dir+mgmt_f_name,'rb')
mgmt_file_content=mgmt_f.read()
except :
print "MGMT file not present ",mgmt_f_name
missing_files.append([mgmt_f_name,data_file_current_timestamp])
continue
mgmt_f.close()
mgmt_file_current_timestamp=0
mgmt_file_seq_no=0
bismark_id_mgmt_file=0
start_64_timestamp_mgmt_file=0
ctrl_file_current_timestamp=0
ctrl_file_seq_no=0
bismark_id_ctrl_file=0
start_64_timestamp_ctrl_file=0
for i in xrange(len(mgmt_file_content )):
if mgmt_file_content[i]=='\n':
bismark_mgmt_file_header = str(mgmt_file_content[0:i])
ents= bismark_mgmt_file_header.split(' ')
bismark_id_mgmt_file=ents[0]
start_64_timestamp_mgmt_file=int(ents[1])
mgmt_file_seq_no= int(ents[2])
mgmt_file_current_timestamp= int(ents[3])
mgmt_file_header_byte_count =i
break
mgmt_contents=mgmt_file_content.split('\n----\n')
header_and_beacon_mgmt_frames = mgmt_contents[0]
common_mgmt_frames = mgmt_contents[1]
err_mgmt_frames=mgmt_contents[2]
beacon_mgmt_frames_missed=mgmt_contents[3]
common_mgmt_frames_missed=mgmt_contents[4]
err_mgmt_frames_missed=mgmt_contents[5]
for i in xrange(len(ctrl_file_content )):
if ctrl_file_content[i]=='\n':
bismark_ctrl_file_header = str(ctrl_file_content[0:i])
ents= bismark_ctrl_file_header.split(' ')
bismark_id_ctrl_file= ents[0]
start_64_timestamp_ctrl_file= int(ents[1])
ctrl_file_seq_no= int(ents[2])
ctrl_file_current_timestamp=int(ents[3])
ctrl_file_header_byte_count =i
break
ctrl_contents=ctrl_file_content.split('\n----\n')
header_and_correct_ctrl_frames = ctrl_contents[0]
err_ctrl_frames = ctrl_contents[1]
correct_ctrl_frames_missed=ctrl_contents[2]
err_ctrl_frames_missed=ctrl_contents[3]
#done with reading the binary blobs from file ; now check for timestamps are correct
if (not (ctrl_file_current_timestamp == mgmt_file_current_timestamp == data_file_current_timestamp )) :
print "timestamps don't match "
sys.exit(1)
else :
file_timestamp=ctrl_file_current_timestamp
if (not (ctrl_file_seq_no == mgmt_file_seq_no == data_file_seq_no)):
print "sequence number don't match "
sys.exit(1)
if (len(ctrl_contents) != 4 or len(data_contents) != 4 or len(mgmt_contents) !=6) :
print "for ctrl " ,len (ctrl_contents) ,"for data", len(data_contents), "for mgmt", len(mgmt_contents)
print "file is malformed or the order of input folders is wrong "
continue
'''
if (data_file_current_timestamp < t1-1):
continue
if (data_file_current_timestamp >t2-1):
break
print t1, data_file_current_timestamp, t2
'''
#The following code block parses the data file
#print "----------done with missed .. now with actual data "
correct_data_frames=header_and_correct_data_frames[data_file_header_byte_count+1:]
data_index=0
for idx in xrange(0,len(correct_data_frames)-DATA_STRUCT_SIZE ,DATA_STRUCT_SIZE ):
global file_timestamp
frame=correct_data_frames[data_index:data_index+DATA_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem=defaultdict(list)
monitor_elem=defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success:
for key in frame_elem.keys():
tsf=key
parse_data_frame(frame,radiotap_len,frame_elem)
temp=frame_elem[tsf]
temp.insert(0,tsf)
#print temp
if radiotap_len == RADIOTAP_RX_LEN:
rx_time_data_series.append(temp)
elif radiotap_len ==RADIOTAP_TX_LEN :
tx_time_data_series.append(temp)
else :
print "impossible radiotap len detected ; Report CERN", radiotap_len
else:
print "success denied; correct data frame"
data_index=data_index+DATA_STRUCT_SIZE
del frame_elem
del monitor_elem
'''
data_index=0
for idx in xrange(0,len(err_data_frames)-DATA_ERR_STRUCT_SIZE,DATA_ERR_STRUCT_SIZE ):
frame=err_data_frames[data_index:data_index+DATA_ERR_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem=defaultdict(list)
monitor_elem=defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success:
for key in frame_elem.keys():
tsf=key
parse_err_data_frame(frame,radiotap_len,frame_elem)
temp=frame_elem[tsf]
temp.insert(0,tsf)
if radiotap_len == RADIOTAP_RX_LEN:
rx_time_data_series.append(temp)
elif radiotap_len ==RADIOTAP_TX_LEN :
tx_time_series.append(temp)
print "wrong err data tx frame "
else :
print "impossible radiotap len detected ; Report CERN"
else :
print "success denied; incorrect data frame"
data_index= data_index+DATA_ERR_STRUCT_SIZE
del frame_elem
del monitor_elem
'''
#The following code block parses the mgmt files
beacon_mgmt_frames=header_and_beacon_mgmt_frames[mgmt_file_header_byte_count+1:]
mgmt_index=0
for idx in xrange(0,len(beacon_mgmt_frames)-MGMT_BEACON_STRUCT_SIZE ,MGMT_BEACON_STRUCT_SIZE ):
global file_timestamp
frame=beacon_mgmt_frames[mgmt_index:mgmt_index+MGMT_BEACON_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem,monitor_elem=defaultdict(list),defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
if not( radiotap_len ==RADIOTAP_RX_LEN or radiotap_len == RADIOTAP_TX_LEN) :
print "the radiotap header is not correct "
sys.exit(1)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success :
for key in frame_elem.keys():
tsf=key
temp=frame_elem[tsf]
temp.insert(0,tsf)
parse_mgmt_beacon_frame(frame,radiotap_len,frame_elem)
if radiotap_len== RADIOTAP_TX_LEN :
tx_time_mgmt_series.append(temp)
print temp
else :
print "beacon success denied"
mgmt_index=mgmt_index+MGMT_BEACON_STRUCT_SIZE
del frame_elem
del monitor_elem
mgmt_index=0
for idx in xrange(0,len(common_mgmt_frames)-MGMT_COMMON_STRUCT_SIZE,MGMT_COMMON_STRUCT_SIZE ):
global file_timestamp
frame=common_mgmt_frames[mgmt_index:mgmt_index+MGMT_COMMON_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem,monitor_elem=defaultdict(list),defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
if not( radiotap_len ==RADIOTAP_RX_LEN or radiotap_len == RADIOTAP_TX_LEN) :
print "the radiotap header is not correct "
sys.exit(1)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success==1 :
for key in frame_elem.keys():
tsf=key
temp=frame_elem[tsf]
temp.insert(0,tsf)
parse_mgmt_common_frame(frame,radiotap_len,frame_elem)
if radiotap_len== RADIOTAP_TX_LEN :
tx_time_mgmt_series.append(temp)
else :
print "common mgmt success denied"
mgmt_index= mgmt_index+MGMT_COMMON_STRUCT_SIZE
del frame_elem
del monitor_elem
'''
mgmt_index=0
for idx in xrange(0,len(err_mgmt_frames)-MGMT_ERR_STRUCT_SIZE,MGMT_ERR_STRUCT_SIZE ):
global file_timestamp
frame=err_mgmt_frames[mgmt_index:mgmt_index+MGMT_ERR_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem,monitor_elem=defaultdict(list),defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
if not( radiotap_len ==RADIOTAP_RX_LEN or radiotap_len == RADIOTAP_TX_LEN) :
print "the radiotap header is not correct "
sys.exit(1)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success==1 :
for key in frame_elem.keys():
tsf=key
temp=frame_elem[tsf]
temp.insert(0,tsf)
parse_mgmt_err_frame(frame,radiotap_len,frame_elem)
if radiotap_len== RADIOTAP_TX_LEN :
tx_time_mgmt_common_series.append(temp)
print "err: mgmt tx frame"
else:
print "success denied"
mgmt_index= mgmt_index+MGMT_ERR_STRUCT_SIZE
del frame_elem
del monitor_elem
'''
#print "----------done with missed .. now with actual ctrl data "
correct_ctrl_frames=header_and_correct_ctrl_frames[ctrl_file_header_byte_count+1:]
ctrl_index=0
for idx in xrange(0,len(correct_ctrl_frames)-CTRL_STRUCT_SIZE ,CTRL_STRUCT_SIZE ):
global file_timestamp
frame=correct_ctrl_frames[ctrl_index:ctrl_index+CTRL_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem, monitor_elem=defaultdict(list),defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
if not( radiotap_len ==RADIOTAP_RX_LEN or radiotap_len == RADIOTAP_TX_LEN) :
print "the radiotap header is not correct "
sys.exit(1)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success :
for key in frame_elem.keys():
tsf=key
parse_ctrl_frame(frame,radiotap_len,frame_elem)
temp=frame_elem[tsf]
temp.insert(0,tsf)
if radiotap_len ==RADIOTAP_TX_LEN :
tx_time_ctrl_series.append(temp)
elif radiotap_len ==RADIOTAP_RX_LEN :
rx_time_ctrl_series.append(temp)
else :
print "ctrl success denied"
ctrl_index=ctrl_index+CTRL_STRUCT_SIZE
del frame_elem
del monitor_elem
'''
ctrl_index=0
for idx in xrange(0,len(err_ctrl_frames)-CTRL_ERR_STRUCT_SIZE,CTRL_ERR_STRUCT_SIZE):
global file_timestamp
frame=err_ctrl_frames[ctrl_index:ctrl_index+CTRL_ERR_STRUCT_SIZE]
offset,success,tsf= 8,-1,0
header = frame[:offset]
frame_elem,monitor_elem=defaultdict(list),defaultdict(list)
(version,pad,radiotap_len,present_flag)=struct.unpack('<BBHI',header)
if not( radiotap_len ==RADIOTAP_RX_LEN or radiotap_len == RADIOTAP_TX_LEN) :
print "the radiotap header is not correct "
sys.exit(1)
(success,frame_elem,monitor_elem)=parse_radiotap(frame,radiotap_len,present_flag,offset,monitor_elem,frame_elem)
if success ==1:
for key in frame_elem.keys():
tsf=key
parse_ctrl_err_frame(frame,radiotap_len,frame_elem)
temp=frame_elem[tsf]
temp.insert(0,tsf)
if radiotap_len == RADIOTAP_RX_LEN:
rx_time_ctrl_series.append(temp)
elif radiotap_len ==RADIOTAP_TX_LEN :
tx_time_ctrl_series.append(temp)
print "wrong: ctrl frame "
else :
print "success denied"
ctrl_index= ctrl_index+CTRL_ERR_STRUCT_SIZE
del frame_elem
del monitor_elem
'''
file_counter +=1
if file_counter %10 == 0:
print file_counter
print "now processing the files to calculate time "
tx_time_data_series.sort(key=lambda x:x[0])
tx_time_ctrl_series.sort(key=lambda x:x[0])
tx_time_mgmt_series.sort(key=lambda x:x[0])
mgmt_zero_queue_tx=[]
mgmt_retx_count=[]
ctrl_zero_tx=[]
ctrl_retx_count=[]
Station_list=list(Station)
Station_tx_retx_count = defaultdict(list)
for i in range(0,len(tx_time_ctrl_series)):
frame=tx_time_ctrl_series[i]
c_frame_tx_flags_radiotap=frame[1]
c_frame_retx=frame[2]
c_frame_mpdu_queue_size=frame[5]
c_frame_retx=frame[2]
if c_frame_tx_flags_radiotap[0]==0 :
ctrl_retx_count.append(c_frame_retx)
if c_frame_mpdu_queue_size ==0 and c_frame_retx==0 :
ctrl_zero_tx.append()
for i in range(0,len(tx_time_mgmt_series)):
frame=tx_time_mgmt_series[i]
c_frame_tx_flags_radiotap=frame[1]
c_frame_retx=frame[2]
c_frame_mpdu_queue_size=frame[5]
c_frame_retx=frame[2]
c_frame_total_time=frame[4]
if c_frame_tx_flags_radiotap[0]==0 :
ctrl_retx_count.append(c_frame_retx)
if c_frame_mpdu_queue_size ==0 and c_frame_retx==0 :
ctrl_zero_tx.append(c_frame_total_time)
print "in tx looping "
Station_tx_series=defaultdict(list)
for j in range(0,len(Station_list)):
for i in range(0,len(tx_time_data_series)):
frame = tx_time_data_series[i]
if frame[11]==Station_list[j] :
width, half_gi, shortPreamble= 0,0,0
phy,kbps=frame[7],temp[0]*500
prop_time=(frame[-1]*8.0 *1000000)/ (frame[3] *1000000) #frame[-1] is the size of frame in bytes
temp= frame[10]
abg_rix,pktlen= temp[0], frame[17]
airtime,curChan=-1,-23
if abg_rix == -1 :
n_rix=temp[-1][0]
airtime=ath_pkt_duration(n_rix, pktlen, width, half_gi,shortPreamble)
else :
airtime= -1 #ath9k_hw_computetxtime(phy,kbps,pktlen,abg_rix,shortPreamble,curChan)
Station_tx_series[frame[11]].append([frame[0],frame[1],frame[2],frame[3],frame[4],frame[5],frame[6],frame[8],frame[7],frame[9],frame[13],frame[14],frame[15],frame[16],frame[17],airtime,prop_time])
# 0 ,1 ,2 ,3 ,4 ,5 ,6 ,7 ,8 ,9 ,10 ,11
#time [0],txflags[1],retx[2],success_rate[3],total_time[4],Q len [5],A-Q len [6], Q-no[7],phy_type[8],retx_rate_list[9],seq no[13],fragment no[14],mac-layer-flags[15], frame-prop-type[16], framesize[17],prop time,temp
# 12 ,13 ,14 ,16, 17
print"format:tsf, txflags, retx, successful bitrate, total time,Qlen,AMPDU-Q len,Q no, phy-type,retx rate list,seq no, frag no, mac-layer flags, frame prop type,frame size, frame-prop time"
for j in Station_tx_series.keys():
#j is the station name
print "TX Station :", j
list_of_frames= Station_tx_series[j]
for i in range(1,len(list_of_frames)):
frame=list_of_frames[i]
''' #used when looking for consecutive frames
previous_frame=list_of_frames[i-1]
c_frame_departure=frame[0]
p_frame_departure=previous_frame[0]
c_frame_total_time=frame[4]
p_frame_total_time=previous_frame[4]
c_frame_mpdu_queue_size=frame[5]
p_frame_mpdu_queue_size=previous_frame[5]
c_frame_ampdu_queue_size=frame[6]
p_frame_ampdu_queue_size=previous_frame[6]
c_frame_queue_no=frame[7]
p_frame_queue_no=previous_frame[7]
c_frame_phy_flag=frame[8]
p_frame_phy_flag=previous_frame[8]
c_frame_seq_no=frame[10]
p_frame_seq_no=previous_frame[10]
c_frame_frag_no=frame[11]
p_frame_frag_no=previous_frame[11]
c_frame_size= frame[-1]
p_frame_size= previous_frame[-1]
c_frame_tx_flags=frame[1]
'''
c_tx_flags_radiotap=frame[1]
#if c_tx_flags_radiotap[0]==0 :
c_frame_mpdu_queue_size= frame[5]
c_frame_retx= frame[2]
if c_frame_mpdu_queue_size ==0 and c_frame_retx==0 :
print frame
# 0 ,1 ,2 ,3 ,4 ,5 ,6 ,7 ,8 ,9 ,10 ,11
#time [0],txflags[1],retx[2],success_rate[3],total_time[4],Q len [5],A-Q len [6], Q-no[7],phy_type[8],retx_rate_list[9],seq no[12],fragment no[13],mac-layer-flags[14], frame-prop-type[15], framesize[16],prop time
# 12 ,13 ,14 ,16
print "done with a station "
print "in rx_looping "
Station_rx_series=defaultdict(list)
print "RECIVED FRAMES "
print "format : time,flags,freq, rx_flags,success rate, rx_queue_time,framesize , signal,RSSI, seq number,frag no,retry frame,prop time"
for i in range(0,len(rx_time_data_series)):
frame = rx_time_data_series[i]
for i in range(0,len(Station_list)):
if frame[12]==Station_list[i] :
prop_time=(frame[10]*8.0 *1000000)/ (frame[8] *1000000)
Station_rx_series[frame[12]].append([frame[0],frame[1],frame[2],frame[7],frame[8],frame[9] ,frame[10],frame[4],frame[11],frame[14],frame[15],frame[16][1],prop_time])
#print frame[12],frame[0],frame[1],frame[2],frame[7],frame[8],frame[9],frame[10],frame[4],frame[11],frame[14],frame[15],frame[16][1],prop_time
#time [0],flags[1],freq[2], rx_flags[7],success rate [8], rx_queue_time[9],framesize [10], signal [4],RSSI [11], seq number [14], fragment no [15],retry frame [16][1],prop time
'''
for j in Station_rx_series.keys():
list_of_frames= Station_rx_series[j]
print "RX Station ",j
for i in range(1,len(list_of_frames)):
frame= list_of_frames[i]
print frame
'''
for i in range(0,len(missing_files)):
print missing_files[i]
print "number of files that can't be located ", len(missing_files)
| [
"[email protected]"
]
| |
f2e7f0e94bba710d8fdae5692b1f3256e1ae55d1 | 0bfb55b41282803db96b90e7bba73d86be7e8553 | /submissions/migrations/0002_auto_20161028_0540.py | cd60356ef018a13dc5711524a56d9a60a4a3a77a | [
"MIT"
]
| permissive | OpenFurry/honeycomb | eebf2272f8ae95eb686ad129555dbebcf1adcd63 | c34eeaf22048948fedcae860db7c25d41b51ff48 | refs/heads/master | 2021-01-11T01:52:40.978564 | 2016-12-29T18:08:38 | 2016-12-29T18:08:38 | 70,649,821 | 2 | 2 | null | 2016-12-29T18:08:39 | 2016-10-12T01:22:38 | Python | UTF-8 | Python | false | false | 2,187 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-28 05:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('usermgmt', '0001_initial'),
('submissions', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.AddField(
model_name='submission',
name='allowed_groups',
field=models.ManyToManyField(blank=True, to='usermgmt.FriendGroup'),
),
migrations.AddField(
model_name='submission',
name='folders',
field=models.ManyToManyField(blank=True, through='submissions.FolderItem', to='submissions.Folder'),
),
migrations.AddField(
model_name='submission',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='folderitem',
name='folder',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='submissions.Folder'),
),
migrations.AddField(
model_name='folderitem',
name='submission',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='submissions.Submission'),
),
migrations.AddField(
model_name='folder',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='folder',
name='parent',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='submissions.Folder'),
),
migrations.AddField(
model_name='folder',
name='submissions',
field=models.ManyToManyField(through='submissions.FolderItem', to='submissions.Submission'),
),
]
| [
"[email protected]"
]
| |
2a97d4fde1b262d7d7571c5622491d16841bed3f | 313bb88c43d74995e7426f9482c6c8e670fdb63c | /07-modules/example6_module.py | 8926b868c6f45aa8be74c33928f9bfcea9bd86be | []
| no_license | martakedzior/python-course | 8e93fcea3e9e1cb51920cb1fcf3ffbb310d1d654 | 3af2296c2092023d91ef5ff3b4ef9ea27ec2f227 | refs/heads/main | 2023-05-06T07:26:58.452520 | 2021-05-26T16:50:26 | 2021-05-26T16:50:26 | 339,822,876 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | import example6_module
import shapes
if __name__ == '__ main__':
print(shapes2.rectangle_aera(3, 4))
else:
print('Jestem teraz modulem')
print('wartość zmiennej __name__:', __name__)
print('moduł - wartość zmiennej __name__:', __name__) | [
"[email protected]"
]
| |
2c5fcb6a21f19430ac8de3c70be24f2e6b1711a8 | 90cad1df7b7d424feb8e71ff3d77e772d446afdf | /test/test_reebill/test_excel_export.py | 56fabef7c4bff8d7f0b9276c42aab2347c599839 | []
| no_license | razagilani/billing | acb8044c22b4075250c583f599baafe3e09abc2e | fd2b20019eeedf0fcc781e5d81ff240be90c0b37 | refs/heads/master | 2021-05-01T14:46:32.138870 | 2016-03-09T18:55:09 | 2016-03-09T18:55:09 | 79,589,205 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 16,940 | py | from copy import deepcopy
from datetime import date, datetime
from itertools import cycle
from StringIO import StringIO
import unittest
import mock
from core import init_config, init_model
from core.model import Register, Charge, Session, Utility, Address,\
Supplier, RateClass, UtilityAccount
from core.model.utilbill import UtilBill, Charge
from reebill.reports.excel_export import Exporter
from reebill.reebill_model import ReeBill, Payment
from reebill.reebill_dao import ReeBillDAO
from reebill.payment_dao import PaymentDAO
from test import create_tables
class ExporterSheetTest(unittest.TestCase):
def setUp(self):
#Set up the mock
self.mock_StateDB = mock.create_autospec(ReeBillDAO)
self.payment_dao = mock.Mock(autospec=PaymentDAO)
self.exp = Exporter(self.mock_StateDB, self.payment_dao)
def test_get_reebill_details_dataset(self):
def make_reebill(id, month):
result = mock.Mock(autospec=ReeBill)
result.id = id
result.sequence = 1
result.version = 0
result.issued = 1
result.utilbills = [mock.Mock(autospec=UtilBill)]
result.billing_address = 'Monroe Towers, Silver Spring, MD'
result.service_address = 'Monroe Towers, Silver Spring, MD'
result.issue_date = date(2013, 4, 1)
result.ree_value = 4.3
result.prior_balance = 2.20
result.payment_received = None
result.total_adjustment = 0.00
result.balance_forward = 62.29
result.ree_charge = 122.20
result.late_charge = 32.2
result.balance_due = 5.01
result.get_period.return_value = (
date(2011, month, 3), date(2011, month+1, 2))
result.get_total_actual_charges.return_value = 743.49
result.get_total_hypothetical_charges.return_value = 980.33
result.get_total_renewable_energy.return_value = 188.20
return result
def make_payment(date_applied, amount):
result = mock.Mock(autospec=Payment)
result.date_applied = date_applied
result.credit = amount
return result
def get_payments_for_reebill_id(id):
if id == 1:
return [
make_payment(datetime(2011, 1, 30, 0, 0, 0), 400.13), # '10003'
make_payment(datetime(2011, 2, 01, 0, 0, 0), 13.37)
]
else:
return []
self.payment_dao.get_payments_for_reebill_id.side_effect = \
get_payments_for_reebill_id
self.mock_StateDB.get_all_reebills_for_account.side_effect = cycle([
[make_reebill(1, 1)], # For account '10003'
[make_reebill(2, 2), make_reebill(3, 3), make_reebill(4, 4)] # 10004
])
# No start or end date
dataset = self.exp.get_export_reebill_details_dataset(
['10003', '10004'])
correct_data = [
('10003', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-01-03', '2011-02-02', '980.33', '743.49', '4.30',
'2.20', None, '2011-01-30', '400.13', '0.00', '62.29',
'122.20', 32.2, '5.01', '', '-117.90', '-117.90',
'188.20', '1.26'),
('10003', 1, 0, None, None, None, None, None, None, None,
None, None, None, '2011-02-01', '13.37', None, None,
None, None, None, None, None, None, None, None),
('10004', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-02-03', '2011-03-02', '980.33', '743.49', '4.30',
'2.20', None, None, None, '0.00', '62.29', '122.20',
32.2, '5.01', '', '-117.90', '-117.90', '188.20',
'1.26'),
('10004', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-03-03', '2011-04-02', '980.33', '743.49', '4.30',
'2.20', None, None, None, '0.00', '62.29', '122.20',
32.2, '5.01', '', '-117.90', '-235.80', '188.20',
'1.26'),
('10004', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-04-03', '2011-05-02', '980.33', '743.49', '4.30',
'2.20', None, None, None, '0.00', '62.29', '122.20',
32.2, '5.01', '', '-117.90', '-353.70', '188.20',
'1.26')
]
for indx,row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
# Only start date
dataset = self.exp.get_export_reebill_details_dataset(
['10003', '10004'], begin_date=date(2011, 4, 1))
correct_data = [
('10004', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-04-03', '2011-05-02', '980.33', '743.49', '4.30',
'2.20', None, None, None, '0.00', '62.29', '122.20',
32.2, '5.01', '', '-117.90', '-117.90', '188.20',
'1.26')
]
for indx,row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
# Only end date
dataset = self.exp.get_export_reebill_details_dataset(
['10003', '10004'], end_date=date(2011, 3, 5))
correct_data = [
('10003', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-01-03', '2011-02-02', '980.33', '743.49', '4.30',
'2.20', None, '2011-01-30', '400.13', '0.00', '62.29',
'122.20', 32.2, '5.01', '', '-117.90', '-117.90',
'188.20', '1.26'),
('10003', 1, 0, None, None, None, None, None, None, None,
None, None, None, '2011-02-01', '13.37', None, None,
None, None, None, None, None, None, None, None),
('10004', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-02-03', '2011-03-02', '980.33', '743.49', '4.30',
'2.20', None, None, None, '0.00', '62.29', '122.20',
32.2, '5.01', '', '-117.90', '-117.90', '188.20',
'1.26')
]
for indx,row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
# Start and end date
dataset = self.exp.get_export_reebill_details_dataset(
['10003', '10004'], begin_date=date(2011, 2, 1),
end_date=date(2011, 3, 5))
correct_data = [
('10004', 1, 0, u'Monroe Towers, Silver Spring, MD',
u'Monroe Towers, Silver Spring, MD', '2013-04-01',
'2011-02-03', '2011-03-02', '980.33', '743.49', '4.30',
'2.20', None, None, None, '0.00', '62.29', '122.20',
32.2, '5.01', '', '-117.90', '-117.90', '188.20',
'1.26')
]
for indx,row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
def test_account_charges_sheet(self):
def make_utilbill(month):
result = mock.Mock(autospec=UtilBill)
result.period_start = datetime(2013, month, 3)
result.period_end = datetime(2013, month+1, 4)
result.state = UtilBill.Complete
return result
def make_charge(group, desc, number):
result = mock.Mock(autospec=Charge)
result.total = number
result.group = group
result.description = desc
return result
def make_reebill(seq):
result = mock.Mock(autospec=ReeBill)
result.sequence = seq
ub = make_utilbill(seq)
result.utilbills = [ub]
result.utilbill = ub
return result
r1 = make_reebill(1)
r1.utilbill.charges = [make_charge(x,y,z) for x,y,z in [
('Group1', "Description1", 1.11),
('Group1', "Description2", 2.22),
('Group2', "Description3", 3.33),
]]
r2 = make_reebill(2)
r2.utilbill.charges = [make_charge(x,y,z) for x,y,z in [
('Group1', "Description1", 4.44),
('Group2', "Description2", 5.55),
('Group2', "Description3", 6.66),
]]
r3 = make_reebill(3)
r3.utilbill.charges = [make_charge(x,y,z) for x,y,z in [
('Group1', "Description1", 4.44),
('Group2', "Description2", 5.55),
('Group2', "Description3", 6.66),
]]
# No start date or end date
dataset = self.exp.get_account_charges_sheet('999999', [r1, r2, r3])
correct_data = [('999999', 1, '2013-01-03', '2013-02-04', '2013-01',
'No', '1.11', '2.22', '3.33', ''),
('999999', 2, '2013-02-03', '2013-03-04', '2013-02',
'No', '4.44', '', '6.66', '5.55'),
('999999', 3, '2013-03-03', '2013-04-04', '2013-03',
'No', '4.44', '', '6.66', '5.55')]
headers = ['Account', 'Sequence', 'Period Start', 'Period End',
'Billing Month', 'Estimated', 'Group1: Description1',
'Group1: Description2', 'Group2: Description3',
'Group2: Description2']
self.assertEqual(headers, dataset.headers)
for indx, row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
# Only start date
dataset = self.exp.get_account_charges_sheet(
'999999', [r1, r2, r3], start_date=datetime(2013, 2, 1))
correct_data = [('999999', 2, '2013-02-03', '2013-03-04', '2013-02',
'No', '4.44', '5.55', '6.66'),
('999999', 3, '2013-03-03', '2013-04-04', '2013-03',
'No', '4.44', '5.55', '6.66')]
headers = ['Account', 'Sequence', 'Period Start', 'Period End',
'Billing Month', 'Estimated', 'Group1: Description1',
'Group2: Description2', 'Group2: Description3']
self.assertEqual(headers, dataset.headers)
for indx, row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
# Only end date
dataset = self.exp.get_account_charges_sheet(
'999999', [r1, r2, r3], end_date=datetime(2013, 3, 5))
correct_data = [('999999', 1, '2013-01-03', '2013-02-04', '2013-01',
'No', '1.11', '2.22', '3.33', ''),
('999999', 2, '2013-02-03', '2013-03-04', '2013-02',
'No', '4.44', '', '6.66', '5.55')]
headers = ['Account', 'Sequence', 'Period Start', 'Period End',
'Billing Month', 'Estimated', 'Group1: Description1',
'Group1: Description2', 'Group2: Description3',
'Group2: Description2']
self.assertEqual(headers, dataset.headers)
for indx, row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
# Both start and end date
dataset = self.exp.get_account_charges_sheet(
'999999', [r1, r2], start_date=datetime(2013, 2, 1),
end_date=datetime(2013, 3, 5))
correct_data = [('999999', 2, '2013-02-03', '2013-03-04', '2013-02',
'No', '4.44', '5.55', '6.66')]
headers = ['Account', 'Sequence', 'Period Start', 'Period End',
'Billing Month', 'Estimated', 'Group1: Description1',
'Group2: Description2', 'Group2: Description3']
self.assertEqual(headers, dataset.headers)
for indx, row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
def test_get_energy_usage_sheet(self):
def make_charge(number):
result = mock.Mock(autospec=Charge)
result.total = number
result.group = str(number)
result.description = ''
return result
#Setup Mock
u1 = mock.Mock(autospec=UtilBill)
u1.utility_account.account = '10003'
u1.rate_class.name = 'DC Non Residential Non Heat'
u1.period_start = date(2011,11,12)
u1.period_end = date(2011,12,14)
u1.charges = [make_charge(x) for x in [3.37, 17.19, 43.7, 164.92,
23.14, 430.02, 42.08, 7.87, 11.2]]
# replacement for document above
register1 = mock.Mock(autospec=Register)
register1.description = ''
register1.quantity = 561.9
register1.unit = 'therms'
register1.estimated = False
register1.reg_type = 'total'
register1.register_binding = Register.TOTAL
register1.active_periods = None
u1._registers = [register1]
u2 = deepcopy(u1)
u2.period_start = date(2011,12,15)
u2.period_end = date(2012,01,14)
u2._registers = [deepcopy(register1)]
dataset = self.exp.get_energy_usage_sheet([u1, u2])
correct_data = [('10003', u'DC Non Residential Non Heat', 561.9, u'therms', '2011-11-12', '2011-12-14', 3.37, 17.19, 43.7, 164.92, 23.14, 430.02, 42.08, 7.87, 11.2),
('10003', u'DC Non Residential Non Heat', 561.9, u'therms', '2011-12-15', '2012-01-14', 3.37, 17.19, 43.7, 164.92, 23.14, 430.02, 42.08, 7.87, 11.2),]
headers = ['Account', 'Rate Class', 'Total Energy', 'Units',
'Period Start', 'Period End', '3.37: ', '17.19: ', '43.7: ',
'164.92: ', '23.14: ', '430.02: ', '42.08: ', '7.87: ',
'11.2: ']
self.assertEqual(headers, dataset.headers)
for indx,row in enumerate(dataset):
self.assertEqual(row, correct_data[indx])
self.assertEqual(len(dataset), len(correct_data))
class ExporterDataBookTest(unittest.TestCase):
def setUp(self):
init_config('test/tstsettings.cfg')
create_tables()
init_model()
self.exp = Exporter(ReeBillDAO(), PaymentDAO())
s = Session()
utility = Utility(name='New Utility', address=Address())
s.add(utility)
supplier = Supplier(name='New Supplier', address=Address())
s.add(supplier)
rate_class = RateClass(name='New Rate Class', utility=utility,
service='electric')
s.add(rate_class)
utility_account = UtilityAccount(
'some name',
'20001',
utility,
supplier,
rate_class,
Address(),
Address(),
'1234567890'
)
s.add(utility_account)
s.add(
UtilBill(
utility_account, utility,
rate_class, supplier=supplier,
period_start=date(2010, 11, 1), period_end=date(2011, 2, 3),
date_received=datetime.utcnow().date(),
state=UtilBill.Estimated,
)
)
def test_exports_returning_binaries(self):
"""
This test simply calls all export functions returning binaries. This
way we can at least verify that the code in those functions is
syntactically correct and calls existing methods
"""
string_io = StringIO()
# export_account_charges
self.exp.export_account_charges(string_io)
self.exp.export_account_charges(string_io, '20001')
self.exp.export_account_charges(string_io, '20001', date(2010, 11, 1))
self.exp.export_account_charges(string_io, '20001',
date(2010, 11, 1), date(2011, 2, 3))
# export_energy_usage
self.exp.export_energy_usage(string_io)
the_account = Session().query(UtilityAccount).filter_by(
account='20001').one()
self.exp.export_energy_usage(string_io, the_account)
# export_reebill_details
self.exp.export_reebill_details(string_io)
self.exp.export_reebill_details(string_io, '20001')
self.exp.export_reebill_details(string_io, '20001', date(2010, 11, 1))
self.exp.export_reebill_details(string_io, '20001',
date(2010, 11, 1), date(2011, 2, 3))
| [
"[email protected]"
]
| |
64d45d9b34c9e2d7e84fae07e4afc49f2795317a | 0c9ba4d9c73fb3b4ee972aed0b6d844d8a9546a9 | /TerchaerCode/s13day3课上代码/day3/s1.py | 2b5de365e8984bdb16be174cabf9b4b954ffbc68 | []
| no_license | SesameMing/Python51CTONetwork | d38179122c8daaed83d7889f17e4c3b7d81e8554 | 76169c581245abf2bcd39ed60dc8c9d11698fd3a | refs/heads/master | 2020-04-15T12:47:15.234263 | 2018-06-02T15:04:04 | 2018-06-02T15:04:04 | 65,876,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,765 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:Alex Li
# 1、set,无序,不重复序列
# li = [11,222]
# print(li)
# a. 创建
# li = []
# list((11,22,33,4))
# list __init__,内部执行for循环(11,22,33,4) [11,22,33,4]
# 原理,list
# dic = {"k1":123}
# se = {"123", "456"}
# s = set() # 创建空集合
# li = [11,22,11,22]
# s1 = set(li)
# print(s1)
# b. 功能
# set()\
# 创建集合
# s1 = {11,22}
# s2 = set()
# s3 = set([11,22,33,4])
## 操作集合
# s = set()
# print(s)
# s.add(123)
# s.add(123)
# s.add(123)
# print(s)
# s.clear()
# print(s)
# s1 = {11,22,33}
# s2 = {22,33,44}
# s3 = s1.difference(s2)
# A中存在,B中不存在
# s3 = s2.difference(s1)
# s3 = s1.symmetric_difference(s2)
# print(s1)
# print(s2)
# print(s3)
# s1.difference_update(s2)
# s1.symmetric_difference_update(s2)
# print(s1)
# s1 = {11,22,33}
# s1.discard(1111)
# s1.remove(11111)
# ret = s1.pop()
# print(s1)
# print(ret)
# s1 = {11,22,33}
# s2 = {22,33,44}
# s3 = s1.union(s2)
# print(s3)
# s3 = s1.intersection(s2)
# s1.intersection_update(s2)
# print(s3)
"""
s1 = {11,22,33}
s1.add(11)
s1.add(12)
s1.add(13)
# li = [11,22,3,11,2]
# li = (11,22,3,11,2)
li = "alexalex"
s1.update(li)
print(s1)
"""
# li = [11,22,33] # list __init__
# li() # list __call__
# li[0] # list __getitem__
# li[0] = 123 # list __setitem__
# def li[1] # list __delitem__
old_dict = {
"#1": 8,
"#2": 4,
"#4": 2,
}
new_dict = {
"#1": 4,
"#2": 4,
"#3": 2,
}
# old_kyes = old_dict.keys()
# old_set = set(old_kyes)
new_set = set(new_dict.keys())
old_set = set(old_dict.keys())
remove_set = old_set.difference(new_set)
add_set = new_set.difference(old_set)
update_set = old_set.intersection(new_set)
import re
re.match() | [
"[email protected]"
]
| |
5acdfe52eb25d3cd6de5b1bea421707d7b5ba3cd | 0630a7addb90600293f0ee0787dd6ab0ac77b09a | /LinkedList/FindDuplicatesInArray.py | 35abfd91e5fb529b564d57ba99c6e779fb8a5c5a | [
"MIT"
]
| permissive | aritraaaa/Competitive_Programming | 48ecd3b6e28549889160c04cdbd19a5ad06fa49b | ee7eadf51939a360d0b004d787ebabda583e92f0 | refs/heads/master | 2023-06-12T07:04:07.698239 | 2021-07-01T11:11:24 | 2021-07-01T11:11:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | class Solution:
# @param A : tuple of integers
# @return an integer
def repeatedNumber(self, A):
'''
# This is the marker concept, by list modification
A = list(A)
if len(A) <= 1:
return -1
for i in range(len(A)):
num = abs(A[i])
if A[num]>=0:
A[num] = -1 * A[num] # marking negative means already visited A[i]
else:
return num
return -1
'''
# This is modified Floyd Warshall concept, cycle in linked list type
# https://medium.com/solvingalgo/solving-algorithmic-problems-find-a-duplicate-in-an-array-3d9edad5ad41
slow, fast = A[0], A[A[0]]
while slow!=fast:
slow = A[slow]
fast = A[A[fast]]
# the slow and fast pointers are at the same point now, i.e start point of cycle
slow = 0
while slow!=fast:
slow = A[slow]
fast = A[fast]
if slow == 0:
return -1
else:
return slow
| [
"[email protected]"
]
| |
e31ee367fc6802635bca02f0078aae7a1c53faf9 | cc2d37a71eac8422b0722533766b3ee95b5b5d1a | /data_import/zmmutypes.py | 4480cded4f86f0e6a1b75637c6e882db37268004 | []
| no_license | JelleZijlstra/taxonomy | 88018a1a0ec114875c45cf87ffc825957fc3e870 | 56aac782e2cbbd084a14d2ad1b1572729ba387be | refs/heads/master | 2023-08-17T03:20:13.576928 | 2023-08-07T00:47:41 | 2023-08-07T00:47:41 | 10,962,492 | 0 | 0 | null | 2023-08-17T04:53:54 | 2013-06-26T08:21:35 | Python | UTF-8 | Python | false | false | 12,168 | py | import copy
import re
import unicodedata
from collections.abc import Iterable
from typing import Any
from taxonomy.db import constants, models
from . import lib
from .lib import DataT, PagesT
SOURCE = lib.Source("zmmutypes.txt", "ZMMU-types.pdf")
ORDER = "Отряд"
SECTION_INTRO = ("ЛЕКТОТИП", "ГОЛОТИП", "ПАРАТИП", "СИНТИП", "ПАРАЛЕКТОТИП", "НЕОТИП")
LABEL = (
"МЕСТО СБОРА",
"L OCALITY",
"LOCALITY",
"ДАТА",
"КОММЕНТАРИЙ",
"COMMENT",
"ПРИМЕЧАНИЕ",
"NOTICE",
"КОММЕНТАРИИ",
"ПРИМЕЧАНИЯ",
)
NAME_LINE = re.compile(
r"""
^(?P<original_name>[A-Z]\.?\[?[a-z]+\]?(\s\([A-Z][a-z]+\))?(\s[a-z\-\.\[\]]+)+)\s
(«[^»]+»\s)?
(?P<latin_authority>[A-Z].+?),\s(?P<year>\d{4})\s
\((?P<cyrillic_authority>[^:]+?),\s(?P<ref_year>\d{4}[а-яa-z]?):\s(?P<page_described>[^\)]+)\)\.?
$
""",
re.VERBOSE,
)
KEY_TO_KIND = {
"ЛЕКТОТИП": constants.SpeciesGroupType.lectotype,
"ГОЛОТИП": constants.SpeciesGroupType.holotype,
"НЕОТИП": constants.SpeciesGroupType.neotype,
}
def make_translation_table() -> dict[str, str]:
    """Load the transcribed->Cyrillic character mapping.

    Reads ``data_import/data/zmmu-transcribe.txt``, where each mapping line
    holds a Cyrillic character and its transcribed form separated by a
    space; lines without a space are ignored.  Lines are NFC-normalized
    before splitting so composed/decomposed forms compare equally.
    """
    table: dict[str, str] = {}
    with open("data_import/data/zmmu-transcribe.txt") as handle:
        for raw_line in handle:
            normalized = unicodedata.normalize("NFC", raw_line.strip())
            if " " not in normalized:
                continue
            cyrillic, transcribed = normalized.split()
            table[transcribed] = cyrillic
    return table
def translate_chars(lines: "Iterable[str]") -> "Iterable[str]":
    """Yield each line with transcribed Latin characters mapped back to Cyrillic."""
    # str.translate wants an ordinal -> ordinal mapping.
    mapping = {ord(src): ord(dst) for src, dst in make_translation_table().items()}
    for line in lines:
        yield line.translate(mapping)
def extract_pages(lines: Iterable[str]) -> Iterable[list[str]]:
    """Split the text into pages.

    A form-feed character at the start of a line marks a page boundary; the
    finished page is emitted and the line (minus the form feed) starts the
    next one.  The final (possibly empty) page is always emitted.
    """
    page: list[str] = []
    for raw in lines:
        if raw.startswith("\x0c"):
            yield page
            page = [raw[1:]]
        else:
            page.append(raw)
    yield page
def label_pages(pages: Iterable[list[str]]) -> "PagesT":
    """Attach printed page numbers to the mammal-section pages.

    Only pages at stream indices 164-240 (the mammal section of the
    catalog) are kept.  The printed page number is expected on a trailing
    line consisting solely of digits surrounded by whitespace; that line
    and everything after it are stripped from the yielded text.

    Raises:
        AssertionError: if a kept page has no page-number line.  (The
        original code shadowed the outer loop index and silently reused a
        stale ``page_number`` in that case.)
    """
    for index, lines in enumerate(pages):
        if index < 164 or index > 240:
            continue  # Before or after the mammal section
        for offset in range(1, len(lines) + 1):
            if re.match(r"^\s+\d+\s+$", lines[-offset]):
                page_number = int(lines[-offset].strip())
                break
        else:
            raise AssertionError(f"no page number found on page index {index}")
        yield page_number, lines[:-offset]
def align_columns(pages: "PagesT") -> "PagesT":
    """Flatten each page's two-column layout into a single line sequence.

    Tries to split the whole page at a column boundary; if that fails
    (because the "Отряд ..." order heading spans both columns), the halves
    above and below the heading are split separately and concatenated.

    Raises:
        AssertionError: if a page that failed the whole-page split has no
        order heading to split around.
    """
    for page, lines in pages:
        lines = lib.dedent_lines(lines)
        try:
            lines = lib.split_lines(lines, page, min_column=15, dedent_right=False)
        except lib.NoSplitFound:
            # Separately split the parts before and after the "Order ..." line.
            for lineno, line in enumerate(lines):
                if line.lstrip().startswith(ORDER):
                    break
            else:
                # `assert False` would be stripped under `python -O`; raise
                # explicitly so malformed pages always fail loudly.
                raise AssertionError(f"could not find order in {page}")
            before = lines[:lineno]
            after = lines[lineno + 1 :]
            lines = lib.split_lines(before, page) + lib.split_lines(after, page)
        yield page, lines
def extract_names(pages: PagesT) -> DataT:
    """Parse the labeled pages into one dict per published name.

    Each yielded dict has "pages" and "root_name" keys, a "name_line"
    entry, and one sub-dict per type-specimen section (keyed by the
    section's opening line or keyword), which in turn maps field labels
    (locality, date, comments, ...) to lists of raw text lines.
    """
    # Parser state shared with the two helper closures below.
    current_name: dict[str, Any] = {}       # dict for the name being built
    current_section: dict[str, Any] = {}    # current type-specimen section
    current_lines: list[str] = []           # lines of the label in progress
    current_label = ""

    def start_label(label: str, line: str) -> None:
        """Open a new labeled field in the current section (or name)."""
        nonlocal current_lines, current_label
        assert current_name, f"cannot start {label} with {line!r} on an empty name"
        if current_section:
            container = current_section
        else:
            container = current_name
        assert label not in container, f"duplicate label {label} for {container}"
        # Continuation lines are appended via the shared current_lines list,
        # which is also stored in the container (aliased on purpose).
        current_lines = [line]
        container[label] = current_lines
        current_label = label

    def start_section(label: str, line: str) -> None:
        """Open a new type-specimen section (e.g. a holotype block)."""
        nonlocal current_section
        # If the bare keyword is already taken (several paratypes, say),
        # fall back to the full line as a unique section key.
        if label in current_name:
            section_label = line
        else:
            section_label = label
        # This one is repeated in the source, apparently by mistake.
        if section_label != "ПАРАТИП S-32814 Пол: ? Шкура в полной со-":
            assert (
                section_label not in current_name
            ), f"duplicate label {section_label} for {current_name}"
        current_section = {"label": section_label}
        current_name[section_label] = current_section
        start_label(label, line)

    for page, lines in pages:
        for line in lines:
            line = line.rstrip()
            if not line:
                continue
            if current_section or not current_name:
                # Deeply indented lines are layout noise outside a name header.
                if lib.initial_count(line, " ") > 3:
                    continue
                # Skip citation continuations inside a date field
                # (e.g. a trailing "..., 1912)" from a wrapped reference).
                if current_label == "ДАТА" and re.search(r"[a-z], \d{4}\)?$", line):
                    continue
            if re.match(r"^†?[a-z]+$", line):
                # A bare lowercase epithet (optionally daggered for extinct
                # taxa) starts a new name; flush the previous one.
                if current_name:
                    yield current_name
                current_name = {"pages": [page], "root_name": line}
                current_section = {}
                current_label = ""
            elif "name_line" not in current_name:
                start_label("name_line", line)
            elif line.startswith(SECTION_INTRO):
                start_section(line.split()[0], line)
            elif line.startswith(LABEL):
                for label in LABEL:
                    if line.startswith(label):
                        start_label(label, line)
            else:
                # Continuation of whatever label is currently open.
                current_lines.append(line)
        if page == 228:
            break  # start of references
    yield current_name
def extract_references(pages: PagesT) -> Iterable[list[str]]:
    """Group the reference-section lines into one list per reference.

    A new reference starts at every flush-left line; indented lines are
    continuations of the current one. The section heading and blank
    lines are skipped. The final (possibly empty) group is also yielded.
    """
    pending: list[str] = []
    for _, page_lines in pages:
        for raw_line in page_lines:
            stripped = raw_line.strip()
            if not stripped or stripped == "ЛИТЕРАТУРА":
                continue
            if raw_line.startswith(" "):
                # Indented continuation of the current reference.
                pending.append(raw_line)
                continue
            # A flush-left line opens a new reference entry.
            if pending:
                yield pending
            pending = [raw_line]
    yield pending
def make_references_dict(refs: Iterable[list[str]]) -> dict[tuple[str, str], str]:
    """Index reference entries by (comma-joined author surnames, year)."""
    index: dict[tuple[str, str], str] = {}
    for entry in refs:
        text = lib.clean_line_list(entry)
        # A stray atlas entry that does not follow the author-year pattern.
        if text == "The Times' Atlas of the World, 7th ed. London: Times Books, 1986.":
            continue
        # Authors (no digits), then a year such as "1986" or "1986-87".
        match = re.match(r"^([^\d]+)(\d{4}(-\d+)?[^\.]?)\.", text)
        assert match, text
        authors_part = match.group(1)
        year = match.group(2)
        surnames = [chunk.split()[0] for chunk in authors_part.split(", ") if chunk]
        index[(", ".join(surnames), year)] = text
    return index
def handle_specimen(data: dict[str, Any]) -> dict[str, Any]:
    """Parse a type-specimen sub-dictionary in place and return it.

    Extracts the museum number, sex, age, body parts, locality, and
    collection date/collector from the raw label text. Unparseable
    details are printed for manual review rather than raising.
    """
    # The first word of the section label (e.g. "ГОЛОТИП") keys the raw text.
    detail = data[data["label"].split()[0]]
    # "(?) S-12345 Пол: m. <rest>": optional doubt marker, ZMMU number,
    # sex marker ("Пол" = sex), then the rest of the description.
    match = re.match(r"^(\(\?\) )?(S-\d+) Пол: (\??m\.|f\.|\?,?) (.*)$", detail)
    if not match:
        print(detail)
    else:
        data["type_specimen"] = f"ZMMU {match.group(2)}"
        data["gender_value"] = {
            "?m.": constants.SpecimenGender.male,
            "m.": constants.SpecimenGender.male,
            "f.": constants.SpecimenGender.female,
            "?": constants.SpecimenGender.unknown,
            "?,": constants.SpecimenGender.unknown,
        }[match.group(3)]
        rest = match.group(4)
        if "ювенильный" in rest:  # "juvenile" in Russian
            data["age"] = constants.SpecimenAge.juvenile
        data["body_parts"] = rest
    # "L OCALITY" is presumably an OCR-split variant of "LOCALITY";
    # both spellings are checked — TODO confirm against the source text.
    for label in ("LOCALITY", "L OCALITY"):
        if label in data:
            value = data[label]
            data["loc"] = value
            # The last word of the locality (minus quotes/brackets) is taken
            # as the country name and normalized via lib.NAME_SYNONYMS.
            country = value.split()[-1].strip("«»[].")
            country = lib.NAME_SYNONYMS.get(country, country)
            try:
                data["type_locality"] = models.Region.get(
                    models.Region.name == country
                ).get_location()
            except models.Region.DoesNotExist:
                # Unknown region: leave type_locality unset.
                pass
    # "ДАТА" = date; the collector, when present, follows " КОЛЛ.: ".
    date_coll = data["ДАТА"]
    try:
        date, collector = date_coll.split(" КОЛЛ.: ", maxsplit=1)
    except ValueError:
        # No collector separator — print for manual review.
        print(date_coll)
    else:
        if date != "?":
            data["date"] = date.rstrip(".")
        if collector != "?":
            data["collector"] = collector
    return data
def split_fields(names: DataT, refs_dict: dict[tuple[str, str], str]) -> DataT:
    """Parse each raw name dict into structured fields and yield it.

    Splits the name line with the ``NAME_LINE`` regex, attaches verbatim
    citations from ``refs_dict`` (keyed by Cyrillic authority + year), and
    converts type-specimen sections into paratype/syntype/paralectotype
    lists or, for the primary type, into fields on the name itself.
    """
    for name in names:
        # Preserve the unparsed input for debugging/auditing.
        name["raw_text"] = copy.deepcopy(name)
        match = NAME_LINE.match(name["name_line"].replace(" [sic!]", ""))
        if not match:
            assert False, f'failed to match {name["name_line"]}'
        else:
            name.update(match.groupdict())
            name["authority"] = name["latin_authority"]
            # Expand abbreviated names, e.g. "A.[lticola]" -> "Alticola".
            name["original_name"] = re.sub(
                r"([a-zA-Z])\.\[([a-z]+)\] ", r"\1\2 ", name["original_name"]
            )
            refs_key = (name["cyrillic_authority"], name["ref_year"])
            if refs_key in refs_dict:
                name["verbatim_citation"] = refs_dict[refs_key]
        paratypes = []
        paralectotypes = []
        syntypes = []
        # Iterate over a snapshot because specimen entries are deleted below.
        for key, value in list(name.items()):
            if key != "raw_text" and isinstance(value, dict):
                value = handle_specimen(value)
                if key.startswith("ПАРАТИП"):  # paratype
                    paratypes.append(value)
                    del name[key]
                elif key.startswith("СИНТИП"):  # syntype
                    syntypes.append(value)
                    del name[key]
                elif key.startswith("ПАРАЛЕКТОТИП"):  # paralectotype
                    paralectotypes.append(value)
                    del name[key]
                elif key in KEY_TO_KIND:
                    # Primary type (holotype/lectotype/...): promote its
                    # snake_case fields onto the name itself.
                    name["species_type_kind"] = KEY_TO_KIND[key]
                    for subkey, subval in value.items():
                        if re.match(r"^[a-z_]+$", subkey):
                            name[subkey] = subval
        if paratypes:
            name["paratypes"] = paratypes
        if paralectotypes:
            name["paralectotypes"] = paralectotypes
        if syntypes:
            name["syntypes"] = syntypes
            name["species_type_kind"] = constants.SpeciesGroupType.syntypes
        yield name
def main() -> DataT:
    """Run the full extraction pipeline and write the results to the DB.

    Reads the source text, reconstructs pages and columns, extracts name
    accounts and references, cleans and parses them, and associates and
    writes the names to the database (dry_run is off).
    """
    lines = lib.get_text(SOURCE)
    lines = translate_chars(lines)
    unlabeled_pages = extract_pages(lines)
    pages = label_pages(unlabeled_pages)
    pages = lib.validate_pages(pages, verbose=False)
    pages = align_columns(pages)
    # NOTE: ``pages`` is a single generator shared by the next two steps.
    # extract_names() stops consuming at page 228 (start of references), so
    # extract_references() then picks up the remaining pages; materializing
    # ``names`` with list() here is what advances the iterator that far.
    names: DataT = list(extract_names(pages))
    refs = extract_references(pages)
    refs_dict = make_references_dict(refs)
    names = lib.clean_text(names)
    names = split_fields(names, refs_dict)
    names = lib.translate_to_db(names, "ZMMU", SOURCE, verbose=False)
    # Manual fixes for OCR/typo variants so names can be matched to the DB.
    conf = lib.NameConfig(
        original_name_fixes={
            "Neomys fodiens brachyotis": "Neomys fodiens brachyotus",
            "Lepus mandshuricus sbph. melanotus": (
                "Lepus mandschuricus subphasa melanonotus"
            ),
            "Lepus timidus transbaikalensis": "Lepus timidus transbaicalicus",
            "Citellus (Urocitellus) eversmanni incertedens": (
                "Citellus (Urocitellus) eversmanni intercedens"
            ),
            "Gulo gulo camtshaticus": "Gulo gulo kamtschaticus",
            "A.[lticola] a.[rgentatus] tarasovi": "Alticola argentatus tarasovi",
            "Microtus oeconomus": "Microtus oeconomus naumovi",
            "Myotis emarginatus turcomanus": "Myotis emarginatus turcomanicus",
        },
        authority_fixes={
            "Vorontsov & Boyeskorov et al.": "Vorontsov, Boyeskorov & Mezhzherin",
            "Lavrenchenko, Likhnova, Baskevich & Bekele": (
                "Lavrenchenko, Likhnova & Baskevich"
            ),
            "Vorontsov, Boyeskorov & Lyapunova et al.": (
                "Vorontsov, Boyeskorov, Lyapunova & Revin"
            ),
        },
    )
    names = lib.associate_names(names, conf, max_distance=2)
    names = lib.write_to_db(names, SOURCE, dry_run=False)
    lib.print_field_counts(names)
    return names
if __name__ == "__main__":
    # Run the full import pipeline and echo each processed name dict.
    for p in main():
        print(p)
| [
"[email protected]"
]
| |
bb090a14d03d9ae34916626a733163fb80a13d07 | 6fd5d30cf21716893388442eb0f9c16e13b91315 | /ABC/146/b.py | c2dd6c1a2676c0ffb2fe3790a90434aca68c06bd | []
| no_license | mgmk2/atcoder-python | 23d45f3195977f1f5839f6a6315e19cac80da2be | beec5857a8df2957ff7b688f717d4253b4196e10 | refs/heads/master | 2021-06-09T20:00:22.500222 | 2021-05-04T15:36:39 | 2021-05-04T15:36:39 | 179,711,330 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | n = int(input())
s = input()
a = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
ans = ''
for i in range(len(s)):
idx = n + a.find(s[i])
ans += a[idx % 26]
print(ans)
| [
"[email protected]"
]
| |
e34f54532e8403ba6405c2e1be24e8b4eb190ba3 | bcc3359817a74c97b8804d415b5b578d03ca4fc9 | /test/assets/classes/message.py | ae9a9cd7c2fcfd518fe6587ade21a5477f78edb7 | []
| no_license | pydget/pyspare | 21c7677e66987ef4625dc7a71f041beb025b0350 | 46ef0e3c4eca1ceb52a86cae3d790483d25b2906 | refs/heads/master | 2023-02-19T19:18:13.743639 | 2021-01-16T01:27:20 | 2021-01-16T01:27:20 | 288,975,432 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | from dataclasses import dataclass
@dataclass
class Message:
    """A routed message: where it came from, where it goes, what it carries."""

    __slots__ = 'origin', 'target', 'body'

    def __init__(self, origin, target, body):
        # @dataclass keeps this hand-written __init__ (it never overwrites
        # methods already defined on the class), and __slots__ keeps the
        # instances compact by dropping the per-instance __dict__.
        self.body = body
        self.target = target
        self.origin = origin
| [
"[email protected]"
]
| |
ad03b895afc6d180aa2358f68de8fcb600e871dd | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /W3Hptw6ieTtrWNw4H_17.py | 8740c16f02a86224026dad019c28269cb2d8f877 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,753 | py | """
The basic **Polybius Square** is a 5x5 square grid with the letters A-Z
written into the grid. "I" and "J" typically share a slot (as there are 26
letters and only 25 slots).
| 1| 2| 3| 4| 5
---|---|---|---|---|---
**1**| A| B| C| D| E
**2**| F| G| H| I/J| K
**3**| L| M| N| O| P
**4**| Q| R| S| T| U
**5**| V| W| X| Y| Z
The **Bifid** cipher uses the Polybius square but adds a layer of complexity.
Start with a secret message. Remove spaces and punctuation.
plaintext = "ikilledmufasa"
Encipher the message using the basic Polybius cipher (see my [previous
challenge](https://edabit.com/challenge/2C3gtb4treAFyWJMg) — right click and
select "open in new tab"), but write the numbers in two rows under the
message, like so:
i| k| i| l| l| e| d| m| u| f| a| s| a
---|---|---|---|---|---|---|---|---|---|---|---|---
2| 2| 2| 3| 3| 1| 1| 3| 4| 2| 1| 4| 1
4| 5| 4| 1| 1| 5| 4| 2| 5| 1| 1| 3| 1
Read off the numbers horizontally, in pairs:
22 23 31 13 42 14 14 54 11 54 25 11 31
Generate the ciphertext by converting these new pairs of numbers into new
letters using the Polybius square.
ciphertext = "ghlcrddyaykal"
Create a function that takes a plaintext or ciphertext, and returns the
corresponding ciphertext or plaintext.
### Examples
bifid("I killed Mufasa!") ➞ "ghlcrddyaykal"
bifid("ghlcrddyaykal") ➞ "ikilledmufasa"
bifid("hi") ➞ "go"
### Notes
N/A
"""
def bifid(text):
    """Encipher or decipher *text* with the Bifid cipher.

    The direction is inferred from the input, matching the original
    behaviour: text containing a space is treated as plaintext and
    enciphered; text without any space is treated as ciphertext and
    deciphered. Non-letters are stripped and the result is lowercase.

    Per the Polybius-square convention described in the module docstring,
    "I" and "J" share a cell, so a "J" in the input is treated as "I"
    (the previous implementation crashed on "J").

    Args:
        text: Plaintext (with spaces) or ciphertext (without spaces).

    Returns:
        The corresponding ciphertext or plaintext: lowercase, letters only.
    """
    # 5x5 Polybius square: A-Z without J, filled row by row.
    letters = [chr(code) for code in range(ord('A'), ord('Z') + 1) if chr(code) != 'J']
    square = [letters[row * 5:row * 5 + 5] for row in range(5)]
    # Map each letter to its 1-based (row, column) coordinates.
    coords = {ch: (r + 1, c + 1) for r, line in enumerate(square) for c, ch in enumerate(line)}
    coords['J'] = coords['I']  # I/J share a cell.

    # Keep letters only, uppercased for square lookups.
    cleaned = [ch for ch in text.upper() if 'A' <= ch <= 'Z']
    rows = ''.join(str(coords[ch][0]) for ch in cleaned)
    cols = ''.join(str(coords[ch][1]) for ch in cleaned)

    if ' ' in text:
        # Encipher: write the coordinates in two rows under the message,
        # read the digits off horizontally in pairs, and convert each
        # (row, column) pair back into a letter.
        digits = rows + cols
        out = ''.join(
            square[int(digits[i]) - 1][int(digits[i + 1]) - 1]
            for i in range(0, len(digits), 2)
        )
    else:
        # Decipher: re-interleave each letter's (row, column) digits,
        # split the stream in half (top row / bottom row), and read the
        # two halves vertically as (row, column) pairs.
        digits = ''.join(r + c for r, c in zip(rows, cols))
        half = len(cleaned)
        out = ''.join(
            square[int(a) - 1][int(b) - 1]
            for a, b in zip(digits[:half], digits[half:])
        )
    return out.lower()
| [
"[email protected]"
]
| |
a3522fca20b8464003183ee290a2778619feb8d8 | cb4db25a0b13f058f1a31b38d80d76a118d1e2dc | /venv/lib/python3.6/site-packages/google/cloud/pubsub_v1/subscriber/policy/thread.py | 39f161a3b93e9a362f65cc6dcd60b839cd2cad34 | [
"MIT"
]
| permissive | Hackaton-Dragons/Never-Boils | 73df2b65f54a77d961ce53dea350b7d2a4261154 | 2d43e6e07fb18409d5a964f44f481d28d2352531 | refs/heads/master | 2020-03-09T20:27:54.554616 | 2018-10-08T05:52:33 | 2018-10-08T05:52:33 | 128,985,616 | 1 | 0 | MIT | 2018-04-15T13:32:45 | 2018-04-10T19:35:32 | Python | UTF-8 | Python | false | false | 12,056 | py | # Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from concurrent import futures
import logging
import sys
import threading
import grpc
from six.moves import queue as queue_mod
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.subscriber import _helper_threads
from google.cloud.pubsub_v1.subscriber.futures import Future
from google.cloud.pubsub_v1.subscriber.policy import base
from google.cloud.pubsub_v1.subscriber.message import Message
# Module-level logger, following the one-logger-per-module convention.
_LOGGER = logging.getLogger(__name__)
# Name given to the helper thread that drains the callback request queue.
_CALLBACK_WORKER_NAME = 'Thread-Consumer-CallbackRequestsWorker'
def _callback_completed(future):
    """Log the outcome of a completed message-processing future.

    Attached as a done-callback to the futures returned when a subscriber
    callback is scheduled on the executor.

    Args:
        future (concurrent.futures.Future): A future returned
            from :meth:`~concurrent.futures.Executor.submit`.
    """
    result = future.result()
    _LOGGER.debug('Result: %s', result)
def _do_nothing_callback(message):
"""Default callback for messages received by subscriber.
Does nothing with the message and returns :data:`None`.
Args:
message (~google.cloud.pubsub_v1.subscriber.message.Message): A
protobuf message returned by the backend and parsed into
our high level message type.
Returns:
NoneType: Always.
"""
return None
class Policy(base.BasePolicy):
    """A consumer class based on :class:`threading.Thread`.

    This consumer handles the connection to the Pub/Sub service and all of
    the concurrency needs.

    Args:
        client (~.pubsub_v1.subscriber.client): The subscriber client used
            to create this instance.
        subscription (str): The name of the subscription. The canonical
            format for this is
            ``projects/{project}/subscriptions/{subscription}``.
        flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow
            control settings.
        executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A
            ThreadPoolExecutor instance, or anything duck-type compatible
            with it.
        queue (~queue.Queue): (Optional.) A Queue instance, appropriate
            for crossing the concurrency boundary implemented by
            ``executor``.
    """

    def __init__(self, client, subscription, flow_control=types.FlowControl(),
                 executor=None, queue=None):
        # NOTE(review): the FlowControl() default is evaluated once at class
        # definition time; safe as long as FlowControl is immutable.
        super(Policy, self).__init__(
            client=client,
            flow_control=flow_control,
            subscription=subscription,
        )
        # Default the callback to a no-op; the **actual** callback is
        # provided by ``.open()``.
        self._callback = _do_nothing_callback
        # Create a queue for keeping track of shared state.
        self._request_queue = self._get_queue(queue)
        # Also maintain an executor.
        self._executor = self._get_executor(executor)
        # The threads created in ``.open()``.
        self._dispatch_thread = None
        self._leases_thread = None

    @staticmethod
    def _get_queue(queue):
        """Gets a queue for the constructor.

        Args:
            queue (Optional[~queue.Queue]): A Queue instance, appropriate
                for crossing the concurrency boundary implemented by
                ``executor``.

        Returns:
            ~queue.Queue: Either ``queue`` if not :data:`None` or a default
            queue.
        """
        if queue is None:
            return queue_mod.Queue()
        else:
            return queue

    @staticmethod
    def _get_executor(executor):
        """Gets an executor for the constructor.

        Args:
            executor (Optional[~concurrent.futures.ThreadPoolExecutor]): A
                ThreadPoolExecutor instance, or anything duck-type compatible
                with it.

        Returns:
            ~concurrent.futures.ThreadPoolExecutor: Either ``executor`` if not
            :data:`None` or a default thread pool executor with 10 workers
            and a prefix (if supported).
        """
        if executor is None:
            executor_kwargs = {}
            # ``thread_name_prefix`` only exists on 2.7 (via backport) and
            # on 3.6+, so pass it conditionally.
            if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6):
                executor_kwargs['thread_name_prefix'] = (
                    'ThreadPoolExecutor-SubscriberPolicy')
            return futures.ThreadPoolExecutor(
                max_workers=10,
                **executor_kwargs
            )
        else:
            return executor

    def close(self):
        """Close the existing connection.

        .. warning::
            This method is not thread-safe. For example, if this method is
            called while another thread is executing :meth:`open`, then the
            policy could end up in an undefined state. The **same** policy
            instance is not intended to be used by multiple workers (though
            each policy instance **does** have a thread-safe private queue).

        Returns:
            ~google.api_core.future.Future: The future that **was** attached
                to the subscription.

        Raises:
            ValueError: If the policy has not been opened yet.
        """
        if self._future is None:
            raise ValueError('This policy has not been opened yet.')

        # Stop consuming messages.
        self._request_queue.put(_helper_threads.STOP)
        self._dispatch_thread.join()  # Wait until stopped.
        self._dispatch_thread = None
        self._consumer.stop_consuming()
        self._leases_thread.join()
        self._leases_thread = None
        self._executor.shutdown()

        # The subscription is closing cleanly; resolve the future if it is not
        # resolved already.
        if not self._future.done():
            self._future.set_result(None)
        future = self._future
        self._future = None
        return future

    def _start_dispatch(self):
        """Start a thread to dispatch requests queued up by callbacks.

        .. note::
            This assumes, but does not check, that ``_dispatch_thread``
            is :data:`None`.

        Spawns a thread to run :meth:`dispatch_callback` and sets the
        "dispatch thread" member on the current policy.
        """
        _LOGGER.debug('Starting callback requests worker.')
        # The worker loops over the request queue until it sees the STOP
        # sentinel (see close()).
        dispatch_worker = _helper_threads.QueueCallbackWorker(
            self._request_queue,
            self.dispatch_callback,
        )
        # Create and start the helper thread.
        thread = threading.Thread(
            name=_CALLBACK_WORKER_NAME,
            target=dispatch_worker,
        )
        thread.daemon = True
        thread.start()
        _LOGGER.debug('Started helper thread %s', thread.name)
        self._dispatch_thread = thread

    def _start_lease_worker(self):
        """Spawn a helper thread that maintains all of leases for this policy.

        .. note::
            This assumes, but does not check, that ``_leases_thread`` is
            :data:`None`.

        Spawns a thread to run :meth:`maintain_leases` and sets the
        "leases thread" member on the current policy.
        """
        _LOGGER.debug('Starting lease maintenance worker.')
        thread = threading.Thread(
            name='Thread-LeaseMaintenance',
            target=self.maintain_leases,
        )
        thread.daemon = True
        thread.start()
        self._leases_thread = thread

    def open(self, callback):
        """Open a streaming pull connection and begin receiving messages.

        .. warning::
            This method is not thread-safe. For example, if this method is
            called while another thread is executing :meth:`close`, then the
            policy could end up in an undefined state. The **same** policy
            instance is not intended to be used by multiple workers (though
            each policy instance **does** have a thread-safe private queue).

        For each message received, the ``callback`` function is fired with
        a :class:`~.pubsub_v1.subscriber.message.Message` as its only
        argument.

        Args:
            callback (Callable): The callback function.

        Returns:
            ~google.api_core.future.Future: A future that provides
                an interface to block on the subscription if desired, and
                handle errors.

        Raises:
            ValueError: If the policy has already been opened.
        """
        if self._future is not None:
            raise ValueError('This policy has already been opened.')

        # Create the Future that this method will return.
        # This future is the main thread's interface to handle exceptions,
        # block on the subscription, etc.
        self._future = Future(policy=self)

        # Start the thread to pass the requests.
        self._callback = callback
        self._start_dispatch()

        # Actually start consuming messages.
        self._consumer.start_consuming(self)
        self._start_lease_worker()

        # Return the future.
        return self._future

    def dispatch_callback(self, action, kwargs):
        """Map the callback request to the appropriate gRPC request.

        Args:
            action (str): The method to be invoked.
            kwargs (Dict[str, Any]): The keyword arguments for the method
                specified by ``action``.

        Raises:
            ValueError: If ``action`` isn't one of the expected actions
                "ack", "drop", "lease", "modify_ack_deadline" or "nack".
        """
        if action == 'ack':
            self.ack(**kwargs)
        elif action == 'drop':
            self.drop(**kwargs)
        elif action == 'lease':
            self.lease(**kwargs)
        elif action == 'modify_ack_deadline':
            self.modify_ack_deadline(**kwargs)
        elif action == 'nack':
            self.nack(**kwargs)
        else:
            raise ValueError(
                'Unexpected action', action,
                'Must be one of "ack", "drop", "lease", '
                '"modify_ack_deadline" or "nack".')

    def on_exception(self, exception):
        """Handle the exception.

        If the exception is one of the retryable exceptions, this will signal
        to the consumer thread that it should "recover" from the failure.

        This will cause the stream to exit when it returns :data:`False`.

        Returns:
            bool: Indicates if the caller should recover or shut down.
            Will be :data:`True` if the ``exception`` is "acceptable", i.e.
            in a list of retryable / idempotent exceptions.
        """
        # If this is in the list of idempotent exceptions, then we want to
        # retry. That entails just returning None.
        if isinstance(exception, self._RETRYABLE_STREAM_ERRORS):
            return True

        # Set any other exception on the future.
        self._future.set_exception(exception)
        return False

    def on_response(self, response):
        """Process all received Pub/Sub messages.

        For each message, schedule a callback with the executor.
        """
        for msg in response.received_messages:
            _LOGGER.debug(
                'Using %s to process new message received:\n%r',
                self._callback, msg)
            message = Message(msg.message, msg.ack_id, self._request_queue)
            # Run the user callback on the executor; the done-callback just
            # logs the result.
            future = self._executor.submit(self._callback, message)
            future.add_done_callback(_callback_completed)
| [
"[email protected]"
]
| |
a44762f7f83ec08c0a592bc170b50259d8bd49e2 | 292417a70e83d33fc4cedaed34d1b8e859ffe1a7 | /market/urls.py | 113708503a9a96ee4ed392f28d9d1321ee1c94c8 | []
| no_license | cooluks2/Niche-market-mine | c739144b61dfecd641f19bfa20439388d9dd562d | 2eacedd83ae3d1690ac56f9ae4089a44737c4771 | refs/heads/master | 2022-12-10T09:45:13.759650 | 2020-08-31T02:52:07 | 2020-08-31T02:52:07 | 289,786,363 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,253 | py | from django.urls import path
from market.views import StoreDV, StoreLV, StoreCreateView, StoreUpdateView, StoreDeleteView, store_download
from market.views import MarketDV, MarketCreateView, MarketUpdateView, MarketDeleteView, market_download
from mysite.views import HomeView
from market.models import Market, Location
app_name = 'market'
urlpatterns = [
path('', HomeView.as_view(), name='home'),
path('<int:pk>/', MarketDV.as_view(), name='market'),
path('market_add/', MarketCreateView.as_view(), name="market_add"),
path('<int:pk>/market_update/', MarketUpdateView.as_view(), name="market_update"),
path('<int:pk>/market_delete/', MarketDeleteView.as_view(), name="market_delete"),
path('market_download/<int:id>', market_download, name="market_download"),
path('store/<int:pk>/', StoreLV.as_view(), name='store'),
path('store/<int:fk>/<int:pk>/', StoreDV.as_view(), name='store_detail'),
path('<int:fk>/store_add/', StoreCreateView.as_view(), name="store_add"),
path('<int:pk>/store_update/', StoreUpdateView.as_view(), name="store_update"),
path('<int:pk>/store_delete/', StoreDeleteView.as_view(), name="store_delete"),
path('store_download/<int:id>', store_download, name="store_download"),
]
| [
"[email protected]"
]
| |
6cb65b44504b20720b6967c08c0fb580dd2850cb | cd6a835b14596620d46236ce2ec8003b42dcd393 | /machina/apps/forum/urls.py | 588a345c6d1914ddacaafe935dbb9bae7b6ff0a3 | []
| no_license | VanHai88/covert-site | bfec3ed75a75f4a29614906d982fd565ac1e011b | 2385ebaf1ed6c0eb42027f6665f545ce60828c12 | refs/heads/master | 2023-06-08T19:43:18.339787 | 2021-06-22T09:28:00 | 2021-06-22T09:28:00 | 379,212,850 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | """
Forum URLs
==========
This module defines URL patterns associated with the django-machina's ``forum`` application.
"""
from django.urls import path
from machina.core.loading import get_class
from machina.core.urls import URLPatternsFactory
class ForumURLPatternsFactory(URLPatternsFactory):
""" Allows to generate the URL patterns of the ``forum`` application. """
app_namespace = 'forum'
index_view = get_class('forum.views', 'IndexView')
forum_view = get_class('forum.views', 'ForumView')
def get_urlpatterns(self):
""" Returns the URL patterns managed by the considered factory / application. """
return [
path('', self.index_view.as_view(), name='index'),
path(
'forum/<str:slug>/<str:uuid>/',
self.forum_view.as_view(),
name='forum',
),
]
urlpatterns_factory = ForumURLPatternsFactory()
| [
"[email protected]"
]
| |
3370e689410d396a827a715f14aedb1803000b7e | f048f66977ebcfd3973f5cb41911e5de8b1bf7f5 | /pullenti/ner/NumberSpellingType.py | 72db57f98484782ba6c07c2f861ed30185173d7c | []
| no_license | AAA1911/PullentiPython | e01223d2d8656a8fbcc0873446a12d7e5c913f4a | f25b228c8eef9b70acb1285f405c976542342319 | refs/heads/master | 2020-12-22T12:56:21.701229 | 2019-12-11T08:34:43 | 2019-12-11T08:34:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | # Copyright (c) 2013, Pullenti. All rights reserved. Non-Commercial Freeware.
# This class is generated using the converter UniSharping (www.unisharping.ru) from Pullenti C#.NET project (www.pullenti.ru).
# See www.pullenti.ru/downloadpage.aspx.
from enum import IntEnum
class NumberSpellingType(IntEnum):
""" Возможные типы написаний """
DIGIT = 0
""" Цифрами """
ROMAN = 1
""" Римскими цифрами """
WORDS = 2
""" Прописью (словами) """
AGE = 3
""" Возраст (летие) """
@classmethod
def has_value(cls, value):
return any(value == item.value for item in cls) | [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.