Dataset schema (column name: type, observed min–max; "nullable" where the source marked ⌀):
- blob_id: string, lengths 40–40
- directory_id: string, lengths 40–40
- path: string, lengths 3–616
- content_id: string, lengths 40–40
- detected_licenses: sequence, lengths 0–112
- license_type: string, 2 classes
- repo_name: string, lengths 5–115
- snapshot_id: string, lengths 40–40
- revision_id: string, lengths 40–40
- branch_name: string, 777 classes
- visit_date: timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38
- revision_date: timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00
- committer_date: timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06
- github_id: int64, 4.92k – 681M, nullable
- star_events_count: int64, 0 – 209k
- fork_events_count: int64, 0 – 110k
- gha_license_id: string, 22 classes
- gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable
- gha_created_at: timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable
- gha_language: string, 149 classes
- src_encoding: string, 26 classes
- language: string, 1 class
- is_vendor: bool, 2 classes
- is_generated: bool, 2 classes
- length_bytes: int64, 3 – 10.2M
- extension: string, 188 classes
- content: string, lengths 3 – 10.2M
- authors: sequence, lengths 1–1
- author_id: string, lengths 1–132
23d44eef4f0999f81021921c669b70a8abf10c82 | daeb851843a55ca3f34008765ebf8ff18c0d1ecd | /LL/page_object.py | b343f01ab97ca8d9b5a5378d41005f0f05b768ae | [] | no_license | Cola1995/Py | f5066df0ef5b60183f5e5e2ec6b77b2be536e7f8 | bab82d8f5aae18e612dbe9bcd38f0d4260463b16 | refs/heads/master | 2020-04-08T06:30:36.212074 | 2018-12-27T06:14:53 | 2018-12-27T06:14:53 | 159,099,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,832 | py | from selenium import webdriver
from selenium.webdriver.common.by import By
import time
class Page(object):
'''Base class for page-object classes to inherit from'''
login_url='http://www.126.com'
def __init__(self,selenium_driver,base_url=login_url):
self.base_url=base_url
self.driver=selenium_driver
self.timeout=30
def on_page(self):
return self.driver.current_url==(self.base_url+self.url)
def _open(self,url):
url=self.base_url+url
self.driver.get(url)
assert self.on_page(), 'Did not land on %s' % url
def open(self):
self._open(self.url)
def find_element(self,*loc):
return self.driver.find_element(*loc)
class LoginPage(Page):
'''Page model for the 126 mail login page'''
url='/'
# locators
username_loc=(By.ID,"idInput")
password_loc=(By.ID,"pwdInput")
submit_loc=(By.ID,"loginBtn")
def type_username(self,username):
self.find_element(*self.username_loc).send_keys(username)
def type_password(self,password):
self.find_element(*self.password_loc).send_keys(password)
def submit(self):
self.find_element(*self.submit_loc).click()
def test_user_login(driver,username,password):
login_page=LoginPage(driver)
login_page.open()
login_page.type_username(username)
login_page.type_password(password)
login_page.submit()
def main():
try:
driver=webdriver.Chrome()
username='testingwtb'
password='a123456'
test_user_login(driver,username,password)
time.sleep(3)
text=driver.find_element_by_xpath("//span[@id='spnUid']").text
assert(text=='[email protected]'),"Username mismatch, login failed"
finally:
driver.close()
if __name__=='__main__':
main()
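# --- Editor's sketch (not part of the original test): the fixed time.sleep(3)
# above is fragile. Selenium also offers explicit waits that poll until a
# condition holds; a minimal alternative for reading the uid span, reusing
# the same locator as the test above:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def read_uid(driver, timeout=10):
    element = WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.XPATH, "//span[@id='spnUid']")))
    return element.text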
| [
"[email protected]"
] | |
7e27e21ec2be3f0cfe1472b07ba46412a460a797 | 759f52976ad2cd9236da561ca254e11e08003487 | /part7/ex45/v2-replace-config/replacement_result.py | 4adea4014216b0100cfdcd8bde50cb4b26d7dd94 | [] | no_license | mbaeumer/fiftyseven | 57b571c3e09640a2ab0ed41e5d06643c12b48001 | d79b603d5b37bf1f4127d9253f8526ea3897dc08 | refs/heads/master | 2020-06-10T20:52:25.311992 | 2017-11-15T18:28:38 | 2017-11-15T18:28:38 | 75,877,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | #!/usr/bin/python
class ReplacementResult:
def __init__(self, config, content):
self.config = config
self.content = content
| [
"[email protected]"
] | |
7a3e609fcf05b7eca4cfe7b4769b1f7eb50fe39a | e3b5698bc6a63551e0f30dc958428f2dd548eada | /homework/webtoon/migrations/0002_auto_20180215_1109.py | a86e1b9aba0e782e7cc0e3731216de107e578db3 | [] | no_license | standbyme227/stopout | 472021047af57a23c1a6c73db05d7c989e5baa16 | c33981e9ca143cdf6fd9c93664d90a50a07b45a3 | refs/heads/master | 2021-04-29T20:51:33.550436 | 2018-02-16T06:47:04 | 2018-02-16T06:47:04 | 121,605,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | # Generated by Django 2.0.2 on 2018-02-15 11:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('webtoon', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='webtoon',
name='img_url',
field=models.CharField(blank=True, max_length=100),
),
migrations.AlterField(
model_name='webtoon',
name='week_webtoon',
field=models.CharField(blank=True, max_length=100),
),
]
| [
"[email protected]"
] | |
f7097802ca48e1dc7adb1d85d44f5f6130f7a905 | 85554f18eb2f4db5fa57326e55c84c91abd97fb2 | /graphlayer/core.py | ba96c502660606fdb0bb2707941c6e4d2713b572 | [
"BSD-2-Clause"
] | permissive | mwilliamson/python-graphlayer | f6e58bd1e812aabb365a569d579ebcb5f8aaa51c | 7fe773fb2e07028d5cfc1f30523ad17ba405c689 | refs/heads/master | 2023-05-12T03:37:36.607591 | 2023-01-14T21:26:33 | 2023-01-14T21:26:33 | 144,635,452 | 28 | 7 | BSD-2-Clause | 2023-01-14T21:27:20 | 2018-08-13T21:25:50 | Python | UTF-8 | Python | false | false | 2,117 | py | from . import iterables
def create_graph(resolvers):
return define_graph(resolvers).create_graph({})
def define_graph(resolvers):
return GraphDefinition(resolvers)
class GraphDefinition(object):
def __init__(self, resolvers):
self._resolvers = iterables.to_dict(
(resolver.type, resolver)
for resolver in _flatten(resolvers)
)
def create_graph(self, dependencies):
return Graph(self._resolvers, dependencies)
class Graph(object):
def __init__(self, resolvers, dependencies):
self._resolvers = resolvers
self._injector = Injector(dependencies)
def resolve(self, *args, type=None):
if type is None:
type = args[0].type
resolver = self._resolvers.get(type)
if resolver is None:
raise GraphError("could not find resolver for query of type: {}".format(type))
else:
return self._injector.call_with_dependencies(resolver, self, *args)
class Injector(object):
def __init__(self, dependencies):
self._dependencies = dependencies.copy()
self._dependencies[Injector] = self
def get(self, key):
return self._dependencies[key]
def call_with_dependencies(self, func, *args, **kwargs):
dependencies = getattr(func, "dependencies", dict())
dependency_kwargs = iterables.to_dict(
(arg_name, self.get(dependency_key))
for arg_name, dependency_key in dependencies.items()
)
return func(*args, **kwargs, **dependency_kwargs)
def _flatten(value):
if isinstance(value, (list, tuple)):
return [
subelement
for element in value
for subelement in _flatten(element)
]
else:
return [value]
def resolver(type):
def register_resolver(func):
func.type = type
return func
return register_resolver
def dependencies(**kwargs):
def register_dependency(func):
func.dependencies = kwargs
return func
return register_dependency
class GraphError(Exception):
pass
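# --- Editor's sketch (not part of the original module): how the pieces above
# compose. `resolver` tags a function with the query type it answers and
# `dependencies` maps keyword arguments to injector keys; the names
# "greeting" and "user_name" are invented for this illustration.
if __name__ == "__main__":
    @resolver("greeting")
    @dependencies(name="user_name")
    def resolve_greeting(graph, query, *, name):
        # Graph.resolve passes the graph first, then the query arguments;
        # `name` is supplied by Injector.call_with_dependencies.
        return "hello, " + name

    demo = define_graph([resolve_greeting]).create_graph({"user_name": "ada"})
    print(demo.resolve("any query", type="greeting"))  # -> hello, ada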
| [
"[email protected]"
] | |
7b15c8ad37a0ad03a79f07873b58187b10f10f51 | 38d93c5fd72fee380ec431b2ca60a069eef8579d | /Baekjoon,SWEA, etc/SWEA/SWEA2105.py | 4e3c17854fc9101aa88779572cadeffd409f6287 | [] | no_license | whgusdn321/Competitive-programming | 5d1b681f5bee90de5678219d91cd0fa764476ddd | 3ff8e6b1d2facd31a8210eddeef851ffd0dce02a | refs/heads/master | 2023-01-01T01:34:22.936373 | 2020-10-24T11:05:08 | 2020-10-24T11:05:08 | 299,181,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,511 | py | """
This problem is quite complicated. It took me about three hours to solve, and it is very much worth solving again.
There is a lot to think through, which is what makes the problem itself complex.
After finishing it, I summarized once more the things you have to reason about in order to solve it:
1. Which directions do we move in? -> Basic DFS/BFS problems almost always use only up/down/left/right, so we are used to that. But in this problem
the movement directions are fundamentally different, so the first task is to implement the movement properly (tracing a rhombus, with the DFS arranged so the branches merge later).
1-1) And while moving, leave the door open (return, then call the function again on the next line).
2. So what does go(specific_i, specific_j, dy, dx, visited, sofar) actually do when it is called?
-> Link it to the picture (maap) and build a mental image of what this function does when called, i.e. what it does at one specific moment.
-> This function appends the next cell to visited if that cell can be reached (inside the grid and not a repeat visit).
3. In which cases does the termination condition fire? And on termination each call pops from visited and sofar; study carefully how the visited and sofar arrays change as the functions return.
-> Personally this part was the hardest. I already knew the usual DFS pattern that visits every possible path with the door left open: append to visited before moving and pop at the return point.
Working from that intuition and prior knowledge I wrote the code and somehow got it accepted, but I should reinforce my understanding of this part.
"""
def go(i, j, dy, dx, visited, sofar):
# visited
global N, maap, results
if (dy, dx) == (1, 1):
next1 = (i+1, j+1)
next2 = (i+1, j-1)
if 0<=next1[0]<N and 0<=next1[1]<N and maap[next1[0]][next1[1]] not in visited:
visited.append(maap[next1[0]][next1[1]])
sofar.append('SE')
go(next1[0], next1[1], 1,1, visited, sofar)
sofar.pop()
visited.pop()
if visited and 0 <= next2[0] < N and 0 <= next2[1] <N and maap[next2[0]][next2[1]] not in visited:
visited.append(maap[next2[0]][next2[1]])
sofar.append('SW')
go(next2[0], next2[1], 1, -1, visited, sofar)
sofar.pop()
visited.pop()
elif (dy, dx) == (1, -1):
next1 = (i+1, j-1)
next2 = (i-1, j-1)
if 0<=next1[0]<N and 0<=next1[1]<N and maap[next1[0]][next1[1]] not in visited:
visited.append(maap[next1[0]][next1[1]])
sofar.append('SW')
go(next1[0], next1[1], 1, -1, visited, sofar)
sofar.pop()
visited.pop()
if 0<=next2[0]<N and 0<=next2[1] <N and maap[next2[0]][next2[1]] not in visited:
go(i, j, -1, -1, visited, sofar)
elif (dy, dx) == (-1, -1):
boool = True
next = [i, j]
_N = len(visited)
for i in range(len(sofar)):
if sofar[i] == 'SE':
next = [next[0]-1, next[1]-1]
if 0<=next[0]<N and 0<=next[1]<N and maap[next[0]][next[1]] not in visited:
visited.append(maap[next[0]][next[1]])
else:
boool = False
break
else:
next = [next[0]-1, next[1]+1]
if 0<=next[0]<N and 0<=next[1]<N and maap[next[0]][next[1]] not in visited:
visited.append(maap[next[0]][next[1]])
else:
boool = False
break
if boool:
results.append(len(visited))
while len(visited) != _N:
visited.pop()
return
test_cases = int(input())
for test_case in range(1, test_cases + 1):
N = int(input())
maap = []
for _ in range(N):
temp = [int(a) for a in input().split()]
maap.append(temp)
results = []
for y in range(N):
for x in range(N):
go(y, x, 1, 1, [], [])
if results:
print('#{} {}'.format(test_case, max(results)))
else:
print('#{} {}'.format(test_case, -1))
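# --- Editor's sketch (not part of the original solution): the "leave the
# door open" DFS pattern described in point 3 of the notes above, in generic
# form (neighbors() is a hypothetical adjacency function):
# def dfs(node, visited):
#     visited.append(node)        # mark before descending
#     for nxt in neighbors(node):
#         if nxt not in visited:
#             dfs(nxt, visited)
#     visited.pop()               # unmark on return so other branches can revisit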
| [
"[email protected]"
] | |
6f408f78f4985ecc1686c686582bc51783bc1df1 | e81e09cdc62dcd196020dc01b07adc24faaee582 | /czsc/trader.py | b12513bed9ae6a6c6df4c34877ae940e6a8b09a8 | [
"MIT"
] | permissive | dingfengqian/czsc | 41cbd4155245ae8a50b51af485ae5516fea94156 | 00bfd91f0a0e27533cdd45b83b2fec197056b4b3 | refs/heads/master | 2023-02-27T18:58:48.810874 | 2021-01-28T11:48:56 | 2021-01-28T11:48:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,444 | py | # coding: utf-8
import pandas as pd
from datetime import datetime
from .factors import KlineGeneratorBy1Min, CzscFactors
from .data.jq import get_kline
from .enum import FdNine, FdFive, FdSeven, FdThree, Factors
class CzscTrader:
"""缠中说禅股票 选股/择时"""
def __init__(self, symbol, max_count=1000, end_date=None):
"""
:param symbol:
"""
self.symbol = symbol
if end_date:
self.end_date = pd.to_datetime(end_date)
else:
self.end_date = datetime.now()
self.max_count = max_count
self.__generate_factors()
self.freqs = ['1分钟', '5分钟', '30分钟', '日线']
def __generate_factors(self):
symbol = self.symbol
max_count = self.max_count
end_date = self.end_date
kg = KlineGeneratorBy1Min(max_count=max_count*2, freqs=['1分钟', '5分钟', '15分钟', '30分钟', '日线'])
k1min = get_kline(symbol, end_date=end_date, freq="1min", count=max_count)
k5min = get_kline(symbol, end_date=end_date, freq="5min", count=max_count)
k15min = get_kline(symbol, end_date=end_date, freq="15min", count=max_count)
k30min = get_kline(symbol, end_date=end_date, freq="30min", count=max_count)
kd = get_kline(symbol, end_date=end_date, freq="D", count=max_count)
kg.init_kline("1分钟", k1min)
kg.init_kline("5分钟", k5min)
kg.init_kline("15分钟", k15min)
kg.init_kline("30分钟", k30min)
kg.init_kline("日线", kd)
kf = CzscFactors(kg, max_count=max_count)
self.kf = kf
self.s = kf.s
self.end_dt = self.kf.end_dt
def run_selector(self):
"""执行选股:优先输出大级别的机会"""
s = self.s
if s['日线右侧多头因子'] in [Factors.DLA1.value, Factors.DLA2.value, Factors.DLA3.value, Factors.DLA4.value]:
return s['日线右侧多头因子']
ka = self.kf.kas['30分钟']
max_high = max([x.high for x in ka.bi_list[-10:]])
# third_bs = ["三买A1", "三买B1", "三买C1", "三买D1"]
if "三买" in s['30分钟_第N笔的五笔形态']:
if s['1分钟_第N笔的七笔形态'] == FdSeven.L3A1.value:
return "30分钟第三买点且BaA式右侧底A"
elif max_high == ka.bi_list[-2].high:
return "30分钟第三买点且第4笔创近9笔新高"
else:
return "30分钟第三买点"
# nine_values = [x.value for x in FdNine.__members__.values() if x.name[0] in ["L", "S"]]
# seven_values = [x.value for x in FdSeven.__members__.values() if x.name[0] in ["L", "S"]]
# if s['30分钟_第N笔的七笔形态'] in seven_values:
# return "30分钟_第N笔的七笔形态_{}".format(s['30分钟_第N笔的七笔形态'])
# if s['30分钟_第N笔的九笔形态'] in nine_values:
# return "30分钟_第N笔的九笔形态_{}".format(s['30分钟_第N笔的九笔形态'])
return "other"
def run_history(self):
"""对已经完成的三买走势进行研究"""
s = self.s
if "三买" in s['30分钟_第N-2笔的五笔形态']:
return "30分钟第N-2笔第三买点"
return "other"
def take_snapshot(self, file_html, width="1400px", height="680px"):
self.kf.take_snapshot(file_html, width, height)
| [
"[email protected]"
] | |
a4ca3c5c84201afc3162db257b96a975c1498516 | 85f5dff291acf1fe7ab59ca574ea9f4f45c33e3b | /api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py | abfbe322f1a66fb00b4864d1b01ac60ec64e5721 | [
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | sadnub/tacticalrmm | a4ecaf994abe39244a6d75ed2166222abb00d4f4 | 0af95aa9b1084973642da80e9b01a18dcacec74a | refs/heads/develop | 2023-08-30T16:48:33.504137 | 2023-04-10T22:57:44 | 2023-04-10T22:57:44 | 243,405,684 | 0 | 2 | MIT | 2020-09-08T13:03:30 | 2020-02-27T01:43:56 | Python | UTF-8 | Python | false | false | 393 | py | # Generated by Django 3.1.4 on 2021-01-10 05:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("checks", "0012_auto_20210110_0503"),
]
operations = [
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(null=True),
),
]
| [
"[email protected]"
] | |
616e64a571f1f9e005417d5543fe7d7c7e3e386b | 47e819cb96e0e9f45419960d30753f027047c224 | /stockSpider/stockSpider/spiders/stockSpider_Mysql.py | 07da58c3433aa9dc81c8f259d7e65b28dc803254 | [] | no_license | 1141938529/ClassExercises | 3600d75aa559b6938f3efb5831dbec803489cac0 | 0082e81a6cded1c8332c4a8d79a4104bfd8ab1c9 | refs/heads/master | 2021-05-08T16:38:30.391977 | 2018-02-04T13:10:45 | 2018-02-04T13:10:45 | 120,165,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,442 | py | import scrapy
import time
from stockSpider.items import stockItem
class StockSpider(scrapy.Spider):
name = 'stockspider2'
start_urls = ['http://stock.10jqka.com.cn/']
stock_data = stockItem()
custom_settings = {
'ITEM_PIPELINES': {'stockSpider.pipelines.StockPipeline': 100, },
}
def parse(self, response):
stock_list = response.xpath("//div[@id='rzrq']//table[@class='m-table']/tbody/tr/td[2]/a")
for stock in stock_list:
stock_name = stock.xpath("./text()").extract()[0]
link = stock.xpath("./@href").extract()[0]
stock_id = link.split('/')[-2]
# print(stock_name + '===============' + link)
# with open("D://PyDownload//stockdata//" + stock_name + '.txt', mode='w', encoding='utf8'):
# pass
yield scrapy.Request(url=link,
callback=self.get_stock_data,
meta={'stock_name': stock_name, 'base_url': link, 'index': 2})
pass
def get_stock_data(self, response):
print(response.url)
time.sleep(3)
data_table = response.xpath("//table[@class='m-table']/tbody/tr")
# stock_id = response.url.split('/')[6]
stock_name = response.meta['stock_name']
for row in data_table:
data_list = row.xpath("./td/text()").extract()
self.stock_data['xuhao'] = data_list[0].strip()
self.stock_data['jysj'] = data_list[1].strip()
self.stock_data['rz_ye'] = data_list[2].strip()
self.stock_data['rz_mr'] = data_list[3].strip()
self.stock_data['rz_ch'] = data_list[4].strip()
self.stock_data['rz_jmr'] = data_list[5].strip()
self.stock_data['rq_yl'] = data_list[6].strip()
self.stock_data['rq_mc'] = data_list[7].strip()
self.stock_data['rq_ch'] = data_list[8].strip()
self.stock_data['rq_jmc'] = data_list[9].strip()
self.stock_data['rzqye'] = data_list[10].strip()
yield self.stock_data
index = response.meta['index']
if index > 4:
return
url_str = response.meta['base_url'] + "order/desc/page/" + str(index) + "/ajax/1/"
yield scrapy.Request(url=url_str, callback=self.get_stock_data, meta={
'stock_name': stock_name, 'base_url': response.meta['base_url'], 'index': index + 1
})
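# --- Editor's note (not part of the original spider): self.stock_data is a
# single shared stockItem that is mutated and re-yielded for every table row,
# so any pipeline or exporter that defers processing may see later rows
# overwrite earlier ones. A hedged per-row sketch (field_names is a
# hypothetical list of the item keys used above, in column order):
# row_item = stockItem()
# for key, value in zip(field_names, data_list):
#     row_item[key] = value.strip()
# yield row_item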
| [
"[email protected]"
] | |
348002ee97e41ba31b8e636ed2d20c5119e807d7 | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/vm/aaz/latest/sig/_list.py | e71eb6c7d1e9f264b494785f90083e5b52068ccc | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 16,595 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"sig list",
)
class List(AAZCommand):
"""List galleries under a resource group.
"""
_aaz_info = {
"version": "2021-10-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/providers/microsoft.compute/galleries", "2021-10-01"],
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/galleries", "2021-10-01"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
return self.build_paging(self._execute_operations, self._output)
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.resource_group = AAZResourceGroupNameArg()
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
condition_0 = has_value(self.ctx.subscription_id) and has_value(self.ctx.args.resource_group) is not True
condition_1 = has_value(self.ctx.args.resource_group) and has_value(self.ctx.subscription_id)
if condition_0:
self.GalleriesList(ctx=self.ctx)()
if condition_1:
self.GalleriesListByResourceGroup(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True)
next_link = self.deserialize_output(self.ctx.vars.instance.next_link)
return result, next_link
class GalleriesList(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/providers/Microsoft.Compute/galleries",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2021-10-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.next_link = AAZStrType(
serialized_name="nextLink",
)
_schema_on_200.value = AAZListType(
flags={"required": True},
)
value = cls._schema_on_200.value
value.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element
_element.id = AAZStrType(
flags={"read_only": True},
)
_element.location = AAZStrType(
flags={"required": True},
)
_element.name = AAZStrType(
flags={"read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.tags = AAZDictType()
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = cls._schema_on_200.value.Element.properties
properties.description = AAZStrType()
properties.identifier = AAZObjectType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.sharing_profile = AAZObjectType(
serialized_name="sharingProfile",
)
properties.sharing_status = AAZObjectType(
serialized_name="sharingStatus",
)
properties.soft_delete_policy = AAZObjectType(
serialized_name="softDeletePolicy",
)
identifier = cls._schema_on_200.value.Element.properties.identifier
identifier.unique_name = AAZStrType(
serialized_name="uniqueName",
flags={"read_only": True},
)
sharing_profile = cls._schema_on_200.value.Element.properties.sharing_profile
sharing_profile.community_gallery_info = AAZObjectType(
serialized_name="communityGalleryInfo",
)
sharing_profile.groups = AAZListType(
flags={"read_only": True},
)
sharing_profile.permissions = AAZStrType()
community_gallery_info = cls._schema_on_200.value.Element.properties.sharing_profile.community_gallery_info
community_gallery_info.community_gallery_enabled = AAZBoolType(
serialized_name="communityGalleryEnabled",
flags={"read_only": True},
)
community_gallery_info.eula = AAZStrType()
community_gallery_info.public_name_prefix = AAZStrType(
serialized_name="publicNamePrefix",
)
community_gallery_info.public_names = AAZListType(
serialized_name="publicNames",
flags={"read_only": True},
)
community_gallery_info.publisher_contact = AAZStrType(
serialized_name="publisherContact",
)
community_gallery_info.publisher_uri = AAZStrType(
serialized_name="publisherUri",
)
public_names = cls._schema_on_200.value.Element.properties.sharing_profile.community_gallery_info.public_names
public_names.Element = AAZStrType()
groups = cls._schema_on_200.value.Element.properties.sharing_profile.groups
groups.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element.properties.sharing_profile.groups.Element
_element.ids = AAZListType()
_element.type = AAZStrType()
ids = cls._schema_on_200.value.Element.properties.sharing_profile.groups.Element.ids
ids.Element = AAZStrType()
sharing_status = cls._schema_on_200.value.Element.properties.sharing_status
sharing_status.aggregated_state = AAZStrType(
serialized_name="aggregatedState",
flags={"read_only": True},
)
sharing_status.summary = AAZListType()
summary = cls._schema_on_200.value.Element.properties.sharing_status.summary
summary.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element.properties.sharing_status.summary.Element
_element.details = AAZStrType()
_element.region = AAZStrType()
_element.state = AAZStrType(
flags={"read_only": True},
)
soft_delete_policy = cls._schema_on_200.value.Element.properties.soft_delete_policy
soft_delete_policy.is_soft_delete_enabled = AAZBoolType(
serialized_name="isSoftDeleteEnabled",
)
tags = cls._schema_on_200.value.Element.tags
tags.Element = AAZStrType()
return cls._schema_on_200
class GalleriesListByResourceGroup(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2021-10-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.next_link = AAZStrType(
serialized_name="nextLink",
)
_schema_on_200.value = AAZListType(
flags={"required": True},
)
value = cls._schema_on_200.value
value.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element
_element.id = AAZStrType(
flags={"read_only": True},
)
_element.location = AAZStrType(
flags={"required": True},
)
_element.name = AAZStrType(
flags={"read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.tags = AAZDictType()
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = cls._schema_on_200.value.Element.properties
properties.description = AAZStrType()
properties.identifier = AAZObjectType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.sharing_profile = AAZObjectType(
serialized_name="sharingProfile",
)
properties.sharing_status = AAZObjectType(
serialized_name="sharingStatus",
)
properties.soft_delete_policy = AAZObjectType(
serialized_name="softDeletePolicy",
)
identifier = cls._schema_on_200.value.Element.properties.identifier
identifier.unique_name = AAZStrType(
serialized_name="uniqueName",
flags={"read_only": True},
)
sharing_profile = cls._schema_on_200.value.Element.properties.sharing_profile
sharing_profile.community_gallery_info = AAZObjectType(
serialized_name="communityGalleryInfo",
)
sharing_profile.groups = AAZListType(
flags={"read_only": True},
)
sharing_profile.permissions = AAZStrType()
community_gallery_info = cls._schema_on_200.value.Element.properties.sharing_profile.community_gallery_info
community_gallery_info.community_gallery_enabled = AAZBoolType(
serialized_name="communityGalleryEnabled",
flags={"read_only": True},
)
community_gallery_info.eula = AAZStrType()
community_gallery_info.public_name_prefix = AAZStrType(
serialized_name="publicNamePrefix",
)
community_gallery_info.public_names = AAZListType(
serialized_name="publicNames",
flags={"read_only": True},
)
community_gallery_info.publisher_contact = AAZStrType(
serialized_name="publisherContact",
)
community_gallery_info.publisher_uri = AAZStrType(
serialized_name="publisherUri",
)
public_names = cls._schema_on_200.value.Element.properties.sharing_profile.community_gallery_info.public_names
public_names.Element = AAZStrType()
groups = cls._schema_on_200.value.Element.properties.sharing_profile.groups
groups.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element.properties.sharing_profile.groups.Element
_element.ids = AAZListType()
_element.type = AAZStrType()
ids = cls._schema_on_200.value.Element.properties.sharing_profile.groups.Element.ids
ids.Element = AAZStrType()
sharing_status = cls._schema_on_200.value.Element.properties.sharing_status
sharing_status.aggregated_state = AAZStrType(
serialized_name="aggregatedState",
flags={"read_only": True},
)
sharing_status.summary = AAZListType()
summary = cls._schema_on_200.value.Element.properties.sharing_status.summary
summary.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element.properties.sharing_status.summary.Element
_element.details = AAZStrType()
_element.region = AAZStrType()
_element.state = AAZStrType(
flags={"read_only": True},
)
soft_delete_policy = cls._schema_on_200.value.Element.properties.soft_delete_policy
soft_delete_policy.is_soft_delete_enabled = AAZBoolType(
serialized_name="isSoftDeleteEnabled",
)
tags = cls._schema_on_200.value.Element.tags
tags.Element = AAZStrType()
return cls._schema_on_200
class _ListHelper:
"""Helper class for List"""
__all__ = ["List"]
| [
"[email protected]"
] | |
ffc0214e985810cf932673154e187b4d8ef072ba | 060ce17de7b5cdbd5f7064d1fceb4ded17a23649 | /fn_remedy/tests/mocks/datatable_mock.py | 6afc05d176fb51eb4f60e7d1d4634a555752613f | [
"MIT"
] | permissive | ibmresilient/resilient-community-apps | 74bbd770062a22801cef585d4415c29cbb4d34e2 | 6878c78b94eeca407998a41ce8db2cc00f2b6758 | refs/heads/main | 2023-06-26T20:47:15.059297 | 2023-06-23T16:33:58 | 2023-06-23T16:33:58 | 101,410,006 | 81 | 107 | MIT | 2023-03-29T20:40:31 | 2017-08-25T14:07:33 | Python | UTF-8 | Python | false | false | 3,534 | py | # (c) Copyright IBM Corp. 2021. All Rights Reserved.
from pytest_resilient_circuits import BasicResilientMock, resilient_endpoint
import requests_mock
import json
import six
class DTResilientMock(BasicResilientMock):
"""DTResilientMock a Mock class which inherits the base
endpoints and adds mock endpoints for the DataTable object
for testing.
:param BasicResilientMock: A mock object which covers some of the most common endpoints, this class inherits from it to avoid you needing to use it seperately
:type BasicResilientMock: object
"""
mock_data_table_rows = [
{
"dt_col_id": 3001,
"dt_col_name": "Joe Blogs",
"dt_col_email": "[email protected]",
"dt_col_status": "In Progress"
},
{
"dt_col_id": 3002,
"dt_col_name": "Mary Blogs",
"dt_col_email": "[email protected]",
"dt_col_status": "In Progress"
},
{
"dt_col_id": 3003,
"dt_col_name": "Mary Blogs",
"dt_col_email": "[email protected]",
"dt_col_status": "Active"
}
]
mock_data_table_updated_rows = [{
"dt_col_id": 3002,
"dt_col_name": "Mary Blogs",
"dt_col_email": "[email protected]",
"dt_col_status": "Complete"
}]
mock_success_delete = {
'message': None,
'hints': [],
'success': True,
'title': None
}
@staticmethod
def format_datatable_row(row, row_id):
formatted_row = {}
for key, value in row.items():
formatted_row[key] = {
"row_id": row_id,
"id": key,
"value": value
}
return {"id": row_id, "cells": formatted_row}
@staticmethod
def get_datatable_rows(rows):
row_id = 0
return_rows = []
for row in rows:
row_id += 1
return_rows.append(
DTResilientMock.format_datatable_row(row, row_id))
return return_rows
@resilient_endpoint("GET", r"/incidents/[0-9]+/table_data/mock_data_table\?handle_format=names$")
def mock_datatable_get(self, request):
""" Handle GET request for mock_data_table """
data = {"rows": DTResilientMock.get_datatable_rows(
self.mock_data_table_rows)}
return requests_mock.create_response(request,
status_code=200,
content=six.b(json.dumps(data)))
@resilient_endpoint("DELETE", r"/incidents/[0-9]+/table_data/mock_data_table/row_data/[0-9]\?handle_format=names$")
def mock_datatable_delete_row(self, request):
""" Handle DELETE request for mock_data_table """
data = self.mock_success_delete
return requests_mock.create_response(request,
status_code=200,
content=six.b(json.dumps(data)))
@resilient_endpoint("PUT", r"/incidents/[0-9]+/table_data/mock_data_table/row_data/2\?handle_format=names$")
def mock_datatable_put(self, request):
""" Handle PUT request for mock_data_table """
data = DTResilientMock.get_datatable_rows(
self.mock_data_table_updated_rows)[0]
return requests_mock.create_response(request,
status_code=200,
content=six.b(json.dumps(data)))
| [
"[email protected]"
] | |
3fa858904b5ef254d2669f70c272e376e8302d88 | cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1 | /xlsxwriter/test/comparison/test_print_options02.py | 0c43e2d0203b0635a207340a0ab2f4eec5d330e6 | [
"BSD-2-Clause"
] | permissive | glasah/XlsxWriter | bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec | 1e8aaeb03000dc2f294ccb89b33806ac40dabc13 | refs/heads/main | 2023-09-05T03:03:53.857387 | 2021-11-01T07:35:46 | 2021-11-01T07:35:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,175 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('print_options02.xlsx')
self.ignore_files = ['xl/printerSettings/printerSettings1.bin',
'xl/worksheets/_rels/sheet1.xml.rels']
self.ignore_elements = {'[Content_Types].xml': ['<Default Extension="bin"'],
'xl/worksheets/sheet1.xml': ['<pageMargins', '<pageSetup']}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with print options."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.center_horizontally()
worksheet.write('A1', 'Foo')
workbook.close()
self.assertExcelEqual()
| [
"[email protected]"
] | |
c7b2ada29b4f10fd565b5a55c71e32c9fae15022 | 536584e323161a97db79453b1aa192c89979a755 | /tests/fixtures/defxmlschema/chapter21/example21083.py | 7c2028b325e23a198f25c7a11efe69dc5736746a | [
"MIT"
] | permissive | brunato/xsdata | c792ae2749afb4ac5704f77b138f0f871002100c | dfa84cfd6be5373c929f5bc4178fe1fb7b7d2e3b | refs/heads/master | 2022-04-13T09:59:57.008871 | 2020-04-04T09:01:42 | 2020-04-04T09:06:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | from dataclasses import dataclass, field
from typing import Optional
@dataclass
class CustomerType:
"""
:ivar name:
"""
name: Optional[str] = field(
default=None,
metadata=dict(
name="name",
type="Element",
namespace="http://datypic.com/all",
required=True
)
)
| [
"[email protected]"
] | |
3c56f6119b20f0df091f3ba81613185aebbd9d45 | c54a6b935007f71c7e5fba8caf590d948cfa65cf | /contributors/models.py | e601c727116d75ebc1ba8892eaa77e69cb6c65a7 | [] | no_license | nescode/contributor | 6896de8fb0edaf8c1054e8a49958d3373f9aacec | 18383e86309e5f1db221cda5d09b02bf98bad623 | refs/heads/master | 2023-08-07T02:40:18.214732 | 2020-07-07T06:44:22 | 2020-07-07T06:44:22 | 262,589,144 | 0 | 3 | null | 2021-09-22T18:59:30 | 2020-05-09T14:31:18 | Python | UTF-8 | Python | false | false | 601 | py | from django.db import models
ROLE_CHOICES = (
('developer', 'developer'),
('designer', ' designer'),
('documentation team', 'documentation team'),
)
class CreateContributor(models.Model):
name = models.CharField(max_length=255)
email = models.EmailField(max_length=255)
phone = models.CharField(max_length=13)
resume = models.FileField(upload_to='resumes')
contributed = models.BooleanField(default=False)
role = models.CharField(choices=ROLE_CHOICES, max_length=25)
about = models.CharField(max_length=255)
def __str__(self):
return self.name
| [
"[email protected]"
] | |
744205388ea80c3088f6f57e7efd3534b2f233ea | 05a7aba91ba6f1a774bd21d0d4085b02ecaee189 | /Community/service_point_watcher/sp_watcher.py | 4fab3b4d531afbd4a94cbe763f7cd63b28ac9eb7 | [
"Apache-2.0",
"LicenseRef-scancode-commercial-license"
] | permissive | hungtranusa/gateway-workflows | 568dcb54793fef812ebc2ee0014e3d0f50032ba5 | 379a5d323818befcfe5d2764f93db5b41b9b9884 | refs/heads/master | 2020-12-02T23:07:02.248091 | 2019-12-20T13:03:17 | 2019-12-20T13:03:17 | 231,142,780 | 0 | 0 | Apache-2.0 | 2019-12-31T20:20:45 | 2019-12-31T20:20:44 | null | UTF-8 | Python | false | false | 11,641 | py | # Copyright 2019 BlueCat Networks (USA) Inc. and its affiliates
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# By: Akira Goto ([email protected])
# Date: 2019-08-28
# Gateway Version: 19.5.1
# Description: Service Point Watcher sp_watcher.py
import os
import sys
import pytz
import json
from datetime import datetime, timedelta
from pytz import timezone
from threading import Lock
from apscheduler.schedulers.background import BackgroundScheduler
from dnsedge.edgeapi import EdgeAPI
from .snmp_trap_sender import send_status_notification, send_pulling_stopped_notification
class SPWatcherException(Exception): pass
class SPWatcher(object):
_unique_instance = None
_lock = Lock()
_service_points_file = os.path.dirname(os.path.abspath(__file__)) + '/service_points.json'
_config_file = os.path.dirname(os.path.abspath(__file__)) + '/config.json'
@classmethod
def __internal_new__(cls):
return super().__new__(cls)
@classmethod
def get_instance(cls, debug=False):
if cls._unique_instance is None:
with cls._lock:
if cls._unique_instance is None:
cls._unique_instance = cls.__internal_new__()
cls._unique_instance._debug = debug
cls._unique_instance._scheduler = None
cls._unique_instance._job = None
cls._unique_instance._service_points = []
cls._unique_instance._load()
return cls._unique_instance
def _load(self):
with open(SPWatcher._service_points_file) as f:
self._service_points = json.load(f)
with open(SPWatcher._config_file) as f:
self._config = json.load(f)
def get_value(self, key):
value = None
with SPWatcher._lock:
if key in self._config.keys():
value = self._config[key]
return value
def set_value(self, key, value):
with SPWatcher._lock:
self._config[key] = value
def get_service_points(self):
service_points = []
with SPWatcher._lock:
service_points = self._service_points
return service_points
def get_service_point_summaries(self):
service_points = self.get_service_points()
service_point_summaries = []
for sp in service_points:
sps = {}
sps['id'] = sp['id']
sps['name'] = sp['linked_name']
sps['ipaddress'] = sp['ipaddress']
sps['site'] = sp['site']
sps['connected'] = sp['connected']
sps['status'] = sp['status']
sps['pulling_severity'] = sp['pulling_severity']
service_point_summaries.append(sps)
return service_point_summaries
def set_service_points(self, service_points):
with SPWatcher._lock:
self._service_points = service_points
def clear_service_points(self):
with SPWatcher._lock:
self._service_points = []
def save(self):
with SPWatcher._lock:
with open(SPWatcher._service_points_file, 'w') as f:
json.dump(self._service_points, f, indent=4)
with open(SPWatcher._config_file, 'w') as f:
json.dump(self._config, f, indent=4)
def _construct_site_dic(self, edge_api):
site_dic = {}
sites = edge_api.get_sites()
for site in sites:
site_dic[site['id']] = site['name']
return site_dic
def _issue_traps(self, service_point, status, sp_status):
trap_servers = self._config['trap_servers']
pulling_severity = 'UNKNOWN'
if service_point['status'] != 'UNKNOWN' and service_point['status'] != status:
send_status_notification(
trap_servers,
service_point,
'spStatus',
status
)
if service_point['diagnostics'] is not None:
prev_sp_statuses = service_point['diagnostics']['spServicesStatuses']
crnt_sp_statuses = sp_status['spServicesStatuses']
for key in crnt_sp_statuses.keys():
if prev_sp_statuses[key]['status'] != crnt_sp_statuses[key]['status']:
send_status_notification(
trap_servers,
service_point,
key,
crnt_sp_statuses[key]['status']
)
if 'dns-gateway-service' in crnt_sp_statuses.keys():
getway_service = crnt_sp_statuses['dns-gateway-service']
if 'additionalDetails' in getway_service.keys():
additional_details = getway_service['additionalDetails']
last_pulling_timestamp = \
additional_details['settingsDiagnostics']['lastSettingsPollingTimestamp'] // 1000
last_pulling_time = datetime.fromtimestamp(last_pulling_timestamp)
now = datetime.now()
delay = now - last_pulling_time
if delay > timedelta(hours=1):
pulling_severity = 'CRITICAL'
elif delay > timedelta(minutes=15):
pulling_severity = 'WARNING'
else:
pulling_severity = 'NORMAL'
if service_point['pulling_severity'] != 'UNKNOWN' and \
service_point['pulling_severity'] != pulling_severity:
send_pulling_stopped_notification(
trap_servers,
service_point,
pulling_severity,
last_pulling_time
)
return pulling_severity
def _analyze_service_point(self, edge_api, service_point, timeout):
status = 'UNKNOWN'
sp_status = edge_api.get_service_point_status(service_point['ipaddress'], timeout)
pulling_severity = 'UNKNOWN'
if sp_status is not None:
status = sp_status['spStatus']
pulling_severity = self._issue_traps(service_point, status, sp_status)
else:
status = 'UNREACHED'
if service_point['status'] != 'UNKNOWN' and service_point['status'] != status:
send_status_notification(
self._config['trap_servers'],
service_point,
'spStatus',
status
)
service_point['status'] = status
service_point['diagnostics'] = sp_status
service_point['pulling_severity'] = pulling_severity
def _construct_linked_name(self, edge_api, name, ipaddress):
return "<a href='%s' target='_blank'>%s</a>" % \
(edge_api.get_service_point_status_url(ipaddress), name)
def _collect_service_points(self, edge_api):
service_points = []
site_dic = self._construct_site_dic(edge_api)
sps = edge_api.get_service_points()
for sp in sps:
service_point = {}
service_point['id'] = sp['id']
service_point['name'] = sp['name']
service_point['ipaddress'] = sp['ipAddresses'][0].split('/')[0] if 0 < len(sp['ipAddresses']) else ''
service_point['site'] = site_dic[sp['siteId']] if sp['siteId'] in site_dic.keys() else ''
service_point['connected'] = sp['connectionState']
service_point['status'] = 'UNKNOWN'
service_point['diagnostics'] = None
service_point['pulling_severity'] = 'UNKNOWN'
service_point['linked_name'] = service_point['name']
if service_point['ipaddress'] != '':
service_point['linked_name'] = \
self._construct_linked_name(edge_api, service_point['name'], service_point['ipaddress'])
service_points.append(service_point)
if 0 < len(service_points):
service_points.sort(key=lambda x:x['site'])
return service_points
def _find_service_points(self, id):
found = None
for sp in self.get_service_points():
if sp['id'] == id:
found = sp
break
return found
def watch_service_points(self):
if self._debug:
print('Watch Service Points is called....')
edge_api = EdgeAPI(self.get_value('edge_url'), debug=False)
service_points = self.get_service_points()
if 0 == len(service_points):
return False
timeout = self.get_value('timeout')
for sp in service_points:
self._analyze_service_point(edge_api, sp, timeout)
return True
def collect_service_points(self):
succeed = False
try:
interval = self.get_value('execution_interval')
edge_api = EdgeAPI(self.get_value('edge_url'), debug=True)
if not edge_api.validate_edgeurl():
return succeed
if edge_api.login(self.get_value('edge_client_id'), self.get_value('edge_secret')):
service_points = self._collect_service_points(edge_api)
print('Service Points from edge is <%d>' % len(service_points))
self.set_service_points(service_points)
edge_api.logout()
succeed = True
except Exception as e:
if self._debug:
print('DEBUG: Exception <%s>' % str(e))
return succeed
def update_service_points(self, service_points):
succeed = False
updated_sps = []
for sp in service_points:
found = self._find_service_points(sp['id'])
if found is not None:
updated_sps.append(found)
print('Updated SPs <%d>' % len(updated_sps))
self.set_service_points(updated_sps)
succeed = True
return succeed
def register_job(self):
succeed = False
try:
edge_api = EdgeAPI(self.get_value('edge_url'), debug=False)
if not edge_api.validate_edgeurl():
return succeed
if self._scheduler is None:
self._scheduler = BackgroundScheduler(daemon=True, timezone=pytz.utc)
self._scheduler.start()
if self._job is not None:
self._job.remove()
self._job = None
interval = self.get_value('execution_interval')
if interval is not None and 0 < interval:
self.watch_service_points()
self._job = \
self._scheduler.add_job(self.watch_service_points, 'interval', seconds=interval)
succeed = True
except Exception as e:
if self._debug:
print('DEBUG: Exception <%s>' % str(e))
return succeed
#
# Followings are code that should be executed when this module is loaded.
#
sp_watcher = SPWatcher.get_instance(debug=True)
print("SPWatcher is loaded.......")
if sp_watcher.register_job():
print("Watching Job is registered.......")
| [
"[email protected]"
] | |
8737ee08d054ee98756e8737324359bab8119602 | 93289539257faa129aa2d17a42148f7d73ce4e9e | /Python/2523_StarPrint.py | 120f583317657dfe94f2a603557e8cb717213ea7 | [] | no_license | Manngold/baekjoon-practice | d015dd518144a75b5cb3d4e831d6c95a3c70544f | 54f9efcb6460647c2a0f465731b582fe6de89cf3 | refs/heads/master | 2021-06-25T13:04:23.162531 | 2020-10-14T08:34:28 | 2020-10-14T08:34:28 | 148,895,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | n = int(input())
for i in range(1, n+1):
print("*" * i)
for i in range(n - 1, 0, -1):
print("*" * i)
| [
"[email protected]"
] | |
285e62bda18170573f5265c179ae55cc4ae29976 | dbba757ea53e2dc2ccf8b4d8683b06134f927091 | /licitacao/apps.py | add51d681d3236c614a3f025110522f1fa893523 | [
"MIT"
] | permissive | WesGtoX/audit | 144d3ac4e1b5381bf9666427220faaa41b961aa7 | c260cac2ab0499e75c6b9fbc5fe40708a81c3346 | refs/heads/master | 2021-06-24T21:02:13.025855 | 2019-10-08T21:48:49 | 2019-10-08T21:48:49 | 213,088,213 | 2 | 0 | MIT | 2021-03-19T10:37:40 | 2019-10-06T00:16:09 | CSS | UTF-8 | Python | false | false | 93 | py | from django.apps import AppConfig
class LicitacaoConfig(AppConfig):
name = 'licitacao'
| [
"[email protected]"
] | |
361d6992dff363f16c32d032157aec6bf96b77e7 | 930c207e245c320b108e9699bbbb036260a36d6a | /BRICK-RDFAlchemy/generatedCode/brick/brickschema/org/schema/_1_0_2/Brick/FCU_Discharge_Fan_VFD_Speed_Status.py | 67d7b76a4f9139b1e9c6dea660ff014753e989cf | [] | no_license | InnovationSE/BRICK-Generated-By-OLGA | 24d278f543471e1ce622f5f45d9e305790181fff | 7874dfa450a8a2b6a6f9927c0f91f9c7d2abd4d2 | refs/heads/master | 2021-07-01T14:13:11.302860 | 2017-09-21T12:44:17 | 2017-09-21T12:44:17 | 104,251,784 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 699 | py | from rdflib import Namespace, Graph, Literal, RDF, URIRef
from rdfalchemy.rdfSubject import rdfSubject
from rdfalchemy import rdfSingle, rdfMultiple, rdfList
from brick.brickschema.org.schema._1_0_2.Brick.FCU_Fan_Status import FCU_Fan_Status
from brick.brickschema.org.schema._1_0_2.Brick.Discharge_Fan_VFD_Speed_Status import Discharge_Fan_VFD_Speed_Status
from brick.brickschema.org.schema._1_0_2.Brick.Supply_Fan_VFD_Speed_Status import Supply_Fan_VFD_Speed_Status
class FCU_Discharge_Fan_VFD_Speed_Status(FCU_Fan_Status,Discharge_Fan_VFD_Speed_Status,Supply_Fan_VFD_Speed_Status):
rdf_type = Namespace('https://brickschema.org/schema/1.0.2/Brick#').FCU_Discharge_Fan_VFD_Speed_Status
| [
"[email protected]"
] | |
29cddd507b018e9e9a65049588042e6995ffd44f | 8d90e2eae476ecbe88d46ef2f03fe7ba92cc733b | /Programming Basics with Python/For-cycle/For_C_lab_ex9_left_right_sum.py | 04d51d4f9ce78c72726b3f293a70e9907e509856 | [] | no_license | KaterinaMutafova/SoftUni | c3f8bae3c2bf7bd4038da010ca03edc412672468 | 7aeef6f25c3479a8d677676cb1d66df20ca0d411 | refs/heads/main | 2023-03-08T10:53:49.748153 | 2021-02-19T15:55:13 | 2021-02-19T15:55:13 | 317,597,660 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 338 | py | n = int(input())
left_sum = 0
right_sum = 0
for i in range(n):
num_1 = int(input())
left_sum += num_1
for i in range(n):
num_2 = int(input())
right_sum += num_2
if left_sum == right_sum:
print(f"Yes, sum = {left_sum}")
else:
diff = abs(left_sum - right_sum)
print(f"No, diff = {diff}")
| [
"[email protected]"
] | |
15b34b27bee228e2af26d52019fb4474cccf8260 | ef8b6298ae5d247c65454eeeb7b1198b18fe5580 | /CodingDojangPython/turtle_graphics_8.py | b1a6b1cd79d1f51ce5914ab864c944f2c257fb85 | [] | no_license | Supreme-YS/PythonWorkspace | 01728fc06b9f1ce9d48de8ab662c0b528c27df1d | 9b1d17c805e45285eb3594da84df02939cab2a85 | refs/heads/master | 2023-07-28T16:44:36.930819 | 2021-09-06T14:03:50 | 2021-09-06T14:03:50 | 289,924,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | import turtle as t
t.shape('turtle')
t.speed('fastest') # set the turtle to its fastest drawing speed
for i in range(300): # repeat 300 times
t.forward(i) # move forward by i; the line gets longer each iteration
t.right(91) # turn right 91 degrees
| [
"[email protected]"
] | |
be6e6e07769c9f3eeeaea821620b48fd28531e98 | 3b1229c458aa232bfcf11cd6da5f1275e9bb3a8f | /python/Python基础/截图和代码/面对对象3、异常、模块/PaxHeader/04-异常.py | a8ffe02dfb89db7b7127dbb4a18f6bb56302af57 | [] | no_license | sunjianbo/learning | 4fee3ddc5e3d4040a49f2ef3e6f239fd6a67b393 | 384cb4e73cc67e390ee2f4be0da9fe0319d93644 | refs/heads/master | 2021-02-17T16:32:22.557614 | 2020-03-09T05:29:51 | 2020-03-09T05:29:51 | 245,111,571 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | 82 path=Python基础/截图和代码/面对对象3、异常、模块/04-异常.py
| [
"sunjianbo"
] | sunjianbo |
fd6d861b9e060cf2c1d270f546716f557e8a67da | aab5143d8ee3ba15c9367b146287b7d5a6a41052 | /tests/command_download_aoj.py | 53a02cb26d8c3e6fc67e0fa6750e8afed449ff91 | [
"MIT"
] | permissive | kfaRabi/online-judge-tools | fc1e5796446a3f1b729507e018203aeff3193e3f | 79de8d37e1aa78a7c4c82c6a666f1f1602caf545 | refs/heads/master | 2020-04-23T19:49:57.394790 | 2019-02-25T20:43:43 | 2019-02-26T09:15:27 | 171,419,052 | 0 | 0 | MIT | 2019-02-19T06:34:26 | 2019-02-19T06:34:23 | Python | UTF-8 | Python | false | false | 5,208 | py | import os
import unittest
import tests.command_download
class DownloadAOJTest(unittest.TestCase):
def snippet_call_download(self, *args, **kwargs):
tests.command_download.snippet_call_download(self, *args, **kwargs)
def test_call_download_aoj_DSL_1_A(self):
self.snippet_call_download('http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=DSL_1_A', {
'sample-1.in': 'cb3a243a13637cddedf245cd0f6eab86',
'sample-1.out': '29cc7a34bb5a15da3d14ef4a82a4c530',
})
def test_call_download_aoj_0100(self):
self.snippet_call_download('http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=0100', {
'sample-1.in': '4f0f7b3b0b73c97c5283395edde3dbe8',
'sample-1.out': '26d3b085a160c028485f3865d07b9192',
})
def test_call_download_aoj_1371(self):
self.snippet_call_download('http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=1371', {
'sample-6.in': '3521658c02c291ad5a4e5cbaa3cb0260',
'sample-2.out': 'b026324c6904b2a9cb4b88d6d61c81d1',
'sample-3.in': 'b9775d52323c110b406d53b9805cee01',
'sample-3.out': '6d7fce9fee471194aa8b5b6e47267f03',
'sample-1.out': '897316929176464ebc9ad085f31e7284',
'sample-5.in': '0b06c70869a30733379a72e2a8c03758',
'sample-4.out': 'b026324c6904b2a9cb4b88d6d61c81d1',
'sample-7.out': '897316929176464ebc9ad085f31e7284',
'sample-6.out': 'b026324c6904b2a9cb4b88d6d61c81d1',
'sample-5.out': '897316929176464ebc9ad085f31e7284',
'sample-2.in': 'f3c536f039be83a4ef0e8f026984d87d',
'sample-1.in': '56092c4794d713f93d2bb70a66aa6ca1',
'sample-4.in': '318d4b3abfa30cc8fad4b1d34430aea3',
'sample-7.in': 'dcac31a5a6542979ce45064ab0bfa83d',
})
def test_call_download_aoj_2256(self):
self.snippet_call_download('http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=2256&lang=jp', {
'sample-1.in': 'c89817f1ee0b53209d66abc94e457f7f',
'sample-1.out': 'b9c2c5761360aad068453f4e64dd5a4e',
})
def test_call_download_aoj_2310(self):
self.snippet_call_download('http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=2310&lang=jp', {
'sample-1.in': '27ed9e879684b438fa6cc80c4261daf7',
'sample-1.out': '48a24b70a0b376535542b996af517398',
'sample-2.in': 'bb84849858ca512e14e071e25120ed78',
'sample-2.out': '6d7fce9fee471194aa8b5b6e47267f03',
'sample-3.in': '4c4ae7fb491ec5c6ad57d9d5711e44a6',
'sample-3.out': '9ae0ea9e3c9c6e1b9b6252c8395efdc1',
'sample-4.in': 'ad1109594a97eabe9bee60a743006de7',
'sample-4.out': '84bc3da1b3e33a18e8d5e1bdd7a18d7a',
'sample-5.in': 'b80447e0bc0c4ecc6fb3001b6a4e79f6',
'sample-5.out': 'c30f7472766d25af1dc80b3ffc9a58c7',
})
def test_call_download_aoj_2511(self):
self.snippet_call_download('http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=2511', {
'sample-1.in': '0483a0080de977d5e1db1ab87eae3fa9',
'sample-1.out': '346ce6367eff6bb3c9915601f2ae1e75',
})
def test_call_download_aoj_DSL_3_B(self):
self.snippet_call_download('https://onlinejudge.u-aizu.ac.jp/courses/library/3/DSL/3/DSL_3_B', {
'sample-1.in': '36adbcbb268e04ef7667fb2d965eed2c',
'sample-1.out': '26ab0db90d72e28ad0ba1e22ee510510',
'sample-2.in': '89a4280a03ec0001ec91f3fedbafadc1',
'sample-2.out': '6d7fce9fee471194aa8b5b6e47267f03',
'sample-3.in': '22d823cf994ebee157a0cdc8219a600d',
'sample-3.out': '897316929176464ebc9ad085f31e7284',
})
def test_call_download_aoj_2394(self):
self.snippet_call_download('https://onlinejudge.u-aizu.ac.jp/challenges/sources/JAG/Spring/2394?year=2011', {
'sample-1.in': '05dfaf25ae93e601a10cfb278db7679c',
'sample-1.out': '80982df7f6dac58f828e2e8b12e87a0a',
})
def test_call_download_aoj_system_ITP1_1_B(self):
self.snippet_call_download(
'http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=ITP1_1_B', {
'1.in': 'b026324c6904b2a9cb4b88d6d61c81d1',
'1.out': 'b026324c6904b2a9cb4b88d6d61c81d1',
'2.in': '6d7fce9fee471194aa8b5b6e47267f03',
'2.out': '66a7c1d5cb75ef2542524d888fd32f4a',
'3.in': '9caff0735bc6e80121cedcb98ca51821',
'3.out': 'fef5f767008b27f5c3801382264f46ef',
'4.in': '919d117956d3135c4c683ff021352f5c',
'4.out': 'b39ffd5aa5029d696193c8362dcb1d19',
}, is_system=True)
def test_call_download_aoj_system_1169(self):
# NOTE: the data exists, but AOJ says "..... (terminated because of the limitation)"
self.snippet_call_download(
'http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=1169&lang=jp',
{
# '1.in': 'f0ecaede832a038d0e940c2c4d0ab5e5',
# '1.out': '8d2f7846dc2fc10ef37dcb548635c788',
},
is_system=True)
| [
"[email protected]"
] | |
e68f96a3c8e0dccbe87352d539826d189e75c6ea | 663d429e1f552ef958d37cfe4a0707354b544a9a | /新建文件夹/theading_demo.py | 7d32b29371111338cbc1370c36a639394c73a531 | [] | no_license | nie000/mylinuxlearn | 72a33024648fc4393442511c85d7c439e169a960 | 813ed75a0018446cd661001e8803f50880d09fff | refs/heads/main | 2023-06-20T07:46:11.842538 | 2021-07-15T13:46:43 | 2021-07-15T13:46:43 | 307,377,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,187 | py | # import time
#
# def lo1():
# time.sleep(4)
# print('<------lo1-------->')
#
#
# def lo2():
# time.sleep(2)
# print('<------lo2-------->')
#
#
# def main():
# t1 = time.time()
# lo1()
# lo2()
# t2 = time.time()
#
# print('total time: {}'.format(t2-t1))
#
# if __name__ == "__main__":
# main()
# import time
# import threading
#
# def lo1():
# time.sleep(4)
# print('<------lo1-------->')
#
#
# def lo2():
# time.sleep(2)
# print('<------lo2-------->')
#
#
# def main():
# t1 = time.time()
# f1 = threading.Thread(target=lo1)
# f2 = threading.Thread(target=lo2)
# f1.start()
# f2.start()
# print('did not wait')
# f1.join()
# f2.join()
# t2 = time.time()
#
# print('total time: {}'.format(t2-t1))
#
# if __name__ == "__main__":
# main()
# import time
# #
# def lo1():
# a=0
# for index in range(100000000):
# a+=index
# print('<------lo1-------->')
#
#
# def lo2():
# a = 0
# for index in range(100000000):
# a += index
# print('<------lo2-------->')
#
#
# def main():
# t1 = time.time()
# lo1()
# lo2()
# t2 = time.time()
#
# print('total time: {}'.format(t2-t1))
#
# if __name__ == "__main__":
# main()
# import time
# import threading
#
# def lo1():
# a=0
# for index in range(100000000):
# a+=index
# print('<------lo1-------->')
#
#
# def lo2():
# a = 0
# for index in range(100000000):
# a += index
# print('<------lo2-------->')
#
#
# def main():
# t1 = time.time()
# f1 = threading.Thread(target=lo1)
# f2 = threading.Thread(target=lo2)
# f1.start()
# f2.start()
#     print('did not wait yet')
# f1.join()
# f2.join()
# t2 = time.time()
#
# print('total time: {}'.format(t2-t1))
#
# if __name__ == "__main__":
# main()
# import time
# import threading
# from multiprocessing import Process
# def lo1():
# a=0
# for index in range(100000000):
# a+=index
# print('<------lo1-------->')
# def lo2():
# a = 0
# for index in range(100000000):
# a += index
# print('<------lo2-------->')
# def main():
# t1 = time.time()
#     f1 = Process(target=lo1)  # process (bypasses the GIL for CPU-bound work)
#     f2 = Process(target=lo2)  # process
# f1.start()
# f2.start()
#     print('did not wait yet')
# f1.join()
# f2.join()
# t2 = time.time()
#
# print('total time: {}'.format(t2-t1))
#
# if __name__ == "__main__":
# main()
# import time
# import threading
#
# def lo1(a):
# a=0
# for index in range(100000000):
# a+=index
# print('<------lo1-------->')
#
#
# def lo2(b):
# a = 0
# for index in range(100000000):
# a += index
# print('<------lo2-------->')
#
#
# def main():
# t1 = time.time()
# f1 = threading.Thread(target=lo1,args=(1,))
# f2 = threading.Thread(target=lo2,args=(2,))
# f1.start()
# f2.start()
#     print('did not wait yet')
# f1.join()
# f2.join()
# t2 = time.time()
#
# print('total time: {}'.format(t2-t1))
#
# if __name__ == "__main__":
# main()
# import threading
# import time
#
#
# class TestThread(threading.Thread):
# def __init__(self, target=None, args=None):
#         # call the parent class constructor
# super().__init__()
# self.target = target
# self.args = args
#
#     # run() is the method executed when the thread is started
# def run(self):
#
# self.target(*self.args)
#
# def test(i):
# time.sleep(i)
# print('execute thread:{}'.format(i))
#
# def loop():
# my_tasks = []
# for i in range(5):
# my_tasks.append(TestThread(target=test, args=(i,)))
# for i in my_tasks:
# i.start()
# for i in my_tasks:
# i.join()
#     print("all done")
# loop()
import threading
import time
a = 0
def add():
    global a
    for i in range(1000000):
        a += 1
def minus():
    global a
    for i in range(1000000):
        a -= 1
def main():
    t1 = threading.Thread(target=add)
    t2 = threading.Thread(target=minus)
    t1.start()
    t2.start()
    t1.join()
    t2.join()
    time.sleep(2)
    # `a += 1` / `a -= 1` are read-modify-write sequences, not atomic, so the
    # two threads race on the shared global and the result is rarely exactly 0
    print(a)
if __name__ == '__main__':
    main()
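# A minimal sketch (not part of the original demo) showing how the race above
# could be removed with threading.Lock: serializing each read-modify-write
# makes the final value deterministically 0.
# lock = threading.Lock()
# def add_locked():
#     global a
#     for i in range(1000000):
#         with lock:
#             a += 1
# def minus_locked():
#     global a
#     for i in range(1000000):
#         with lock:
#             a -= 1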
| [
"[email protected]"
] | |
f7108362ca0be8b69099cbdc38f4759264539fd8 | dc3c39d4dcaa1c4e4bd0e9405642159580a87767 | /findlove/settings.py | 869d4bdd5287dbfc4c64cd3e62d58556558361bc | [] | no_license | ylearner/findlove | f0d98a5ed99565ee8db77ab8b767b4c874d28260 | d5dd1ebe98fbb827e52088e98357b16accbcf3c8 | refs/heads/master | 2022-02-23T04:54:31.036032 | 2019-07-20T10:35:16 | 2019-07-20T10:35:16 | 197,905,314 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,273 | py | """
Django settings for findlove project.
Generated by 'django-admin startproject' using Django 1.11.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'z2*tpu#wbcawhed3a+k8u5sc1j(#hz*+l(v5*-k$$71!^hnx2p'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'user',
'index',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'findlove.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'findlove.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
] | [
"[email protected]"
] | |
862748235151ed238905e5074eb14d4850a9d11c | 20927c6b6dbb360bf0fd13d70115bdb27e7196e7 | /0x0A-python-inheritance/0-main.py~ | a0817dce1ea8a7a0fc19bf8c91bb07dc3abd1dcc | [] | no_license | PauloMorillo/holbertonschool-higher_level_programming | 27fc1c0a1ae5784bd22d07daaedb602ee618867d | 8a42a60aa4ea52b5cc2fb73e57f38aa6c5196c98 | refs/heads/master | 2021-08-16T17:13:45.568038 | 2020-07-29T01:20:25 | 2020-07-29T01:20:25 | 207,305,140 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | #!/usr/bin/python3
lookup = __import__('0-lookup').lookup
class MyClass1(object):
pass
class MyClass2(object):
my_attr1 = 3
def my_meth(self):
pass
print(lookup(MyClass1))
print(lookup(MyClass2))
print(lookup(int))
| [
"[email protected]"
] | ||
413873a9e910423c4d7a172c6080316e21adde8a | e2e08d7c97398a42e6554f913ee27340226994d9 | /pyautoTest-master(ICF-7.5.0)/test_case/scg/scg_Custom_Rules/test_c155535.py | 686cb9c5c388b3fdef651d4013c96e2f837745fe | [] | no_license | lizhuoya1111/Automated_testing_practice | 88e7be512e831d279324ad710946232377fb4c01 | b3a532d33ddeb8d01fff315bcd59b451befdef23 | refs/heads/master | 2022-12-04T08:19:29.806445 | 2020-08-14T03:51:20 | 2020-08-14T03:51:20 | 287,426,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,636 | py | import pytest
import time
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from page_obj.scg.scg_def import *
from page_obj.scg.scg_button import *
from page_obj.scg.scg_def_firewall import *
from page_obj.scg.scg_def_sslvpn import *
from page_obj.scg.scg_dev import *
from page_obj.scg.scg_def_ifname_OEM import *
from page_obj.scg.scg_def_machine_learning import *
from page_obj.scg.scg_def_custom_rules import *
test_id = "155535"
# 1. Set the protocol to modbus, set the event action to "block" ('阻断'), keep the remaining settings valid, and click save.
def test_c155535(browser):
try:
login_web(browser, url=dev1)
        # # clear all existing custom rules
        # del_all_custom_rules_lzy(browser)
        # add a custom rule ('读取线圈状态' = "read coil status", '阻断' = "block")
add_custom_rules_complete_lzy(browser, protocol_modubus='yes', protocol_s7='yes/no', protocol='modbus',
function='读取线圈状态', start_address='0',
end_address_or_length='end_address', end_address='9',
length='', start_data='', end_data='', action_modbus='阻断',
PduType='', FunctionType='', action_s7='', save='yes', cancel='no')
sleep(0.5)
        # fetch the text of the newly added rule row
sleep(0.5)
info1 = browser.find_element_by_xpath('//*[@id="table"]/tbody/tr[2]').text
print(info1)
        # restore: delete the added rule (row icon at //*[@id="table"]/tbody/tr[2]/td[7]/a[2]/img)
        # and remove a leftover extra row if one exists; the original while-loop
        # broke after a single click, so this try/except preserves that behaviour
        try:
            browser.find_element_by_xpath('//*[@id="table"]/tbody/tr[3]/td[7]/a[2]/img').click()
        except:
            print('no extra entries')
sleep(12)
delete_sslvpn_safe_site_lzy(browser, number='1')
try:
assert '阻断' in info1
rail_pass(test_run_id, test_id)
except:
rail_fail(test_run_id, test_id)
assert '阻断' in info1
except Exception as err:
        # if any of the steps above raised, reload the device and restore the configuration
print(err)
reload(hostip=dev1)
        # delete the custom rule
sleep(1)
login_web(browser, url=dev1)
delete_sslvpn_safe_site_lzy(browser, number='1')
rail_fail(test_run_id, test_id)
assert False
if __name__ == '__main__':
pytest.main(["-v", "-s", "test_c" + str(test_id) + ".py"])
| [
"[email protected]"
] | |
79c1195ef4085e2d612189df158046cecb1a24af | 425db5a849281d333e68c26a26678e7c8ce11b66 | /LeetCodeSolutions/LeetCode_0763.py | ef5ed806ec733581044ae63e06a8c6bc47b835e1 | [
"MIT"
] | permissive | lih627/python-algorithm-templates | e8092b327a02506086414df41bbfb2af5d6b06dc | a61fd583e33a769b44ab758990625d3381793768 | refs/heads/master | 2021-07-23T17:10:43.814639 | 2021-01-21T17:14:55 | 2021-01-21T17:14:55 | 238,456,498 | 29 | 8 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | class Solution:
    # `List` must be imported for the return annotation; the original snippet omits it
    from typing import List
    def partitionLabels(self, S: str) -> List[int]:
        # record the first and last index at which each character occurs
        parts = {}
        for idx, c in enumerate(S):
            if c not in parts:
                parts[c] = [idx, idx]
            else:
                parts[c][1] = idx
        # merge overlapping [first, last] intervals; every merged interval is
        # one partition and its length is appended to the answer
        _parts = [v for k, v in parts.items()]
        _parts.sort()
        ret = []
        cur = _parts[0]
        for s, e in _parts[1:]:
            if s > cur[1]:
                ret.append(cur[1] - cur[0] + 1)
                cur = [s, e]
            else:
                cur[1] = max(e, cur[1])
        ret.append(cur[1] - cur[0] + 1)
        return ret
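# Quick sanity check (illustrative driver, not part of the original solution):
if __name__ == "__main__":
    # the classic example splits into "ababcbaca", "defegde", "hijhklij"
    assert Solution().partitionLabels("ababcbacadefegdehijhklij") == [9, 7, 8]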
| [
"[email protected]"
] | |
24f2c153c0a666812ed68fc440c33f3285d0bae6 | 44cbc067afcced7fac7ad7f4584d0d16d66bf5b4 | /ansiblemetrics/general/lines_blank.py | 15d2145c0b6d973e6c3bf69221cf9e16cf3bbb2d | [
"Apache-2.0"
] | permissive | ElsevierSoftwareX/SOFTX_2020_231 | fb75820e99dbd6f2380146ecf5b8893d69942260 | e3ad95ebdc324ae308669d437ec60bd726580102 | refs/heads/master | 2023-01-18T22:53:04.785576 | 2020-11-18T11:52:39 | 2020-11-18T11:52:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | from ansiblemetrics.lines_metric import LinesMetric
class LinesBlank(LinesMetric):
""" This class is responsible for providing the methods to count the blank lines of code (bloc) in a given .yaml file"""
def count(self):
bloc = 0
for l in self.yml.splitlines():
if not l.strip():
bloc += 1
return bloc | [
"[email protected]"
] | |
46721e360b7369375cc7838c189ffee765b1c1cf | 8d78ee989a82bbff99d72facaa471a686961cb5b | /djangoProject/venv/Lib/site-packages/PIL/PcdImagePlugin.py | 47a8708f7f16402699fe1f1bac76fcb0c71268b4 | [] | no_license | jaydevdesai/Course_Enrollment_System | f12ad576bba73f23f49093a73e363742f87f86a7 | 0e3ebe5af76ab6b42c31d18ac45a7ef5b4d6bf59 | refs/heads/master | 2023-06-07T06:31:51.921429 | 2021-07-04T13:52:02 | 2021-07-04T13:52:02 | 382,835,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,504 | py | #
# The Python Imaging Library.
# $Id$
#
# PCD file handling
#
# History:
# 96-05-10 fl Created
# 96-05-27 fl Added draft mode (128x192, 256x384)
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
from . import Image, ImageFile
from ._binary import i8
##
# Image plugin for PhotoCD images. This plugin only reads the 768x512
# image from the file; higher resolutions are encoded in a proprietary
# encoding.
class PcdImageFile(ImageFile.ImageFile):
format = "PCD"
format_description = "Kodak PhotoCD"
def _open(self):
# rough
self.fp.seek(2048)
s = self.fp.read(2048)
if s[:4] != b"PCD_":
raise SyntaxError("not a PCD file")
orientation = i8(s[1538]) & 3
self.tile_post_rotate = None
if orientation == 1:
self.tile_post_rotate = 90
elif orientation == 3:
self.tile_post_rotate = -90
self.mode = "RGB"
self._size = 768, 512 # FIXME: not correct for rotated images!
self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)]
def load_end(self):
if self.tile_post_rotate:
# Handle rotated PCDs
self.im = self.im.rotate(self.tile_post_rotate)
self._size = self.im.size
#
# registry
Image.register_open(PcdImageFile.format, PcdImageFile)
Image.register_extension(PcdImageFile.format, ".pcd")
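# Usage sketch (illustrative, not part of the plugin itself): once this module
# is imported, PhotoCD files open through the regular PIL API, e.g.
#     from PIL import Image
#     im = Image.open("photo.pcd")   # "photo.pcd" is a hypothetical file name
#     im.load()                      # decodes the 768x512 base image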
| [
"[email protected]"
] | |
1bc68658fddf40b1db193f9142a8e2a0bc57e007 | a7ded5d3d19a98e61a44189cffe3703f7938e0db | /xero_python/accounting/models/employee.py | 79e810326968c282fbed3ab4b731143981f9d788 | [
"MIT"
] | permissive | liseekeralbert/xero-python | dfd1076344f763d74f81f701e32600cf88bcc7b2 | d27ab1894ecd84d2a9af0ca91583593756b21ab3 | refs/heads/master | 2022-12-16T07:41:14.331308 | 2020-09-18T17:12:35 | 2020-09-18T17:12:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,895 | py | # coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.3.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class Employee(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"employee_id": "str",
"status": "str",
"first_name": "str",
"last_name": "str",
"external_link": "ExternalLink",
"updated_date_utc": "datetime[ms-format]",
"status_attribute_string": "str",
"validation_errors": "list[ValidationError]",
}
attribute_map = {
"employee_id": "EmployeeID",
"status": "Status",
"first_name": "FirstName",
"last_name": "LastName",
"external_link": "ExternalLink",
"updated_date_utc": "UpdatedDateUTC",
"status_attribute_string": "StatusAttributeString",
"validation_errors": "ValidationErrors",
}
def __init__(
self,
employee_id=None,
status=None,
first_name=None,
last_name=None,
external_link=None,
updated_date_utc=None,
status_attribute_string=None,
validation_errors=None,
): # noqa: E501
"""Employee - a model defined in OpenAPI""" # noqa: E501
self._employee_id = None
self._status = None
self._first_name = None
self._last_name = None
self._external_link = None
self._updated_date_utc = None
self._status_attribute_string = None
self._validation_errors = None
self.discriminator = None
if employee_id is not None:
self.employee_id = employee_id
if status is not None:
self.status = status
if first_name is not None:
self.first_name = first_name
if last_name is not None:
self.last_name = last_name
if external_link is not None:
self.external_link = external_link
if updated_date_utc is not None:
self.updated_date_utc = updated_date_utc
if status_attribute_string is not None:
self.status_attribute_string = status_attribute_string
if validation_errors is not None:
self.validation_errors = validation_errors
@property
def employee_id(self):
"""Gets the employee_id of this Employee. # noqa: E501
The Xero identifier for an employee e.g. 297c2dc5-cc47-4afd-8ec8-74990b8761e9 # noqa: E501
:return: The employee_id of this Employee. # noqa: E501
:rtype: str
"""
return self._employee_id
@employee_id.setter
def employee_id(self, employee_id):
"""Sets the employee_id of this Employee.
The Xero identifier for an employee e.g. 297c2dc5-cc47-4afd-8ec8-74990b8761e9 # noqa: E501
:param employee_id: The employee_id of this Employee. # noqa: E501
:type: str
"""
self._employee_id = employee_id
@property
def status(self):
"""Gets the status of this Employee. # noqa: E501
Current status of an employee – see contact status types # noqa: E501
:return: The status of this Employee. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this Employee.
Current status of an employee – see contact status types # noqa: E501
:param status: The status of this Employee. # noqa: E501
:type: str
"""
allowed_values = [
"ACTIVE",
"ARCHIVED",
"GDPRREQUEST",
"DELETED",
"None",
] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format( # noqa: E501
status, allowed_values
)
)
self._status = status
@property
def first_name(self):
"""Gets the first_name of this Employee. # noqa: E501
First name of an employee (max length = 255) # noqa: E501
:return: The first_name of this Employee. # noqa: E501
:rtype: str
"""
return self._first_name
@first_name.setter
def first_name(self, first_name):
"""Sets the first_name of this Employee.
First name of an employee (max length = 255) # noqa: E501
:param first_name: The first_name of this Employee. # noqa: E501
:type: str
"""
if first_name is not None and len(first_name) > 255:
raise ValueError(
"Invalid value for `first_name`, "
"length must be less than or equal to `255`"
) # noqa: E501
self._first_name = first_name
@property
def last_name(self):
"""Gets the last_name of this Employee. # noqa: E501
Last name of an employee (max length = 255) # noqa: E501
:return: The last_name of this Employee. # noqa: E501
:rtype: str
"""
return self._last_name
@last_name.setter
def last_name(self, last_name):
"""Sets the last_name of this Employee.
Last name of an employee (max length = 255) # noqa: E501
:param last_name: The last_name of this Employee. # noqa: E501
:type: str
"""
if last_name is not None and len(last_name) > 255:
raise ValueError(
"Invalid value for `last_name`, "
"length must be less than or equal to `255`"
) # noqa: E501
self._last_name = last_name
@property
def external_link(self):
"""Gets the external_link of this Employee. # noqa: E501
:return: The external_link of this Employee. # noqa: E501
:rtype: ExternalLink
"""
return self._external_link
@external_link.setter
def external_link(self, external_link):
"""Sets the external_link of this Employee.
:param external_link: The external_link of this Employee. # noqa: E501
:type: ExternalLink
"""
self._external_link = external_link
@property
def updated_date_utc(self):
"""Gets the updated_date_utc of this Employee. # noqa: E501
:return: The updated_date_utc of this Employee. # noqa: E501
:rtype: datetime
"""
return self._updated_date_utc
@updated_date_utc.setter
def updated_date_utc(self, updated_date_utc):
"""Sets the updated_date_utc of this Employee.
:param updated_date_utc: The updated_date_utc of this Employee. # noqa: E501
:type: datetime
"""
self._updated_date_utc = updated_date_utc
@property
def status_attribute_string(self):
"""Gets the status_attribute_string of this Employee. # noqa: E501
A string to indicate if a invoice status # noqa: E501
:return: The status_attribute_string of this Employee. # noqa: E501
:rtype: str
"""
return self._status_attribute_string
@status_attribute_string.setter
def status_attribute_string(self, status_attribute_string):
"""Sets the status_attribute_string of this Employee.
A string to indicate if a invoice status # noqa: E501
:param status_attribute_string: The status_attribute_string of this Employee. # noqa: E501
:type: str
"""
self._status_attribute_string = status_attribute_string
@property
def validation_errors(self):
"""Gets the validation_errors of this Employee. # noqa: E501
Displays array of validation error messages from the API # noqa: E501
:return: The validation_errors of this Employee. # noqa: E501
:rtype: list[ValidationError]
"""
return self._validation_errors
@validation_errors.setter
def validation_errors(self, validation_errors):
"""Sets the validation_errors of this Employee.
Displays array of validation error messages from the API # noqa: E501
:param validation_errors: The validation_errors of this Employee. # noqa: E501
:type: list[ValidationError]
"""
self._validation_errors = validation_errors
| [
"[email protected]"
] | |
397f19d4b75b7aea4234abb4b8304525b1030cbe | f4b60f5e49baf60976987946c20a8ebca4880602 | /lib64/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/mon/target.py | cabbe846fbe9d58285c5d527f867aaecdc092217 | [] | no_license | cqbomb/qytang_aci | 12e508d54d9f774b537c33563762e694783d6ba8 | a7fab9d6cda7fadcc995672e55c0ef7e7187696e | refs/heads/master | 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 | Python | UTF-8 | Python | false | false | 5,412 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Target(Mo):
meta = ClassMeta("cobra.model.mon.Target")
meta.isAbstract = True
meta.moClassName = "monTarget"
meta.moClassName = "monTarget"
meta.rnFormat = ""
meta.category = MoCategory.REGULAR
meta.label = "Monitoring Target"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x800040000000001
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = True
meta.isContextRoot = False
meta.childClasses.add("cobra.model.stats.Reportable")
meta.childClasses.add("cobra.model.syslog.Src")
meta.childClasses.add("cobra.model.fault.LcP")
meta.childClasses.add("cobra.model.snmp.Src")
meta.childClasses.add("cobra.model.stats.HierColl")
meta.childClasses.add("cobra.model.event.SevAsnP")
meta.childClasses.add("cobra.model.callhome.Src")
meta.childClasses.add("cobra.model.health.Pol")
meta.childClasses.add("cobra.model.fault.SevAsnP")
meta.childClasses.add("cobra.model.stats.ExportP")
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.snmp.Src", "snmpsrc-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Pol", "health-"))
meta.childNamesAndRnPrefix.append(("cobra.model.syslog.Src", "slsrc-"))
meta.childNamesAndRnPrefix.append(("cobra.model.event.SevAsnP", "esevp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.callhome.Src", "chsrc-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.SevAsnP", "fsevp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.stats.Reportable", "stat-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.LcP", "flcp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.stats.HierColl", "coll-"))
meta.childNamesAndRnPrefix.append(("cobra.model.stats.ExportP", "exp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Comp")
meta.superClasses.add("cobra.model.mon.ATarget")
meta.concreteSubClasses.add("cobra.model.mon.FabricTarget")
meta.concreteSubClasses.add("cobra.model.mon.EPGTarget")
meta.concreteSubClasses.add("cobra.model.mon.InfraTarget")
meta.rnPrefixes = [
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5582, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "scope", "scope", 5, PropCategory.REGULAR)
prop.label = "Target Scope"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("scope", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
2fc476b25c16b8bdd9f4e0e6b1bd076670904495 | 733ce69fcc11ea5ceed3783c6aa256f15510fcad | /venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py | 232dd95d2b2e6939f429a9ac6d5ebb44041e57fc | [] | no_license | ethanbahmanyari0122/scooteq | c7fc25ab6619b43d8dbe5c5e44e9412ebbc700ba | ca5bf60cdacd92f41e318b23766316f4cd4db5fa | refs/heads/master | 2023-06-01T23:57:03.671517 | 2021-06-18T10:35:31 | 2021-06-18T10:35:31 | 377,186,463 | 1 | 0 | null | 2021-06-18T08:53:09 | 2021-06-15T14:12:29 | Python | UTF-8 | Python | false | false | 5,723 | py | """A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method which takes a tree as sole argument and
returns an iterator which generates tokens.
"""
from __future__ import absolute_import, division, unicode_literals
from .. import constants
from .._utils import default_etree
__all__ = ["getTreeWalker", "pprint"]
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
:arg str treeType: the name of the tree type required (case-insensitive).
Supported values are:
* "dom": The xml.dom.minidom DOM implementation
* "etree": A generic walker for tree implementations exposing an
elementtree-like interface (known to work with ElementTree,
cElementTree and lxml.etree).
* "lxml": Optimized walker for lxml.etree
* "genshi": a Genshi stream
:arg implementation: A module implementing the tree type e.g.
xml.etree.ElementTree or cElementTree (Currently applies to the "etree"
tree type only).
:arg kwargs: keyword arguments passed to the etree walker--for other
walkers, this has no effect
:returns: a TreeWalker class
"""
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType == "dom":
from . import dom
treeWalkerCache[treeType] = dom.TreeWalker
elif treeType == "genshi":
from . import genshi
treeWalkerCache[treeType] = genshi.TreeWalker
elif treeType == "lxml":
from . import etree_lxml
treeWalkerCache[treeType] = etree_lxml.TreeWalker
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# XXX: NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
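# Usage sketch (illustrative, not part of html5lib itself):
#     import html5lib
#     document = html5lib.parse("<p>Hello</p>")   # etree-backed tree by default
#     walker = html5lib.getTreeWalker("etree")
#     tokens = list(walker(document))             # yields token dicts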
def concatenateCharacterTokens(tokens):
pendingCharacters = []
for token in tokens:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
pendingCharacters.append(token["data"])
else:
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
pendingCharacters = []
yield token
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
def pprint(walker):
"""Pretty printer for tree walkers
Takes a TreeWalker instance and pretty prints the output of walking the tree.
:arg walker: a TreeWalker instance
"""
output = []
indent = 0
for token in concatenateCharacterTokens(walker):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
# tag name
if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
if token["namespace"] in constants.prefixes:
ns = constants.prefixes[token["namespace"]]
else:
ns = token["namespace"]
name = "%s %s" % (ns, token["name"])
else:
name = token["name"]
output.append("%s<%s>" % (" " * indent, name))
indent += 2
# attributes (sorted for consistent ordering)
attrs = token["data"]
for (namespace, localname), value in sorted(attrs.items()):
if namespace:
if namespace in constants.prefixes:
ns = constants.prefixes[namespace]
else:
ns = namespace
name = "%s %s" % (ns, localname)
else:
name = localname
output.append("%s%s=\"%s\"" % (" " * indent, name, value))
# self-closing
if type == "EmptyTag":
indent -= 2
elif type == "EndTag":
indent -= 2
elif type == "Comment":
output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
elif type == "Doctype":
if token["name"]:
if token["publicId"]:
output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
(" " * indent,
token["name"],
token["publicId"],
token["systemId"] if token["systemId"] else ""))
elif token["systemId"]:
output.append("""%s<!DOCTYPE %s "" "%s">""" %
(" " * indent,
token["name"],
token["systemId"]))
else:
output.append("%s<!DOCTYPE %s>" % (" " * indent,
token["name"]))
else:
output.append("%s<!DOCTYPE >" % (" " * indent,))
elif type == "Characters":
output.append("%s\"%s\"" % (" " * indent, token["data"]))
elif type == "SpaceCharacters":
assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
else:
raise ValueError("Unknown token type, %s" % type)
return "\n".join(output)
| [
"[email protected]"
] | |
109b898a3d962708caae6619f13ebf7b6b6a54c2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02239/s041311605.py | e539baf989142410359c23b219a92f876dc2bf59 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 702 | py | # -*- coding: utf-8 -*-
import sys
from collections import deque
sys.setrecursionlimit(10 ** 9)
def input(): return sys.stdin.readline().strip()
def INT(): return int(input())
def MAP(): return map(int, input().split())
def LIST(): return list(map(int, input().split()))
INF=float('inf')
# BFS from vertex 1 over an adjacency-list graph; each input line is
# "u k v1 ... vk" (vertex id, degree, adjacent vertex ids)
N = INT()
nodes = [[] for i in range(N)]
for i in range(N):
    l = LIST()
    u = l[0]
    l = l[2:]  # drop the vertex id and the degree, keep the neighbours
    for v in l:
        nodes[u - 1].append(v - 1)
    nodes[u - 1].sort()
que = deque()
que.append((0, 0))  # (distance, vertex), starting from vertex 1 (index 0)
costs = [-1] * N
while len(que):
    cost, node = que.popleft()
    if costs[node] == -1:  # the first pop of a vertex gives its shortest distance
        costs[node] = cost
        for v in nodes[node]:
            que.append((cost + 1, v))
for i in range(N):
    print(i + 1, costs[i])  # distance stays -1 for unreachable vertices
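# Illustrative sample run (assuming the AOJ "Breadth First Search" input format):
#   input:      output:
#   4           1 0
#   1 2 2 4     2 1
#   2 1 4       3 2
#   3 0         4 1
#   4 1 3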
| [
"[email protected]"
] | |
fe289312a128747beaccaa044fd07f24db8438cf | c098a0f39cc448ea06bb9e61f4b8155d9feeee82 | /vsts/vsts/test/v4_1/models/test_iteration_details_model.py | 535993773b4bc11bd85c9542a49f8cf105bc9dac | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | nimisha-srinivas/vsts-python-api | 360713f009f948a425ccf5c65ded4ed9d79df07e | 666db9dc30b5bdee026a2534dc2ab3965fad285c | refs/heads/master | 2020-03-13T02:38:13.461082 | 2018-04-23T16:27:04 | 2018-04-23T16:27:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,904 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class TestIterationDetailsModel(Model):
"""TestIterationDetailsModel.
:param action_results:
:type action_results: list of :class:`TestActionResultModel <test.v4_1.models.TestActionResultModel>`
:param attachments:
:type attachments: list of :class:`TestCaseResultAttachmentModel <test.v4_1.models.TestCaseResultAttachmentModel>`
:param comment:
:type comment: str
:param completed_date:
:type completed_date: datetime
:param duration_in_ms:
:type duration_in_ms: number
:param error_message:
:type error_message: str
:param id:
:type id: int
:param outcome:
:type outcome: str
:param parameters:
:type parameters: list of :class:`TestResultParameterModel <test.v4_1.models.TestResultParameterModel>`
:param started_date:
:type started_date: datetime
:param url:
:type url: str
"""
_attribute_map = {
'action_results': {'key': 'actionResults', 'type': '[TestActionResultModel]'},
'attachments': {'key': 'attachments', 'type': '[TestCaseResultAttachmentModel]'},
'comment': {'key': 'comment', 'type': 'str'},
'completed_date': {'key': 'completedDate', 'type': 'iso-8601'},
'duration_in_ms': {'key': 'durationInMs', 'type': 'number'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'outcome': {'key': 'outcome', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[TestResultParameterModel]'},
'started_date': {'key': 'startedDate', 'type': 'iso-8601'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, action_results=None, attachments=None, comment=None, completed_date=None, duration_in_ms=None, error_message=None, id=None, outcome=None, parameters=None, started_date=None, url=None):
super(TestIterationDetailsModel, self).__init__()
self.action_results = action_results
self.attachments = attachments
self.comment = comment
self.completed_date = completed_date
self.duration_in_ms = duration_in_ms
self.error_message = error_message
self.id = id
self.outcome = outcome
self.parameters = parameters
self.started_date = started_date
self.url = url
| [
"[email protected]"
] | |
13c1a4c6632f875df3a07fa3d9fe9b8490999779 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp86_1500.py | 75ab23a87ef4476fdd4b40decf942eef1ee61be9 | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,816 | py | ITEM: TIMESTEP
1500
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
2.9806737844785403e-01 4.6901932621545264e+01
2.9806737844785403e-01 4.6901932621545264e+01
2.9806737844785403e-01 4.6901932621545264e+01
ITEM: ATOMS id type xs ys zs
8 1 0.11971 0.0675017 0.0615897
35 1 0.0657462 0.132649 0.0698687
130 1 0.0684496 0.0493715 0.120174
165 1 0.126658 0.125616 0.120213
1041 1 0.497579 0.498004 0.00269598
42 1 0.3223 0.193028 0.00169277
1035 1 0.308191 0.498133 0.0578776
12 1 0.249302 0.0612679 0.062841
39 1 0.192411 0.128245 0.0683984
43 1 0.314233 0.12395 0.0620901
134 1 0.18412 0.0630363 0.11308
138 1 0.312007 0.0547118 0.120137
169 1 0.247923 0.123452 0.117905
29 1 0.881806 0.00748983 0.00178514
65 1 0.00191867 0.242365 0.00770009
16 1 0.384226 0.0657906 0.0545819
47 1 0.441163 0.127634 0.0581934
142 1 0.441185 0.069924 0.120533
173 1 0.373968 0.12022 0.125316
5 1 0.131283 0.0100136 0.000382582
86 1 0.682457 0.317506 0.00242011
177 1 0.500066 0.125493 0.122677
20 1 0.50313 0.065701 0.0536768
24 1 0.620917 0.0591022 0.0589325
51 1 0.562079 0.122834 0.0578954
146 1 0.557955 0.067723 0.13074
181 1 0.618562 0.129834 0.119839
257 1 1.00064 0.0054228 0.256538
28 1 0.746478 0.065562 0.0512303
55 1 0.677992 0.12106 0.066449
59 1 0.811157 0.126756 0.0604577
150 1 0.695957 0.0574499 0.121967
154 1 0.798826 0.0614509 0.11357
185 1 0.749596 0.132279 0.126995
481 1 0.9893 0.366625 0.371998
60 1 0.750083 0.187168 0.0563618
161 1 0.0064549 0.1199 0.129371
4 1 0.00982676 0.0655545 0.0684746
512 1 0.881523 0.432986 0.446334
32 1 0.869423 0.059907 0.0661708
63 1 0.94039 0.126162 0.0671197
158 1 0.941368 0.0588021 0.117164
189 1 0.873255 0.117859 0.125642
41 1 0.250496 0.126406 0.00377422
40 1 0.128566 0.187413 0.0607377
67 1 0.0698612 0.247236 0.05994
72 1 0.129305 0.306922 0.0608059
162 1 0.0615687 0.189284 0.125436
194 1 0.0609832 0.307756 0.125149
197 1 0.129285 0.238697 0.132534
36 1 1.00182 0.189705 0.0711668
44 1 0.257758 0.185029 0.0556496
71 1 0.190883 0.246922 0.0664934
75 1 0.319894 0.246537 0.0765269
76 1 0.251662 0.312929 0.0615806
166 1 0.196664 0.187882 0.127603
170 1 0.31188 0.182424 0.133753
198 1 0.184118 0.305253 0.128223
201 1 0.247683 0.247654 0.126403
202 1 0.311933 0.31013 0.130433
48 1 0.372756 0.19019 0.0681652
79 1 0.431511 0.250233 0.0687027
80 1 0.375612 0.304791 0.0734987
174 1 0.43648 0.184457 0.123656
205 1 0.376507 0.248092 0.136487
206 1 0.432248 0.317902 0.128395
52 1 0.503949 0.192143 0.0660918
84 1 0.498571 0.302718 0.0721307
546 1 0.0716686 0.183575 0.498169
1413 1 0.127588 0.498176 0.380703
209 1 0.495556 0.243052 0.131557
56 1 0.622412 0.194382 0.0608025
83 1 0.560511 0.256376 0.0592782
88 1 0.624174 0.315537 0.0728863
178 1 0.562104 0.187296 0.123118
210 1 0.564153 0.32253 0.122733
213 1 0.611069 0.248268 0.13172
10 1 0.315601 0.0619131 -0.00149808
87 1 0.681026 0.248926 0.0512813
91 1 0.808673 0.245862 0.0623306
92 1 0.750855 0.298439 0.0590403
182 1 0.678025 0.184271 0.131556
186 1 0.814076 0.195623 0.126424
214 1 0.678572 0.304494 0.128618
217 1 0.741544 0.248556 0.127463
218 1 0.813274 0.303891 0.122105
68 1 0.00666624 0.30669 0.055636
193 1 0.00201402 0.251737 0.118446
64 1 0.878841 0.1861 0.0632183
95 1 0.944023 0.250563 0.0688024
96 1 0.877775 0.311918 0.0735
190 1 0.934911 0.183362 0.128939
221 1 0.878176 0.24491 0.129251
222 1 0.950792 0.311001 0.127004
99 1 0.0622687 0.368563 0.0673935
104 1 0.124941 0.432971 0.0681213
226 1 0.0581525 0.433097 0.126857
229 1 0.126059 0.369405 0.125166
100 1 0.0110136 0.43604 0.0641874
9 1 0.247109 0.00376457 0.00156714
511 1 0.938286 0.374235 0.441782
510 1 0.940808 0.433875 0.371625
103 1 0.178518 0.370534 0.0624402
107 1 0.306996 0.369475 0.0693025
108 1 0.248049 0.436426 0.0581722
230 1 0.193553 0.438937 0.119967
233 1 0.248886 0.373563 0.128863
234 1 0.304879 0.433531 0.120051
131 1 0.0723859 0.00359281 0.190378
545 1 0.990968 0.127132 0.500726
111 1 0.423725 0.381438 0.0699498
112 1 0.37142 0.438551 0.0587053
237 1 0.362573 0.374099 0.136154
238 1 0.428838 0.439446 0.12502
241 1 0.501122 0.371175 0.118641
143 1 0.445679 0.00841476 0.182686
602 1 0.805896 0.315083 0.496852
116 1 0.494631 0.433641 0.0624598
115 1 0.559799 0.377123 0.0608813
120 1 0.624122 0.43854 0.0680219
242 1 0.560056 0.441389 0.124207
245 1 0.624235 0.386366 0.130698
1153 1 0.00532262 0.495613 0.121452
259 1 0.0607389 0.00164451 0.312669
1165 1 0.369661 0.49722 0.120019
1169 1 0.493069 0.497414 0.130446
119 1 0.683891 0.373858 0.0654997
123 1 0.814099 0.380102 0.0637001
124 1 0.746851 0.439312 0.0643966
246 1 0.685391 0.441636 0.121223
249 1 0.747211 0.374898 0.119827
250 1 0.81276 0.440475 0.126127
225 1 0.00685797 0.370038 0.125845
127 1 0.939014 0.375333 0.057663
128 1 0.876706 0.439668 0.062753
253 1 0.876306 0.376443 0.12306
254 1 0.9459 0.436562 0.119578
509 1 0.878784 0.368051 0.369856
598 1 0.698252 0.321036 0.496854
136 1 0.125266 0.0714926 0.183947
163 1 0.0602312 0.121007 0.187248
258 1 0.0643086 0.061478 0.25539
264 1 0.118484 0.0563107 0.318638
291 1 0.064771 0.126225 0.312091
293 1 0.128092 0.117472 0.260689
132 1 -0.000219768 0.0573028 0.187858
283 1 0.810322 0.00101247 0.312873
140 1 0.256916 0.0586704 0.190718
167 1 0.192827 0.122831 0.18206
171 1 0.314978 0.121719 0.187503
262 1 0.196117 0.0659942 0.24915
266 1 0.317201 0.0552059 0.252258
268 1 0.255621 0.058859 0.308576
295 1 0.19627 0.118143 0.313492
297 1 0.254579 0.122277 0.247203
299 1 0.315817 0.117492 0.311336
106 1 0.318231 0.43619 0.00265755
31 1 0.942572 0.0016229 0.0634027
144 1 0.369064 0.0530968 0.189227
175 1 0.441487 0.121917 0.183826
270 1 0.440623 0.057775 0.250814
272 1 0.374657 0.0497066 0.312272
301 1 0.372113 0.120505 0.240855
303 1 0.431335 0.116132 0.314764
276 1 0.499507 0.058682 0.310096
305 1 0.501894 0.124196 0.254146
125 1 0.874446 0.376558 -0.000405039
117 1 0.628719 0.376089 0.0054265
148 1 0.505793 0.0673749 0.189332
152 1 0.620675 0.0636481 0.190504
179 1 0.559984 0.131839 0.187969
274 1 0.562558 0.062877 0.257353
280 1 0.624102 0.0599308 0.310433
307 1 0.564477 0.126113 0.309382
309 1 0.626198 0.120552 0.246171
415 1 0.926731 0.00245946 0.437416
101 1 0.11613 0.369102 0.00662217
77 1 0.374287 0.258971 0.00627553
156 1 0.751781 0.0670543 0.190475
183 1 0.68074 0.121803 0.185378
187 1 0.812232 0.132447 0.190299
278 1 0.683559 0.0601441 0.249147
282 1 0.815093 0.0612483 0.252846
284 1 0.74916 0.0550769 0.306964
311 1 0.684656 0.11982 0.311076
313 1 0.748881 0.130297 0.247158
315 1 0.815015 0.124402 0.308259
507 1 0.813632 0.379603 0.437051
155 1 0.809855 -0.000473741 0.189523
289 1 0.995616 0.118729 0.255788
260 1 0.00380211 0.0656432 0.315115
160 1 0.879515 0.0578516 0.190068
191 1 0.940513 0.112203 0.184812
286 1 0.935233 0.0578941 0.256826
288 1 0.871641 0.0647287 0.311939
317 1 0.872858 0.125075 0.249269
319 1 0.931944 0.124709 0.310021
137 1 0.245468 0.000316664 0.130417
168 1 0.130561 0.169845 0.188281
195 1 0.0588015 0.246296 0.186687
200 1 0.133599 0.311178 0.190421
290 1 0.0666567 0.186093 0.24634
296 1 0.119953 0.188695 0.319869
322 1 0.0694133 0.3109 0.255849
323 1 0.0562176 0.248674 0.308345
325 1 0.129213 0.247447 0.264486
328 1 0.129266 0.309785 0.320825
321 1 0.00462303 0.249257 0.244596
172 1 0.256046 0.187791 0.197372
199 1 0.190781 0.25104 0.203512
203 1 0.313724 0.25074 0.192313
204 1 0.249465 0.312625 0.187489
294 1 0.195027 0.177201 0.262834
298 1 0.311502 0.179991 0.259833
300 1 0.256636 0.180635 0.324638
326 1 0.192633 0.312912 0.260437
327 1 0.190749 0.241791 0.326402
329 1 0.25916 0.241395 0.260231
330 1 0.31529 0.310703 0.254614
331 1 0.319289 0.242107 0.32344
332 1 0.255866 0.307825 0.317485
176 1 0.375967 0.189374 0.190281
207 1 0.435085 0.254523 0.191701
208 1 0.37589 0.313155 0.19642
302 1 0.434356 0.176859 0.252472
304 1 0.378566 0.184408 0.326426
333 1 0.373496 0.250391 0.250454
334 1 0.436119 0.319014 0.259535
335 1 0.433341 0.248048 0.310289
336 1 0.371046 0.309247 0.313688
180 1 0.495038 0.185176 0.1992
340 1 0.498944 0.307005 0.321047
337 1 0.494862 0.258415 0.250899
308 1 0.494487 0.180938 0.31694
212 1 0.49477 0.315927 0.188484
184 1 0.62472 0.198064 0.19185
211 1 0.556354 0.254822 0.197548
216 1 0.618097 0.315853 0.194211
306 1 0.567663 0.189034 0.25628
312 1 0.621849 0.189078 0.315527
338 1 0.557785 0.320079 0.258059
339 1 0.562401 0.251291 0.32165
341 1 0.621434 0.25483 0.258565
344 1 0.62729 0.323431 0.312785
314 1 0.813919 0.184533 0.249537
188 1 0.748291 0.190531 0.184116
215 1 0.688198 0.24729 0.19664
219 1 0.816624 0.253285 0.185171
220 1 0.755846 0.321257 0.185975
310 1 0.68189 0.185275 0.247666
316 1 0.754638 0.19587 0.314191
342 1 0.683917 0.315316 0.244668
343 1 0.694029 0.251721 0.311045
345 1 0.751981 0.255505 0.246992
346 1 0.821501 0.309186 0.241781
347 1 0.816908 0.254297 0.308218
348 1 0.754875 0.307963 0.30922
196 1 0.00325357 0.310782 0.188495
324 1 0.991961 0.315269 0.307108
164 1 1.00041 0.191841 0.179819
292 1 0.999026 0.186984 0.308297
192 1 0.882506 0.180136 0.190199
223 1 0.944678 0.250944 0.189137
224 1 0.882134 0.312855 0.17863
318 1 0.939827 0.187441 0.244375
320 1 0.875177 0.192593 0.308599
349 1 0.879017 0.252487 0.242117
350 1 0.931872 0.318892 0.246382
351 1 0.941038 0.248355 0.299527
352 1 0.881236 0.306627 0.314421
227 1 0.0697345 0.368909 0.197057
232 1 0.126633 0.437626 0.183401
354 1 0.0674773 0.437709 0.250534
355 1 0.0622049 0.362717 0.316854
357 1 0.12275 0.374678 0.253
360 1 0.129096 0.427342 0.31535
228 1 0.0055516 0.432271 0.18343
1553 1 0.500627 0.500576 0.501742
505 1 0.76029 0.384979 0.370206
231 1 0.186825 0.378611 0.187183
235 1 0.309985 0.376496 0.194629
236 1 0.250088 0.435569 0.180966
358 1 0.18781 0.429635 0.253862
359 1 0.191261 0.371615 0.315102
361 1 0.245311 0.375481 0.249619
362 1 0.308776 0.437434 0.247973
363 1 0.314536 0.37363 0.325664
364 1 0.249063 0.433114 0.315392
506 1 0.820466 0.443606 0.374039
1417 1 0.253561 0.49975 0.377133
621 1 0.370858 0.380914 0.501832
239 1 0.437355 0.383305 0.190993
240 1 0.374458 0.438909 0.187966
365 1 0.377538 0.375971 0.250075
366 1 0.436769 0.432286 0.2507
367 1 0.432256 0.377634 0.315849
368 1 0.374174 0.435647 0.30852
244 1 0.490832 0.441914 0.194292
372 1 0.491288 0.441835 0.312999
141 1 0.385299 0.00187121 0.126147
594 1 0.57504 0.314582 0.490766
369 1 0.498401 0.374179 0.259037
243 1 0.557935 0.383056 0.18661
248 1 0.616861 0.439103 0.190132
370 1 0.553287 0.44183 0.255153
371 1 0.5558 0.380796 0.314827
373 1 0.616085 0.382987 0.251306
376 1 0.624607 0.446348 0.315212
574 1 0.933968 0.184294 0.498166
618 1 0.302477 0.441439 0.501063
1027 1 0.0678914 0.497987 0.0569021
247 1 0.684099 0.377766 0.183403
251 1 0.816268 0.37404 0.185302
252 1 0.750919 0.437848 0.182059
374 1 0.685618 0.441843 0.25585
375 1 0.686604 0.379529 0.307095
377 1 0.755552 0.377756 0.249862
378 1 0.814719 0.435398 0.251854
379 1 0.818005 0.372926 0.304491
380 1 0.754545 0.440143 0.314405
356 1 0.00698443 0.43193 0.316198
353 1 -9.74068e-05 0.376953 0.251462
255 1 0.937991 0.377198 0.181498
256 1 0.870531 0.437449 0.183702
381 1 0.87087 0.376444 0.242468
382 1 0.941132 0.436285 0.250645
383 1 0.93001 0.380889 0.305958
384 1 0.870173 0.435287 0.312843
62 1 0.943252 0.188702 0.0088124
386 1 0.0542344 0.0670868 0.380811
392 1 0.124872 0.057167 0.436018
419 1 0.0607927 0.123973 0.446552
421 1 0.12632 0.119254 0.382067
417 1 0.999685 0.13197 0.371888
388 1 0.993045 0.0655274 0.445079
82 1 0.563481 0.313304 0.00416732
403 1 0.565725 0.00114388 0.435789
78 1 0.438529 0.311248 0.00459623
390 1 0.188432 0.0520322 0.372853
394 1 0.30578 0.0518818 0.376073
396 1 0.250241 0.0633779 0.442294
423 1 0.186191 0.11227 0.442972
425 1 0.251666 0.110357 0.370037
427 1 0.3162 0.111363 0.44226
508 1 0.749446 0.441666 0.437567
30 1 0.943865 0.0584146 -0.000858312
1055 1 0.942335 0.494018 0.060552
110 1 0.439046 0.436262 0.00738812
398 1 0.445782 0.0535566 0.377591
400 1 0.381829 0.0599739 0.434069
429 1 0.379175 0.109177 0.373668
431 1 0.433775 0.12247 0.438776
433 1 0.494219 0.11802 0.376908
404 1 0.501422 0.0596108 0.439287
1159 1 0.190597 0.496621 0.181503
1439 1 0.942079 0.50102 0.428398
502 1 0.694435 0.448025 0.371677
402 1 0.556839 0.0547493 0.370337
408 1 0.617435 0.0669786 0.446406
435 1 0.555903 0.13002 0.439023
437 1 0.623637 0.132334 0.374683
1161 1 0.255907 0.500875 0.128258
1291 1 0.306885 0.499574 0.313611
61 1 0.868078 0.120913 0.00374993
406 1 0.676577 0.0593284 0.374941
410 1 0.806034 0.059949 0.379383
412 1 0.748766 0.0637121 0.437762
439 1 0.69151 0.121926 0.44082
441 1 0.747291 0.124874 0.374709
443 1 0.797405 0.126259 0.440259
1281 1 0.00310802 0.494697 0.262581
15 1 0.443844 0.00401084 0.0608394
147 1 0.56298 0.00300092 0.200684
414 1 0.932754 0.0606393 0.371521
416 1 0.867578 0.0615079 0.43165
445 1 0.865384 0.132607 0.369549
447 1 0.929218 0.125058 0.427816
1311 1 0.938814 0.490919 0.315272
135 1 0.184288 0.00122663 0.190268
411 1 0.812136 0.000345111 0.446385
418 1 0.0584101 0.192024 0.381903
424 1 0.130143 0.180648 0.443881
450 1 0.059257 0.309083 0.380747
451 1 0.0636795 0.255789 0.4471
453 1 0.122626 0.251558 0.375857
456 1 0.125043 0.306747 0.446122
452 1 0.99362 0.309693 0.434352
449 1 0.995838 0.249729 0.370713
54 1 0.685156 0.182708 -0.00223565
46 1 0.435214 0.195603 0.00875205
6 1 0.190619 0.0699306 0.00829621
1155 1 0.0551219 0.497975 0.197327
422 1 0.183054 0.177096 0.381681
426 1 0.317202 0.170982 0.384114
428 1 0.251111 0.17179 0.445421
454 1 0.190083 0.310574 0.37563
455 1 0.185387 0.240777 0.438255
457 1 0.253503 0.24744 0.383683
458 1 0.312713 0.310844 0.384868
459 1 0.312761 0.237543 0.440118
460 1 0.237765 0.308302 0.438523
1171 1 0.55529 0.499263 0.185764
281 1 0.748636 0.00239224 0.242066
610 1 0.0645526 0.433353 0.495224
638 1 0.937431 0.433912 0.501887
430 1 0.435441 0.181646 0.386251
432 1 0.379505 0.185574 0.443332
461 1 0.379291 0.246285 0.384884
462 1 0.432327 0.310338 0.37371
463 1 0.43483 0.259005 0.44092
464 1 0.379696 0.324045 0.441655
468 1 0.500661 0.314399 0.430078
436 1 0.499069 0.186125 0.439057
465 1 0.491365 0.253644 0.377376
626 1 0.568465 0.439938 0.495815
261 1 0.130195 0.0100271 0.24616
467 1 0.56573 0.256021 0.431472
434 1 0.559876 0.185501 0.3798
469 1 0.633306 0.25259 0.36992
472 1 0.630952 0.320379 0.429283
466 1 0.56623 0.320534 0.376444
440 1 0.623852 0.189033 0.436476
23 1 0.696343 0.00433919 0.0604156
1157 1 0.127744 0.499075 0.120798
475 1 0.815729 0.253502 0.435722
473 1 0.754937 0.257364 0.382249
470 1 0.696451 0.322359 0.3694
442 1 0.810937 0.192671 0.372294
474 1 0.809721 0.318391 0.370367
444 1 0.743331 0.182507 0.433107
471 1 0.69354 0.249417 0.439618
476 1 0.750687 0.324532 0.433847
438 1 0.689731 0.184086 0.365701
157 1 0.874784 7.93892e-06 0.133406
385 1 0.994157 0.00303984 0.375031
1415 1 0.18404 0.491112 0.440981
448 1 0.873661 0.189656 0.443565
446 1 0.937156 0.196288 0.366252
420 1 1.00029 0.190676 0.444062
480 1 0.874682 0.316796 0.431134
479 1 0.941219 0.245926 0.435996
477 1 0.876209 0.24584 0.375222
478 1 0.938511 0.300464 0.375145
105 1 0.2513 0.377966 0.00313714
1283 1 0.0679031 0.490306 0.316548
90 1 0.815929 0.311906 0.0055648
566 1 0.682506 0.191374 0.49631
504 1 0.633506 0.443316 0.431237
484 1 0.998147 0.440274 0.43784
488 1 0.126607 0.428643 0.439658
482 1 0.0636413 0.437739 0.379889
483 1 0.0620839 0.36595 0.4431
485 1 0.134895 0.374038 0.377755
503 1 0.685862 0.38031 0.436256
492 1 0.242936 0.437244 0.440515
491 1 0.312453 0.366221 0.438668
487 1 0.18678 0.367014 0.441903
573 1 0.869494 0.122427 0.496668
489 1 0.249071 0.379093 0.377947
486 1 0.187086 0.433987 0.37213
490 1 0.316415 0.43482 0.378391
499 1 0.564461 0.378207 0.435493
1183 1 0.939945 0.493704 0.190136
500 1 0.496792 0.43794 0.431944
497 1 0.50125 0.373927 0.378168
496 1 0.380201 0.437232 0.44223
494 1 0.436794 0.441638 0.375092
495 1 0.441158 0.378473 0.432797
109 1 0.376045 0.374381 0.00307785
493 1 0.376362 0.375203 0.378306
1285 1 0.136184 0.49602 0.253287
498 1 0.568687 0.443873 0.384667
501 1 0.630514 0.38749 0.374797
1423 1 0.433207 0.495211 0.43803
1051 1 0.820117 0.499179 0.057123
1419 1 0.312747 0.498108 0.438571
407 1 0.68925 0.00480066 0.432767
159 1 0.945491 -0.00175918 0.180007
395 1 0.309846 0.00471707 0.439282
1435 1 0.817226 0.498616 0.435475
1181 1 0.886762 0.496072 0.115276
558 1 0.44449 0.185985 0.500054
121 1 0.743059 0.378065 0.00843819
570 1 0.807914 0.189158 0.497173
93 1 0.869231 0.251506 0.00659814
630 1 0.682595 0.438676 0.495228
625 1 0.508522 0.375499 0.494502
1541 1 0.119867 0.496845 0.491026
73 1 0.242327 0.250779 0.00679056
526 1 0.436111 0.0567003 0.497593
534 1 0.676784 0.0547235 0.496632
633 1 0.753978 0.382508 0.494509
38 1 0.193906 0.183142 0.00396959
1537 1 0.00297104 0.497032 0.498033
102 1 0.174981 0.44338 0.00508765
33 1 1.00261 0.122633 0.0122447
118 1 0.693055 0.441903 0.00547186
609 1 1.00048 0.369847 0.494391
37 1 0.125154 0.122544 0.00761025
585 1 0.248605 0.243182 0.494172
45 1 0.377912 0.132091 0.00320244
74 1 0.309773 0.3191 0.0047929
520 1 0.128163 0.0606666 0.562833
547 1 0.0653543 0.128611 0.565072
642 1 0.0635491 0.0619951 0.617395
677 1 0.123253 0.121997 0.627255
550 1 0.189957 0.177605 0.501577
516 1 0.998205 0.0663761 0.558338
518 1 0.189145 0.056909 0.506668
524 1 0.245803 0.0639873 0.565305
551 1 0.188054 0.121836 0.567796
555 1 0.311688 0.118771 0.567905
646 1 0.182673 0.0608933 0.62638
650 1 0.308596 0.0538358 0.627897
681 1 0.25484 0.124537 0.625843
1545 1 0.244174 0.497093 0.500929
538 1 0.805705 0.0698993 0.503588
671 1 0.936997 0.00585197 0.680297
528 1 0.381106 0.0568678 0.565572
559 1 0.43596 0.119501 0.574195
654 1 0.44312 0.0606595 0.628316
685 1 0.375112 0.123534 0.628209
689 1 0.502854 0.127883 0.629411
532 1 0.500561 0.0595107 0.568654
665 1 0.751961 -0.000511389 0.630718
519 1 0.194419 0.00178074 0.562521
1695 1 0.937444 0.499017 0.685501
536 1 0.626922 0.0634937 0.565423
563 1 0.563409 0.118565 0.562446
658 1 0.563357 0.0636275 0.633913
693 1 0.627168 0.129601 0.62331
535 1 0.690808 0.00456556 0.56394
593 1 0.504838 0.242035 0.50362
89 1 0.752773 0.243927 0.993576
1943 1 0.681334 0.496768 0.937268
540 1 0.744652 0.0706345 0.566279
567 1 0.679464 0.124297 0.56051
571 1 0.806574 0.131791 0.566229
662 1 0.682208 0.062589 0.630459
666 1 0.80571 0.0683671 0.629222
697 1 0.735169 0.131402 0.617847
775 1 0.198214 0.00284767 0.819856
97 1 0.99736 0.377286 1.00538
1675 1 0.308691 0.497268 0.681605
673 1 0.00986782 0.128539 0.626087
544 1 0.873131 0.0671063 0.568783
575 1 0.924872 0.126188 0.561307
670 1 0.944583 0.0708404 0.635056
701 1 0.866484 0.129409 0.622917
923 1 0.819461 0.00163718 0.941841
993 1 0.994701 0.379555 0.875372
1547 1 0.308293 0.497159 0.559528
1024 1 0.868871 0.449009 0.936257
552 1 0.124995 0.18226 0.564053
579 1 0.0646699 0.235076 0.566034
584 1 0.125923 0.307446 0.557535
674 1 0.0660654 0.185741 0.628443
706 1 0.0655262 0.303338 0.612455
709 1 0.126624 0.244701 0.625026
580 1 0.999099 0.295653 0.565231
705 1 -0.000392421 0.251252 0.631787
556 1 0.248988 0.178811 0.562448
583 1 0.189887 0.242108 0.565406
587 1 0.319413 0.246955 0.568611
588 1 0.251576 0.302041 0.560493
678 1 0.190636 0.183095 0.626763
682 1 0.311617 0.190633 0.627104
710 1 0.185019 0.30837 0.623091
713 1 0.248208 0.24384 0.62254
714 1 0.310843 0.315211 0.628601
793 1 0.75062 0.0105852 0.754027
614 1 0.188143 0.431905 0.507534
560 1 0.377002 0.183505 0.567961
591 1 0.433644 0.242677 0.568886
592 1 0.374735 0.313584 0.571157
686 1 0.437287 0.187546 0.62764
717 1 0.373716 0.245504 0.625385
718 1 0.440416 0.319172 0.620094
596 1 0.50147 0.31301 0.553504
721 1 0.501928 0.24984 0.620776
641 1 0.000439771 0.00592159 0.629752
577 1 0.00327483 0.246758 0.503965
564 1 0.497588 0.179256 0.56637
568 1 0.626599 0.19475 0.561179
595 1 0.567053 0.255944 0.566315
600 1 0.628447 0.31766 0.560271
690 1 0.56552 0.189808 0.621541
722 1 0.563419 0.321981 0.624296
725 1 0.612197 0.248538 0.63621
1927 1 0.188075 0.493293 0.943295
572 1 0.743478 0.194734 0.555599
599 1 0.681041 0.261138 0.564653
603 1 0.809723 0.246052 0.56582
604 1 0.75145 0.314634 0.564891
694 1 0.6748 0.199254 0.631249
698 1 0.802363 0.18311 0.620453
726 1 0.685716 0.317075 0.629257
729 1 0.746197 0.25397 0.629058
730 1 0.816765 0.317113 0.629217
899 1 0.0788823 0.00051732 0.93442
617 1 0.250951 0.371252 0.501333
548 1 -0.000622395 0.183999 0.560106
576 1 0.86902 0.185026 0.566508
607 1 0.939379 0.236625 0.563206
608 1 0.878764 0.301646 0.571285
702 1 0.934171 0.180073 0.629229
733 1 0.870418 0.253336 0.637221
734 1 0.936361 0.309624 0.630034
611 1 0.0489701 0.374381 0.564168
616 1 0.11902 0.43166 0.552156
738 1 0.0605274 0.444765 0.622057
741 1 0.115772 0.377007 0.618937
612 1 0.997253 0.436136 0.566603
789 1 0.627739 0.00534223 0.757137
1023 1 0.929501 0.388552 0.933168
589 1 0.383277 0.250321 0.504838
615 1 0.193552 0.363511 0.567125
619 1 0.309673 0.377709 0.562374
620 1 0.244916 0.440245 0.565257
742 1 0.185959 0.434233 0.617892
745 1 0.245916 0.376178 0.624511
746 1 0.30887 0.440529 0.619371
634 1 0.817427 0.434208 0.5024
1022 1 0.934275 0.438711 0.867088
669 1 0.874258 0.0094503 0.62827
605 1 0.869426 0.256169 0.500663
1691 1 0.807073 0.497874 0.684208
623 1 0.440421 0.379663 0.565876
624 1 0.376782 0.440239 0.556052
749 1 0.381246 0.381178 0.624129
750 1 0.43454 0.441345 0.625793
628 1 0.498975 0.437402 0.566221
753 1 0.497392 0.376095 0.620911
613 1 0.126092 0.37229 0.50096
795 1 0.813801 0.000870959 0.816792
637 1 0.869219 0.372716 0.499516
578 1 0.0520448 0.31327 0.510535
787 1 0.562828 0.00157245 0.812867
113 1 0.497855 0.372812 1.00043
541 1 0.867519 0.00373804 0.503611
627 1 0.563311 0.38834 0.563132
632 1 0.629704 0.446649 0.560728
754 1 0.56408 0.43902 0.624952
757 1 0.624819 0.373852 0.622117
53 1 0.62243 0.13309 0.99904
1021 1 0.867734 0.383563 0.869537
631 1 0.686612 0.380008 0.558573
635 1 0.817232 0.367585 0.564349
636 1 0.752864 0.4441 0.55398
758 1 0.687085 0.430608 0.630038
761 1 0.752879 0.380607 0.614315
762 1 0.812147 0.441965 0.623915
557 1 0.377477 0.120742 0.508129
530 1 0.555827 0.0591892 0.502644
769 1 0.990814 0.0123513 0.742399
996 1 0.995947 0.438461 0.936738
1933 1 0.367088 0.490254 0.883585
1811 1 0.568691 0.497707 0.806963
737 1 0.997127 0.376427 0.623884
639 1 0.943486 0.372315 0.560628
640 1 0.871506 0.432019 0.562767
765 1 0.881598 0.376314 0.625851
766 1 0.937092 0.442107 0.623625
522 1 0.322316 0.0625517 0.50802
1823 1 0.935893 0.496851 0.81252
648 1 0.123455 0.0643139 0.685917
675 1 0.0665326 0.126306 0.693153
770 1 0.0631209 0.0592353 0.757027
776 1 0.125982 0.0663275 0.812544
803 1 0.0604879 0.124322 0.816592
805 1 0.131569 0.13904 0.757011
772 1 0.00929211 0.0614125 0.815612
1015 1 0.685926 0.370599 0.935373
2 1 0.0616434 0.0686916 0.998999
652 1 0.254221 0.0598215 0.697179
679 1 0.192362 0.126769 0.687915
683 1 0.305758 0.129295 0.690696
774 1 0.192434 0.0724462 0.758881
778 1 0.31739 0.062403 0.747349
780 1 0.256488 0.0723404 0.816222
807 1 0.183333 0.132465 0.815745
809 1 0.25452 0.127267 0.755676
811 1 0.309426 0.135318 0.81823
779 1 0.315329 0.00711059 0.807115
1941 1 0.62785 0.4927 0.875992
656 1 0.371064 0.0618487 0.679784
687 1 0.439325 0.119956 0.682786
782 1 0.430554 0.060608 0.750277
784 1 0.379933 0.0665289 0.814431
813 1 0.372539 0.12406 0.742918
815 1 0.44 0.121335 0.80499
660 1 0.502703 0.0574468 0.694682
1805 1 0.368898 0.505464 0.749047
1921 1 0.999541 0.496422 0.876254
1945 1 0.741794 0.496918 0.874653
817 1 0.498978 0.126205 0.735631
788 1 0.504886 0.0648181 0.801811
664 1 0.633384 0.0673028 0.687571
691 1 0.566201 0.132146 0.685863
786 1 0.572494 0.0681616 0.741985
792 1 0.623076 0.0657115 0.809722
819 1 0.560207 0.136756 0.809732
821 1 0.619921 0.13195 0.751829
668 1 0.748784 0.0718719 0.694541
695 1 0.680501 0.130253 0.679695
699 1 0.817731 0.123294 0.693151
790 1 0.6825 0.0695043 0.746972
794 1 0.810546 0.0687539 0.758322
796 1 0.742333 0.0646931 0.810082
823 1 0.684517 0.126385 0.805321
825 1 0.74761 0.133943 0.753282
827 1 0.822774 0.1244 0.816116
1821 1 0.876943 0.500892 0.744757
909 1 0.367048 -0.00173107 0.872453
801 1 0.0105806 0.115428 0.750516
644 1 0.00788497 0.0621546 0.686357
672 1 0.884532 0.0661752 0.689268
703 1 0.943837 0.131173 0.692569
798 1 0.938167 0.0684681 0.750988
800 1 0.875567 0.0623915 0.812015
829 1 0.882845 0.126479 0.751791
831 1 0.937535 0.122268 0.816718
1018 1 0.802025 0.439548 0.877628
707 1 0.0594175 0.254449 0.695254
680 1 0.12188 0.195252 0.686509
712 1 0.119866 0.308548 0.679863
802 1 0.0697053 0.186603 0.757413
808 1 0.125126 0.19647 0.820653
834 1 0.0572444 0.320033 0.750554
835 1 0.0653071 0.251561 0.804678
837 1 0.129071 0.256736 0.751923
840 1 0.125626 0.315338 0.798568
684 1 0.250833 0.19397 0.684202
711 1 0.191295 0.253598 0.687828
715 1 0.315503 0.251241 0.684319
716 1 0.25089 0.309265 0.680416
806 1 0.190537 0.20182 0.754613
810 1 0.308449 0.191219 0.756632
812 1 0.24031 0.195593 0.818355
838 1 0.187708 0.321171 0.739312
839 1 0.193198 0.264707 0.809925
841 1 0.248375 0.257328 0.746311
842 1 0.303393 0.319864 0.745566
843 1 0.311876 0.25075 0.818966
844 1 0.253553 0.319499 0.811631
688 1 0.371153 0.183879 0.687427
719 1 0.435092 0.249803 0.68714
720 1 0.378855 0.313692 0.683296
814 1 0.442138 0.181348 0.745084
816 1 0.371687 0.184143 0.803402
845 1 0.372084 0.242857 0.745969
846 1 0.433662 0.311905 0.743505
847 1 0.43838 0.242409 0.802184
848 1 0.372616 0.305912 0.80882
849 1 0.490381 0.252624 0.744859
692 1 0.50383 0.189297 0.683646
820 1 0.502008 0.190547 0.810392
724 1 0.492963 0.317565 0.683457
852 1 0.489622 0.311253 0.808502
696 1 0.619854 0.190653 0.696599
723 1 0.55061 0.260164 0.686702
728 1 0.621997 0.30782 0.688539
818 1 0.554853 0.193234 0.74206
824 1 0.629407 0.189808 0.814408
850 1 0.560524 0.31458 0.749395
851 1 0.56131 0.250273 0.814582
853 1 0.625039 0.251038 0.753853
856 1 0.620854 0.317269 0.81811
700 1 0.750056 0.183168 0.686268
727 1 0.696041 0.260406 0.697059
731 1 0.80611 0.24163 0.688326
732 1 0.754398 0.321521 0.68499
822 1 0.691321 0.192561 0.753128
826 1 0.815725 0.181563 0.754583
828 1 0.756467 0.192851 0.813641
854 1 0.680545 0.316759 0.75169
855 1 0.680699 0.261319 0.814276
857 1 0.756258 0.251478 0.758805
858 1 0.811512 0.309583 0.74995
859 1 0.822476 0.250128 0.808702
860 1 0.748108 0.320353 0.805782
708 1 0.999236 0.313021 0.684184
833 1 -0.000677928 0.258969 0.755552
804 1 0.0014583 0.186502 0.814374
676 1 0.00518629 0.196127 0.689976
836 1 0.00887314 0.312444 0.81658
704 1 0.867639 0.187435 0.683219
735 1 0.943421 0.250042 0.688302
736 1 0.872149 0.324263 0.68897
830 1 0.938319 0.189639 0.754216
832 1 0.877116 0.191414 0.807527
861 1 0.878776 0.250293 0.736778
862 1 0.93268 0.309828 0.747442
863 1 0.941478 0.256384 0.810541
864 1 0.87356 0.318556 0.80751
1009 1 0.502399 0.382861 0.875979
739 1 0.0651897 0.37632 0.682688
744 1 0.120619 0.436943 0.685815
866 1 0.0569138 0.438857 0.744079
867 1 0.0644962 0.379994 0.813872
869 1 0.118243 0.380295 0.73974
872 1 0.119311 0.436649 0.806461
740 1 0.00879888 0.431709 0.678579
743 1 0.175611 0.373862 0.675691
747 1 0.312805 0.382421 0.682128
748 1 0.248991 0.445209 0.687238
870 1 0.185219 0.427844 0.741253
871 1 0.194959 0.376833 0.815164
873 1 0.249907 0.38724 0.744163
874 1 0.307725 0.444161 0.753917
875 1 0.31585 0.382109 0.816618
876 1 0.248302 0.442437 0.812702
1005 1 0.372021 0.37501 0.880598
1012 1 0.497027 0.439615 0.934375
751 1 0.438534 0.381619 0.687961
752 1 0.372774 0.441897 0.681016
877 1 0.381999 0.37814 0.742557
878 1 0.439861 0.448099 0.742653
879 1 0.4232 0.369889 0.81838
880 1 0.376564 0.442766 0.809572
756 1 0.504844 0.443655 0.690884
884 1 0.499677 0.439987 0.807285
907 1 0.31493 0.00266255 0.942028
881 1 0.494512 0.368621 0.752113
755 1 0.553607 0.379145 0.68534
760 1 0.624251 0.435004 0.684401
882 1 0.569247 0.447069 0.745479
883 1 0.557875 0.378818 0.804116
885 1 0.622948 0.373974 0.745468
888 1 0.621474 0.433487 0.815521
1795 1 0.0582367 0.498315 0.809929
1006 1 0.438358 0.436551 0.864472
759 1 0.686886 0.372552 0.694508
763 1 0.817138 0.384656 0.679525
764 1 0.749338 0.436311 0.682713
886 1 0.679556 0.435874 0.74719
887 1 0.688381 0.372133 0.8168
889 1 0.752413 0.377075 0.746139
890 1 0.819213 0.440782 0.746385
891 1 0.811506 0.374593 0.810554
892 1 0.756998 0.435522 0.802764
1017 1 0.751031 0.374399 0.873736
1007 1 0.432879 0.374138 0.941311
1008 1 0.378128 0.432839 0.942821
868 1 1.00047 0.436885 0.806895
865 1 0.995704 0.379269 0.742096
767 1 0.936892 0.380021 0.689777
768 1 0.875748 0.437307 0.681534
893 1 0.876509 0.37299 0.752002
894 1 0.933714 0.435399 0.75276
895 1 0.933129 0.378248 0.810486
896 1 0.868877 0.438089 0.806872
919 1 0.687527 0.009001 0.940024
927 1 0.932767 0.00176183 0.943446
898 1 0.0712698 0.0560639 0.874905
904 1 0.132986 0.0646775 0.938677
931 1 0.0625454 0.12692 0.932486
933 1 0.131238 0.126898 0.879921
929 1 -0.000422429 0.12847 0.875414
900 1 0.00777431 0.0632579 0.92989
18 1 0.56229 0.0716916 0.994564
1551 1 0.441128 0.497006 0.555905
1020 1 0.751972 0.427928 0.938177
902 1 0.19394 0.0658388 0.872635
906 1 0.325142 0.0715673 0.868648
908 1 0.251335 0.0760249 0.940133
935 1 0.189238 0.131916 0.943696
937 1 0.254908 0.131961 0.878002
939 1 0.317036 0.126727 0.938401
554 1 0.317176 0.190226 0.507118
1014 1 0.686118 0.427917 0.879611
1929 1 0.247312 0.497683 0.88154
513 1 0.995359 0.00360581 0.499779
25 1 0.746888 0.00769065 0.989955
1019 1 0.815401 0.371757 0.943346
543 1 0.937589 0.00395912 0.568779
126 1 0.936221 0.435721 0.999462
910 1 0.445461 0.0620063 0.870235
912 1 0.378836 0.0649619 0.927977
941 1 0.3821 0.137011 0.872903
943 1 0.440179 0.138139 0.930307
945 1 0.500748 0.126726 0.870434
1817 1 0.741285 0.494604 0.751527
1671 1 0.18671 0.494477 0.68551
1002 1 0.298668 0.43487 0.882266
916 1 0.504389 0.0676533 0.944788
914 1 0.559962 0.0681291 0.874427
920 1 0.620428 0.0619766 0.935861
947 1 0.560262 0.125167 0.932783
949 1 0.623714 0.125089 0.873893
14 1 0.435017 0.0614299 0.991282
514 1 0.0620774 0.0614757 0.499778
26 1 0.815717 0.058811 0.999412
918 1 0.690009 0.0643234 0.872806
922 1 0.815994 0.0539228 0.874795
924 1 0.748456 0.0640204 0.935298
951 1 0.68612 0.132473 0.93514
953 1 0.758082 0.126214 0.866824
955 1 0.818567 0.125855 0.940967
998 1 0.184966 0.43666 0.885573
997 1 0.124172 0.374514 0.874377
791 1 0.69121 0.00494134 0.818988
785 1 0.498512 0.000338083 0.757385
926 1 0.942178 0.0507196 0.868655
928 1 0.872844 0.0709194 0.94051
957 1 0.879354 0.11872 0.876608
959 1 0.940329 0.126711 0.933091
58 1 0.819283 0.18865 0.997173
930 1 0.0610475 0.193287 0.870403
936 1 0.127952 0.194555 0.939208
962 1 0.0721227 0.312476 0.865463
963 1 0.0666399 0.24812 0.93296
965 1 0.134635 0.251337 0.868784
968 1 0.125826 0.310424 0.936295
932 1 0.00818285 0.19233 0.933072
1949 1 0.87838 0.496067 0.87517
85 1 0.625113 0.248709 0.993258
549 1 0.128653 0.1165 0.502917
999 1 0.175642 0.377788 0.943024
1003 1 0.31397 0.374539 0.945365
934 1 0.192483 0.185761 0.880921
938 1 0.303232 0.194637 0.876225
940 1 0.255611 0.186159 0.946009
966 1 0.185839 0.317118 0.876898
967 1 0.186534 0.258111 0.94036
969 1 0.241409 0.25187 0.874545
970 1 0.315144 0.313726 0.881517
971 1 0.308727 0.25489 0.942611
972 1 0.25432 0.320716 0.934076
994 1 0.0488405 0.436012 0.87357
995 1 0.0630748 0.375441 0.942108
57 1 0.756022 0.123798 0.995309
973 1 0.374138 0.249376 0.876513
944 1 0.371791 0.185825 0.938722
942 1 0.439588 0.191805 0.872049
976 1 0.379343 0.309 0.936116
975 1 0.439402 0.246744 0.940677
974 1 0.430653 0.310425 0.876965
948 1 0.503727 0.197414 0.933823
1001 1 0.243265 0.377703 0.886286
980 1 0.491749 0.311747 0.934186
946 1 0.567714 0.188179 0.872765
984 1 0.624986 0.31901 0.941622
978 1 0.552427 0.316704 0.868253
981 1 0.622571 0.254757 0.879483
979 1 0.561607 0.259476 0.938082
952 1 0.625545 0.192481 0.937609
977 1 0.495952 0.249104 0.872383
1049 1 0.759177 0.499343 0.999583
1011 1 0.563277 0.372435 0.932983
1013 1 0.624179 0.373332 0.873909
1016 1 0.622744 0.433692 0.941201
49 1 0.49728 0.138301 0.9945
988 1 0.749387 0.313782 0.934941
986 1 0.817322 0.319576 0.875153
982 1 0.680298 0.311915 0.875527
954 1 0.823393 0.196162 0.881217
983 1 0.688937 0.251417 0.931557
987 1 0.819432 0.252148 0.94144
956 1 0.752697 0.191551 0.930978
950 1 0.692027 0.192496 0.868255
985 1 0.758588 0.257856 0.870869
1000 1 0.118832 0.434996 0.937481
539 1 0.809711 -0.000792225 0.570785
22 1 0.680463 0.0670092 0.997732
1010 1 0.56224 0.442452 0.873634
961 1 0.991036 0.251141 0.869756
964 1 0.0042413 0.30819 0.934597
989 1 0.881167 0.253847 0.867351
992 1 0.878348 0.316153 0.935689
991 1 0.937613 0.251312 0.939368
960 1 0.886814 0.187982 0.944636
958 1 0.93078 0.192643 0.873552
990 1 0.939437 0.322089 0.877491
667 1 0.814172 0.0104038 0.690357
645 1 0.133182 8.07755e-05 0.625444
94 1 0.947412 0.306687 0.999725
1004 1 0.250883 0.43684 0.94973
1669 1 0.120861 0.491248 0.621345
1923 1 0.0591433 0.491948 0.938874
773 1 0.132014 0.00857246 0.750204
921 1 0.752396 0.000274772 0.873139
797 1 0.867559 0.00438439 0.7461
569 1 0.741886 0.120386 0.505162
542 1 0.934997 0.058457 0.497961
901 1 0.135319 0.00344316 0.874291
50 1 0.568134 0.195698 0.993566
561 1 0.499793 0.121778 0.504978
34 1 0.0630555 0.185037 0.998341
1935 1 0.432492 0.498634 0.943734
537 1 0.747864 0.00727379 0.501365
562 1 0.559158 0.178032 0.509544
917 1 0.624841 0.00406131 0.873326
1681 1 0.503949 0.496029 0.625582
122 1 0.813943 0.439665 0.995762
1673 1 0.241039 0.499085 0.622106
903 1 0.187838 0.00245299 0.937899
517 1 0.130683 0.00322005 0.502354
783 1 0.435773 0.00624664 0.81081
515 1 0.0589912 0.0039879 0.562205
1797 1 0.121165 0.498625 0.743857
905 1 0.259186 0.0108899 0.882537
1555 1 0.565222 0.493053 0.559742
523 1 0.313971 0.000911454 0.56717
1815 1 0.682587 0.491867 0.809655
1689 1 0.739446 0.495154 0.620745
1925 1 0.119933 0.494095 0.877871
590 1 0.438462 0.322822 0.51151
527 1 0.444276 -0.000501182 0.565646
114 1 0.56121 0.428106 0.995851
69 1 0.133262 0.248817 1.00089
70 1 0.190001 0.314451 0.998104
1025 1 0.997714 0.493392 0.996379
81 1 0.502614 0.26158 0.995636
66 1 0.0666094 0.307214 0.995633
601 1 0.749477 0.256173 0.501444
98 1 0.0565253 0.434666 0.994034
553 1 0.257303 0.113486 0.504708
565 1 0.620855 0.125289 0.502205
586 1 0.310783 0.313475 0.501459
606 1 0.938983 0.298538 0.507587
582 1 0.188281 0.306637 0.508247
622 1 0.443565 0.427871 0.501538
581 1 0.128883 0.24252 0.503089
597 1 0.625709 0.252127 0.500364
629 1 0.622114 0.377503 0.500589
1032 1 0.118445 0.559859 0.0580495
1059 1 0.0600595 0.620373 0.0553473
1154 1 0.0597567 0.56087 0.115424
1189 1 0.118902 0.616139 0.127388
1167 1 0.431465 0.500091 0.191378
1565 1 0.885132 0.502885 0.502526
1036 1 0.24909 0.561343 0.062519
1063 1 0.188926 0.63242 0.058893
1067 1 0.315479 0.621223 0.066996
1158 1 0.180888 0.561788 0.120128
1162 1 0.310083 0.557885 0.125451
1193 1 0.246167 0.618884 0.122494
1508 1 0.998795 0.937597 0.441503
275 1 0.561852 0.99474 0.312855
1297 1 0.499795 0.500517 0.254359
1040 1 0.374264 0.566377 0.0638739
1071 1 0.429958 0.628574 0.0617736
1166 1 0.430541 0.561756 0.131226
1197 1 0.38577 0.626638 0.128302
1177 1 0.757141 0.50306 0.116147
1030 1 0.18764 0.566342 -0.00109271
1044 1 0.489452 0.565016 0.0637885
1201 1 0.494333 0.632345 0.125222
1048 1 0.620727 0.559828 0.0552939
1075 1 0.550534 0.621313 0.0628221
1170 1 0.555401 0.56789 0.128479
1205 1 0.63192 0.625785 0.115238
287 1 0.934848 0.994806 0.317942
1299 1 0.559432 0.500947 0.314368
1052 1 0.759802 0.560931 0.0620033
1079 1 0.692511 0.616122 0.0540494
1083 1 0.819317 0.619964 0.0638714
1174 1 0.69275 0.567947 0.119749
1178 1 0.825656 0.55979 0.139237
1209 1 0.755157 0.619881 0.125382
7 1 0.189016 0.996718 0.0613395
27 1 0.813202 0.996367 0.0573274
1098 1 0.315534 0.813534 0.00209135
1039 1 0.429214 0.50348 0.0592808
1185 1 0.00465429 0.626524 0.120067
1028 1 0.99989 0.557806 0.0592209
1086 1 0.94373 0.690098 0.000835125
1056 1 0.884833 0.563815 0.0575843
1087 1 0.934373 0.63364 0.0614961
1182 1 0.941895 0.566726 0.119592
1213 1 0.880644 0.627322 0.132434
1057 1 0.0021962 0.623095 -0.000710942
1411 1 0.0582458 0.508077 0.429918
1533 1 0.870341 0.879389 0.37625
1064 1 0.118361 0.692275 0.0607259
1091 1 0.0590515 0.754303 0.0643972
1096 1 0.12201 0.815772 0.0581446
1186 1 0.0658748 0.67859 0.128811
1218 1 0.0674288 0.80974 0.125993
1221 1 0.129475 0.750195 0.118757
1287 1 0.195563 0.501238 0.316711
273 1 0.498983 0.9941 0.252376
1179 1 0.822365 0.498575 0.197388
1068 1 0.250275 0.687977 0.0669279
1095 1 0.186501 0.752225 0.0550377
1099 1 0.311986 0.737947 0.0594964
1100 1 0.252146 0.807748 0.0628186
1190 1 0.182216 0.687959 0.126394
1194 1 0.316536 0.687657 0.129139
1222 1 0.183717 0.810805 0.118777
1225 1 0.238476 0.744123 0.127182
1226 1 0.310815 0.80689 0.129502
533 1 0.626984 0.996665 0.493643
405 1 0.626205 0.999138 0.367974
1626 1 0.818665 0.816213 0.501385
11 1 0.317856 0.995882 0.0510368
1072 1 0.369246 0.683944 0.0606953
1103 1 0.44178 0.74861 0.0571841
1104 1 0.371184 0.802935 0.0632959
1198 1 0.433714 0.695434 0.123792
1229 1 0.381845 0.759006 0.130764
1230 1 0.452949 0.81424 0.117146
1076 1 0.499736 0.693107 0.0612311
267 1 0.310491 0.994939 0.320804
1108 1 0.511879 0.808842 0.0614357
1233 1 0.500636 0.746915 0.125283
1080 1 0.63016 0.692294 0.0574018
1107 1 0.570243 0.744624 0.0662982
1112 1 0.624181 0.813864 0.059703
1202 1 0.560455 0.679837 0.120956
1234 1 0.572552 0.807012 0.125345
1237 1 0.631614 0.748782 0.124122
1562 1 0.815957 0.554809 0.495218
1654 1 0.685802 0.944123 0.495649
1597 1 0.870435 0.625902 0.498448
1084 1 0.749558 0.690875 0.0596875
1111 1 0.690161 0.76256 0.0615281
1115 1 0.816485 0.753183 0.0664529
1116 1 0.754666 0.817365 0.0673593
1206 1 0.688098 0.688096 0.123125
1210 1 0.822545 0.685804 0.1278
1238 1 0.686337 0.819034 0.121608
1241 1 0.745109 0.750123 0.12019
1242 1 0.812544 0.812964 0.12975
269 1 0.381905 0.998732 0.242311
413 1 0.85616 0.997063 0.371579
1026 1 0.0576811 0.555751 -0.00106389
1031 1 0.184061 0.505071 0.0612989
1217 1 1.00127 0.756815 0.128985
1060 1 0.00408722 0.693223 0.0574508
1092 1 0.00105157 0.809021 0.0606264
1088 1 0.875915 0.693284 0.0623659
1119 1 0.938212 0.751727 0.0663959
1120 1 0.871758 0.816736 0.0641978
1214 1 0.945109 0.688667 0.124461
1245 1 0.876202 0.749864 0.124387
1246 1 0.942145 0.816337 0.119684
1301 1 0.623152 0.502371 0.250819
1429 1 0.635508 0.505771 0.374928
409 1 0.746132 1.00237 0.371643
1123 1 0.0570983 0.869621 0.0539245
1128 1 0.125331 0.938141 0.0578026
1250 1 0.0652926 0.940983 0.117025
1253 1 0.122775 0.877082 0.126238
1124 1 0.00888153 0.93717 0.060368
1249 1 0.00552718 0.872026 0.13271
1127 1 0.188178 0.870885 0.0616816
1131 1 0.310207 0.877605 0.0563343
1132 1 0.251866 0.944689 0.06457
1254 1 0.189941 0.934933 0.123758
1257 1 0.252338 0.868135 0.119386
1258 1 0.315999 0.943011 0.117808
1534 1 0.933953 0.939009 0.382119
1535 1 0.93828 0.881087 0.445203
1295 1 0.424202 0.507819 0.315328
1309 1 0.880311 0.501506 0.253662
1536 1 0.867312 0.938244 0.438555
1135 1 0.450041 0.878168 0.0576109
1136 1 0.388232 0.939336 0.0569719
1261 1 0.382617 0.874672 0.12163
1262 1 0.443423 0.933537 0.11809
391 1 0.188434 1.00094 0.437863
153 1 0.757503 0.997379 0.120204
1265 1 0.511093 0.87228 0.116258
1140 1 0.510143 0.9333 0.060928
1139 1 0.570182 0.8738 0.0542941
1144 1 0.625362 0.932589 0.0625524
1266 1 0.565389 0.937493 0.123615
1269 1 0.615466 0.877834 0.122994
1307 1 0.825071 0.503939 0.311283
1421 1 0.375329 0.510076 0.380432
1145 1 0.749465 0.87466 -0.00347985
1143 1 0.68773 0.878312 0.0634877
1147 1 0.809866 0.876338 0.0558686
1148 1 0.742941 0.936338 0.0626068
1270 1 0.683587 0.929973 0.129184
1273 1 0.753269 0.877241 0.128375
1274 1 0.813866 0.938632 0.121848
1610 1 0.303043 0.817208 0.493999
1531 1 0.814108 0.879341 0.438511
285 1 0.867551 0.99597 0.253215
277 1 0.630929 0.996586 0.250556
393 1 0.246006 0.994985 0.379341
1151 1 0.945099 0.880718 0.0572009
1152 1 0.877076 0.937535 0.053727
1277 1 0.88415 0.881331 0.119714
1278 1 0.941986 0.934364 0.129625
149 1 0.628593 0.993954 0.125012
1047 1 0.687861 0.505996 0.0617358
1427 1 0.563441 0.507738 0.437946
1657 1 0.757697 0.875372 0.494498
139 1 0.309763 0.993715 0.180525
133 1 0.129582 0.997907 0.121521
1160 1 0.128566 0.560228 0.19236
1187 1 0.0600774 0.617486 0.186516
1282 1 0.0623254 0.563023 0.256589
1288 1 0.125298 0.556834 0.318223
1315 1 0.0528929 0.63226 0.313416
1317 1 0.116746 0.626882 0.262406
1156 1 0.00373821 0.555905 0.183348
1284 1 0.993934 0.557878 0.313081
1303 1 0.691559 0.507681 0.310942
1175 1 0.689116 0.501506 0.179702
279 1 0.686002 0.99206 0.313152
1164 1 0.249301 0.559699 0.181772
1191 1 0.184217 0.622874 0.18753
1195 1 0.317445 0.624203 0.179695
1286 1 0.194836 0.563604 0.256405
1290 1 0.312223 0.569744 0.250965
1292 1 0.24637 0.569117 0.3234
1319 1 0.183083 0.622147 0.313042
1321 1 0.248439 0.623222 0.251072
1323 1 0.306076 0.622905 0.31265
1168 1 0.371046 0.559003 0.184116
1199 1 0.437384 0.631112 0.19407
1294 1 0.437825 0.562226 0.245932
1296 1 0.376117 0.573584 0.308339
1325 1 0.374162 0.631589 0.251052
1327 1 0.431012 0.636143 0.313918
1300 1 0.487109 0.571666 0.308152
1329 1 0.496088 0.624092 0.249589
1172 1 0.491616 0.560874 0.186716
399 1 0.440403 0.989912 0.440282
1176 1 0.6298 0.562309 0.181808
1203 1 0.564184 0.623037 0.187652
1298 1 0.571539 0.563873 0.24462
1304 1 0.6261 0.560997 0.309034
1331 1 0.558658 0.622459 0.305612
1333 1 0.634607 0.62071 0.236152
1538 1 0.0555975 0.558793 0.495338
1289 1 0.253707 0.500907 0.254686
1526 1 0.687235 0.937216 0.372005
1180 1 0.752459 0.563569 0.189445
1207 1 0.69051 0.62476 0.18508
1211 1 0.80648 0.625546 0.189303
1302 1 0.69425 0.559177 0.249172
1306 1 0.818225 0.562092 0.256885
1308 1 0.757399 0.5665 0.309827
1335 1 0.683764 0.633107 0.311596
1337 1 0.751919 0.629465 0.252773
1339 1 0.815375 0.63678 0.313727
151 1 0.689399 0.993761 0.18018
1163 1 0.308787 0.500989 0.191667
1313 1 0.000476854 0.625153 0.252401
1184 1 0.883675 0.55436 0.188975
1215 1 0.941879 0.622139 0.193819
1310 1 0.943472 0.561986 0.245515
1312 1 0.882159 0.567956 0.312506
1341 1 0.874176 0.625748 0.247698
1343 1 0.93754 0.628075 0.31095
1431 1 0.689832 0.507416 0.438503
397 1 0.374691 0.9985 0.376779
1192 1 0.127664 0.68261 0.191746
1219 1 0.0796004 0.75053 0.185514
1224 1 0.134336 0.814025 0.186005
1314 1 0.0542425 0.690333 0.256596
1320 1 0.123957 0.688255 0.31249
1346 1 0.0601551 0.812439 0.256464
1347 1 0.0636223 0.751306 0.311795
1349 1 0.124981 0.75246 0.248075
1352 1 0.12475 0.815556 0.316862
1188 1 0.0123512 0.692614 0.189401
1220 1 0.00645877 0.808399 0.189385
1316 1 0.997437 0.688275 0.317563
1196 1 0.245831 0.681323 0.185186
1223 1 0.183193 0.740577 0.190867
1227 1 0.30728 0.747373 0.188868
1228 1 0.240282 0.814548 0.181198
1318 1 0.180682 0.676661 0.25377
1322 1 0.305466 0.686053 0.243698
1324 1 0.247516 0.689083 0.31589
1350 1 0.193551 0.796017 0.251295
1351 1 0.185369 0.750993 0.318795
1353 1 0.244203 0.735924 0.252577
1354 1 0.30676 0.805911 0.247863
1355 1 0.304668 0.747189 0.318326
1356 1 0.250383 0.815713 0.318329
1200 1 0.371326 0.688818 0.188718
1231 1 0.437113 0.749792 0.187459
1232 1 0.370121 0.822557 0.194244
1326 1 0.431743 0.702712 0.255475
1328 1 0.368876 0.695142 0.322636
1357 1 0.376483 0.75977 0.25173
1358 1 0.440643 0.812633 0.249046
1359 1 0.434715 0.754155 0.325745
1360 1 0.368247 0.814055 0.313477
1332 1 0.49924 0.68305 0.308908
1364 1 0.49423 0.808529 0.310363
1236 1 0.500823 0.808634 0.183959
1361 1 0.503126 0.750888 0.245387
1204 1 0.499659 0.687687 0.183548
1208 1 0.619526 0.686626 0.177608
1235 1 0.565356 0.747911 0.182663
1330 1 0.562241 0.690629 0.244696
1336 1 0.619081 0.678107 0.303981
1363 1 0.562561 0.736618 0.313044
1365 1 0.620449 0.755939 0.252034
1368 1 0.619228 0.814365 0.309078
1240 1 0.626149 0.810523 0.183512
1362 1 0.555787 0.818966 0.254317
1212 1 0.748599 0.689515 0.183358
1239 1 0.686782 0.742176 0.184086
1243 1 0.821201 0.745073 0.189121
1244 1 0.749383 0.816878 0.186014
1334 1 0.675713 0.68902 0.249283
1338 1 0.819908 0.689462 0.249354
1340 1 0.741629 0.693197 0.307095
1366 1 0.691781 0.811576 0.247722
1367 1 0.681017 0.756898 0.311239
1369 1 0.759463 0.757931 0.247688
1370 1 0.821852 0.81396 0.246426
1371 1 0.81715 0.740227 0.321712
1372 1 0.754516 0.803549 0.31222
1345 1 0.00162055 0.751771 0.247425
1348 1 0.00640181 0.811666 0.315662
1216 1 0.883981 0.682968 0.193407
1247 1 0.941173 0.746264 0.187639
1248 1 0.875733 0.810507 0.180746
1342 1 0.941424 0.687406 0.24852
1344 1 0.882355 0.688202 0.320436
1373 1 0.876336 0.74629 0.248812
1374 1 0.944084 0.81238 0.251618
1375 1 0.937315 0.751525 0.312663
1376 1 0.87758 0.816196 0.316862
1609 1 0.240643 0.747543 0.4935
1305 1 0.756531 0.500907 0.248809
1251 1 0.0639873 0.870993 0.191868
1256 1 0.119631 0.931112 0.18865
1378 1 0.0614266 0.944726 0.252958
1379 1 0.0665874 0.87549 0.313351
1381 1 0.122951 0.865133 0.247574
1384 1 0.128168 0.940068 0.31032
1252 1 0.0161084 0.939579 0.180336
1255 1 0.186593 0.879234 0.186495
1259 1 0.302317 0.872699 0.184165
1260 1 0.247309 0.938434 0.195392
1382 1 0.183464 0.939502 0.242176
1383 1 0.181815 0.86642 0.309076
1385 1 0.237669 0.865494 0.245012
1386 1 0.31372 0.935001 0.251915
1387 1 0.315428 0.876268 0.314092
1388 1 0.238487 0.93489 0.306293
1528 1 0.632421 0.932286 0.432162
1550 1 0.43171 0.559292 0.496802
1033 1 0.241705 0.503033 0.00536317
1532 1 0.755098 0.941853 0.439278
1263 1 0.440396 0.870983 0.175638
1264 1 0.375369 0.934613 0.174985
1389 1 0.377544 0.878725 0.254097
1390 1 0.432722 0.935755 0.231726
1391 1 0.4379 0.868996 0.311986
1392 1 0.381283 0.935325 0.316667
1268 1 0.49882 0.936578 0.182447
1393 1 0.49288 0.874607 0.250381
1396 1 0.503181 0.932477 0.311455
1529 1 0.758481 0.865789 0.377774
1267 1 0.560465 0.873777 0.189692
1272 1 0.626322 0.938275 0.191304
1394 1 0.562004 0.932459 0.250528
1395 1 0.563997 0.877145 0.315471
1397 1 0.629356 0.869288 0.249019
1400 1 0.629462 0.928415 0.310771
1517 1 0.368396 0.868662 0.379901
1527 1 0.695669 0.878914 0.436553
401 1 0.499035 0.99145 0.376458
1271 1 0.695698 0.876616 0.188776
1275 1 0.818253 0.883332 0.185168
1276 1 0.749764 0.938305 0.195219
1398 1 0.688984 0.932082 0.256596
1399 1 0.690191 0.865709 0.311733
1401 1 0.755028 0.872479 0.250761
1402 1 0.813032 0.926397 0.250875
1403 1 0.816974 0.874967 0.312461
1404 1 0.750359 0.930349 0.308241
145 1 0.506655 0.993987 0.123646
1380 1 0.99571 0.9385 0.317638
1377 1 0.998408 0.870681 0.250092
389 1 0.12754 0.993215 0.375969
1279 1 0.940389 0.863589 0.185205
1280 1 0.870709 0.942006 0.193229
1405 1 0.875578 0.875002 0.240271
1406 1 0.930812 0.940312 0.245573
1407 1 0.936498 0.879569 0.318003
1408 1 0.875805 0.93491 0.316292
1410 1 0.0552845 0.564885 0.36155
1416 1 0.124642 0.564867 0.436915
1443 1 0.0643206 0.620697 0.444682
1445 1 0.12326 0.620825 0.373659
1412 1 0.997601 0.564114 0.43074
1441 1 0.00845057 0.630003 0.381069
1557 1 0.624776 0.505532 0.494295
1520 1 0.37443 0.93377 0.436258
1449 1 0.250521 0.628836 0.38105
1451 1 0.31167 0.628607 0.439217
1420 1 0.254049 0.561583 0.443181
1447 1 0.18217 0.618843 0.444134
1414 1 0.194421 0.557499 0.389846
1418 1 0.313616 0.562337 0.372409
1422 1 0.430974 0.569105 0.378146
1455 1 0.443234 0.625775 0.452345
1424 1 0.368675 0.576577 0.442957
1453 1 0.373554 0.628783 0.376646
1457 1 0.497111 0.626054 0.370786
1428 1 0.498832 0.569091 0.428031
1518 1 0.439066 0.921451 0.379212
1594 1 0.814997 0.692692 0.497311
1622 1 0.686468 0.80983 0.494748
1461 1 0.622311 0.623236 0.369492
1426 1 0.561175 0.566717 0.374981
1432 1 0.62382 0.573763 0.434648
1459 1 0.556457 0.630379 0.434086
1530 1 0.803337 0.933278 0.366002
1606 1 0.185115 0.814723 0.494454
3 1 0.0623483 1.00103 0.0524709
1437 1 0.880819 0.502848 0.371455
1524 1 0.517101 0.930894 0.44457
1519 1 0.438524 0.864724 0.44041
1430 1 0.690989 0.567247 0.377054
1463 1 0.683444 0.625099 0.427362
1436 1 0.750074 0.564808 0.443541
1434 1 0.82505 0.568042 0.374058
1465 1 0.759481 0.621742 0.374054
1467 1 0.813134 0.617955 0.437797
1425 1 0.487693 0.510352 0.368497
1561 1 0.75496 0.49962 0.498406
1440 1 0.879454 0.565179 0.444505
1469 1 0.880003 0.629499 0.378617
1471 1 0.932434 0.632736 0.437349
1438 1 0.941048 0.57156 0.379415
1480 1 0.124037 0.810961 0.43942
1477 1 0.123528 0.75078 0.374411
1442 1 0.0620366 0.688122 0.373371
1475 1 0.0631109 0.751579 0.441276
1448 1 0.116878 0.682026 0.433964
1474 1 0.0549837 0.806097 0.382022
1473 1 0.994728 0.748218 0.383789
1444 1 0.998164 0.691369 0.448322
1043 1 0.554517 0.507118 0.0671547
1482 1 0.30912 0.813955 0.379555
1478 1 0.19051 0.807725 0.380827
1484 1 0.243753 0.821669 0.440593
1479 1 0.176056 0.744138 0.433576
1452 1 0.238123 0.682242 0.439205
1481 1 0.254483 0.749152 0.385797
1450 1 0.310619 0.683079 0.379946
1446 1 0.186332 0.686405 0.374824
1483 1 0.31076 0.746482 0.44641
1409 1 0.00305276 0.502067 0.371847
1510 1 0.189918 0.933798 0.378771
1511 1 0.185463 0.880249 0.441617
1173 1 0.618957 0.503784 0.128085
1488 1 0.36997 0.814007 0.441085
1487 1 0.436 0.754561 0.437944
1454 1 0.436709 0.687768 0.379025
1456 1 0.37532 0.684112 0.439695
1486 1 0.438217 0.817627 0.38088
1485 1 0.370527 0.756424 0.382223
1489 1 0.501627 0.755999 0.376432
1513 1 0.249131 0.872616 0.381377
263 1 0.186279 1.00226 0.312806
1515 1 0.307714 0.878642 0.435047
1460 1 0.500413 0.700276 0.446634
1492 1 0.502965 0.809042 0.446074
1490 1 0.567906 0.811217 0.377131
1458 1 0.561583 0.683502 0.374416
1464 1 0.628735 0.68711 0.435645
1493 1 0.620627 0.746187 0.372667
1496 1 0.636139 0.818152 0.43423
1491 1 0.568456 0.748974 0.433385
1514 1 0.301662 0.932895 0.378241
1053 1 0.881039 0.503249 0.00208429
1495 1 0.684032 0.742021 0.438743
1498 1 0.820317 0.81473 0.376595
1497 1 0.756161 0.744196 0.370434
1462 1 0.689017 0.690421 0.36706
1499 1 0.813344 0.758802 0.43525
1466 1 0.814581 0.684464 0.38467
1468 1 0.747465 0.685551 0.443379
1494 1 0.699456 0.804396 0.374625
1500 1 0.753977 0.815732 0.443011
1521 1 0.506302 0.866575 0.382177
1476 1 0.999234 0.81611 0.44619
1503 1 0.939762 0.756873 0.443226
1501 1 0.88762 0.756019 0.375833
1472 1 0.871625 0.69072 0.439782
1502 1 0.950476 0.812828 0.376192
1470 1 0.936007 0.691019 0.377172
1504 1 0.87953 0.816 0.434335
1433 1 0.762451 0.503878 0.370416
1522 1 0.567908 0.931105 0.377073
1525 1 0.628394 0.869334 0.373099
1505 1 0.00473168 0.881051 0.378236
1506 1 0.0599267 0.940795 0.371169
1509 1 0.12304 0.874897 0.381971
1507 1 0.0604623 0.871843 0.437793
1512 1 0.12348 0.941274 0.443595
387 1 0.0520765 1.00082 0.437996
1523 1 0.569822 0.871624 0.444869
1516 1 0.253088 0.941565 0.446033
265 1 0.247461 0.998557 0.252828
19 1 0.561333 1.00051 0.0632335
271 1 0.439148 0.998763 0.31588
1605 1 0.126267 0.756399 0.500688
1554 1 0.566904 0.564966 0.501297
1293 1 0.36562 0.501031 0.248382
1625 1 0.746113 0.750021 0.496343
129 1 0.00595737 0.997769 0.119507
1066 1 0.311778 0.678572 0.000432428
1085 1 0.87424 0.623749 0.00368997
1065 1 0.250975 0.62296 0.00276488
1574 1 0.178527 0.689367 0.494025
1569 1 0.999745 0.630767 0.499559
21 1 0.636897 0.996301 0.000342434
1621 1 0.628317 0.754741 0.496465
1558 1 0.690812 0.566034 0.496718
1546 1 0.312609 0.561267 0.499474
1589 1 0.623566 0.62918 0.494738
1581 1 0.375038 0.63148 0.501627
1037 1 0.368922 0.502612 0.00383767
1081 1 0.756874 0.620393 0.00528172
1126 1 0.188245 0.94287 0.0043172
1082 1 0.817352 0.692793 0.00158527
1138 1 0.568282 0.941162 0.00215359
1150 1 0.937377 0.9443 0.00413407
1117 1 0.875862 0.75994 0.00402363
1077 1 0.628768 0.61577 0.00472181
1133 1 0.375805 0.876374 0.00870433
1544 1 0.120127 0.5592 0.558854
1571 1 0.0632097 0.627906 0.565988
1666 1 0.059163 0.562777 0.624997
1701 1 0.131646 0.617776 0.623519
1540 1 0.00368479 0.57005 0.55365
1567 1 0.936854 0.505671 0.561559
1585 1 0.502497 0.617002 0.507157
1793 1 0.993005 0.498878 0.744323
1665 1 0.998559 0.507885 0.620174
1633 1 0.997848 0.874824 0.506166
1548 1 0.255696 0.561579 0.567498
1575 1 0.189178 0.619829 0.568832
1579 1 0.308006 0.619596 0.560827
1670 1 0.186864 0.55805 0.618358
1674 1 0.312532 0.561221 0.625488
1705 1 0.247621 0.622791 0.624652
521 1 0.257036 0.998766 0.507261
1667 1 0.0600861 0.50467 0.681488
1137 1 0.504808 0.872649 0.994616
1552 1 0.365595 0.561563 0.561854
1583 1 0.429841 0.617679 0.556484
1678 1 0.440564 0.554154 0.61779
1709 1 0.384348 0.617924 0.625959
1556 1 0.504755 0.553521 0.561811
1713 1 0.491408 0.618187 0.615485
1114 1 0.810642 0.813076 1.00241
799 1 0.937119 0.999645 0.801653
1061 1 0.119997 0.621312 0.995622
1560 1 0.624404 0.561877 0.563323
1587 1 0.568433 0.62758 0.564337
1682 1 0.565546 0.566834 0.621216
1717 1 0.635718 0.629161 0.618508
1641 1 0.245655 0.877612 0.50169
2045 1 0.873522 0.884328 0.871138
1582 1 0.432026 0.693871 0.503226
1149 1 0.87466 0.869726 1.00281
1564 1 0.763044 0.564759 0.557542
1591 1 0.69093 0.622114 0.557427
1595 1 0.817325 0.630666 0.561828
1686 1 0.692319 0.568298 0.61819
1690 1 0.813531 0.560426 0.628264
1721 1 0.756576 0.621561 0.627143
1046 1 0.693323 0.55984 0.995803
1570 1 0.0635562 0.690059 0.501306
1697 1 0.000375599 0.620398 0.617303
1568 1 0.865043 0.568502 0.560141
1599 1 0.925564 0.62817 0.566429
1694 1 0.933876 0.57154 0.619218
1725 1 0.873618 0.628566 0.633403
1618 1 0.561214 0.815346 0.505135
657 1 0.496546 0.991755 0.629704
1576 1 0.120257 0.688097 0.564878
1603 1 0.0594119 0.761444 0.571161
1608 1 0.122915 0.813256 0.566082
1698 1 0.06041 0.690473 0.620093
1730 1 0.0627244 0.81337 0.638126
1733 1 0.122322 0.743435 0.630271
1604 1 0.000187382 0.816859 0.560846
1687 1 0.68533 0.503164 0.694943
1685 1 0.630785 0.5042 0.623243
1062 1 0.186166 0.690023 1.00066
1580 1 0.24107 0.682373 0.563223
1607 1 0.180931 0.745119 0.55832
1611 1 0.307888 0.747585 0.564883
1612 1 0.250321 0.803582 0.554517
1702 1 0.184285 0.683872 0.626554
1706 1 0.309286 0.68239 0.614842
1734 1 0.181497 0.80833 0.62297
1737 1 0.242556 0.741261 0.628516
1738 1 0.305787 0.805514 0.619436
1947 1 0.805996 0.502779 0.929646
1584 1 0.370595 0.685033 0.560545
1615 1 0.426446 0.746203 0.564246
1616 1 0.370697 0.809546 0.558621
1710 1 0.436881 0.68205 0.620805
1741 1 0.3665 0.752311 0.623886
1742 1 0.431777 0.818344 0.626816
1588 1 0.501484 0.682396 0.55803
525 1 0.369587 0.997135 0.501117
1129 1 0.24417 0.876944 1.00173
1745 1 0.497838 0.74703 0.614081
1620 1 0.502805 0.816078 0.566163
1592 1 0.623083 0.686384 0.558584
1619 1 0.569761 0.75637 0.563004
1624 1 0.62597 0.811359 0.56948
1714 1 0.56055 0.693908 0.622521
1746 1 0.569036 0.811126 0.635488
1749 1 0.634368 0.747912 0.627069
1596 1 0.754797 0.687509 0.55708
1623 1 0.68947 0.750307 0.556862
1627 1 0.818711 0.752384 0.556197
1628 1 0.758857 0.807802 0.553443
1718 1 0.697982 0.680953 0.612505
1722 1 0.810893 0.695244 0.624553
1750 1 0.691114 0.811347 0.626169
1753 1 0.755135 0.747799 0.6244
1754 1 0.816267 0.816825 0.631207
661 1 0.627044 0.991572 0.619091
777 1 0.247602 0.99129 0.741288
1650 1 0.569773 0.92826 0.504659
1572 1 0.990729 0.685562 0.564333
1729 1 0.999426 0.750635 0.625167
1069 1 0.37005 0.619953 0.995486
1600 1 0.869416 0.687633 0.559843
1631 1 0.932098 0.752772 0.568516
1632 1 0.871162 0.816744 0.565681
1726 1 0.939415 0.681751 0.629413
1757 1 0.876096 0.752694 0.621895
1758 1 0.936851 0.809263 0.625366
1931 1 0.309607 0.50378 0.947341
1693 1 0.88208 0.503349 0.61942
1635 1 0.0652927 0.873854 0.56458
1640 1 0.128642 0.940376 0.563112
1762 1 0.0548194 0.936309 0.619815
1765 1 0.122755 0.8728 0.626439
531 1 0.565181 0.997558 0.563784
1539 1 0.0635905 0.501571 0.56064
1542 1 0.184787 0.562742 0.506532
1141 1 0.62986 0.866525 0.998651
925 1 0.882354 0.988059 0.872202
1639 1 0.178588 0.870903 0.562966
1643 1 0.305838 0.871963 0.560458
1644 1 0.248684 0.938305 0.567884
1766 1 0.184166 0.931223 0.629612
1769 1 0.24908 0.866969 0.624114
1770 1 0.305335 0.935675 0.626736
1679 1 0.44136 0.502919 0.682477
1645 1 0.370142 0.870197 0.497079
1937 1 0.494068 0.50457 0.872729
2046 1 0.942528 0.940752 0.879212
915 1 0.563052 0.996399 0.929594
1647 1 0.432579 0.868675 0.559826
1648 1 0.376244 0.942188 0.562336
1773 1 0.370559 0.871781 0.619101
1774 1 0.433421 0.926823 0.621204
1652 1 0.5013 0.929732 0.570172
1559 1 0.689497 0.500612 0.558407
1598 1 0.934535 0.690428 0.501826
653 1 0.371563 0.995196 0.62733
1113 1 0.744613 0.748934 0.992535
1634 1 0.0548287 0.944115 0.5075
1777 1 0.504312 0.871782 0.630391
1651 1 0.563503 0.868315 0.566718
1656 1 0.634577 0.941704 0.560885
1778 1 0.567936 0.937983 0.624729
1781 1 0.629646 0.86974 0.62568
1951 1 0.937494 0.502229 0.938995
1939 1 0.550859 0.506488 0.92982
663 1 0.683223 1.00129 0.688063
1655 1 0.695328 0.876434 0.557525
1659 1 0.810549 0.883028 0.562169
1660 1 0.748652 0.944196 0.564744
1782 1 0.691179 0.933616 0.623325
1785 1 0.750326 0.872866 0.622345
1786 1 0.809865 0.942021 0.638596
1662 1 0.938034 0.94047 0.504919
1629 1 0.879798 0.753099 0.499998
1761 1 0.994531 0.877746 0.629727
1636 1 0.99084 0.938047 0.569668
1663 1 0.932648 0.882813 0.568961
1664 1 0.868091 0.942582 0.573535
1789 1 0.871372 0.884751 0.628759
1790 1 0.930423 0.94219 0.627147
1142 1 0.687223 0.929988 0.996522
1034 1 0.312485 0.565227 1.00103
1819 1 0.80217 0.502027 0.811567
1613 1 0.366485 0.749719 0.503651
1672 1 0.125285 0.55807 0.679878
1699 1 0.0633534 0.617337 0.680198
1794 1 0.0513328 0.550873 0.751246
1800 1 0.118963 0.557456 0.811195
1827 1 0.0615192 0.622954 0.809356
1829 1 0.121056 0.621296 0.749317
1577 1 0.242457 0.62517 0.504076
771 1 0.0678853 0.996281 0.816305
1676 1 0.250145 0.562522 0.686451
1703 1 0.18499 0.626141 0.681801
1707 1 0.316322 0.622791 0.693008
1798 1 0.17971 0.55495 0.745729
1802 1 0.306451 0.562647 0.754205
1804 1 0.24248 0.564802 0.811092
1831 1 0.179185 0.621534 0.810427
1833 1 0.246496 0.625639 0.750451
1835 1 0.312852 0.625113 0.826508
1029 1 0.122352 0.504136 0.998482
2036 1 0.50128 0.946199 0.932154
1058 1 0.0538115 0.689615 0.992238
1680 1 0.3795 0.563265 0.690694
1711 1 0.443968 0.622468 0.678914
1806 1 0.430959 0.562047 0.748569
1808 1 0.372062 0.562577 0.811376
1837 1 0.3775 0.626545 0.753809
1839 1 0.436075 0.623697 0.822069
1841 1 0.499543 0.624881 0.744597
1684 1 0.498142 0.56317 0.68184
2047 1 0.933889 0.874961 0.939598
2048 1 0.872774 0.935279 0.938446
1653 1 0.631933 0.876413 0.502089
1812 1 0.4953 0.563779 0.807358
1688 1 0.63419 0.567549 0.681983
1715 1 0.563567 0.628442 0.68237
1810 1 0.55838 0.561846 0.738612
1816 1 0.629072 0.560666 0.80854
1843 1 0.558609 0.615795 0.803833
1845 1 0.625483 0.617463 0.75011
1097 1 0.248253 0.744432 1.00174
2033 1 0.502944 0.872612 0.874608
1646 1 0.4385 0.936147 0.509291
1692 1 0.746246 0.555796 0.685416
1719 1 0.695714 0.62339 0.679782
1723 1 0.806098 0.622457 0.692574
1814 1 0.690006 0.569964 0.751037
1818 1 0.807502 0.554966 0.75131
1820 1 0.747227 0.564246 0.8134
1847 1 0.695008 0.626658 0.815097
1849 1 0.750704 0.62803 0.745475
1851 1 0.807728 0.629161 0.808186
1803 1 0.309579 0.503177 0.813053
1825 1 0.996121 0.619032 0.746785
1668 1 0.995597 0.559641 0.691401
1796 1 0.996478 0.559195 0.817922
1696 1 0.875765 0.557461 0.685032
1727 1 0.942128 0.622298 0.686372
1822 1 0.931873 0.56169 0.753321
1824 1 0.879486 0.562521 0.816981
1853 1 0.869516 0.622527 0.738705
1855 1 0.938542 0.622418 0.814597
1704 1 0.115194 0.680204 0.682906
1731 1 0.0621363 0.745542 0.683209
1736 1 0.130262 0.801494 0.689577
1826 1 0.0513035 0.687783 0.756121
1832 1 0.119714 0.688306 0.812075
1858 1 0.0671599 0.815768 0.753219
1859 1 0.0540713 0.74941 0.814353
1861 1 0.116027 0.742045 0.744694
1864 1 0.125441 0.813521 0.808463
1828 1 0.994309 0.690055 0.810592
1708 1 0.250343 0.679505 0.688915
1735 1 0.183385 0.738099 0.686686
1739 1 0.310052 0.73739 0.691428
1740 1 0.245066 0.801636 0.686264
1830 1 0.176008 0.681794 0.750311
1834 1 0.312458 0.679343 0.755879
1836 1 0.240188 0.678372 0.816411
1862 1 0.185255 0.800437 0.749768
1863 1 0.182163 0.743463 0.800297
1865 1 0.246994 0.7399 0.754905
1866 1 0.311259 0.805546 0.748897
1867 1 0.309903 0.736371 0.809648
1868 1 0.248551 0.809847 0.815762
1712 1 0.383383 0.687837 0.694105
1743 1 0.43972 0.741527 0.691185
1744 1 0.363604 0.813055 0.685719
1838 1 0.44269 0.687702 0.754664
1840 1 0.375225 0.682152 0.811599
1869 1 0.374726 0.749461 0.744457
1870 1 0.427741 0.809841 0.744367
1871 1 0.43555 0.743758 0.819672
1872 1 0.36446 0.806875 0.812733
1876 1 0.505776 0.809955 0.8106
1716 1 0.500238 0.690102 0.686516
1748 1 0.49355 0.808139 0.679592
1844 1 0.501405 0.680946 0.808793
1873 1 0.507294 0.746519 0.74533
1720 1 0.627224 0.686995 0.676056
1747 1 0.567594 0.757067 0.690851
1752 1 0.639362 0.816237 0.689691
1842 1 0.563177 0.68424 0.749866
1848 1 0.627369 0.68374 0.812968
1874 1 0.556942 0.818917 0.744178
1875 1 0.561913 0.750514 0.815517
1877 1 0.625772 0.751874 0.757689
1880 1 0.628464 0.809598 0.811945
1724 1 0.751564 0.687549 0.684251
1751 1 0.685982 0.74955 0.683878
1755 1 0.812529 0.745816 0.685584
1756 1 0.750465 0.806542 0.688068
1846 1 0.683273 0.683333 0.749272
1850 1 0.819375 0.689755 0.74422
1852 1 0.753461 0.684829 0.812721
1878 1 0.689166 0.816238 0.75429
1879 1 0.684279 0.747829 0.811027
1881 1 0.743592 0.745013 0.740041
1882 1 0.816379 0.807622 0.746087
1883 1 0.811525 0.751017 0.803903
1884 1 0.753031 0.804267 0.813669
1732 1 0.998537 0.810556 0.691955
1857 1 0.994871 0.748169 0.745204
1700 1 0.00490371 0.679142 0.683511
1860 1 0.0089939 0.817738 0.8212
1728 1 0.879864 0.686236 0.685322
1759 1 0.936053 0.743308 0.680689
1760 1 0.88073 0.80593 0.685892
1854 1 0.933734 0.678025 0.744988
1856 1 0.884723 0.682201 0.805362
1885 1 0.885964 0.753514 0.748748
1886 1 0.939981 0.816195 0.751632
1887 1 0.944029 0.758675 0.817104
1888 1 0.879107 0.816912 0.804652
1683 1 0.565431 0.509515 0.682841
1763 1 0.0565408 0.872746 0.689506
1768 1 0.120771 0.934477 0.681044
1890 1 0.0706168 0.936298 0.751485
1891 1 0.0684936 0.875651 0.816756
1893 1 0.127026 0.873519 0.741931
1896 1 0.124462 0.935216 0.810413
1889 1 0.00531611 0.874562 0.754069
1892 1 0.00546561 0.942013 0.810822
1638 1 0.187137 0.94155 0.507364
1073 1 0.492993 0.622391 0.998511
1105 1 0.507749 0.744617 0.994366
2039 1 0.69863 0.872733 0.934611
1767 1 0.185338 0.862104 0.682139
1771 1 0.309708 0.871802 0.680468
1772 1 0.25165 0.930192 0.684332
1894 1 0.18218 0.940674 0.747213
1895 1 0.188545 0.872401 0.799663
1897 1 0.250273 0.867157 0.748954
1898 1 0.305856 0.928504 0.749439
1899 1 0.310836 0.871733 0.811171
1900 1 0.248777 0.945196 0.806308
651 1 0.309081 0.989924 0.685785
1775 1 0.423986 0.872252 0.682028
1776 1 0.367063 0.929585 0.687642
1901 1 0.371173 0.868565 0.748501
1902 1 0.427849 0.930272 0.748761
1903 1 0.419855 0.870029 0.815284
1904 1 0.366318 0.938063 0.811954
1905 1 0.493031 0.869455 0.752564
1102 1 0.430265 0.80299 0.997594
1908 1 0.49773 0.932927 0.816676
897 1 0.00574945 0.995963 0.87997
911 1 0.429751 0.994992 0.929268
1780 1 0.501527 0.936634 0.691738
1779 1 0.570475 0.877992 0.686871
1784 1 0.624849 0.943493 0.690486
1906 1 0.560243 0.946963 0.754793
1907 1 0.566035 0.878752 0.807004
1909 1 0.628129 0.886771 0.745964
1912 1 0.629612 0.943734 0.811258
1614 1 0.432987 0.80303 0.504853
2022 1 0.198643 0.931544 0.869181
2032 1 0.37479 0.93764 0.939874
1783 1 0.691649 0.878961 0.682693
1787 1 0.805368 0.877297 0.687971
1788 1 0.748605 0.946044 0.692773
1910 1 0.690839 0.933556 0.746435
1911 1 0.690176 0.872788 0.814189
1913 1 0.7553 0.871323 0.748697
1914 1 0.810944 0.942892 0.750297
1915 1 0.80856 0.875978 0.811167
1916 1 0.748349 0.941305 0.809741
1089 1 0.00669885 0.758191 0.982027
1642 1 0.309637 0.931845 0.502768
1764 1 0.00840528 0.93873 0.691498
1791 1 0.934162 0.872849 0.682383
1792 1 0.882634 0.942387 0.696562
1917 1 0.873293 0.876892 0.742779
1918 1 0.941673 0.929801 0.747276
1919 1 0.934898 0.877218 0.815549
1920 1 0.870021 0.934657 0.806612
1125 1 0.124931 0.871826 0.999059
2026 1 0.30738 0.945543 0.877861
1563 1 0.812712 0.497432 0.566059
2027 1 0.306017 0.875141 0.939024
1922 1 0.0556294 0.560347 0.871894
1928 1 0.11821 0.564967 0.934849
1955 1 0.053523 0.626407 0.930062
1957 1 0.115722 0.625052 0.870083
1924 1 0.998419 0.561146 0.938459
2025 1 0.257617 0.87499 0.873257
1953 1 0.996303 0.619629 0.870005
1617 1 0.500042 0.75468 0.504971
1578 1 0.307968 0.689234 0.501404
1930 1 0.314353 0.555159 0.878234
1926 1 0.179783 0.561818 0.87078
1963 1 0.310606 0.619944 0.930266
1961 1 0.249281 0.614282 0.874068
1932 1 0.253743 0.557614 0.941037
1959 1 0.186468 0.61986 0.937373
2037 1 0.621795 0.869201 0.877969
1809 1 0.505063 0.505024 0.749897
2034 1 0.573063 0.934431 0.873738
1965 1 0.375902 0.620935 0.87309
1934 1 0.435564 0.561346 0.872207
1936 1 0.372242 0.557747 0.93312
1967 1 0.433092 0.627102 0.937492
2043 1 0.80946 0.881743 0.934188
1969 1 0.504961 0.616503 0.867508
1940 1 0.492739 0.5617 0.933975
1938 1 0.560541 0.552897 0.868069
1944 1 0.632097 0.563913 0.939496
1971 1 0.556069 0.621962 0.92694
1973 1 0.615679 0.615357 0.870295
1050 1 0.810652 0.563418 0.988646
2041 1 0.751259 0.879191 0.871482
1090 1 0.065545 0.81582 0.993188
2040 1 0.620541 0.93788 0.938201
2044 1 0.746881 0.942871 0.942426
1813 1 0.625723 0.51035 0.743708
2038 1 0.671056 0.937064 0.880741
1942 1 0.681848 0.557963 0.872374
1946 1 0.816549 0.564897 0.873611
1977 1 0.758783 0.624951 0.872323
1979 1 0.815439 0.62992 0.935866
1948 1 0.750169 0.561649 0.929034
1975 1 0.690666 0.620693 0.927578
2031 1 0.436302 0.877113 0.934249
1121 1 0.00452886 0.885068 0.997869
1950 1 0.944214 0.557121 0.879171
1981 1 0.88154 0.63022 0.870314
1983 1 0.938983 0.627269 0.949251
1952 1 0.879622 0.560819 0.933373
2035 1 0.560718 0.873203 0.937202
1677 1 0.371281 0.500763 0.614479
2028 1 0.257581 0.953367 0.940171
1807 1 0.439807 0.500003 0.807276
2029 1 0.366043 0.877482 0.878976
1986 1 0.0730548 0.819072 0.879848
1987 1 0.0684249 0.755185 0.932341
1954 1 0.0515056 0.696547 0.878654
1992 1 0.132775 0.813883 0.930774
1989 1 0.123384 0.754836 0.87157
1960 1 0.121526 0.68203 0.934679
1988 1 0.00102438 0.827855 0.931051
1985 1 0.989973 0.759557 0.887575
1996 1 0.252579 0.810055 0.940992
1990 1 0.186814 0.817479 0.867918
1994 1 0.310524 0.806593 0.87644
1958 1 0.179185 0.688861 0.876831
1993 1 0.241182 0.750488 0.861503
1964 1 0.244456 0.684423 0.93509
1995 1 0.306676 0.747874 0.938671
1962 1 0.310735 0.690907 0.877748
1991 1 0.189207 0.753577 0.935524
2030 1 0.436535 0.934216 0.867151
2021 1 0.131369 0.882474 0.874795
2023 1 0.192733 0.874796 0.927347
2000 1 0.373051 0.810104 0.941412
1966 1 0.432888 0.685075 0.881694
1997 1 0.372347 0.750867 0.876622
1999 1 0.437117 0.745941 0.932645
1998 1 0.431382 0.807002 0.87101
1968 1 0.37095 0.68837 0.938276
2004 1 0.494154 0.814207 0.934273
1658 1 0.811515 0.941692 0.503765
1972 1 0.499198 0.679749 0.935321
2001 1 0.503515 0.748762 0.878223
2003 1 0.566648 0.743571 0.929633
1976 1 0.624122 0.682951 0.931794
2005 1 0.626765 0.746002 0.872073
2002 1 0.563297 0.811686 0.871408
2008 1 0.624924 0.805761 0.933448
1970 1 0.564467 0.683709 0.865325
1978 1 0.821389 0.696719 0.869962
2006 1 0.694097 0.813343 0.870274
2012 1 0.753662 0.812389 0.935814
2009 1 0.749821 0.753284 0.872664
2011 1 0.81957 0.758567 0.945085
2010 1 0.810571 0.821422 0.870971
1980 1 0.752511 0.689661 0.930342
1974 1 0.678642 0.686291 0.869816
2007 1 0.684186 0.744666 0.928904
2024 1 0.135407 0.939765 0.940695
1956 1 0.991828 0.687517 0.932656
2016 1 0.873565 0.820946 0.935407
1984 1 0.876252 0.691218 0.939503
2014 1 0.942271 0.82396 0.875005
2013 1 0.881381 0.768134 0.874636
1982 1 0.930054 0.694648 0.870704
2015 1 0.933819 0.75359 0.942723
2042 1 0.808129 0.93875 0.869371
2020 1 0.00740008 0.937673 0.937419
1074 1 0.568816 0.684507 0.998329
2017 1 0.00326751 0.887057 0.876278
2018 1 0.0735352 0.940774 0.87211
2019 1 0.065568 0.881839 0.929137
1093 1 0.120925 0.748899 0.990826
1 1 0.00324182 0.999319 0.998768
1122 1 0.0667867 0.941802 0.997369
1110 1 0.693648 0.809288 0.988882
647 1 0.188035 0.999557 0.684969
1543 1 0.185229 0.501439 0.55721
1094 1 0.185558 0.813026 0.995898
1045 1 0.616671 0.499463 0.993926
643 1 0.0642585 0.999385 0.682549
781 1 0.373202 0.999958 0.735891
1661 1 0.876186 0.88332 0.508904
659 1 0.558263 0.999782 0.692945
1078 1 0.686652 0.682732 0.99348
1799 1 0.188502 0.50383 0.811872
1070 1 0.434144 0.687521 0.999375
1801 1 0.24354 0.503953 0.752717
1630 1 0.933479 0.81725 0.509369
1146 1 0.809693 0.939999 0.991986
655 1 0.437975 0.996923 0.686345
1101 1 0.365884 0.741398 1.00104
1134 1 0.446316 0.933059 0.996071
13 1 0.37907 0.990977 0.997285
913 1 0.49503 1.00025 0.863706
649 1 0.246859 0.995633 0.621882
1038 1 0.42888 0.567756 0.998336
1042 1 0.554895 0.565217 0.994441
1118 1 0.948288 0.814559 0.998987
1106 1 0.564329 0.802186 0.990807
1602 1 0.0623812 0.810886 0.508339
529 1 0.500026 0.997573 0.505991
1130 1 0.317255 0.934337 0.998883
1109 1 0.625883 0.746379 0.995105
1590 1 0.684026 0.682783 0.502164
17 1 0.500914 0.995119 0.992747
1637 1 0.123719 0.870317 0.501712
1054 1 0.943345 0.56676 0.998336
1549 1 0.373855 0.507703 0.502525
1601 1 0.994935 0.755455 0.512125
1566 1 0.937886 0.569494 0.507572
1649 1 0.50455 0.873234 0.50481
1573 1 0.122279 0.620417 0.5106
1593 1 0.755668 0.628317 0.501678
1586 1 0.567001 0.696221 0.503201
| [
"[email protected]"
] | |
147f74e7695a23f54e4d60422aa134405e303107 | b167407960a3b69b16752590def1a62b297a4b0c | /tools/project-creator/Python2.6.6/Lib/ctypes/test/test_varsize_struct.py | f4c25715a02e5cb28aa1eee72294595b8126bbb1 | [
"MIT"
] | permissive | xcode1986/nineck.ca | 543d1be2066e88a7db3745b483f61daedf5f378a | 637dfec24407d220bb745beacebea4a375bfd78f | refs/heads/master | 2020-04-15T14:48:08.551821 | 2019-01-15T07:36:06 | 2019-01-15T07:36:06 | 164,768,581 | 1 | 1 | MIT | 2019-01-15T08:30:27 | 2019-01-09T02:09:21 | C++ | UTF-8 | Python | false | false | 1,956 | py | from ctypes import *
import unittest
class VarSizeTest(unittest.TestCase):
    def test_resize(self):
        class X(Structure):
            _fields_ = [("item", c_int),
                        ("array", c_int * 1)]

        self.failUnlessEqual(sizeof(X), sizeof(c_int) * 2)
        x = X()
        x.item = 42
        x.array[0] = 100
        self.failUnlessEqual(sizeof(x), sizeof(c_int) * 2)

        # make room for one additional item
        new_size = sizeof(X) + sizeof(c_int) * 1
        resize(x, new_size)
        self.failUnlessEqual(sizeof(x), new_size)
        self.failUnlessEqual((x.item, x.array[0]), (42, 100))

        # make room for 10 additional items
        new_size = sizeof(X) + sizeof(c_int) * 9
        resize(x, new_size)
        self.failUnlessEqual(sizeof(x), new_size)
        self.failUnlessEqual((x.item, x.array[0]), (42, 100))

        # make room for one additional item
        new_size = sizeof(X) + sizeof(c_int) * 1
        resize(x, new_size)
        self.failUnlessEqual(sizeof(x), new_size)
        self.failUnlessEqual((x.item, x.array[0]), (42, 100))
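        # Note: ctypes.resize() grows the memory buffer of this one instance
        # only; sizeof(X) -- the type -- stays the same, while sizeof(x) now
        # reports the enlarged buffer. Reaching the extra items still needs a
        # cast, e.g. (a sketch, not part of the original test suite):
        #   bigger = cast(x.array, POINTER(c_int * 10))[0]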
    def test_array_invalid_length(self):
        # cannot create arrays with non-positive size
        self.failUnlessRaises(ValueError, lambda: c_int * -1)
        self.failUnlessRaises(ValueError, lambda: c_int * -3)
    def test_zerosized_array(self):
        array = (c_int * 0)()
        # accessing elements of zero-sized arrays raises IndexError
        self.failUnlessRaises(IndexError, array.__setitem__, 0, None)
        self.failUnlessRaises(IndexError, array.__getitem__, 0)
        self.failUnlessRaises(IndexError, array.__setitem__, 1, None)
        self.failUnlessRaises(IndexError, array.__getitem__, 1)
        self.failUnlessRaises(IndexError, array.__setitem__, -1, None)
        self.failUnlessRaises(IndexError, array.__getitem__, -1)
if __name__ == "__main__":
    unittest.main()
| [
"[email protected]"
] | |
4749151995a180d653b3e898e082677a1668e88a | 139617b9e7c7dbbc592170a761a2f9fb1ee06734 | /main_kaggle.py | 0300f2b589b53fdf7d932cb6bf1cd133dfcab12b | [] | no_license | ANKITPODDER2000/FSP_ML_Project | 09aecb7d803a217a710914e6369ac4a790ec6424 | bba6cf8f70f9c6977654856a8be8de75ebf22f77 | refs/heads/master | 2022-12-14T06:21:18.370814 | 2020-09-18T17:15:50 | 2020-09-18T17:15:50 | 295,822,511 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,817 | py | # -*- coding: utf-8 -*-
"""Untitled7.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1N_Vdb2XxxCwvUDqTVVT7hoS0qYL73aSw
from google.colab import files
files.upload()
! mkdir ~/.kaggle
! cp kaggle.json ~/.kaggle/
! chmod 600 ~/.kaggle/kaggle.json
!kaggle competitions download -c titanic
"""
import pandas as pd
print("Version of pandas : ",pd.__version__)
import seaborn as sns
print("Version of seaborn : ",sns.__version__)
import matplotlib.pyplot as plt
import numpy as np
print("Version of numpy : ",np.__version__)
#%matplotlib inline
import sklearn as sk
print("Version of scikit-learnt : ",sk.__version__)
import math
train_data = pd.read_csv("./dataset.csv")
train_data.head()
#Name isn't needed
train_data.drop("Name" , axis = 1 , inplace = True)
sns.heatmap(train_data.isna())
plt.show()
train_data.drop("Cabin" , axis = 1 , inplace = True)
sns.heatmap(train_data.isna())
plt.show()
plt.plot(train_data['Age'].dropna())
plt.show()
data = train_data[['Age' , 'Pclass']].dropna()
sns.scatterplot(data = data , x = 'Pclass' , y = 'Age' , hue = 'Pclass' , palette = "viridis")
plt.show()
sns.countplot(x = 'Pclass' , data = train_data)
plt.show()
sns.countplot(x = 'Survived' , data = train_data)
plt.show()
sns.countplot(x = 'Survived' , data = train_data , hue = "Pclass")
plt.show()
sns.countplot(x = 'Survived' , data = train_data , hue = "Sex")
plt.show()
avg_age_class1 = math.ceil(data[data['Pclass'] == 1]['Age'].mean())
print("avg_age_class1 : ",avg_age_class1)
avg_age_class2 = math.ceil(data[data['Pclass'] == 2]['Age'].mean())
print("avg_age_class2 : ",avg_age_class2)
avg_age_class3 = math.ceil(data[data['Pclass'] == 3]['Age'].mean())
print("avg_age_class3 : ",avg_age_class3)
def updateAge(List):
    # Fill a missing age with the mean age of the passenger's class.
    age = List[0]
    Pclass = List[1]
    if pd.isnull(age):
        if Pclass == 1:
            age = avg_age_class1
        elif Pclass == 2:
            age = avg_age_class2
        else:
            age = avg_age_class3
    return age
train_data['Age'] = train_data[['Age' , "Pclass"]].apply(updateAge , axis = 1)
sns.heatmap(train_data.isna())
plt.show()
train_data.head(n = 3)
print("Parch value : " , train_data['Parch'].unique())
train_data.drop(columns = ['Embarked' , "Ticket" ] , axis = 1 , inplace = True)
def replaceSex(s):
if s == "male":
return 1
return 0
train_data['Sex'] = train_data['Sex'].apply(replaceSex) #1->male || 0->female
train_data.head(n = 3)
train_data.drop("PassengerId" , axis = 1 , inplace = True)
train_data.head(n = 3)
X = train_data.drop("Survived" , axis = 1)
y = train_data['Survived']
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=100)
print("Details of train_data : ")
print("Shape of X_train : " , X_train.shape)
print("Shape of y_train : " , y_train.shape)
print("Features name : ",X_train.columns)
print("Target name : Survived")
print("\n================================\n")
print("Details of test_data : ")
print("Shape of X_test : " , X_test.shape)
print("Shape of y_test : " , y_test.shape)
C_ = [0.0001,0.001,0.01,0.1,1,5,10,20,30,40,50,100]
model = {}
acc = []
val_acc = []
for i in C_:
model_name = "model_c_" + str(i)
model[model_name] = {}
model_LR = LogisticRegression(C = i).fit(X_train , y_train)
model[model_name]['model'] = model_LR
model[model_name]['acc'] = model_LR.score(X_train , y_train)
acc.append(model[model_name]['acc'])
model[model_name]['val_acc'] = model_LR.score(X_test , y_test)
val_acc.append(model[model_name]['val_acc'])
plt.plot(acc , label = "training_data")
plt.plot(val_acc,'o--' , label = "testing_data")
plt.ylabel("Accurecy")
plt.xlabel("C->")
plt.legend()
plt.show()
take_model = model["model_c_10"]['model']
from sklearn.metrics import classification_report , confusion_matrix , accuracy_score
print("Confusion matrix for train _data : ")
print(confusion_matrix(y_train , take_model.predict(X_train)))
print("Confusion matrix for test _data : ")
print(confusion_matrix(y_test , take_model.predict(X_test)))
print("Accurecy score for training data : %.3f %%"%( accuracy_score(y_train , take_model.predict(X_train)) * 100))
print("Accurecy score for training data : %.3f %%"%( accuracy_score(y_test , take_model.predict(X_test)) * 100))
print("Classification report for training data : \n============================================================\n")
print(classification_report(y_train , take_model.predict(X_train)))
print("\nClassification report for testing data : \n============================================================\n")
print(classification_report(y_test , take_model.predict(X_test)))
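
# A sketch of an alternative to the manual C sweep above, using 5-fold
# cross-validation instead of a single train/test split; the grid values
# below are assumptions, not part of the original notebook.
from sklearn.model_selection import GridSearchCV

grid = GridSearchCV(LogisticRegression(max_iter=1000),
                    param_grid={'C': [0.01, 0.1, 1, 10, 100]},
                    cv=5, scoring='accuracy')
grid.fit(X_train, y_train)
print("Best C : %s" % grid.best_params_['C'])
print("Mean CV accuracy : %.3f" % grid.best_score_)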
| [
"[email protected]"
] | |
52f4054f55228ea0919d84539947c10ee02a97c2 | ad963dc590fe3ee16fe70674ffa9a77a3462a2d2 | /taskManager/migrations/0011_auto_20200210_1539.py | 3b5756687a8bcd93a9876ba0408a8f364cf10d02 | [] | no_license | ShuheiKuriki/task_manager | 564dc1a646efdd288ff31bc9044981aecbd6db78 | f5d4a53a758c64615f22c69baae59b36dd5dab1f | refs/heads/master | 2023-05-12T11:06:11.388036 | 2023-01-15T09:12:37 | 2023-01-15T09:12:37 | 234,110,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 739 | py | # Generated by Django 3.0.2 on 2020-02-10 06:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('taskManager', '0010_delete_done'),
    ]

    operations = [
        migrations.RenameField(
            model_name='linepush',
            old_name='user_id',
            new_name='line_id',
        ),
        migrations.AddField(
            model_name='linepush',
            name='user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
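
# This migration renames LinePush.user_id to line_id and adds a nullable
# ForeignKey to the user model; it is applied with the usual command,
# e.g. `python manage.py migrate taskManager`.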
| [
"[email protected]"
] | |
b3fcb829703ca9ecdd728c1793c5b8f9213bfd5c | 1a2ca64839723ede3134a0781128b0dc0b5f6ab8 | /ExtractFeatures/Data/kracekumar/gettowork.py | fe439f5f1fff77243040ee035dd7483d80428c15 | [] | no_license | vivekaxl/LexisNexis | bc8ee0b92ae95a200c41bd077082212243ee248c | 5fa3a818c3d41bd9c3eb25122e1d376c8910269c | refs/heads/master | 2021-01-13T01:44:41.814348 | 2015-07-08T15:42:35 | 2015-07-08T15:42:35 | 29,705,371 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,240 | py | def reducelist(elem):
    nitems = len(elem)
    if nitems > sum(elem):
        return -1  # IMPOSSIBLE
    empty_list = [0 for i in range(nitems)]
    elem.sort()
    #elem.reverse()
    count = 0
    while (sum(empty_list) != nitems):
        each = elem.pop()
        count += 1
        for i in range(0, len(empty_list)):
            if each == 0:
                break
            elif empty_list[i] == 1:
                pass
            else:
                empty_list[i] = 1
                each -= 1
    return count
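
# reducelist answers: given the group sizes in `elem`, how many groups,
# taken largest-first, are needed to cover all len(elem) slots? For
# example, reducelist([3, 1, 1]) == 1 because the group of 3 covers all
# three slots; it returns -1 when even all groups together fall short.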
C = int(raw_input())
impossible = 0
for tc in range(C):
    N, T = [int(x) for x in raw_input().split()]
    E = int(raw_input())
    res = [0 for i in range(N)]
    for e in range(E):
        H, P = [int(x) for x in raw_input().split()]
        if H == T:
            res[H-1] = 0
        else:
            if res[H-1] == 0:
                res[H-1] = [P]
            else:
                res[H-1].append(P)
    resstr = ''
    for element in res:
        if type(element) == type([]):
            element = reducelist(element)
        # assumes the -1 check sits at this level, so plain-0 entries are
        # still appended to the output line
        if element == -1:
            resstr = 'IMPOSSIBLE'
            break
        else:
            resstr = resstr + ' ' + str(element)
    print 'Case #%d: %s' % (tc+1, resstr)
| [
"[email protected]"
] | |
1d27f416efb6fa46af660588f8d141cf7590b0bc | 0494c9caa519b27f3ed6390046fde03a313d2868 | /build/scripts/master/unittests/annotator_test.py | f78a2133dfdb9739b2a95aed77ab6233a8dc1649 | [] | no_license | mhcchang/chromium30 | 9e9649bec6fb19fe0dc2c8b94c27c9d1fa69da2c | 516718f9b7b95c4280257b2d319638d4728a90e1 | refs/heads/master | 2023-03-17T00:33:40.437560 | 2017-08-01T01:13:12 | 2017-08-01T01:13:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,919 | py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Source file for annotated command testcases."""
import os
import time
import unittest
import test_env # pylint: disable=W0611
from buildbot.status import builder
import mock
from twisted.internet import defer
from master import chromium_step
# Mocks confuse pylint.
# pylint: disable=E1101
# pylint: disable=R0201
class FakeCommand(mock.Mock):
def __init__(self):
mock.Mock.__init__(self)
self.rc = builder.SUCCESS
self.status = None
def addLog(self, name):
return self.status.addLog(name)
class FakeLog(object):
def __init__(self, name):
self.text = ''
self.name = name
self.chunkSize = 1024
self.finished = False
def addStdout(self, data):
assert not self.finished
self.text += data
def addStderr(self, data):
assert not self.finished
def getName(self):
return self.name
def addHeader(self, msg):
assert not self.finished
def finish(self):
self.finished = True
class FakeBuildstepStatus(mock.Mock):
def __init__(self, name, build):
mock.Mock.__init__(self)
self.name = name
self.urls = {}
self.build = build
self.text = None
self.step = None
self.logs = []
self.started = False
self.finished = False
def stepStarted(self):
self.started = True
def isStarted(self):
return self.started
def setText(self, text):
self.text = text
def setText2(self, text):
self.text = text
def getBuild(self):
return self.build
def getURLs(self):
return self.urls.copy()
def addURL(self, label, url):
self.urls[label] = url
def addLog(self, log):
l = FakeLog(log)
self.logs.append(l)
return l
def getLogs(self):
return self.logs
def getLog(self, log):
candidates = [x for x in self.logs if x.name == log]
if candidates:
return candidates[0]
else:
return None
def stepFinished(self, status):
self.finished = True
self.getBuild().receivedStatus.append(status)
def isFinished(self):
return self.finished
def setHidden(self, hidden):
return None
class FakeBuildStatus(mock.Mock):
def __init__(self):
mock.Mock.__init__(self)
self.steps = []
self.receivedStatus = []
self.logs = []
def addStepWithName(self, step_name):
newstep = FakeBuildstepStatus(step_name, self)
self.steps.append(newstep)
return newstep
class AnnotatorCommandsTest(unittest.TestCase):
def setUp(self):
self.buildstatus = FakeBuildStatus()
self.command = FakeCommand()
self.step = chromium_step.AnnotatedCommand(name='annotated_steps',
description='annotated_steps',
command=self.command)
self.step_status = self.buildstatus.addStepWithName('annotated_steps')
self.step.setStepStatus(self.step_status)
self.command.status = self.step_status
preamble = self.command.addLog('preamble')
self.step.script_observer.addSection('annotated_steps',
step=self.step_status)
self.step.script_observer.sections[0]['log'] = preamble
self.step.script_observer.sections[0]['started'] = time.time()
self.step.script_observer.cursor = self.step.script_observer.sections[0]
def handleOutputLine(self, line):
self.step.script_observer.cursor['step'].started = True
if not self.step.script_observer.cursor['log']:
self.step.script_observer.cursor['log'] = (
self.step.script_observer.cursor['step'].addLog('stdio'))
self.step.script_observer.cursor['started'] = time.time()
self.step.script_observer.handleOutputLine(line)
  def handleReturnCode(self, code):
    self.step.script_observer.cursor['step'].stepFinished(code)
    self.step.script_observer.handleReturnCode(code)
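  # The tests below drive the buildbot annotation protocol: stdout lines such
  # as @@@BUILD_STEP <name>@@@, @@@STEP_FAILURE@@@, @@@SEED_STEP <name>@@@,
  # @@@STEP_CURSOR <name>@@@ and @@@STEP_LINK@<label>@<url>@@@ create steps,
  # set their status, and attach links.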
def testAddAnnotatedSteps(self):
self.handleOutputLine('@@@BUILD_STEP step@@@')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.handleOutputLine('@@@BUILD_STEP done@@@')
self.step.script_observer.handleReturnCode(0)
stepnames = [x['step'].name for x in self.step.script_observer.sections]
statuses = [x['status'] for x in self.step.script_observer.sections]
self.assertEquals(stepnames, ['annotated_steps', 'step', 'step2', 'done'])
self.assertEquals(statuses, 4 * [builder.SUCCESS])
self.assertEquals(self.step.script_observer.annotate_status,
builder.SUCCESS)
def testBuildFailure(self):
self.handleOutputLine('@@@STEP_FAILURE@@@')
self.handleOutputLine('@@@BUILD_STEP step@@@')
self.step.script_observer.handleReturnCode(0)
statuses = [x['status'] for x in self.step.script_observer.sections]
self.assertEquals(statuses, [builder.FAILURE, builder.SUCCESS])
self.assertEquals(self.step.script_observer.annotate_status,
builder.FAILURE)
def testBuildException(self):
self.handleOutputLine('@@@STEP_EXCEPTION@@@')
self.handleOutputLine('@@@BUILD_STEP step@@@')
statuses = [x['status'] for x in self.step.script_observer.sections]
self.assertEquals(statuses, [builder.EXCEPTION, builder.SUCCESS])
self.assertEquals(self.step.script_observer.annotate_status,
builder.EXCEPTION)
def testStepLink(self):
self.handleOutputLine('@@@STEP_LINK@label@http://localhost/@@@')
testurls = [('label', 'http://localhost/')]
testurl_hash = {'label': 'http://localhost/'}
annotatedLinks = [x['links'] for x in self.step.script_observer.sections]
stepLinks = [x['step'].getURLs() for x in
self.step.script_observer.sections]
self.assertEquals(annotatedLinks, [testurls])
self.assertEquals(stepLinks, [testurl_hash])
def testStepWarning(self):
self.handleOutputLine('@@@STEP_WARNINGS@@@')
self.handleOutputLine('@@@BUILD_STEP step@@@')
statuses = [x['status'] for x in self.step.script_observer.sections]
self.assertEquals(statuses, [builder.WARNINGS, builder.SUCCESS])
self.assertEquals(self.step.script_observer.annotate_status,
builder.WARNINGS)
def testStepText(self):
self.handleOutputLine('@@@STEP_TEXT@example_text@@@')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.handleOutputLine('@@@STEP_TEXT@example_text2@@@')
self.handleOutputLine('@@@BUILD_STEP step3@@@')
self.handleOutputLine('@@@STEP_TEXT@example_text3@@@')
texts = [x['step_text'] for x in self.step.script_observer.sections]
self.assertEquals(texts, [['example_text'], ['example_text2'],
['example_text3']])
def testStepTextSeeded(self):
self.handleOutputLine('@@@SEED_STEP example_step@@@')
self.handleOutputLine('@@@SEED_STEP_TEXT@example_step@example_text@@@')
self.handleOutputLine('@@@STEP_CURSOR example_step@@@')
texts = [x['step_text'] for x in self.step.script_observer.sections]
start = [x['step'].isStarted() for x in self.step.script_observer.sections]
self.assertEquals(texts, [[], ['example_text']])
self.assertEquals(start, [True, False])
def testStepClear(self):
self.handleOutputLine('@@@STEP_TEXT@example_text@@@')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.handleOutputLine('@@@STEP_TEXT@example_text2@@@')
self.handleOutputLine('@@@STEP_CLEAR@@@')
texts = [x['step_text'] for x in self.step.script_observer.sections]
self.assertEquals(texts, [['example_text'], []])
def testStepSummaryText(self):
self.handleOutputLine('@@@STEP_SUMMARY_TEXT@example_text@@@')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.handleOutputLine('@@@STEP_SUMMARY_TEXT@example_text2@@@')
self.handleOutputLine('@@@BUILD_STEP step3@@@')
self.handleOutputLine('@@@STEP_SUMMARY_TEXT@example_text3@@@')
texts = [x['step_summary_text'] for x in self.step.script_observer.sections]
self.assertEquals(texts, [['example_text'], ['example_text2'],
['example_text3']])
def testStepSummaryClear(self):
self.handleOutputLine('@@@STEP_SUMMARY_TEXT@example_text@@@')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.handleOutputLine('@@@STEP_SUMMARY_TEXT@example_text2@@@')
self.handleOutputLine('@@@STEP_SUMMARY_CLEAR@@@')
texts = [x['step_summary_text'] for x in self.step.script_observer.sections]
self.assertEquals(texts, [['example_text'], []])
def testHaltOnFailure(self):
self.step.deferred = defer.Deferred()
self.handleOutputLine('@@@HALT_ON_FAILURE@@@')
catchFailure = lambda r: self.assertEquals(
self.step_status.getBuild().receivedStatus, [builder.FAILURE])
self.step.deferred.addBoth(catchFailure)
self.handleOutputLine('@@@STEP_FAILURE@@@')
self.assertEquals(self.step.script_observer.annotate_status,
builder.FAILURE)
def testReturnCode(self):
self.step.script_observer.handleReturnCode(1)
self.assertEquals(self.step.script_observer.annotate_status,
builder.FAILURE)
def testHonorZeroReturnCode(self):
self.handleOutputLine('@@@HONOR_ZERO_RETURN_CODE@@@')
self.handleOutputLine('@@@STEP_FAILURE@@@')
self.step.script_observer.handleReturnCode(0)
self.assertEquals(self.step.script_observer.annotate_status,
builder.SUCCESS)
def testLogLine(self):
self.handleOutputLine('@@@STEP_LOG_LINE@test_log@this is line one@@@')
self.handleOutputLine('@@@STEP_LOG_LINE@test_log@this is line two@@@')
self.handleOutputLine('@@@STEP_LOG_END@test_log@@@')
logs = self.step_status.getLogs()
self.assertEquals(len(logs), 2)
self.assertEquals(logs[1].getName(), 'test_log')
self.assertEquals(self.step_status.getLog('test_log').text,
'this is line one\nthis is line two')
def testForNoPreambleAfter1Step(self):
self.handleOutputLine('this line is part of the preamble')
self.step.commandComplete(self.command)
logs = self.step_status.getLogs()
# buildbot will append 'stdio' for the first non-annotated section
# but it won't show up in self.step_status.getLogs()
self.assertEquals(len(logs), 0)
def testForPreambleAfter2Steps(self):
self.handleOutputLine('this line is part of the preamble')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.step.commandComplete(self.command)
logs = [l for x in self.buildstatus.steps for l in x.getLogs()]
# annotator adds a stdio for each buildstep added
self.assertEquals([x.getName() for x in logs], ['preamble', 'stdio'])
def testForPreambleAfter3Steps(self):
self.handleOutputLine('this line is part of the preamble')
self.handleOutputLine('@@@BUILD_STEP step2@@@')
self.handleOutputLine('@@@BUILD_STEP step3@@@')
self.step.commandComplete(self.command)
logs = [l for x in self.buildstatus.steps for l in x.getLogs()]
self.assertEquals([x.getName() for x in logs], ['preamble', 'stdio',
'stdio'])
def testSeed(self):
self.handleOutputLine('@@@BUILD_STEP step@@@')
self.handleOutputLine('@@@SEED_STEP step2@@@')
self.handleOutputLine('@@@SEED_STEP step3@@@')
self.handleOutputLine('@@@SEED_STEP step4@@@')
self.handleOutputLine('@@@STEP_CURSOR step2@@@')
self.handleOutputLine('@@@STEP_STARTED@@@')
self.handleOutputLine('@@@STEP_CURSOR step3@@@')
self.step.script_observer.handleReturnCode(0)
stepnames = [x['step'].name for x in self.step.script_observer.sections]
started = [x['step'].isStarted() for x
in self.step.script_observer.sections]
finished = [x['step'].isFinished() for x in
self.step.script_observer.sections]
self.assertEquals(stepnames, ['annotated_steps', 'step', 'step2', 'step3',
'step4'])
self.assertEquals(started, [True, True, True, True, False])
self.assertEquals(finished, [False, True, True, True, False])
self.assertEquals(self.step.script_observer.annotate_status,
builder.SUCCESS)
def testCursor(self):
self.handleOutputLine('@@@BUILD_STEP step@@@')
self.handleOutputLine('@@@SEED_STEP step2@@@')
self.handleOutputLine('@@@SEED_STEP step3@@@')
self.handleOutputLine('@@@SEED_STEP step4@@@')
self.handleOutputLine('@@@SEED_STEP step5@@@')
self.handleOutputLine('@@@STEP_CURSOR step2@@@')
self.handleOutputLine('@@@STEP_STARTED@@@')
self.handleOutputLine('@@@STEP_CURSOR step4@@@')
self.handleOutputLine('@@@STEP_STARTED@@@')
self.handleOutputLine('@@@STEP_LOG_LINE@test_log@AAthis is line one@@@')
self.handleOutputLine('@@@STEP_CURSOR step2@@@')
self.handleOutputLine('@@@STEP_LOG_LINE@test_log@BBthis is line one@@@')
self.handleOutputLine('@@@STEP_CURSOR step4@@@')
self.handleOutputLine('@@@STEP_LOG_LINE@test_log@AAthis is line two@@@')
self.handleOutputLine('@@@STEP_CURSOR step2@@@')
self.handleOutputLine('@@@STEP_LOG_LINE@test_log@BBthis is line two@@@')
self.handleOutputLine('@@@STEP_CURSOR step4@@@')
self.handleOutputLine('@@@STEP_LOG_END@test_log@@@')
self.handleOutputLine('@@@STEP_CURSOR step2@@@')
self.handleOutputLine('@@@STEP_LOG_END@test_log@@@')
self.handleOutputLine('@@@STEP_CURSOR step4@@@')
self.handleOutputLine('@@@STEP_CLOSED@@@')
self.handleOutputLine('@@@STEP_CURSOR step3@@@')
self.handleOutputLine('@@@STEP_STARTED@@@')
self.step.script_observer.handleReturnCode(0)
stepnames = [x['step'].name for x in self.step.script_observer.sections]
started = [x['step'].isStarted() for x
in self.step.script_observer.sections]
finished = [x['step'].isFinished() for x
in self.step.script_observer.sections]
logs = [x['step'].logs for x in self.step.script_observer.sections]
self.assertEquals(stepnames, ['annotated_steps', 'step', 'step2', 'step3',
'step4', 'step5'])
self.assertEquals(started, [True, True, True, True, True, False])
self.assertEquals(finished, [False, True, True, True, True, False])
self.assertEquals(self.step.script_observer.annotate_status,
builder.SUCCESS)
lognames = [[x.getName() for x in l] for l in logs]
logtexts = [[x.text for x in l] for l in logs]
expected_lognames = [['preamble'], ['stdio'],
['stdio', 'test_log'],
['stdio'],
['stdio', 'test_log'],
[]]
self.assertEquals(lognames, expected_lognames)
self.assertEquals(logtexts[1:], [
[''],
['', 'BBthis is line one\nBBthis is line two'],
[''],
['', 'AAthis is line one\nAAthis is line two'],
[]
])
def testHandleRealOutput(self):
with open(os.path.join(test_env.DATA_PATH,
'chromium_fyi_android_annotator_stdio')) as f:
for line in f.readlines():
self.handleOutputLine(line.rstrip())
stepnames = [x['step'].name for x in self.step.script_observer.sections]
self.assertEquals(stepnames, ['annotated_steps',
'Environment setup',
'Check licenses for WebView',
'compile',
'Experimental Compile android_experimental ',
'Zip build'])
def testRealOutputBuildStepSeedStep(self):
with open(os.path.join(test_env.DATA_PATH,
'build_step_seed_step_annotator.txt')) as f:
for line in f.readlines():
self.handleOutputLine(line.rstrip())
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
06a236c2de21ac9825557737984acf1f6c49d267 | 993bb247b983874a882f4933508601fd0ab5f195 | /airflow/kubernetes/pod_runtime_info_env.py | c7937e23ddc25a5ca6ff407f8ea87db0ec104754 | [
"Apache-2.0",
"BSD-3-Clause",
"Python-2.0",
"MIT",
"BSD-2-Clause"
] | permissive | franloza/airflow | 0dcb077d1418ae2f83d5ddef1d384184cfd54946 | 0224105343f8f37f243411c70d37c6ee0f0a8f25 | refs/heads/master | 2020-06-02T11:52:02.310756 | 2019-06-10T09:26:06 | 2019-06-10T09:26:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,386 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
class PodRuntimeInfoEnv:
"""Defines Pod runtime information as environment variable"""
def __init__(self, name, field_path):
""" Adds Kubernetes pod runtime information as environment variables such as namespace, pod IP, pod name.
Full list of options can be found in kubernetes documentation.
:param name: the name of the environment variable
:type: name: str
:param field_path: path to pod runtime info. Ex: metadata.namespace | status.podIP
:type: field_path: str
"""
self.name = name
self.field_path = field_path
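# A hedged usage sketch; the variable and environment-variable names below are
# illustrative assumptions, not part of this module:
#
#   namespace_env = PodRuntimeInfoEnv(name='POD_NAMESPACE',
#                                     field_path='metadata.namespace')
#
# The resulting object can then be passed to whatever consumes Airflow's
# Kubernetes pod configuration (e.g. an operator that accepts pod
# runtime-info environment variables).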
| [
"[email protected]"
] | |
9ef4bc80084e942c3e0264adedc1dce137002ed9 | afc8d5a9b1c2dd476ea59a7211b455732806fdfd | /Configurations/VBSjjlnu/Full2017v7/configuration_test_tau21wp_2017.py | 5a6dff8102aa14fe4ce1a29741c1c293111bfbf4 | [] | no_license | latinos/PlotsConfigurations | 6d88a5ad828dde4a7f45c68765081ed182fcda21 | 02417839021e2112e740607b0fb78e09b58c930f | refs/heads/master | 2023-08-18T20:39:31.954943 | 2023-08-18T09:23:34 | 2023-08-18T09:23:34 | 39,819,875 | 10 | 63 | null | 2023-08-10T14:08:04 | 2015-07-28T07:36:50 | Python | UTF-8 | Python | false | false | 1,302 | py | # example of configuration file
treeName= 'Events'
tag = 'test_tau21wp_2017'
direc = "conf_test_tau21wp"
# used by mkShape to define output directory for root files
outputDir = 'rootFile_'+tag
# file with TTree aliases
aliasesFile = direc+'/aliases.py'
# file with list of variables
variablesFile = direc+'/variables.py'
# file with list of cuts
cutsFile = direc +'/cuts.py'
# file with list of samples
samplesFile = direc+'/samples.py'
#samplesFile = direc+'/samples.py'
# file with list of plots
plotFile = direc+'/plot.py'
# luminosity to normalize to (in 1/fb)
lumi = 41.5
# used by mkPlot to define output directory for plots
# different from "outputDir" to do things more tidy
#outputDirPlots = 'plot_'+tag +"_rescaled/detajpt_ext"
outputDirPlots = 'plot_'+tag + "/"
# used by mkDatacards to define output directory for datacards
#outputDirDatacard = 'datacards_'+tag
#outputDirDatacard = 'datacards_'+tag + "/Wjets_njets"
outputDirDatacard = 'datacards_'+tag + "_v4"
# structure file for datacard
structureFile = direc+'/structure.py'
# nuisances file for mkDatacards and for mkShape
# nuisancesFile = direc+'/nuisances_reduced.py'
# nuisancesFile = direc+'/nuisances_datacard.py'
# nuisancesFile = direc + '/nuisances_recover.py'
customizeScript = direc + "/customize.py" | [
"[email protected]"
] | |
9db9ca8bc1f8cda57c69c3a2f593776bdb60eeb6 | bfbe642d689b5595fc7a8e8ae97462c863ba267a | /src/CyPhyPET/Resources/zip.py | 5ee5bf64d4dce688e9cf9be0eb40b6c5af188e56 | [
"LicenseRef-scancode-other-permissive"
] | permissive | mcanthony/meta-core | 0c0a8cde1669f749a4880aca6f816d28742a9c68 | 3844cce391c1e6be053572810bad2b8405a9839b | refs/heads/master | 2020-12-26T03:11:11.338182 | 2015-11-04T22:58:13 | 2015-11-04T22:58:13 | 45,806,011 | 1 | 0 | null | 2015-11-09T00:34:22 | 2015-11-09T00:34:22 | null | UTF-8 | Python | false | false | 868 | py | #!/usr/bin/python
import zipfile
import os
import sys
import os.path
path_join = os.path.join
if sys.platform == 'win32':
    # Prefix with \\?\ so Windows accepts absolute paths longer than
    # MAX_PATH (260 characters).
    def path_join(*args):
        return '\\\\?\\' + os.path.join(os.getcwd(), os.path.join(*args))
output_filename = 'source_data.zip'
if os.path.exists(output_filename):
os.remove(output_filename)
with zipfile.ZipFile(output_filename, 'w', allowZip64=True) as z:
parent_dir_name = os.path.basename(os.getcwd())
os.chdir('..\\')
for dirpath,dirs,files in os.walk(parent_dir_name):
if '.git' in dirpath or '.svn' in dirpath:
continue
for f in files:
if output_filename == f or f.endswith('.suo'):
continue
fn = path_join(dirpath, f)
#print fn
z.write(fn, arcname=os.path.join(dirpath, f), compress_type=zipfile.ZIP_DEFLATED)
| [
"[email protected]"
] | |
0472e752dc24ddaf0d91505984789b83cdf34efd | a3d6556180e74af7b555f8d47d3fea55b94bcbda | /components/feature_engagement/DEPS | f25c147bc846d9c605930e33ad2974793f4f91cd | [
"BSD-3-Clause"
] | permissive | chromium/chromium | aaa9eda10115b50b0616d2f1aed5ef35d1d779d6 | a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c | refs/heads/main | 2023-08-24T00:35:12.585945 | 2023-08-23T22:01:11 | 2023-08-23T22:01:11 | 120,360,765 | 17,408 | 7,102 | BSD-3-Clause | 2023-09-10T23:44:27 | 2018-02-05T20:55:32 | null | UTF-8 | Python | false | false | 226 | include_rules = [
"-content",
"+components/feature_engagement/internal/jni_headers",
"+components/feature_engagement/features.h",
"+components/flags_ui",
"+components/keyed_service",
"+components/leveldb_proto",
]
| [
"[email protected]"
] | ||
e4b55f125082613aecd55dc5804faaa01d106542 | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/contrib/legacy_seq2seq/python/ops/seq2seq.py | 5fe5f16c0bd50ff115a8936cb43677c9ed237bf8 | [
"Apache-2.0"
] | permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 58,269 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library for creating sequence-to-sequence models in TensorFlow.
Sequence-to-sequence recurrent neural networks can learn complex functions
that map input sequences to output sequences. These models yield very good
results on a number of tasks, such as speech recognition, parsing, machine
translation, or even constructing automated replies to emails.
Before using this module, it is recommended to read the TensorFlow tutorial
on sequence-to-sequence models. It explains the basic concepts of this module
and shows an end-to-end example of how to build a translation model.
https://www.tensorflow.org/versions/master/tutorials/seq2seq/index.html
Here is an overview of functions available in this module. They all use
a very similar interface, so after reading the above tutorial and using
one of them, others should be easy to substitute.
* Full sequence-to-sequence models.
- basic_rnn_seq2seq: The most basic RNN-RNN model.
- tied_rnn_seq2seq: The basic model with tied encoder and decoder weights.
- embedding_rnn_seq2seq: The basic model with input embedding.
- embedding_tied_rnn_seq2seq: The tied model with input embedding.
- embedding_attention_seq2seq: Advanced model with input embedding and
the neural attention mechanism; recommended for complex tasks.
* Multi-task sequence-to-sequence models.
- one2many_rnn_seq2seq: The embedding model with multiple decoders.
* Decoders (when you write your own encoder, you can use these to decode;
e.g., if you want to write a model that generates captions for images).
- rnn_decoder: The basic decoder based on a pure RNN.
- attention_decoder: A decoder that uses the attention mechanism.
* Losses.
- sequence_loss: Loss for a sequence model returning average log-perplexity.
- sequence_loss_by_example: As above, but not averaging over all examples.
* model_with_buckets: A convenience function to create models with bucketing
(see the tutorial above for an explanation of why and how to use it).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
# We disable pylint because we need python3 compatibility.
from six.moves import xrange # pylint: disable=redefined-builtin
from six.moves import zip # pylint: disable=redefined-builtin
from tensorflow.contrib.rnn.python.ops import core_rnn_cell
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variable_scope
from tensorflow.python.util import nest
# TODO(ebrevdo): Remove once _linear is fully deprecated.
Linear = core_rnn_cell._Linear # pylint: disable=protected-access,invalid-name
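# A minimal usage sketch for the simplest model below (the sizes, sequence
# length and GRU cell are illustrative assumptions, not part of this module):
#
#   batch_size, input_size, seq_len = 32, 16, 10
#   enc_inputs = [array_ops.placeholder(dtypes.float32,
#                                       [batch_size, input_size])
#                 for _ in range(seq_len)]
#   dec_inputs = [array_ops.placeholder(dtypes.float32,
#                                       [batch_size, input_size])
#                 for _ in range(seq_len)]
#   cell = rnn_cell_impl.GRUCell(64)
#   outputs, state = basic_rnn_seq2seq(enc_inputs, dec_inputs, cell)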
def _extract_argmax_and_embed(embedding,
output_projection=None,
update_embedding=True):
"""Get a loop_function that extracts the previous symbol and embeds it.
Args:
embedding: embedding tensor for symbols.
output_projection: None or a pair (W, B). If provided, each fed previous
output will first be multiplied by W and added B.
update_embedding: Boolean; if False, the gradients will not propagate
through the embeddings.
Returns:
A loop function.
"""
def loop_function(prev, _):
if output_projection is not None:
prev = nn_ops.xw_plus_b(prev, output_projection[0], output_projection[1])
prev_symbol = math_ops.argmax(prev, 1)
# Note that gradients will not propagate through the second parameter of
# embedding_lookup.
emb_prev = embedding_ops.embedding_lookup(embedding, prev_symbol)
if not update_embedding:
emb_prev = array_ops.stop_gradient(emb_prev)
return emb_prev
return loop_function
def rnn_decoder(decoder_inputs,
initial_state,
cell,
loop_function=None,
scope=None):
"""RNN decoder for the sequence-to-sequence model.
Args:
decoder_inputs: A list of 2D Tensors [batch_size x input_size].
initial_state: 2D Tensor with shape [batch_size x cell.state_size].
cell: rnn_cell.RNNCell defining the cell function and size.
loop_function: If not None, this function will be applied to the i-th output
in order to generate the i+1-st input, and decoder_inputs will be ignored,
except for the first element ("GO" symbol). This can be used for decoding,
but also for training to emulate http://arxiv.org/abs/1506.03099.
      Signature -- loop_function(prev, i) = next
        * prev is a 2D Tensor of shape [batch_size x output_size],
        * i is an integer, the step number (when advanced control is needed),
        * next is a 2D Tensor of shape [batch_size x input_size].
scope: VariableScope for the created subgraph; defaults to "rnn_decoder".
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors with
shape [batch_size x output_size] containing generated outputs.
state: The state of each cell at the final time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
(Note that in some cases, like basic RNN cell or GRU cell, outputs and
states can be the same. They are different for LSTM cells though.)
"""
with variable_scope.variable_scope(scope or "rnn_decoder"):
state = initial_state
outputs = []
prev = None
for i, inp in enumerate(decoder_inputs):
if loop_function is not None and prev is not None:
with variable_scope.variable_scope("loop_function", reuse=True):
inp = loop_function(prev, i)
if i > 0:
variable_scope.get_variable_scope().reuse_variables()
output, state = cell(inp, state)
outputs.append(output)
if loop_function is not None:
prev = output
return outputs, state
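# Hedged illustration of the loop_function contract documented above: a
# function that feeds the previous output straight back in as the next input,
# valid when the cell's output size equals its input size.
#
#   def identity_loop_function(prev, _i):
#     return prev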
def basic_rnn_seq2seq(encoder_inputs,
decoder_inputs,
cell,
dtype=dtypes.float32,
scope=None):
"""Basic RNN sequence-to-sequence model.
This model first runs an RNN to encode encoder_inputs into a state vector,
then runs decoder, initialized with the last encoder state, on decoder_inputs.
Encoder and decoder use the same RNN cell type, but don't share parameters.
Args:
encoder_inputs: A list of 2D Tensors [batch_size x input_size].
decoder_inputs: A list of 2D Tensors [batch_size x input_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function and size.
dtype: The dtype of the initial state of the RNN cell (default: tf.float32).
scope: VariableScope for the created subgraph; default: "basic_rnn_seq2seq".
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors with
shape [batch_size x output_size] containing the generated outputs.
state: The state of each decoder cell in the final time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
"""
with variable_scope.variable_scope(scope or "basic_rnn_seq2seq"):
enc_cell = copy.deepcopy(cell)
_, enc_state = rnn.static_rnn(enc_cell, encoder_inputs, dtype=dtype)
return rnn_decoder(decoder_inputs, enc_state, cell)
def tied_rnn_seq2seq(encoder_inputs,
decoder_inputs,
cell,
loop_function=None,
dtype=dtypes.float32,
scope=None):
"""RNN sequence-to-sequence model with tied encoder and decoder parameters.
This model first runs an RNN to encode encoder_inputs into a state vector, and
then runs decoder, initialized with the last encoder state, on decoder_inputs.
Encoder and decoder use the same RNN cell and share parameters.
Args:
encoder_inputs: A list of 2D Tensors [batch_size x input_size].
decoder_inputs: A list of 2D Tensors [batch_size x input_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function and size.
loop_function: If not None, this function will be applied to i-th output in
order to generate i+1-th input, and decoder_inputs will be ignored, except
for the first element ("GO" symbol), see rnn_decoder for details.
dtype: The dtype of the initial state of the rnn cell (default: tf.float32).
scope: VariableScope for the created subgraph; default: "tied_rnn_seq2seq".
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors with
shape [batch_size x output_size] containing the generated outputs.
state: The state of each decoder cell in each time-step. This is a list
with length len(decoder_inputs) -- one item for each time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
"""
with variable_scope.variable_scope("combined_tied_rnn_seq2seq"):
scope = scope or "tied_rnn_seq2seq"
_, enc_state = rnn.static_rnn(
cell, encoder_inputs, dtype=dtype, scope=scope)
variable_scope.get_variable_scope().reuse_variables()
return rnn_decoder(
decoder_inputs,
enc_state,
cell,
loop_function=loop_function,
scope=scope)
def embedding_rnn_decoder(decoder_inputs,
initial_state,
cell,
num_symbols,
embedding_size,
output_projection=None,
feed_previous=False,
update_embedding_for_previous=True,
scope=None):
"""RNN decoder with embedding and a pure-decoding option.
Args:
decoder_inputs: A list of 1D batch-sized int32 Tensors (decoder inputs).
initial_state: 2D Tensor [batch_size x cell.state_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function.
num_symbols: Integer, how many symbols come into the embedding.
embedding_size: Integer, the length of the embedding vector for each symbol.
output_projection: None or a pair (W, B) of output projection weights and
biases; W has shape [output_size x num_symbols] and B has shape
[num_symbols]; if provided and feed_previous=True, each fed previous
output will first be multiplied by W and added B.
feed_previous: Boolean; if True, only the first of decoder_inputs will be
used (the "GO" symbol), and all other decoder inputs will be generated by:
next = embedding_lookup(embedding, argmax(previous_output)), In effect,
this implements a greedy decoder. It can also be used
during training to emulate http://arxiv.org/abs/1506.03099. If False,
decoder_inputs are used as given (the standard decoder case).
update_embedding_for_previous: Boolean; if False and feed_previous=True,
only the embedding for the first symbol of decoder_inputs (the "GO"
symbol) will be updated by back propagation. Embeddings for the symbols
generated from the decoder itself remain unchanged. This parameter has no
effect if feed_previous=False.
scope: VariableScope for the created subgraph; defaults to
"embedding_rnn_decoder".
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors. The
output is of shape [batch_size x cell.output_size] when
output_projection is not None (and represents the dense representation
of predicted tokens). It is of shape [batch_size x num_decoder_symbols]
when output_projection is None.
state: The state of each decoder cell in each time-step. This is a list
with length len(decoder_inputs) -- one item for each time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
Raises:
ValueError: When output_projection has the wrong shape.
"""
with variable_scope.variable_scope(scope or "embedding_rnn_decoder") as scope:
if output_projection is not None:
dtype = scope.dtype
proj_weights = ops.convert_to_tensor(output_projection[0], dtype=dtype)
proj_weights.get_shape().assert_is_compatible_with([None, num_symbols])
proj_biases = ops.convert_to_tensor(output_projection[1], dtype=dtype)
proj_biases.get_shape().assert_is_compatible_with([num_symbols])
embedding = variable_scope.get_variable("embedding",
[num_symbols, embedding_size])
loop_function = _extract_argmax_and_embed(
embedding, output_projection,
update_embedding_for_previous) if feed_previous else None
emb_inp = (
embedding_ops.embedding_lookup(embedding, i) for i in decoder_inputs)
return rnn_decoder(
emb_inp, initial_state, cell, loop_function=loop_function)
def embedding_rnn_seq2seq(encoder_inputs,
decoder_inputs,
cell,
num_encoder_symbols,
num_decoder_symbols,
embedding_size,
output_projection=None,
feed_previous=False,
dtype=None,
scope=None):
"""Embedding RNN sequence-to-sequence model.
This model first embeds encoder_inputs by a newly created embedding (of shape
[num_encoder_symbols x input_size]). Then it runs an RNN to encode
embedded encoder_inputs into a state vector. Next, it embeds decoder_inputs
by another newly created embedding (of shape [num_decoder_symbols x
input_size]). Then it runs RNN decoder, initialized with the last
encoder state, on embedded decoder_inputs.
Args:
encoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
decoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function and size.
num_encoder_symbols: Integer; number of symbols on the encoder side.
num_decoder_symbols: Integer; number of symbols on the decoder side.
embedding_size: Integer, the length of the embedding vector for each symbol.
output_projection: None or a pair (W, B) of output projection weights and
biases; W has shape [output_size x num_decoder_symbols] and B has shape
[num_decoder_symbols]; if provided and feed_previous=True, each fed
previous output will first be multiplied by W and added B.
feed_previous: Boolean or scalar Boolean Tensor; if True, only the first of
decoder_inputs will be used (the "GO" symbol), and all other decoder
inputs will be taken from previous outputs (as in embedding_rnn_decoder).
If False, decoder_inputs are used as given (the standard decoder case).
dtype: The dtype of the initial state for both the encoder and encoder
rnn cells (default: tf.float32).
scope: VariableScope for the created subgraph; defaults to
"embedding_rnn_seq2seq"
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors. The
output is of shape [batch_size x cell.output_size] when
output_projection is not None (and represents the dense representation
of predicted tokens). It is of shape [batch_size x num_decoder_symbols]
when output_projection is None.
state: The state of each decoder cell in each time-step. This is a list
with length len(decoder_inputs) -- one item for each time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
"""
with variable_scope.variable_scope(scope or "embedding_rnn_seq2seq") as scope:
if dtype is not None:
scope.set_dtype(dtype)
else:
dtype = scope.dtype
# Encoder.
encoder_cell = copy.deepcopy(cell)
encoder_cell = core_rnn_cell.EmbeddingWrapper(
encoder_cell,
embedding_classes=num_encoder_symbols,
embedding_size=embedding_size)
_, encoder_state = rnn.static_rnn(encoder_cell, encoder_inputs, dtype=dtype)
# Decoder.
if output_projection is None:
cell = core_rnn_cell.OutputProjectionWrapper(cell, num_decoder_symbols)
if isinstance(feed_previous, bool):
return embedding_rnn_decoder(
decoder_inputs,
encoder_state,
cell,
num_decoder_symbols,
embedding_size,
output_projection=output_projection,
feed_previous=feed_previous)
# If feed_previous is a Tensor, we construct 2 graphs and use cond.
def decoder(feed_previous_bool):
reuse = None if feed_previous_bool else True
with variable_scope.variable_scope(
variable_scope.get_variable_scope(), reuse=reuse):
outputs, state = embedding_rnn_decoder(
decoder_inputs,
encoder_state,
cell,
num_decoder_symbols,
embedding_size,
output_projection=output_projection,
feed_previous=feed_previous_bool,
update_embedding_for_previous=False)
state_list = [state]
if nest.is_sequence(state):
state_list = nest.flatten(state)
return outputs + state_list
outputs_and_state = control_flow_ops.cond(
feed_previous, lambda: decoder(True), lambda: decoder(False))
outputs_len = len(decoder_inputs) # Outputs length same as decoder inputs.
state_list = outputs_and_state[outputs_len:]
state = state_list[0]
if nest.is_sequence(encoder_state):
state = nest.pack_sequence_as(
structure=encoder_state, flat_sequence=state_list)
return outputs_and_state[:outputs_len], state
def embedding_tied_rnn_seq2seq(encoder_inputs,
decoder_inputs,
cell,
num_symbols,
embedding_size,
num_decoder_symbols=None,
output_projection=None,
feed_previous=False,
dtype=None,
scope=None):
"""Embedding RNN sequence-to-sequence model with tied (shared) parameters.
This model first embeds encoder_inputs by a newly created embedding (of shape
[num_symbols x input_size]). Then it runs an RNN to encode embedded
encoder_inputs into a state vector. Next, it embeds decoder_inputs using
the same embedding. Then it runs RNN decoder, initialized with the last
encoder state, on embedded decoder_inputs. The decoder output is over symbols
from 0 to num_decoder_symbols - 1 if num_decoder_symbols is none; otherwise it
is over 0 to num_symbols - 1.
Args:
encoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
decoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function and size.
num_symbols: Integer; number of symbols for both encoder and decoder.
embedding_size: Integer, the length of the embedding vector for each symbol.
num_decoder_symbols: Integer; number of output symbols for decoder. If
provided, the decoder output is over symbols 0 to num_decoder_symbols - 1.
Otherwise, decoder output is over symbols 0 to num_symbols - 1. Note that
this assumes that the vocabulary is set up such that the first
num_decoder_symbols of num_symbols are part of decoding.
output_projection: None or a pair (W, B) of output projection weights and
biases; W has shape [output_size x num_symbols] and B has shape
[num_symbols]; if provided and feed_previous=True, each fed previous
output will first be multiplied by W and added B.
feed_previous: Boolean or scalar Boolean Tensor; if True, only the first of
decoder_inputs will be used (the "GO" symbol), and all other decoder
inputs will be taken from previous outputs (as in embedding_rnn_decoder).
If False, decoder_inputs are used as given (the standard decoder case).
dtype: The dtype to use for the initial RNN states (default: tf.float32).
scope: VariableScope for the created subgraph; defaults to
"embedding_tied_rnn_seq2seq".
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors with
shape [batch_size x output_symbols] containing the generated
outputs where output_symbols = num_decoder_symbols if
num_decoder_symbols is not None otherwise output_symbols = num_symbols.
state: The state of each decoder cell at the final time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
Raises:
ValueError: When output_projection has the wrong shape.
"""
with variable_scope.variable_scope(
scope or "embedding_tied_rnn_seq2seq", dtype=dtype) as scope:
dtype = scope.dtype
if output_projection is not None:
proj_weights = ops.convert_to_tensor(output_projection[0], dtype=dtype)
proj_weights.get_shape().assert_is_compatible_with([None, num_symbols])
proj_biases = ops.convert_to_tensor(output_projection[1], dtype=dtype)
proj_biases.get_shape().assert_is_compatible_with([num_symbols])
embedding = variable_scope.get_variable(
"embedding", [num_symbols, embedding_size], dtype=dtype)
emb_encoder_inputs = [
embedding_ops.embedding_lookup(embedding, x) for x in encoder_inputs
]
emb_decoder_inputs = [
embedding_ops.embedding_lookup(embedding, x) for x in decoder_inputs
]
output_symbols = num_symbols
if num_decoder_symbols is not None:
output_symbols = num_decoder_symbols
if output_projection is None:
cell = core_rnn_cell.OutputProjectionWrapper(cell, output_symbols)
if isinstance(feed_previous, bool):
loop_function = _extract_argmax_and_embed(embedding, output_projection,
True) if feed_previous else None
return tied_rnn_seq2seq(
emb_encoder_inputs,
emb_decoder_inputs,
cell,
loop_function=loop_function,
dtype=dtype)
# If feed_previous is a Tensor, we construct 2 graphs and use cond.
def decoder(feed_previous_bool):
loop_function = _extract_argmax_and_embed(
embedding, output_projection, False) if feed_previous_bool else None
reuse = None if feed_previous_bool else True
with variable_scope.variable_scope(
variable_scope.get_variable_scope(), reuse=reuse):
outputs, state = tied_rnn_seq2seq(
emb_encoder_inputs,
emb_decoder_inputs,
cell,
loop_function=loop_function,
dtype=dtype)
state_list = [state]
if nest.is_sequence(state):
state_list = nest.flatten(state)
return outputs + state_list
outputs_and_state = control_flow_ops.cond(
feed_previous, lambda: decoder(True), lambda: decoder(False))
outputs_len = len(decoder_inputs) # Outputs length same as decoder inputs.
state_list = outputs_and_state[outputs_len:]
state = state_list[0]
# Calculate zero-state to know it's structure.
static_batch_size = encoder_inputs[0].get_shape()[0]
for inp in encoder_inputs[1:]:
static_batch_size.merge_with(inp.get_shape()[0])
batch_size = static_batch_size.value
if batch_size is None:
batch_size = array_ops.shape(encoder_inputs[0])[0]
zero_state = cell.zero_state(batch_size, dtype)
if nest.is_sequence(zero_state):
state = nest.pack_sequence_as(
structure=zero_state, flat_sequence=state_list)
return outputs_and_state[:outputs_len], state
def attention_decoder(decoder_inputs,
initial_state,
attention_states,
cell,
output_size=None,
num_heads=1,
loop_function=None,
dtype=None,
scope=None,
initial_state_attention=False):
"""RNN decoder with attention for the sequence-to-sequence model.
In this context "attention" means that, during decoding, the RNN can look up
information in the additional tensor attention_states, and it does this by
focusing on a few entries from the tensor. This model has proven to yield
especially good results in a number of sequence-to-sequence tasks. This
implementation is based on http://arxiv.org/abs/1412.7449 (see below for
details). It is recommended for complex sequence-to-sequence tasks.
Args:
decoder_inputs: A list of 2D Tensors [batch_size x input_size].
initial_state: 2D Tensor [batch_size x cell.state_size].
attention_states: 3D Tensor [batch_size x attn_length x attn_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function and size.
output_size: Size of the output vectors; if None, we use cell.output_size.
num_heads: Number of attention heads that read from attention_states.
loop_function: If not None, this function will be applied to i-th output in
order to generate i+1-th input, and decoder_inputs will be ignored, except
for the first element ("GO" symbol). This can be used for decoding,
but also for training to emulate http://arxiv.org/abs/1506.03099.
      Signature -- loop_function(prev, i) = next
        * prev is a 2D Tensor of shape [batch_size x output_size],
        * i is an integer, the step number (when advanced control is needed),
        * next is a 2D Tensor of shape [batch_size x input_size].
dtype: The dtype to use for the RNN initial state (default: tf.float32).
scope: VariableScope for the created subgraph; default: "attention_decoder".
initial_state_attention: If False (default), initial attentions are zero. If
True, initialize the attentions from the initial state and attention
states -- useful when we wish to resume decoding from a previously stored
decoder state and attention states.
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors of
shape [batch_size x output_size]. These represent the generated outputs.
Output i is computed from input i (which is either the i-th element
of decoder_inputs or loop_function(output {i-1}, i)) as follows.
First, we run the cell on a combination of the input and previous
attention masks:
cell_output, new_state = cell(linear(input, prev_attn), prev_state).
Then, we calculate new attention masks:
new_attn = softmax(V^T * tanh(W * attention_states + U * new_state))
and then we calculate the output:
output = linear(cell_output, new_attn).
state: The state of each decoder cell the final time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
Raises:
ValueError: when num_heads is not positive, there are no inputs, shapes
of attention_states are not set, or input size cannot be inferred
from the input.
"""
if not decoder_inputs:
raise ValueError("Must provide at least 1 input to attention decoder.")
if num_heads < 1:
raise ValueError("With less than 1 heads, use a non-attention decoder.")
if attention_states.get_shape()[2].value is None:
raise ValueError("Shape[2] of attention_states must be known: %s" %
attention_states.get_shape())
if output_size is None:
output_size = cell.output_size
with variable_scope.variable_scope(
scope or "attention_decoder", dtype=dtype) as scope:
dtype = scope.dtype
batch_size = array_ops.shape(decoder_inputs[0])[0] # Needed for reshaping.
attn_length = attention_states.get_shape()[1].value
if attn_length is None:
attn_length = array_ops.shape(attention_states)[1]
attn_size = attention_states.get_shape()[2].value
# To calculate W1 * h_t we use a 1-by-1 convolution, need to reshape before.
hidden = array_ops.reshape(attention_states,
[-1, attn_length, 1, attn_size])
hidden_features = []
v = []
attention_vec_size = attn_size # Size of query vectors for attention.
for a in xrange(num_heads):
k = variable_scope.get_variable(
"AttnW_%d" % a, [1, 1, attn_size, attention_vec_size], dtype=dtype)
hidden_features.append(nn_ops.conv2d(hidden, k, [1, 1, 1, 1], "SAME"))
v.append(
variable_scope.get_variable(
"AttnV_%d" % a, [attention_vec_size], dtype=dtype))
state = initial_state
def attention(query):
"""Put attention masks on hidden using hidden_features and query."""
ds = [] # Results of attention reads will be stored here.
if nest.is_sequence(query): # If the query is a tuple, flatten it.
query_list = nest.flatten(query)
for q in query_list: # Check that ndims == 2 if specified.
ndims = q.get_shape().ndims
if ndims:
assert ndims == 2
query = array_ops.concat(query_list, 1)
for a in xrange(num_heads):
with variable_scope.variable_scope("Attention_%d" % a):
y = Linear(query, attention_vec_size, True)(query)
y = array_ops.reshape(y, [-1, 1, 1, attention_vec_size])
y = math_ops.cast(y, dtype)
# Attention mask is a softmax of v^T * tanh(...).
s = math_ops.reduce_sum(v[a] * math_ops.tanh(hidden_features[a] + y),
[2, 3])
a = nn_ops.softmax(math_ops.cast(s, dtype=dtypes.float32))
# Now calculate the attention-weighted vector d.
a = math_ops.cast(a, dtype)
d = math_ops.reduce_sum(
array_ops.reshape(a, [-1, attn_length, 1, 1]) * hidden, [1, 2])
ds.append(array_ops.reshape(d, [-1, attn_size]))
return ds
outputs = []
prev = None
batch_attn_size = array_ops.stack([batch_size, attn_size])
attns = [
array_ops.zeros(batch_attn_size, dtype=dtype) for _ in xrange(num_heads)
]
for a in attns: # Ensure the second shape of attention vectors is set.
a.set_shape([None, attn_size])
if initial_state_attention:
attns = attention(initial_state)
for i, inp in enumerate(decoder_inputs):
if i > 0:
variable_scope.get_variable_scope().reuse_variables()
# If loop_function is set, we use it instead of decoder_inputs.
if loop_function is not None and prev is not None:
with variable_scope.variable_scope("loop_function", reuse=True):
inp = loop_function(prev, i)
# Merge input and previous attentions into one vector of the right size.
input_size = inp.get_shape().with_rank(2)[1]
if input_size.value is None:
raise ValueError("Could not infer input size from input: %s" % inp.name)
inputs = [inp] + attns
inputs = [math_ops.cast(e, dtype) for e in inputs]
x = Linear(inputs, input_size, True)(inputs)
# Run the RNN.
cell_output, state = cell(x, state)
# Run the attention mechanism.
if i == 0 and initial_state_attention:
with variable_scope.variable_scope(
variable_scope.get_variable_scope(), reuse=True):
attns = attention(state)
else:
attns = attention(state)
with variable_scope.variable_scope("AttnOutputProjection"):
cell_output = math_ops.cast(cell_output, dtype)
inputs = [cell_output] + attns
output = Linear(inputs, output_size, True)(inputs)
if loop_function is not None:
prev = output
outputs.append(output)
return outputs, state
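# Shape walkthrough for the attention computed above, added for clarity: with
# batch_size B, attn_length T and attn_size A, `hidden` is [B, T, 1, A]; each
# hidden_features[a] keeps that shape after the 1x1 convolution; the logits
# `s` are [B, T]; and every context vector appended to `ds` has shape [B, A]
# before being fed, together with the cell output, through the output
# projection.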
def embedding_attention_decoder(decoder_inputs,
initial_state,
attention_states,
cell,
num_symbols,
embedding_size,
num_heads=1,
output_size=None,
output_projection=None,
feed_previous=False,
update_embedding_for_previous=True,
dtype=None,
scope=None,
initial_state_attention=False):
"""RNN decoder with embedding and attention and a pure-decoding option.
Args:
decoder_inputs: A list of 1D batch-sized int32 Tensors (decoder inputs).
initial_state: 2D Tensor [batch_size x cell.state_size].
attention_states: 3D Tensor [batch_size x attn_length x attn_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function.
num_symbols: Integer, how many symbols come into the embedding.
embedding_size: Integer, the length of the embedding vector for each symbol.
num_heads: Number of attention heads that read from attention_states.
output_size: Size of the output vectors; if None, use output_size.
output_projection: None or a pair (W, B) of output projection weights and
biases; W has shape [output_size x num_symbols] and B has shape
[num_symbols]; if provided and feed_previous=True, each fed previous
output will first be multiplied by W and added B.
feed_previous: Boolean; if True, only the first of decoder_inputs will be
used (the "GO" symbol), and all other decoder inputs will be generated by:
next = embedding_lookup(embedding, argmax(previous_output)), In effect,
this implements a greedy decoder. It can also be used
during training to emulate http://arxiv.org/abs/1506.03099. If False,
decoder_inputs are used as given (the standard decoder case).
update_embedding_for_previous: Boolean; if False and feed_previous=True,
only the embedding for the first symbol of decoder_inputs (the "GO"
symbol) will be updated by back propagation. Embeddings for the symbols
generated from the decoder itself remain unchanged. This parameter has no
effect if feed_previous=False.
dtype: The dtype to use for the RNN initial states (default: tf.float32).
scope: VariableScope for the created subgraph; defaults to
"embedding_attention_decoder".
initial_state_attention: If False (default), initial attentions are zero. If
True, initialize the attentions from the initial state and attention
states -- useful when we wish to resume decoding from a previously stored
decoder state and attention states.
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors with
shape [batch_size x output_size] containing the generated outputs.
state: The state of each decoder cell at the final time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
Raises:
ValueError: When output_projection has the wrong shape.
"""
if output_size is None:
output_size = cell.output_size
if output_projection is not None:
proj_biases = ops.convert_to_tensor(output_projection[1], dtype=dtype)
proj_biases.get_shape().assert_is_compatible_with([num_symbols])
with variable_scope.variable_scope(
scope or "embedding_attention_decoder", dtype=dtype) as scope:
embedding = variable_scope.get_variable("embedding",
[num_symbols, embedding_size])
loop_function = _extract_argmax_and_embed(
embedding, output_projection,
update_embedding_for_previous) if feed_previous else None
emb_inp = [
embedding_ops.embedding_lookup(embedding, i) for i in decoder_inputs
]
return attention_decoder(
emb_inp,
initial_state,
attention_states,
cell,
output_size=output_size,
num_heads=num_heads,
loop_function=loop_function,
initial_state_attention=initial_state_attention)
def embedding_attention_seq2seq(encoder_inputs,
decoder_inputs,
cell,
num_encoder_symbols,
num_decoder_symbols,
embedding_size,
num_heads=1,
output_projection=None,
feed_previous=False,
dtype=None,
scope=None,
initial_state_attention=False):
"""Embedding sequence-to-sequence model with attention.
This model first embeds encoder_inputs by a newly created embedding (of shape
[num_encoder_symbols x input_size]). Then it runs an RNN to encode
embedded encoder_inputs into a state vector. It keeps the outputs of this
RNN at every step to use for attention later. Next, it embeds decoder_inputs
by another newly created embedding (of shape [num_decoder_symbols x
input_size]). Then it runs attention decoder, initialized with the last
encoder state, on embedded decoder_inputs and attending to encoder outputs.
Warning: when output_projection is None, the size of the attention vectors
and variables will be made proportional to num_decoder_symbols, can be large.
Args:
encoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
decoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the cell function and size.
num_encoder_symbols: Integer; number of symbols on the encoder side.
num_decoder_symbols: Integer; number of symbols on the decoder side.
embedding_size: Integer, the length of the embedding vector for each symbol.
num_heads: Number of attention heads that read from attention_states.
output_projection: None or a pair (W, B) of output projection weights and
biases; W has shape [output_size x num_decoder_symbols] and B has shape
[num_decoder_symbols]; if provided and feed_previous=True, each fed
previous output will first be multiplied by W and added B.
feed_previous: Boolean or scalar Boolean Tensor; if True, only the first of
decoder_inputs will be used (the "GO" symbol), and all other decoder
inputs will be taken from previous outputs (as in embedding_rnn_decoder).
If False, decoder_inputs are used as given (the standard decoder case).
dtype: The dtype of the initial RNN state (default: tf.float32).
scope: VariableScope for the created subgraph; defaults to
"embedding_attention_seq2seq".
initial_state_attention: If False (default), initial attentions are zero. If
True, initialize the attentions from the initial state and attention
states.
Returns:
A tuple of the form (outputs, state), where:
outputs: A list of the same length as decoder_inputs of 2D Tensors with
shape [batch_size x num_decoder_symbols] containing the generated
outputs.
state: The state of each decoder cell at the final time-step.
It is a 2D Tensor of shape [batch_size x cell.state_size].
"""
with variable_scope.variable_scope(
scope or "embedding_attention_seq2seq", dtype=dtype) as scope:
dtype = scope.dtype
# Encoder.
encoder_cell = copy.deepcopy(cell)
encoder_cell = core_rnn_cell.EmbeddingWrapper(
encoder_cell,
embedding_classes=num_encoder_symbols,
embedding_size=embedding_size)
encoder_outputs, encoder_state = rnn.static_rnn(
encoder_cell, encoder_inputs, dtype=dtype)
# First calculate a concatenation of encoder outputs to put attention on.
top_states = [
array_ops.reshape(e, [-1, 1, cell.output_size]) for e in encoder_outputs
]
attention_states = array_ops.concat(top_states, 1)
# Decoder.
output_size = None
if output_projection is None:
cell = core_rnn_cell.OutputProjectionWrapper(cell, num_decoder_symbols)
output_size = num_decoder_symbols
if isinstance(feed_previous, bool):
return embedding_attention_decoder(
decoder_inputs,
encoder_state,
attention_states,
cell,
num_decoder_symbols,
embedding_size,
num_heads=num_heads,
output_size=output_size,
output_projection=output_projection,
feed_previous=feed_previous,
initial_state_attention=initial_state_attention)
# If feed_previous is a Tensor, we construct 2 graphs and use cond.
def decoder(feed_previous_bool):
reuse = None if feed_previous_bool else True
with variable_scope.variable_scope(
variable_scope.get_variable_scope(), reuse=reuse):
outputs, state = embedding_attention_decoder(
decoder_inputs,
encoder_state,
attention_states,
cell,
num_decoder_symbols,
embedding_size,
num_heads=num_heads,
output_size=output_size,
output_projection=output_projection,
feed_previous=feed_previous_bool,
update_embedding_for_previous=False,
initial_state_attention=initial_state_attention)
state_list = [state]
if nest.is_sequence(state):
state_list = nest.flatten(state)
return outputs + state_list
outputs_and_state = control_flow_ops.cond(
feed_previous, lambda: decoder(True), lambda: decoder(False))
outputs_len = len(decoder_inputs) # Outputs length same as decoder inputs.
state_list = outputs_and_state[outputs_len:]
state = state_list[0]
if nest.is_sequence(encoder_state):
state = nest.pack_sequence_as(
structure=encoder_state, flat_sequence=state_list)
return outputs_and_state[:outputs_len], state
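# Editor's sketch (not part of the original library): a minimal way to wire up
# embedding_attention_seq2seq in TF1-style graph mode. The batch size, sequence
# lengths, vocabulary sizes and cell width below are illustrative assumptions,
# not values taken from this file.
def _example_embedding_attention_seq2seq():
  import tensorflow as tf  # assumed TF 1.x-compatible API
  batch = 16
  enc = [tf.compat.v1.placeholder(tf.int32, [batch]) for _ in range(8)]
  dec = [tf.compat.v1.placeholder(tf.int32, [batch]) for _ in range(10)]
  cell = tf.compat.v1.nn.rnn_cell.GRUCell(64)
  # With output_projection=None the outputs are full-vocabulary logits of
  # shape [batch x num_decoder_symbols].
  outputs, state = embedding_attention_seq2seq(
      enc, dec, cell,
      num_encoder_symbols=1000,
      num_decoder_symbols=1000,
      embedding_size=64,
      feed_previous=True)
  return outputs, state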
def one2many_rnn_seq2seq(encoder_inputs,
decoder_inputs_dict,
enc_cell,
dec_cells_dict,
num_encoder_symbols,
num_decoder_symbols_dict,
embedding_size,
feed_previous=False,
dtype=None,
scope=None):
"""One-to-many RNN sequence-to-sequence model (multi-task).
This is a multi-task sequence-to-sequence model with one encoder and multiple
decoders. Reference to multi-task sequence-to-sequence learning can be found
here: http://arxiv.org/abs/1511.06114
Args:
encoder_inputs: A list of 1D int32 Tensors of shape [batch_size].
decoder_inputs_dict: A dictionary mapping decoder name (string) to the
corresponding decoder_inputs; each decoder_inputs is a list of 1D Tensors
of shape [batch_size]; num_decoders is defined as
len(decoder_inputs_dict).
enc_cell: tf.compat.v1.nn.rnn_cell.RNNCell defining the encoder cell
function and size.
    dec_cells_dict: A dictionary mapping decoder name (string) to an instance of
tf.nn.rnn_cell.RNNCell.
num_encoder_symbols: Integer; number of symbols on the encoder side.
num_decoder_symbols_dict: A dictionary mapping decoder name (string) to an
integer specifying number of symbols for the corresponding decoder;
len(num_decoder_symbols_dict) must be equal to num_decoders.
embedding_size: Integer, the length of the embedding vector for each symbol.
feed_previous: Boolean or scalar Boolean Tensor; if True, only the first of
decoder_inputs will be used (the "GO" symbol), and all other decoder
inputs will be taken from previous outputs (as in embedding_rnn_decoder).
If False, decoder_inputs are used as given (the standard decoder case).
    dtype: The dtype of the initial state for both the encoder and decoder
rnn cells (default: tf.float32).
scope: VariableScope for the created subgraph; defaults to
"one2many_rnn_seq2seq"
Returns:
A tuple of the form (outputs_dict, state_dict), where:
outputs_dict: A mapping from decoder name (string) to a list of the same
length as decoder_inputs_dict[name]; each element in the list is a 2D
Tensors with shape [batch_size x num_decoder_symbol_list[name]]
containing the generated outputs.
state_dict: A mapping from decoder name (string) to the final state of the
corresponding decoder RNN; it is a 2D Tensor of shape
[batch_size x cell.state_size].
Raises:
TypeError: if enc_cell or any of the dec_cells are not instances of RNNCell.
ValueError: if len(dec_cells) != len(decoder_inputs_dict).
"""
outputs_dict = {}
state_dict = {}
if not isinstance(enc_cell, rnn_cell_impl.RNNCell):
raise TypeError("enc_cell is not an RNNCell: %s" % type(enc_cell))
if set(dec_cells_dict) != set(decoder_inputs_dict):
raise ValueError("keys of dec_cells_dict != keys of decodre_inputs_dict")
for dec_cell in dec_cells_dict.values():
if not isinstance(dec_cell, rnn_cell_impl.RNNCell):
raise TypeError("dec_cell is not an RNNCell: %s" % type(dec_cell))
with variable_scope.variable_scope(
scope or "one2many_rnn_seq2seq", dtype=dtype) as scope:
dtype = scope.dtype
# Encoder.
enc_cell = core_rnn_cell.EmbeddingWrapper(
enc_cell,
embedding_classes=num_encoder_symbols,
embedding_size=embedding_size)
_, encoder_state = rnn.static_rnn(enc_cell, encoder_inputs, dtype=dtype)
# Decoder.
for name, decoder_inputs in decoder_inputs_dict.items():
num_decoder_symbols = num_decoder_symbols_dict[name]
dec_cell = dec_cells_dict[name]
with variable_scope.variable_scope("one2many_decoder_" +
str(name)) as scope:
dec_cell = core_rnn_cell.OutputProjectionWrapper(
dec_cell, num_decoder_symbols)
if isinstance(feed_previous, bool):
outputs, state = embedding_rnn_decoder(
decoder_inputs,
encoder_state,
dec_cell,
num_decoder_symbols,
embedding_size,
feed_previous=feed_previous)
else:
# If feed_previous is a Tensor, we construct 2 graphs and use cond.
def filled_embedding_rnn_decoder(feed_previous):
"""The current decoder with a fixed feed_previous parameter."""
# pylint: disable=cell-var-from-loop
reuse = None if feed_previous else True
vs = variable_scope.get_variable_scope()
with variable_scope.variable_scope(vs, reuse=reuse):
outputs, state = embedding_rnn_decoder(
decoder_inputs,
encoder_state,
dec_cell,
num_decoder_symbols,
embedding_size,
feed_previous=feed_previous)
# pylint: enable=cell-var-from-loop
state_list = [state]
if nest.is_sequence(state):
state_list = nest.flatten(state)
return outputs + state_list
outputs_and_state = control_flow_ops.cond(
feed_previous, lambda: filled_embedding_rnn_decoder(True), lambda:
filled_embedding_rnn_decoder(False))
# Outputs length is the same as for decoder inputs.
outputs_len = len(decoder_inputs)
outputs = outputs_and_state[:outputs_len]
state_list = outputs_and_state[outputs_len:]
state = state_list[0]
if nest.is_sequence(encoder_state):
state = nest.pack_sequence_as(
structure=encoder_state, flat_sequence=state_list)
outputs_dict[name] = outputs
state_dict[name] = state
return outputs_dict, state_dict
def sequence_loss_by_example(logits,
targets,
weights,
average_across_timesteps=True,
softmax_loss_function=None,
name=None):
"""Weighted cross-entropy loss for a sequence of logits (per example).
Args:
logits: List of 2D Tensors of shape [batch_size x num_decoder_symbols].
targets: List of 1D batch-sized int32 Tensors of the same length as logits.
weights: List of 1D batch-sized float-Tensors of the same length as logits.
average_across_timesteps: If set, divide the returned cost by the total
label weight.
softmax_loss_function: Function (labels, logits) -> loss-batch to be used
instead of the standard softmax (the default if this is None). **Note that
to avoid confusion, it is required for the function to accept named
arguments.**
name: Optional name for this operation, default: "sequence_loss_by_example".
Returns:
1D batch-sized float Tensor: The log-perplexity for each sequence.
Raises:
ValueError: If len(logits) is different from len(targets) or len(weights).
"""
if len(targets) != len(logits) or len(weights) != len(logits):
raise ValueError("Lengths of logits, weights, and targets must be the same "
"%d, %d, %d." % (len(logits), len(weights), len(targets)))
with ops.name_scope(name, "sequence_loss_by_example",
logits + targets + weights):
log_perp_list = []
for logit, target, weight in zip(logits, targets, weights):
if softmax_loss_function is None:
# TODO(irving,ebrevdo): This reshape is needed because
# sequence_loss_by_example is called with scalars sometimes, which
# violates our general scalar strictness policy.
target = array_ops.reshape(target, [-1])
crossent = nn_ops.sparse_softmax_cross_entropy_with_logits(
labels=target, logits=logit)
else:
crossent = softmax_loss_function(labels=target, logits=logit)
log_perp_list.append(crossent * weight)
log_perps = math_ops.add_n(log_perp_list)
if average_across_timesteps:
total_size = math_ops.add_n(weights)
total_size += 1e-12 # Just to avoid division by 0 for all-0 weights.
log_perps /= total_size
return log_perps
def sequence_loss(logits,
targets,
weights,
average_across_timesteps=True,
average_across_batch=True,
softmax_loss_function=None,
name=None):
"""Weighted cross-entropy loss for a sequence of logits, batch-collapsed.
Args:
logits: List of 2D Tensors of shape [batch_size x num_decoder_symbols].
targets: List of 1D batch-sized int32 Tensors of the same length as logits.
weights: List of 1D batch-sized float-Tensors of the same length as logits.
average_across_timesteps: If set, divide the returned cost by the total
label weight.
average_across_batch: If set, divide the returned cost by the batch size.
softmax_loss_function: Function (labels, logits) -> loss-batch to be used
instead of the standard softmax (the default if this is None). **Note that
to avoid confusion, it is required for the function to accept named
arguments.**
name: Optional name for this operation, defaults to "sequence_loss".
Returns:
A scalar float Tensor: The average log-perplexity per symbol (weighted).
Raises:
ValueError: If len(logits) is different from len(targets) or len(weights).
"""
with ops.name_scope(name, "sequence_loss", logits + targets + weights):
cost = math_ops.reduce_sum(
sequence_loss_by_example(
logits,
targets,
weights,
average_across_timesteps=average_across_timesteps,
softmax_loss_function=softmax_loss_function))
if average_across_batch:
batch_size = array_ops.shape(targets[0])[0]
return cost / math_ops.cast(batch_size, cost.dtype)
else:
return cost
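# Editor's sketch (illustrative only): computing sequence_loss on tiny dummy
# tensors; the batch size, number of steps and vocabulary size are made up.
def _example_sequence_loss():
  import tensorflow as tf  # assumed TF 1.x-compatible API
  batch, steps, vocab = 4, 3, 10
  logits = [tf.zeros([batch, vocab]) for _ in range(steps)]
  targets = [tf.zeros([batch], dtype=tf.int32) for _ in range(steps)]
  weights = [tf.ones([batch]) for _ in range(steps)]
  # Returns a scalar: the average log-perplexity per symbol, weighted.
  return sequence_loss(logits, targets, weights)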
def model_with_buckets(encoder_inputs,
decoder_inputs,
targets,
weights,
buckets,
seq2seq,
softmax_loss_function=None,
per_example_loss=False,
name=None):
"""Create a sequence-to-sequence model with support for bucketing.
The seq2seq argument is a function that defines a sequence-to-sequence model,
e.g., seq2seq = lambda x, y: basic_rnn_seq2seq(
x, y, rnn_cell.GRUCell(24))
Args:
encoder_inputs: A list of Tensors to feed the encoder; first seq2seq input.
decoder_inputs: A list of Tensors to feed the decoder; second seq2seq input.
targets: A list of 1D batch-sized int32 Tensors (desired output sequence).
weights: List of 1D batch-sized float-Tensors to weight the targets.
buckets: A list of pairs of (input size, output size) for each bucket.
    seq2seq: A sequence-to-sequence model function; it takes 2 inputs that agree
with encoder_inputs and decoder_inputs, and returns a pair consisting of
outputs and states (as, e.g., basic_rnn_seq2seq).
softmax_loss_function: Function (labels, logits) -> loss-batch to be used
instead of the standard softmax (the default if this is None). **Note that
to avoid confusion, it is required for the function to accept named
arguments.**
per_example_loss: Boolean. If set, the returned loss will be a batch-sized
tensor of losses for each sequence in the batch. If unset, it will be a
scalar with the averaged loss from all examples.
name: Optional name for this operation, defaults to "model_with_buckets".
Returns:
A tuple of the form (outputs, losses), where:
outputs: The outputs for each bucket. Its j'th element consists of a list
of 2D Tensors. The shape of output tensors can be either
[batch_size x output_size] or [batch_size x num_decoder_symbols]
depending on the seq2seq model used.
losses: List of scalar Tensors, representing losses for each bucket, or,
if per_example_loss is set, a list of 1D batch-sized float Tensors.
Raises:
ValueError: If length of encoder_inputs, targets, or weights is smaller
than the largest (last) bucket.
"""
if len(encoder_inputs) < buckets[-1][0]:
raise ValueError("Length of encoder_inputs (%d) must be at least that of la"
"st bucket (%d)." % (len(encoder_inputs), buckets[-1][0]))
if len(targets) < buckets[-1][1]:
raise ValueError("Length of targets (%d) must be at least that of last "
"bucket (%d)." % (len(targets), buckets[-1][1]))
if len(weights) < buckets[-1][1]:
raise ValueError("Length of weights (%d) must be at least that of last "
"bucket (%d)." % (len(weights), buckets[-1][1]))
all_inputs = encoder_inputs + decoder_inputs + targets + weights
losses = []
outputs = []
with ops.name_scope(name, "model_with_buckets", all_inputs):
for j, bucket in enumerate(buckets):
with variable_scope.variable_scope(
variable_scope.get_variable_scope(), reuse=True if j > 0 else None):
bucket_outputs, _ = seq2seq(encoder_inputs[:bucket[0]],
decoder_inputs[:bucket[1]])
outputs.append(bucket_outputs)
if per_example_loss:
losses.append(
sequence_loss_by_example(
outputs[-1],
targets[:bucket[1]],
weights[:bucket[1]],
softmax_loss_function=softmax_loss_function))
else:
losses.append(
sequence_loss(
outputs[-1],
targets[:bucket[1]],
weights[:bucket[1]],
softmax_loss_function=softmax_loss_function))
return outputs, losses
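# Editor's sketch: pairing model_with_buckets with an attention seq2seq, in the
# spirit of the lambda shown in its docstring. The single bucket, batch size
# and vocabulary sizes are illustrative assumptions.
def _example_model_with_buckets():
  import tensorflow as tf  # assumed TF 1.x-compatible API
  buckets = [(5, 10)]
  batch = 8
  enc = [tf.compat.v1.placeholder(tf.int32, [batch]) for _ in range(5)]
  dec = [tf.compat.v1.placeholder(tf.int32, [batch]) for _ in range(10)]
  targets = [tf.compat.v1.placeholder(tf.int32, [batch]) for _ in range(10)]
  weights = [tf.compat.v1.placeholder(tf.float32, [batch]) for _ in range(10)]
  seq2seq_f = lambda x, y: embedding_attention_seq2seq(
      x, y, tf.compat.v1.nn.rnn_cell.GRUCell(24),
      num_encoder_symbols=100, num_decoder_symbols=100, embedding_size=24)
  return model_with_buckets(enc, dec, targets, weights, buckets, seq2seq_f)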
| [
"[email protected]"
] | |
277f7795b863efce66e36fe0d3af9d010aeeb97f | 9ae08906602af5eacec43d60e5e428269bf24eb1 | /detection.py | 648538a99d8aa1cf5d30ae48f7af2a87f8b59b77 | [] | no_license | yangzhaonan18/TSDcv2 | 9f73278979542d1a40ced5aa152bbc7fa363398c | e9cb0fefc7177db93510b7bc5ca1bb86e32571c6 | refs/heads/master | 2020-04-04T19:39:08.138349 | 2019-03-18T04:54:36 | 2019-03-18T04:54:36 | 156,214,910 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,944 | py | # -*- coding:utf-8 -*-
import cv2
from cal_wh_ratio import cal_wh_ratio
from Crop_cnt import Crop_cnt
from cal_color_ratio import cal_ratio
from find_crop_center import find_crop_center
def detection(frame, BinColors, color, contours, i):  # decide whether this contour is an object to recognize: returns 1 if so, -1 if not
    """
    :param frame: the raw, unprocessed input image
    :param BinColors: binary image of the color-selected regions after thresholding
    :param color: the color currently being processed
    :param contours: all contours extracted for the current color
    :param i: index of the contour currently being processed
    :return: -1 means the contour at this index should be discarded; 1 means it is an object that needs further classification
    """
print("def detection(frame, BinColors, color, contours, i): >>>")
    # only one contour is handled per call
    BinColors_show = BinColors.copy()
    print("i = ", i)
    cv2.drawContours(BinColors_show, contours, i, (0, 255, 255), 2)  # the last argument is the line thickness; -1 means filled
    cv2.imshow("detection/BinColors_show", BinColors_show)  # show the current contour on the binary color image
    wh_ratio = cal_wh_ratio(contours[i])  # aspect ratio of the bounding rectangle; it should not be very large
    CropThing = Crop_cnt(frame, contours[i], color, wh_ratio)  # crop the region and rotate it to horizontal
    color_ratio, cnt_ratio, rect_ratio, circle_ratio = cal_ratio(CropThing, color)  # ratio of contour area to convex hull area; it should not be very large
    if color_ratio == -1:  # exclude cases where the computation failed
        print(">>> case: color_ratio == -1")
        return None, -1
    if wh_ratio[0] == -1:  # exclude cases where the computation failed
        print(">>> case: wh_ratio[0] == -1 :", wh_ratio)
        return None, -1
    if wh_ratio[1] > 9:  # exclude unreasonable aspect ratios
        print(">>> case: wh_ratio[1] > 9 :", wh_ratio)
        return None, -1
    # if rect_ratio < 0.5:  # rectangularity below 0.5: a triangle is exactly 0.5, and a traffic light can never fall below 0.5
    #     print(">>> case: rect_ratio < 0.5: ")
    # from here on, only the qualifying cases (possible traffic lights and signs) are considered:
    # red light = red color + aspect ratio of 1 + size in the range (10, 50)
if color == "red" and wh_ratio[1] == 1:
if wh_ratio[2][0] > 10 and wh_ratio[2][0] < 100 and color_ratio > 0.5 and color_ratio / cnt_ratio >= 1:
print(">>> a red light" * 10)
cv2.waitKey(0)
return CropThing, 1
if wh_ratio[2][0] > 15 and wh_ratio[2][0] < 150 and color_ratio / cnt_ratio != 1:
            if color_ratio / cnt_ratio < 0.99:  # the middle of the sign contains non-red pixels
print(">>> a red sign " * 10)
cv2.waitKey(0)
return CropThing, 1
elif color == "red" and wh_ratio[1] > 1 and wh_ratio[1] < 10: # 长宽比限制
if wh_ratio[2][0] > 15 and wh_ratio[2][
1] > 15 and color_ratio / cnt_ratio < 1 and color_ratio < 0.85 and color_ratio > 0.3:
print(">>> many red sign " * 10)
cv2.waitKey(0)
CropThing_show, center, radius = find_crop_center(CropThing, color)
return CropThing_show, 1
if color == "green" and wh_ratio[1] == 1 and color_ratio > 0.4 and wh_ratio[2][0] > 10 and wh_ratio[2][
0] < 100 and color_ratio / cnt_ratio >= 1:
print(">>> a green light" * 10)
cv2.waitKey(0)
return CropThing, 1
if color == "blue" and wh_ratio[1] == 1:
print(">>> a blue sign" * 10)
cv2.waitKey(0)
return CropThing, 1
elif color == "blue" and wh_ratio[0] == 1 and wh_ratio[2][0] > 20 and wh_ratio[2][0] < 150 and (
wh_ratio[1] == 2 or wh_ratio[1] == 3):
print(">>> many longitudinal blue sign" * 10)
cv2.waitKey(0)
CropThing_show, center, radius = find_crop_center(CropThing, color)
return CropThing_show, 1
if color == "yellow" and wh_ratio[1] == 1 and color_ratio > 0.4 and wh_ratio[2][0] > 10 and wh_ratio[2][
0] < 100 and color_ratio / cnt_ratio >= 1:
print(">>> a yellow light" * 10)
cv2.waitKey(0)
return CropThing, 1
if color == "yellow" and wh_ratio[0] == 0 and wh_ratio[1] == 2 and wh_ratio[2][0] > 50 and wh_ratio[2][
0] < 400 and color_ratio / cnt_ratio < 0.9 and color_ratio > 0.5 and cnt_ratio > 0.9:
print(">>> a yellow ETC sign " * 10)
cv2.waitKey(0)
return CropThing, 1
elif color == "yellow" and wh_ratio[1] == 1 and color_ratio > 0.5:
print(">>> mabey a yellow work sign")
cv2.waitKey(0)
return CropThing, 1
# center, radius = find_crop_center(CropThing, color)
    # cv2.drawContours(frame, [box[0:2]], 0, (0, 0, 255), 2)  # draw the bounding rectangle
# cv2.imshow("frame", frame)
# print("wh_ratio:", wh_ratio)
# print("color_ratio:", color, "=", color_ratio)
# print("good " * 10)
else:
return None, -1
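# Editor's sketch (hypothetical driver, not from this repository): how
# detection() is typically called once per contour for each color mask.
# The two-value cv2.findContours return assumes OpenCV 4.x.
def _example_drive_detection(frame, color_masks):
    results = []
    for color, mask in color_masks.items():  # mask: single-channel binary image
        contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                       cv2.CHAIN_APPROX_SIMPLE)
        bin_color = cv2.bitwise_and(frame, frame, mask=mask)  # colored view of the mask
        for i in range(len(contours)):
            crop, flag = detection(frame, bin_color, color, contours, i)
            if flag == 1:
                results.append((color, crop))
    return results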
| [
"[email protected]"
] | |
56c463ba7c4f3619bcfee2dca9180a72df2723b2 | b72985ae7ab123f855e024814e70173fde84b43e | /DAYS/Day_38/django_training/animal_info/venv/lib/python3.8/token.py | 2e7165bbfd5fb1fab7ab6e27b9eba6e14b96a369 | [] | no_license | raziel5746/BOOTCAMP | 522da4526ded4225ba95c60146fd13fb5129c519 | 22088ab622b34ad8ae98e0fe3e5edc592a75ea0c | refs/heads/master | 2021-09-23T20:32:01.422171 | 2021-03-30T20:51:05 | 2021-03-30T20:51:05 | 218,781,276 | 0 | 0 | null | 2021-09-22T18:54:00 | 2019-10-31T14:10:28 | Python | UTF-8 | Python | false | false | 57 | py | /home/raziel/.pyenv/versions/3.8.0/lib/python3.8/token.py | [
"[email protected]"
] | |
9e87c0e877171415faf85154197b17b0bd660a82 | 5173c3e3956387a3f2ae8fcf4aed7c7a600dac78 | /Algorithm_Practice/Subset_BackTrackinig.py | 3179a5128a63a3e2612e05b9a06d76ea6127d75a | [] | no_license | ma0723/Min_Algorithm | df75f53f6e89b7817d4b52d686effb8236a4ddac | b02d1043008cb32e22daa9d4207b9a45f111d66f | refs/heads/master | 2023-07-25T11:00:15.397093 | 2021-08-30T02:08:05 | 2021-08-30T02:08:05 | 375,613,927 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 941 | py | #s: 선택된 원소들의 합
#r:남은 원소들의 합(선택가능한 것들중에)
def f(i, N, K,s, r ):
global cnt
if s == K:
# 부분집합의 원소의 합이 K인 것을 찾음
cnt+=1
return
# 이미 K를 찾았으므로 원소가 추가되면 K보다 커지므로 고려 X
elif i == N:
# 모든 원소를 고려함 K는 못찾아냄
return
elif s > K:
# 현재까지의 합이 K보다 커지는 경우
return
elif s + r < K :
# 남은 원소를 모두 포함해도 K가 안되는 경우
return
else:
f(i+1,N,K,s,r-(i+1))
# i번째 원소를 선택하지 않은 경우
f(i+1,N,K,s+(i+1),r-(i+1))
# i번째 원소 선택
cnt = 0
N = 10 #1에서부터 N까지 집합의 원소
K = 10 #부분집합의 합
f(0,N,K,0, (N+1) * N //2 ) #선택된 원소의 합, 아직 선택되지 않은 원소의 합
print(cnt) | [
"[email protected]"
] | |
46e0be022cc0d4d8eade37c191f8d95043a38b7f | 764073a5e32b99cd339114af76d55ba57e9bd0e0 | /backend/firearms_19698/wsgi.py | c61cc306c625059cadfa90373f4006022475937d | [] | no_license | crowdbotics-apps/firearms-19698 | 9f03ea38c697db6bc6af5be692d223b421b502eb | 87b369f37702294d78802c7a656f9d8f1eeaa9e0 | refs/heads/master | 2022-12-02T22:32:37.130766 | 2020-08-21T08:58:05 | 2020-08-21T08:58:05 | 289,224,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for firearms_19698 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'firearms_19698.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
93ebf812cf2dc1ab6fe7dacb49a0940fadec933e | 9e549ee54faa8b037f90eac8ecb36f853e460e5e | /venv/lib/python3.6/site-packages/pylint/test/functional/bad_reversed_sequence.py | 5a7b1f3dad23ad88d9b3fc7280087bcf9eae9d3b | [
"MIT"
] | permissive | aitoehigie/britecore_flask | e8df68e71dd0eac980a7de8c0f20b5a5a16979fe | eef1873dbe6b2cc21f770bc6dec783007ae4493b | refs/heads/master | 2022-12-09T22:07:45.930238 | 2019-05-15T04:10:37 | 2019-05-15T04:10:37 | 177,354,667 | 0 | 0 | MIT | 2022-12-08T04:54:09 | 2019-03-24T00:38:20 | Python | UTF-8 | Python | false | false | 2,181 | py | """ Checks that reversed() receive proper argument """
# pylint: disable=missing-docstring, useless-object-inheritance
# pylint: disable=too-few-public-methods,no-self-use,no-absolute-import
from collections import deque
__revision__ = 0
class GoodReversed(object):
""" Implements __reversed__ """
def __reversed__(self):
return [1, 2, 3]
class SecondGoodReversed(object):
""" Implements __len__ and __getitem__ """
def __len__(self):
return 3
def __getitem__(self, index):
return index
class BadReversed(object):
""" implements only len() """
def __len__(self):
return 3
class SecondBadReversed(object):
""" implements only __getitem__ """
def __getitem__(self, index):
return index
class ThirdBadReversed(dict):
""" dict subclass """
def uninferable(seq):
""" This can't be infered at this moment,
make sure we don't have a false positive.
"""
return reversed(seq)
def test(path):
""" test function """
seq = reversed() # No argument given
seq = reversed(None) # [bad-reversed-sequence]
seq = reversed([1, 2, 3])
seq = reversed((1, 2, 3))
seq = reversed(set()) # [bad-reversed-sequence]
seq = reversed({"a": 1, "b": 2}) # [bad-reversed-sequence]
seq = reversed(iter([1, 2, 3])) # [bad-reversed-sequence]
seq = reversed(GoodReversed())
seq = reversed(SecondGoodReversed())
seq = reversed(BadReversed()) # [bad-reversed-sequence]
seq = reversed(SecondBadReversed()) # [bad-reversed-sequence]
seq = reversed(range(100))
seq = reversed(ThirdBadReversed()) # [bad-reversed-sequence]
seq = reversed(lambda: None) # [bad-reversed-sequence]
seq = reversed(deque([]))
seq = reversed("123")
seq = uninferable([1, 2, 3])
seq = reversed(path.split("/"))
return seq
def test_dict_ancestor_and_reversed():
"""Don't emit for subclasses of dict, with __reversed__ implemented."""
from collections import OrderedDict
class Child(dict):
def __reversed__(self):
return reversed(range(10))
seq = reversed(OrderedDict())
return reversed(Child()), seq
| [
"[email protected]"
] | |
14f4384b3f417db7ee5e8c97bf8f2c110123b40a | bfbe642d689b5595fc7a8e8ae97462c863ba267a | /bin/Python27/Lib/site-packages/openmdao.main-0.8.1-py2.7.egg/openmdao/main/api.py | 829c20fd341079e51ad207c522ec775c63353842 | [
"MIT",
"LicenseRef-scancode-other-permissive"
] | permissive | mcanthony/meta-core | 0c0a8cde1669f749a4880aca6f816d28742a9c68 | 3844cce391c1e6be053572810bad2b8405a9839b | refs/heads/master | 2020-12-26T03:11:11.338182 | 2015-11-04T22:58:13 | 2015-11-04T22:58:13 | 45,806,011 | 1 | 0 | null | 2015-11-09T00:34:22 | 2015-11-09T00:34:22 | null | UTF-8 | Python | false | false | 1,672 | py | """
Pseudo package containing all of the main classes/objects in the
openmdao.main API.
"""
from openmdao.util.log import logger, enable_console
from openmdao.main.expreval import ExprEvaluator
from openmdao.main.factory import Factory
from openmdao.main.factorymanager import create, get_available_types
from openmdao.main.container import Container, get_default_name, \
create_io_traits
from openmdao.main.vartree import VariableTree
from openmdao.main.component import Component, SimulationRoot
from openmdao.main.component_with_derivatives import ComponentWithDerivatives
from openmdao.main.driver_uses_derivatives import DriverUsesDerivatives
from openmdao.main.assembly import Assembly, set_as_top, dump_iteration_tree
from openmdao.main.driver import Driver, Run_Once
from openmdao.main.workflow import Workflow
from openmdao.main.dataflow import Dataflow
from openmdao.main.sequentialflow import SequentialWorkflow
from openmdao.main.cyclicflow import CyclicWorkflow
from openmdao.main.variable import Variable
from openmdao.main.exceptions import ConstraintError
from openmdao.main.interfaces import implements, Attribute, Interface
from openmdao.main.file_supp import FileMetadata
from openmdao.main.case import Case
from openmdao.main.arch import Architecture
from openmdao.main.problem_formulation import ArchitectureAssembly, OptProblem
from openmdao.util.eggsaver import SAVE_PICKLE, SAVE_CPICKLE #, SAVE_YAML, SAVE_LIBYAML
from openmdao.units import convert_units
# TODO: This probably shouldn't be here. Removing it will require edits to some
# of our plugins
from openmdao.main.datatypes.slot import Slot
| [
"[email protected]"
] | |
13089b737a1eb01b2104f1882aedeeacdca2c27f | c13027a6dc58c49cf557b5ec61ddae9886a95cab | /AtCoderBeginnerContest200/A.py | 32781fccda16d90d708fa03e7fdae9bfc07601be | [] | no_license | kaitey/AtCoder | beec54a1303ce16898e4dfb68d5153c734dbe7ff | 162239d90321cf83b1316edf8b7438c6c4369a28 | refs/heads/main | 2023-06-26T09:45:02.044147 | 2021-07-31T11:08:47 | 2021-07-31T11:08:47 | 344,876,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95 | py | def main():
n = int(input())
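    # -(-n // 100) computes ceil(n / 100) using only integer floor division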
print(-(-n//100))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
a0d0fa4b510c5d5e1735f2ebdfb84349b06dbeeb | 96b179626d078b92bb5da5b940246ae1063e5ed9 | /emmet-core/tests/test_polar.py | 6c0c7b77b1e95911ecb70b4bf51b465c4b038f60 | [
"LicenseRef-scancode-hdf5",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | jmmshn/emmet | cdb5e7abba2a4bf34f74621fe6a5cbad72061573 | b6b4f9fafbf47faa6792f037d3c56221b711dfb4 | refs/heads/main | 2023-08-31T15:42:37.533834 | 2023-01-17T23:43:57 | 2023-01-17T23:43:57 | 309,003,912 | 0 | 0 | NOASSERTION | 2020-11-01T01:40:17 | 2020-11-01T01:40:17 | null | UTF-8 | Python | false | false | 9,737 | py | from datetime import datetime
import io
from monty.dev import deprecated
import pytest
from pymatgen.core import Lattice, Structure
from emmet.core.polar import DielectricDoc, PiezoelectricDoc
@pytest.fixture
def dielectric_structure():
test_latt = Lattice.cubic(3.0)
test_struc = Structure(lattice=test_latt, species=["Fe"], coords=[[0, 0, 0]])
return test_struc
def test_dielectric(dielectric_structure):
epsilon_static = [
[10.81747665, -0.00378371, 0.0049036],
[-0.00373185, 10.82629335, -0.00432847],
[0.0036548, -0.00479139, 8.68204827],
]
epsilon_ionic = [
[30.98960925, -0.09107371, 0.00226948],
[-0.09107371, 31.44264572, -0.00427919],
[0.00226948, -0.00427919, 29.21747234],
]
doc = DielectricDoc.from_ionic_and_electronic(
material_id="mp-149",
structure=dielectric_structure,
electronic=epsilon_static,
ionic=epsilon_ionic,
deprecated=False,
)
assert isinstance(doc, DielectricDoc)
assert doc.property_name == "dielectric"
assert doc.material_id == "mp-149"
assert doc.n == pytest.approx(3.17940376590938)
assert doc.e_total == pytest.approx(40.6585061611482)
assert doc.e_ionic == pytest.approx(30.5498978544694)
@pytest.fixture
def piezoelectric_structure():
d = {
"@module": "pymatgen.core.structure",
"@class": "Structure",
"charge": None,
"lattice": {
"matrix": [
[0.0, 5.077586, 0.0],
[8.769167, 0.0, 0.0],
[0.0, -1.7206, -4.819114],
],
"a": 5.077586,
"b": 8.769167,
"c": 5.11706205795826,
"alpha": 90.0,
"beta": 109.648423669999,
"gamma": 90.0,
"volume": 214.576831815117,
},
"sites": [
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.5, 0.997223, 0.0],
"xyz": [8.744815023241, 2.538793, 0.0],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.0, 0.007335, 0.5],
"xyz": [0.064321839945, -0.8603, -2.409557],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.5, 0.673599, 0.0],
"xyz": [5.906902122033, 2.538793, 0.0],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.0, 0.848636, 0.0],
"xyz": [7.441830806212, 0.0, 0.0],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.0, 0.497223, 0.0],
"xyz": [4.360231523241, 0.0, 0.0],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.5, 0.507335, 0.5],
"xyz": [4.448905339945, 1.678493, -2.409557],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.0, 0.173599, 0.0],
"xyz": [1.522318622033, 0.0, 0.0],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Li", "occu": 1}],
"abc": [0.5, 0.348636, 0.0],
"xyz": [3.057247306212, 2.538793, 0.0],
"label": "Li",
"properties": {},
},
{
"species": [{"element": "Fe", "occu": 1}],
"abc": [0.5, 0.840139, 0.5],
"xyz": [7.367319194213, 1.678493, -2.409557],
"label": "Fe",
"properties": {},
},
{
"species": [{"element": "Fe", "occu": 1}],
"abc": [0.0, 0.674037, 0.5],
"xyz": [5.910743017179, -0.8603, -2.409557],
"label": "Fe",
"properties": {},
},
{
"species": [{"element": "Fe", "occu": 1}],
"abc": [0.0, 0.340139, 0.5],
"xyz": [2.982735694213, -0.8603, -2.409557],
"label": "Fe",
"properties": {},
},
{
"species": [{"element": "Fe", "occu": 1}],
"abc": [0.5, 0.174037, 0.5],
"xyz": [1.526159517179, 1.678493, -2.409557],
"label": "Fe",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.779705, 0.500278, 0.268671],
"xyz": [4.387021328426, 3.49674386953, -1.294756177494],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.720295, 0.000278, 0.731329],
"xyz": [0.002437828426, 2.39903513047, -3.524357822506],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.755993, 0.189934, 0.270851],
"xyz": [1.665562964978, 3.372593242298, -1.305261846014],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.744007, 0.689934, 0.729149],
"xyz": [6.050146464978, 2.523185757702, -3.513852153986],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.766434, 0.333483, 0.723505],
"xyz": [2.924368118661, 2.646771845324, -3.48665307457],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.733566, 0.833483, 0.276495],
"xyz": [7.308951618661, 3.249007154676, -1.33246092543],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.279705, 0.000278, 0.268671],
"xyz": [0.002437828426, 0.95795086953, -1.294756177494],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.220295, 0.500278, 0.731329],
"xyz": [4.387021328426, -0.13975786953, -3.524357822506],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.255993, 0.689934, 0.270851],
"xyz": [6.050146464978, 0.833800242298, -1.305261846014],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.244007, 0.189934, 0.729149],
"xyz": [1.665562964978, -0.015607242298, -3.513852153986],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.266434, 0.833483, 0.723505],
"xyz": [7.308951618661, 0.107978845324, -3.48665307457],
"label": "O",
"properties": {},
},
{
"species": [{"element": "O", "occu": 1}],
"abc": [0.233566, 0.333483, 0.276495],
"xyz": [2.924368118661, 0.710214154676, -1.33246092543],
"label": "O",
"properties": {},
},
],
}
test_struc = Structure.from_dict(d)
return test_struc
def test_piezoelectric(piezoelectric_structure):
piezo_static = [
[0.07886, -0.07647, -0.01902, 0.0, -0.18077, 0.0],
[0.0, 0.0, 0.0, -0.10377, 0.0, 0.18109],
[0.0, 0.0, 0.0, -0.07831, 0.0, 0.04849],
]
piezo_ionic = [
[-0.53096, 0.12789, -0.01236, 0.0, 0.09352, 0.0],
[-0.00013, 9e-05, 3e-05, 0.2681, 0.00042, -0.09373],
[-0.00018, -9e-05, -0.00029, 0.15863, 0.0001, -0.22751],
]
doc = PiezoelectricDoc.from_ionic_and_electronic(
material_id="mp-149",
structure=piezoelectric_structure,
electronic=piezo_static,
ionic=piezo_ionic,
deprecated=False,
)
assert isinstance(doc, PiezoelectricDoc)
assert doc.property_name == "piezoelectric"
assert doc.material_id == "mp-149"
assert doc.e_ij_max == pytest.approx(0.464365904540805)
assert [abs(n) for n in doc.strain_for_max] == pytest.approx(
[
0.0675760207481869,
0.97358569089405,
0.110731643941102,
0.0,
0.187890624929232,
0.0,
]
)
total = [
[0.0, 0.0, 0.0, 0.08032, 0.0, -0.17902],
[-0.03138, -0.4521, 0.05142, 0.0, -0.08725, 0.0],
[0.0, 0.0, 0.0, 0.16433, 0.0, 0.08736],
]
for i in range(3):
assert doc.total[i] == pytest.approx(total[i])
| [
"[email protected]"
] | |
3276de1e752d2ef39dbbb444cbf57f4b51d4316e | cad91ae76d2746a6c28ddda0f33a58f9d461378f | /PaddlePaddle/Classification/RN50v1.5/optimizer.py | 8d128ff97b5a0186f87a48845218c12b7d4e81b1 | [] | no_license | NVIDIA/DeepLearningExamples | fe677521e7e2a16e3cb0b77e358f9aab72f8c11a | a5388a45f71a949639b35cc5b990bd130d2d8164 | refs/heads/master | 2023-08-31T20:57:08.798455 | 2023-08-23T10:09:12 | 2023-08-23T10:09:12 | 131,881,622 | 11,838 | 3,124 | null | 2023-08-28T16:57:33 | 2018-05-02T17:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 2,166 | py | # Copyright (c) 2022 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import logging
from paddle import optimizer as optim
class Momentum:
"""
Simple Momentum optimizer with velocity state.
Args:
args(Namespace): Arguments obtained from ArgumentParser.
learning_rate(float|LRScheduler): The learning rate used to update parameters.
Can be a float value or a paddle.optimizer.lr.LRScheduler.
"""
def __init__(self, args, learning_rate):
super().__init__()
self.learning_rate = learning_rate
self.momentum = args.momentum
self.weight_decay = args.weight_decay
self.grad_clip = None
self.multi_precision = args.amp
def __call__(self):
# model_list is None in static graph
parameters = None
opt = optim.Momentum(
learning_rate=self.learning_rate,
momentum=self.momentum,
weight_decay=self.weight_decay,
grad_clip=self.grad_clip,
multi_precision=self.multi_precision,
parameters=parameters)
return opt
def build_optimizer(args, lr):
"""
Build a raw optimizer with learning rate scheduler.
Args:
args(Namespace): Arguments obtained from ArgumentParser.
lr(paddle.optimizer.lr.LRScheduler): A LRScheduler used for training.
return:
optim(paddle.optimizer): A normal optmizer.
"""
optimizer_mod = sys.modules[__name__]
opt = getattr(optimizer_mod, args.optimizer)(args, learning_rate=lr)()
logging.info("build optimizer %s success..", opt)
return opt
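# Editor's sketch (hypothetical values): building an optimizer from a parsed
# argument namespace; a plain float is accepted here in place of an LRScheduler.
def _example_build_optimizer():
    import argparse
    args = argparse.Namespace(optimizer="Momentum", momentum=0.875,
                              weight_decay=3.05e-05, amp=False)
    return build_optimizer(args, lr=0.1)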
| [
"[email protected]"
] | |
5e02da1fb75760d07cd1c9d3dc9e28f51968d90a | d1d067bad6b65e2be1b5488d5abd17c0e9cd1756 | /perdiem/artist/forms.py | b83abf50adfe49eb0fec71576b242b9a8e66a202 | [] | no_license | localastronaut/perdiem-django | 7b84cf34b83a49cc4695b735321f52eb2be01260 | c273dc6fda5533c52710cde0f196886369b36c9d | refs/heads/master | 2021-06-13T14:31:54.089816 | 2016-05-24T06:13:10 | 2016-05-24T06:13:10 | 59,598,476 | 0 | 0 | null | 2016-05-24T18:31:19 | 2016-05-24T18:31:18 | null | UTF-8 | Python | false | false | 1,376 | py | """
:Created: 19 March 2016
:Author: Lucas Connors
"""
from django import forms
class CoordinatesFromAddressForm(forms.Form):
address = forms.CharField()
class ArtistApplyForm(forms.Form):
artist_name = forms.CharField(label='Artist / Band Name')
genre = forms.CharField()
hometown = forms.CharField()
email = forms.EmailField()
phone_number = forms.CharField()
bio = forms.CharField(widget=forms.Textarea(attrs={'placeholder': 'We started playing music because...',}))
campaign_reason = forms.CharField(label='Why are you raising money?', widget=forms.Textarea(attrs={'placeholder': 'We are trying to record our album...',}))
campaign_expenses = forms.CharField(label='What do you need the money for?', widget=forms.Textarea(attrs={'placeholder': 'Mixing, mastering, studio time, etc...',}))
facebook = forms.URLField(required=False, widget=forms.TextInput(attrs={'placeholder': 'http://',}))
twitter = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder': '@',}))
instagram = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder': '@',}))
music_link = forms.URLField(label='Link to music', widget=forms.TextInput(attrs={'placeholder': 'http://',}))
terms = forms.BooleanField(label='Terms & Conditions', help_text='I have read and agree to the Terms & Conditions')
| [
"[email protected]"
] | |
bcdbf07e584e7fad3a4d3eef5a75fc13b6f524e5 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_34/238.py | ec0410ac5b4c8cc4443fa88ef75da4466aa226a6 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | #-*-coding:utf-8-*-
import sys, re
fh = open(sys.argv[1])
L, D, N = map(int,fh.readline().split(' '))
words = []
for i in range(D): words.append(fh.readline().strip())
for i in range(N):
pattern=fh.readline().strip()
reg = re.compile(re.sub('\\)', ']', re.sub('\\(', '[',pattern)))
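    # e.g. the pattern "(ab)c" compiles to the character-class regex "[ab]c"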
n = 0
for w in words:
if reg.match(w): n += 1
pass
print("Case #%d: %d" % (i + 1, n))
| [
"[email protected]"
] | |
75e13f2077f48e5efe61d8eadde5b91febcc50bd | f7badaf5a680a3276ba8c4223a7c411eff4e0ef5 | /users/validate_vk_mini_apps.py | a534317ac67fc58cf317a7bcfd82caaa4d172d5a | [] | no_license | petrshirin/easy-meet-backend | df9d29190b3f7acb524737f18192abf6c24c346b | 4e315e5e5abeb29bab68d53a83ee005cb13fd28f | refs/heads/master | 2023-01-09T04:42:40.710603 | 2020-11-11T05:05:15 | 2020-11-11T05:05:15 | 310,966,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,057 | py | from base64 import b64encode
from collections import OrderedDict
from hashlib import sha256
from hmac import HMAC
from urllib.parse import urlparse, parse_qsl, urlencode
from django.conf import settings
from typing import Union, Dict
def check_sign(*, query: dict, secret: str) -> Union[Dict, None]:
"""Check VK Apps signature"""
vk_subset = OrderedDict(sorted(x for x in query.items() if x[0][:3] == "vk_"))
hash_code = b64encode(HMAC(secret.encode(), urlencode(vk_subset, doseq=True).encode(), sha256).digest())
decoded_hash_code = hash_code.decode('utf-8')[:-1].replace('+', '-').replace('/', '_')
if query["sign"] == decoded_hash_code:
return vk_subset
def validate_request(url: str) -> Union[Dict, None]:
if not url:
return None
    client_secret = settings.MINI_APP_SECRET  # the secret key from your app's settings
query_params = dict(parse_qsl(urlparse(url).query, keep_blank_values=True))
return check_sign(query=query_params, secret=client_secret)
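# Editor's sketch (test helper with an assumed secret): build a signed query
# locally so the round trip through check_sign can be exercised.
def _example_signed_roundtrip(secret="test-secret"):
    vk_params = OrderedDict(sorted({"vk_user_id": "1", "vk_app_id": "2"}.items()))
    digest = b64encode(HMAC(secret.encode(),
                            urlencode(vk_params, doseq=True).encode(),
                            sha256).digest())
    sign = digest.decode("utf-8")[:-1].replace("+", "-").replace("/", "_")
    query = dict(vk_params, sign=sign)
    return check_sign(query=query, secret=secret)  # returns the vk_* subset on success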
| [
"[email protected]"
] | |
88115d2979a0ddc98e1aab042b9257d23e41433b | fcc88521f63a3c22c81a9242ae3b203f2ea888fd | /Python3/1189-Maximum-Number-of-Balloons/soln.py | bd9afd7441f12e992a1a0795f2a1914ec11278d7 | [
"MIT"
] | permissive | wyaadarsh/LeetCode-Solutions | b5963e3427aa547d485d3a2cb24e6cedc72804fd | 3719f5cb059eefd66b83eb8ae990652f4b7fd124 | refs/heads/master | 2022-12-06T15:50:37.930987 | 2020-08-30T15:49:27 | 2020-08-30T15:49:27 | 291,811,790 | 0 | 1 | MIT | 2020-08-31T19:57:35 | 2020-08-31T19:57:34 | null | UTF-8 | Python | false | false | 203 | py | class Solution:
def maxNumberOfBalloons(self, text: str) -> int:
balloon = [('a', 1), ('b', 1), ('l', 2), ('o', 2), ('n', 1)]
return min(text.count(ch) // cnt for ch, cnt in balloon)
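# Editor's note (illustrative): Solution().maxNumberOfBalloons("loonbalxballpoon")
# returns 2, since each "balloon" needs two 'l' and two 'o'.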
| [
"[email protected]"
] | |
cca4da9a90f7539c0deb3454ad0983978deade59 | 2b6b9deea3d020e87815caf7e3985f050103abf6 | /pie/initialization.py | a5613570f3d272f0978bb0fe6b838f732348899b | [] | no_license | PonteIneptique/pie | a3de5672dcb38936748ec4a43a81edfd1aa07fb0 | 64700bfbf6cc7c3efc1c5b0e14d6f06d6bf1b77f | refs/heads/master | 2021-06-07T08:04:24.117804 | 2018-07-09T09:31:45 | 2018-07-09T09:31:45 | 154,836,022 | 0 | 1 | null | 2021-05-12T09:23:03 | 2018-10-26T13:06:10 | Python | UTF-8 | Python | false | false | 1,465 | py |
import torch
import torch.nn as nn
def init_embeddings(embeddings):
embeddings.reset_parameters()
# nn.init.constant_(embeddings.weight, 0.01)
def init_linear(linear):
linear.reset_parameters()
nn.init.constant_(linear.bias, 0.)
def init_rnn(rnn, forget_bias=1.0):
for pname, p in rnn.named_parameters():
if 'bias' in pname:
nn.init.constant_(p, 0.)
# forget_bias
if 'LSTM' in type(rnn).__name__:
nn.init.constant_(p[rnn.hidden_size:rnn.hidden_size*2], forget_bias)
else:
nn.init.xavier_uniform_(p)
def init_conv(conv):
conv.reset_parameters()
nn.init.xavier_uniform_(conv.weight)
nn.init.constant_(conv.bias, 0.)
def init_pretrained_embeddings(path, encoder, embedding):
with open(path) as f:
nemb, dim = next(f).split()
if int(dim) != embedding.weight.data.size(1):
raise ValueError("Unexpected embeddings size: {}".format(dim))
inits = 0
for line in f:
word, *vec = line.split()
if word in encoder.table:
embedding.weight.data[encoder.table[word], :].copy_(
torch.tensor([float(v) for v in vec]))
inits += 1
if embedding.padding_idx is not None:
embedding.weight.data[embedding.padding_idx].zero_()
print("Initialized {}/{} embeddings".format(inits, embedding.num_embeddings))
| [
"[email protected]"
] | |
3873f848d8c6404a56bae01a616e6ebe1340f841 | 272a8b0b38e4af5f22dd811040f0ca2b0b111c61 | /exp_scripts/loss_improved_wgan_2001.py | 754e8a62540576922f21b77b9f49c33f10940155 | [] | no_license | jessemin/GeneGan | 1c1a97b6ab566a7c556ce1452e4c35530b0b626c | 2ad94e842cfaee531d7e13af7472b623bf96de30 | refs/heads/master | 2021-09-13T13:02:33.629138 | 2018-04-30T06:57:13 | 2018-04-30T06:57:13 | 112,046,600 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 420 | py | import os
os.chdir('../exp_notebooks')
os.system('python loss_improved_wgan.py\
-w=2001\
-save=loss_improved_wgan_2001\
-sample_num=10000\
-n_critic=1\
-d_freq=32\
-w_weight=0.08\
-mse_weight=1.0\
-g_lr=0.001\
-d_lr=0.00001\
-m=/users/jesikmin/GeneGan/exp_notebooks/models/cnn_2001_2/best_model.h5\
-cuda=2')
| [
"[email protected]"
] | |
b010766d32899caed0d8e553c520db23fd3eac74 | cfa632132cd29a0b58e7f45b441ea4f62b0f5eba | /flytekit/bin/entrypoint.py | 7c386888f9e1949c02eb5c8e991015c2e35d7d39 | [
"Apache-2.0"
] | permissive | chixcode/flytekit | 5b4f2e687e82a0d6527411afcdaf0929a94adb13 | f901aee721847c6264d44079d4fa31a75b8876e1 | refs/heads/master | 2020-08-24T00:06:02.808187 | 2019-10-14T18:34:19 | 2019-10-14T18:34:19 | 216,729,272 | 1 | 0 | Apache-2.0 | 2019-10-22T05:22:01 | 2019-10-22T05:22:00 | null | UTF-8 | Python | false | false | 4,641 | py | from __future__ import absolute_import
import importlib as _importlib
import os as _os
import click as _click
import datetime as _datetime
import random as _random
from flyteidl.core import literals_pb2 as _literals_pb2
from flytekit.common import utils as _utils
from flytekit.common.exceptions import scopes as _scopes, system as _system_exceptions
from flytekit.configuration import internal as _internal_config, TemporaryConfiguration as _TemporaryConfiguration
from flytekit.engines import loader as _engine_loader
from flytekit.interfaces.data import data_proxy as _data_proxy
from flytekit.interfaces import random as _flyte_random
from flytekit.models import literals as _literal_models
def _compute_array_job_index():
    # type: () -> int
"""
Computes the absolute index of the current array job. This is determined by summing the compute-environment-specific
    environment variable and the offset (if one's set). The offset will be set and used when the user requests that the
job runs in a number of slots less than the size of the input.
:rtype: int
"""
offset = 0
if _os.environ.get('BATCH_JOB_ARRAY_INDEX_OFFSET'):
offset = int(_os.environ.get('BATCH_JOB_ARRAY_INDEX_OFFSET'))
return offset + int(_os.environ.get(_os.environ.get('BATCH_JOB_ARRAY_INDEX_VAR_NAME')))
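# Editor's sketch (hypothetical environment): how the index resolves for an
# AWS Batch array child; the variable names mirror the lookups above.
def _example_array_index():
    _os.environ['BATCH_JOB_ARRAY_INDEX_OFFSET'] = '5'
    _os.environ['BATCH_JOB_ARRAY_INDEX_VAR_NAME'] = 'AWS_BATCH_JOB_ARRAY_INDEX'
    _os.environ['AWS_BATCH_JOB_ARRAY_INDEX'] = '3'
    return _compute_array_job_index()  # 5 + 3 == 8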
def _map_job_index_to_child_index(local_input_dir, datadir, index):
local_lookup_file = local_input_dir.get_named_tempfile('indexlookup.pb')
idx_lookup_file = _os.path.join(datadir, 'indexlookup.pb')
# if the indexlookup.pb does not exist, then just return the index
if not _data_proxy.Data.data_exists(idx_lookup_file):
return index
_data_proxy.Data.get_data(idx_lookup_file, local_lookup_file)
mapping_proto = _utils.load_proto_from_file(_literals_pb2.LiteralCollection, local_lookup_file)
if len(mapping_proto.literals) < index:
raise _system_exceptions.FlyteSystemAssertion(
"dynamic task index lookup array size: {} is smaller than lookup index {}".format(
len(mapping_proto.literals), index))
return mapping_proto.literals[index].scalar.primitive.integer
@_scopes.system_entry_point
def execute_task(task_module, task_name, inputs, output_prefix, test):
with _TemporaryConfiguration(_internal_config.CONFIGURATION_PATH.get()):
with _utils.AutoDeletingTempDir('input_dir') as input_dir:
# Load user code
task_module = _importlib.import_module(task_module)
task_def = getattr(task_module, task_name)
if not test:
local_inputs_file = input_dir.get_named_tempfile('inputs.pb')
# Handle inputs/outputs for array job.
if _os.environ.get('BATCH_JOB_ARRAY_INDEX_VAR_NAME'):
job_index = _compute_array_job_index()
# TODO: Perhaps remove. This is a workaround to an issue we perceived with limited entropy in
# TODO: AWS batch array jobs.
_flyte_random.seed_flyte_random(
"{} {} {}".format(
_random.random(),
_datetime.datetime.utcnow(),
job_index
)
)
# If an ArrayTask is discoverable, the original job index may be different than the one specified in
# the environment variable. Look up the correct input/outputs in the index lookup mapping file.
job_index = _map_job_index_to_child_index(input_dir, inputs, job_index)
inputs = _os.path.join(inputs, str(job_index), 'inputs.pb')
output_prefix = _os.path.join(output_prefix, str(job_index))
_data_proxy.Data.get_data(inputs, local_inputs_file)
input_proto = _utils.load_proto_from_file(_literals_pb2.LiteralMap, local_inputs_file)
_engine_loader.get_engine().get_task(task_def).execute(
_literal_models.LiteralMap.from_flyte_idl(input_proto),
context={'output_prefix': output_prefix}
)
@_click.command('pyflyte-execute')
@_click.option('--task-module', required=True)
@_click.option('--task-name', required=True)
@_click.option('--inputs', required=True)
@_click.option('--output-prefix', required=True)
@_click.option('--test', is_flag=True)
def execute_task_cmd(task_module, task_name, inputs, output_prefix, test):
_click.echo(_utils.get_version_message())
execute_task(task_module, task_name, inputs, output_prefix, test)
| [
"[email protected]"
] | |
24bde7208335cd60e375b68ff00e7581c4892bd2 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_bug.py | 72b907dcbd8c7bf8f279c2a1b40116f869da8dfd | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 600 | py |
# class header
class _BUG():
def __init__(self,):
self.name = "BUG"
self.definitions = [u'a very small insect', u'a bacteria or a virus causing an illness that is usually not serious: ', u'a mistake or problem in a computer program: ', u'a very small device fixed on to a phone or hidden in a room, that allows you to listen to what people are saying without them knowing', u'a very strong enthusiasm for something: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
aa46cc71da38d2f98ed82ae05dd93bd331c9be5f | a5937c0b4d122fa3189fee6414d5be1316d9c4f9 | /src/eteaching.policy/eteaching/policy/tests/base.py | 354a81695bbf30c41210153f374cad989432b7ec | [] | no_license | zopyx/eteaching.org | e3eac4e53506cd9b1c65ac681a3138a5c7ac99b7 | d326cbf7734f538132df84290e768625df43ada6 | refs/heads/master | 2020-12-25T19:14:59.692335 | 2013-12-04T09:47:19 | 2013-12-04T09:47:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,489 | py | ################################################################
# eteaching.policy
# (C) 2013, ZOPYX Ltd.
################################################################
import os
import unittest2
from plone.app.testing import PloneSandboxLayer
from plone.app.testing import applyProfile
from plone.app.testing import PLONE_FIXTURE
from plone.app.testing import IntegrationTesting
from plone.app.testing import setRoles, login
from plone.testing import z2
from zope.configuration import xmlconfig
from AccessControl.SecurityManagement import newSecurityManager
from zope.component import getUtility
import plone.app.contenttypes
import plone.app.widgets
import plone.app.event
import eteaching.policy
import z3c.jbot
class PolicyFixture(PloneSandboxLayer):
defaultBases = (PLONE_FIXTURE,)
def setUpZope(self, app, configurationContext):
xmlconfig.file('meta.zcml', z3c.jbot, context=configurationContext)
for mod in [plone.app.contenttypes,
plone.app.widgets,
plone.app.event,
eteaching.policy]:
xmlconfig.file('configure.zcml', mod, context=configurationContext)
# Install product and call its initialize() function
z2.installProduct(app, 'Products.DateRecurringIndex')
# z2.installProduct(app, 'eteaching.policy')
def setUpPloneSite(self, portal):
# Install into Plone site using portal_setup
applyProfile(portal, 'eteaching.policy:default')
portal.acl_users.userFolderAddUser('god', 'dummy', ['Manager'], [])
setRoles(portal, 'god', ['Manager'])
portal.acl_users.userFolderAddUser('ppr', 'dummy', ['PPR'], [])
setRoles(portal, 'ppr', ['Member', 'PPR'])
portal.acl_users.userFolderAddUser('member', 'dummy', ['Member'], [])
setRoles(portal, 'member', ['Member'])
login(portal, 'god')
def tearDownZope(self, app):
# Uninstall product
z2.uninstallProduct(app, 'eteaching.policy')
POLICY_FIXTURE = PolicyFixture()
POLICY_INTEGRATION_TESTING = IntegrationTesting(bases=(POLICY_FIXTURE,), name="PolicyFixture:Integration")
class TestBase(unittest2.TestCase):
layer = POLICY_INTEGRATION_TESTING
@property
def portal(self):
return self.layer['portal']
def login(self, uid='god'):
""" Login as manager """
user = self.portal.acl_users.getUser(uid)
newSecurityManager(None, user.__of__(self.portal.acl_users))
| [
"[email protected]"
] | |
f59946c1cad0866529f675844754b8f4572cffc7 | c25a17f0f82c2eebca55bbe180f4c2ccbbf00292 | /01_Jump_to_python/Chap06/6장_practice/practice3_게시물.py | 3a62507a88248db26e85f59d32ca417fa4481483 | [] | no_license | superbeom97/jumpjump | a0a4da6f0df0483ef0cef9833b5fe0402ec63c9c | fc45efce2a2b00c614aa5aa54b36be1572ed40ce | refs/heads/master | 2021-09-15T09:35:16.903857 | 2018-05-30T00:00:59 | 2018-05-30T00:00:59 | 111,883,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | def getTotalPage(m, n):
    # m = total number of posts, n = posts per page; total pages = ceil(m / n)
    try:
        m, n = int(m), int(n)
        s = -(-m // n)  # ceiling division without floats
        print("Total posts: %s, posts per page: %s, total pages: %s" % (m, n, s))
    except ValueError:
        pass
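# Editor's note (illustrative): getTotalPage(5, 10) prints 1 total page,
# getTotalPage(15, 10) prints 2, and getTotalPage(30, 10) prints 3.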
f = open("D:\Python_workspace\jumpjump\Chap06\\6장_practice\\condition.txt", 'r')
for i in f.readlines():
aa = i.split()
m = aa[0]
n = aa[1]
getTotalPage(m, n) | [
"[email protected]"
] | |
b44861acb3e5969de6120b8f0592726c43630eb7 | 9cef1dc0a6a7b95ceb8a2d892bc39e9a0d15b681 | /python-novaclient/novaclient/shell.py | 027f1cc602c451dab81f508016b936a44c5ca779 | [
"Apache-2.0",
"BSD-2-Clause"
] | permissive | bopopescu/OpenStack-CVRM-1 | d2a4353cfe6d53634456e43a726698bd705db1db | fc0128258bf7417c6b9e1181d032529efbb08c42 | refs/heads/master | 2022-11-22T01:50:05.586113 | 2015-12-15T07:56:22 | 2015-12-15T07:57:01 | 282,140,514 | 0 | 0 | null | 2020-07-24T06:24:49 | 2020-07-24T06:24:48 | null | UTF-8 | Python | false | false | 33,880 | py | # Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Command-line interface to the OpenStack Nova API.
"""
from __future__ import print_function
import argparse
import getpass
import glob
import imp
import itertools
import logging
import os
import pkgutil
import sys
import time
from keystoneclient.auth.identity.generic import password
from keystoneclient.auth.identity.generic import token
from keystoneclient.auth.identity import v3 as identity
from keystoneclient import session as ksession
from oslo.utils import encodeutils
from oslo.utils import strutils
import pkg_resources
import six
HAS_KEYRING = False
all_errors = ValueError
try:
import keyring
HAS_KEYRING = True
except ImportError:
pass
import novaclient
import novaclient.auth_plugin
from novaclient import client
from novaclient import exceptions as exc
import novaclient.extension
from novaclient.i18n import _
from novaclient.openstack.common import cliutils
from novaclient import utils
from novaclient.v1_1 import shell as shell_v1_1
from novaclient.v3 import shell as shell_v3
DEFAULT_OS_COMPUTE_API_VERSION = "1.1"
DEFAULT_NOVA_ENDPOINT_TYPE = 'publicURL'
# NOTE(cyeoh): Having the service type dependent on the API version
# is pretty ugly, but we have to do this because traditionally the
# catalog entry for compute points directly to the V2 API rather than
# the root, and then doing version discovery.
DEFAULT_NOVA_SERVICE_TYPE_MAP = {'1.1': 'compute',
'2': 'compute',
'3': 'computev3'}
logger = logging.getLogger(__name__)
def positive_non_zero_float(text):
if text is None:
return None
try:
value = float(text)
except ValueError:
msg = _("%s must be a float") % text
raise argparse.ArgumentTypeError(msg)
if value <= 0:
msg = _("%s must be greater than 0") % text
raise argparse.ArgumentTypeError(msg)
return value
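# Editor's note (illustrative): positive_non_zero_float("1.5") returns 1.5,
# while "0" or "abc" raise argparse.ArgumentTypeError.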
class SecretsHelper(object):
def __init__(self, args, client):
self.args = args
self.client = client
self.key = None
self._password = None
def _validate_string(self, text):
if text is None or len(text) == 0:
return False
return True
def _make_key(self):
if self.key is not None:
return self.key
keys = [
self.client.auth_url,
self.client.projectid,
self.client.user,
self.client.region_name,
self.client.endpoint_type,
self.client.service_type,
self.client.service_name,
self.client.volume_service_name,
]
for (index, key) in enumerate(keys):
if key is None:
keys[index] = '?'
else:
keys[index] = str(keys[index])
self.key = "/".join(keys)
return self.key
def _prompt_password(self, verify=True):
pw = None
if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty():
# Check for Ctl-D
try:
while True:
pw1 = getpass.getpass('OS Password: ')
if verify:
pw2 = getpass.getpass('Please verify: ')
else:
pw2 = pw1
if pw1 == pw2 and self._validate_string(pw1):
pw = pw1
break
except EOFError:
pass
return pw
def save(self, auth_token, management_url, tenant_id):
if not HAS_KEYRING or not self.args.os_cache:
return
if (auth_token == self.auth_token and
management_url == self.management_url):
# Nothing changed....
return
if not all([management_url, auth_token, tenant_id]):
raise ValueError(_("Unable to save empty management url/auth "
"token"))
value = "|".join([str(auth_token),
str(management_url),
str(tenant_id)])
keyring.set_password("novaclient_auth", self._make_key(), value)
@property
def password(self):
# Cache password so we prompt user at most once
if self._password:
pass
elif self._validate_string(self.args.os_password):
self._password = self.args.os_password
else:
verify_pass = strutils.bool_from_string(
utils.env("OS_VERIFY_PASSWORD", default=False), True)
self._password = self._prompt_password(verify_pass)
if not self._password:
raise exc.CommandError(
'Expecting a password provided via either '
'--os-password, env[OS_PASSWORD], or '
'prompted response')
return self._password
@property
def management_url(self):
if not HAS_KEYRING or not self.args.os_cache:
return None
management_url = None
try:
block = keyring.get_password('novaclient_auth', self._make_key())
if block:
_token, management_url, _tenant_id = block.split('|', 2)
except all_errors:
pass
return management_url
@property
def auth_token(self):
# Now is where it gets complicated since we
# want to look into the keyring module, if it
# exists and see if anything was provided in that
# file that we can use.
if not HAS_KEYRING or not self.args.os_cache:
return None
token = None
try:
block = keyring.get_password('novaclient_auth', self._make_key())
if block:
token, _management_url, _tenant_id = block.split('|', 2)
except all_errors:
pass
return token
@property
def tenant_id(self):
if not HAS_KEYRING or not self.args.os_cache:
return None
tenant_id = None
try:
block = keyring.get_password('novaclient_auth', self._make_key())
if block:
_token, _management_url, tenant_id = block.split('|', 2)
except all_errors:
pass
return tenant_id
class NovaClientArgumentParser(argparse.ArgumentParser):
def __init__(self, *args, **kwargs):
super(NovaClientArgumentParser, self).__init__(*args, **kwargs)
def error(self, message):
"""error(message: string)
Prints a usage message incorporating the message to stderr and
exits.
"""
self.print_usage(sys.stderr)
# FIXME(lzyeval): if changes occur in argparse.ArgParser._check_value
choose_from = ' (choose from'
progparts = self.prog.partition(' ')
self.exit(2, _("error: %(errmsg)s\nTry '%(mainp)s help %(subp)s'"
" for more information.\n") %
{'errmsg': message.split(choose_from)[0],
'mainp': progparts[0],
'subp': progparts[2]})
class OpenStackComputeShell(object):
times = []
def _append_global_identity_args(self, parser):
# Register the CLI arguments that have moved to the session object.
ksession.Session.register_cli_options(parser)
parser.set_defaults(insecure=utils.env('NOVACLIENT_INSECURE',
default=False))
identity.Password.register_argparse_arguments(parser)
parser.set_defaults(os_username=utils.env('OS_USERNAME',
'NOVA_USERNAME'))
parser.set_defaults(os_password=utils.env('OS_PASSWORD',
'NOVA_PASSWORD'))
parser.set_defaults(os_auth_url=utils.env('OS_AUTH_URL', 'NOVA_URL'))
def get_base_parser(self):
parser = NovaClientArgumentParser(
prog='nova',
description=__doc__.strip(),
epilog='See "nova help COMMAND" '
'for help on a specific command.',
add_help=False,
formatter_class=OpenStackHelpFormatter,
)
# Global arguments
parser.add_argument('-h', '--help',
action='store_true',
help=argparse.SUPPRESS,
)
parser.add_argument('--version',
action='version',
version=novaclient.__version__)
parser.add_argument('--debug',
default=False,
action='store_true',
help=_("Print debugging output"))
parser.add_argument('--os-cache',
default=strutils.bool_from_string(
utils.env('OS_CACHE', default=False), True),
action='store_true',
help=_("Use the auth token cache. Defaults to False if "
"env[OS_CACHE] is not set."))
parser.add_argument('--timings',
default=False,
action='store_true',
help=_("Print call timing info"))
parser.add_argument('--os-auth-token',
default=utils.env('OS_AUTH_TOKEN'),
help='Defaults to env[OS_AUTH_TOKEN]')
parser.add_argument('--os_username',
help=argparse.SUPPRESS)
parser.add_argument('--os_password',
help=argparse.SUPPRESS)
parser.add_argument('--os-tenant-name',
metavar='<auth-tenant-name>',
default=utils.env('OS_TENANT_NAME', 'NOVA_PROJECT_ID'),
help=_('Defaults to env[OS_TENANT_NAME].'))
parser.add_argument('--os_tenant_name',
help=argparse.SUPPRESS)
parser.add_argument('--os-tenant-id',
metavar='<auth-tenant-id>',
default=utils.env('OS_TENANT_ID'),
help=_('Defaults to env[OS_TENANT_ID].'))
parser.add_argument('--os_auth_url',
help=argparse.SUPPRESS)
parser.add_argument('--os-region-name',
metavar='<region-name>',
default=utils.env('OS_REGION_NAME', 'NOVA_REGION_NAME'),
help=_('Defaults to env[OS_REGION_NAME].'))
parser.add_argument('--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument('--os-auth-system',
metavar='<auth-system>',
default=utils.env('OS_AUTH_SYSTEM'),
help='Defaults to env[OS_AUTH_SYSTEM].')
parser.add_argument('--os_auth_system',
help=argparse.SUPPRESS)
parser.add_argument('--service-type',
metavar='<service-type>',
help=_('Defaults to compute for most actions'))
parser.add_argument('--service_type',
help=argparse.SUPPRESS)
parser.add_argument('--service-name',
metavar='<service-name>',
default=utils.env('NOVA_SERVICE_NAME'),
help=_('Defaults to env[NOVA_SERVICE_NAME]'))
parser.add_argument('--service_name',
help=argparse.SUPPRESS)
parser.add_argument('--volume-service-name',
metavar='<volume-service-name>',
default=utils.env('NOVA_VOLUME_SERVICE_NAME'),
help=_('Defaults to env[NOVA_VOLUME_SERVICE_NAME]'))
parser.add_argument('--volume_service_name',
help=argparse.SUPPRESS)
parser.add_argument('--os-endpoint-type',
metavar='<endpoint-type>',
dest='endpoint_type',
default=utils.env('NOVA_ENDPOINT_TYPE',
default=utils.env('OS_ENDPOINT_TYPE',
default=DEFAULT_NOVA_ENDPOINT_TYPE)),
help=_('Defaults to env[NOVA_ENDPOINT_TYPE], '
'env[OS_ENDPOINT_TYPE] or ')
+ DEFAULT_NOVA_ENDPOINT_TYPE + '.')
parser.add_argument('--endpoint-type',
help=argparse.SUPPRESS)
# NOTE(dtroyer): We can't add --endpoint_type here due to argparse
# thinking usage-list --end is ambiguous; but it
# works fine with only --endpoint-type present
# Go figure. I'm leaving this here for doc purposes.
# parser.add_argument('--endpoint_type',
# help=argparse.SUPPRESS)
parser.add_argument('--os-compute-api-version',
metavar='<compute-api-ver>',
default=utils.env('OS_COMPUTE_API_VERSION',
default=DEFAULT_OS_COMPUTE_API_VERSION),
help=_('Accepts 1.1 or 3, '
'defaults to env[OS_COMPUTE_API_VERSION].'))
parser.add_argument('--os_compute_api_version',
help=argparse.SUPPRESS)
parser.add_argument('--bypass-url',
metavar='<bypass-url>',
dest='bypass_url',
default=utils.env('NOVACLIENT_BYPASS_URL'),
help="Use this API endpoint instead of the Service Catalog. "
"Defaults to env[NOVACLIENT_BYPASS_URL]")
parser.add_argument('--bypass_url',
help=argparse.SUPPRESS)
# The auth-system-plugins might require some extra options
novaclient.auth_plugin.load_auth_system_opts(parser)
self._append_global_identity_args(parser)
return parser
def get_subcommand_parser(self, version):
parser = self.get_base_parser()
self.subcommands = {}
subparsers = parser.add_subparsers(metavar='<subcommand>')
try:
actions_module = {
'1.1': shell_v1_1,
'2': shell_v1_1,
'3': shell_v3,
}[version]
except KeyError:
actions_module = shell_v1_1
self._find_actions(subparsers, actions_module)
self._find_actions(subparsers, self)
for extension in self.extensions:
self._find_actions(subparsers, extension.module)
self._add_bash_completion_subparser(subparsers)
return parser
def _discover_extensions(self, version):
extensions = []
for name, module in itertools.chain(
self._discover_via_python_path(),
self._discover_via_contrib_path(version),
self._discover_via_entry_points()):
extension = novaclient.extension.Extension(name, module)
extensions.append(extension)
return extensions
def _discover_via_python_path(self):
for (module_loader, name, _ispkg) in pkgutil.iter_modules():
if name.endswith('_python_novaclient_ext'):
if not hasattr(module_loader, 'load_module'):
# Python 2.6 compat: actually get an ImpImporter obj
module_loader = module_loader.find_module(name)
module = module_loader.load_module(name)
if hasattr(module, 'extension_name'):
name = module.extension_name
yield name, module
def _discover_via_contrib_path(self, version):
module_path = os.path.dirname(os.path.abspath(__file__))
version_str = "v%s" % version.replace('.', '_')
ext_path = os.path.join(module_path, version_str, 'contrib')
ext_glob = os.path.join(ext_path, "*.py")
for ext_path in glob.iglob(ext_glob):
name = os.path.basename(ext_path)[:-3]
if name == "__init__":
continue
module = imp.load_source(name, ext_path)
yield name, module
def _discover_via_entry_points(self):
for ep in pkg_resources.iter_entry_points('novaclient.extension'):
name = ep.name
module = ep.load()
yield name, module
def _add_bash_completion_subparser(self, subparsers):
subparser = subparsers.add_parser('bash_completion',
add_help=False,
formatter_class=OpenStackHelpFormatter
)
self.subcommands['bash_completion'] = subparser
subparser.set_defaults(func=self.do_bash_completion)
def _find_actions(self, subparsers, actions_module):
for attr in (a for a in dir(actions_module) if a.startswith('do_')):
# I prefer to be hyphen-separated instead of underscores.
command = attr[3:].replace('_', '-')
callback = getattr(actions_module, attr)
desc = callback.__doc__ or ''
action_help = desc.strip()
arguments = getattr(callback, 'arguments', [])
subparser = subparsers.add_parser(command,
help=action_help,
description=desc,
add_help=False,
formatter_class=OpenStackHelpFormatter
)
subparser.add_argument('-h', '--help',
action='help',
help=argparse.SUPPRESS,
)
self.subcommands[command] = subparser
for (args, kwargs) in arguments:
subparser.add_argument(*args, **kwargs)
subparser.set_defaults(func=callback)
def setup_debugging(self, debug):
if not debug:
return
streamformat = "%(levelname)s (%(module)s:%(lineno)d) %(message)s"
# Set up the root logger to debug so that the submodules can
# print debug messages
logging.basicConfig(level=logging.DEBUG,
format=streamformat)
def _get_keystone_auth(self, session, auth_url, **kwargs):
auth_token = kwargs.pop('auth_token', None)
if auth_token:
return token.Token(auth_url, auth_token, **kwargs)
else:
return password.Password(auth_url,
username=kwargs.pop('username'),
user_id=kwargs.pop('user_id'),
password=kwargs.pop('password'),
user_domain_id=kwargs.pop('user_domain_id'),
user_domain_name=kwargs.pop('user_domain_name'),
**kwargs)
def main(self, argv):
# Parse args once to find version and debug settings
parser = self.get_base_parser()
(options, args) = parser.parse_known_args(argv)
self.setup_debugging(options.debug)
# Discover available auth plugins
novaclient.auth_plugin.discover_auth_systems()
# build available subcommands based on version
self.extensions = self._discover_extensions(
options.os_compute_api_version)
self._run_extension_hooks('__pre_parse_args__')
# NOTE(dtroyer): Hackery to handle --endpoint_type due to argparse
# thinking usage-list --end is ambiguous; but it
# works fine with only --endpoint-type present
# Go figure.
if '--endpoint_type' in argv:
spot = argv.index('--endpoint_type')
argv[spot] = '--endpoint-type'
subcommand_parser = self.get_subcommand_parser(
options.os_compute_api_version)
self.parser = subcommand_parser
if options.help or not argv:
subcommand_parser.print_help()
return 0
args = subcommand_parser.parse_args(argv)
self._run_extension_hooks('__post_parse_args__', args)
# Short-circuit and deal with help right away.
if args.func == self.do_help:
self.do_help(args)
return 0
elif args.func == self.do_bash_completion:
self.do_bash_completion(args)
return 0
os_username = args.os_username
os_user_id = args.os_user_id
os_password = None # Fetched and set later as needed
os_tenant_name = args.os_tenant_name
os_tenant_id = args.os_tenant_id
os_auth_url = args.os_auth_url
os_region_name = args.os_region_name
os_auth_system = args.os_auth_system
endpoint_type = args.endpoint_type
insecure = args.insecure
service_type = args.service_type
service_name = args.service_name
volume_service_name = args.volume_service_name
bypass_url = args.bypass_url
os_cache = args.os_cache
cacert = args.os_cacert
timeout = args.timeout
keystone_session = None
keystone_auth = None
# We may have either, both or none of these.
# If we have both, we don't need USERNAME, PASSWORD etc.
# Fill in the blanks from the SecretsHelper if possible.
# Finally, authenticate unless we have both.
# Note if we don't auth we probably don't have a tenant ID so we can't
# cache the token.
auth_token = args.os_auth_token if args.os_auth_token else None
management_url = bypass_url if bypass_url else None
if os_auth_system and os_auth_system != "keystone":
auth_plugin = novaclient.auth_plugin.load_plugin(os_auth_system)
else:
auth_plugin = None
if not endpoint_type:
endpoint_type = DEFAULT_NOVA_ENDPOINT_TYPE
if not service_type:
os_compute_api_version = (options.os_compute_api_version or
DEFAULT_OS_COMPUTE_API_VERSION)
try:
service_type = DEFAULT_NOVA_SERVICE_TYPE_MAP[
os_compute_api_version]
except KeyError:
service_type = DEFAULT_NOVA_SERVICE_TYPE_MAP[
DEFAULT_OS_COMPUTE_API_VERSION]
service_type = cliutils.get_service_type(args.func) or service_type
# If we have an auth token but no management_url, we must auth anyway.
# Expired tokens are handled by client.py:_cs_request
must_auth = not (cliutils.isunauthenticated(args.func)
or (auth_token and management_url))
# Do not use Keystone session for cases with no session support. The
# presence of auth_plugin means os_auth_system is present and is not
# keystone.
use_session = True
if auth_plugin or bypass_url or os_cache or volume_service_name:
use_session = False
        # FIXME(usrleon): There should be a restriction on project id here,
        # the same as for os_username or os_password, but for backward
        # compatibility it is not enforced.
if must_auth:
if auth_plugin:
auth_plugin.parse_opts(args)
if not auth_plugin or not auth_plugin.opts:
if not os_username and not os_user_id:
raise exc.CommandError(_("You must provide a username "
"or user id via --os-username, --os-user-id, "
"env[OS_USERNAME] or env[OS_USER_ID]"))
if not any([args.os_tenant_name, args.os_tenant_id,
args.os_project_id, args.os_project_name]):
raise exc.CommandError(_("You must provide a project name or"
" project id via --os-project-name,"
" --os-project-id, env[OS_PROJECT_ID]"
" or env[OS_PROJECT_NAME]. You may"
" use os-project and os-tenant"
" interchangeably."))
if not os_auth_url:
if os_auth_system and os_auth_system != 'keystone':
os_auth_url = auth_plugin.get_auth_url()
if not os_auth_url:
raise exc.CommandError(_("You must provide an auth url "
"via either --os-auth-url or env[OS_AUTH_URL] "
"or specify an auth_system which defines a "
"default url with --os-auth-system "
"or env[OS_AUTH_SYSTEM]"))
project_id = args.os_project_id or args.os_tenant_id
project_name = args.os_project_name or args.os_tenant_name
if use_session:
# Not using Nova auth plugin, so use keystone
start_time = time.time()
keystone_session = ksession.Session.load_from_cli_options(args)
keystone_auth = self._get_keystone_auth(
keystone_session,
args.os_auth_url,
username=args.os_username,
user_id=args.os_user_id,
user_domain_id=args.os_user_domain_id,
user_domain_name=args.os_user_domain_name,
password=args.os_password,
auth_token=args.os_auth_token,
project_id=project_id,
project_name=project_name,
project_domain_id=args.os_project_domain_id,
project_domain_name=args.os_project_domain_name)
end_time = time.time()
self.times.append(('%s %s' % ('auth_url', args.os_auth_url),
start_time, end_time))
if (options.os_compute_api_version and
options.os_compute_api_version != '1.0'):
if not any([args.os_tenant_id, args.os_tenant_name,
args.os_project_id, args.os_project_name]):
raise exc.CommandError(_("You must provide a project name or"
" project id via --os-project-name,"
" --os-project-id, env[OS_PROJECT_ID]"
" or env[OS_PROJECT_NAME]. You may"
" use os-project and os-tenant"
" interchangeably."))
if not os_auth_url:
raise exc.CommandError(_("You must provide an auth url "
"via either --os-auth-url or env[OS_AUTH_URL]"))
completion_cache = client.CompletionCache(os_username, os_auth_url)
self.cs = client.Client(options.os_compute_api_version,
os_username, os_password, os_tenant_name,
tenant_id=os_tenant_id, user_id=os_user_id,
auth_url=os_auth_url, insecure=insecure,
region_name=os_region_name, endpoint_type=endpoint_type,
extensions=self.extensions, service_type=service_type,
service_name=service_name, auth_system=os_auth_system,
auth_plugin=auth_plugin, auth_token=auth_token,
volume_service_name=volume_service_name,
timings=args.timings, bypass_url=bypass_url,
os_cache=os_cache, http_log_debug=options.debug,
cacert=cacert, timeout=timeout,
session=keystone_session, auth=keystone_auth,
completion_cache=completion_cache)
        # Now check for the password/token; pieces of the identifying
        # keyring key can come from the underlying client
if must_auth:
helper = SecretsHelper(args, self.cs.client)
if (auth_plugin and auth_plugin.opts and
"os_password" not in auth_plugin.opts):
use_pw = False
else:
use_pw = True
tenant_id = helper.tenant_id
# Allow commandline to override cache
if not auth_token:
auth_token = helper.auth_token
if not management_url:
management_url = helper.management_url
if tenant_id and auth_token and management_url:
self.cs.client.tenant_id = tenant_id
self.cs.client.auth_token = auth_token
self.cs.client.management_url = management_url
self.cs.client.password_func = lambda: helper.password
elif use_pw:
# We're missing something, so auth with user/pass and save
# the result in our helper.
self.cs.client.password = helper.password
self.cs.client.keyring_saver = helper
try:
# This does a couple of bits which are useful even if we've
# got the token + service URL already. It exits fast in that case.
if not cliutils.isunauthenticated(args.func):
if not use_session:
# Only call authenticate() if Nova auth plugin is used.
# If keystone is used, authentication is handled as part
# of session.
self.cs.authenticate()
except exc.Unauthorized:
raise exc.CommandError(_("Invalid OpenStack Nova credentials."))
except exc.AuthorizationFailure:
raise exc.CommandError(_("Unable to authorize user"))
if options.os_compute_api_version == "3" and service_type != 'image':
# NOTE(cyeoh): create an image based client because the
# images api is no longer proxied by the V3 API and we
# sometimes need to be able to look up images information
# via glance when connected to the nova api.
image_service_type = 'image'
# NOTE(hdd): the password is needed again because creating a new
# Client without specifying bypass_url will force authentication.
# We can't reuse self.cs's bypass_url, because that's the URL for
# the nova service; we need to get glance's URL for this Client
if not os_password:
os_password = helper.password
self.cs.image_cs = client.Client(
options.os_compute_api_version, os_username,
os_password, os_tenant_name, tenant_id=os_tenant_id,
auth_url=os_auth_url, insecure=insecure,
region_name=os_region_name, endpoint_type=endpoint_type,
extensions=self.extensions, service_type=image_service_type,
service_name=service_name, auth_system=os_auth_system,
auth_plugin=auth_plugin,
volume_service_name=volume_service_name,
timings=args.timings, bypass_url=bypass_url,
os_cache=os_cache, http_log_debug=options.debug,
session=keystone_session, auth=keystone_auth,
cacert=cacert, timeout=timeout)
args.func(self.cs, args)
if args.timings:
self._dump_timings(self.times + self.cs.get_timings())
def _dump_timings(self, timings):
class Tyme(object):
def __init__(self, url, seconds):
self.url = url
self.seconds = seconds
results = [Tyme(url, end - start) for url, start, end in timings]
total = 0.0
for tyme in results:
total += tyme.seconds
results.append(Tyme("Total", total))
utils.print_list(results, ["url", "seconds"], sortby_index=None)
def _run_extension_hooks(self, hook_type, *args, **kwargs):
"""Run hooks for all registered extensions."""
for extension in self.extensions:
extension.run_hooks(hook_type, *args, **kwargs)
def do_bash_completion(self, _args):
"""
Prints all of the commands and options to stdout so that the
nova.bash_completion script doesn't have to hard code them.
"""
commands = set()
options = set()
for sc_str, sc in self.subcommands.items():
commands.add(sc_str)
for option in sc._optionals._option_string_actions.keys():
options.add(option)
commands.remove('bash-completion')
commands.remove('bash_completion')
print(' '.join(commands | options))
@utils.arg(
'command',
metavar='<subcommand>',
nargs='?',
help='Display help for <subcommand>')
def do_help(self, args):
"""
Display help about this program or one of its subcommands.
"""
if args.command:
if args.command in self.subcommands:
self.subcommands[args.command].print_help()
else:
raise exc.CommandError(_("'%s' is not a valid subcommand") %
args.command)
else:
self.parser.print_help()
# I'm picky about my shell help.
class OpenStackHelpFormatter(argparse.HelpFormatter):
def __init__(self, prog, indent_increment=2, max_help_position=32,
width=None):
super(OpenStackHelpFormatter, self).__init__(prog, indent_increment,
max_help_position, width)
def start_section(self, heading):
# Title-case the headings
heading = '%s%s' % (heading[0].upper(), heading[1:])
super(OpenStackHelpFormatter, self).start_section(heading)
def main():
try:
argv = [encodeutils.safe_decode(a) for a in sys.argv[1:]]
OpenStackComputeShell().main(argv)
except Exception as e:
logger.debug(e, exc_info=1)
details = {'name': encodeutils.safe_encode(e.__class__.__name__),
'msg': encodeutils.safe_encode(six.text_type(e))}
print("ERROR (%(name)s): %(msg)s" % details,
file=sys.stderr)
sys.exit(1)
except KeyboardInterrupt as e:
print("... terminating nova client", file=sys.stderr)
sys.exit(130)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
9f9ee2d6aecd744bf78e114b35591f707504a091 | e288180c977c8fccf31c00bb74b7e8f56ee69303 | /vkrb/search/serializers.py | 65be7a59e320ecf0806ee5672e5dfd061575e8ff | [] | no_license | kaluginadaria/vkrb-back | 32e0c9aef7a647ea2a2e399c8d999622e993a433 | d037baaa9f17cb038d41dda5dfbf1dbb56acdf90 | refs/heads/master | 2022-12-07T23:36:32.902662 | 2019-05-22T15:06:31 | 2019-05-22T15:06:31 | 179,382,015 | 0 | 0 | null | 2022-11-22T02:38:25 | 2019-04-03T22:52:30 | Python | UTF-8 | Python | false | false | 3,195 | py | from django.apps import apps
from django_serializer.serializer.base import ModelSerializer
from django_serializer.serializer.fields import SerializerField
from vkrb.activity.serializers import ActivityGiItemSerializer, ActivitySiItemSerializer
from vkrb.calc.serializers import FormulaSerializer
from vkrb.digest.serializers import DigestSerializer, ShortArticleSerializer, ArticleSerializer
from vkrb.education.serializers import (
LiteratureSerializer,
ScienceArticleSerializer,
CatalogItemSerializer,
InternalEducationSerializer)
from vkrb.event.serializers import EventSerializer
from vkrb.matrix.serializers import MatrixItemSerializer
from vkrb.newsitem.serializers import NewsItemSerializer
from vkrb.recourse.serializers import RecourseSerializer
from vkrb.search.models import SearchEntity
from vkrb.text.serializers import TextSerializer
class SearchEntitySerializer(ModelSerializer):
SERIALIZERS = {
'education.literature': LiteratureSerializer,
'education.sciencearticle': ScienceArticleSerializer,
'education.catalogitem': CatalogItemSerializer,
'education.internaleducation': InternalEducationSerializer,
'newsitem.newsitem': NewsItemSerializer,
'digest.digest': DigestSerializer,
'digest.article': ArticleSerializer,
'text.text': TextSerializer,
'recourse.recourse': RecourseSerializer,
'event.event': EventSerializer,
'calc.formula': FormulaSerializer,
'matrix.matrixitem': MatrixItemSerializer,
'activity.giitem': ActivityGiItemSerializer,
'activity.siitem': ActivitySiItemSerializer
}
entity = SerializerField(source='get_entity')
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super().__init__(*args, **kwargs)
class Meta:
model = SearchEntity
exclude = ('search_vector', 'body', 'id')
def get_serializer(self, entity_type, entity):
kwargs = {}
serializer_class = self.SERIALIZERS.get(entity_type)
if serializer_class is None:
raise ValueError(f'serializer for entity `{entity_type}` '
f'does not exist')
if issubclass(serializer_class, (RecourseSerializer,)):
kwargs['request_user'] = self.user
elif not issubclass(serializer_class, (
TextSerializer,
EventSerializer,
MatrixItemSerializer
)):
kwargs['user'] = self.user
return serializer_class(entity, **kwargs)
def get_entity(self, obj):
entity = apps.get_model(
obj.entity_type
).objects.filter(pk=obj.entity_id).first()
serializer = self.get_serializer(obj.entity_type, entity)
return serializer.serialize()
def serialize(self):
reformat_res = {}
res = super().serialize()
for item in res:
entity_type = item['entity_type']
entity = item['entity']
if not entity:
continue
reformat_res.setdefault(entity_type, [])
reformat_res[entity_type].append(entity)
return reformat_res
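    # Illustrative shape of the value serialize() returns (an assumption based
    # on the grouping logic above, not taken from project docs):
    #   {'newsitem.newsitem': [<serialized news item>, ...],
    #    'event.event': [<serialized event>, ...]}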
| [
"[email protected]"
] | |
7630b551f1163ab91f4b5075737ad96d95b94763 | 866de2c682a63e255d6d0016c0eeee70b98dd490 | /unifiedactivitypoints/studentauth/urls.py | 98510be2b8c6f956bd8d4c60c6d25e5a5dec37c8 | [
"MIT"
] | permissive | FossMec/activityPointsApp | 10c47e6a96ebe44b1310ad4f3c695055e35d69b8 | 2b58c96bfcd11327883dcd5bd7ddc1feb617ee49 | refs/heads/master | 2021-08-08T21:39:53.212979 | 2017-11-11T09:33:10 | 2017-11-11T09:33:10 | 107,976,551 | 1 | 8 | null | 2017-11-11T07:24:08 | 2017-10-23T12:05:04 | JavaScript | UTF-8 | Python | false | false | 237 | py | from django.conf.urls import url
from studentauth import views
urlpatterns = [
url(r'^signup/$',views.signup,name="signuppage"),
url(r'^$',views.Login.as_view(),name="login"),
    url(r'^success/$',views.Success.as_view(),name='Success'),
]
| [
"[email protected]"
] | |
f8bcf27f520284575ff8cc9cda7c4884fbd7ad04 | 788925d9dd5f98c8e2453acc6fb46aee2d638c80 | /test.py | ca2d956202e11a63a2e4ff639a3b098bebce4404 | [] | no_license | abilian/abilian-sbe-demo | 7e5a7e518e20886dbf73e96b2b054c6c1f846d58 | bcd6a97060d28429e16bd2f7cb209e22d7bf1c24 | refs/heads/master | 2021-01-22T02:53:38.157020 | 2013-11-22T23:02:43 | 2013-11-22T23:02:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | #!./bin/python
import pexpect
import urllib
import time
# Some random number
PORT = 4034
HOME = "http://0.0.0.0:{}/".format(PORT)
p = pexpect.spawn("./manage.py run -p {}".format(PORT))
try:
p.expect("Running on {}".format(HOME))
# Just in case
time.sleep(5)
page = urllib.urlopen(HOME).read()
assert "Welcome to Abilian" in page
finally:
p.kill(9)
p.close()
| [
"[email protected]"
] | |
db507e34af96e154ddea79d5af892cdf6b728bbc | 15608a179d97e399ca08be0f017296c4f4ded881 | /releases/migrations/0001_squashed_0004_make_release_date_nullable.py | cf9cc83a5150cd40db8cbb8c60d47dc7806e7af4 | [
"BSD-3-Clause"
] | permissive | shivangi1801/djangoproject.com | 1f33bef39b8c3cce136f47008eee5d4aae8d6aa4 | e51eba97f7e226d2e9deb31f8c23b1e00df04b9c | refs/heads/master | 2021-01-16T20:37:51.813671 | 2016-02-08T22:51:01 | 2016-02-08T22:51:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,619 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-21 07:11
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Release',
fields=[
('version', models.CharField(max_length=16, primary_key=True, serialize=False)),
                ('date', models.DateField(blank=True, default=datetime.date.today, help_text="Leave blank if the release date isn't known yet, typically if you're creating the final release just after the alpha because you want to build docs for the upcoming version.", null=True, verbose_name='Release date')),
('major', models.PositiveSmallIntegerField(editable=False)),
('minor', models.PositiveSmallIntegerField(editable=False)),
('micro', models.PositiveSmallIntegerField(editable=False)),
('status', models.CharField(choices=[('a', 'alpha'), ('b', 'beta'), ('c', 'release candidate'), ('f', 'final')], editable=False, max_length=1)),
('iteration', models.PositiveSmallIntegerField(editable=False)),
('is_lts', models.BooleanField(default=False, verbose_name='Long term support release')),
('eol_date', models.DateField(blank=True, help_text="Leave blank if the end of life date isn't known yet, typically because it depends on the release date of a later version.", null=True, verbose_name='End of life date')),
],
),
]
| [
"[email protected]"
] | |
5fb03f371d4e18de6a83f4e212e3babff6434115 | 5e36d216d31f9f5d56722e230cb468beba15c2a8 | /src/scriptsite/main/subversion.py | a5c8bff6f21c04cc0d890afe77fcd47415a3d501 | [] | no_license | isotoma/ScriptSite | d988a0da8b70681502a1b02eb1f78acfe035f545 | f401be7e2c6455208ac881d72559e5819fd2ecbd | refs/heads/master | 2016-09-06T08:35:52.898581 | 2011-03-22T21:02:40 | 2011-03-22T21:02:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,277 | py | import tempfile
import os
import shutil
from datetime import datetime
from lxml import etree
import pysvn
from django.core.files import File
from scriptsite import settings
from scriptsite.main.models import TestScript
def make_script_model_from_file(script_file, flavour, revision):
""" Create a model object from the file that we have created """
ts = TestScript()
ts.flavour = flavour
ts.date_uploaded = datetime.now()
ts.script_file.save(flavour + '.xml', File(file(script_file)), save = False)
ts.revision = revision
return ts
def get_output_directory(root_path):
final_path = os.path.join(root_path, 'processed')
return final_path
def make_script_file_from_checkout(root_path, flavour):
""" Take our export, turn it into the single canonical XML file """
path_to_config = os.path.join(root_path, flavour + '.xml')
config_file = open(path_to_config).read()
# parse the config file into memory
config_tree = etree.fromstring(config_file)
final_test_doc = etree.Element("test_doc")
# find all the mbots in the config file
for mbot in config_tree.findall('mbot'):
final_mbot_element = etree.Element('test_group')
final_mbot_element.attrib['name'] = mbot.attrib['name']
name = mbot.attrib['name']
# available tests should live in a directory matching the name of the mbot
test_dir = os.path.join(root_path, name)
directory_available_tests = os.listdir(test_dir)
# make sure we only have xml files
available_tests = [x for x in directory_available_tests if x.endswith('.xml') and not x == name + '.xml']
# get the filenames to exclude from the config file
# add .xml as tests are specified by id, not filenames....
excluded_tests = [test.text +".xml" for test in mbot.findall('exclude')]
test_list = [os.path.join(test_dir, test) for test in available_tests if test not in excluded_tests]
test_list = sorted(test_list)
# Append our tests to the master test document
for test in test_list:
parsed_test = etree.parse(test)
final_mbot_element.append(parsed_test.getroot())
# add the metadata for the test
metadata = etree.parse(os.path.join(test_dir, name +'.xml'))
for child in metadata.getroot().getchildren():
final_mbot_element.append(child)
final_test_doc.append(final_mbot_element)
file_generation_time = datetime.now().strftime("%Y-%m-%d %H:%M")
final_test_doc.attrib['flavour'] = flavour
final_test_doc.attrib['generated'] = file_generation_time
final_test_doc.append(config_tree.find('script_version'))
# we are done here, write it to the same directory as the uploaded ones,
# and return
output_dir = get_output_directory(root_path)
os.makedirs(output_dir)
output_file = open(os.path.join(output_dir, flavour + ".xml"), 'w')
s = etree.tostring(final_test_doc, pretty_print = True).encode('UTF-8')
output_file.write(s)
return output_file.name
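# Sketch of the per-flavour config file the parser above expects (inferred
# from the parsing code; the root tag name and the values are assumptions):
#   <config>
#     <mbot name="smoke">
#       <exclude>test_login</exclude>
#     </mbot>
#     <script_version>1.2</script_version>
#   </config>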
def get_from_subversion(repo_url, revision, username, password, flavour):
""" Get the XML file from subversion """
svn_username = username
svn_password = password
# make a directory to checkout to
temp_dir = tempfile.mkdtemp()
export_dir = os.path.join(temp_dir, 'export')
print export_dir
# Get a checkout
client = pysvn.Client()
#set auth details
client.set_default_username(username)
client.set_default_password(password)
try:
# attempt an export (we don't need a checkout, just the files)
client.export(repo_url, export_dir)
finally:
# just in case
client.set_default_username("")
client.set_default_password("")
try:
# so, we've got stuff from subversion
# we should probably do something with it, no?
script_file = make_script_file_from_checkout(export_dir, flavour)
# now we have the file, make a db model
ts = make_script_model_from_file(script_file, flavour, "0")
ts.save()
finally:
# tidy up
shutil.rmtree(temp_dir)
# aand we're done, let's get out of here
return ts | [
"[email protected]"
] | |
60e5aa25031489608c8cf93ccd0f6cc8a77a0466 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/7/qjl.py | 892ee92a903e462c69168149377f635482795dba | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'qJL':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
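# Illustrative input line for main() above (an assumption drawn from the
# whitespace tokenising in printFunction: the quote characters stand alone):
#   qJL " hello world "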
"[email protected]"
] | |
65a90c400bd422e8ee24ab483c5144d7c5b7096d | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/420/usersdata/328/87412/submittedfiles/exe11.py | 271fb87e667691c2627e9c401cae10b6c33cfb19 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | # -*- coding: utf-8 -*-
n=int(input('Digite um número com 8 dígitos:'))
if (n<10000000):
    print('NAO SEI')
else:
    # the original line "def soma (x1+x2+...+x8)" was invalid syntax; the
    # inferred intent is to sum the eight digits of n
    soma=0
    for digito in str(n):
        soma=soma+int(digito)
    print(soma)
| [
"[email protected]"
] | |
0b869fa5754e8a99cf33d79e5c9b50c1c1dc606f | 25333fa1726e11adc66c25820751432d4eaebfdc | /alexa.py | d16a4b51fb45363e1a1bd19529bf1f430dba4733 | [] | no_license | aminhp93/alexa | a769d510b3373dd142721980d398a6253ed8430e | 03fa7b760302ec34de20f2cf28b356ff4e27ec45 | refs/heads/master | 2021-01-13T08:36:59.396756 | 2016-09-30T18:27:09 | 2016-09-30T18:27:09 | 69,312,807 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,983 | py | import logging
from random import randint
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
from twilio.rest import TwilioRestClient
print("I AM RIGHT HERE AT THE TOP OF THE FILE")
app = Flask(__name__)
ask = Ask(app, "/")
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
# --------------------Dont touch below!--------------------------
@ask.launch
def launch_skill():
welcome_msg = render_template('welcome')
return question(welcome_msg)
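# flask_ask's render_template() reads responses from a templates.yaml file
# next to the app; an illustrative entry (contents assumed, not from this
# repo):
#   welcome: Welcome to the Coding Dojo skill. What would you like to know?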
@ask.intent("DojoInfoIntent")
def dojo_info():
response = render_template("dojo_info_template")
return statement(response)
@ask.intent("AMAZON.HelpIntent")
def dojo_help():
response = render_template("help_template")
return question(response)
#--------OK Above-----------
# @ask.intent("DojoStackIntent", convert={'City': str})
# def dojo_stacks(City):
# response = ''
# if City == "San Jose":
# response = render_template("san_jose_stacks", city=City)
# elif City == "Seattle":
# response = render_template("seattle_stacks", city=City)
# elif City == "Chicago":
# response = render_template("chicago_stacks", city=City)
# elif City == "Dallas":
# response = render_template("dallas_stacks", city=City)
# elif City == "Burbank":
# response = render_template("burbank_stacks", city=City)
# elif City == "Washington":
# response = render_template("washington_stacks", city=City)
# else:
# response = render_template("invalid_city")
# return statement(response)
#-----------------------Ok above----------
#--------------Custom functions below--------
@ask.intent("TextBrendenIntent")
def touch_face_with_Brenden():
print("I AM RIGHT HERE")
response = render_template("brendan_template_1")
return statement(response)
@ask.intent("GetTouchFaceIntent")
def get_touchface_response():
response = render_template("brendan_template_2")
return statement(response)
@ask.intent("DojoBrendenIntent")
def dojo_Brenden_response():
response = render_template("brendan_template_3")
return statement(response)
@ask.intent("AskBrendan")
def ask_brendan():
response = render_template("brendan_template_4")
return statement(response)
@ask.intent("twilioIntent")
def twilioIntentHandler():
account_sid = "AC7622914a70ec20b746fa9f5200f94a79"
auth_token = "f61cf7f88337ec156669d6f08ac693cf"
client = TwilioRestClient(account_sid, auth_token)
message = client.messages.create(to="+7142135025", from_="+16578889320", body="Hey Brendan, I touch your face lol!!!!")
response = render_template("message_sent_to")
return question(response)
@ask.intent("GroupTextIntent", convert={'Name': str})
def GroupTextIntentHandler(Name):
account_sid = "AC7622914a70ec20b746fa9f5200f94a79"
auth_token = "f61cf7f88337ec156669d6f08ac693cf"
client = TwilioRestClient(account_sid, auth_token)
if Name == "Andy":
message = client.messages.create(to="+18057043552", from_="+16578889320", body="Hello Andy you are doing well today!")
response = render_template("message_sent", name = Name)
elif Name == "Annet":
message = client.messages.create(to="+15102142298", from_="+16578889320", body="Hello Annet you are doing well today!")
response = render_template("message_sent", name = Name)
elif Name == "Luba":
message = client.messages.create(to="+17032091080", from_="+16578889320", body="Hello Tuba you are doing well today!")
response = render_template("message_sent", name = Name)
elif Name == "Minh":
message = client.messages.create(to="+17142135025", from_="+16578889320", body="Hello Minh you are doing well today!")
response = render_template("message_sent", name = Name)
else:
response = render_template("message_not_sent")
return question(response)
if __name__ == '__main__':
app.run(debug=True)
| [
"[email protected]"
] | |
55e85aa5da138f635b5bedb73c67575e21d7513e | 6ac2631c256f156d4ddf169e6c67f1fe66ebcaaf | /091/pyteacher/app_chat/models.py | 48e44f13eaba832d4322ad4695b20811a2caee4c | [] | no_license | kasaiee/how-to-pyteacher | 101f106aeeed1b34756cecf502337ff8ee584ff5 | 074a57533f53fd1b8c7f37cd11dbc3b32ab8a08f | refs/heads/master | 2022-12-10T23:50:46.851784 | 2019-07-15T19:31:03 | 2019-07-15T19:31:03 | 187,372,111 | 6 | 4 | null | 2022-12-08T01:55:05 | 2019-05-18T15:08:03 | null | UTF-8 | Python | false | false | 3,459 | py | import jdatetime
from django.db import models
from django.urls import reverse
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils import timezone
from django.utils.timezone import localtime
from django.contrib.auth import get_user_model
User = get_user_model()
class Chat(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
code = models.TextField(null=True, blank=True)
message = models.TextField(null=True)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
create_datetime = models.DateTimeField(auto_now_add=True, null=True)
seen = models.BooleanField(null=True, default=False)
seen_datetime = models.DateTimeField(null=True)
def jd_create_datetime(self):
jdatetime.set_locale('fa_IR')
jdatetime.datetime.now().strftime('%A %B')
jd_datetime = jdatetime.datetime.fromgregorian(
year=self.create_datetime.year,
month=self.create_datetime.month,
day=self.create_datetime.day,
hour=self.create_datetime.hour,
            minute=self.create_datetime.minute,
second=self.create_datetime.second,
)
return jd_datetime.strftime('%A, %d %B %y %H:%M:%S')
def status_color(self):
return 'grey' if self.seen else 'teal'
def status(self):
return 'دیده شده' if self.seen else 'دیده نشده'
def is_done_exercise(self):
return self.content_object in [e.exercise for e in self.user.exercisebystudent_set.all()]
def done_color(self):
return 'teal' if self.is_done_exercise() else 'red'
def done_status(self):
return 'انجام شده' if self.is_done_exercise() else 'انجام نشده'
def is_student(self):
return 'students' in [group.name for group in self.user.groups.all()]
def is_operator(self):
return not self.is_student()
def __str__(self):
return self.message[:30]
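# Illustrative use of the generic foreign key above (a sketch, not from the
# original app): a Chat can attach to any model instance, e.g. a Ticket:
#   ticket = Ticket.objects.create(user=some_user, topic='billing')
#   Chat.objects.create(user=some_user, message='hi', content_object=ticket)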
class Ticket(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
topic = models.CharField(max_length=60, null=True)
chats = GenericRelation(Chat, null=True)
closed = models.BooleanField(default=False)
create_datetime = models.DateTimeField(default=timezone.now, blank=True)
def status(self):
return 'بسته' if self.closed else 'باز'
def status_color(self):
return 'red' if self.closed else 'blue'
def jd_create_datetime(self):
self.create_datetime = localtime(self.create_datetime)
jdatetime.set_locale('fa_IR')
jdatetime.datetime.now().strftime('%A %B')
jd_datetime = jdatetime.datetime.fromgregorian(
year=self.create_datetime.year,
month=self.create_datetime.month,
day=self.create_datetime.day,
hour=self.create_datetime.hour,
minute=self.create_datetime.minute,
second=self.create_datetime.second,
)
return jd_datetime.strftime('%A, %d %B %y %H:%M:%S')
def get_absolute_url(self):
params = {'id': self.id}
return reverse('app-accounts:ticket-detail', kwargs=params)
def __str__(self):
return self.topic
| [
"[email protected]"
] | |
565726ab7ef232eba372668568ba5298c9e21452 | 6a8bc7da3104726f894ae360fce6a43a54b30812 | /gradio/components/file.py | 6c5a41b18d9de2d5da58eb85e5198f2d92775877 | [
"Apache-2.0"
] | permissive | gradio-app/gradio | 0b6b29bb0029ad3b8fc1b143f111b1230b29d23a | e4e7a4319924aaf51dcb18d07d0c9953d4011074 | refs/heads/main | 2023-09-01T10:56:50.822550 | 2023-09-01T00:28:01 | 2023-09-01T00:28:01 | 162,405,963 | 21,224 | 1,537 | Apache-2.0 | 2023-09-14T21:42:00 | 2018-12-19T08:24:04 | Python | UTF-8 | Python | false | false | 11,821 | py | """gr.File() component"""
from __future__ import annotations
import tempfile
import warnings
from pathlib import Path
from typing import Any, Callable, Literal
from gradio_client import utils as client_utils
from gradio_client.documentation import document, set_documentation_group
from gradio_client.serializing import FileSerializable
from gradio import utils
from gradio.components.base import IOComponent, _Keywords
from gradio.deprecation import warn_deprecation
from gradio.events import (
Changeable,
Clearable,
EventListenerMethod,
Selectable,
Uploadable,
)
set_documentation_group("component")
@document()
class File(
Changeable,
Selectable,
Clearable,
Uploadable,
IOComponent,
FileSerializable,
):
"""
Creates a file component that allows uploading generic file (when used as an input) and or displaying generic files (output).
Preprocessing: passes the uploaded file as a {tempfile._TemporaryFileWrapper} or {List[tempfile._TemporaryFileWrapper]} depending on `file_count` (or a {bytes}/{List{bytes}} depending on `type`)
Postprocessing: expects function to return a {str} path to a file, or {List[str]} consisting of paths to files.
Examples-format: a {str} path to a local file that populates the component.
Demos: zip_to_json, zip_files
"""
def __init__(
self,
value: str | list[str] | Callable | None = None,
*,
file_count: Literal["single", "multiple", "directory"] = "single",
file_types: list[str] | None = None,
type: Literal["file", "binary"] = "file",
label: str | None = None,
every: float | None = None,
show_label: bool | None = None,
container: bool = True,
scale: int | None = None,
min_width: int = 160,
height: int | float | None = None,
interactive: bool | None = None,
visible: bool = True,
elem_id: str | None = None,
elem_classes: list[str] | str | None = None,
**kwargs,
):
"""
Parameters:
value: Default file to display, given as str file path. If callable, the function will be called whenever the app loads to set the initial value of the component.
file_count: if single, allows user to upload one file. If "multiple", user uploads multiple files. If "directory", user uploads all files in selected directory. Return type will be list for each file in case of "multiple" or "directory".
file_types: List of file extensions or types of files to be uploaded (e.g. ['image', '.json', '.mp4']). "file" allows any file to be uploaded, "image" allows only image files to be uploaded, "audio" allows only audio files to be uploaded, "video" allows only video files to be uploaded, "text" allows only text files to be uploaded.
type: Type of value to be returned by component. "file" returns a temporary file object with the same base name as the uploaded file, whose full path can be retrieved by file_obj.name, "binary" returns an bytes object.
label: component name in interface.
every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute.
show_label: if True, will display label.
container: If True, will place the component in a container - providing some extra padding around the border.
scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer.
min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first.
height: The maximum height of the file component, in pixels. If more files are uploaded than can fit in the height, a scrollbar will appear.
interactive: if True, will allow users to upload a file; if False, can only be used to display files. If not provided, this is inferred based on whether the component is used as an input or output.
visible: If False, component will be hidden.
elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles.
elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles.
"""
self.file_count = file_count
self.file_types = file_types
if file_types is not None and not isinstance(file_types, list):
raise ValueError(
f"Parameter file_types must be a list. Received {file_types.__class__.__name__}"
)
valid_types = [
"file",
"binary",
"bytes",
] # "bytes" is included for backwards compatibility
if type not in valid_types:
raise ValueError(
f"Invalid value for parameter `type`: {type}. Please choose from one of: {valid_types}"
)
if type == "bytes":
warn_deprecation(
"The `bytes` type is deprecated and may not work as expected. Please use `binary` instead."
)
if file_count == "directory" and file_types is not None:
warnings.warn(
"The `file_types` parameter is ignored when `file_count` is 'directory'."
)
self.type = type
self.height = height
self.select: EventListenerMethod
"""
Event listener for when the user selects file from list.
Uses event data gradio.SelectData to carry `value` referring to name of selected file, and `index` to refer to index.
See EventData documentation on how to use this event data.
"""
IOComponent.__init__(
self,
label=label,
every=every,
show_label=show_label,
container=container,
scale=scale,
min_width=min_width,
interactive=interactive,
visible=visible,
elem_id=elem_id,
elem_classes=elem_classes,
value=value,
**kwargs,
)
def get_config(self):
return {
"file_count": self.file_count,
"file_types": self.file_types,
"value": self.value,
"selectable": self.selectable,
"height": self.height,
**IOComponent.get_config(self),
}
@staticmethod
def update(
value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE,
label: str | None = None,
show_label: bool | None = None,
container: bool | None = None,
scale: int | None = None,
min_width: int | None = None,
height: int | float | None = None,
interactive: bool | None = None,
visible: bool | None = None,
):
return {
"label": label,
"show_label": show_label,
"container": container,
"scale": scale,
"min_width": min_width,
"height": height,
"interactive": interactive,
"visible": visible,
"value": value,
"__type__": "update",
}
def preprocess(
self, x: list[dict[str, Any]] | None
) -> (
bytes
| tempfile._TemporaryFileWrapper
| list[bytes | tempfile._TemporaryFileWrapper]
| None
):
"""
Parameters:
x: List of JSON objects with filename as 'name' property and base64 data as 'data' property
Returns:
File objects in requested format
"""
if x is None:
return None
def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper:
file_name, data, is_file = (
f["name"],
f["data"],
f.get("is_file", False),
)
if self.type == "file":
if is_file:
path = self.make_temp_copy_if_needed(file_name)
else:
data, _ = client_utils.decode_base64_to_binary(data)
path = self.file_bytes_to_file(data, file_name=file_name)
path = str(utils.abspath(path))
self.temp_files.add(path)
# Creation of tempfiles here
file = tempfile.NamedTemporaryFile(
delete=False, dir=self.DEFAULT_TEMP_DIR
)
file.name = path
file.orig_name = file_name # type: ignore
return file
elif (
self.type == "binary" or self.type == "bytes"
): # "bytes" is included for backwards compatibility
if is_file:
with open(file_name, "rb") as file_data:
return file_data.read()
return client_utils.decode_base64_to_binary(data)[0]
else:
raise ValueError(
"Unknown type: "
+ str(self.type)
+ ". Please choose from: 'file', 'bytes'."
)
if self.file_count == "single":
if isinstance(x, list):
return process_single_file(x[0])
else:
return process_single_file(x)
else:
if isinstance(x, list):
return [process_single_file(f) for f in x]
else:
return process_single_file(x)
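        # Illustrative payload handled above (an assumption; the file name
        # and data are made up):
        #   [{"name": "notes.txt", "data": "data:text/plain;base64,aGk=",
        #     "is_file": False}]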
def postprocess(
self, y: str | list[str] | None
) -> dict[str, Any] | list[dict[str, Any]] | None:
"""
Parameters:
y: file path
Returns:
JSON object with key 'name' for filename, 'data' for base64 url, and 'size' for filesize in bytes
"""
if y is None:
return None
if isinstance(y, list):
return [
{
"orig_name": Path(file).name,
"name": self.make_temp_copy_if_needed(file),
"size": Path(file).stat().st_size,
"data": None,
"is_file": True,
}
for file in y
]
else:
d = {
"orig_name": Path(y).name,
"name": self.make_temp_copy_if_needed(y),
"size": Path(y).stat().st_size,
"data": None,
"is_file": True,
}
return d
def as_example(self, input_data: str | list | None) -> str:
if input_data is None:
return ""
elif isinstance(input_data, list):
return ", ".join([Path(file).name for file in input_data])
else:
return Path(input_data).name
def api_info(self) -> dict[str, dict | bool]:
if self.file_count == "single":
return self._single_file_api_info()
else:
return self._multiple_file_api_info()
def serialized_info(self):
if self.file_count == "single":
return self._single_file_serialized_info()
else:
return self._multiple_file_serialized_info()
def example_inputs(self) -> dict[str, Any]:
if self.file_count == "single":
return self._single_file_example_inputs()
else:
return self._multiple_file_example_inputs()
| [
"[email protected]"
] | |
d7aaeffe43f3d4370c2d6713f4192dc5ec0a03b8 | 18274645eeb605098b966571f3e0ac8806c992d8 | /companies/views.py | 13f1cd96a2393a901cc9dead362ccae6dc2e9171 | [] | no_license | webclinic017/fintechapp | 0ec47a6b75dbe1faa52d1d30039ecf2bb1673bc2 | f971a502c0abc1daf23115ade9f4a6901e21d023 | refs/heads/main | 2023-03-01T10:02:01.608251 | 2021-02-11T22:45:35 | 2021-02-11T22:45:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,491 | py | from django.shortcuts import render
from django.contrib.postgres.aggregates import StringAgg
from django.contrib.postgres.search import (
SearchQuery, SearchRank, SearchVector, TrigramSimilarity,
)
from mergedeep import merge
from django.views.generic import TemplateView, ListView, DetailView
from django.db.models import Q, F
from django.shortcuts import render, redirect, get_object_or_404
from django.views import generic
from django.core.paginator import Paginator
import csv
from io import StringIO
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.files import File
from django.http import HttpResponse, StreamingHttpResponse
from django.utils.text import slugify
import json # will be needed for saving preprocessing details
import numpy as np # for data manipulation
import pandas as pd # for data manipulation
from sklearn.model_selection import train_test_split # will be used for data split
import requests
from apps.ml.income_classifier import random_forest as rf
from apps.ml.income_classifier import extra_trees as et
from apps.endpoints.models import Endpoint
from apps.endpoints.serializers import EndpointSerializer
from apps.endpoints.models import MLAlgorithm
from apps.endpoints.serializers import MLAlgorithmSerializer
from apps.endpoints.models import MLAlgorithmStatus
from apps.endpoints.serializers import MLAlgorithmStatusSerializer
from apps.endpoints.models import MLRequest
from apps.endpoints.serializers import MLRequestSerializer
import json
from numpy.random import rand
from rest_framework import views, status
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.ml.registry import MLRegistry
# from fintechapp.wsgi import registry
from django.db import transaction
from account.models import Clients, LoanOfficer, Loan, AccountUser
from companies.models import *
from django.forms.models import model_to_dict
from companies.models import Loan_History
# Create your views here.
from data_processor import imports as imp
from data_processor import logic as log
from datetime import datetime, date
from django.db.models import Sum
from apps.ml.income_classifier.random_forest import RandomForestClassifier
from apps.ml.income_classifier.extra_trees import ExtraTreesClassifier
from apps.ml.application_classifier.random_forest import RandomForestApplicationClassifier
from django.http import JsonResponse
from rest_framework.generics import ListAPIView
from .serializers import *
from .models import Loan_History, IncomeData
from .pagination import StandardResultsSetPagination
from .get_income_pred import GetPredictions as gpred
from apps.endpoints.models import BehaviouralScores
from apps.endpoints.models import RetentionScores
from apps.endpoints.models import ApplicationScores
class ApplicationAPIView(ListAPIView):
pagination_class = StandardResultsSetPagination
serializer_class = ApplicationScoresSerializer
def get_queryset(self):
# filter the queryset based on the filters applied
        queryList = ApplicationScores.objects.all()
        loan_officer = self.request.query_params.get('loan_officer', None)
        # ORGANIZATION_TYPE = self.request.query_params.get('mortage', None)
        # OCCUPATION_TYPE = self.request.query_params.get('funeral', None)
        # CODE_GENDER = self.request.query_params.get('school', None)
        sort_by = self.request.query_params.get('sort_by', None)
        if loan_officer:
            queryList = queryList.filter(loan_officer=loan_officer)
        # sort if requested; order_by must be applied before slicing, since
        # Django cannot reorder a queryset once a slice has been taken
        if sort_by == "income":
            queryList = queryList.order_by("client_id")
        elif sort_by == "credit_amount":
            queryList = queryList.order_by("loan_amount")
        if loan_officer:
            queryList = queryList[:5]
# get predictions for applications scoring predictions
# application_classifier_data = gpred.get_application_scores(queryList)
return queryList
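    # Illustrative request exercising the filters above (the URL prefix is an
    # assumption; only the query params come from the code):
    #   GET /api/application-scores/?loan_officer=7&sort_by=income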
class BehavioralAPIView(ListAPIView):
pagination_class = StandardResultsSetPagination
serializer_class = BehaviouralScoresSerializer
def get_queryset(self):
# filter the queryset based on the filters applied
        queryList = BehaviouralScores.objects.all()
        loan_officer = self.request.query_params.get('loan_officer', None)
        # ORGANIZATION_TYPE = self.request.query_params.get('mortage', None)
        # OCCUPATION_TYPE = self.request.query_params.get('funeral', None)
        # CODE_GENDER = self.request.query_params.get('school', None)
        sort_by = self.request.query_params.get('sort_by', None)
        if loan_officer:
            queryList = queryList.filter(loan_officer=loan_officer)
        # sort if requested; order_by must be applied before slicing, since
        # Django cannot reorder a queryset once a slice has been taken
        if sort_by == "income":
            queryList = queryList.order_by("client_id")
        elif sort_by == "credit_amount":
            queryList = queryList.order_by("loan_amount")
        if loan_officer:
            queryList = queryList[:5]
# get predictions for applications scoring predictions
# application_classifier_data = gpred.get_application_scores(queryList)
return queryList
class RetentionAPIView(ListAPIView):
pagination_class = StandardResultsSetPagination
serializer_class = RetentionScoresSerializer
def get_queryset(self):
# filter the queryset based on the filters applied
        queryList = RetentionScores.objects.all()
        loan_officer = self.request.query_params.get('loan_officer', None)
        # ORGANIZATION_TYPE = self.request.query_params.get('mortage', None)
        # OCCUPATION_TYPE = self.request.query_params.get('funeral', None)
        # CODE_GENDER = self.request.query_params.get('school', None)
        sort_by = self.request.query_params.get('sort_by', None)
        if loan_officer:
            queryList = queryList.filter(loan_officer=loan_officer)
        # sort if requested; order_by must be applied before slicing, since
        # Django cannot reorder a queryset once a slice has been taken
        if sort_by == "income":
            queryList = queryList.order_by("client_id")
        elif sort_by == "credit_amount":
            queryList = queryList.order_by("loan_amount")
        if loan_officer:
            queryList = queryList[:5]
# get predictions for applications scoring predictions
# application_classifier_data = gpred.get_application_scores(queryList)
return queryList
class HomeView(ListView):
template_name = 'dashboards/landing/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time
user_id = self.request.session['account_user_id']
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
user_name = acc_user
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset() to return a queryset
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
}
return context
class ProfileView(ListView):
template_name = 'dashboards/clients/profile/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time, client
user_id = self.request.session['account_user_id']
id = self.request.GET.get('client_id', None)
print(id)
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
client = Clients.objects.all()[:1]
user_name = acc_user
print("acc_user",acc_user)
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset to return an iterable
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
'client':client,
}
return context
class ApplicationReportExportCsvView(ListView):
template_name = 'dashboards/clients/profile/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time
user_id = self.request.session['account_user_id']
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
user_name = acc_user
print("acc_user",acc_user)
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset to return an iterable
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
}
return context
class ApplicationReportView(ListView):
template_name = 'dashboards/clients/profile/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time
user_id = self.request.session['account_user_id']
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
user_name = acc_user
print("acc_user",acc_user)
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset to return an iterable
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
}
return context
class BehavioralAnalyticsResultsView(ListView):
template_name = 'dashboards/behavioral/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time
user_id = self.request.session['account_user_id']
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
user_name = acc_user
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset to return an iterable
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
}
return context
class ApplicationAnalyticsResultsView(ListView):
template_name = 'dashboards/application/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time
user_id = self.request.session['account_user_id']
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
user_name = acc_user
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset to return an iterable
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
}
return context
class RetentionAnalyticsResultsView(ListView):
template_name = 'dashboards/retention/index.html'
def get_queryset(self, **kwargs):
global cust_data, loan, user_name, input_data,acc_user, time
user_id = self.request.session['account_user_id']
time = end = datetime.today()
acc_user = AccountUser.objects.get(id=user_id)
user_name = acc_user
org = Organization.objects.get(id=1)
        client = Clients.objects.filter(insti=org)
        # ListView expects get_queryset to return an iterable
        return client
def get_context_data(self, **kwargs):
context = {
'user_name':user_name,
'acc_user':acc_user,
}
return context
def client_profile(request):
id = request.GET.get('client_id', None)
print(id)
client = Clients.objects.get(id=id)
print("client", client)
context = {
'client':client,
}
return render(request,'dashboards/clients/profile/index.html', context)
def officer_profile(request):
id = request.GET.get('officer_id', None)
print(id)
officer = LoanOfficer.objects.get(id=id)
print("officer", officer)
context = {
'officer':officer,
}
return render(request,'dashboards/officers/profile/index.html', context)
def application_report_export_csv(request):
id = request.GET.get('loan_id', None)
print(id)
    loan = Loan.objects.get(loan_id=id)
    print("loan", loan)
return render(request,'dashboards/articles/index.html')
def application_report(request):
id = request.GET.get('loan_id', None)
print(id)
    loan = Loan.objects.get(loan_id=id)
    print("loan", loan)
return render(request,'dashboards/articles/index.html')
def reports(request):
return render(request,'dashboards/reports/index.html')
def articles(request):
return render(request,'dashboards/articles/index.html')
import csv
from django.http import HttpResponse
def some_view(request):
# Create the HttpResponse object with the appropriate CSV header.
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="somefilename.csv"'
writer = csv.writer(response)
writer.writerow(['First row', 'Foo', 'Bar', 'Baz'])
writer.writerow(['Second row', 'A', 'B', 'C', '"Testing"', "Here's a quote"])
return response
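
# A minimal, hypothetical way to wire up the CSV example above (the actual
# route is not part of this file):
#   from django.urls import path
#   urlpatterns += [path('export/csv/', some_view, name='export-csv')]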
#Get Business Loan Filter
def getBusiness(request):
# get all the business loans from the database excluding
# null and blank values
if request.method == "GET" and request.is_ajax():
business = LoanOfficer.objects.exclude(id__isnull=True).\
exclude(insti__exact='').order_by('insti').values_list('insti').distinct()
business = [i[0] for i in list(business)]
data = {
"business": business,
}
return JsonResponse(data, status = 200)
#Get Mortgage Loan Filter
def getMortage(request):
    # get all the mortgage loans from the database excluding
# null and blank values
if request.method == "GET" and request.is_ajax():
mortage = ApplicationScores.objects.exclude(income_text__isnull=True).\
exclude(income_text__exact='').order_by('income_text').values_list('income_text').distinct()
mortage = [i[0] for i in list(mortage)]
data = {
"mortage": mortage,
}
return JsonResponse(data, status = 200)
#Get funeral Loan Filter
def getFuneral(request):
# get all the funeral loans from the database excluding
# null and blank values
if request.method == "GET" and request.is_ajax():
funeral = ApplicationScores.objects.exclude(income_text__isnull=True).\
exclude(income_text__exact='').order_by('income_text').values_list('income_text').distinct()
funeral = [i[0] for i in list(funeral)]
data = {
"funeral": funeral,
}
return JsonResponse(data, status = 200)
#Get School Loan Filter
def getSchool(request):
# get all the school loans from the database excluding
# null and blank values
if request.method == "GET" and request.is_ajax():
school = ApplicationScores.objects.exclude(income_text__isnull=True).\
exclude(income_text__exact='').order_by('income_text').values_list('income_text').distinct()
school = [i[0] for i in list(school)]
data = {
"school": school,
}
return JsonResponse(data, status = 200)
def data_for_charts(request):
labels = []
data ={}
chart_data = {}
queryset = Loan_History.objects.values('OCCUPATION_TYPE').annotate(amount_borrowed=Sum('AMT_CREDIT')).order_by('-amount_borrowed')
for entry in queryset:
data[entry['OCCUPATION_TYPE']] = []
# chart_data['label'] = entry['OCCUPATION_TYPE']
data[entry['OCCUPATION_TYPE']].append(entry['amount_borrowed'])
# data = merge(data,chart_data)
return JsonResponse(data={
# 'labels': labels,
'data': data,
})
def data_aggretation(request):
labels = []
data = []
queryset = Loan_History.objects.values('OCCUPATION_TYPE').annotate(amount_borrowed=Sum('AMT_CREDIT')).order_by('-amount_borrowed')
for entry in queryset:
labels.append(entry['OCCUPATION_TYPE'])
data.append(entry['amount_borrowed'])
return JsonResponse(data={
'labels': labels,
'data': data,
})
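
# Illustrative JSON payload produced by data_aggretation (values invented):
#   {"labels": ["Laborers", "Sales staff"], "data": [1200000.0, 800000.0]}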
def live_app_score_pred(request):
algorithm_object = RandomForestClassifier()
# posts = Post.objects.all()
response_data = {}
context = {}
if request.method == 'POST':
age = request.POST.get('age')
workclass = request.POST.get('workclass')
fnlwgt = request.POST.get('fnlwgt')
education = request.POST.get('education')
education_num = request.POST.get('education_num')
marital_status = request.POST.get('marital_status')
occupation = request.POST.get('occupation')
relationship = request.POST.get('relationship')
race = request.POST.get('race')
sex = request.POST.get('sex')
capital_gain = request.POST.get('capital_gain')
capital_loss = request.POST.get('capital_loss')
hours_per_week = request.POST.get('hours_per_week')
native_country = request.POST.get('native_country')
data ={'age': age, 'workclass': workclass,
'fnlwgt': fnlwgt, 'education': education,
'education-num': education_num, 'marital-status': marital_status,
'occupation': occupation, 'relationship': relationship,
'race': race, 'sex': sex, 'capital-gain': capital_gain,
'capital-loss': capital_loss, 'hours-per-week': hours_per_week,
'native-country': native_country
}
incomes_prediction = {}
print("incomes_prediction data", data)
incomes_prediction = algorithm_object.compute_prediction(data)
print("incomes_prediction data", incomes_prediction)
if incomes_prediction['income_probability'] > 0.67:
color = 'red'
text = 'high risk'
incomes_prediction["income_color"] = color
incomes_prediction["income_text"] = text
elif incomes_prediction['income_probability'] > 0.33:
color = 'blue'
text = 'moderate risk'
incomes_prediction["income_color"] = color
incomes_prediction["income_text"] = text
else:
color = 'green'
text = 'low risk'
incomes_prediction["income_color"] = color
incomes_prediction["income_text"] = text
data = {
"incomes_prediction": incomes_prediction,
}
context = {
'result': "prediction successful",
'incomes_prediction':incomes_prediction,
}
return render(request,'dashboards/officers/predictions/index.html', context)
context = {
'result': "fail",
}
return render(request,'dashboards/officers/predictions/index.html', context) | [
"[email protected]"
] | |
3f8d007f6c0a470c755b8c64925dd0a407c4166b | c58bfb0d7a293cc471f2921db57b00d5c3e56bac | /scripts/mp-scripts/Bio/Restriction/_Update/RestrictionCompiler.py | 38e3c4ce40465e3db07f73c6fae7cdf294b966b8 | [
"Apache-2.0",
"LicenseRef-scancode-biopython"
] | permissive | dbmi-pitt/DIKB-Micropublication | 0a2f29b704c269d2ccfe091f8faff1b2374d626d | 0fa264903414ac0b552d363d139746ead198f06a | refs/heads/master | 2020-12-24T16:32:04.206847 | 2020-01-22T18:33:58 | 2020-01-22T18:33:58 | 22,527,994 | 6 | 2 | null | 2015-07-15T14:39:09 | 2014-08-01T20:51:07 | Python | UTF-8 | Python | false | false | 35,222 | py |
#
# Restriction Analysis Libraries.
# Copyright (C) 2004. Frederic Sohm.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
# this script is used to produce the dictionary which will contains the data
# about the restriction enzymes from the Emboss/Rebase data files
# namely
# emboss_e.### (description of the sites),
# emboss_r.### (origin, methylation, references)
# emboss_s.### (suppliers)
# where ### is a number of three digits : 1 for the year two for the month
#
# very dirty implementation but it does the job, so...
# Not very quick either but you are not supposed to use it frequently.
#
# The results are stored in
# path/to/site-packages/Bio/Restriction/Restriction_Dictionary.py
# the file contains two dictionary :
# 'rest_dict' which contains the data for the enzymes
# and
# 'suppliers' which map the name of the suppliers to their abbreviation.
#
"""Convert a serie of Rebase files into a Restriction_Dictionary.py module.
The Rebase files are in the emboss format :
emboss_e.### -> contains informations about the restriction sites.
emboss_r.### -> contains general informations about the enzymes.
emboss_s.### -> contains informations about the suppliers.
### is a 3 digit number. The first digit is the year and the two last the month.
"""
import sre
import os
import itertools
import pprint
import time
import sys
import site
import shutil
from Bio.Seq import Seq
from Bio.Alphabet import IUPAC
import Bio.Restriction.Restriction
from Bio.Restriction.Restriction import AbstractCut, RestrictionType, NoCut, OneCut,\
TwoCuts, Meth_Dep, Meth_Undep, Palindromic, NonPalindromic, Unknown, Blunt,\
Ov5, Ov3, NotDefined, Defined, Ambiguous, Commercially_available, Not_available
import Bio.Restriction.RanaConfig as config
from Bio.Restriction._Update.Update import RebaseUpdate
from Bio.Restriction.Restriction import *
from Bio.Restriction.DNAUtils import antiparallel
DNA=Seq
dna_alphabet = {'A':'A', 'C':'C', 'G':'G', 'T':'T',
'R':'AG', 'Y':'CT', 'W':'AT', 'S':'CG', 'M':'AC', 'K':'GT',
'H':'ACT', 'B':'CGT', 'V':'ACG', 'D':'AGT',
'N':'ACGT',
'a': 'a', 'c': 'c', 'g': 'g', 't': 't',
'r':'ag', 'y':'ct', 'w':'at', 's':'cg', 'm':'ac', 'k':'gt',
'h':'act', 'b':'cgt', 'v':'acg', 'd':'agt',
'n':'acgt'}
complement_alphabet = {'A':'T', 'T':'A', 'C':'G', 'G':'C','R':'Y', 'Y':'R',
'W':'W', 'S':'S', 'M':'K', 'K':'M', 'H':'D', 'D':'H',
'B':'V', 'V':'B', 'N':'N','a':'t', 'c':'g', 'g':'c',
't':'a', 'r':'y', 'y':'r', 'w':'w', 's':'s','m':'k',
'k':'m', 'h':'d', 'd':'h', 'b':'v', 'v':'b', 'n':'n'}
enzymedict = {}
suppliersdict = {}
classdict = {}
typedict = {}
class OverhangError(ValueError) :
"""Exception for dealing with overhang."""
pass
def BaseExpand(base) :
"""BaseExpand(base) -> string.
    given a degenerate base, returns its meaning in the IUPAC alphabet.
i.e:
b= 'A' -> 'A'
b= 'N' -> 'ACGT'
etc..."""
base = base.upper()
return dna_alphabet[base]
def regex(site) :
"""regex(site) -> string.
Construct a regular expression from a DNA sequence.
i.e. :
site = 'ABCGN' -> 'A[CGT]CG.'"""
reg_ex = site
for base in reg_ex :
if base in ('A', 'T', 'C', 'G', 'a', 'c', 'g', 't') :
pass
if base in ('N', 'n') :
reg_ex = '.'.join(reg_ex.split('N'))
reg_ex = '.'.join(reg_ex.split('n'))
if base in ('R', 'Y', 'W', 'M', 'S', 'K', 'H', 'D', 'B', 'V') :
expand = '['+ str(BaseExpand(base))+']'
reg_ex = expand.join(reg_ex.split(base))
return reg_ex
def Antiparallel(sequence) :
"""Antiparallel(sequence) -> string.
returns a string which represents the reverse complementary strand of
a DNA sequence."""
return antiparallel(sequence.tostring())
def is_palindrom(sequence) :
"""is_palindrom(sequence) -> bool.
    True if the sequence is a palindrome.
sequence is a DNA object."""
return sequence == DNA(Antiparallel(sequence))
def LocalTime() :
"""LocalTime() -> string.
    LocalTime calculates the emboss file extension for the current year and
    month."""
t = time.gmtime()
year = str(t.tm_year)[-1]
month = str(t.tm_mon)
if len(month) == 1 : month = '0'+month
return year+month
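
# Worked example: in March 2004, t.tm_year == 2004 and t.tm_mon == 3, so
# LocalTime() returns '4' + '03' == '403', matching files such as emboss_e.403.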
class newenzyme(object) :
"""construct the attributes of the enzyme corresponding to 'name'."""
def __init__(cls, name) :
cls.opt_temp = 37
cls.inact_temp = 65
cls.substrat = 'DNA'
target = enzymedict[name]
cls.site = target[0]
cls.size = target[1]
cls.suppl = tuple(target[9])
cls.freq = target[11]
cls.ovhg = target[13]
cls.ovhgseq = target[14]
cls.bases = ()
#
# Is the site palindromic?
# Important for the way the DNA is search for the site.
        # Palindromic sites need to be looked for on one strand only.
        # Non-palindromic sites need to be searched for on the reverse
        # complement as well.
#
if target[10] : cls.bases += ('Palindromic',)
else : cls.bases += ('NonPalindromic',)
#
        # Number of cuts the enzyme produces.
        # 0 => unknown, the enzyme has not been fully characterised.
        # 2 => 1 cut (because one cut means cutting 2 strands),
# 4 => 2 cuts, same logic.
# A little bit confusing but it is the way EMBOSS/Rebase works.
#
if not target[2] :
#
# => undefined enzymes, nothing to be done.
#
cls.bases += ('NoCut','Unknown', 'NotDefined')
cls.fst5 = None
cls.fst3 = None
cls.scd5 = None
cls.scd3 = None
cls.ovhg = None
cls.ovhgseq = None
else :
#
# we will need to calculate the overhang.
#
if target[2] == 2 :
cls.bases += ('OneCut',)
cls.fst5 = target[4]
cls.fst3 = target[5]
cls.scd5 = None
cls.scd3 = None
else :
cls.bases += ('TwoCuts',)
cls.fst5 = target[4]
cls.fst3 = target[5]
cls.scd5 = target[6]
cls.scd3 = target[7]
#
# Now, prepare the overhangs which will be added to the DNA
# after the cut.
# Undefined enzymes will not be allowed to catalyse,
# they are not available commercially anyway.
            # I assumed that if an enzyme cuts twice, the overhang will be
            # of the same kind. The only exception is HaeIV. I do not deal
            # with that at the moment (i.e. I don't include it; this needs
            # to be fixed).
# They generally cut outside their recognition site and
# therefore the overhang is undetermined and dependent of
# the DNA sequence upon which the enzyme act.
#
if target[3] :
#
# rebase field for blunt: blunt == 1, other == 0.
# The enzyme is blunt. No overhang.
#
cls.bases += ('Blunt', 'Defined')
cls.ovhg = 0
elif isinstance(cls.ovhg, int) :
#
# => overhang is sequence dependent
#
if cls.ovhg > 0 :
#
# 3' overhang, ambiguous site (outside recognition site
# or site containing ambiguous bases (N, W, R,...)
#
cls.bases += ('Ov3', 'Ambiguous')
elif cls.ovhg < 0 :
#
# 5' overhang, ambiguous site (outside recognition site
# or site containing ambiguous bases (N, W, R,...)
#
cls.bases += ('Ov5', 'Ambiguous')
else :
#
# cls.ovhg is a string => overhang is constant
#
if cls.fst5 - (cls.fst3 + cls.size) < 0 :
cls.bases += ('Ov5', 'Defined')
cls.ovhg = - len(cls.ovhg)
else :
cls.bases += ('Ov3', 'Defined')
cls.ovhg = + len(cls.ovhg)
#
        # Next class : sensitivity to methylation.
        # Set by EmbossMixer from the emboss_r.txt file.
        # Not really methylation dependent at the moment; stands rather for
        # 'is the site methylable?'.
        # Proper methylation sensitivity has yet to be implemented.
        # But the class is there for further development.
#
if target[8] :
cls.bases += ('Meth_Dep', )
cls.compsite = target[12]
else :
cls.bases += ('Meth_Undep',)
cls.compsite = target[12]
#
        # The next class allows selecting enzymes by supplier.
        # Not essential but can be useful.
#
if cls.suppl :
cls.bases += ('Commercially_available', )
else :
cls.bases += ('Not_available', )
cls.bases += ('AbstractCut', 'RestrictionType')
cls.__name__ = name
cls.results = None
cls.dna = None
cls.__bases__ = cls.bases
cls.charac = (cls.fst5, cls.fst3, cls.scd5, cls.scd3, cls.site)
if not target[2] and cls.suppl :
supp = ', '.join([suppliersdict[s][0] for s in cls.suppl])
print 'WARNING : It seems that %s is both commercially available\
\n\tand its characteristics are unknown. \
\n\tThis seems counter-intuitive.\
\n\tThere is certainly an error either in ranacompiler or\
\n\tin this REBASE release.\
\n\tThe supplier is : %s.' % (name, supp)
return
class TypeCompiler(object) :
"""Build the different types possible for Restriction Enzymes"""
def __init__(self) :
"""TypeCompiler() -> new TypeCompiler instance."""
pass
def buildtype(self) :
"""TC.buildtype() -> generator.
build the new types that will be needed for constructing the
restriction enzymes."""
baT = (AbstractCut, RestrictionType)
cuT = (NoCut, OneCut, TwoCuts)
meT = (Meth_Dep, Meth_Undep)
paT = (Palindromic, NonPalindromic)
ovT = (Unknown, Blunt, Ov5, Ov3)
deT = (NotDefined, Defined, Ambiguous)
coT = (Commercially_available, Not_available)
All = (baT, cuT, meT, paT, ovT, deT, coT)
#
# Now build the types. Only the most obvious are left out.
# Modified even the most obvious are not so obvious.
# emboss_*.403 AspCNI is unknown and commercially available.
# So now do not remove the most obvious.
#
types = [(p,c,o,d,m,co,baT[0],baT[1])
for p in paT for c in cuT for o in ovT
for d in deT for m in meT for co in coT]
n= 1
for ty in types :
dct = {}
for t in ty :
dct.update(t.__dict__)
#
# here we need to customize the dictionary.
            # i.e. types deriving from OneCut always have scd5 and scd3
            # equal to None. There is no need to store that in each specific
            # enzyme of this type, but it then needs to be in the type.
#
dct['results'] = []
dct['substrat'] = 'DNA'
dct['dna'] = None
if t == NoCut :
dct.update({'fst5':None,'fst3':None,
'scd5':None,'scd3':None,
'ovhg':None,'ovhgseq':None})
elif t == OneCut :
dct.update({'scd5':None, 'scd3':None})
class klass(type) :
def __new__(cls) :
return type.__new__(cls, 'type%i'%n,ty,dct)
def __init__(cls) :
super(klass, cls).__init__('type%i'%n,ty,dct)
yield klass()
n+=1
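
# Each object yielded by buildtype is an instance of a throw-away metaclass:
# calling klass() runs type.__new__ with the chosen bases and the shared
# attribute dict, producing a fresh 'type%i' type for the enzyme factory.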
start = '\n\
#!/usr/bin/env python\n\
#\n\
# Restriction Analysis Libraries.\n\
# Copyright (C) 2004. Frederic Sohm.\n\
#\n\
# This code is part of the Biopython distribution and governed by its\n\
# license. Please see the LICENSE file that should have been included\n\
# as part of this package.\n\
#\n\
#\n\
rest_dict = \\\n'
class DictionaryBuilder(object) :
def __init__(self, e_mail='', ftp_proxy='') :
"""DictionaryBuilder([e_mail[, ftp_proxy]) -> DictionaryBuilder instance.
If the emboss files used for the construction need to be updated this
class will download them if the ftp connection is correctly set.
either in RanaConfig.py or given at run time.
e_mail is the e-mail address used as password for the anonymous
ftp connection.
proxy is the ftp_proxy to use if any."""
self.rebase_pass = e_mail or config.Rebase_password
self.proxy = ftp_proxy or config.ftp_proxy
def build_dict(self) :
"""DB.build_dict() -> None.
Construct the dictionary and build the files containing the new
dictionaries."""
#
# first parse the emboss files.
#
emboss_e, emboss_r, emboss_s = self.lastrebasefile()
#
# the results will be stored into enzymedict.
#
self.information_mixer(emboss_r, emboss_e, emboss_s)
emboss_r.close()
emboss_e.close()
emboss_s.close()
#
# we build all the possible type
#
tdct = {}
for klass in TypeCompiler().buildtype() :
exec klass.__name__ +'= klass'
exec "tdct['"+klass.__name__+"'] = klass"
#
# Now we build the enzymes from enzymedict
# and store them in a dictionary.
# The type we will need will also be stored.
#
for name in enzymedict :
#
# the class attributes first:
#
cls = newenzyme(name)
#
# Now select the right type for the enzyme.
#
bases = cls.bases
clsbases = tuple([eval(x) for x in bases])
typestuff = ''
for n, t in tdct.iteritems() :
#
# if the bases are the same. it is the right type.
# create the enzyme and remember the type
#
if t.__bases__ == clsbases :
typestuff = t
typename = t.__name__
continue
#
# now we build the dictionaries.
#
dct = dict(cls.__dict__)
del dct['bases']
del dct['__bases__']
del dct['__name__']# no need to keep that, it's already in the type.
classdict[name] = dct
commonattr = ['fst5', 'fst3', 'scd5', 'scd3', 'substrat',
'ovhg', 'ovhgseq','results', 'dna']
if typename in typedict :
typedict[typename][1].append(name)
else :
enzlst= []
tydct = dict(typestuff.__dict__)
tydct = dict([(k,v) for k,v in tydct.iteritems() if k in commonattr])
enzlst.append(name)
typedict[typename] = (bases, enzlst)
for letter in cls.__dict__['suppl'] :
supplier = suppliersdict[letter]
suppliersdict[letter][1].append(name)
if not classdict or not suppliersdict or not typedict :
print 'One of the new dictionaries is empty.'
print 'Check the integrity of the emboss file before continuing.'
print 'Update aborted.'
sys.exit()
#
# How many enzymes this time?
#
print '\nThe new database contains %i enzymes.\n' % len(classdict)
#
# the dictionaries are done. Build the file
#
#update = config.updatefolder
update = os.getcwd()
results = open(os.path.join(update, 'Restriction_Dictionary.py'), 'w')
print 'Writing the dictionary containing the new Restriction classes.\t',
results.write(start)
a = pprint.PrettyPrinter(1, 80, None, results)
a.pprint(classdict)
print 'OK.\n'
        print 'Writing the dictionary containing the suppliers data.\t\t',
results.write('suppliers = \\\n')
a.pprint(suppliersdict)
print 'OK.\n'
print 'Writing the dictionary containing the Restriction types.\t',
results.write('typedict = \\\n')
a.pprint(typedict)
print 'OK.\n'
results.close()
return
def install_dict(self) :
"""DB.install_dict() -> None.
Install the newly created dictionary in the site-packages folder.
May need super user privilege on some architectures."""
print '\n ' +'*'*78 + ' \n'
print '\n\t\tInstalling Restriction_Dictionary.py'
try :
import Bio.Restriction.Restriction_Dictionary as rd
except ImportError :
print '\
\n Unable to locate the previous Restriction_Dictionary.py module\
\n Aborting installation.'
sys.exit()
#
# first save the old file in Updates
#
old = os.path.join(os.path.split(rd.__file__)[0],
'Restriction_Dictionary.py')
#update_folder = config.updatefolder
update_folder = os.getcwd()
shutil.copyfile(old, os.path.join(update_folder,
'Restriction_Dictionary.old'))
#
# Now test and install.
#
new = os.path.join(update_folder, 'Restriction_Dictionary.py')
try :
execfile(new)
print '\
\n\tThe new file seems ok. Proceeding with the installation.'
except SyntaxError :
print '\
\n The new dictionary file is corrupted. Aborting the installation.'
return
try :
shutil.copyfile(new, old)
            print'\n\t Everything ok. If you need it, a version of the old\
            \n\t dictionary has been saved in the Updates folder under\
\n\t the name Restriction_Dictionary.old.'
print '\n ' +'*'*78 + ' \n'
except IOError :
print '\n ' +'*'*78 + ' \n'
print '\
\n\t WARNING : Impossible to install the new dictionary.\
\n\t Are you sure you have write permission to the folder :\n\
\n\t %s ?\n\n' % os.path.split(old)[0]
return self.no_install()
return
def no_install(self) :
"""BD.no_install() -> None.
build the new dictionary but do not install the dictionary."""
print '\n ' +'*'*78 + '\n'
#update = config.updatefolder
try :
import Bio.Restriction.Restriction_Dictionary as rd
except ImportError :
print '\
\n Unable to locate the previous Restriction_Dictionary.py module\
\n Aborting installation.'
sys.exit()
#
# first save the old file in Updates
#
old = os.path.join(os.path.split(rd.__file__)[0],
'Restriction_Dictionary.py')
update = os.getcwd()
shutil.copyfile(old, os.path.join(update, 'Restriction_Dictionary.old'))
places = update, os.path.split(Bio.Restriction.Restriction.__file__)[0]
print "\t\tCompilation of the new dictionary : OK.\
\n\t\tInstallation : No.\n\
\n You will find the newly created 'Restriction_Dictionary.py' file\
\n in the folder : \n\
\n\t%s\n\
\n Make a copy of 'Restriction_Dictionary.py' and place it with \
\n the other Restriction libraries.\n\
\n note : \
\n This folder should be :\n\
\n\t%s\n" % places
print '\n ' +'*'*78 + '\n'
return
def lastrebasefile(self) :
"""BD.lastrebasefile() -> None.
Check the emboss files are up to date and download them if they are not.
"""
embossnames = ('emboss_e', 'emboss_r', 'emboss_s')
#
# first check if we have the last update :
#
emboss_now = ['.'.join((x,LocalTime())) for x in embossnames]
update_needed = False
#dircontent = os.listdir(config.Rebase) # local database content
dircontent = os.listdir(os.getcwd())
base = os.getcwd() # added for biopython current directory
for name in emboss_now :
if name in dircontent :
pass
else :
update_needed = True
if not update_needed :
#
# nothing to be done
#
print '\n Using the files : %s'% ', '.join(emboss_now)
return tuple([open(os.path.join(base, n)) for n in emboss_now])
else :
#
# may be download the files.
#
print '\n The rebase files are more than one month old.\
\n Would you like to update them before proceeding?(y/n)'
r = raw_input(' update [n] >>> ')
if r in ['y', 'yes', 'Y', 'Yes'] :
updt = RebaseUpdate(self.rebase_pass, self.proxy)
updt.openRebase()
updt.getfiles()
updt.close()
print '\n Update complete. Creating the dictionaries.\n'
print '\n Using the files : %s'% ', '.join(emboss_now)
return tuple([open(os.path.join(base, n)) for n in emboss_now])
else :
#
# we will use the last files found without updating.
# But first we check we have some file to use.
#
class NotFoundError(Exception) :
pass
for name in embossnames :
try :
for file in dircontent :
if file.startswith(name) :
break
else :
pass
raise NotFoundError
except NotFoundError :
print "\nNo %s file found. Upgrade is impossible.\n"%name
sys.exit()
continue
pass
#
# now find the last file.
#
last = [0]
for file in dircontent :
fs = file.split('.')
try :
if fs[0] in embossnames and int(fs[1]) > int(last[-1]) :
if last[0] : last.append(fs[1])
else : last[0] = fs[1]
else :
continue
except ValueError :
continue
last.sort()
last = last[::-1]
if int(last[-1]) < 100 : last[0], last[-1] = last[-1], last[0]
for number in last :
files = [(name, name+'.%s'%number) for name in embossnames]
strmess = '\nLast EMBOSS files found are :\n'
try :
for name,file in files :
if os.path.isfile(os.path.join(base, file)) :
strmess += '\t%s.\n'%file
else :
raise ValueError
print strmess
emboss_e = open(os.path.join(base, 'emboss_e.%s'%number),'r')
emboss_r = open(os.path.join(base, 'emboss_r.%s'%number),'r')
emboss_s = open(os.path.join(base, 'emboss_s.%s'%number),'r')
return emboss_e, emboss_r, emboss_s
except ValueError :
continue
def parseline(self, line) :
line = [line[0]]+[line[1].upper()]+[int(i) for i in line[2:9]]+line[9:]
name = line[0]
site = line[1] # sequence of the recognition site
dna = DNA(site)
size = line[2] # size of the recognition site
#
# Calculate the overhang.
#
fst5 = line[5] # first site sense strand
fst3 = line[6] # first site antisense strand
scd5 = line[7] # second site sense strand
scd3 = line[8] # second site antisense strand
#
# the overhang is the difference between the two cut
#
ovhg1 = fst5 - fst3
ovhg2 = scd5 - scd3
#
        # 0 means 'do not cut' in rebase, so the negative numbers are
        # off by one; we add 1 to the negative positions for now.
        # We will deal with the record later.
#
if fst5 < 0 : fst5 += 1
if fst3 < 0 : fst3 += 1
if scd5 < 0 : scd5 += 1
if scd3 < 0 : scd3 += 1
if ovhg2 != 0 and ovhg1 != ovhg2 :
#
# different length of the overhang of the first and second cut
# it's a pain to deal with and at the moment it concerns only
# one enzyme which is not commercially available (HaeIV).
# So we don't deal with it but we check the progression
# of the affair.
# Should HaeIV become commercially available or other similar
# new enzymes be added, this might be modified.
#
print '\
\nWARNING : %s cut twice with different overhang length each time.\
\n\tUnable to deal with this behaviour. \
\n\tThis enzyme will not be included in the database. Sorry.' %name
print '\tChecking :',
raise OverhangError
if 0 <= fst5 <= size and 0 <= fst3 <= size :
#
# cut inside recognition site
#
if fst5 < fst3 :
#
# 5' overhang
#
ovhg1 = ovhgseq = site[fst5:fst3]
elif fst5 > fst3 :
#
# 3' overhang
#
ovhg1 = ovhgseq = site[fst3:fst5]
else :
#
# blunt
#
ovhg1 = ovhgseq = ''
for base in 'NRYWMSKHDBV' :
if base in ovhg1 :
#
# site and overhang degenerated
#
ovhgseq = ovhg1
if fst5 < fst3 : ovhg1 = - len(ovhg1)
else : ovhg1 = len(ovhg1)
break
else :
continue
elif 0 <= fst5 <= size :
#
# 5' cut inside the site 3' outside
#
if fst5 < fst3 :
#
# 3' cut after the site
#
ovhgseq = site[fst5:] + (fst3 - size) * 'N'
elif fst5 > fst3 :
#
# 3' cut before the site
#
ovhgseq = abs(fst3) * 'N' + site[:fst5]
else :
#
# blunt outside
#
ovhg1 = ovhgseq = ''
elif 0 <= fst3 <= size :
#
# 3' cut inside the site, 5' outside
#
if fst5 < fst3 :
#
# 5' cut before the site
#
ovhgseq = abs(fst5) * 'N' + site[:fst3]
elif fst5 > fst3 :
#
# 5' cut after the site
#
ovhgseq = site[fst3:] + (fst5 - size) * 'N'
else :
#
                # should not happen
#
raise ValueError, 'Error in #1'
elif fst3 < 0 and size < fst5 :
#
# 3' overhang. site is included.
#
ovhgseq = abs(fst3)*'N' + site + (fst5-size)*'N'
elif fst5 < 0 and size <fst3 :
#
# 5' overhang. site is included.
#
ovhgseq = abs(fst5)*'N' + site + (fst3-size)*'N'
else :
#
# 5' and 3' outside of the site
#
ovhgseq = 'N' * abs(ovhg1)
#
# Now line[5] to [8] are the location of the cut but we have to
# deal with the weird mathematics of biologists.
#
# EMBOSS sequence numbering give :
# DNA = 'a c g t A C G T'
# -1 1 2 3 4
#
# Biologists do not know about 0. Too much use of latin certainly.
#
# To compensate, we add 1 to the positions if they are negative.
# No need to modify 0 as it means no cut and will not been used.
# Positive numbers should be ok since our sequence starts 1.
#
# Moreover line[6] and line[8] represent cut on the reverse strand.
# They will be used for non palindromic sites and sre.finditer
# will detect the site in inverse orientation so we need to add the
# length of the site to compensate (+1 if they are negative).
#
for x in (5, 7) :
if line[x] < 0 : line[x] += 1
for x in (6, 8) :
if line[x] > 0 : line[x] -= size
elif line[x] < 0 : line[x] = line[x] - size + 1
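        # Worked example (numbers illustrative): for a 6 bp site with
        # line[6] == 5 the antisense cut becomes 5 - 6 == -1, and with
        # line[6] == -1 it becomes -1 - 6 + 1 == -6.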
#
# now is the site palindromic?
        # produce the regular expression which corresponds to the site.
        # The regex tag will be the name of the enzyme for palindromic
        # enzymes, and two tags for the others: the name for the sense
        # sequence and the name with '_as' appended for the antisense
        # sequence.
#
rg = ''
if is_palindrom(dna) :
line.append(True)
rg = ''.join(['(?P<', name, '>', regex(site.upper()), ')'])
else :
line.append(False)
sense = ''.join(['(?P<', name, '>', regex(site.upper()), ')'])
antisense = ''.join(['(?P<', name, '_as>',
regex(Antiparallel(dna)), ')'])
rg = sense + '|' + antisense
#
# exact frequency of the site. (ie freq(N) == 1, ...)
#
        f = [4.0/len(dna_alphabet[l]) for l in site.upper()]  # float division; 3-base codes (H, B, V, D) would truncate otherwise
freq = reduce(lambda x, y : x*y, f)
line.append(freq)
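        # Worked example: for the site 'GANTC', f == [4, 4, 1, 4, 4]
        # (N matches any base), so freq == 4*4*1*4*4 == 256, i.e. one
        # expected site every 256 bp of random sequence.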
#
        # Append the regex and ovhg1 last; they were not appended earlier
        # so as not to break the factory class. Simply too lazy to make
        # the changes there.
#
line.append(rg)
line.append(ovhg1)
line.append(ovhgseq)
return line
def removestart(self, file) :
#
# remove the heading of the file.
#
return [l for l in itertools.dropwhile(lambda l:l.startswith('#'),file)]
def getblock(self, file, index) :
#
# emboss_r.txt, separation between blocks is //
#
take = itertools.takewhile
block = [l for l in take(lambda l :not l.startswith('//'),file[index:])]
index += len(block)+1
return block, index
def get(self, block) :
#
# take what we want from the block.
        # Each block corresponds to one enzyme.
# block[0] => enzyme name
# block[3] => methylation (position and type)
# block[5] => suppliers (as a string of single letter)
#
bl3 = block[3].strip()
if not bl3 : bl3 = False # site is not methylable
return (block[0].strip(), bl3, block[5].strip())
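    # Illustrative emboss_r block layout (field values invented; only
    # block[0], block[3] and block[5] are read above):
    #   block[0] -> 'AatII'      enzyme name
    #   block[3] -> 'GACGT5m'    methylation info, empty if not methylable
    #   block[5] -> 'NM'         suppliers, one letter per supplier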
def information_mixer(self, file1, file2, file3) :
#
# Mix all the information from the 3 files and produce a coherent
# restriction record.
#
methfile = self.removestart(file1)
sitefile = self.removestart(file2)
supplier = self.removestart(file3)
i1, i2= 0, 0
try :
while True :
block, i1 = self.getblock(methfile, i1)
bl = self.get(block)
line = (sitefile[i2].strip()).split()
name = line[0]
if name == bl[0] :
line.append(bl[1]) # -> methylation
line.append(bl[2]) # -> suppliers
else :
bl = self.get(oldblock)
if line[0] == bl[0] :
line.append(bl[1])
line.append(bl[2])
i2 += 1
else : raise TypeError
oldblock = block
i2 += 1
try :
line = self.parseline(line)
except OverhangError : # overhang error
n = name # do not include the enzyme
if not bl[2] :
print 'Anyway, %s is not commercially available.\n' %n
else :
print 'Unfortunately, %s is commercially available.\n'%n
continue
if name in enzymedict :
#
# deal with TaqII and its two sites.
#
print '\nWARNING :',
print name, 'has two different sites.\n'
dna = DNA(line[1])
sense1 = regex(dna.tostring())
antisense1 = regex(Antiparallel(dna))
dna = DNA(enzymedict[line[0]][0])
sense2 = regex(dna.tostring())
antisense2 = regex(Antiparallel(dna))
sense = '(?P<'+line[0]+'>'+sense1+'|'+sense2+')'
antisense = '(?P<'+line[0]+'_as>'+antisense1+'|'+antisense2 + ')'
reg = sense + '|' + antisense
line[1] = line[1] + '|' + enzymedict[line[0]][0]
line[-1] = reg
#
# the data to produce the enzyme class are then stored in
# enzymedict.
#
enzymedict[name] = line[1:]
except IndexError :
pass
for i in supplier :
#
# construction of the list of suppliers.
#
t = i.strip().split(' ', 1)
suppliersdict[t[0]] = (t[1], [])
return
| [
"[email protected]"
] | |
af8a48a4a4040fc973260082647f18ee36556192 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02701/s370299645.py | e8108ca9322d746e5943dac88a3fad579c97de48 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | N = int(input())
S = [str(input()) for _ in range(N)]
S = set(S)
print(len(S)) | [
"[email protected]"
] | |
e955056ae8b1a40428c8a44ebd451318d77d7d9a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02823/s163186577.py | 42c07c5526acc3c86687a8b401d84e30f2355ab1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | n,a,b = map(int,input().split())
if abs(a-b)%2 == 0:
print(abs(a-b)//2)
else:
if a - 1 > n - b:
print((n-b+n-a+1)//2)
else:
print((a-1+b-1+1)//2) | [
"[email protected]"
] | |
078f44a27afc7d6a99a5ea269e9e276d20398e5a | 01b8b5f60d4feef40b8be75866b33a0c34c80c92 | /akikaproject/urls.py | 3fabb8bcd2a2ccf58243255775ad764d65b58ab5 | [] | no_license | laetitia123/akika-test | 26007782ab3997c860413f11eb993e5c64d00a19 | 2556df04e2c4b2b46492bbfbb419a17f3ed84a0f | refs/heads/master | 2023-01-06T15:22:25.807387 | 2020-11-11T13:24:33 | 2020-11-11T13:24:33 | 311,641,846 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,164 | py | """akikaproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth import views
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('akikaapp.urls')),
path('accounts/', include('registration.backends.hmac.urls')),
# path('accounts/', include('registration.backends.simple.urls')),
# path('accounts/', include('registration.backends.hmac.urls')),
# path('logout/', views.LogoutView.as_view(), {"next_page": '/'}),
]
| [
"[email protected]"
] | |
7f7deb8679912039a94789792fe3fdd6b065e6bf | 3c6aeb458a8bec0671c1d8be18331072ac97e05f | /ohsn/sentiment/__init__.py | 635f2bffcfc2bd0b4481aa32db5d50e2a01a1b4b | [] | no_license | wtgme/ohsn | d7b17ad179a789be2325e0923026a681e343a40c | 9c165d45eefa4058e7ed2c6bad348703e296362d | refs/heads/master | 2021-08-29T06:01:20.165839 | 2021-08-12T08:51:46 | 2021-08-12T08:51:46 | 44,922,360 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | # -*- coding: utf-8 -*-
"""
Created on 15:24, 14/04/17
@author: wt
"""
| [
"[email protected]"
] | |
8022f81f1c964930db4e3dd656af1c871f260f99 | d85f8b95e21523840d65517ab99baeb04a33e297 | /demo/settings.py | c6b817f3586d8efd959927c2955a83cb937cdde4 | [
"BSD-3-Clause"
] | permissive | gregplaysguitar/django-trolley | 4947bd26e3eea7a33dd138c05262a9f2a45d0bb3 | 187cbe883961b13f995998cc5fa80c6a879560a8 | refs/heads/master | 2021-01-21T12:07:53.829939 | 2014-05-20T22:37:48 | 2014-05-20T22:37:48 | 632,479 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,958 | py | # -*- coding: utf-8 -*-
# Django settings for demo project.
import os, sys, re
PROJECT_ROOT = os.path.dirname(globals()["__file__"])
for directory in [os.path.join(PROJECT_ROOT, dir) for dir in ('.', '..')]:
if not directory in sys.path:
sys.path.insert(0, directory)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, 'demo.db'),
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Pacific/Auckland'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '*u@at7*1xx=3^=nfnq^fgik(_=mmu6u1&ldx6^svem^dvjt=+1'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'demo.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_ROOT, 'templates')
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
#'django.contrib.staticfiles',
'django.contrib.admin',
'shop',
'cart',
'payment',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
CART_PAYMENT_BACKEND = 'cart.payment.manual'
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
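
# Port 1025 matches a local debugging SMTP server, e.g.:
#   python -m smtpd -n -c DebuggingServer localhost:1025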
| [
"[email protected]"
] | |
7006aeddebbec67a7e4f97e57b36acf2e9faa98e | 76dd191c092c836f0f72a364bda133bdb37c5cbc | /system/djangoapp/forms.py | 1778f78f69b8a044baa2c3a41b664ee5f6178c3f | [] | no_license | PreritBhandari/Django-Assignment-II- | 36619b337c5f6454e8729cb1ae3df2bfa400c243 | 808e9d47bbc7f78f81577a45356cb34839713ccb | refs/heads/master | 2022-11-25T09:07:07.423648 | 2020-07-29T11:47:12 | 2020-07-29T11:47:12 | 283,487,872 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,116 | py | from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from verified_email_field.forms import VerifiedEmailField
class register_form(forms.Form):
first_name = forms.CharField(max_length=30)
last_name = forms.CharField(max_length=150)
email = forms.EmailField()
password = forms.CharField(max_length=200, widget=forms.PasswordInput())
confirm_password = forms.CharField(max_length=200, widget=forms.PasswordInput())
    def clean_email(self):
        # the form has no 'username' field, so uniqueness is checked on the email
        if User.objects.filter(username=self.cleaned_data['email']).exists():
            raise forms.ValidationError("This email is already taken !")
        return self.cleaned_data['email']
    def clean(self):
        cleaned_data = super().clean()
        password = cleaned_data.get('password')
        confirm_password = cleaned_data.get('confirm_password')
        if password and confirm_password and password != confirm_password:
            raise forms.ValidationError("Passwords do not match !")
        return cleaned_data
class login_form(forms.Form):
email = forms.EmailField()
password = forms.CharField(max_length=128, widget=forms.PasswordInput)
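
# Minimal, hypothetical view-side usage of login_form (the real view lives
# elsewhere in this project):
#   form = login_form(request.POST or None)
#   if form.is_valid():
#       email = form.cleaned_data['email']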
| [
"[email protected]"
] | |
436542025ea7dafd39e2fcf4fe88ff79492c88df | 086aa68683c71d2e4b7b584eda4536f5cc51c27c | /examples-tk499-lvgl/exams/widgets_ex/tabview/__init__.py | a527b46c199edeeb8c53d5f206a677cf7e300027 | [
"MIT"
] | permissive | bhgv/micropython-tk499 | 1617529d9f4c74d31f844d4eb440e3c670eb105b | 963d0546dda980aa8502c5be29f6aee3d5cd805e | refs/heads/master | 2023-07-18T07:25:36.592091 | 2021-08-28T20:37:31 | 2021-08-28T20:37:31 | 397,341,868 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | import lv_utils
import lvgl as lv
btn = lv.btn(lv.scr_act())
btn.set_height(lv.SIZE.CONTENT)
btn.align(lv.ALIGN.TOP_LEFT, 30, 30)
def foo(e):
if e.get_code() == lv.EVENT.CLICKED:
lv_utils.ex_new_page("exams/widgets_ex/tabview/p1.py")
btn.add_event_cb(foo, lv.EVENT.ALL, None)
label = lv.label(btn)
label.set_text("p1.py")
btn = lv.btn(lv.scr_act())
btn.set_height(lv.SIZE.CONTENT)
btn.align(lv.ALIGN.TOP_LEFT, 30, 90)
def foo(e):
if e.get_code() == lv.EVENT.CLICKED:
lv_utils.ex_new_page("exams/widgets_ex/tabview/p2.py")
btn.add_event_cb(foo, lv.EVENT.ALL, None)
label = lv.label(btn)
label.set_text("p2.py")
| [
"[email protected]"
] | |
f3b7e43803f64f2480c46438d8cdadf8981c4c1f | eb3683f9127befb9ef96d8eb801206cf7b84d6a7 | /stypy/sgmc/sgmc_cache/site_packages/numpy/distutils/fcompiler/vast.py | 096a8b05b41a728f84345dbdae9c3c04a579f8bf | [] | no_license | ComputationalReflection/stypy | 61ec27333a12f76ac055d13f8969d3e0de172f88 | be66ae846c82ac40ba7b48f9880d6e3990681a5b | refs/heads/master | 2021-05-13T18:24:29.005894 | 2018-06-14T15:42:50 | 2018-06-14T15:42:50 | 116,855,812 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49,485 | py |
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: from __future__ import division, absolute_import, print_function
2:
3: import os
4:
5: from numpy.distutils.fcompiler.gnu import GnuFCompiler
6:
7: compilers = ['VastFCompiler']
8:
9: class VastFCompiler(GnuFCompiler):
10: compiler_type = 'vast'
11: compiler_aliases = ()
12: description = 'Pacific-Sierra Research Fortran 90 Compiler'
13: version_pattern = r'\s*Pacific-Sierra Research vf90 '\
14: '(Personal|Professional)\s+(?P<version>[^\s]*)'
15:
16: # VAST f90 does not support -o with -c. So, object files are created
17: # to the current directory and then moved to build directory
18: object_switch = ' && function _mvfile { mv -v `basename $1` $1 ; } && _mvfile '
19:
20: executables = {
21: 'version_cmd' : ["vf90", "-v"],
22: 'compiler_f77' : ["g77"],
23: 'compiler_fix' : ["f90", "-Wv,-ya"],
24: 'compiler_f90' : ["f90"],
25: 'linker_so' : ["<F90>"],
26: 'archiver' : ["ar", "-cr"],
27: 'ranlib' : ["ranlib"]
28: }
29: module_dir_switch = None #XXX Fix me
30: module_include_switch = None #XXX Fix me
31:
32: def find_executables(self):
33: pass
34:
35: def get_version_cmd(self):
36: f90 = self.compiler_f90[0]
37: d, b = os.path.split(f90)
38: vf90 = os.path.join(d, 'v'+b)
39: return vf90
40:
41: def get_flags_arch(self):
42: vast_version = self.get_version()
43: gnu = GnuFCompiler()
44: gnu.customize(None)
45: self.version = gnu.get_version()
46: opt = GnuFCompiler.get_flags_arch(self)
47: self.version = vast_version
48: return opt
49:
50: if __name__ == '__main__':
51: from distutils import log
52: log.set_verbosity(2)
53: from numpy.distutils.fcompiler import new_fcompiler
54: compiler = new_fcompiler(compiler='vast')
55: compiler.customize()
56: print(compiler.get_version())
57:
"""
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
# ################# Begin of the type inference program ##################
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 3, 0))
# 'import os' statement (line 3)
import os
import_module(stypy.reporting.localization.Localization(__file__, 3, 0), 'os', os, module_type_store)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 5, 0))
# 'from numpy.distutils.fcompiler.gnu import GnuFCompiler' statement (line 5)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/numpy/distutils/fcompiler/')
import_63352 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 5, 0), 'numpy.distutils.fcompiler.gnu')
if (type(import_63352) is not StypyTypeError):
if (import_63352 != 'pyd_module'):
__import__(import_63352)
sys_modules_63353 = sys.modules[import_63352]
import_from_module(stypy.reporting.localization.Localization(__file__, 5, 0), 'numpy.distutils.fcompiler.gnu', sys_modules_63353.module_type_store, module_type_store, ['GnuFCompiler'])
nest_module(stypy.reporting.localization.Localization(__file__, 5, 0), __file__, sys_modules_63353, sys_modules_63353.module_type_store, module_type_store)
else:
from numpy.distutils.fcompiler.gnu import GnuFCompiler
import_from_module(stypy.reporting.localization.Localization(__file__, 5, 0), 'numpy.distutils.fcompiler.gnu', None, module_type_store, ['GnuFCompiler'], [GnuFCompiler])
else:
# Assigning a type to the variable 'numpy.distutils.fcompiler.gnu' (line 5)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 5, 0), 'numpy.distutils.fcompiler.gnu', import_63352)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/numpy/distutils/fcompiler/')
# Assigning a List to a Name (line 7):
# Assigning a List to a Name (line 7):
# Obtaining an instance of the builtin type 'list' (line 7)
list_63354 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 7, 12), 'list')
# Adding type elements to the builtin type 'list' instance (line 7)
# Adding element type (line 7)
str_63355 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 7, 13), 'str', 'VastFCompiler')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 7, 12), list_63354, str_63355)
# Assigning a type to the variable 'compilers' (line 7)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 7, 0), 'compilers', list_63354)
# Declaration of the 'VastFCompiler' class
# Getting the type of 'GnuFCompiler' (line 9)
GnuFCompiler_63356 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 20), 'GnuFCompiler')
class VastFCompiler(GnuFCompiler_63356, ):
# Assigning a Str to a Name (line 10):
# Assigning a Tuple to a Name (line 11):
# Assigning a Str to a Name (line 12):
# Assigning a Str to a Name (line 13):
# Assigning a Str to a Name (line 18):
# Assigning a Dict to a Name (line 20):
# Assigning a Name to a Name (line 29):
# Assigning a Name to a Name (line 30):
@norecursion
def find_executables(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'find_executables'
module_type_store = module_type_store.open_function_context('find_executables', 32, 4, False)
# Assigning a type to the variable 'self' (line 33)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 33, 4), 'self', type_of_self)
# Passed parameters checking function
VastFCompiler.find_executables.__dict__.__setitem__('stypy_localization', localization)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_type_of_self', type_of_self)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_type_store', module_type_store)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_function_name', 'VastFCompiler.find_executables')
VastFCompiler.find_executables.__dict__.__setitem__('stypy_param_names_list', [])
VastFCompiler.find_executables.__dict__.__setitem__('stypy_varargs_param_name', None)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_kwargs_param_name', None)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_call_defaults', defaults)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_call_varargs', varargs)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_call_kwargs', kwargs)
VastFCompiler.find_executables.__dict__.__setitem__('stypy_declared_arg_number', 1)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'VastFCompiler.find_executables', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'find_executables', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'find_executables(...)' code ##################
pass
# ################# End of 'find_executables(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'find_executables' in the type store
# Getting the type of 'stypy_return_type' (line 32)
stypy_return_type_63357 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 32, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_63357)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'find_executables'
return stypy_return_type_63357
@norecursion
def get_version_cmd(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'get_version_cmd'
module_type_store = module_type_store.open_function_context('get_version_cmd', 35, 4, False)
# Assigning a type to the variable 'self' (line 36)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 36, 4), 'self', type_of_self)
# Passed parameters checking function
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_localization', localization)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_type_of_self', type_of_self)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_type_store', module_type_store)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_function_name', 'VastFCompiler.get_version_cmd')
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_param_names_list', [])
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_varargs_param_name', None)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_kwargs_param_name', None)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_call_defaults', defaults)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_call_varargs', varargs)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_call_kwargs', kwargs)
VastFCompiler.get_version_cmd.__dict__.__setitem__('stypy_declared_arg_number', 1)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'VastFCompiler.get_version_cmd', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'get_version_cmd', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'get_version_cmd(...)' code ##################
# Assigning a Subscript to a Name (line 36):
# Assigning a Subscript to a Name (line 36):
# Obtaining the type of the subscript
int_63358 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 32), 'int')
# Getting the type of 'self' (line 36)
self_63359 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 14), 'self')
# Obtaining the member 'compiler_f90' of a type (line 36)
compiler_f90_63360 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 36, 14), self_63359, 'compiler_f90')
# Obtaining the member '__getitem__' of a type (line 36)
getitem___63361 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 36, 14), compiler_f90_63360, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 36)
subscript_call_result_63362 = invoke(stypy.reporting.localization.Localization(__file__, 36, 14), getitem___63361, int_63358)
# Assigning a type to the variable 'f90' (line 36)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 36, 8), 'f90', subscript_call_result_63362)
# Assigning a Call to a Tuple (line 37):
# Assigning a Call to a Name:
# Call to split(...): (line 37)
# Processing the call arguments (line 37)
# Getting the type of 'f90' (line 37)
f90_63366 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 29), 'f90', False)
# Processing the call keyword arguments (line 37)
kwargs_63367 = {}
# Getting the type of 'os' (line 37)
os_63363 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 15), 'os', False)
# Obtaining the member 'path' of a type (line 37)
path_63364 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 37, 15), os_63363, 'path')
# Obtaining the member 'split' of a type (line 37)
split_63365 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 37, 15), path_63364, 'split')
# Calling split(args, kwargs) (line 37)
split_call_result_63368 = invoke(stypy.reporting.localization.Localization(__file__, 37, 15), split_63365, *[f90_63366], **kwargs_63367)
# Assigning a type to the variable 'call_assignment_63349' (line 37)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63349', split_call_result_63368)
# Assigning a Call to a Name (line 37):
# Call to __getitem__(...):
# Processing the call arguments
int_63371 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 37, 8), 'int')
# Processing the call keyword arguments
kwargs_63372 = {}
# Getting the type of 'call_assignment_63349' (line 37)
call_assignment_63349_63369 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63349', False)
# Obtaining the member '__getitem__' of a type (line 37)
getitem___63370 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 37, 8), call_assignment_63349_63369, '__getitem__')
# Calling __getitem__(args, kwargs)
getitem___call_result_63373 = invoke(stypy.reporting.localization.Localization(__file__, 0, 0), getitem___63370, *[int_63371], **kwargs_63372)
# Assigning a type to the variable 'call_assignment_63350' (line 37)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63350', getitem___call_result_63373)
# Assigning a Name to a Name (line 37):
# Getting the type of 'call_assignment_63350' (line 37)
call_assignment_63350_63374 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63350')
# Assigning a type to the variable 'd' (line 37)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'd', call_assignment_63350_63374)
# Assigning a Call to a Name (line 37):
# Call to __getitem__(...):
# Processing the call arguments
int_63377 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 37, 8), 'int')
# Processing the call keyword arguments
kwargs_63378 = {}
# Getting the type of 'call_assignment_63349' (line 37)
call_assignment_63349_63375 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63349', False)
# Obtaining the member '__getitem__' of a type (line 37)
getitem___63376 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 37, 8), call_assignment_63349_63375, '__getitem__')
# Calling __getitem__(args, kwargs)
getitem___call_result_63379 = invoke(stypy.reporting.localization.Localization(__file__, 0, 0), getitem___63376, *[int_63377], **kwargs_63378)
# Assigning a type to the variable 'call_assignment_63351' (line 37)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63351', getitem___call_result_63379)
# Assigning a Name to a Name (line 37):
# Getting the type of 'call_assignment_63351' (line 37)
call_assignment_63351_63380 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 8), 'call_assignment_63351')
# Assigning a type to the variable 'b' (line 37)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 37, 11), 'b', call_assignment_63351_63380)
# Assigning a Call to a Name (line 38):
# Assigning a Call to a Name (line 38):
# Call to join(...): (line 38)
# Processing the call arguments (line 38)
# Getting the type of 'd' (line 38)
d_63384 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 28), 'd', False)
str_63385 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 38, 31), 'str', 'v')
# Getting the type of 'b' (line 38)
b_63386 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 35), 'b', False)
# Applying the binary operator '+' (line 38)
result_add_63387 = python_operator(stypy.reporting.localization.Localization(__file__, 38, 31), '+', str_63385, b_63386)
# Processing the call keyword arguments (line 38)
kwargs_63388 = {}
# Getting the type of 'os' (line 38)
os_63381 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 15), 'os', False)
# Obtaining the member 'path' of a type (line 38)
path_63382 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 38, 15), os_63381, 'path')
# Obtaining the member 'join' of a type (line 38)
join_63383 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 38, 15), path_63382, 'join')
# Calling join(args, kwargs) (line 38)
join_call_result_63389 = invoke(stypy.reporting.localization.Localization(__file__, 38, 15), join_63383, *[d_63384, result_add_63387], **kwargs_63388)
# Assigning a type to the variable 'vf90' (line 38)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 38, 8), 'vf90', join_call_result_63389)
# Getting the type of 'vf90' (line 39)
vf90_63390 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 39, 15), 'vf90')
# Assigning a type to the variable 'stypy_return_type' (line 39)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 39, 8), 'stypy_return_type', vf90_63390)
# ################# End of 'get_version_cmd(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'get_version_cmd' in the type store
# Getting the type of 'stypy_return_type' (line 35)
stypy_return_type_63391 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_63391)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'get_version_cmd'
return stypy_return_type_63391
@norecursion
def get_flags_arch(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'get_flags_arch'
module_type_store = module_type_store.open_function_context('get_flags_arch', 41, 4, False)
# Assigning a type to the variable 'self' (line 42)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 42, 4), 'self', type_of_self)
# Passed parameters checking function
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_localization', localization)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_type_of_self', type_of_self)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_type_store', module_type_store)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_function_name', 'VastFCompiler.get_flags_arch')
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_param_names_list', [])
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_varargs_param_name', None)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_kwargs_param_name', None)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_call_defaults', defaults)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_call_varargs', varargs)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_call_kwargs', kwargs)
VastFCompiler.get_flags_arch.__dict__.__setitem__('stypy_declared_arg_number', 1)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'VastFCompiler.get_flags_arch', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'get_flags_arch', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'get_flags_arch(...)' code ##################
# Assigning a Call to a Name (line 42):
# Assigning a Call to a Name (line 42):
# Call to get_version(...): (line 42)
# Processing the call keyword arguments (line 42)
kwargs_63394 = {}
# Getting the type of 'self' (line 42)
self_63392 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 42, 23), 'self', False)
# Obtaining the member 'get_version' of a type (line 42)
get_version_63393 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 42, 23), self_63392, 'get_version')
# Calling get_version(args, kwargs) (line 42)
get_version_call_result_63395 = invoke(stypy.reporting.localization.Localization(__file__, 42, 23), get_version_63393, *[], **kwargs_63394)
# Assigning a type to the variable 'vast_version' (line 42)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 42, 8), 'vast_version', get_version_call_result_63395)
# Assigning a Call to a Name (line 43):
# Assigning a Call to a Name (line 43):
# Call to GnuFCompiler(...): (line 43)
# Processing the call keyword arguments (line 43)
kwargs_63397 = {}
# Getting the type of 'GnuFCompiler' (line 43)
GnuFCompiler_63396 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 43, 14), 'GnuFCompiler', False)
# Calling GnuFCompiler(args, kwargs) (line 43)
GnuFCompiler_call_result_63398 = invoke(stypy.reporting.localization.Localization(__file__, 43, 14), GnuFCompiler_63396, *[], **kwargs_63397)
# Assigning a type to the variable 'gnu' (line 43)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 43, 8), 'gnu', GnuFCompiler_call_result_63398)
# Call to customize(...): (line 44)
# Processing the call arguments (line 44)
# Getting the type of 'None' (line 44)
None_63401 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 22), 'None', False)
# Processing the call keyword arguments (line 44)
kwargs_63402 = {}
# Getting the type of 'gnu' (line 44)
gnu_63399 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 8), 'gnu', False)
# Obtaining the member 'customize' of a type (line 44)
customize_63400 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 44, 8), gnu_63399, 'customize')
# Calling customize(args, kwargs) (line 44)
customize_call_result_63403 = invoke(stypy.reporting.localization.Localization(__file__, 44, 8), customize_63400, *[None_63401], **kwargs_63402)
# Assigning a Call to a Attribute (line 45):
# Assigning a Call to a Attribute (line 45):
# Call to get_version(...): (line 45)
# Processing the call keyword arguments (line 45)
kwargs_63406 = {}
# Getting the type of 'gnu' (line 45)
gnu_63404 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 45, 23), 'gnu', False)
# Obtaining the member 'get_version' of a type (line 45)
get_version_63405 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 45, 23), gnu_63404, 'get_version')
# Calling get_version(args, kwargs) (line 45)
get_version_call_result_63407 = invoke(stypy.reporting.localization.Localization(__file__, 45, 23), get_version_63405, *[], **kwargs_63406)
# Getting the type of 'self' (line 45)
self_63408 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 45, 8), 'self')
# Setting the type of the member 'version' of a type (line 45)
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 45, 8), self_63408, 'version', get_version_call_result_63407)
# Assigning a Call to a Name (line 46):
# Assigning a Call to a Name (line 46):
# Call to get_flags_arch(...): (line 46)
# Processing the call arguments (line 46)
# Getting the type of 'self' (line 46)
self_63411 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 46, 42), 'self', False)
# Processing the call keyword arguments (line 46)
kwargs_63412 = {}
# Getting the type of 'GnuFCompiler' (line 46)
GnuFCompiler_63409 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 46, 14), 'GnuFCompiler', False)
# Obtaining the member 'get_flags_arch' of a type (line 46)
get_flags_arch_63410 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 46, 14), GnuFCompiler_63409, 'get_flags_arch')
# Calling get_flags_arch(args, kwargs) (line 46)
get_flags_arch_call_result_63413 = invoke(stypy.reporting.localization.Localization(__file__, 46, 14), get_flags_arch_63410, *[self_63411], **kwargs_63412)
# Assigning a type to the variable 'opt' (line 46)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 46, 8), 'opt', get_flags_arch_call_result_63413)
# Assigning a Name to a Attribute (line 47):
# Assigning a Name to a Attribute (line 47):
# Getting the type of 'vast_version' (line 47)
vast_version_63414 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 23), 'vast_version')
# Getting the type of 'self' (line 47)
self_63415 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 8), 'self')
# Setting the type of the member 'version' of a type (line 47)
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 47, 8), self_63415, 'version', vast_version_63414)
# Getting the type of 'opt' (line 48)
opt_63416 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 48, 15), 'opt')
# Assigning a type to the variable 'stypy_return_type' (line 48)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 48, 8), 'stypy_return_type', opt_63416)
# ################# End of 'get_flags_arch(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'get_flags_arch' in the type store
# Getting the type of 'stypy_return_type' (line 41)
stypy_return_type_63417 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 41, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_63417)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'get_flags_arch'
return stypy_return_type_63417
@norecursion
def __init__(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function '__init__'
module_type_store = module_type_store.open_function_context('__init__', 9, 0, False)
# Assigning a type to the variable 'self' (line 10)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 10, 0), 'self', type_of_self)
# Passed parameters checking function
arguments = process_argument_values(localization, type_of_self, module_type_store, 'VastFCompiler.__init__', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return
# Initialize method data
init_call_information(module_type_store, '__init__', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of '__init__(...)' code ##################
pass
# ################# End of '__init__(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Assigning a type to the variable 'VastFCompiler' (line 9)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 9, 0), 'VastFCompiler', VastFCompiler)
# Assigning a Str to a Name (line 10):
str_63418 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 10, 20), 'str', 'vast')
# Getting the type of 'VastFCompiler'
VastFCompiler_63419 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'compiler_type' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63419, 'compiler_type', str_63418)
# Assigning a Tuple to a Name (line 11):
# Obtaining an instance of the builtin type 'tuple' (line 11)
tuple_63420 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 11, 23), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 11)
# Getting the type of 'VastFCompiler'
VastFCompiler_63421 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'compiler_aliases' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63421, 'compiler_aliases', tuple_63420)
# Assigning a Str to a Name (line 12):
str_63422 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 12, 18), 'str', 'Pacific-Sierra Research Fortran 90 Compiler')
# Getting the type of 'VastFCompiler'
VastFCompiler_63423 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'description' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63423, 'description', str_63422)
# Assigning a Str to a Name (line 13):
str_63424 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 13, 22), 'str', '\\s*Pacific-Sierra Research vf90 (Personal|Professional)\\s+(?P<version>[^\\s]*)')
# Getting the type of 'VastFCompiler'
VastFCompiler_63425 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'version_pattern' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63425, 'version_pattern', str_63424)
# Assigning a Str to a Name (line 18):
str_63426 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 18, 20), 'str', ' && function _mvfile { mv -v `basename $1` $1 ; } && _mvfile ')
# Getting the type of 'VastFCompiler'
VastFCompiler_63427 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'object_switch' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63427, 'object_switch', str_63426)
# Assigning a Dict to a Name (line 20):
# Obtaining an instance of the builtin type 'dict' (line 20)
dict_63428 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 18), 'dict')
# Adding type elements to the builtin type 'dict' instance (line 20)
# Adding element type (key, value) (line 20)
str_63429 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 8), 'str', 'version_cmd')
# Obtaining an instance of the builtin type 'list' (line 21)
list_63430 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 21)
# Adding element type (line 21)
str_63431 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 26), 'str', 'vf90')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 21, 25), list_63430, str_63431)
# Adding element type (line 21)
str_63432 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 34), 'str', '-v')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 21, 25), list_63430, str_63432)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63429, list_63430))
# Adding element type (key, value) (line 20)
str_63433 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 8), 'str', 'compiler_f77')
# Obtaining an instance of the builtin type 'list' (line 22)
list_63434 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 22)
# Adding element type (line 22)
str_63435 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 26), 'str', 'g77')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 25), list_63434, str_63435)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63433, list_63434))
# Adding element type (key, value) (line 20)
str_63436 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 8), 'str', 'compiler_fix')
# Obtaining an instance of the builtin type 'list' (line 23)
list_63437 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 23)
# Adding element type (line 23)
str_63438 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 26), 'str', 'f90')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 23, 25), list_63437, str_63438)
# Adding element type (line 23)
str_63439 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 33), 'str', '-Wv,-ya')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 23, 25), list_63437, str_63439)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63436, list_63437))
# Adding element type (key, value) (line 20)
str_63440 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 8), 'str', 'compiler_f90')
# Obtaining an instance of the builtin type 'list' (line 24)
list_63441 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 24)
# Adding element type (line 24)
str_63442 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 26), 'str', 'f90')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 24, 25), list_63441, str_63442)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63440, list_63441))
# Adding element type (key, value) (line 20)
str_63443 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 8), 'str', 'linker_so')
# Obtaining an instance of the builtin type 'list' (line 25)
list_63444 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 25)
# Adding element type (line 25)
str_63445 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 26), 'str', '<F90>')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 25, 25), list_63444, str_63445)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63443, list_63444))
# Adding element type (key, value) (line 20)
str_63446 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 8), 'str', 'archiver')
# Obtaining an instance of the builtin type 'list' (line 26)
list_63447 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 26)
# Adding element type (line 26)
str_63448 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 26), 'str', 'ar')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 26, 25), list_63447, str_63448)
# Adding element type (line 26)
str_63449 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 32), 'str', '-cr')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 26, 25), list_63447, str_63449)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63446, list_63447))
# Adding element type (key, value) (line 20)
str_63450 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 27, 8), 'str', 'ranlib')
# Obtaining an instance of the builtin type 'list' (line 27)
list_63451 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 27, 25), 'list')
# Adding type elements to the builtin type 'list' instance (line 27)
# Adding element type (line 27)
str_63452 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 27, 26), 'str', 'ranlib')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 27, 25), list_63451, str_63452)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 18), dict_63428, (str_63450, list_63451))
# Getting the type of 'VastFCompiler'
VastFCompiler_63453 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'executables' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63453, 'executables', dict_63428)
# Assigning a Name to a Name (line 29):
# Getting the type of 'None' (line 29)
None_63454 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 24), 'None')
# Getting the type of 'VastFCompiler'
VastFCompiler_63455 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'module_dir_switch' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63455, 'module_dir_switch', None_63454)
# Assigning a Name to a Name (line 30):
# Getting the type of 'None' (line 30)
None_63456 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 30, 28), 'None')
# Getting the type of 'VastFCompiler'
VastFCompiler_63457 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'VastFCompiler')
# Setting the type of the member 'module_include_switch' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), VastFCompiler_63457, 'module_include_switch', None_63456)
if (__name__ == '__main__'):
    stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 51, 4))
    # 'from distutils import log' statement (line 51)
    from distutils import log
    import_from_module(stypy.reporting.localization.Localization(__file__, 51, 4), 'distutils', None, module_type_store, ['log'], [log])
    # Call to set_verbosity(...): (line 52)
    # Processing the call arguments (line 52)
    int_63460 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 52, 22), 'int')
    # Processing the call keyword arguments (line 52)
    kwargs_63461 = {}
    # Getting the type of 'log' (line 52)
    log_63458 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 4), 'log', False)
    # Obtaining the member 'set_verbosity' of a type (line 52)
    set_verbosity_63459 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 52, 4), log_63458, 'set_verbosity')
    # Calling set_verbosity(args, kwargs) (line 52)
    set_verbosity_call_result_63462 = invoke(stypy.reporting.localization.Localization(__file__, 52, 4), set_verbosity_63459, *[int_63460], **kwargs_63461)
    stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 53, 4))
    # 'from numpy.distutils.fcompiler import new_fcompiler' statement (line 53)
    update_path_to_current_file_folder('C:/Python27/lib/site-packages/numpy/distutils/fcompiler/')
    import_63463 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 53, 4), 'numpy.distutils.fcompiler')
    if (type(import_63463) is not StypyTypeError):
        if (import_63463 != 'pyd_module'):
            __import__(import_63463)
            sys_modules_63464 = sys.modules[import_63463]
            import_from_module(stypy.reporting.localization.Localization(__file__, 53, 4), 'numpy.distutils.fcompiler', sys_modules_63464.module_type_store, module_type_store, ['new_fcompiler'])
            nest_module(stypy.reporting.localization.Localization(__file__, 53, 4), __file__, sys_modules_63464, sys_modules_63464.module_type_store, module_type_store)
        else:
            from numpy.distutils.fcompiler import new_fcompiler
            import_from_module(stypy.reporting.localization.Localization(__file__, 53, 4), 'numpy.distutils.fcompiler', None, module_type_store, ['new_fcompiler'], [new_fcompiler])
    else:
        # Assigning a type to the variable 'numpy.distutils.fcompiler' (line 53)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 53, 4), 'numpy.distutils.fcompiler', import_63463)
    remove_current_file_folder_from_path('C:/Python27/lib/site-packages/numpy/distutils/fcompiler/')
    # Assigning a Call to a Name (line 54):
    # Assigning a Call to a Name (line 54):
    # Call to new_fcompiler(...): (line 54)
    # Processing the call keyword arguments (line 54)
    str_63466 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 54, 38), 'str', 'vast')
    keyword_63467 = str_63466
    kwargs_63468 = {'compiler': keyword_63467}
    # Getting the type of 'new_fcompiler' (line 54)
    new_fcompiler_63465 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 54, 15), 'new_fcompiler', False)
    # Calling new_fcompiler(args, kwargs) (line 54)
    new_fcompiler_call_result_63469 = invoke(stypy.reporting.localization.Localization(__file__, 54, 15), new_fcompiler_63465, *[], **kwargs_63468)
    # Assigning a type to the variable 'compiler' (line 54)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 54, 4), 'compiler', new_fcompiler_call_result_63469)
    # Call to customize(...): (line 55)
    # Processing the call keyword arguments (line 55)
    kwargs_63472 = {}
    # Getting the type of 'compiler' (line 55)
    compiler_63470 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 55, 4), 'compiler', False)
    # Obtaining the member 'customize' of a type (line 55)
    customize_63471 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 55, 4), compiler_63470, 'customize')
    # Calling customize(args, kwargs) (line 55)
    customize_call_result_63473 = invoke(stypy.reporting.localization.Localization(__file__, 55, 4), customize_63471, *[], **kwargs_63472)
    # Call to print(...): (line 56)
    # Processing the call arguments (line 56)
    # Call to get_version(...): (line 56)
    # Processing the call keyword arguments (line 56)
    kwargs_63477 = {}
    # Getting the type of 'compiler' (line 56)
    compiler_63475 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 56, 10), 'compiler', False)
    # Obtaining the member 'get_version' of a type (line 56)
    get_version_63476 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 56, 10), compiler_63475, 'get_version')
    # Calling get_version(args, kwargs) (line 56)
    get_version_call_result_63478 = invoke(stypy.reporting.localization.Localization(__file__, 56, 10), get_version_63476, *[], **kwargs_63477)
    # Processing the call keyword arguments (line 56)
    kwargs_63479 = {}
    # Getting the type of 'print' (line 56)
    print_63474 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 56, 4), 'print', False)
    # Calling print(args, kwargs) (line 56)
    print_call_result_63480 = invoke(stypy.reporting.localization.Localization(__file__, 56, 4), print_63474, *[get_version_call_result_63478], **kwargs_63479)
# ################# End of the type inference program ##################
module_errors = stypy.errors.type_error.StypyTypeError.get_error_msgs()
module_warnings = stypy.errors.type_warning.TypeWarning.get_warning_msgs()
| [
"[email protected]"
] | |
8dd85d17b5416331df15e80d50e3db5be325686a | 08153bc5546d434cdc2c1574e024c3b8edab69a3 | /thesis/scripts/disp_performance.py | fda1121e9383c87fd48365df586bb1e58aedc2c1 | [] | no_license | maxnoe/phd_thesis | 57c818296e6e0c43d1231116c5131f9024c9304d | a68b621cc8b658aa858342a4cfdaed68c3823257 | refs/heads/master | 2022-11-30T06:11:22.033077 | 2019-03-04T14:02:45 | 2020-08-17T12:12:49 | 288,170,864 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | from argparse import ArgumentParser
import re
parser = ArgumentParser()
parser.add_argument('log_file')
parser.add_argument('output_base')
args = parser.parse_args()
with open(args.log_file) as f:
    log = f.read()

acc = re.search(r'Mean accuracy from CV: (.*)', log).groups()[0]
with open(args.output_base + '_accuracy.tex', 'w') as f:
    f.write(r'\num{' + acc.replace('±', r'\pm') + '}\n')

r2 = re.search(r'score from CV: (.*)', log).groups()[0]
with open(args.output_base + '_r2.tex', 'w') as f:
    f.write(r'\num{' + r2.replace('±', r'\pm') + '}\n')
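
# Hypothetical end-to-end example (file names and log lines assumed, not part
# of the original script): given a log containing
#   Mean accuracy from CV: 0.87 ± 0.01
#   r2 score from CV: 0.83 ± 0.02
# running
#   python disp_performance.py train.log build/disp
# writes build/disp_accuracy.tex and build/disp_r2.tex containing
#   \num{0.87 \pm 0.01}   and   \num{0.83 \pm 0.02}
# ready to be \input into the thesis.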
| [
"[email protected]"
] | |
f0aa91eba8c5568951b8f1fb0d90ecf0b928734d | d01670aa5bddb47dc414bf01921155610e2a5070 | /leetcode/053_maximumsubarray.py | 1e558d7c790472f0f0b466aec2d5417f103da43c | [] | no_license | hwillmott/csfundamentals | 14c7e4253b581cef7046ca035bda038c24a52613 | 832f6a8c0deb0569d3fe0dc03e4564c2d850f067 | refs/heads/master | 2020-08-01T12:27:01.914391 | 2020-03-26T16:47:35 | 2020-03-26T16:47:35 | 73,576,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | class Solution(object):
    def maxSubArray(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        if nums is None or len(nums) == 0:
            return 0
        last = nums[0]
        m = last
        for n in nums[1:]:
            if last > 0:
                last = last + n
            else:
                last = n
            if last > m:
                m = last
        return m
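
# Minimal self-check added for illustration (not part of the original LeetCode
# stub). The method is Kadane's algorithm: extend the running sum while it is
# positive, otherwise restart it at the current element.
if __name__ == "__main__":
    s = Solution()
    assert s.maxSubArray([-2, 1, -3, 4, -1, 2, 1, -5, 4]) == 6  # [4, -1, 2, 1]
    assert s.maxSubArray([-3, -1, -2]) == -1  # all-negative input
    assert s.maxSubArray([]) == 0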
| [
"[email protected]"
] | |
f15582da123ab9fa3195f0c76fc7e5ee2568ffdf | 2904bba948c795eb0075e6dfb25d7bec93d893f1 | /mcb_twitter/tweet_mcb/models.py | 32f9a056c063bdad21eaf3e3ddbcfb6e6c47b991 | [] | no_license | raprasad/mcb-tweet | 8cd61766ea0365dff96104c72327611718051d95 | 3117b183a9b619f0bb4f3552c7c954a44728f177 | refs/heads/master | 2016-09-06T18:18:41.182394 | 2014-03-26T16:29:42 | 2014-03-26T16:29:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,196 | py | from django.db import models
from django.core.urlresolvers import reverse

from mcb_website.events.models import CalendarEvent
from mcb_twitter.tweet_mcb.tweeter import assemble_full_tweet

from datetime import datetime

TWEET_STATUS_PK_AWAITING_APPROVAL = 1
TWEET_STATUS_PK_APPROVED = 2
TWEET_STATUS_PK_REJECTED = 3
TWEET_STATUS_PK_TWEETED = 4

TWEET_GROUP_NAME = 'TWEET_GROUP'


class TweetStatus(models.Model):
    name = models.CharField(max_length=200, unique=True)
    sort_key = models.IntegerField()
    description = models.TextField(blank=True)

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ('sort_key', 'name')
        verbose_name_plural = 'Tweet statuses'


class MCBTweetEvent(models.Model):
    """
    Pre-load CalendarEvents for tweeting
    """
    mcb_event = models.ForeignKey(CalendarEvent, verbose_name='MCB Event')
    tweet_text = models.CharField(max_length=140)
    status = models.ForeignKey(TweetStatus)
    reject_tweet = models.BooleanField(default=False, help_text='auto-filled on save')
    approved = models.BooleanField(default=False, help_text='auto-filled on save')
    tweet_pubdate = models.DateTimeField()
    tweet_tag_text = models.CharField(max_length=75, default='#MCB_Event', blank=True)
    tweet_short_url = models.URLField(max_length=75, blank=True)
    full_tweet = models.CharField(max_length=255, blank=True, help_text='auto-filled on save')
    google_id = models.CharField(max_length=255, blank=True, db_index=True)

    def view_calendar_event(self):
        if not self.mcb_event:
            return 'n/a'
        url = reverse('admin:events_calendarevent_change', args=(self.mcb_event.id,))
        return '<a href="%s">view calendar event</a>' % url
    view_calendar_event.allow_tags = True

    def set_tweet_to_awaiting_approval_without_save(self):
        try:
            self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_AWAITING_APPROVAL)
        except:
            pass

    def approve_tweet_without_save(self):
        try:
            self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_APPROVED)
        except:
            pass

    def reject_tweet_without_save(self):
        try:
            self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_REJECTED)
        except:
            pass

    def set_status_to_tweeted_without_save(self):
        try:
            self.status = TweetStatus.objects.get(pk=TWEET_STATUS_PK_TWEETED)
        except:
            pass

    @staticmethod
    def get_events_awaiting_approval():
        return MCBTweetEvent.objects.filter(tweet_pubdate__gt=datetime.now()\
            , status=TweetStatus.objects.get(pk=TWEET_STATUS_PK_AWAITING_APPROVAL)\
            ).all().order_by('tweet_pubdate')

    @staticmethod
    def create_tweet_from_calendar_event(cal_event):
        if cal_event is None:
            return None
        if not cal_event.short_url:
            cal_event.save()
        status_awaiting_approval = TweetStatus.objects.get(pk=TWEET_STATUS_PK_AWAITING_APPROVAL)
        mcb_tweet = MCBTweetEvent(mcb_event=cal_event\
            , status=status_awaiting_approval\
            , tweet_text=cal_event.title[:140]\
            , tweet_pubdate=cal_event.start_time\
            , tweet_short_url=cal_event.short_url\
            , google_id=cal_event.google_id\
            )
        mcb_tweet.save()
        return mcb_tweet

    def get_full_tweet(self):
        full_tweet = assemble_full_tweet(self.tweet_text\
            , self.tweet_short_url\
            , self.tweet_tag_text)
        if len(full_tweet) <= 140:
            return full_tweet
        full_tweet = assemble_full_tweet(self.tweet_text\
            , self.tweet_short_url\
            )
        if len(full_tweet) <= 140:
            return full_tweet
        # bare text still fits within the 140-character limit
        if len(self.tweet_text) <= 140:
            return self.tweet_text
        return self.tweet_text[:140]
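
    # Illustrative walk-through (values are hypothetical, not project data):
    # with tweet_text='Seminar: RNA repair', tweet_short_url='http://x.co/ab1'
    # and the default tweet_tag_text='#MCB_Event', the first
    # assemble_full_tweet() result fits within 140 characters and is returned;
    # the fallbacks (drop the hashtag, then bare text, then a hard [:140]
    # truncation) only apply as tweet_text grows longer.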
    def save(self, *args, **kwargs):
        self.full_tweet = self.get_full_tweet()
        if self.full_tweet is None:
            self.full_tweet = ''
        if self.status.id == TWEET_STATUS_PK_REJECTED:
            self.reject_tweet = True
        else:
            self.reject_tweet = False
        if self.status.id in (TWEET_STATUS_PK_APPROVED, TWEET_STATUS_PK_TWEETED):
            self.approved = True
        else:
            self.approved = False
        super(MCBTweetEvent, self).save(*args, **kwargs)

    def __unicode__(self):
        return '%s' % self.tweet_text

    class Meta:
        verbose_name = 'MCB Tweet Event'
        ordering = ('status__sort_key', '-tweet_pubdate', 'tweet_text')
| [
"[email protected]"
] | |
3249525f584c43396625c8d5adc155bf19d3e031 | 536b2dc4d0541f6f8a71d9ef8cfa6d449c5db69a | /src/files/REST-Linux/scripts/set_indicatorled.py | 8dfa96872471db2996f43be9946da8db6cf8cde1 | [
"MIT"
] | permissive | Huawei/Server_Management_Plugin_Puppet | 54529c5305944e802ae799955287ba013e06f536 | 3a549dfa28b3522744932e7716064286a5f2f118 | refs/heads/master | 2021-06-19T17:48:12.253094 | 2019-09-20T09:06:12 | 2019-09-20T09:06:12 | 150,839,359 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,975 | py | # -*- coding:utf-8 -*-
"""
#=========================================================================
# @Description: Set indicator State of Chassis
#
# @author:
# @Date:
#=========================================================================
"""
import sys
def setindicatorled_init(parser, parser_list):
"""
#=====================================================================
# @Method: set indicator LED state
# @Param:
# @Return:
# @author:
#=====================================================================
"""
sub_parser = parser.add_parser('setindicatorled',
help='''set product information''')
sub_parser.add_argument('-S', dest='state', required=True,
choices=['Lit', 'Off', 'Blinking'],
help='state of indicator led')
parser_list['setindicatorled'] = sub_parser
return 'setindicatorled'
def setindicatorled(client, parser, args):
"""
#=====================================================================
# @Method: set product info
# @Param:
# @Return:
# @author:
#=====================================================================
"""
slotid = client.get_slotid()
if slotid is None:
return None
url = "/redfish/v1/Chassis/%s" % slotid
resp = client.get_resource(url)
if resp is None:
return None
if resp['status_code'] != 200:
if resp['status_code'] == 404:
print('Failure: resource was not found')
return resp
payload = {
"IndicatorLED": args.state
}
resp = client.set_resource(url, payload)
if resp is None:
return None
if resp['status_code'] == 200:
print('Success: successfully completed request')
else:
from common_function import display_error_message
display_error_message(client, resp)
return resp
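
# Hypothetical wiring sketch (added for illustration; the surrounding CLI
# framework is assumed, not shown in this file): the *_init hook registers the
# sub-command, and the handler is later dispatched with the parsed args, e.g.
#   parser_list = {}
#   subparsers = top_parser.add_subparsers(dest='subcommand')
#   setindicatorled_init(subparsers, parser_list)
#   args = top_parser.parse_args(['setindicatorled', '-S', 'Blinking'])
#   setindicatorled(client, parser_list['setindicatorled'], args)
# which sends the payload {"IndicatorLED": "Blinking"} to
# /redfish/v1/Chassis/<slot> via client.set_resource().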
| [
"[email protected]"
] | |
775437e8f41fe09868b4e887683a5283ccc46bf9 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_103/ch163_2020_06_20_21_08_09_870683.py | 562bc7f4be71d74ecbf772f874e1ddb8cfc29b67 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | def calcula_media(lista_notas):
    lista_num = []
    for i in range(len(lista_notas)):
        for nome, notas in lista_notas[i].items():
            a = lista_notas[i]
            a[nome] = notas
            lista_num.append(notas)
    print(lista_num)
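
# A finished version might look like the sketch below; it assumes the exercise
# passes a list of one-entry dicts mapping a student name to a numeric grade
# and expects the overall mean back (the stub above only prints the grades).
def calcula_media_sketch(lista_notas):
    notas = [nota for aluno in lista_notas for nota in aluno.values()]
    return sum(notas) / len(notas)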
| [
"[email protected]"
] | |
820286b363cf8652d45eba5c228226b713f9a63e | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-2/d33210f954309e2844204a05856bdc5ff333bda1-<run_with_configuration>-bug.py | 48cdfb01d7f9ee317701b166c7719899a3142f7d | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,034 | py |
def run_with_configuration(self, configuration):
    "\n Executes a BigQuery SQL query. See here:\n\n https://cloud.google.com/bigquery/docs/reference/v2/jobs\n\n For more details about the configuration parameter.\n\n :param configuration: The configuration parameter maps directly to\n BigQuery's configuration field in the job object. See\n https://cloud.google.com/bigquery/docs/reference/v2/jobs for\n details.\n "
    jobs = self.service.jobs()
    job_data = {
        'configuration': configuration,
    }
    query_reply = jobs.insert(projectId=self.project_id, body=job_data).execute()
    self.running_job_id = query_reply['jobReference']['jobId']
    if ('location' in query_reply['jobReference']):
        location = query_reply['jobReference']['location']
    else:
        location = self.location
    keep_polling_job = True
    while keep_polling_job:
        try:
            if location:
                job = jobs.get(projectId=self.project_id, jobId=self.running_job_id, location=location).execute()
            else:
                job = jobs.get(projectId=self.project_id, jobId=self.running_job_id).execute()
            if (job['status']['state'] == 'DONE'):
                keep_polling_job = False
                if ('errorResult' in job['status']):
                    raise Exception('BigQuery job failed. Final error was: {}. The job was: {}'.format(job['status']['errorResult'], job))
            else:
                self.log.info('Waiting for job to complete : %s, %s', self.project_id, self.running_job_id)
                time.sleep(5)
        except HttpError as err:
            if (err.resp.status in [500, 503]):
                self.log.info('%s: Retryable error, waiting for job to complete: %s', err.resp.status, self.running_job_id)
                time.sleep(5)
            else:
                raise Exception('BigQuery job status check failed. Final error was: %s', err.resp.status)
    return self.running_job_id
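
# Illustrative configuration payload (an assumption based on the BigQuery v2
# jobs API that the docstring links to, not data from this repository):
#   configuration = {
#       'query': {
#           'query': 'SELECT COUNT(*) FROM dataset.table',
#           'useLegacySql': False,
#       }
#   }
# hook.run_with_configuration(configuration) inserts the job, then polls
# jobs.get() every 5 seconds until the state is DONE, retrying on HTTP 500/503.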
| [
"[email protected]"
] | |
c1ba0a272063671fd8a2f2ad9ae66cbc977f8d80 | 9889c3289c60957ab53b2fe0077a1279f085176f | /django-flipbook/flipbook/migrations/0004_auto__add_flipbookpage.py | 35de2c048a05c0849b68df627e67381bb7ca91a9 | [] | no_license | WeilerWebServices/FlipBook | 52b5c681ea9b36fbe91f392f1a9303ccaf01bc6d | 9d09d4310d08704e311d27d984cafda2436c0ddb | refs/heads/master | 2022-12-15T04:19:03.348567 | 2020-09-06T09:03:31 | 2020-09-06T09:03:31 | 293,239,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,454 | py | # flake8: noqa
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'FlipbookPage'
        db.create_table(u'flipbook_flipbookpage', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('flipbook', self.gf('django.db.models.fields.related.ForeignKey')(related_name='pages', to=orm['flipbook.Flipbook'])),
            ('position', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)),
            ('content', self.gf('django.db.models.fields.TextField')(max_length=8192, blank=True)),
        ))
        db.send_create_signal(u'flipbook', ['FlipbookPage'])

    def backwards(self, orm):
        # Deleting model 'FlipbookPage'
        db.delete_table(u'flipbook_flipbookpage')
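
    # Rough SQL equivalent of forwards() (illustrative only, assuming a
    # PostgreSQL backend; South generates the actual statement at run time):
    #   CREATE TABLE "flipbook_flipbookpage" (
    #       "id" serial NOT NULL PRIMARY KEY,
    #       "flipbook_id" integer NOT NULL REFERENCES "flipbook_flipbook" ("id"),
    #       "position" integer NOT NULL,
    #       "content" text NOT NULL
    #   );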
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'filer.file': {
            'Meta': {'object_name': 'File'},
            '_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
            'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
            'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        },
        'filer.folder': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        },
        'filer.image': {
            'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
            '_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            '_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
            'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
        },
        u'flipbook.flipbook': {
            'Meta': {'object_name': 'Flipbook'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'flipbooks'", 'null': 'True', 'to': u"orm['flipbook.FlipbookCategory']"}),
            'download': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.File']", 'null': 'True', 'blank': 'True'}),
            'folder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Folder']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'flipbooks'", 'null': 'True', 'to': u"orm['auth.User']"})
        },
        u'flipbook.flipbookcategory': {
            'Meta': {'object_name': 'FlipbookCategory'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'large_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'flipbook_categories_with_large_images'", 'null': 'True', 'to': "orm['filer.Image']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
            'small_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'flipbook_categories_with_small_images'", 'null': 'True', 'to': "orm['filer.Image']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'flipbook_categories'", 'null': 'True', 'to': u"orm['auth.User']"})
        },
        u'flipbook.flipbookpage': {
            'Meta': {'object_name': 'FlipbookPage'},
            'content': ('django.db.models.fields.TextField', [], {'max_length': '8192', 'blank': 'True'}),
            'flipbook': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pages'", 'to': u"orm['flipbook.Flipbook']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'position': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
        }
    }
complete_apps = ['flipbook'] | [
"[email protected]"
] | |
6568491c77e00ed6a1d457abc304bb7ca1611938 | a2c02fb5dd11e1fb7d0faefaaa4929f40d12cfdf | /plot_line_uv_std_vs_time.py | 161a0a4e1a4a5cb449f8bc8bf19762b35c720618 | [] | no_license | iwenfeng/workspace_python | 2c5486f6e77e13a8fd4d516c6743c2794d062a87 | 392d8e1f953baefe5ed51028636355c67dc5df62 | refs/heads/master | 2020-12-03T23:11:31.949332 | 2019-11-12T16:52:21 | 2019-11-12T16:52:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,236 | py | from __future__ import division,print_function
import matplotlib as mpl
import scipy as sp
from datatools import *
from gridtools import *
from plottools import *
import matplotlib.tri as mplt
import matplotlib.pyplot as plt
#from mpl_toolkits.basemap import Basemap
import os as os
import sys
np.set_printoptions(precision=8,suppress=True,threshold=np.nan)
import time as timem
from matplotlib.collections import LineCollection as LC
from matplotlib.collections import PolyCollection as PC
from scipy import interpolate as intp
from mpl_toolkits.axes_grid1 import make_axes_locatable
import scipy.io as sio
import seawater as sw  # make the seawater dependency explicit: sw.dist() is called below
# Define names and types of data
name_orig='kit4_45days_3'
name_change='kit4_kelp_20m_0.018'
name_change2='kit4_kelp_20m_0.011'
name_change3='kit4_kelp_20m_0.007'
grid='kit4'
regionname='kit4_kelp_tight5'
starttime=400
endtime=520
### load the .nc file #####
data = loadnc('runs/'+grid+'/'+name_orig+'/output/',singlename=grid + '_0001.nc')
data2 = loadnc('runs/'+grid+'/'+name_change+'/output/',singlename=grid + '_0001.nc')
data3 = loadnc('runs/'+grid+'/'+name_change2+'/output/',singlename=grid + '_0001.nc')
data4 = loadnc('runs/'+grid+'/'+name_change3+'/output/',singlename=grid + '_0001.nc')
print('done load')
data = ncdatasort(data)
print('done sort')
cages=np.genfromtxt('runs/'+grid+'/' +name_change+ '/input/' +grid+ '_cage.dat',skiprows=1)
cages=(cages[:,0]-1).astype(int)
savepath='figures/png/' + grid + '/line_uv_std_vs_time/'
if not os.path.exists(savepath): os.makedirs(savepath)
region=regions(regionname)
nidx=get_nodes(data,region)
eidx=get_elements(data,region)
spacing=1
#line=[-129.48666,52.62,52.68]
#define line as line=[bottomx,topx,bottomy,topy]
#kit4_kelp_tight2 verical
line=[-129.48833,-129.48833,52.62,52.68]
#kit4_kelp_tight2 horiz1
line=[-129.53,-129.46,52.65,52.65]
#kit4_kelp_tight2 horiz2
#line=[-129.53,-129.46,52.655,52.655]
#kit4_kelp_tight5 north
#line=[-129.44,-129.40,52.56,52.60]
#kit4_kelp_tight5 south
#line=[-129.35,-129.3,52.52,52.54]
#kit4_kelp_tight5 left horiz top
#line=[-129.45,-129.375,52.575,52.575]
#kit4_kelp_tight5 left horiz bottom
line=[-129.45,-129.375,52.54,52.54]
#kit4_kelp_tight5 right horiz bottom
#line=[-129.375,-129.3,52.53,52.53]
#kit4_kelp_tight5 right horiz bottom a
#line=[-129.375,-129.3,52.5325,52.5325]
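# NOTE: only the last uncommented assignment wins, so the transect actually
# used below is the "kit4_kelp_tight5 left horiz bottom" line defined above.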
print(line)
ngridy = 2000
tmparray=[list(zip(data['nodell'][data['nv'][i,[0,1,2,0]],0],data['nodell'][data['nv'][i,[0,1,2,0]],1])) for i in cages[np.in1d(cages,eidx)] ]
tmparray=np.array(tmparray)
def ccw(A,B,C):
return (C[1]-A[1]) * (B[0]-A[0]) > (B[1]-A[1]) * (C[0]-A[0])
def intersect(a1, b1, a2, b2):
"""Returns True if line segments a1b1 and a2b2 intersect."""
return ccw(a1, b1, a2) != ccw(a1, b1, b2) and ccw(a2, b2, a1) != ccw(a2, b2, b1)
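# Quick sanity check of the intersection helpers (hypothetical unit-square
# coordinates, not part of the analysis): the square's diagonals cross, while
# its bottom and top edges do not. Note the CCW test does not handle the
# degenerate collinear/touching case.
assert intersect((0, 0), (1, 1), (0, 1), (1, 0))
assert not intersect((0, 0), (1, 0), (0, 1), (1, 1))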
lineints=np.zeros((tmparray.shape[0],3))
for i in range(0,tmparray.shape[0]):
lineints[i,0]=intersect((line[0],line[2]),(line[1],line[3]),(tmparray[i,-1,0],tmparray[i,-1,1]),(tmparray[i,0,0],tmparray[i,0,1]))
lineints[i,1]=intersect((line[0],line[2]),(line[1],line[3]),(tmparray[i,0,0],tmparray[i,0,1]),(tmparray[i,1,0],tmparray[i,1,1]))
lineints[i,2]=intersect((line[0],line[2]),(line[1],line[3]),(tmparray[i,1,0],tmparray[i,1,1]),(tmparray[i,2,0],tmparray[i,2,1]))
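# Each column of lineints tests one segment of the closed cage triangle
# [v0, v1, v2, v0] against the transect: column 1 tests the v0-v1 edge and
# column 2 the v1-v2 edge. Column 0 compares points [-1] and [0], which are
# both the repeated closing vertex v0, so it is a degenerate segment and can
# never flag a crossing; the v2-v0 edge is effectively untested here.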
idx=np.where(lineints==1)
idxr=idx[0]
idxc=idx[1]
highest=0
lowest=1000000
for i in range(0,len(idxr)):
j=idxr[i]
k=idxc[i]
dist=np.sqrt((line[0]-(tmparray[j,-1+k,0]+tmparray[j,k,0])/2)**2+(line[2]-(tmparray[j,-1+k,1]+tmparray[j,k,1])/2)**2)
highest=np.max([highest,dist])
lowest=np.min([lowest,dist])
H1=(sw.dist([line[2], line[3]],[line[0], line[1]],'km'))[0]*1000
H2=np.sqrt((line[0]-line[1])**2+(line[2]-line[3])**2)
linea=(lowest/H2)*H1
lineb=(highest/H2)*H1
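# H1 is the transect length in metres (seawater great-circle distance) and
# H2 the same length in degrees, so linea/lineb rescale the nearest and
# farthest kelp-edge crossings from degree space onto the metre axis used
# when plotting the dashed cage-boundary lines.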
start = timem.clock()
uvar_o=data['ua'][starttime:,:].var(axis=0)
vvar_o=data['va'][starttime:,:].var(axis=0)
uvar_c=data2['ua'][starttime:,:].var(axis=0)
vvar_c=data2['va'][starttime:,:].var(axis=0)
uvar_c2=data3['ua'][starttime:,:].var(axis=0)
vvar_c2=data3['va'][starttime:,:].var(axis=0)
uvar_c3=data4['ua'][starttime:,:].var(axis=0)
vvar_c3=data4['va'][starttime:,:].var(axis=0)
print ('calc current mag: %f' % (timem.clock() - start))
start = timem.clock()
yi = np.linspace(line[2],line[3], ngridy)
yim = np.linspace(0,H1, ngridy)
xi = np.linspace(line[0],line[1], ngridy)
#griddata seems to be swapping xi so that it goes high to low instead of low to high.
#fliplr is to account for this.
points=np.fliplr(np.flipud(np.eye(ngridy,dtype=bool)))
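# The two flips cancel (flipud followed by fliplr of the identity is the
# identity again), so this boolean mask simply selects the main diagonal of
# the ngridy x ngridy griddata output, i.e. one sample per (xi[k], yi[k])
# pair along the transect.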
interpdata1_u=np.empty((ngridy,))
interpdata2_u=np.empty((ngridy,))
interpdata3_u=np.empty((ngridy,))
interpdata4_u=np.empty((ngridy,))
interpdata1_v=np.empty((ngridy,))
interpdata2_v=np.empty((ngridy,))
interpdata3_v=np.empty((ngridy,))
interpdata4_v=np.empty((ngridy,))
interpdata1_u=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], uvar_o[eidx], xi, yi)[points]
interpdata2_u=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], uvar_c[eidx], xi, yi)[points]
interpdata3_u=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], uvar_c2[eidx], xi, yi)[points]
interpdata4_u=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], uvar_c3[eidx], xi, yi)[points]
interpdata1_v=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], vvar_o[eidx], xi, yi)[points]
interpdata2_v=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], vvar_c[eidx], xi, yi)[points]
interpdata3_v=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], vvar_c2[eidx], xi, yi)[points]
interpdata4_v=mpl.mlab.griddata(data['uvnodell'][eidx,0],data['uvnodell'][eidx,1], vvar_c3[eidx], xi, yi)[points]
print ('griddata interp: %f' % (timem.clock() - start))
f = plt.figure()
ax=f.add_axes([.125,.1,.775,.8])
ax.plot(yim,interpdata1_u,'k',label='No drag')
ax.plot(yim,interpdata2_u,'r',label='Drag: 0.018')
ax.plot(yim,interpdata3_u,'b',label='Drag: 0.011')
ax.plot(yim,interpdata4_u,'g',label='Drag: 0.007')
ax.axvline(lineb,color='k',linestyle='dashed')
ax.axvline(linea,color='k',linestyle='dashed')
ax.set_ylabel(r'u-velocity std (m s$^{-1}$)',fontsize=10)
ax.set_xlabel(r'Distance (m)',fontsize=10)
ax.legend()
f.savefig(savepath + grid + '_4runs_line_u_std_vs_time_'+("%f"%line[0])+'_'+("%f"%line[1])+'_'+("%f"%line[2])+'_'+("%f"%line[3])+'.png',dpi=300)
plt.close(f)
f = plt.figure()
ax=f.add_axes([.125,.1,.775,.8])
ax.plot(yim,interpdata1_v,'k',label='No drag')
ax.plot(yim,interpdata2_v,'r',label='Drag: 0.018')
ax.plot(yim,interpdata3_v,'b',label='Drag: 0.011')
ax.plot(yim,interpdata4_v,'g',label='Drag: 0.007')
ax.axvline(lineb,color='k',linestyle='dashed')
ax.axvline(linea,color='k',linestyle='dashed')
ax.set_ylabel(r'v-velocity std (m s$^{-1}$)',fontsize=10)
ax.set_xlabel(r'Distance (m)',fontsize=10)
ax.legend()
f.savefig(savepath + grid + '_4runs_line_v_std_vs_time_'+("%f"%line[0])+'_'+("%f"%line[1])+'_'+("%f"%line[2])+'_'+("%f"%line[3])+'.png',dpi=300)
plt.close(f)
f = plt.figure()
ax=f.add_axes([.125,.1,.775,.8])
ax.triplot(data['trigrid'],lw=.5)
tmparray=[list(zip(data['nodell'][data['nv'][i,[0,1,2]],0],data['nodell'][data['nv'][i,[0,1,2]],1])) for i in cages ]
lseg0=PC(tmparray,facecolor = 'g',edgecolor='None')
ax.add_collection(lseg0)
ax.plot(xi,yi,'b.')
prettyplot_ll(ax,setregion=region,grid=True)
plotcoast(ax,filename='pacific.nc',color='r')
f.savefig(savepath + grid + '_4runs_line_uv_std_vs_time_'+("%f"%line[0])+'_'+("%f"%line[1])+'_'+("%f"%line[2])+'_'+("%f"%line[3])+'_location.png',dpi=300)
plt.close(f)
tempdic={}
tempdic['interp_orig_u']=interpdata1_u
tempdic['interp_018_u']=interpdata2_u
tempdic['interp_011_u']=interpdata3_u
tempdic['interp_007_u']=interpdata4_u
tempdic['interp_orig_v']=interpdata1_v
tempdic['interp_018_v']=interpdata2_v
tempdic['interp_011_v']=interpdata3_v
tempdic['interp_007_v']=interpdata4_v
tempdic['line']=line
tempdic['yi']=yi
tempdic['yi_meters']=yim
tempdic['kelpedge_south']=lineb
tempdic['kelpedge_north']=linea
base_dir = os.path.dirname(__file__)
sio.savemat(os.path.join(base_dir,'data', grid + '_4runs_line_uv_std_vs_time_'+("%f"%line[0])+'_'+("%f"%line[1])+'_'+("%f"%line[2])+'_'+("%f"%line[3])+'.mat'),mdict=tempdic)
| [
"[email protected]"
] | |
b99b9daf57adaea08d1ed0c0ad2114204996b225 | 55c8557a675d9228a3fb96bf8736ec613351ebb3 | /apps/account/migrations/0002_pyaccountplan_type.py | c83731236824c49ff9db7e7a1325310774da2ff0 | [
"MIT"
] | permissive | gvizquel/pyerp | e56024b481977e07339e8a0a17a26e1a0e4f1147 | c859f7293cabd1003f79112463cee93ac89fccba | refs/heads/master | 2022-12-07T13:12:16.333420 | 2019-08-29T21:38:22 | 2019-08-29T21:38:22 | 204,968,470 | 0 | 0 | MIT | 2022-12-04T09:21:19 | 2019-08-28T15:48:24 | JavaScript | UTF-8 | Python | false | false | 558 | py | # Generated by Django 2.2.4 on 2019-08-18 03:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('account', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='pyaccountplan',
name='type',
field=models.CharField(choices=[('activo', 'Activo'), ('pasivo', 'Pasivo'), ('patrimonio_capital', 'Patrimonio'), ('ingresos', 'Ingresos'), ('costos', 'Costos'), ('gastos', 'Gastos')], default='activo', max_length=64),
),
]
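    # Note: the new field is non-nullable, so Django applies default='activo'
    # both to new rows and to backfill existing PyAccountPlan rows when this
    # migration is applied.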
| [
"[email protected]"
] | |
325e3a78f1a7788dd5dc13d3f481813fcdf5258a | 564d6a4d305a8ac6a7e01c761831fb2081c02d0f | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_10_01/aio/operations/_application_gateways_operations.py | c3ea5c9de5a15baa110689f2072695a60a4e5c54 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | paultaiton/azure-sdk-for-python | 69af4d889bac8012b38f5b7e8108707be679b472 | d435a1a25fd6097454b7fdfbbdefd53e05029160 | refs/heads/master | 2023-01-30T16:15:10.647335 | 2020-11-14T01:09:50 | 2020-11-14T01:09:50 | 283,343,691 | 0 | 0 | MIT | 2020-07-28T22:43:43 | 2020-07-28T22:43:43 | null | UTF-8 | Python | false | false | 55,284 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ApplicationGatewaysOperations:
"""ApplicationGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
async def get(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> "models.ApplicationGateway":
"""Gets the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_10_01.models.ApplicationGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
application_gateway_name: str,
parameters: "models.ApplicationGateway",
**kwargs
) -> "models.ApplicationGateway":
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ApplicationGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
application_gateway_name: str,
parameters: "models.ApplicationGateway",
**kwargs
) -> AsyncLROPoller["models.ApplicationGateway"]:
"""Creates or updates the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param parameters: Parameters supplied to the create or update application gateway operation.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.ApplicationGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ApplicationGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_10_01.models.ApplicationGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
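    # Illustrative async usage sketch (not generated code; the resource names
    # and credential wiring below are assumptions, not part of this module):
    #
    #   from azure.identity.aio import DefaultAzureCredential
    #   from azure.mgmt.network.aio import NetworkManagementClient
    #
    #   client = NetworkManagementClient(DefaultAzureCredential(), subscription_id)
    #   poller = await client.application_gateways.begin_create_or_update(
    #       "my-rg", "my-appgw", parameters)
    #   gateway = await poller.result()  # awaits until the LRO completes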
async def _update_tags_initial(
self,
resource_group_name: str,
application_gateway_name: str,
parameters: "models.TagsObject",
**kwargs
) -> "models.ApplicationGateway":
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
application_gateway_name: str,
parameters: "models.TagsObject",
**kwargs
) -> AsyncLROPoller["models.ApplicationGateway"]:
"""Updates the specified application gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param parameters: Parameters supplied to update application gateway tags.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ApplicationGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_10_01.models.ApplicationGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["models.ApplicationGatewayListResult"]:
"""Lists all application gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_10_01.models.ApplicationGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways'} # type: ignore
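    # Paging sketch (hypothetical resource-group name): the AsyncItemPaged
    # returned here is consumed with "async for", which fetches pages lazily
    # through the prepare_request/get_next helpers above:
    #
    #   async for gateway in client.application_gateways.list("my-rg"):
    #       print(gateway.name)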
def list_all(
self,
**kwargs
) -> AsyncIterable["models.ApplicationGatewayListResult"]:
"""Gets all the application gateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationGatewayListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_10_01.models.ApplicationGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways'} # type: ignore
async def _start_initial(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
async def begin_start(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Starts the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start'} # type: ignore
async def _stop_initial(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'} # type: ignore
async def begin_stop(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Stops the specified application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop'} # type: ignore
async def _backend_health_initial(
self,
resource_group_name: str,
application_gateway_name: str,
expand: Optional[str] = None,
**kwargs
) -> Optional["models.ApplicationGatewayBackendHealth"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ApplicationGatewayBackendHealth"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
# Construct URL
url = self._backend_health_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'applicationGatewayName': self._serialize.url("application_gateway_name", application_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_backend_health_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'} # type: ignore
async def begin_backend_health(
self,
resource_group_name: str,
application_gateway_name: str,
expand: Optional[str] = None,
**kwargs
) -> AsyncLROPoller["models.ApplicationGatewayBackendHealth"]:
"""Gets the backend health of the specified application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ApplicationGatewayBackendHealth or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_10_01.models.ApplicationGatewayBackendHealth]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayBackendHealth"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._backend_health_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
expand=expand,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_backend_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth'} # type: ignore
async def list_available_waf_rule_sets(
self,
**kwargs
) -> "models.ApplicationGatewayAvailableWafRuleSetsResult":
"""Lists all available web application firewall rule sets.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableWafRuleSetsResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_10_01.models.ApplicationGatewayAvailableWafRuleSetsResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayAvailableWafRuleSetsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
# Construct URL
url = self.list_available_waf_rule_sets.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableWafRuleSetsResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_waf_rule_sets.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableWafRuleSets'} # type: ignore
async def list_available_ssl_options(
self,
**kwargs
) -> "models.ApplicationGatewayAvailableSslOptions":
"""Lists available Ssl options for configuring Ssl policy.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewayAvailableSslOptions, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_10_01.models.ApplicationGatewayAvailableSslOptions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayAvailableSslOptions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
# Construct URL
url = self.list_available_ssl_options.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewayAvailableSslOptions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_available_ssl_options.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default'} # type: ignore
def list_available_ssl_predefined_policies(
self,
**kwargs
) -> AsyncIterable["models.ApplicationGatewayAvailableSslPredefinedPolicies"]:
"""Lists all SSL predefined policies for configuring Ssl policy.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationGatewayAvailableSslPredefinedPolicies or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_10_01.models.ApplicationGatewayAvailableSslPredefinedPolicies]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewayAvailableSslPredefinedPolicies"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_available_ssl_predefined_policies.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ApplicationGatewayAvailableSslPredefinedPolicies', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_available_ssl_predefined_policies.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies'} # type: ignore
async def get_ssl_predefined_policy(
self,
predefined_policy_name: str,
**kwargs
) -> "models.ApplicationGatewaySslPredefinedPolicy":
"""Gets Ssl predefined policy with the specified policy name.
:param predefined_policy_name: Name of Ssl predefined policy.
:type predefined_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGatewaySslPredefinedPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_10_01.models.ApplicationGatewaySslPredefinedPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationGatewaySslPredefinedPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json"
# Construct URL
url = self.get_ssl_predefined_policy.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'predefinedPolicyName': self._serialize.url("predefined_policy_name", predefined_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGatewaySslPredefinedPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_ssl_predefined_policy.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGatewayAvailableSslOptions/default/predefinedPolicies/{predefinedPolicyName}'} # type: ignore
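# Matching single-get sketch (same assumed `client` as in the listing example;
# "AppGwSslPolicy20170401S" is one of the service's predefined policy names):
#
#     policy = await client.application_gateways.get_ssl_predefined_policy(
#         "AppGwSslPolicy20170401S"
#     )
#     print(policy.min_protocol_version)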
| [
"[email protected]"
] | |
07887321386c9f0bbfcb2e0b00eed25e2cd70164 | 234bb369416b18dd7757b14a8b9b03d0656a1b5d | /week12/back/back/settings.py | ade21baf7381428f520e5553eeea4c5fed798ea2 | [] | no_license | derbess/WebDEV | 432fb70e42c89fa9a47b77bf768878d96987b2f0 | dc323e242ca19df436055d7b73e33f239349cafc | refs/heads/master | 2020-05-20T13:46:55.526113 | 2019-05-08T07:15:28 | 2019-05-08T07:15:28 | 185,607,138 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,437 | py | """
Django settings for back project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kzx4c$2nifd&mx6((!!wv$a)7!*o7@i=#caji7bg88f2k+8%0e'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'api'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'back.urls'
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'back.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
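# corsheaders is wired into INSTALLED_APPS and MIDDLEWARE above but never
# configured, so browsers will still reject cross-origin requests from the
# frontend by default. A minimal sketch of the missing settings (the origin
# below is an assumed dev-server URL, not taken from this project):
#
#     CORS_ORIGIN_ALLOW_ALL = True                       # development only
#     CORS_ORIGIN_WHITELIST = ['http://localhost:8080']  # stricter alternative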
| [
"[email protected]"
] | |
de97534eb6a222219bae78c0160be1a5e459d9bc | 1bba82345900327ed1c128e8046dc91f90a0ccb5 | /lets_party/migrations/0010_auto_20190916_1924.py | 7ef8348e9b96b152a19329d86c88e55ed68b582e | [
"MIT"
] | permissive | dchaplinsky/ragoogle | 40bd093682e41d1ee2a77f446c69d09e82bb3948 | dccb3d29334c3220ea12c46c725c443c8bd725c0 | refs/heads/master | 2021-06-11T10:07:41.142843 | 2020-10-12T10:30:39 | 2020-10-12T10:30:39 | 136,800,715 | 3 | 3 | MIT | 2021-03-19T23:20:02 | 2018-06-10T10:51:30 | CSS | UTF-8 | Python | false | false | 1,528 | py | # Generated by Django 2.2.4 on 2019-09-16 16:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lets_party', '0009_auto_20190916_0311'),
]
operations = [
migrations.AddField(
model_name='letspartymodel',
name='amount',
field=models.DecimalField(decimal_places=2, default=0, max_digits=15, verbose_name='Сума пожертви'),
),
migrations.AlterField(
model_name='letspartyredflag',
name='rule',
field=models.CharField(choices=[('company_won_procurement', 'Компанія виграла у держзакупівлях'), ('company_had_tax_debts', 'Компанія мала держборг'), ('company_is_high_risk', 'Компанія має ознаки фіктивності'), ('company_has_foreign_bo', 'Компанія має закордоних бенефіціарів'), ('company_has_pep_bo', 'Компанія має PEP-бенефіціарів'), ('company_is_not_active', 'Компанія припинена, або порушено справу про банкрутство'), ('company_has_pep_founder', 'Компанія має засновників/співвласників PEP-ів'), ('company_had_pep_founder', 'Компанія мала засновників/співвласників PEP-ів'), ('company_had_pep_bo', 'Компанія мала PEP-бенефіціарів')], max_length=100),
),
]
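# The Ukrainian choice labels above are human-readable red-flag descriptions,
# e.g. 'Компанія виграла у держзакупівлях' ~ "company won public procurement",
# 'Компанія має PEP-бенефіціарів' ~ "company has PEP beneficial owners".
# A minimal usage sketch, assuming the LetsPartyRedFlag model from this app:
#
#     flag = LetsPartyRedFlag(rule='company_is_high_risk')
#     flag.get_rule_display()  # -> 'Компанія має ознаки фіктивності'
#
# The migration itself is applied with: python manage.py migrate lets_party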
| [
"[email protected]"
] | |
2e1a629db9551f0e666dfafec29bca9c6ff38e4c | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/cloud/bigquery/connection/v1/bigquery-connection-v1-py/google/cloud/bigquery_connection/__init__.py | 724d7476ae2f9ad95c3e5888257dd2d749efc2d9 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,360 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.cloud.bigquery_connection_v1.services.connection_service.client import ConnectionServiceClient
from google.cloud.bigquery_connection_v1.services.connection_service.async_client import ConnectionServiceAsyncClient
from google.cloud.bigquery_connection_v1.types.connection import AwsAccessRole
from google.cloud.bigquery_connection_v1.types.connection import AwsCrossAccountRole
from google.cloud.bigquery_connection_v1.types.connection import AwsProperties
from google.cloud.bigquery_connection_v1.types.connection import CloudSpannerProperties
from google.cloud.bigquery_connection_v1.types.connection import CloudSqlCredential
from google.cloud.bigquery_connection_v1.types.connection import CloudSqlProperties
from google.cloud.bigquery_connection_v1.types.connection import Connection
from google.cloud.bigquery_connection_v1.types.connection import CreateConnectionRequest
from google.cloud.bigquery_connection_v1.types.connection import DeleteConnectionRequest
from google.cloud.bigquery_connection_v1.types.connection import GetConnectionRequest
from google.cloud.bigquery_connection_v1.types.connection import ListConnectionsRequest
from google.cloud.bigquery_connection_v1.types.connection import ListConnectionsResponse
from google.cloud.bigquery_connection_v1.types.connection import UpdateConnectionRequest
__all__ = ('ConnectionServiceClient',
'ConnectionServiceAsyncClient',
'AwsAccessRole',
'AwsCrossAccountRole',
'AwsProperties',
'CloudSpannerProperties',
'CloudSqlCredential',
'CloudSqlProperties',
'Connection',
'CreateConnectionRequest',
'DeleteConnectionRequest',
'GetConnectionRequest',
'ListConnectionsRequest',
'ListConnectionsResponse',
'UpdateConnectionRequest',
)
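# Illustrative helper, not part of the generated surface: a minimal sketch of
# listing connections with the re-exported client (project/location values are
# caller-supplied; nothing here comes from this package).
def _example_list_connections(project: str, location: str = "us"):
    """Return connection resource names in one project/location (sketch only)."""
    client = ConnectionServiceClient()
    parent = f"projects/{project}/locations/{location}"
    return [conn.name for conn in client.list_connections(parent=parent)]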
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
8c762e14ef1ba9895fa010c2ab1c29e644d26a65 | af17007c9f8b0ccb0b482f0c76e94b542bc236cf | /LC_n_Misc/LC_832.py | 055288253939221f8448242a36636f2b8948ef46 | [] | no_license | abhikrish06/PythonPractice | da72a81845bb73e2902ec37aff9c3b31587ef9ed | 24988428cada3b1f8a6c0cf0140e288511cd9a6d | refs/heads/master | 2021-03-27T18:41:59.953316 | 2018-11-03T23:44:38 | 2018-11-03T23:44:38 | 115,932,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 641 | py | class Solution:
def flipAndInvertImage(self, A):
"""
:type A: List[List[int]]
:rtype: List[List[int]]
"""
        res, fin = [], []
        # first pass: reverse each row (horizontal flip)
        for arr in A:
            res.append(arr[::-1])
        # second pass: invert every bit (0 -> 1, 1 -> 0)
        for ar in res:
res2 = []
for i in ar:
if i == 0:
res2.append(1)
elif i == 1:
res2.append(0)
fin.append(res2)
return fin
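# Equivalent one-pass form of the method above (a sketch of the same idea, not
# from the original file): reverse each row, then invert each bit as 1 - b.
def flip_and_invert_image_short(A):
    return [[1 - b for b in row[::-1]] for row in A]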
obj = Solution()
print(obj.flipAndInvertImage([[1,1,0],[1,0,1],[0,0,0]]))
print(obj.flipAndInvertImage([[1,1,0,0],[1,0,0,1],[0,1,1,1],[1,0,1,0]])) | [
"[email protected]"
] | |
d4ff4d434ccd308db73a4dc2fe714e5ef205aaf4 | 6268a19db5d7806b3a91d6350ec2777b3e13cee6 | /old_stuff/code/hpe_rgb/holi_multi_reso_net/src/create_data_h5py.py | 22cbb6fdd6cc447b32594bf323f03c618f450216 | [] | no_license | aaronlws95/phd_2019 | 3ae48b4936f039f369be3a40404292182768cf3f | 22ab0f5029b7d67d32421d06caaf3e8097a57772 | refs/heads/master | 2023-03-22T14:38:18.275184 | 2021-03-21T11:39:29 | 2021-03-21T11:39:29 | 186,387,381 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | import os
import utils.prepare_data as pd  # project-local helper module, not pandas
from utils.directory import DATA_DIR, DATASET_DIR
from utils.logger import get_logger
logger = get_logger()
dataset_dir = os.path.join(DATASET_DIR, 'First_Person_Action_Benchmark')
save_dir = DATA_DIR
train_pairs, test_pairs = pd.get_fpha_data_list('color', dataset_dir)
file_name = [i for i,j in train_pairs]
xyz_gt = [j for i,j in train_pairs]
pd.write_data_no_crop_h5py(file_name, xyz_gt, os.path.join(save_dir, 'train_fpha_RGB_no_crop.h5'), logger)
file_name = [i for i,j in test_pairs]
xyz_gt = [j for i,j in test_pairs]
pd.write_data_no_crop_h5py(file_name, xyz_gt, os.path.join(save_dir, 'test_fpha_RGB_no_crop.h5'), logger)
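# For reference, a minimal sketch of what a writer like write_data_no_crop_h5py
# might look like with h5py (the real implementation lives in utils.prepare_data
# and is not shown here; dataset names and dtypes are assumptions):
#
#     import h5py
#
#     def write_pairs_h5(file_names, xyz_gt, save_path):
#         with h5py.File(save_path, 'w') as f:
#             f.create_dataset('file_name', data=[s.encode() for s in file_names])
#             f.create_dataset('xyz_gt', data=xyz_gt)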
| [
"[email protected]"
] | |
7a60fa9b117edc5ecee0dc68070efd7cddaa45a5 | 35e203ca1734458650975e56c4cc2b7e2eba1fe9 | /swexpert/swea 3347 올림픽 종목 투표 0310.py | 24a72b653b8337437a659b51ef762f4bb2ee8226 | [] | no_license | ebroebro/swproblem | 0d7e1898fdf72497b937b7f20de664123ff28c25 | e6c166ce2e3806042034b09930a8783d27db674f | refs/heads/master | 2020-12-22T07:27:14.735653 | 2020-05-24T09:24:40 | 2020-05-24T09:24:40 | 236,711,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | T=int(input())
for z in range(T):
n,m=list(map(int,input().split()))
sports=list(map(int,input().split()))
people=list(map(int,input().split()))
check_list=[0 for i in range(n)]
max_cnt=0
rslt=0
    for i in range(m):
        # scan from the back so tmp ends as the smallest index j whose
        # required score sports[j] fits voter i's score people[i]
        for j in range(n-1,-1,-1):
            if sports[j] <= people[i]:
                tmp=j
        check_list[tmp]+=1  # voter i casts a single vote for sport tmp
        # strict > means a sport that merely ties the current leader later
        # does not take over the result
        if check_list[tmp]>max_cnt:
            max_cnt=check_list[tmp]
            rslt=tmp+1
print("#{} {}".format(z+1,rslt))
| [
"[email protected]"
] |