Dataset columns:

| column | type / stats |
|---|---|
| blob_id | string, length 40 |
| directory_id | string, length 40 |
| path | string, length 3-616 |
| content_id | string, length 40 |
| detected_licenses | sequence, length 0-112 |
| license_type | string, 2 classes |
| repo_name | string, length 5-115 |
| snapshot_id | string, length 40 |
| revision_id | string, length 40 |
| branch_name | string, 777 classes |
| visit_date | timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64, 4.92k to 681M, nullable |
| star_events_count | int64, 0 to 209k |
| fork_events_count | int64, 0 to 110k |
| gha_license_id | string, 22 classes |
| gha_event_created_at | timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable |
| gha_language | string, 149 classes |
| src_encoding | string, 26 classes |
| language | string, 1 class |
| is_vendor | bool, 2 classes |
| is_generated | bool, 2 classes |
| length_bytes | int64, 3 to 10.2M |
| extension | string, 188 classes |
| content | string, length 3 to 10.2M |
| authors | sequence, length 1 |
| author_id | string, length 1-132 |

The data rows below are pipe-separated records in the column order above.
100854a6d18277d4c0dd905456dcf69e64b42394 | e96e9990ba26757b834eeff95e8bee9b720b72aa | /django/test007/blog/models.py | fa736c53142137dcf246805e9ccfa1fbf92b1a0a | [] | no_license | cuiyanan89/Python | 475a0a2778d7be5b9f6aa87ba35c21569080e056 | f742684474730e3b032aabd0151d584167c3ed02 | refs/heads/master | 2016-09-06T05:31:48.956411 | 2013-09-06T08:13:09 | 2013-09-06T08:13:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | from django.db import models
# Create your models here.
class New(models.Model):
new_title = models.CharField(max_length=30)
new_content = models.TextField(max_length=3000)
new_img = models.FileField(upload_to="./images")
def __unicode__(self):
return self.new_title
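# Example usage (an illustrative sketch, not part of the original file; it assumes
# a configured Django project with migrations applied for this app, and the field
# values below are made up):
# New.objects.create(new_title='First post', new_content='Hello', new_img='images/first.png')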
| [
"root@yanan-Rev-1-0.(none)"
] | root@yanan-Rev-1-0.(none) |
f0704c277601046e7ecff140c4ce76723f895a6f | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/python2.7/test/outstanding_bugs.py | 5a947e5deea9d551dd5f2994869ab7dd70a83e94 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 81 | py | /home/action/.parts/packages/python2/2.7.6/lib/python2.7/test/outstanding_bugs.py | [
"[email protected]"
] | |
fc0054ef638181b7cdaa31bdd9b2c7d6c701b84a | 8bbe2351bbd157a46ccf8530cde4e4cc7b0bd3b7 | /main.py | 94f313395298785422b7caeae12f2b205f2ce590 | [] | no_license | airuibel/py_code | 8dc98d71e79a4c0f785ad5cf81b2ca2073061ebf | 1da9a9dcd37475dd14bab6ae58bca1e2dff4c251 | refs/heads/master | 2020-06-18T03:47:43.754204 | 2018-03-20T09:31:00 | 2018-03-20T09:31:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,021 | py | # import package
import numpy as np
from pandas import DataFrame
import pandas as pd
import re
from dateutil import relativedelta
import datetime as dt
# 1.1
def df_groupby(df, groupkey, col, func, res_col_name, asint=False, dup=False):
"""
:param df: 一个df 征对 1+ 用户
:param groupkey: df中聚合分类的变量名
:param col: df中待聚合的变量名,字符串或者列表
:param func: 聚合方式,支持sum /max /min /avg /count/ distinct_count
:param res_col_name: 聚合结果列名,字符串或者列表
:param asint: if asint=True ,聚合结果转为int ;default asint=False;
:param dup: if dup=True ,变量取值去重 ;default dup=False;
:return:df_res df
"""
# drop rows that are entirely NaN
df = df.dropna(axis=0, how='all')
# reformat type
try:
if func != 'count' and func != 'distinct_count':
df[col] = df[col].astype('float32')
except ValueError:
print('the column could not be converted from string to float!')
# drop duplicate rows
if dup:
df = df.drop_duplicates(df.columns)
# accept a single string as well as a list
if type(col) != list:
col = [col]
if type(res_col_name) != list:
res_col_name = [res_col_name]
if type(func) != list:
func = [func]
# agg index
df_res = DataFrame(df[groupkey].unique(), columns=[groupkey])
for i in func:
if i == 'sum':
df_res_ago = DataFrame(df.groupby(groupkey)[col].sum())
elif i == 'max':
df_res_ago = DataFrame(df.groupby(groupkey)[col].max())
elif i == 'min':
df_res_ago = DataFrame(df.groupby(groupkey)[col].min())
elif i == 'avg':
df_res_ago = DataFrame(df.groupby(groupkey)[col].mean())
elif i == 'std':
df_res_ago = DataFrame(df.groupby(groupkey)[col].std())
elif i == 'count':
df_res_ago = DataFrame(df.groupby(groupkey)[col].count())
elif i == 'distinct_count':
df_res_ago = DataFrame(df.groupby(groupkey)[col].nunique())
else:
print('input func error!')
df_res_ago = df_res_ago.reset_index()
df_res = pd.merge(df_res, df_res_ago, how='left', on=groupkey)
columns_list = [groupkey]
columns_list.extend(res_col_name)
df_res.columns = columns_list
if asint:
df_res[res_col_name] = df_res[res_col_name].astype(int)
return df_res
# use example
# df_groupby(df,'appl_no', 'phone_gray_score', 'sum', 'phone_gray_score_sum', dup=False, asint=False)
# df_groupby(df,'appl_no', ['phone_gray_score'], ['sum'], ['phone_gray_score_sum'], dup=False, asint=False)
# df_groupby(df,'appl_no', ['register_cnt','phone_gray_score'], ['sum'], ['register_cnt_sum','phone_gray_score_sum'], dup=False, asint=False)
# df_groupby(df,'appl_no', ['register_cnt','phone_gray_score'], ['sum','avg','count'], ['register_cnt_sum','phone_gray_score_sum','register_cnt_avg','phone_gray_score_avg','register_cnt_count','phone_gray_score_count'], dup=False, asint=False)
# 1.2.1
def col_dummy(x, col, dummy_dict=[]):
"""
function about:变量编码功能函数集
by boysgs @20171103
:param x: 一个数值
:param col: df中需重新编码的变量名
:param dummy_dict: 列表,变量所有取值组成,示例['value_1','value_2']
:return:col_dummy_dict
"""
dummy_dict_sorted = sorted(dummy_dict)
dummy_dict_sorted_key = np.array(['_'.join(['if', col, i]) for i in dummy_dict_sorted])
dummy_dict_sorted_value = [0] * len(dummy_dict_sorted_key)
col_dummy_zip = zip(dummy_dict_sorted_key, dummy_dict_sorted_value)
col_dummy_dict = dict((a, b) for a, b in col_dummy_zip)
#
if x in dummy_dict_sorted:
col_dummy_dict['_'.join(['if', col, x])] = 1
return col_dummy_dict
# use example
# df = pd.DataFrame({'col1': [1, np.nan, 2, 3], 'col2': [3, 4, 5, 1], 'col3': ['s', 'a', 'c', 'd']})
# dummy_dict = ['a', 'b', 'c', 'd', 's']
# col = 'col3'
# DataFrame(list(df[col].apply(lambda x: col_dummy(x, col, dummy_dict))))
# 1.2.2
def col_dummy_lb(x, lb_trans, sorted_dummy_varname_list=[]):
"""
function about:变量编码功能函数集(使用LabelBinarizer方法)
by boysgs @20171103
:param x: 一个数值
:param lb_trans: 一个变量利用preprocessing.LabelBinarizer 方法生成的对象
:param sorted_dummy_varname_list: 列表,升序排列的变量所有取值组成,示例['value_1','value_2']
:return:col_dummy_dict 字典
"""
dummy_value = lb_trans.transform([x])
col_dummy_dict = dict(zip(sorted_dummy_varname_list, dummy_value[0]))
return col_dummy_dict
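# use example (an illustrative sketch, not from the original file; it assumes
# scikit-learn is installed and that lb_trans was fit on the column values,
# and the names below, e.g. sorted_names, are made up for the illustration)
# from sklearn import preprocessing
# values = ['a', 'c', 'd', 's']
# lb = preprocessing.LabelBinarizer().fit(values)
# sorted_names = ['_'.join(['if', 'col3', v]) for v in sorted(values)]
# col_dummy_lb('c', lb, sorted_names)
# # -> {'if_col3_a': 0, 'if_col3_c': 1, 'if_col3_d': 0, 'if_col3_s': 0}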
# 2.1
def meetOneCondition(x,symbol = '=',threshold = ('None','b')):
"""
# 输入:
# 变量名:年龄
# 符号:=,!=,>,< , >=, <= , in , not in,like, not like
# 阈值:10,(10,11),'%10%'
# 输出
# 满足条件输出1,否则输出0
"""
if pd.isnull(x) or x == '':
if symbol in ['!=','not in','not like'] and threshold!='None':
return 1
elif threshold=='None':
if symbol == '=':
return 1
elif symbol == '!=':
return 0
else:
return 0
elif symbol == '=':
if threshold=='None':
return 0
elif x == threshold:
return 1
else:
return 0
elif symbol == '!=':
if threshold=='None':
return 1
elif x != threshold:
return 1
else:
return 0
elif symbol == '>':
if x > threshold:
return 1
else:
return 0
elif symbol == '<':
if x < threshold:
return 1
else:
return 0
elif symbol == '>=':
if x >= threshold:
return 1
else:
return 0
elif symbol == '<=':
if x <= threshold:
return 1
else:
return 0
elif symbol == 'in':
if x in threshold:
return 1
else:
return 0
elif symbol == 'not in':
if x not in threshold:
return 1
else:
return 0
elif symbol == 'like':
if threshold[0] == '%' and threshold[-1] == '%':
if threshold[1:-1] in x:
return 1
else:
return 0
if threshold[0] == '%' and threshold[-1] != '%':
if threshold[1:] == x[len(x)-len(threshold[1:]):]:
return 1
else:
return 0
if threshold[0] != '%' and threshold[-1] == '%':
if threshold[0:-1] == x[0:len(threshold[0:-1])]:
return 1
else:
return 0
else:
return 'you need to check your "like" threshold'
elif symbol == 'not like':
if threshold[0] == '%' and threshold[-1] == '%':
if threshold[1:-1] not in x:
return 1
else:
return 0
if threshold[0] == '%' and threshold[-1] != '%':
if threshold[1:] != x[len(x)-len(threshold[1:]):]:
return 1
else:
return 0
if threshold[0] != '%' and threshold[-1] == '%':
if threshold[0:-1] != x[0:len(threshold[0:-1])]:
return 1
else:
return 0
else:
return 'you need to check your "not like" threshold'
elif symbol =='regex':
if re.search(threshold,x):
return 1
else:
return 0
else:
return 'please contact the developer to add this type of symbol'
# test:
# x = 'abcde'
# meetOneCondition(x,'=','abcd2')
# meetOneCondition(x,'like','abc%')
# meetOneCondition(x,'like','%abc')
# meetOneCondition(x,'regex','b|adz|z')
# 2.2
def meetMultiCondition(condition = ((),'and',())):
"""
# 输入
# 多个条件,单个条件参考meetOneCondition中的
# 例子 condition = ( ('age','>=',18), 'and', ( ('age','<=',40),'or',('gender','=','female') ) )
# 输出
# 满足条件输出1,否则输出0
"""
if 'and' in condition:
a = [k for k in condition if k!='and']
b = []
for l in range(len(a)):
b.append(meetMultiCondition(a[l]))
if 0 in b:
return 0
else:
return 1
if 'or' in condition:
a = [k for k in condition if k != 'or']
b = []
for l in range(len(a)):
b.append(meetMultiCondition(a[l]))
if 1 in b:
return 1
else:
return 0
else:
return meetOneCondition(condition[0],condition[1],condition[2])
# test
# zz ='abcde'
# yy = 10
# xx = 5
# meetMultiCondition(((zz,'=','abc'),'or',(yy,'>',7)))
# 2.3
def singleConditionalAssignment(conditon =('z','=',('None','b')),assig1=1, assig2=0):
"""
# 单条件赋值
# 输入
# 参考meetOneCondition的输入
# 例如:conditon = ('age','>=',18)
# 输出:
# 满足条件assig1
# 不满足条件assig2
"""
if meetOneCondition(conditon[0],conditon[1],conditon[2])==1:
return assig1
elif meetOneCondition(conditon[0], conditon[1], conditon[2]) == 0:
return assig2
else:
return meetOneCondition(conditon[0],conditon[1],conditon[2])
# test
# singleConditionalAssignment((x, '=', 'abcde'), 5, 1)
# 2.4
def multiConditionalAssignment(condition = (),assig1 = 1,assig2 = 0):
"""
# 多个条件赋值
###输入
##多个条件类似meetMultiCondition的输入
###输出:
##满足条件assig1
##不满足条件assig2
"""
if meetMultiCondition(condition)==1:
return assig1
else:
return assig2
# test
# xx=5
# multiConditionalAssignment(condition =((zz,'=','abcde'),'and',( (yy,'>',10), 'or', (xx,'=',5) )),assig1 = 999,assig2 = 0)
# 2.5
def multiConditionalMultAssignment(condition = ((('zz','not in', ('硕士','博士')),1),(('zz','not in', ('硕士','博士')),2)),assig = 0):
"""
####多个条件多个赋值
###输入
##多个条件类似meetMultiCondition的输入,再加一满足的取值
###输出:
##满足条件输出输入目标值
##不满足条件assig
"""
for l in condition:
if meetMultiCondition(l[0])==1:
return l[1]
return assig
# test
# multiConditionalMultAssignment((((zz,'=','abcdef'),1),((zz,'=','abcde'),2)),3)
# 3.1
def substring(string,length,pos_start=0):
"""
function about : 字符串截取
by dabao @20171106
:param string: 被截取字段
:param length: 截取长度
:param pos_start: 从第几位开始截取,defualt=0
:return: a string :substr
"""
pos_end = length + pos_start
if string is np.NaN:
return np.NaN
else:
str_type = type(string)
if str_type==str:
substr = string[pos_start:pos_end]
else:
string = str(string)
substr = string[pos_start:pos_end]
return substr
# test
# string=370321199103050629
# length=4
# pos_start=6
# substring(string,length,pos_start)
# string=np.NaN
# 3.2
def charindex(substr,string,pos_start=0):
"""
function about : 字符串位置查询
by dabao @20171106
:param substr
:param string: substr 在 string 起始位置
:param pos_start: 查找substr的开始位置,default=0
:return: a int :substr_index
"""
if string is np.NaN:
return np.NaN
else:
substr = str(substr)
string = str(string)
substr_index = string.find(substr,pos_start)
return substr_index
# test
# string='370321199103050629'
# substr='1991'
# charindex(substr,string)
# string.find(substr,0)
# 3.3
def trim(string,substr=' ',method='both'):
"""
function about : 删除空格或其他指定字符串
by dabao @20171106
:param string: a string
:param substr: 在string两端删除的指定字符串,default=' '
:param method: 删除方式:left 删除左边, right 删除右边, both 删除两边
:return: a string :string_alter
"""
if string is np.NaN:
return np.NaN
else:
substr = str(substr)
string = str(string)
if method in ['left','right','both']:
if method =='left':
string_alter = string.lstrip(substr)
elif method == 'right':
string_alter = string.rstrip(substr)
elif method == 'both':
string_alter = string.strip(substr)
else:
string_alter = string.strip(substr)
print("Warning: method must be in ['left','right','both']! If not, the function will be acting as 'both'")
return string_alter
# test:
# string=' OPPO,HUAWEI,VIVO,HUAWEI '
# trim(string)
# (4) String length: SQL's LEN() function; Python's built-in len()
# (5) Upper/lower case conversion: SQL's LOWER and UPPER; Python's built-in string.upper() and string.lower()
# 3.4
def OnlyCharNum(s,oth=''):
# keep only letters and digits
s2 = s.lower()
allowed = 'abcdefghijklmnopqrstuvwxyz0123456789'
for c in s2:
if c not in allowed:
s = s.replace(c,'')
return s
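# use example (a small sketch added for illustration):
# OnlyCharNum('abc-123_XYZ')  # returns 'abc123XYZ': letters and digits are kept, other characters are dropped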
# 4.1
def dateformat(date,symbol):
"""
输入:
变量名:时间,按照格式接收10位、19位
可选:'year','month','day','hour','minute','second'
输出
满足条件输出值,否则报错
"""
if pd.isnull(date):
return np.NaN
date = str(date)
if len(date)==10:
date=date+' 00:00:00'
date=dt.datetime.strptime(date,'%Y-%m-%d %H:%M:%S')
if symbol in ['year','month','day','hour','minute','second']:
if symbol =='year':
datetime_elect = date.year
elif symbol == 'month':
datetime_elect = date.month
elif symbol == 'day':
datetime_elect = date.day
elif symbol == 'hour':
datetime_elect = date.hour
elif symbol == 'minute':
datetime_elect = date.minute
elif symbol == 'second':
datetime_elect = date.second
else:
datetime_elect = np.NaN
print("Warning: symbol must be in ['year','month','day','hour','minute','second']! If not, the function will be acting as 'both'")
return datetime_elect
# test1:
# dateformat('2017-09-25 12:58:45','day')
# dateformat('2017-09-25 12:58:45','hour')
# dateformat('2017-09-25','day')
# dateformat(np.NaN,'hour')
# 4.2
def datediff(symbol,date_begin,date_end):
"""
输入:
变量名:时间,按照格式接收10位、19位
可选:'year','month','day','hour','minute','second'
输出
满足条件输出值,否则报错
"""
if pd.isnull(date_begin) or pd.isnull(date_end):
return np.NaN
date_begin = str(date_begin)
date_end = str(date_end)
if len(date_begin)==4:
date_begin=date_begin+'-01-01 00:00:00'
if len(date_end)==4:
date_end=date_end+'-01-01 00:00:00'
if len(date_begin)==7:
date_begin=date_begin+'-01 00:00:00'
if len(date_end)==7:
date_end=date_end+'-01 00:00:00'
if len(date_begin)==10:
date_begin=date_begin+' 00:00:00'
if len(date_end)==10:
date_end=date_end+' 00:00:00'
date_begin=dt.datetime.strptime(date_begin,'%Y-%m-%d %H:%M:%S')
date_end=dt.datetime.strptime(date_end,'%Y-%m-%d %H:%M:%S')
if symbol in ['year','month','day','hour','minute','second']:
r = relativedelta.relativedelta(date_end,date_begin)
if symbol =='year':
datetime_diff=r.years
elif symbol == 'month':
datetime_diff=r.years*12+r.months
elif symbol == 'day':
datetime_diff = (date_end-date_begin).days
elif symbol == 'hour':
datetime_days = (date_end-date_begin).days
datetime_seconds = (date_end-date_begin).seconds
datetime_diff = datetime_seconds/3600+datetime_days*24
elif symbol == 'minute':
datetime_days = (date_end-date_begin).days
datetime_seconds = (date_end-date_begin).seconds
datetime_diff=datetime_seconds/60+datetime_days*24*60
elif symbol == 'second':
datetime_days = (date_end-date_begin).days
datetime_seconds = (date_end-date_begin).seconds
datetime_diff=datetime_seconds+datetime_days*24*60*60
else:
datetime_diff = np.NaN
print("Warning: symbol must be in ['year','month','day','hour','minute','second']! If not, the function will be acting as 'both'")
return datetime_diff
# test
# datediff('month','2013','2017-09-25 12:58:45')
# datediff('day','2017-09-25','2017-12-30')
# datediff('hour','2017-09-15 10:58:45','2017-09-25 12:58:45')
# datediff('day','2017-09-25','2017-12-30 12:58:45') | [
"l"
] | l |
b3fb5072be2c9803b039ffc66f3bf3a06a4247b1 | 4755dabdcff6a45b9c15bf9ea814c6b8037874bd | /devel/lib/python2.7/dist-packages/snakebot_position_control/msg/__init__.py | 7e50b3d802aa4cf5d4063bde91254d3fba75ff3c | [] | no_license | Rallstad/RobotSnake | 676a97bdfde0699736d613e73d539929a0c2b492 | 37ee6d5af0458b855acf7c2b83e0ee17833dbfd1 | refs/heads/master | 2023-01-03T05:46:46.268422 | 2018-05-27T16:01:47 | 2018-05-27T16:01:47 | 308,665,980 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38 | py | from ._PositionControlEffort import *
| [
"[email protected]"
] | |
828540d2bb15e92786f7d4e9d29d60f51087bb38 | 908cf8e6ef52033bbf3d5afbb29637a25f5d66f8 | /test/test_codat_data_contracts_datasets_journal_entry_paged_response_model.py | 2fed4e4b1c7bd6ead4aef71f66240ef4f130e40b | [] | no_license | procurify/codat-python-sdk | 074769a2d9e72640741689b6f51e880d35b88095 | 3c8f664998427bda32bad8062c3bf324f39506da | refs/heads/master | 2023-08-25T03:55:19.817085 | 2021-10-22T22:14:34 | 2021-10-22T22:14:34 | 395,381,471 | 1 | 0 | null | 2021-10-20T21:10:31 | 2021-08-12T16:31:03 | Python | UTF-8 | Python | false | false | 1,570 | py | """
Codat API
[What's changed in our Swagger](https://docs.codat.io/docs/new-swagger-ui) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import codat_python_sdk
from codat_python_sdk.model.codat_data_contracts_datasets_journal_entry import CodatDataContractsDatasetsJournalEntry
from codat_python_sdk.model.codat_data_contracts_datasets_journal_entry_paged_response_links_model import CodatDataContractsDatasetsJournalEntryPagedResponseLinksModel
globals()['CodatDataContractsDatasetsJournalEntry'] = CodatDataContractsDatasetsJournalEntry
globals()['CodatDataContractsDatasetsJournalEntryPagedResponseLinksModel'] = CodatDataContractsDatasetsJournalEntryPagedResponseLinksModel
from codat_python_sdk.model.codat_data_contracts_datasets_journal_entry_paged_response_model import CodatDataContractsDatasetsJournalEntryPagedResponseModel
class TestCodatDataContractsDatasetsJournalEntryPagedResponseModel(unittest.TestCase):
"""CodatDataContractsDatasetsJournalEntryPagedResponseModel unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testCodatDataContractsDatasetsJournalEntryPagedResponseModel(self):
"""Test CodatDataContractsDatasetsJournalEntryPagedResponseModel"""
# FIXME: construct object with mandatory attributes with example values
# model = CodatDataContractsDatasetsJournalEntryPagedResponseModel() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
1bcbcbfe92659458a764c39a0f71f668340971fc | 2b0eab74af8d23244ff11699830f9bb10fbd717a | /accounts/perms.py | bd00bb6b63018efa7cc39d7709ce8ee5829b7d04 | [] | no_license | alexandrenorman/mixeur | c7e25cd20b03c78b361cb40e3e359a6dc5d9b06b | 95d21cd6036a99c5f399b700a5426e9e2e17e878 | refs/heads/main | 2023-03-13T23:50:11.800627 | 2021-03-07T15:49:15 | 2021-03-07T15:49:15 | 345,384,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,451 | py | # -*- coding: utf-8 -*-
from simple_perms import PermissionLogic, register
from helpers.mixins import BasicPermissionLogicMixin
class UserPermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, user_to_view, *args):
if user_to_view == user:
return True
if user.is_client or user.is_professional:
return False
if user.is_administrator or user.is_advisor or user.is_manager:
return True
return self.admin_permission(user, user_to_view, *args)
def change(self, user, user_to_modify, *args):
if user_to_modify == user:
return True
if user.is_client or user.is_professional:
return False
if user.is_administrator:
return True
# Allow same group modifications
if user_to_modify.group is not None and user_to_modify.group.is_member(user):
if user.is_advisor and user_to_modify.is_advisor:
return True
if user.is_manager and (
user_to_modify.is_advisor or user_to_modify.is_manager
):
return True
if (user.is_advisor or user.is_manager) and user_to_modify.is_client:
return True
if (
user.is_manager
and user_to_modify.is_advisor
and user_to_modify.group.admin_group == user.group
and user.group.is_admin
):
return True
if (
user.is_manager
and user_to_modify.is_manager
and user_to_modify.group == user.group
):
return True
return self.admin_permission(user, user_to_modify, *args)
def change_user_type(self, user, *args):
"""
Perm for user to change user_type for user_modified
Parameters
----------
user : User
args : Dict(user_modified, to_user_type)
"""
user_modified = args[0]["user_modified"]
to_user_type = args[0]["to_user_type"]
if user.is_client or user.is_professional:
return False
if user_modified.is_client or user_modified.is_professional:
return False
if to_user_type == "client" or to_user_type == "professional":
return False
if user.is_administrator:
return True
if user.is_manager:
if (
user_modified.is_advisor
or user_modified.is_superadvisor
or user_modified.is_manager
and user_modified.group.is_member(user)
):
if to_user_type in ["advisor", "superadvisor", "manager"]:
return True
if (
user.is_superadvisor
and to_user_type in ["advisor", "superadvisor"]
and user_modified.is_advisor
):
return True
return self.admin_permission(user, user_modified, *args)
register("user", UserPermissionLogic)
register("accounts/user", UserPermissionLogic)
class RgpdConsentPermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, rgpdconsent, *args):
if rgpdconsent.user == user:
return True
return self.admin_permission(user, rgpdconsent, *args)
change = view
register("rgpdconsent", RgpdConsentPermissionLogic)
register("accounts/rgpdconsent", RgpdConsentPermissionLogic)
class GroupPermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, group, *args):
if user.is_anonymous:
return False
if user.is_administrator:
return True
if user.is_advisor or user.is_manager:
return True
return self.admin_permission(user, group, *args)
def create(self, user, group, group_data, *args):
if user.is_anonymous:
return False
if user.is_administrator:
return True
if user.is_manager:
if not group_data:
return False
if user.group is not None:
if group is not None:
if group.admin_group.pk == user.group.pk:
return True
return self.admin_permission(user, None, *args)
def change(self, user, group, *args):
if user.is_anonymous:
return False
if user.is_administrator:
return True
if (
user.is_manager
and user.group is not None
and group.admin_group == user.group
):
return True
return self.admin_permission(user, group, *args)
def partial_change(self, user, group, *args):
"""
change only some fields on group
"""
if user.is_advisor and user.group is not None and group == user.group:
return True
return self.admin_permission(user, group, *args)
register("group", GroupPermissionLogic)
register("accounts/group", GroupPermissionLogic)
class GroupPlacePermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, group, *args):
if user.is_anonymous:
return False
if user.is_expert:
return True
return self.admin_permission(user, group, *args)
register("group_place", GroupPlacePermissionLogic)
register("accounts/group_place", GroupPlacePermissionLogic)
| [
"[email protected]"
] | |
4113853c20a7674a37b502b73ee6d10f9288b8e6 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/eventgrid/v20200101preview/get_event_subscription.py | 80b0097a55e3921372b7d0b2fecda5f6ee7bbc0b | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 8,537 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetEventSubscriptionResult',
'AwaitableGetEventSubscriptionResult',
'get_event_subscription',
]
@pulumi.output_type
class GetEventSubscriptionResult:
"""
Event Subscription
"""
def __init__(__self__, dead_letter_destination=None, destination=None, event_delivery_schema=None, expiration_time_utc=None, filter=None, id=None, labels=None, name=None, provisioning_state=None, retry_policy=None, topic=None, type=None):
if dead_letter_destination and not isinstance(dead_letter_destination, dict):
raise TypeError("Expected argument 'dead_letter_destination' to be a dict")
pulumi.set(__self__, "dead_letter_destination", dead_letter_destination)
if destination and not isinstance(destination, dict):
raise TypeError("Expected argument 'destination' to be a dict")
pulumi.set(__self__, "destination", destination)
if event_delivery_schema and not isinstance(event_delivery_schema, str):
raise TypeError("Expected argument 'event_delivery_schema' to be a str")
pulumi.set(__self__, "event_delivery_schema", event_delivery_schema)
if expiration_time_utc and not isinstance(expiration_time_utc, str):
raise TypeError("Expected argument 'expiration_time_utc' to be a str")
pulumi.set(__self__, "expiration_time_utc", expiration_time_utc)
if filter and not isinstance(filter, dict):
raise TypeError("Expected argument 'filter' to be a dict")
pulumi.set(__self__, "filter", filter)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if labels and not isinstance(labels, list):
raise TypeError("Expected argument 'labels' to be a list")
pulumi.set(__self__, "labels", labels)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if retry_policy and not isinstance(retry_policy, dict):
raise TypeError("Expected argument 'retry_policy' to be a dict")
pulumi.set(__self__, "retry_policy", retry_policy)
if topic and not isinstance(topic, str):
raise TypeError("Expected argument 'topic' to be a str")
pulumi.set(__self__, "topic", topic)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="deadLetterDestination")
def dead_letter_destination(self) -> Optional['outputs.StorageBlobDeadLetterDestinationResponse']:
"""
The DeadLetter destination of the event subscription.
"""
return pulumi.get(self, "dead_letter_destination")
@property
@pulumi.getter
def destination(self) -> Optional[Any]:
"""
Information about the destination where events have to be delivered for the event subscription.
"""
return pulumi.get(self, "destination")
@property
@pulumi.getter(name="eventDeliverySchema")
def event_delivery_schema(self) -> Optional[str]:
"""
The event delivery schema for the event subscription.
"""
return pulumi.get(self, "event_delivery_schema")
@property
@pulumi.getter(name="expirationTimeUtc")
def expiration_time_utc(self) -> Optional[str]:
"""
Expiration time of the event subscription.
"""
return pulumi.get(self, "expiration_time_utc")
@property
@pulumi.getter
def filter(self) -> Optional['outputs.EventSubscriptionFilterResponse']:
"""
Information about the filter for the event subscription.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified identifier of the resource
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def labels(self) -> Optional[Sequence[str]]:
"""
List of user defined labels.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Provisioning state of the event subscription.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="retryPolicy")
def retry_policy(self) -> Optional['outputs.RetryPolicyResponse']:
"""
The retry policy for events. This can be used to configure maximum number of delivery attempts and time to live for events.
"""
return pulumi.get(self, "retry_policy")
@property
@pulumi.getter
def topic(self) -> str:
"""
Name of the topic of the event subscription.
"""
return pulumi.get(self, "topic")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of the resource
"""
return pulumi.get(self, "type")
class AwaitableGetEventSubscriptionResult(GetEventSubscriptionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetEventSubscriptionResult(
dead_letter_destination=self.dead_letter_destination,
destination=self.destination,
event_delivery_schema=self.event_delivery_schema,
expiration_time_utc=self.expiration_time_utc,
filter=self.filter,
id=self.id,
labels=self.labels,
name=self.name,
provisioning_state=self.provisioning_state,
retry_policy=self.retry_policy,
topic=self.topic,
type=self.type)
def get_event_subscription(event_subscription_name: Optional[str] = None,
scope: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEventSubscriptionResult:
"""
Event Subscription
:param str event_subscription_name: Name of the event subscription
:param str scope: The scope of the event subscription. The scope can be a subscription, or a resource group, or a top level resource belonging to a resource provider namespace, or an EventGrid topic. For example, use '/subscriptions/{subscriptionId}/' for a subscription, '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for a resource group, and '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}' for a resource, and '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topics/{topicName}' for an EventGrid topic.
"""
__args__ = dict()
__args__['eventSubscriptionName'] = event_subscription_name
__args__['scope'] = scope
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:eventgrid/v20200101preview:getEventSubscription', __args__, opts=opts, typ=GetEventSubscriptionResult).value
return AwaitableGetEventSubscriptionResult(
dead_letter_destination=__ret__.dead_letter_destination,
destination=__ret__.destination,
event_delivery_schema=__ret__.event_delivery_schema,
expiration_time_utc=__ret__.expiration_time_utc,
filter=__ret__.filter,
id=__ret__.id,
labels=__ret__.labels,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
retry_policy=__ret__.retry_policy,
topic=__ret__.topic,
type=__ret__.type)
| [
"[email protected]"
] | |
a65604c0deab61126203e90bd4b92e397c7b27c7 | 73c9537b3e2dd9c57e581d474b9e2daf7a8fb02a | /petccenv/lib/python3.4/site-packages/django_summernote/__init__.py | c63655d2b511e883b3efacf6fa71393ea01972ab | [] | no_license | pviniciusm/petcc | 8f6ec2966729051f11b482c4c7ed522df3f920ba | 30ccddce6d0e39ccea492ac73b2ddca855c63cee | refs/heads/master | 2021-01-21T13:29:52.835434 | 2016-04-23T18:06:07 | 2016-04-23T18:06:07 | 54,607,007 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | version_info = (0, 8, 2)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "Park Hyunwoo <[email protected]>"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
| [
"[email protected]"
] | |
f179eade30c3bd9c2fd92c1dcafbdf2683622c47 | 635cb7fb75048f9de7b95b48d1f59de68f9b3368 | /R01/sortowanie_obiektów_bez_wbudowanej_obsługi_porównań/example.py | 7641bed7b8787e11f23a4ef78d74ba00e90b1ae8 | [] | no_license | anpadoma/python_receptury3 | 9e889ac503e48eb62160050eecfdc4a64072c184 | c761f2c36707785a8a70bdaccebd7533c76dee21 | refs/heads/master | 2021-01-22T14:38:34.718999 | 2014-01-31T22:09:44 | 2014-01-31T22:09:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | from operator import attrgetter
class User:
def __init__(self, user_id):
self.user_id = user_id
def __repr__(self):
return 'User({})'.format(self.user_id)
# Przykład
users = [User(23), User(3), User(99)]
print(users)
# Sort by the user_id field
print(sorted(users, key=attrgetter('user_id')))
| [
"[email protected]"
] | |
b539a324c93a3ce5b5b5feedc5d1287601d63ffd | 0b4957de738dd05f964ea838016b4b811feca970 | /tests/utils/test_utils_shell.py | fdae13b81ae7f8e06716a3e3f09b9ce5f7a76e6a | [
"MIT",
"Apache-2.0"
] | permissive | bossjones/ultron8 | bdb5db72ba58b80645ae417cdf97287cfadd325d | 09d69c788110becadb9bfaa7b3d2a2046f6b5a1c | refs/heads/master | 2023-01-13T06:52:45.679582 | 2023-01-03T22:25:54 | 2023-01-03T22:25:54 | 187,934,920 | 0 | 0 | Apache-2.0 | 2023-01-03T22:25:56 | 2019-05-22T00:44:03 | Python | UTF-8 | Python | false | false | 1,008 | py | """Test shell utils"""
# pylint: disable=protected-access
import logging
import pytest
from six.moves import zip
from ultron8.utils.shell import quote_unix
logger = logging.getLogger(__name__)
@pytest.mark.utilsonly
@pytest.mark.unittest
class TestShellUtilsTestCase:
def test_quote_unix(self):
arguments = ["foo", "foo bar", "foo1 bar1", '"foo"', '"foo" "bar"', "'foo bar'"]
expected_values = [
"""
foo
""",
"""
'foo bar'
""",
"""
'foo1 bar1'
""",
"""
'"foo"'
""",
"""
'"foo" "bar"'
""",
"""
''"'"'foo bar'"'"''
""",
]
for argument, expected_value in zip(arguments, expected_values):
actual_value = quote_unix(value=argument)
expected_value = expected_value.lstrip()
assert actual_value == expected_value.strip()
| [
"[email protected]"
] | |
8f98de03e4669f0cea77fa4b917683db4d9be640 | 1f256bf20e68770c1a74f7e41ef6730623db0c74 | /location_management/migrations/0001_initial.py | 4015d233eb0c002e111dfd9acab22eacef6e3268 | [
"MIT"
] | permissive | davtoh/enterprise-website | 380ea32b730f16b7157e59ca0dc1e86d1f10e4a8 | 00b6c42cd6cb01517c152b9ffce9cfb56744703d | refs/heads/master | 2021-06-05T09:20:27.721789 | 2021-05-05T04:58:04 | 2021-05-05T05:14:05 | 141,315,681 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,933 | py | # Generated by Django 2.0.6 on 2018-07-05 23:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Cities',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('latitude', models.DecimalField(decimal_places=8, max_digits=10)),
('longitude', models.DecimalField(decimal_places=8, max_digits=11)),
],
),
migrations.CreateModel(
name='Countries',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('code', models.CharField(max_length=10)),
],
),
migrations.CreateModel(
name='States',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('code', models.CharField(max_length=10)),
('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='location_management.Countries')),
],
),
migrations.AddField(
model_name='cities',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='location_management.Countries'),
),
migrations.AddField(
model_name='cities',
name='state',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='location_management.States'),
),
]
| [
"[email protected]"
] | |
1a442845be688845257b798f6b9a0bb3d80717e6 | 8c7efb37b53717c228a017e0799eb477959fb8ef | /wmm/scenario/migrations/0116_auto__add_field_tidalenergyparameter_ordering__add_field_pelagicconser.py | 6686340589e48b393339faa86cbcd5a5df0aba84 | [] | no_license | rhodges/washington-marinemap | d3c9b24265b1a0800c7dcf0163d22407328eff57 | e360902bc41b398df816e461b3c864520538a226 | refs/heads/master | 2021-01-23T11:47:50.886681 | 2012-09-24T18:38:33 | 2012-09-24T18:38:33 | 32,354,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,555 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TidalEnergyParameter.ordering'
db.add_column('scenario_tidalenergyparameter', 'ordering', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
# Adding field 'PelagicConservationParameter.ordering'
db.add_column('scenario_pelagicconservationparameter', 'ordering', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
# Adding field 'OffshoreConservationParameter.ordering'
db.add_column('scenario_offshoreconservationparameter', 'ordering', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
# Adding field 'NearshoreConservationParameter.ordering'
db.add_column('scenario_nearshoreconservationparameter', 'ordering', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
# Adding field 'WaveEnergyParameter.ordering'
db.add_column('scenario_waveenergyparameter', 'ordering', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'TidalEnergyParameter.ordering'
db.delete_column('scenario_tidalenergyparameter', 'ordering')
# Deleting field 'PelagicConservationParameter.ordering'
db.delete_column('scenario_pelagicconservationparameter', 'ordering')
# Deleting field 'OffshoreConservationParameter.ordering'
db.delete_column('scenario_offshoreconservationparameter', 'ordering')
# Deleting field 'NearshoreConservationParameter.ordering'
db.delete_column('scenario_nearshoreconservationparameter', 'ordering')
# Deleting field 'WaveEnergyParameter.ordering'
db.delete_column('scenario_waveenergyparameter', 'ordering')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'scenario.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'})
},
'scenario.chlorophyl': {
'Meta': {'object_name': 'Chlorophyl'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.conservationobjective': {
'Meta': {'object_name': 'ConservationObjective'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'objective': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Objective']", 'null': 'True', 'blank': 'True'})
},
'scenario.conservationsite': {
'Meta': {'object_name': 'ConservationSite'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'scenario_conservationsite_related'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'geometry_final': ('django.contrib.gis.db.models.fields.PolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'geometry_orig': ('django.contrib.gis.db.models.fields.PolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manipulators': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': "'255'"}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'sharing_groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'scenario_conservationsite_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scenario_conservationsite_related'", 'to': "orm['auth.User']"})
},
'scenario.depthclass': {
'Meta': {'object_name': 'DepthClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.energyobjective': {
'Meta': {'object_name': 'EnergyObjective'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'objective': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Objective']", 'null': 'True', 'blank': 'True'})
},
'scenario.geomorphology': {
'Meta': {'object_name': 'Geomorphology'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.mos': {
'Meta': {'object_name': 'MOS'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'scenario_mos_related'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_chlorophyl_pelagic_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Chlorophyl']", 'null': 'True', 'blank': 'True'}),
'input_depth_class_offshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'MOSOffshoreConservationDepthClass'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['scenario.DepthClass']"}),
'input_dist_astoria_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_astoria_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_astoria_wind_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_hoquium_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_hoquium_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_hoquium_wind_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_port_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_port_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_port_wind_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_shore_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_shore_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_shore_wind_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_ecosystem_nearshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.NearshoreEcosystem']", 'null': 'True', 'blank': 'True'}),
'input_exposure_nearshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.NearshoreExposure']", 'null': 'True', 'blank': 'True'}),
'input_geomorphology_offshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'MOSOffshoreConservationGeomorphology'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['scenario.Geomorphology']"}),
'input_max_depth_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_depth_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_depth_wind_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_tidalmax_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_tidalmean_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_wavesummer_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_wavewinter_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_depth_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_depth_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_depth_wind_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_tidalmax_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_tidalmean_tidal_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_wavesummer_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_wavewinter_wave_energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_objectives': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Objective']", 'null': 'True', 'blank': 'True'}),
'input_objectives_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.ConservationObjective']", 'null': 'True', 'blank': 'True'}),
'input_objectives_energy': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.EnergyObjective']", 'null': 'True', 'blank': 'True'}),
'input_parameters_nearshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['scenario.NearshoreConservationParameter']", 'symmetrical': 'False'}),
'input_parameters_offshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['scenario.OffshoreConservationParameter']", 'symmetrical': 'False'}),
'input_parameters_pelagic_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['scenario.PelagicConservationParameter']", 'symmetrical': 'False'}),
'input_parameters_tidal_energy': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.TidalEnergyParameter']", 'null': 'True', 'blank': 'True'}),
'input_parameters_wave_energy': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['scenario.WaveEnergyParameter']", 'symmetrical': 'False'}),
'input_parameters_wind_energy': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['scenario.WindEnergyParameter']", 'symmetrical': 'False'}),
'input_substrate_nearshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.NearshoreSubstrate']", 'null': 'True', 'blank': 'True'}),
'input_substrate_offshore_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'MOSOffshoreConservationSubstrate'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['scenario.Substrate']"}),
'input_substrate_tidal_energy': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.TidalSubstrate']", 'null': 'True', 'blank': 'True'}),
'input_substrate_wave_energy': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'MOSWaveEnergySubstrate'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['scenario.Substrate']"}),
'input_substrate_wind_energy': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'MOSWindEnergySubstrate'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['scenario.Substrate']"}),
'input_upwelling_pelagic_conservation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Upwelling']", 'null': 'True', 'blank': 'True'}),
'input_wind_potential_wind_energy': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.WindPotential']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': "'255'"}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'overlap_geom': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'scenarios': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Scenario']", 'null': 'True', 'blank': 'True'}),
'sharing_groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'scenario_mos_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
'support_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scenario_mos_related'", 'to': "orm['auth.User']"})
},
'scenario.nearshoreconservationparameter': {
'Meta': {'object_name': 'NearshoreConservationParameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'})
},
'scenario.nearshoreconservationparameterarea': {
'Meta': {'object_name': 'NearshoreConservationParameterArea'},
'area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'scenario.nearshoreecosystem': {
'Meta': {'object_name': 'NearshoreEcosystem'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.nearshoreexposure': {
'Meta': {'object_name': 'NearshoreExposure'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.nearshoresubstrate': {
'Meta': {'object_name': 'NearshoreSubstrate'},
'color': ('django.db.models.fields.CharField', [], {'default': "'778B1A55'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.objective': {
'Meta': {'object_name': 'Objective'},
'color': ('django.db.models.fields.CharField', [], {'default': "'778B1A55'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'})
},
'scenario.offshoreconservationparameter': {
'Meta': {'object_name': 'OffshoreConservationParameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'})
},
'scenario.offshoreconservationparameterarea': {
'Meta': {'object_name': 'OffshoreConservationParameterArea'},
'area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'scenario.parameter': {
'Meta': {'object_name': 'Parameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'})
},
'scenario.pelagicconservationparameter': {
'Meta': {'object_name': 'PelagicConservationParameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'})
},
'scenario.pelagicconservationparameterarea': {
'Meta': {'object_name': 'PelagicConservationParameterArea'},
'area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'scenario.scenario': {
'Meta': {'object_name': 'Scenario'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'scenario_scenario_related'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'geometry_final': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_chlorophyl': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Chlorophyl']", 'null': 'True', 'blank': 'True'}),
'input_depth_class': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.DepthClass']", 'null': 'True', 'blank': 'True'}),
'input_dist_astoria': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_hoquium': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_port': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_dist_shore': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_geomorphology': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Geomorphology']", 'null': 'True', 'blank': 'True'}),
'input_max_depth': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_tidalmax': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_tidalmean': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_wavesummer': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_max_wavewinter': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_depth': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_tidalmax': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_tidalmean': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_wavesummer': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_min_wavewinter': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'input_nearshore_ecosystem': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.NearshoreEcosystem']", 'null': 'True', 'blank': 'True'}),
'input_nearshore_exposure': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.NearshoreExposure']", 'null': 'True', 'blank': 'True'}),
'input_nearshore_substrate': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.NearshoreSubstrate']", 'null': 'True', 'blank': 'True'}),
'input_objective': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Objective']"}),
'input_parameters': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'}),
'input_substrate': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Substrate']", 'null': 'True', 'blank': 'True'}),
'input_tidal_substrate': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.TidalSubstrate']", 'null': 'True', 'blank': 'True'}),
'input_upwelling': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.Upwelling']", 'null': 'True', 'blank': 'True'}),
'input_wind_potential': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['scenario.WindPotential']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': "'255'"}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'output_area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'output_geom': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'output_mapcalc': ('django.db.models.fields.CharField', [], {'max_length': '720', 'null': 'True', 'blank': 'True'}),
'output_report': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sharing_groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'scenario_scenario_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scenario_scenario_related'", 'to': "orm['auth.User']"})
},
'scenario.substrate': {
'Meta': {'object_name': 'Substrate'},
'color': ('django.db.models.fields.CharField', [], {'default': "'778B1A55'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.tidalenergyparameter': {
'Meta': {'object_name': 'TidalEnergyParameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'})
},
'scenario.tidalenergyparameterarea': {
'Meta': {'object_name': 'TidalEnergyParameterArea'},
'area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'scenario.tidalsubstrate': {
'Meta': {'object_name': 'TidalSubstrate'},
'color': ('django.db.models.fields.CharField', [], {'default': "'778B1A55'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.upwelling': {
'Meta': {'object_name': 'Upwelling'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'scenario.waveenergyparameter': {
'Meta': {'object_name': 'WaveEnergyParameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'})
},
'scenario.waveenergyparameterarea': {
'Meta': {'object_name': 'WaveEnergyParameterArea'},
'area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'scenario.windenergyparameter': {
'Meta': {'object_name': 'WindEnergyParameter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scenario.Parameter']", 'null': 'True', 'blank': 'True'})
},
'scenario.windenergyparameterarea': {
'Meta': {'object_name': 'WindEnergyParameterArea'},
'area': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'scenario.windenergysite': {
'Meta': {'object_name': 'WindEnergySite'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'scenario_windenergysite_related'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'geometry_final': ('django.contrib.gis.db.models.fields.PolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'geometry_orig': ('django.contrib.gis.db.models.fields.PolygonField', [], {'srid': '32610', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manipulators': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': "'255'"}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'sharing_groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'scenario_windenergysite_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scenario_windenergysite_related'", 'to': "orm['auth.User']"})
},
'scenario.windpotential': {
'Meta': {'object_name': 'WindPotential'},
'density': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'speed': ('django.db.models.fields.CharField', [], {'max_length': '30'})
}
}
complete_apps = ['scenario']
| [
"[email protected]"
] | |
a811597869c088ec4c17da0719f6b9a3e9e8a9b8 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_46/83.py | 728c1c577aee018ba646a8511a4f62a6e9af6751 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,459 | py | import psyco
psyco.full()
class memoize:
def __init__(self, function):
self.function = function
self.memoized = {}
def __call__(self, *args):
if args not in self.memoized:
self.memoized[args] = self.function(*args)
return self.memoized[args]
def clear(self):
self.memoized = {}
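# Added usage sketch (not part of the original submission): `memoize` caches
# results keyed by the argument tuple, so it can wrap a recursive helper as a
# decorator, e.g.
#
#   @memoize
#   def fib(n):
#       return n if n < 2 else fib(n - 1) + fib(n - 2)
#
# fib(80) then runs in linear time, and fib.clear() empties the cache.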
def alloc(size, default = 0): return [default] * size
def alloc2(r, c, default = 0): return [alloc(c, default) for _ in range(r)]  # one fresh list per row; '[...] * r' would alias the same row list r times
def isset(a, bit): return ((a >> bit) & 1) > 0
def dig(c): return ord(c) - 48
def abs(x):
if x<0: return -x;
return x
def area(x1, y1, x2, y2, x3, y3):
return abs((x3-x1)*(y2-y1) - (x2-x1)*(y3-y1))/2
def bisection(f, lo, hi):
"""
finds the integer x where f(x)=0.
assumes f is monotonic.
"""
while lo < hi:
mid = (lo+hi)//2
midval = f(mid)
if midval < 0:
lo = mid+1
elif midval > 0:
hi = mid
else:
return mid
return None
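# Added usage sketch (illustration only, not from the original solution):
# `bisection` assumes f is monotonically non-decreasing on [lo, hi), e.g. the
# integer square root of a perfect square:
#
#   root = bisection(lambda x: x * x - 12321, 0, 12321)   # -> 111
#
# It returns None when no integer in the range satisfies f(x) == 0 exactly.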
def minarg(f, args):
min_val = None
min_arg = None
for a in args:
temp=f(a)
if min_arg==None or temp<min_val:
min_val=temp
min_arg=a
return min_arg, min_val
#mat[i] = lowest row for the row currently at position i
def solve():
c=0
for i in range(N):
#print mat, c
#print "i=", i
if mat[i]>i:
for j in range(i+1, N):
if mat[j]<=i:
#print "replace", i, " with ", j
mat.insert(i, mat[j])
#print mat
del mat[j+1]
#mat[j]=None
c+=j-i
break
return c
from time import time
if __name__ == "__main__":
def getInts(): return map(int, input.readline().rstrip('\n').split(' '))
def getFloats(): return map(float, input.readline().rstrip('\n').split(' '))
def getMatrix(rows): return [getInts() for _ in range(rows)]
input, output = open("d:/gcj/in", "r"), open('d:/gcj/output', 'w')
start_time=time()
for case in range(1, int(input.readline()) + 1):
N, = getInts()
mat=[[int(d) for d in input.readline().rstrip('\n')] for _ in range(N)]
for i in range(N):
j=N-1
while j>0 and mat[i][j]==0:
j-=1
mat[i]=j
s="Case #%d: %d\n" % (case, solve())
print s
output.write(s)
print time()-start_time
| [
"[email protected]"
] | |
677fb51759db8a07210bb76240c9cbab445670b8 | edcd74f8f65119bdbe737360c2ca33b4a6da160a | /python/problem-string/two_characters.py | 10b3ac19c02ca478f6a224f3f683e11fe2efc679 | [] | no_license | hyunjun/practice | 72e83de6a1d5e04ddcd16526f16110ea2dd00373 | 5376dd48b1cefb4faba9d2ef6a8a497b6b1d6c67 | refs/heads/master | 2023-08-31T07:00:37.320351 | 2023-08-17T07:29:24 | 2023-08-17T07:29:24 | 2,704,126 | 3 | 2 | null | 2022-12-14T20:25:07 | 2011-11-03T18:28:44 | Python | UTF-8 | Python | false | false | 1,698 | py | # https://www.hackerrank.com/challenges/two-characters
from collections import Counter
from collections import defaultdict
def alternate(s):
if s is None or 0 == len(s):
return 0
consecutiveSet = set()
for i, c in enumerate(s):
if 0 == i:
continue
if s[i - 1] == c:
consecutiveSet.add(c)
#print(consecutiveSet)
def isAlternating(cand):
for i, c in enumerate(cand):
if 0 == i:
continue
if cand[i - 1] == c:
return False
return True
cntDict = Counter([c for c in s if c not in consecutiveSet])
cntCharDict = defaultdict(list)
for c, cnt in cntDict.items():
cntCharDict[cnt].append(c)
sortedCntCharList = sorted(cntCharDict.items(), key=lambda t: t[0], reverse=True)
#print(sortedCntCharList)
for i, (cnt1, charList1) in enumerate(sortedCntCharList):
for j, (cnt2, charList2) in enumerate(sortedCntCharList):
if j < i or 1 < abs(cnt1 - cnt2):
continue
for ch1 in charList1:
for ch2 in charList2:
if ch1 == ch2:
continue
cand = [c for c in s if c == ch1 or c == ch2]
#print(cand)
if isAlternating(cand):
return len(cand)
return 0
data = [('abaacdabd', 4),
('beabeefeab', 5),
('asdcbsdcagfsdbgdfanfghbsfdab', 8),
('asvkugfiugsalddlasguifgukvsa', 0),
]
for s, expected in data:
real = alternate(s)
print('{}, expected {}, real {}, result {}'.format(s, expected, real, expected == real))
| [
"[email protected]"
] | |
3f7a3592ecb43458823f4a89ef52c6dcfbfef71c | 70d4ef0863906b3ca64f986075cd35b8412b871e | /blueapps/account/sites/default.py | e996ac9936aeb25beb19699d619290f60b693d5c | [
"MIT",
"BSD-3-Clause",
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | selinagyan/bk-sops | 72db0ac33d9c307f51769e4baa181ceb8e1b279e | 39e63e66416f688e6a3641ea8e975d414ece6b04 | refs/heads/master | 2020-05-07T16:44:33.312442 | 2019-04-11T02:09:25 | 2019-04-11T02:09:25 | 180,696,241 | 0 | 0 | null | 2019-04-11T02:07:11 | 2019-04-11T02:07:10 | null | UTF-8 | Python | false | false | 2,955 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
class ConfFixture(object):
"""
Summary of the project variables used by the login module
"""
#################
# Browser parameters #
#################
# Login module; options are the modules under the components directory, e.g. qcloud_tlogin
BACKEND_TYPE = None
# User authentication backend, e.g. qcloud_tlogin.backends.QPtloginBackend
USER_BACKEND = None
# Login-required middleware, e.g. qcloud_tlogin.middlewares.LoginRequiredMiddleware
LOGIN_REQUIRED_MIDDLEWARE = None
# User model, e.g. qcloud_tlogin.models.UserProxy
USER_MODEL = None
# Login platform popup URL, e.g. http://xxxx.com/accounts/login_page/
CONSOLE_LOGIN_URL = None
# Login platform URL, e.g. http://login.o.qcloud.com
LOGIN_URL = None
# Embeddable login platform URL (for popups/IFrames), e.g. http://xxx.com/plain/
LOGIN_PLAIN_URL = None
# Whether an embeddable unified login page is provided
HAS_PLAIN = True
# Whether to add a cross-domain prefix marker when redirecting to the login platform
# http://xxx.com/login/?c_url={CROSS_PREFIX}http%3A//xxx.com%3A8000/
ADD_CROSS_PREFIX = True
CROSS_PREFIX = ''
# Whether to append the APP_CODE when redirecting to the login platform
# http://xxx.com/login/?c_url=http%3A//xxx.com%3A8000/&app_code=xxx
ADD_APP_CODE = True
# http://xxx.com/login/?c_url=http%3A//xxx.com%3A8000/&{APP_KEY}=xxx
APP_KEY = 'app_code'
SETTINGS_APP_KEY = 'APP_CODE'
# Name of the callback parameter used when redirecting to the login platform
# http://xxx.com/login/?{C_URL}=http%3A//xxx.com%3A8000/
C_URL = 'c_url'
# Size of the embedded login platform page; determines the popup size the frontend adapts to
IFRAME_HEIGHT = 490
IFRAME_WIDTH = 460
###############
# WeChat parameters #
###############
# Login module: weixin
WEIXIN_BACKEND_TYPE = None
# User authentication middleware, e.g. bk_ticket.middlewares.LoginRequiredMiddleware
WEIXIN_MIDDLEWARE = None
# User authentication backend, e.g. bk_ticket.backends.TicketBackend
WEIXIN_BACKEND = None
# User info URL, e.g. http://xxx.com/user/weixin/get_user_info/
WEIXIN_INFO_URL = None
# User OAuth authorization URL, e.g. https://xxx.com/connect/oauth2/authorize
WEIXIN_OAUTH_URL = None
# Application ID on the WeChat side, e.g. 'xxxx'
WEIXIN_APP_ID = None
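# Purely illustrative example (added note; the module name and values below are
# made up): a concrete login backend under the components directory is expected
# to ship a fixture that fills these settings in, roughly like
#
#   class ConfFixture(object):
#       BACKEND_TYPE = 'custom_login'
#       USER_BACKEND = 'custom_login.backends.CustomLoginBackend'
#       LOGIN_REQUIRED_MIDDLEWARE = 'custom_login.middlewares.LoginRequiredMiddleware'
#       LOGIN_URL = 'http://example.com/login/'
#       HAS_PLAIN = False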
| [
"[email protected]"
] | |
1b1d43ac638223550a5a9f28cb4d5f216a837cbf | 1fac53ab13a9a682ecd926857ef565fa779afae4 | /fbseries.py | 44da677508725917468869fb71285e9ed733a195 | [] | no_license | Shamabanu/python-1 | 339123ff4e7667d6331c207cb1c7ca3fc775dc48 | 4c1642679bb0bdd53a1d21e5421e04eb7abda65b | refs/heads/master | 2020-04-13T23:49:27.700807 | 2018-12-29T15:10:26 | 2018-12-29T15:10:26 | 163,516,492 | 1 | 0 | null | 2018-12-29T14:16:28 | 2018-12-29T14:16:28 | null | UTF-8 | Python | false | false | 219 | py | def fibonacci(n):
if(n <= 1):
return n
else:
return(fibonacci(n-1) + fibonacci(n-2))
n = int(input("Enter no of terms:"))
print("Fibonacci sequence:")
for i in range(n):
print (fibonacci(i))
| [
"[email protected]"
] | |
52389b5b2bff83aa9b999bd20397ad5a96cf1b26 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_145/601.py | 1c4900414caa5c3d523730cdea08f4e249066ea5 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | #!/usr/bin/env python3
from fractions import gcd
from math import log
rounds = int(input())
for i in range(rounds):
n, d = input().split('/')
n = int(n)
d = int(d)
g = gcd(n,d)
n = n//g
d = d//g
if log(d,2) != round( log(d,2)):
print("Case #{}: impossible".format(i+1))
continue;
while n!=1 :
n -= 1
g = gcd(n,d)
n = n // g
d = d // g
print("Case #{}: {}".format(i+1,int(log(d,2))))
| [
"[email protected]"
] | |
93856c78a47412b99de857cb1abbf8b25758ad79 | f8bbdfb112618136fc4adccb03ce25fbfc48bff5 | /panel/config/admin/management_data/CustomPages/Member.py | 16842cd9719bcaac1229acc9f6e270cb55f48b24 | [] | no_license | lazypanda10117/CICSA-Ranking-Platform | 160973987b533ede6e0b94af29b5bc85646b2bc0 | d5f6ac64a1f85c3333c71a7d81749b49145b9a16 | refs/heads/master | 2022-12-09T23:21:28.649252 | 2020-04-28T22:53:07 | 2020-04-28T22:53:07 | 133,093,367 | 3 | 2 | null | 2021-09-22T17:51:39 | 2018-05-11T22:14:01 | Python | UTF-8 | Python | false | false | 3,351 | py | from cicsa_ranking.models import Member
from .AbstractCustomClass import AbstractCustomClass
from panel.component.CustomElements import Choices
from misc.CustomFunctions import MiscFunctions, RequestFunctions, LogFunctions
class MemberView(AbstractCustomClass):
def __init__(self, request):
self.base_class = Member
self.validation_table = {
'base_table_invalid': {'_state'},
'base_form_invalid': {'_state', 'id'},
}
super().__init__(request, self.base_class, self.validation_table)
# View Process Functions
def abstractFormProcess(self, action, **kwargs):
try:
post_dict = dict(self.request.POST)
dispatcher = super().populateDispatcher()
if dispatcher.get(action):
member_id = kwargs.pop('id', None)
member = self.useAPI(self.base_class).editSelf(id=member_id)
else:
member = self.base_class()
member.member_name = RequestFunctions.getSingleRequestObj(post_dict, 'member_name')
member.member_school = RequestFunctions.getSingleRequestObj(post_dict, 'member_school')
member.member_email = RequestFunctions.getSingleRequestObj(post_dict, 'member_email')
member.member_status = RequestFunctions.getSingleRequestObj(post_dict, 'member_status')
if not action == 'delete':
member.save()
LogFunctions.generateLog(
self.request, 'admin', LogFunctions.makeLogQuery(self.base_class, action.title(), id=member.id))
if action == 'delete':
member.delete()
except Exception:
print({"Error": "Cannot Process " + action.title() + " Request."})
# View Generating Functions
# Form Generating Functions
def getFieldData(self, **kwargs):
action = kwargs.pop('action')
element_id = kwargs.pop('element_id')
field_data_dispatcher = self.populateDispatcher()
if field_data_dispatcher.get(action):
field_data = MiscFunctions.filterDict(self.useAPI(self.base_class).getSelf(id=element_id).__dict__.items(),
self.validation_table['base_form_invalid'])
return field_data
return None
def getChoiceData(self):
choice_data = dict()
choice_data["member_status"] = Choices().getStatusChoices()
choice_data["member_school"] = Choices().getSchoolChoices()
return choice_data
def getDBMap(self, data):
return None
def getMultiChoiceData(self):
return None
def getSearchElement(self, **kwargs):
return None
# Table Generating Functions
def getTableSpecificHeader(self):
return [field.name for field in self.base_class._meta.get_fields()
if field.name not in self.validation_table['base_table_invalid']]
def getTableRowContent(self, content):
field_data = MiscFunctions.filterDict(self.useAPI(self.base_class).getSelf(id=content.id).__dict__.items(),
self.validation_table['base_table_invalid'])
field_data = self.updateChoiceAsValue(field_data, self.getChoiceData())
field_data = MiscFunctions.grabValueAsList(field_data)
return field_data
| [
"[email protected]"
] | |
399b13357b719cf03b12dbebc9c3cd588315a576 | 5774101105b47d78adb7a57eefdfa21502bbd70c | /Django/csvt-django/csvt05/manage.py | cf37dbd64ee66f2e67594f34529b3a7bff46f5fa | [] | no_license | zhlthunder/python-study | 34d928f0ebbdcd5543ae0f41baaea955c92f5c56 | 0f25dd5105ba46791842d66babbe4c3a64819ee5 | refs/heads/master | 2023-01-12T18:39:47.184978 | 2018-10-07T23:48:04 | 2018-10-07T23:48:04 | 90,516,611 | 0 | 1 | null | 2022-12-26T19:46:22 | 2017-05-07T07:39:48 | HTML | UTF-8 | Python | false | false | 804 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "csvt05.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
a06db2c071875ff44793b4fa25d314d8e7a501c1 | 0178c69ef9fc5e49cadeaadddb4839eeff3f4a2a | /examples/sac.py | edb4bb7454feec8eb93576ef06326455a559076a | [] | no_license | YangHaha11514/rlkit | 3b17de2b4861e12b8c13c849410b7fab335157df | 8c2ee5d1602423e352724a0b0845c646688f98df | refs/heads/master | 2020-03-14T06:22:53.568011 | 2018-03-11T01:31:38 | 2018-03-11T01:31:38 | 131,482,724 | 1 | 0 | null | 2018-04-29T09:46:53 | 2018-04-29T09:46:53 | null | UTF-8 | Python | false | false | 1,813 | py | """
Run PyTorch Soft Actor Critic on HalfCheetahEnv.
NOTE: You need PyTorch 0.3 or more (to have torch.distributions)
"""
import gym
import numpy as np
import rlkit.torch.pytorch_util as ptu
from rlkit.envs.wrappers import NormalizedBoxEnv
from rlkit.launchers.launcher_util import setup_logger
from rlkit.torch.sac.policies import TanhGaussianPolicy
from rlkit.torch.sac.sac import SoftActorCritic
from rlkit.torch.networks import FlattenMlp
def experiment(variant):
env = NormalizedBoxEnv(gym.make('HalfCheetah-v1'))
obs_dim = int(np.prod(env.observation_space.shape))
action_dim = int(np.prod(env.action_space.shape))
net_size = variant['net_size']
qf = FlattenMlp(
hidden_sizes=[net_size, net_size],
input_size=obs_dim + action_dim,
output_size=1,
)
vf = FlattenMlp(
hidden_sizes=[net_size, net_size],
input_size=obs_dim,
output_size=1,
)
policy = TanhGaussianPolicy(
hidden_sizes=[net_size, net_size],
obs_dim=obs_dim,
action_dim=action_dim,
)
algorithm = SoftActorCritic(
env=env,
policy=policy,
qf=qf,
vf=vf,
**variant['algo_params']
)
if ptu.gpu_enabled():
algorithm.cuda()
algorithm.train()
if __name__ == "__main__":
# noinspection PyTypeChecker
variant = dict(
algo_params=dict(
num_epochs=1000,
num_steps_per_epoch=1000,
num_steps_per_eval=1000,
batch_size=128,
max_path_length=999,
discount=0.99,
soft_target_tau=0.001,
policy_lr=3E-4,
qf_lr=3E-4,
vf_lr=3E-4,
),
net_size=300,
)
setup_logger('name-of-experiment', variant=variant)
experiment(variant)
| [
"[email protected]"
] | |
05352a15e8fe5729ce8218b174d55903f616d532 | 0f812d8a0a3743a9ff9df414e096a7f9830b0397 | /old/demo/onelinkmanipulator_demo_PID.py | 56c7d8e95f6edfbf0742cc0e0823707431e2d674 | [
"MIT"
] | permissive | pierrecaillouette/AlexRobotics | 18977eec79875b7fc8c84d11f1c680be93b43fcb | 2223100df3e141d88491dde3d60a4eadd07a5c72 | refs/heads/master | 2021-04-09T03:18:58.858708 | 2019-04-28T15:30:26 | 2019-04-28T15:30:26 | 248,833,850 | 0 | 0 | MIT | 2020-03-20T19:14:52 | 2020-03-20T19:14:52 | null | UTF-8 | Python | false | false | 2,985 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 6 15:27:04 2016
@author: alex
"""
import numpy as np
###########################
# Load libs
###########################
from AlexRobotics.dynamic import Manipulator
from AlexRobotics.control import linear
from AlexRobotics.control import ComputedTorque
from AlexRobotics.planning import RandomTree
from AlexRobotics.control import DPO
###########################
# Objectives
###########################
x_start = np.array([-3.0, 0.0])
x_goal = np.array([ 0.0, 0.0])
###########################
# Create objects
###########################
Robot = Manipulator.OneLinkManipulator()
PD = linear.PD( kp = 5 , kd = 2 )
PID = linear.PID( kp = 5 , kd = 2 , ki = 4 )
CTC = ComputedTorque.ComputedTorqueController( Robot )
SLD = ComputedTorque.SlidingModeController( Robot )
RRT = RandomTree.RRT( Robot , x_start )
VI = DPO.ValueIteration1DOF( Robot , 'quadratic' )
############################
# Params
############################
tmax = 8 # max motor torque
Robot.u_ub = np.array([ tmax]) # Control Upper Bounds
Robot.u_lb = np.array([-tmax]) # Control Lower Bounds
RRT.x_start = x_start
RRT.discretizeactions( 3 )
RRT.dt = 0.1
RRT.goal_radius = 0.3
RRT.max_nodes = 5000
RRT.max_solution_time = 5
RRT.dyna_plot = True
RRT.dyna_node_no_update = 10
RRT.traj_ctl_kp = 25
RRT.traj_ctl_kd = 10
PID.dt = 0.001
CTC.w0 = 2
SLD.lam = 1
SLD.nab = 0
SLD.D = 5
###########################
# Offline Planning
###########################
#RRT.find_path_to_goal( x_goal )
#RRT.plot_2D_Tree()
###########################
# Offline Optimization
###########################
#VI.first_step()
#VI.load_data( 'data/' + 'R1' + 'quadratic' )
#VI.compute_steps(1)
#
## Plot Value Iteration Results
#ValueIterationAlgo.plot_raw()
#ValueIterationAlgo.plot_J_nice( 2 )
###########################
# Assign controller
###########################
#Robot.ctl = PD.ctl
Robot.ctl = PID.ctl
#Robot.ctl = CTC.ctl
#Robot.ctl = SLD.ctl
#Robot.ctl = RRT.trajectory_controller
#VI.assign_interpol_controller()
###########################
# Simulation
###########################
Robot.plotAnimation( x_start , tf=10, n=10001, solver='euler' )
###########################
# Plots
###########################
Robot.Sim.phase_plane_trajectory()
#Robot.Sim.phase_plane_trajectory( PP_OL = False , PP_CL = True )
Robot.Sim.plot_CL()
###########################
# and more
###########################
#from AlexRobotics.dynamic import CustomManipulator
#BoeingArm = CustomManipulator.BoeingArm()
#BoeingArm.plot3DAnimation( x0 = np.array([0.2,0,0,0,0,0]) )
# Hold script in console
import matplotlib.pyplot as plt
plt.show() | [
"[email protected]"
] | |
3f38851402838e78a9602b3e882605fb1e2d4f86 | 14f4d045750f7cf45252838d625b2a761d5dee38 | /argo/test/test_io_k8s_kube_aggregator_pkg_apis_apiregistration_v1beta1_api_service_condition.py | 01d2de718c08b57e04b58fbd20a8e3d5c8c0eb44 | [] | no_license | nfillot/argo_client | cf8d7413d728edb4623de403e03d119fe3699ee9 | c8cf80842f9eebbf4569f3d67b9d8eff4ba405fa | refs/heads/master | 2020-07-11T13:06:35.518331 | 2019-08-26T20:54:07 | 2019-08-26T20:54:07 | 204,546,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,330 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.14.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import argo
from argo.models.io_k8s_kube_aggregator_pkg_apis_apiregistration_v1beta1_api_service_condition import IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition # noqa: E501
from argo.rest import ApiException
class TestIoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition(unittest.TestCase):
"""IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition(self):
"""Test IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition"""
# FIXME: construct object with mandatory attributes with example values
# model = argo.models.io_k8s_kube_aggregator_pkg_apis_apiregistration_v1beta1_api_service_condition.IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
8fe298aaf5cf8b93c96ab107fbe0f5771e3f5e25 | b775940595617a13289ee7006cf837f8f3a34480 | /examples/ppk_plot.py | 24497e1d56f97c75755d7197f2dbe75215961c3c | [] | no_license | Nathan-Walk/manufacturing | 5d2f58c2be45c9ccb2263bd750b6c18809fe76d4 | 2a22457ff9ef695da649a1e11d0cf7cb8ddde348 | refs/heads/master | 2023-03-08T19:48:15.613729 | 2021-02-26T01:05:46 | 2021-02-26T01:05:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | import logging
import matplotlib.pyplot as plt
from manufacturing import import_excel, ppk_plot
logging.basicConfig(level=logging.INFO)
data = import_excel('data/example_data_with_faults.xlsx', columnname='value (lcl=-7.4 ucl=7.4)', skiprows=3)
ppk_plot(**data)
plt.show()
| [
"[email protected]"
] | |
37857bc4bb9559c9e3f68635744baf75a7cc8762 | c086a38a366b0724d7339ae94d6bfb489413d2f4 | /PythonEnv/Lib/site-packages/docutils/utils/urischemes.py | 01335601af86e67266b95a75aa5f0935ea92bcf5 | [] | no_license | FlowkoHinti/Dionysos | 2dc06651a4fc9b4c8c90d264b2f820f34d736650 | d9f8fbf3bb0713527dc33383a7f3e135b2041638 | refs/heads/master | 2021-03-02T01:14:18.622703 | 2020-06-09T08:28:44 | 2020-06-09T08:28:44 | 245,826,041 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,028 | py | # $Id: urischemes.py 8376 2019-08-27 19:49:29Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
`schemes` is a dictionary with lowercase URI addressing schemes as
keys and descriptions as values. It was compiled from the index at
http://www.iana.org/assignments/uri-schemes (revised 2005-11-28)
and an older list at http://www.w3.org/Addressing/schemes.html.
"""
# Many values are blank and should be filled in with useful descriptions.
schemes = {
'about': 'provides information on Navigator',
'acap': 'Application Configuration Access Protocol; RFC 2244',
'addbook': "To add vCard entries to Communicator's Address Book",
'afp': 'Apple Filing Protocol',
'afs': 'Andrew File System global file names',
'aim': 'AOL Instant Messenger',
'callto': 'for NetMeeting links',
'castanet': 'Castanet Tuner URLs for Netcaster',
'chttp': 'cached HTTP supported by RealPlayer',
'cid': 'content identifier; RFC 2392',
'crid': 'TV-Anytime Content Reference Identifier; RFC 4078',
'data': ('allows inclusion of small data items as "immediate" data; '
'RFC 2397'),
'dav': 'Distributed Authoring and Versioning Protocol; RFC 2518',
'dict': 'dictionary service protocol; RFC 2229',
'dns': 'Domain Name System resources',
'eid': ('External ID; non-URL data; general escape mechanism to allow '
'access to information for applications that are too '
'specialized to justify their own schemes'),
'fax': ('a connection to a terminal that can handle telefaxes '
'(facsimiles); RFC 2806'),
'feed': 'NetNewsWire feed',
'file': 'Host-specific file names; RFC 1738',
'finger': '',
'freenet': '',
'ftp': 'File Transfer Protocol; RFC 1738',
'go': 'go; RFC 3368',
'gopher': 'The Gopher Protocol',
'gsm-sms': ('Global System for Mobile Communications Short Message '
'Service'),
'h323': ('video (audiovisual) communication on local area networks; '
'RFC 3508'),
'h324': ('video and audio communications over low bitrate connections '
'such as POTS modem connections'),
'hdl': 'CNRI handle system',
'hnews': 'an HTTP-tunneling variant of the NNTP news protocol',
'http': 'Hypertext Transfer Protocol; RFC 2616',
'https': 'HTTP over SSL; RFC 2818',
'hydra': 'SubEthaEdit URI. See http://www.codingmonkeys.de/subethaedit.',
'iioploc': 'Internet Inter-ORB Protocol Location?',
'ilu': 'Inter-Language Unification',
'im': 'Instant Messaging; RFC 3860',
'imap': 'Internet Message Access Protocol; RFC 2192',
'info': 'Information Assets with Identifiers in Public Namespaces',
'ior': 'CORBA interoperable object reference',
'ipp': 'Internet Printing Protocol; RFC 3510',
'irc': 'Internet Relay Chat',
'iris.beep': 'iris.beep; RFC 3983',
'iseek': 'See www.ambrosiasw.com; a little util for OS X.',
'jar': 'Java archive',
'javascript': ('JavaScript code; evaluates the expression after the '
'colon'),
'jdbc': 'JDBC connection URI.',
'ldap': 'Lightweight Directory Access Protocol',
'lifn': '',
'livescript': '',
'lrq': '',
'mailbox': 'Mail folder access',
'mailserver': 'Access to data available from mail servers',
'mailto': 'Electronic mail address; RFC 2368',
'md5': '',
'mid': 'message identifier; RFC 2392',
'mocha': '',
'modem': ('a connection to a terminal that can handle incoming data '
'calls; RFC 2806'),
'mtqp': 'Message Tracking Query Protocol; RFC 3887',
'mupdate': 'Mailbox Update (MUPDATE) Protocol; RFC 3656',
'news': 'USENET news; RFC 1738',
'nfs': 'Network File System protocol; RFC 2224',
'nntp': 'USENET news using NNTP access; RFC 1738',
'opaquelocktoken': 'RFC 2518',
'phone': '',
'pop': 'Post Office Protocol; RFC 2384',
'pop3': 'Post Office Protocol v3',
'pres': 'Presence; RFC 3859',
'printer': '',
'prospero': 'Prospero Directory Service; RFC 4157',
'rdar': ('URLs found in Darwin source '
'(http://www.opensource.apple.com/darwinsource/).'),
'res': '',
'rtsp': 'real time streaming protocol; RFC 2326',
'rvp': '',
'rwhois': '',
'rx': 'Remote Execution',
'sdp': '',
'service': 'service location; RFC 2609',
'shttp': 'secure hypertext transfer protocol',
'sip': 'Session Initiation Protocol; RFC 3261',
'sips': 'secure session intitiaion protocol; RFC 3261',
'smb': 'SAMBA filesystems.',
'snews': 'For NNTP postings via SSL',
'snmp': 'Simple Network Management Protocol; RFC 4088',
'soap.beep': 'RFC 3288',
'soap.beeps': 'RFC 3288',
'ssh': 'Reference to interactive sessions via ssh.',
't120': 'real time data conferencing (audiographics)',
'tag': 'RFC 4151',
'tcp': '',
'tel': ('a connection to a terminal that handles normal voice '
'telephone calls, a voice mailbox or another voice messaging '
'system or a service that can be operated using DTMF tones; '
'RFC 3966.'),
'telephone': 'telephone',
'telnet': 'Reference to interactive sessions; RFC 4248',
'tftp': 'Trivial File Transfer Protocol; RFC 3617',
'tip': 'Transaction Internet Protocol; RFC 2371',
'tn3270': 'Interactive 3270 emulation sessions',
'tv': '',
'urn': 'Uniform Resource Name; RFC 2141',
'uuid': '',
'vemmi': 'versatile multimedia interface; RFC 2122',
'videotex': '',
'view-source': 'displays HTML code that was generated with JavaScript',
'wais': 'Wide Area Information Servers; RFC 4156',
'whodp': '',
'whois++': 'Distributed directory service.',
'x-man-page': ('Opens man page in Terminal.app on OS X '
'(see macosxhints.com)'),
'xmlrpc.beep': 'RFC 3529',
'xmlrpc.beeps': 'RFC 3529',
'z39.50r': 'Z39.50 Retrieval; RFC 2056',
'z39.50s': 'Z39.50 Session; RFC 2056', }
| [
"="
] | = |
3c6272b5ed36863e8a7b012c1491944ae1bc0fed | d61f7eda203a336868c010abb8f9a6f45dd51adb | /217. Contains Duplicate.py | bfe1c7f845dd61be31b14fab7c6bc51dc3d70b9b | [] | no_license | Mschikay/leetcode | b91df914afc728c2ae1a13d3994568bb6c1dcffb | 7c5e5fe76cee542f67cd7dd3a389470b02597548 | refs/heads/master | 2020-04-17T12:11:38.810325 | 2019-10-06T02:37:32 | 2019-10-06T02:37:32 | 166,570,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | class Solution:
def containsDuplicate(self, nums: List[int]) -> bool:
# s = set()
# for n in nums:
# if n in s:
# return True
# s.add(n)
# return False
return not (len(set(nums)) == len(nums)) | [
"[email protected]"
] | |
10320c2b5c5d228ae3ada19ae71d1c1b9d7fff71 | 77d7f2c1284b276c95ad31b15ac2bde077f1ceca | /fastreid/data/common.py | 959fefb3f17b62bcdefa3071913ff3df58331735 | [
"Apache-2.0"
] | permissive | Cris-zj/fast-reid | a53f19fefe149eec93d0f1b2a1d61136d9c9eaf6 | db4b65444912cfd54675e6a52fa12e2d1321e971 | refs/heads/master | 2022-12-14T15:23:40.820118 | 2020-08-31T12:34:33 | 2020-08-31T12:34:33 | 291,639,026 | 2 | 0 | Apache-2.0 | 2020-08-31T06:56:24 | 2020-08-31T06:56:23 | null | UTF-8 | Python | false | false | 1,078 | py | # encoding: utf-8
"""
@author: liaoxingyu
@contact: [email protected]
"""
from torch.utils.data import Dataset
from .data_utils import read_image
class CommDataset(Dataset):
"""Image Person ReID Dataset"""
def __init__(self, img_items, transform=None, relabel=True):
self.img_items = img_items
self.transform = transform
self.relabel = relabel
pid_set = set([i[1] for i in img_items])
self.pids = sorted(list(pid_set))
if relabel: self.pid_dict = dict([(p, i) for i, p in enumerate(self.pids)])
def __len__(self):
return len(self.img_items)
def __getitem__(self, index):
img_path, pid, camid = self.img_items[index]
img = read_image(img_path)
if self.transform is not None: img = self.transform(img)
if self.relabel: pid = self.pid_dict[pid]
return {
"images": img,
"targets": pid,
"camid": camid,
"img_path": img_path
}
@property
def num_classes(self):
return len(self.pids)
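# Added usage sketch (not part of the original file); assumes torchvision is
# installed and that each img_item is a (path, pid, camid) tuple pointing at a
# real image file:
#
#   import torchvision.transforms as T
#   items = [("imgs/0001_c1.jpg", "0001", 0), ("imgs/0002_c2.jpg", "0002", 1)]
#   dataset = CommDataset(items, transform=T.ToTensor(), relabel=True)
#   sample = dataset[0]            # dict with "images", "targets", "camid", "img_path"
#   num_ids = dataset.num_classes  # number of distinct person ids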
| [
"[email protected]"
] | |
43161b15896e4902218ba23e07244705afec3bd9 | b0bb0dcdf8228cbdd02e47a9e2097892f7dd0861 | /bak/download.py | 03f5f93a9a8b197c04727d19e700059556ff1ede | [] | no_license | scmsqhn/zipline_for_u | 562de0d8ed638de431d207e6808db7e19fc168f7 | 369b17fd3142fcfb9ced7ce0b17a3a35a8af37d4 | refs/heads/master | 2021-01-20T06:51:09.012662 | 2017-05-02T14:54:38 | 2017-05-02T14:54:38 | 89,936,089 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,161 | py | # -*- coding: utf-8 -*-
# @Author: yuqing5
# date: 20151023
import tushare as ts
from sqlalchemy import create_engine
import datetime
import time
import pandas as pd
import os
import cPickle
from pandas import DataFrame
import pandas.io.sql as SQL
import sys
sys.path.append('./utility/')
from tool_decorator import local_memcached
def date2str(date):
return date.strftime("%Y-%m-%d")
class DownLoad(object):
'''
1. Download historical data
2. Update each day's data
3. Load historical data
'''
def __init__(self):
self.basic = ts.get_stock_basics()
self.engine = create_engine('mysql://root:[email protected]/stock_info?charset=utf8')
self.connection = self.engine.connect()
@staticmethod
def date2str(today=None):
if today == None:
today = datetime.date.today()
return today.strftime("%Y-%m-%d")
def down_history(self, stock, index=False):
'''
Download data from the listing date up to today; can also be used for newly listed stocks
date,open,high,close,low,volume,amount
'''
print '--'*10,"downing ",stock,'--'*10
date = self.basic.ix[stock]['timeToMarket']
# timeToMarket is, surprisingly, a plain integer, e.g. 20100115
start_year = date/10000
today = datetime.date.today()
end_year = int(today.strftime("%Y"))
suffix = "-" + str(date)[4:6] + "-" + str(date)[6:8]
raw_data = None
# handle sub-new stocks, i.e. stocks that listed this year
if start_year == end_year:
raw_data = ts.get_h_data(stock,index)
for year in range(start_year, end_year):
start = str(year) + suffix
right = datetime.datetime.strptime(str(year+1) + suffix, "%Y-%m-%d")-datetime.timedelta(days=1)
# no company should have listed exactly on the year-boundary day, so this is safe
end = right.strftime("%Y-%m-%d")
print start, "-----",end
data = ts.get_h_data(stock,start=start,end=end,index=index)
if data is None:
print None
else:
print data.shape
raw_data = pd.concat([raw_data, data], axis=0)
# check whether the most recent stretch of data still needs to be fetched
if (year+1) == end_year and end < today.strftime("%Y-%m-%d"):
this_year_start = str(year+1) + suffix
print this_year_start, "-------",today.strftime("%Y-%m-%d")
data = ts.get_h_data(stock, start=this_year_start, end=today.strftime("%Y-%m-%d"),index=index)
if data is None:
print None
else:
print data.shape
raw_data = pd.concat([raw_data, data], axis=0)
raw_data = raw_data.sort_index(ascending=True)
raw_data.to_sql('day_'+stock, self.engine)
return raw_data
def down_all_day_stick(self):
'''
Download historical data for every stock
'''
for stock in self.basic.index:
try:
print stock
self.down_history(stock)
except Exception ,ex:
print Exception, ";",ex
def append_days(self,stock, start, end):
'''
Append data for the given stock within the specified date range
'''
data = ts.get_h_data(stock,start=start,end=end)
data = data.sort_index(ascending=True)
data.to_sql('day_'+stock, self.engine,if_exists='append')
def append_all_days(self, start=None, end=None):
'''
Append data for all stocks
'''
if start == None:
start = datetime.datetime.today()
end = start
for stock in self.basic['code']:
self.append_days(stock, start, end)
def load_data(self, stock):
'''
Load the stock's historical data
'''
search_sql = "select * from {0}".format('day_'+stock)
raw_data = SQL.read_sql(search_sql, self.engine)
return raw_data
def check_is_new_stock(self, stock):
'''
Check whether the stock is a newly listed one
(it turned out this function is not needed)
'''
check_sql = "show tables like '{0}'".format('day_'+stock)
result = self.connection.execute(check_sql)
if result.first() == None:
return True
else:
return False
# defaults to roughly the last 3 years of data
def down_period(self, stock,start=None,end=None):
raw_data = ts.get_hist_data(stock,start,end)
return raw_data
# new stocks such as 603861 are problematic
# thin wrapper around the ts interface so the same data is not fetched twice in one day
class TS(object):
@staticmethod
@local_memcached
def memchaced_data(funcname, fileprefix):
'''
Usage
1. funcname: the ts method to call
2. fileprefix: the file name prefix used to cache that method's result
'''
raw_data = funcname()
return raw_data
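# Added usage sketch (illustration only): per the docstring, the first argument
# is the tushare function and the second a cache-file prefix ('stock_basics'
# below is a made-up prefix), e.g.
#
#   basics = TS.memchaced_data(ts.get_stock_basics, 'stock_basics')
#
# The actual once-per-day, on-disk caching is implemented by
# tool_decorator.local_memcached and is not shown here.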
if __name__ == '__main__':
# dl = DownLoad()
# dl.down_all_day_stick()
# raw_data = dl.load_data('000001')
# print raw_data
TS() | [
"[email protected]"
] | |
00c4fad7606971274a79c91af14dc8412935ba2e | c5becab2d4201f2e828d052c22b4496a3bbe4927 | /tests/pipelines/test_pipelines_conversational.py | 9ed32adda652d5983ed5995d8d94a7a0df5d635c | [
"Apache-2.0"
] | permissive | thomwolf/transformers | ba665c456b2acd636d8e3876a87ea446ae0ae092 | 166dfa88e5dfdca1d99197e5006e4e2ea9e49cba | refs/heads/master | 2023-03-08T03:37:13.519336 | 2023-02-15T15:00:01 | 2023-02-15T15:00:01 | 238,908,404 | 4 | 1 | Apache-2.0 | 2023-02-25T16:09:30 | 2020-02-07T11:40:04 | Python | UTF-8 | Python | false | false | 17,110 | py | # Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from transformers import (
MODEL_FOR_CAUSAL_LM_MAPPING,
MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
AutoModelForCausalLM,
AutoModelForSeq2SeqLM,
AutoTokenizer,
BlenderbotSmallForConditionalGeneration,
BlenderbotSmallTokenizer,
Conversation,
ConversationalPipeline,
TFAutoModelForCausalLM,
pipeline,
)
from transformers.testing_utils import require_tf, require_torch, slow, torch_device
from .test_pipelines_common import ANY, PipelineTestCaseMeta
DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0
class ConversationalPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
model_mapping = dict(
list(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING.items())
if MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
else [] + list(MODEL_FOR_CAUSAL_LM_MAPPING.items())
if MODEL_FOR_CAUSAL_LM_MAPPING
else []
)
tf_model_mapping = dict(
list(TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING.items())
if TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
else [] + list(TF_MODEL_FOR_CAUSAL_LM_MAPPING.items())
if TF_MODEL_FOR_CAUSAL_LM_MAPPING
else []
)
def get_test_pipeline(self, model, tokenizer, processor):
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer)
return conversation_agent, [Conversation("Hi there!")]
def run_pipeline_test(self, conversation_agent, _):
# Simple
outputs = conversation_agent(Conversation("Hi there!"))
self.assertEqual(outputs, Conversation(past_user_inputs=["Hi there!"], generated_responses=[ANY(str)]))
# Single list
outputs = conversation_agent([Conversation("Hi there!")])
self.assertEqual(outputs, Conversation(past_user_inputs=["Hi there!"], generated_responses=[ANY(str)]))
# Batch
conversation_1 = Conversation("Going to the movies tonight - any suggestions?")
conversation_2 = Conversation("What's the last book you have read?")
self.assertEqual(len(conversation_1.past_user_inputs), 0)
self.assertEqual(len(conversation_2.past_user_inputs), 0)
outputs = conversation_agent([conversation_1, conversation_2])
self.assertEqual(outputs, [conversation_1, conversation_2])
self.assertEqual(
outputs,
[
Conversation(
past_user_inputs=["Going to the movies tonight - any suggestions?"],
generated_responses=[ANY(str)],
),
Conversation(past_user_inputs=["What's the last book you have read?"], generated_responses=[ANY(str)]),
],
)
# One conversation with history
conversation_2.add_user_input("Why do you recommend it?")
outputs = conversation_agent(conversation_2)
self.assertEqual(outputs, conversation_2)
self.assertEqual(
outputs,
Conversation(
past_user_inputs=["What's the last book you have read?", "Why do you recommend it?"],
generated_responses=[ANY(str), ANY(str)],
),
)
with self.assertRaises(ValueError):
conversation_agent("Hi there!")
with self.assertRaises(ValueError):
conversation_agent(Conversation())
# Conversation have been consumed and are not valid anymore
# Inactive conversations passed to the pipeline raise a ValueError
with self.assertRaises(ValueError):
conversation_agent(conversation_2)
@require_torch
@slow
def test_integration_torch_conversation(self):
# When
conversation_agent = pipeline(task="conversational", device=DEFAULT_DEVICE_NUM)
conversation_1 = Conversation("Going to the movies tonight - any suggestions?")
conversation_2 = Conversation("What's the last book you have read?")
# Then
self.assertEqual(len(conversation_1.past_user_inputs), 0)
self.assertEqual(len(conversation_2.past_user_inputs), 0)
# When
result = conversation_agent([conversation_1, conversation_2], do_sample=False, max_length=1000)
# Then
self.assertEqual(result, [conversation_1, conversation_2])
self.assertEqual(len(result[0].past_user_inputs), 1)
self.assertEqual(len(result[1].past_user_inputs), 1)
self.assertEqual(len(result[0].generated_responses), 1)
self.assertEqual(len(result[1].generated_responses), 1)
self.assertEqual(result[0].past_user_inputs[0], "Going to the movies tonight - any suggestions?")
self.assertEqual(result[0].generated_responses[0], "The Big Lebowski")
self.assertEqual(result[1].past_user_inputs[0], "What's the last book you have read?")
self.assertEqual(result[1].generated_responses[0], "The Last Question")
# When
conversation_2.add_user_input("Why do you recommend it?")
result = conversation_agent(conversation_2, do_sample=False, max_length=1000)
# Then
self.assertEqual(result, conversation_2)
self.assertEqual(len(result.past_user_inputs), 2)
self.assertEqual(len(result.generated_responses), 2)
self.assertEqual(result.past_user_inputs[1], "Why do you recommend it?")
self.assertEqual(result.generated_responses[1], "It's a good book.")
@require_torch
@slow
def test_integration_torch_conversation_truncated_history(self):
# When
conversation_agent = pipeline(task="conversational", min_length_for_response=24, device=DEFAULT_DEVICE_NUM)
conversation_1 = Conversation("Going to the movies tonight - any suggestions?")
# Then
self.assertEqual(len(conversation_1.past_user_inputs), 0)
# When
result = conversation_agent(conversation_1, do_sample=False, max_length=36)
# Then
self.assertEqual(result, conversation_1)
self.assertEqual(len(result.past_user_inputs), 1)
self.assertEqual(len(result.generated_responses), 1)
self.assertEqual(result.past_user_inputs[0], "Going to the movies tonight - any suggestions?")
self.assertEqual(result.generated_responses[0], "The Big Lebowski")
# When
conversation_1.add_user_input("Is it an action movie?")
result = conversation_agent(conversation_1, do_sample=False, max_length=36)
# Then
self.assertEqual(result, conversation_1)
self.assertEqual(len(result.past_user_inputs), 2)
self.assertEqual(len(result.generated_responses), 2)
self.assertEqual(result.past_user_inputs[1], "Is it an action movie?")
self.assertEqual(result.generated_responses[1], "It's a comedy.")
@require_torch
def test_small_model_pt(self):
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-small")
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer)
conversation = Conversation("hello")
output = conversation_agent(conversation)
self.assertEqual(output, Conversation(past_user_inputs=["hello"], generated_responses=["Hi"]))
@require_tf
def test_small_model_tf(self):
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
model = TFAutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-small")
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer)
conversation = Conversation("hello")
output = conversation_agent(conversation)
self.assertEqual(output, Conversation(past_user_inputs=["hello"], generated_responses=["Hi"]))
@require_torch
@slow
def test_integration_torch_conversation_dialogpt_input_ids(self):
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-small")
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer)
conversation_1 = Conversation("hello")
inputs = conversation_agent.preprocess(conversation_1)
self.assertEqual(inputs["input_ids"].tolist(), [[31373, 50256]])
conversation_2 = Conversation("how are you ?", past_user_inputs=["hello"], generated_responses=["Hi there!"])
inputs = conversation_agent.preprocess(conversation_2)
self.assertEqual(
inputs["input_ids"].tolist(), [[31373, 50256, 17250, 612, 0, 50256, 4919, 389, 345, 5633, 50256]]
)
@require_torch
@slow
def test_integration_torch_conversation_blenderbot_400M_input_ids(self):
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer)
# test1
conversation_1 = Conversation("hello")
inputs = conversation_agent.preprocess(conversation_1)
self.assertEqual(inputs["input_ids"].tolist(), [[1710, 86, 2]])
# test2
conversation_1 = Conversation(
"I like lasagne.",
past_user_inputs=["hello"],
generated_responses=[
" Do you like lasagne? It is a traditional Italian dish consisting of a shepherd's pie."
],
)
inputs = conversation_agent.preprocess(conversation_1)
self.assertEqual(
inputs["input_ids"].tolist(),
[
# This should be compared with the same conversation on ParlAI `safe_interactive` demo.
[
1710, # hello
86,
228, # Double space
228,
946,
304,
398,
6881,
558,
964,
38,
452,
315,
265,
6252,
452,
322,
968,
6884,
3146,
278,
306,
265,
617,
87,
388,
75,
341,
286,
521,
21,
228, # Double space
228,
281, # I like lasagne.
398,
6881,
558,
964,
21,
2, # EOS
],
],
)
@require_torch
@slow
def test_integration_torch_conversation_blenderbot_400M(self):
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer)
conversation_1 = Conversation("hello")
result = conversation_agent(
conversation_1,
)
self.assertEqual(
result.generated_responses[0],
# ParlAI implementation output, we have a different one, but it's our
# second best, you can check by using num_return_sequences=10
# " Hello! How are you? I'm just getting ready to go to work, how about you?",
" Hello! How are you doing today? I just got back from a walk with my dog.",
)
conversation_1 = Conversation("Lasagne hello")
result = conversation_agent(conversation_1, encoder_no_repeat_ngram_size=3)
self.assertEqual(
result.generated_responses[0],
" Do you like lasagne? It is a traditional Italian dish consisting of a shepherd's pie.",
)
conversation_1 = Conversation(
"Lasagne hello Lasagne is my favorite Italian dish. Do you like lasagne? I like lasagne."
)
result = conversation_agent(
conversation_1,
encoder_no_repeat_ngram_size=3,
)
self.assertEqual(
result.generated_responses[0],
" Me too. I like how it can be topped with vegetables, meats, and condiments.",
)
@require_torch
@slow
def test_integration_torch_conversation_encoder_decoder(self):
# When
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot_small-90M")
conversation_agent = ConversationalPipeline(model=model, tokenizer=tokenizer, device=DEFAULT_DEVICE_NUM)
conversation_1 = Conversation("My name is Sarah and I live in London")
conversation_2 = Conversation("Going to the movies tonight, What movie would you recommend? ")
# Then
self.assertEqual(len(conversation_1.past_user_inputs), 0)
self.assertEqual(len(conversation_2.past_user_inputs), 0)
# When
result = conversation_agent([conversation_1, conversation_2], do_sample=False, max_length=1000)
# Then
self.assertEqual(result, [conversation_1, conversation_2])
self.assertEqual(len(result[0].past_user_inputs), 1)
self.assertEqual(len(result[1].past_user_inputs), 1)
self.assertEqual(len(result[0].generated_responses), 1)
self.assertEqual(len(result[1].generated_responses), 1)
self.assertEqual(result[0].past_user_inputs[0], "My name is Sarah and I live in London")
self.assertEqual(
result[0].generated_responses[0],
"hi sarah, i live in london as well. do you have any plans for the weekend?",
)
self.assertEqual(
result[1].past_user_inputs[0], "Going to the movies tonight, What movie would you recommend? "
)
self.assertEqual(
result[1].generated_responses[0], "i don't know... i'm not really sure. what movie are you going to see?"
)
# When
conversation_1.add_user_input("Not yet, what about you?")
conversation_2.add_user_input("What's your name?")
result = conversation_agent([conversation_1, conversation_2], do_sample=False, max_length=1000)
# Then
self.assertEqual(result, [conversation_1, conversation_2])
self.assertEqual(len(result[0].past_user_inputs), 2)
self.assertEqual(len(result[1].past_user_inputs), 2)
self.assertEqual(len(result[0].generated_responses), 2)
self.assertEqual(len(result[1].generated_responses), 2)
self.assertEqual(result[0].past_user_inputs[1], "Not yet, what about you?")
self.assertEqual(result[0].generated_responses[1], "i don't have any plans yet. i'm not sure what to do yet.")
self.assertEqual(result[1].past_user_inputs[1], "What's your name?")
self.assertEqual(result[1].generated_responses[1], "i don't have a name, but i'm going to see a horror movie.")
@require_torch
@slow
def test_from_pipeline_conversation(self):
model_id = "facebook/blenderbot_small-90M"
# from model id
conversation_agent_from_model_id = pipeline("conversational", model=model_id, tokenizer=model_id)
# from model object
model = BlenderbotSmallForConditionalGeneration.from_pretrained(model_id)
tokenizer = BlenderbotSmallTokenizer.from_pretrained(model_id)
conversation_agent_from_model = pipeline("conversational", model=model, tokenizer=tokenizer)
conversation = Conversation("My name is Sarah and I live in London")
conversation_copy = Conversation("My name is Sarah and I live in London")
result_model_id = conversation_agent_from_model_id([conversation])
result_model = conversation_agent_from_model([conversation_copy])
# check for equality
self.assertEqual(
result_model_id.generated_responses[0],
"hi sarah, i live in london as well. do you have any plans for the weekend?",
)
self.assertEqual(
result_model_id.generated_responses[0],
result_model.generated_responses[0],
)
| [
"[email protected]"
] | |
9a2930492647fe490bf485ff55258371f5687191 | 3a63a9af2693b7d2f87a6d2db0585d8ce5480934 | /vision-vgg_objects.py | 3eb6277cd8e7d427d5a26d0fbd15066c271bf1e7 | [] | no_license | andreeadeac22/HackCam2018 | d167f71069c6fe529f1e88dd92e31794b64e6773 | 0bb529b0d0cc11583722107b7125eb0671ca149a | refs/heads/master | 2021-05-09T09:45:34.945701 | 2018-01-30T00:21:57 | 2018-01-30T00:21:57 | 119,458,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | import torch
from torch.autograd import Variable as V
import torchvision.models as models
from torchvision.models.vgg import vgg16
from torchvision import transforms as trn
from torch.nn import functional as F
from PIL import Image
def image_to_objects(img_name):
model = vgg16(pretrained=True)
model.eval()
# load the image transformer
centre_crop = trn.Compose([
trn.Resize((256,256)),
trn.CenterCrop(224),
trn.ToTensor(),
trn.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
    # class-label file (maps ImageNet class indices to names); assigned here but never read in this function
    file_name = 'categories_imagenet.txt'
img = Image.open(img_name)
input_img = V(centre_crop(img).unsqueeze(0))
# forward pass
logit = model.forward(input_img)
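    # softmax over the class dimension gives a probability vector over the 1000 ImageNet classes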
h_x = F.softmax(logit, 1).data.squeeze()
return h_x
print(image_to_objects("arch.jpeg")) | [
"[email protected]"
] | |
bc026c4ed31e48c1c7c6a8dad59f6f27b760e5de | d44b5a657e7cd69c875b55dd5cddf21812e89095 | /pixel_cnn/model/resnet.py | 4c7abe39625aca83798614a9c570268916820747 | [
"Apache-2.0"
] | permissive | nel215/chainer-pixel-cnn | ca8ae17fda998f7677dea785e53319b3fc646e76 | 94b064f9e66355d141ed5d6cce0c38492203715b | refs/heads/master | 2020-04-02T02:11:29.546694 | 2018-10-21T12:10:43 | 2018-10-21T12:10:43 | 153,896,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 774 | py | from chainer import Chain
from chainer import links as L
from chainer import functions as F
def concat_elu(x):
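    # "concatenated ELU": ELU applied to [x, -x] stacked along the channel axis, doubling the channel count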
return F.elu(F.concat([x, -x], 1))
class GatedResnet(Chain):
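    # Gated residual block: two convolutions with a multiplicative gate and a skip connection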
def __init__(self, n_out, Conv2D):
super(GatedResnet, self).__init__()
with self.init_scope():
self.conv1 = Conv2D(n_out)
self.conv2 = L.Convolution2D(None, n_out, ksize=1)
self.conv3 = Conv2D(2*n_out)
def __call__(self, x, a=None):
h = self.conv1(concat_elu(x))
if a is not None:
h += self.conv2(concat_elu(a))
h = F.dropout(concat_elu(h))
h = self.conv3(h)
# TODO: conditional generation
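        # split the 2*n_out channels into a value half (a) and a gate half (b), then gate with a * sigmoid(b)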
a, b = F.split_axis(h, 2, 1)
h = a * F.sigmoid(b)
return x + h
| [
"[email protected]"
] | |
e6ffa0af18975bc4140bb2a0fd222509374d096d | 174975248ffa04bb0339ace7475a791842e99ffb | /reverse_bits.py | 141244053c843ee9fa1eb7c73d05ab32903b8c86 | [] | no_license | KONAPAVANKUMAR/code-library | 87a5525dcf71aaba47f233df17ad31227cb3c44b | 6839ef596858515119a3c300b031a107c8d72292 | refs/heads/main | 2023-06-02T09:33:21.382512 | 2021-06-24T09:49:00 | 2021-06-24T09:49:00 | 378,131,322 | 0 | 0 | null | 2021-06-24T09:41:12 | 2021-06-18T11:39:22 | Python | UTF-8 | Python | false | false | 415 | py | def get_reverse_bit_string(number: int) -> str:
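    # emit the 32 bits of `number` starting from the least-significant bit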
bit_string = ""
for _ in range(0, 32):
bit_string += str(number % 2)
number = number >> 1
return bit_string
def reverse_bit(number):
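    # shift the bits of `number` out LSB-first and push them into `result`, yielding the bit-reversed 32-bit value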
result = 0
for _ in range(1, 33):
result = result << 1
end_bit = number % 2
number = number >> 1
result = result | end_bit
return get_reverse_bit_string(result) | [
"[email protected]"
] | |
57e50197193509c44c617169693c5d944c8f76f3 | 393ccacef32461f5d7f4b21419a7c695df9c62a7 | /lpo/sp/fmail/fmail.admin/datas/postcodes/81.cgi | 713794904723a1b6c22d73975a7aabfd7c129bf5 | [] | no_license | emoshu-yuta-okuma/nakagawa-dent-hp | ebc6c66efc624a256f0d7e30c2e26b9aae162cd7 | e83e8c7060881b7267f90ca3f2c599d614a219a1 | refs/heads/master | 2023-01-14T12:39:19.874341 | 2020-11-12T06:33:00 | 2020-11-12T06:33:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 160,735 | cgi | 40131,813,8130000,フクオカケン,フクオカシヒガシク,イカニケイサイガナイバアイ,福岡県,福岡市東区,以下に掲載がない場合,0,0,0,0,0,0
40131,813,8130025,フクオカケン,フクオカシヒガシク,アオバ,福岡県,福岡市東区,青葉,0,0,1,0,0,0
40131,81103,8110322,フクオカケン,フクオカシヒガシク,オオタケ,福岡県,福岡市東区,大岳,0,0,1,0,0,0
40131,812,8120052,フクオカケン,フクオカシヒガシク,カイヅカダンチ,福岡県,福岡市東区,貝塚団地,0,0,0,0,0,0
40131,813,8130011,フクオカケン,フクオカシヒガシク,カシイ,福岡県,福岡市東区,香椎,0,0,1,0,0,0
40131,813,8130013,フクオカケン,フクオカシヒガシク,カシイエキマエ,福岡県,福岡市東区,香椎駅前,0,0,1,0,0,0
40131,813,8130012,フクオカケン,フクオカシヒガシク,カシイエキヒガシ,福岡県,福岡市東区,香椎駅東,0,0,1,0,0,0
40131,813,8130014,フクオカケン,フクオカシヒガシク,カシイダイ,福岡県,福岡市東区,香椎台,0,0,1,0,0,0
40131,813,8130015,フクオカケン,フクオカシヒガシク,カシイダンチ,福岡県,福岡市東区,香椎団地,0,0,0,0,0,0
40131,813,8130017,フクオカケン,フクオカシヒガシク,カシイテリハ,福岡県,福岡市東区,香椎照葉,0,0,1,0,0,0
40131,813,8130016,フクオカケン,フクオカシヒガシク,カシイハマ,福岡県,福岡市東区,香椎浜,0,0,1,0,0,0
40131,813,8130018,フクオカケン,フクオカシヒガシク,カシイハマフトウ,福岡県,福岡市東区,香椎浜ふ頭,0,0,1,0,0,0
40131,813,8130003,フクオカケン,フクオカシヒガシク,カスミガオカ,福岡県,福岡市東区,香住ケ丘,0,0,1,0,0,0
40131,81103,8110325,フクオカケン,フクオカシヒガシク,カツマ,福岡県,福岡市東区,勝馬,0,0,0,0,0,0
40131,813,8130023,フクオカケン,フクオカシヒガシク,カマタ,福岡県,福岡市東区,蒲田,0,0,1,0,0,0
40131,81102,8110216,フクオカケン,フクオカシヒガシク,カミワジロ,福岡県,福岡市東区,上和白,0,0,0,0,0,0
40131,81102,8110206,フクオカケン,フクオカシヒガシク,ガンノス,福岡県,福岡市東区,雁の巣,0,0,1,0,0,0
40131,812,8120069,フクオカケン,フクオカシヒガシク,ゴウグチマチ,福岡県,福岡市東区,郷口町,0,0,0,0,0,0
40131,81103,8110321,フクオカケン,フクオカシヒガシク,サイトザキ,福岡県,福岡市東区,西戸崎,0,0,1,0,0,0
40131,81102,8110203,フクオカケン,フクオカシヒガシク,シオハマ,福岡県,福岡市東区,塩浜,0,0,1,0,0,0
40131,81103,8110323,フクオカケン,フクオカシヒガシク,シカシマ,福岡県,福岡市東区,志賀島,0,0,0,0,0,0
40131,813,8130002,フクオカケン,フクオカシヒガシク,シモバル,福岡県,福岡市東区,下原,0,0,1,0,0,0
40131,812,8120068,フクオカケン,フクオカシヒガシク,シャリョウ,福岡県,福岡市東区,社領,0,0,1,0,0,0
40131,813,8130045,フクオカケン,フクオカシヒガシク,シロハマダンチ,福岡県,福岡市東区,城浜団地,0,0,0,0,0,0
40131,81102,8110215,フクオカケン,フクオカシヒガシク,タカミダイ,福岡県,福岡市東区,高美台,0,0,1,0,0,0
40131,813,8130033,フクオカケン,フクオカシヒガシク,タタラ,福岡県,福岡市東区,多々良,0,0,1,0,0,0
40131,813,8130034,フクオカケン,フクオカシヒガシク,タノツ,福岡県,福岡市東区,多の津,0,0,1,0,0,0
40131,813,8130044,フクオカケン,フクオカシヒガシク,チハヤ,福岡県,福岡市東区,千早,0,0,1,0,0,0
40131,813,8130032,フクオカケン,フクオカシヒガシク,ドイ,福岡県,福岡市東区,土井,0,0,1,0,0,0
40131,813,8130001,フクオカケン,フクオカシヒガシク,トウノハル,福岡県,福岡市東区,唐原,0,0,1,0,0,0
40131,813,8130024,フクオカケン,フクオカシヒガシク,ナゴ,福岡県,福岡市東区,名子,0,0,1,0,0,0
40131,813,8130043,フクオカケン,フクオカシヒガシク,ナジマ,福岡県,福岡市東区,名島,0,0,1,0,0,0
40131,81102,8110204,フクオカケン,フクオカシヒガシク,ナタ,福岡県,福岡市東区,奈多,0,0,1,0,0,0
40131,81102,8110205,フクオカケン,フクオカシヒガシク,ナタダンチ,福岡県,福岡市東区,奈多団地,0,0,0,0,0,0
40131,812,8120053,フクオカケン,フクオカシヒガシク,ハコザキ,福岡県,福岡市東区,箱崎,0,0,1,0,0,0
40131,812,8120051,フクオカケン,フクオカシヒガシク,ハコザキフトウ,福岡県,福岡市東区,箱崎ふ頭,0,0,1,0,0,0
40131,812,8120061,フクオカケン,フクオカシヒガシク,ハコマツ,福岡県,福岡市東区,筥松,0,0,1,0,0,0
40131,812,8120067,フクオカケン,フクオカシヒガシク,ハコマツシンマチ,福岡県,福岡市東区,筥松新町,0,0,0,0,0,0
40131,813,8130031,フクオカケン,フクオカシヒガシク,ハッタ,福岡県,福岡市東区,八田,0,0,1,0,0,0
40131,812,8120063,フクオカケン,フクオカシヒガシク,ハラダ,福岡県,福岡市東区,原田,0,0,1,0,0,0
40131,812,8120055,フクオカケン,フクオカシヒガシク,ヒガシハマ,福岡県,福岡市東区,東浜,0,0,1,0,0,0
40131,81103,8110324,フクオカケン,フクオカシヒガシク,ヒロ,福岡県,福岡市東区,弘,0,0,0,0,0,0
40131,812,8120066,フクオカケン,フクオカシヒガシク,フタマタセ,福岡県,福岡市東区,二又瀬,0,0,0,0,0,0
40131,812,8120065,フクオカケン,フクオカシヒガシク,フタマタセシンマチ,福岡県,福岡市東区,二又瀬新町,0,0,0,0,0,0
40131,812,8120054,フクオカケン,フクオカシヒガシク,マイダシ,福岡県,福岡市東区,馬出,0,0,1,0,0,0
40131,813,8130042,フクオカケン,フクオカシヒガシク,マイマツバラ,福岡県,福岡市東区,舞松原,0,0,1,0,0,0
40131,813,8130004,フクオカケン,フクオカシヒガシク,マツカダイ,福岡県,福岡市東区,松香台,0,0,1,0,0,0
40131,813,8130035,フクオカケン,フクオカシヒガシク,マツザキ,福岡県,福岡市東区,松崎,0,0,1,0,0,0
40131,812,8120062,フクオカケン,フクオカシヒガシク,マツシマ(1、2チョウメ),福岡県,福岡市東区,松島(1、2丁目),1,0,1,0,0,0
40131,813,8130062,フクオカケン,フクオカシヒガシク,マツシマ(3-6チョウメ),福岡県,福岡市東区,松島(3〜6丁目),1,0,1,0,0,0
40131,812,8120064,フクオカケン,フクオカシヒガシク,マツダ,福岡県,福岡市東区,松田,0,0,1,0,0,0
40131,813,8130005,フクオカケン,フクオカシヒガシク,ミシマザキ,福岡県,福岡市東区,御島崎,0,0,1,0,0,0
40131,813,8130041,フクオカケン,フクオカシヒガシク,ミズタニ,福岡県,福岡市東区,水谷,0,0,1,0,0,0
40131,81102,8110201,フクオカケン,フクオカシヒガシク,ミトマ,福岡県,福岡市東区,三苫,0,0,1,0,0,0
40131,813,8130021,フクオカケン,フクオカシヒガシク,ミドリガオカ,福岡県,福岡市東区,みどりが丘,0,0,1,0,0,0
40131,813,8130019,フクオカケン,フクオカシヒガシク,ミナトカシイ,福岡県,福岡市東区,みなと香椎,0,0,1,0,0,0
40131,81102,8110212,フクオカケン,フクオカシヒガシク,ミワダイ,福岡県,福岡市東区,美和台,0,0,1,0,0,0
40131,81102,8110211,フクオカケン,フクオカシヒガシク,ミワダイシンマチ,福岡県,福岡市東区,美和台新町,0,0,0,0,0,0
40131,813,8130036,フクオカケン,フクオカシヒガシク,ワカミヤ,福岡県,福岡市東区,若宮,0,0,1,0,0,0
40131,81102,8110202,フクオカケン,フクオカシヒガシク,ワジロ,福岡県,福岡市東区,和白,0,0,1,0,0,0
40131,81102,8110213,フクオカケン,フクオカシヒガシク,ワジロガオカ,福岡県,福岡市東区,和白丘,0,0,1,0,0,0
40131,81102,8110214,フクオカケン,フクオカシヒガシク,ワジロヒガシ,福岡県,福岡市東区,和白東,0,0,1,0,0,0
40132,812,8120000,フクオカケン,フクオカシハカタク,イカニケイサイガナイバアイ,福岡県,福岡市博多区,以下に掲載がない場合,0,0,0,0,0,0
40132,816,8120885,フクオカケン,フクオカシハカタク,アイオイマチ,福岡県,福岡市博多区,相生町,0,0,1,0,0,0
40132,816,8120851,フクオカケン,フクオカシハカタク,アオキ,福岡県,福岡市博多区,青木,0,0,1,0,0,0
40132,816,8120881,フクオカケン,フクオカシハカタク,イソウダ,福岡県,福岡市博多区,井相田,0,0,1,0,0,0
40132,816,8120888,フクオカケン,フクオカシハカタク,イタヅケ,福岡県,福岡市博多区,板付,0,0,1,0,0,0
40132,816,8120861,フクオカケン,フクオカシハカタク,ウラタ,福岡県,福岡市博多区,浦田,0,0,1,0,0,0
40132,812,8120004,フクオカケン,フクオカシハカタク,エノキダ,福岡県,福岡市博多区,榎田,0,0,1,0,0,0
40132,812,8120001,フクオカケン,フクオカシハカタク,オオイ,福岡県,福岡市博多区,大井,0,0,1,0,0,0
40132,812,8120031,フクオカケン,フクオカシハカタク,オキハママチ,福岡県,福岡市博多区,沖浜町,0,0,0,0,0,0
40132,812,8120043,フクオカケン,フクオカシハカタク,カタカス,福岡県,福岡市博多区,堅粕,0,0,1,0,0,0
40132,816,8120863,フクオカケン,フクオカシハカタク,カネノクマ,福岡県,福岡市博多区,金の隈,0,0,1,0,0,0
40132,812,8120005,フクオカケン,フクオカシハカタク,カミウスイ,福岡県,福岡市博多区,上臼井,0,0,0,0,0,0
40132,812,8120026,フクオカケン,フクオカシハカタク,カミカワバタマチ,福岡県,福岡市博多区,上川端町,0,0,0,0,0,0
40132,812,8120036,フクオカケン,フクオカシハカタク,カミゴフクマチ,福岡県,福岡市博多区,上呉服町,0,0,0,0,0,0
40132,812,8120006,フクオカケン,フクオカシハカタク,カミムタ,福岡県,福岡市博多区,上牟田,0,0,1,0,0,0
40132,812,8120022,フクオカケン,フクオカシハカタク,カミヤマチ,福岡県,福岡市博多区,神屋町,0,0,0,0,0,0
40132,812,8120038,フクオカケン,フクオカシハカタク,ギオンマチ,福岡県,福岡市博多区,祇園町,0,0,0,0,0,0
40132,816,8120879,フクオカケン,フクオカシハカタク,ギンテンチョウ,福岡県,福岡市博多区,銀天町,0,0,1,0,0,0
40132,812,8120002,フクオカケン,フクオカシハカタク,クウコウマエ,福岡県,福岡市博多区,空港前,0,0,1,0,0,0
40132,812,8120037,フクオカケン,フクオカシハカタク,ゴクショマチ,福岡県,福岡市博多区,御供所町,0,0,0,0,0,0
40132,816,8120884,フクオカケン,フクオカシハカタク,コトブキチョウ,福岡県,福岡市博多区,寿町,0,0,1,0,0,0
40132,812,8120029,フクオカケン,フクオカシハカタク,コモンドマチ,福岡県,福岡市博多区,古門戸町,0,0,0,0,0,0
40132,816,8120891,フクオカケン,フクオカシハカタク,ササイ,福岡県,福岡市博多区,雀居,0,0,0,0,0,0
40132,816,8120887,フクオカケン,フクオカシハカタク,サンチク,福岡県,福岡市博多区,三筑,0,0,1,0,0,0
40132,812,8120015,フクオカケン,フクオカシハカタク,サンノウ,福岡県,福岡市博多区,山王,0,0,1,0,0,0
40132,816,8120871,フクオカケン,フクオカシハカタク,シノノメマチ,福岡県,福岡市博多区,東雲町,0,0,1,0,0,0
40132,812,8120003,フクオカケン,フクオカシハカタク,シモウスイ,福岡県,福岡市博多区,下臼井,0,0,0,0,0,0
40132,812,8120027,フクオカケン,フクオカシハカタク,シモカワバタマチ,福岡県,福岡市博多区,下川端町,0,0,0,0,0,0
40132,812,8120034,フクオカケン,フクオカシハカタク,シモゴフクマチ,福岡県,福岡市博多区,下呉服町,0,0,0,0,0,0
40132,816,8120855,フクオカケン,フクオカシハカタク,シモツキグマ,福岡県,福岡市博多区,下月隈,0,0,0,0,0,0
40132,816,8120876,フクオカケン,フクオカシハカタク,ショウナンマチ,福岡県,福岡市博多区,昭南町,0,0,1,0,0,0
40132,816,8120875,フクオカケン,フクオカシハカタク,シンワマチ,福岡県,福岡市博多区,新和町,0,0,1,0,0,0
40132,812,8120028,フクオカケン,フクオカシハカタク,スサキマチ,福岡県,福岡市博多区,須崎町,0,0,0,0,0,0
40132,812,8120018,フクオカケン,フクオカシハカタク,スミヨシ,福岡県,福岡市博多区,住吉,0,0,1,0,0,0
40132,812,8120032,フクオカケン,フクオカシハカタク,セキジョウマチ,福岡県,福岡市博多区,石城町,0,0,0,0,0,0
40132,812,8120033,フクオカケン,フクオカシハカタク,タイハクマチ,福岡県,福岡市博多区,大博町,0,0,0,0,0,0
40132,816,8120878,フクオカケン,フクオカシハカタク,タケオカマチ,福岡県,福岡市博多区,竹丘町,0,0,1,0,0,0
40132,816,8120895,フクオカケン,フクオカシハカタク,タケシタ,福岡県,福岡市博多区,竹下,0,0,1,0,0,0
40132,812,8120021,フクオカケン,フクオカシハカタク,チッコウホンマチ,福岡県,福岡市博多区,築港本町,0,0,0,0,0,0
40132,812,8120044,フクオカケン,フクオカシハカタク,チヨ,福岡県,福岡市博多区,千代,0,0,1,0,0,0
40132,816,8120858,フクオカケン,フクオカシハカタク,ツキグマ,福岡県,福岡市博多区,月隈,0,0,1,0,0,0
40132,812,8120024,フクオカケン,フクオカシハカタク,ツナバマチ,福岡県,福岡市博多区,綱場町,0,0,0,0,0,0
40132,812,8120020,フクオカケン,フクオカシハカタク,ツマショウジ,福岡県,福岡市博多区,対馬小路,0,0,0,0,0,0
40132,812,8120025,フクオカケン,フクオカシハカタク,テンヤマチ,福岡県,福岡市博多区,店屋町,0,0,0,0,0,0
40132,812,8120008,フクオカケン,フクオカシハカタク,トウコウ,福岡県,福岡市博多区,東光,0,0,1,0,0,0
40132,816,8120896,フクオカケン,フクオカシハカタク,トウコウジマチ,福岡県,福岡市博多区,東光寺町,0,0,1,0,0,0
40132,816,8120893,フクオカケン,フクオカシハカタク,ナカ,福岡県,福岡市博多区,那珂,0,0,1,0,0,0
40132,812,8120035,フクオカケン,フクオカシハカタク,ナカゴフクマチ,福岡県,福岡市博多区,中呉服町,0,0,0,0,0,0
40132,810,8100801,フクオカケン,フクオカシハカタク,ナカス,福岡県,福岡市博多区,中洲,0,0,1,0,0,0
40132,810,8100802,フクオカケン,フクオカシハカタク,ナカスナカシママチ,福岡県,福岡市博多区,中洲中島町,0,0,0,0,0,0
40132,812,8120023,フクオカケン,フクオカシハカタク,ナラヤマチ,福岡県,福岡市博多区,奈良屋町,0,0,0,0,0,0
40132,816,8120857,フクオカケン,フクオカシハカタク,ニシツキグマ,福岡県,福岡市博多区,西月隈,0,0,1,0,0,0
40132,816,8120873,フクオカケン,フクオカシハカタク,ニシハルマチ,福岡県,福岡市博多区,西春町,0,0,1,0,0,0
40132,812,8120012,フクオカケン,フクオカシハカタク,ハカタエキチュウオウガイ,福岡県,福岡市博多区,博多駅中央街,0,0,0,0,0,0
40132,812,8120011,フクオカケン,フクオカシハカタク,ハカタエキマエ,福岡県,福岡市博多区,博多駅前,0,0,1,0,0,0
40132,812,8120013,フクオカケン,フクオカシハカタク,ハカタエキヒガシ,福岡県,福岡市博多区,博多駅東,0,0,1,0,0,0
40132,812,8120016,フクオカケン,フクオカシハカタク,ハカタエキミナミ,福岡県,福岡市博多区,博多駅南,0,0,1,0,0,0
40132,816,8120872,フクオカケン,フクオカシハカタク,ハルマチ,福岡県,福岡市博多区,春町,0,0,1,0,0,0
40132,816,8120897,フクオカケン,フクオカシハカタク,ハンミチバシ,福岡県,福岡市博多区,半道橋,0,0,1,0,0,0
40132,812,8120014,フクオカケン,フクオカシハカタク,ヒエマチ,福岡県,福岡市博多区,比恵町,0,0,0,0,0,0
40132,816,8120874,フクオカケン,フクオカシハカタク,ヒカリガオカマチ,福岡県,福岡市博多区,光丘町,0,0,1,0,0,0
40132,812,8120045,フクオカケン,フクオカシハカタク,ヒガシコウエン,福岡県,福岡市博多区,東公園,0,0,0,0,0,0
40132,816,8120854,フクオカケン,フクオカシハカタク,ヒガシツキグマ,福岡県,福岡市博多区,東月隈,0,0,1,0,0,0
40132,816,8120892,フクオカケン,フクオカシハカタク,ヒガシナカ,福岡県,福岡市博多区,東那珂,0,0,1,0,0,0
40132,812,8120007,フクオカケン,フクオカシハカタク,ヒガシヒエ,福岡県,福岡市博多区,東比恵,0,0,1,0,0,0
40132,816,8120853,フクオカケン,フクオカシハカタク,ヒガシヒラオ,福岡県,福岡市博多区,東平尾,0,0,0,0,0,0
40132,816,8120852,フクオカケン,フクオカシハカタク,ヒガシヒラオコウエン,福岡県,福岡市博多区,東平尾公園,0,0,1,0,0,0
40132,816,8120886,フクオカケン,フクオカシハカタク,ミナミハチマンマチ,福岡県,福岡市博多区,南八幡町,0,0,1,0,0,0
40132,816,8120883,フクオカケン,フクオカシハカタク,ミナミホンマチ,福岡県,福岡市博多区,南本町,0,0,1,0,0,0
40132,812,8120017,フクオカケン,フクオカシハカタク,ミノシマ,福岡県,福岡市博多区,美野島,0,0,1,0,0,0
40132,816,8120882,フクオカケン,フクオカシハカタク,ムギノ,福岡県,福岡市博多区,麦野,0,0,1,0,0,0
40132,816,8120877,フクオカケン,フクオカシハカタク,モトマチ,福岡県,福岡市博多区,元町,0,0,1,0,0,0
40132,816,8120894,フクオカケン,フクオカシハカタク,モロオカ,福岡県,福岡市博多区,諸岡,0,0,1,0,0,0
40132,812,8120042,フクオカケン,フクオカシハカタク,ユタカ,福岡県,福岡市博多区,豊,0,0,1,0,0,0
40132,812,8120041,フクオカケン,フクオカシハカタク,ヨシヅカ,福岡県,福岡市博多区,吉塚,0,0,1,0,0,0
40132,812,8120046,フクオカケン,フクオカシハカタク,ヨシヅカホンマチ,福岡県,福岡市博多区,吉塚本町,0,0,0,0,0,0
40132,816,8120862,フクオカケン,フクオカシハカタク,リュウゲジ,福岡県,福岡市博多区,立花寺,0,0,1,0,0,0
40132,812,8120039,フクオカケン,フクオカシハカタク,レイセンマチ,福岡県,福岡市博多区,冷泉町,0,0,0,0,0,0
40133,810,8100000,フクオカケン,フクオカシチュウオウク,イカニケイサイガナイバアイ,福岡県,福岡市中央区,以下に掲載がない場合,0,0,0,0,0,0
40133,810,8100042,フクオカケン,フクオカシチュウオウク,アカサカ,福岡県,福岡市中央区,赤坂,0,0,1,0,0,0
40133,810,8100076,フクオカケン,フクオカシチュウオウク,アラツ,福岡県,福岡市中央区,荒津,0,0,1,0,0,0
40133,810,8100062,フクオカケン,フクオカシチュウオウク,アラト,福岡県,福岡市中央区,荒戸,0,0,1,0,0,0
40133,810,8100067,フクオカケン,フクオカシチュウオウク,イザキ,福岡県,福岡市中央区,伊崎,0,0,0,0,0,0
40133,810,8100021,フクオカケン,フクオカシチュウオウク,イマイズミ,福岡県,福岡市中央区,今泉,0,0,1,0,0,0
40133,810,8100054,フクオカケン,フクオカシチュウオウク,イマガワ,福岡県,福岡市中央区,今川,0,0,1,0,0,0
40133,810,8100074,フクオカケン,フクオカシチュウオウク,オオテモン,福岡県,福岡市中央区,大手門,0,0,1,0,0,0
40133,810,8100052,フクオカケン,フクオカシチュウオウク,オオホリ,福岡県,福岡市中央区,大濠,0,0,1,0,0,0
40133,810,8100051,フクオカケン,フクオカシチュウオウク,オオホリコウエン,福岡県,福岡市中央区,大濠公園,0,0,0,0,0,0
40133,810,8100013,フクオカケン,フクオカシチュウオウク,オオミヤ,福岡県,福岡市中央区,大宮,0,0,1,0,0,0
40133,810,8100033,フクオカケン,フクオカシチュウオウク,オザサ,福岡県,福岡市中央区,小笹,0,0,1,0,0,0
40133,810,8100005,フクオカケン,フクオカシチュウオウク,キヨカワ,福岡県,福岡市中央区,清川,0,0,1,0,0,0
40133,810,8100045,フクオカケン,フクオカシチュウオウク,クサガエ,福岡県,福岡市中央区,草香江,0,0,1,0,0,0
40133,810,8100055,フクオカケン,フクオカシチュウオウク,クロモン,福岡県,福岡市中央区,黒門,0,0,0,0,0,0
40133,810,8100023,フクオカケン,フクオカシチュウオウク,ケゴ,福岡県,福岡市中央区,警固,0,0,1,0,0,0
40133,810,8100027,フクオカケン,フクオカシチュウオウク,ゴショガダニ,福岡県,福岡市中央区,御所ケ谷,0,0,0,0,0,0
40133,810,8100024,フクオカケン,フクオカシチュウオウク,サクラザカ,福岡県,福岡市中央区,桜坂,0,0,1,0,0,0
40133,810,8100034,フクオカケン,フクオカシチュウオウク,ササオカ,福岡県,福岡市中央区,笹丘,0,0,1,0,0,0
40133,810,8100018,フクオカケン,フクオカシチュウオウク,サンソウドオリ,福岡県,福岡市中央区,山荘通,0,0,1,0,0,0
40133,810,8100028,フクオカケン,フクオカシチュウオウク,ジョウスイドオリ,福岡県,福岡市中央区,浄水通,0,0,0,0,0,0
40133,810,8100043,フクオカケン,フクオカシチュウオウク,ジョウナイ,福岡県,福岡市中央区,城内,0,0,0,0,0,0
40133,810,8100012,フクオカケン,フクオカシチュウオウク,シロガネ,福岡県,福岡市中央区,白金,0,0,1,0,0,0
40133,810,8100064,フクオカケン,フクオカシチュウオウク,ジギョウ,福岡県,福岡市中央区,地行,0,0,1,0,0,0
40133,810,8100065,フクオカケン,フクオカシチュウオウク,ジギョウハマ,福岡県,福岡市中央区,地行浜,0,0,1,0,0,0
40133,810,8100041,フクオカケン,フクオカシチュウオウク,ダイミョウ,福岡県,福岡市中央区,大名,0,0,1,0,0,0
40133,810,8100011,フクオカケン,フクオカシチュウオウク,タカサゴ,福岡県,福岡市中央区,高砂,0,0,1,0,0,0
40133,810,8100031,フクオカケン,フクオカシチュウオウク,タニ,福岡県,福岡市中央区,谷,0,0,1,0,0,0
40133,810,8100032,フクオカケン,フクオカシチュウオウク,テルクニ,福岡県,福岡市中央区,輝国,0,0,1,0,0,0
40133,810,8100001,フクオカケン,フクオカシチュウオウク,テンジン,福岡県,福岡市中央区,天神,0,0,1,0,0,0
40133,810,8100063,フクオカケン,フクオカシチュウオウク,トウジンマチ,福岡県,福岡市中央区,唐人町,0,0,1,0,0,0
40133,810,8100053,フクオカケン,フクオカシチュウオウク,トリカイ,福岡県,福岡市中央区,鳥飼,0,0,1,0,0,0
40133,810,8100072,フクオカケン,フクオカシチュウオウク,ナガハマ,福岡県,福岡市中央区,長浜,0,0,1,0,0,0
40133,810,8100015,フクオカケン,フクオカシチュウオウク,ナノカワ,福岡県,福岡市中央区,那の川,0,0,1,0,0,0
40133,810,8100071,フクオカケン,フクオカシチュウオウク,ナノツ,福岡県,福岡市中央区,那の津,0,0,1,0,0,0
40133,810,8100061,フクオカケン,フクオカシチュウオウク,ニシコウエン,福岡県,福岡市中央区,西公園,0,0,0,0,0,0
40133,810,8100002,フクオカケン,フクオカシチュウオウク,ニシナカス,福岡県,福岡市中央区,西中洲,0,0,0,0,0,0
40133,810,8100035,フクオカケン,フクオカシチュウオウク,バイコウエン,福岡県,福岡市中央区,梅光園,0,0,1,0,0,0
40133,810,8100036,フクオカケン,フクオカシチュウオウク,バイコウエンダンチ,福岡県,福岡市中央区,梅光園団地,0,0,0,0,0,0
40133,810,8100003,フクオカケン,フクオカシチュウオウク,ハルヨシ,福岡県,福岡市中央区,春吉,0,0,1,0,0,0
40133,810,8100014,フクオカケン,フクオカシチュウオウク,ヒラオ,福岡県,福岡市中央区,平尾,0,0,1,0,0,0
40133,810,8100017,フクオカケン,フクオカシチュウオウク,ヒラオカマチ,福岡県,福岡市中央区,平丘町,0,0,0,0,0,0
40133,810,8100029,フクオカケン,フクオカシチュウオウク,ヒラオジョウスイマチ,福岡県,福岡市中央区,平尾浄水町,0,0,0,0,0,0
40133,810,8100066,フクオカケン,フクオカシチュウオウク,フクハマ,福岡県,福岡市中央区,福浜,0,0,1,0,0,0
40133,810,8100026,フクオカケン,フクオカシチュウオウク,フルコガラスマチ,福岡県,福岡市中央区,古小烏町,0,0,0,0,0,0
40133,810,8100016,フクオカケン,フクオカシチュウオウク,ヘイワ,福岡県,福岡市中央区,平和,0,0,1,0,0,0
40133,810,8100073,フクオカケン,フクオカシチュウオウク,マイヅル,福岡県,福岡市中央区,舞鶴,0,0,1,0,0,0
40133,810,8100075,フクオカケン,フクオカシチュウオウク,ミナト,福岡県,福岡市中央区,港,0,0,1,0,0,0
40133,810,8100037,フクオカケン,フクオカシチュウオウク,ミナミコウエン,福岡県,福岡市中央区,南公園,0,0,0,0,0,0
40133,810,8100022,フクオカケン,フクオカシチュウオウク,ヤクイン,福岡県,福岡市中央区,薬院,0,0,1,0,0,0
40133,810,8100025,フクオカケン,フクオカシチュウオウク,ヤクインイフクマチ,福岡県,福岡市中央区,薬院伊福町,0,0,0,0,0,0
40133,810,8100044,フクオカケン,フクオカシチュウオウク,ロッポンマツ,福岡県,福岡市中央区,六本松,0,0,1,0,0,0
40133,810,8100004,フクオカケン,フクオカシチュウオウク,ワタナベドオリ,福岡県,福岡市中央区,渡辺通,0,0,1,0,0,0
40134,815,8150000,フクオカケン,フクオカシミナミク,イカニケイサイガナイバアイ,福岡県,福岡市南区,以下に掲載がない場合,0,0,0,0,0,0
40134,816,8111302,フクオカケン,フクオカシミナミク,イジリ,福岡県,福岡市南区,井尻,0,0,1,0,0,0
40134,815,8150084,フクオカケン,フクオカシミナミク,イチザキ,福岡県,福岡市南区,市崎,0,0,1,0,0,0
40134,815,8150073,フクオカケン,フクオカシミナミク,オオイケ,福岡県,福岡市南区,大池,0,0,1,0,0,0
40134,815,8150082,フクオカケン,フクオカシミナミク,オオグス,福岡県,福岡市南区,大楠,0,0,1,0,0,0
40134,815,8150033,フクオカケン,フクオカシミナミク,オオハシ,福岡県,福岡市南区,大橋,0,0,1,0,0,0
40134,815,8150038,フクオカケン,フクオカシミナミク,オオハシダンチ,福岡県,福岡市南区,大橋団地,0,0,0,0,0,0
40134,816,8111313,フクオカケン,フクオカシミナミク,オサ,福岡県,福岡市南区,曰佐,0,0,1,0,0,0
40134,816,8111303,フクオカケン,フクオカシミナミク,オリタテマチ,福岡県,福岡市南区,折立町,0,0,0,0,0,0
40134,815,8111353,フクオカケン,フクオカシミナミク,カシワラ,福岡県,福岡市南区,柏原,0,0,1,0,0,0
40134,816,8111324,フクオカケン,フクオカシミナミク,ケヤゴウ,福岡県,福岡市南区,警弥郷,0,0,1,0,0,0
40134,816,8150001,フクオカケン,フクオカシミナミク,ゴジッカワ,福岡県,福岡市南区,五十川,0,0,1,0,0,0
40134,815,8111365,フクオカケン,フクオカシミナミク,サラヤマ,福岡県,福岡市南区,皿山,0,0,1,0,0,0
40134,815,8150032,フクオカケン,フクオカシミナミク,シオバル,福岡県,福岡市南区,塩原,0,0,1,0,0,0
40134,815,8150031,フクオカケン,フクオカシミナミク,シミズ,福岡県,福岡市南区,清水,0,0,1,0,0,0
40134,815,8111354,フクオカケン,フクオカシミナミク,タイヘイジ,福岡県,福岡市南区,大平寺,0,0,1,0,0,0
40134,816,8150004,フクオカケン,フクオカシミナミク,タカキ,福岡県,福岡市南区,高木,0,0,1,0,0,0
40134,815,8150083,フクオカケン,フクオカシミナミク,タカミヤ,福岡県,福岡市南区,高宮,0,0,1,0,0,0
40134,815,8150072,フクオカケン,フクオカシミナミク,タガ,福岡県,福岡市南区,多賀,0,0,1,0,0,0
40134,815,8150037,フクオカケン,フクオカシミナミク,タマガワマチ,福岡県,福岡市南区,玉川町,0,0,0,0,0,0
40134,815,8150036,フクオカケン,フクオカシミナミク,チクシガオカ,福岡県,福岡市南区,筑紫丘,0,0,1,0,0,0
40134,815,8111352,フクオカケン,フクオカシミナミク,ツルタ,福岡県,福岡市南区,鶴田,0,0,1,0,0,0
40134,815,8150074,フクオカケン,フクオカシミナミク,テラヅカ,福岡県,福岡市南区,寺塚,0,0,1,0,0,0
40134,815,8111364,フクオカケン,フクオカシミナミク,ナカオ,福岡県,福岡市南区,中尾,0,0,1,0,0,0
40134,815,8150075,フクオカケン,フクオカシミナミク,ナガオカ,福岡県,福岡市南区,長丘,0,0,1,0,0,0
40134,815,8111362,フクオカケン,フクオカシミナミク,ナガズミ,福岡県,福岡市南区,長住,0,0,1,0,0,0
40134,815,8150081,フクオカケン,フクオカシミナミク,ナノカワ,福岡県,福岡市南区,那の川,0,0,1,0,0,0
40134,815,8111361,フクオカケン,フクオカシミナミク,ニシナガズミ,福岡県,福岡市南区,西長住,0,0,1,0,0,0
40134,815,8111347,フクオカケン,フクオカシミナミク,ノタメ,福岡県,福岡市南区,野多目,0,0,1,0,0,0
40134,815,8150041,フクオカケン,フクオカシミナミク,ノマ,福岡県,福岡市南区,野間,0,0,1,0,0,0
40134,815,8111356,フクオカケン,フクオカシミナミク,ハナハタ,福岡県,福岡市南区,花畑,0,0,1,0,0,0
40134,815,8111355,フクオカケン,フクオカシミナミク,ヒバル,福岡県,福岡市南区,桧原,0,0,1,0,0,0
40134,815,8150071,フクオカケン,フクオカシミナミク,ヘイワ,福岡県,福岡市南区,平和,0,0,1,0,0,0
40134,816,8111314,フクオカケン,フクオカシミナミク,マトバ,福岡県,福岡市南区,的場,0,0,1,0,0,0
40134,815,8150034,フクオカケン,フクオカシミナミク,ミナミオオハシ,福岡県,福岡市南区,南大橋,0,0,1,0,0,0
40134,815,8111344,フクオカケン,フクオカシミナミク,ミヤケ,福岡県,福岡市南区,三宅,0,0,1,0,0,0
40134,815,8111345,フクオカケン,フクオカシミナミク,ムカイシンマチ,福岡県,福岡市南区,向新町,0,0,1,0,0,0
40134,815,8150035,フクオカケン,フクオカシミナミク,ムカイノ,福岡県,福岡市南区,向野,0,0,1,0,0,0
40134,815,8111351,フクオカケン,フクオカシミナミク,ヤカタバル,福岡県,福岡市南区,屋形原,0,0,1,0,0,0
40134,816,8111323,フクオカケン,フクオカシミナミク,ヤナガ,福岡県,福岡市南区,弥永,0,0,1,0,0,0
40134,816,8111322,フクオカケン,フクオカシミナミク,ヤナガダンチ,福岡県,福岡市南区,弥永団地,0,0,0,0,0,0
40134,815,8150063,フクオカケン,フクオカシミナミク,ヤナゴウチ,福岡県,福岡市南区,柳河内,0,0,1,0,0,0
40134,816,8111321,フクオカケン,フクオカシミナミク,ヤナセ,福岡県,福岡市南区,柳瀬,0,0,1,0,0,0
40134,816,8111311,フクオカケン,フクオカシミナミク,ヨコテ,福岡県,福岡市南区,横手,0,0,1,0,0,0
40134,816,8111312,フクオカケン,フクオカシミナミク,ヨコテミナミマチ,福岡県,福岡市南区,横手南町,0,0,0,0,0,0
40134,815,8111346,フクオカケン,フクオカシミナミク,ロウジ,福岡県,福岡市南区,老司,0,0,1,0,0,0
40134,815,8150042,フクオカケン,フクオカシミナミク,ワカヒサ,福岡県,福岡市南区,若久,0,0,1,0,0,0
40134,815,8150048,フクオカケン,フクオカシミナミク,ワカヒサダンチ,福岡県,福岡市南区,若久団地,0,0,0,0,0,0
40134,815,8111343,フクオカケン,フクオカシミナミク,ワダ,福岡県,福岡市南区,和田,0,0,1,0,0,0
40135,819,8190000,フクオカケン,フクオカシニシク,イカニケイサイガナイバアイ,福岡県,福岡市西区,以下に掲載がない場合,0,0,0,0,0,0
40135,819,8190015,フクオカケン,フクオカシニシク,アタゴ,福岡県,福岡市西区,愛宕,0,0,1,0,0,0
40135,819,8190013,フクオカケン,フクオカシニシク,アタゴハマ,福岡県,福岡市西区,愛宕浜,0,0,1,0,0,0
40135,819,8190007,フクオカケン,フクオカシニシク,アタゴミナミ,福岡県,福岡市西区,愛宕南,0,0,1,0,0,0
40135,81903,8190371,フクオカケン,フクオカシニシク,イイジ,福岡県,福岡市西区,飯氏,0,0,0,0,0,0
40135,819,8190037,フクオカケン,フクオカシニシク,イイモリ,福岡県,福岡市西区,飯盛,0,0,0,0,0,0
40135,819,8190042,フクオカケン,フクオカシニシク,イキダンチ,福岡県,福岡市西区,壱岐団地,0,0,0,0,0,0
40135,819,8190055,フクオカケン,フクオカシニシク,イキノマツバラ,福岡県,福岡市西区,生の松原,0,0,1,0,0,0
40135,819,8190044,フクオカケン,フクオカシニシク,イキマツダイ,福岡県,福岡市西区,生松台,0,0,1,0,0,0
40135,819,8190025,フクオカケン,フクオカシニシク,イシマル,福岡県,福岡市西区,石丸,0,0,1,0,0,0
40135,81903,8190381,フクオカケン,フクオカシニシク,イズミ,福岡県,福岡市西区,泉,0,0,1,0,0,0
40135,81901,8190167,フクオカケン,フクオカシニシク,イマジュク,福岡県,福岡市西区,今宿,0,0,1,0,0,0
40135,81901,8190162,フクオカケン,フクオカシニシク,イマジュクアオキ,福岡県,福岡市西区,今宿青木,0,0,0,0,0,0
40135,81901,8190168,フクオカケン,フクオカシニシク,イマジュクエキマエ,福岡県,福岡市西区,今宿駅前,0,0,1,0,0,0
40135,81901,8190163,フクオカケン,フクオカシニシク,イマジュクカミノハル,福岡県,福岡市西区,今宿上ノ原,0,0,0,0,0,0
40135,81901,8190164,フクオカケン,フクオカシニシク,イマジュクマチ,福岡県,福岡市西区,今宿町,0,0,0,0,0,0
40135,81901,8190161,フクオカケン,フクオカシニシク,イマジュクヒガシ,福岡県,福岡市西区,今宿東,0,0,1,0,0,0
40135,81901,8190165,フクオカケン,フクオカシニシク,イマヅ,福岡県,福岡市西区,今津,0,0,0,0,0,0
40135,81903,8190372,フクオカケン,フクオカシニシク,ウダガワラ,福岡県,福岡市西区,宇田川原,0,0,0,0,0,0
40135,819,8190005,フクオカケン,フクオカシニシク,ウチハマ,福岡県,福岡市西区,内浜,0,0,1,0,0,0
40135,819,8190021,フクオカケン,フクオカシニシク,オオマチダンチ,福岡県,福岡市西区,大町団地,0,0,0,0,0,0
40135,819,8190001,フクオカケン,フクオカシニシク,オド,福岡県,福岡市西区,小戸,0,0,1,0,0,0
40135,819,8190011,フクオカケン,フクオカシニシク,オロノシマ,福岡県,福岡市西区,小呂島,0,0,0,0,0,0
40135,819,8190035,フクオカケン,フクオカシニシク,カナタケ,福岡県,福岡市西区,金武,0,0,0,0,0,0
40135,819,8190054,フクオカケン,フクオカシニシク,カミヤマト,福岡県,福岡市西区,上山門,0,0,1,0,0,0
40135,81903,8190388,フクオカケン,フクオカシニシク,キュウダイシンマチ,福岡県,福岡市西区,九大新町,0,0,0,0,0,0
40135,81902,8190204,フクオカケン,フクオカシニシク,クサバ,福岡県,福岡市西区,草場,0,0,0,0,0,0
40135,81903,8190382,フクオカケン,フクオカシニシク,クワバラ,福岡県,福岡市西区,桑原,0,0,0,0,0,0
40135,81902,8190205,フクオカケン,フクオカシニシク,ゲンカイシマ,福岡県,福岡市西区,玄界島,0,0,0,0,0,0
40135,81902,8190203,フクオカケン,フクオカシニシク,コタ,福岡県,福岡市西区,小田,0,0,0,0,0,0
40135,819,8190052,フクオカケン,フクオカシニシク,シモヤマト,福岡県,福岡市西区,下山門,0,0,1,0,0,0
40135,819,8190051,フクオカケン,フクオカシニシク,シモヤマトダンチ,福岡県,福岡市西区,下山門団地,0,0,0,0,0,0
40135,819,8190024,フクオカケン,フクオカシニシク,ジュウロウガワダンチ,福岡県,福岡市西区,十郎川団地,0,0,0,0,0,0
40135,819,8190041,フクオカケン,フクオカシニシク,ジュウロクチョウ,福岡県,福岡市西区,拾六町,0,0,1,0,0,0
40135,819,8190045,フクオカケン,フクオカシニシク,ジュウロクチョウダンチ,福岡県,福岡市西区,拾六町団地,0,0,0,0,0,0
40135,819,8190053,フクオカケン,フクオカシニシク,ジョウノハルダンチ,福岡県,福岡市西区,城の原団地,0,0,0,0,0,0
40135,81903,8190373,フクオカケン,フクオカシニシク,スセンジ,福岡県,福岡市西区,周船寺,0,0,1,0,0,0
40135,81903,8190374,フクオカケン,フクオカシニシク,センリ,福岡県,福岡市西区,千里,0,0,0,0,0,0
40135,819,8190034,フクオカケン,フクオカシニシク,タ,福岡県,福岡市西区,田,0,0,0,0,0,0
40135,81903,8190383,フクオカケン,フクオカシニシク,タジリ,福岡県,福岡市西区,田尻,0,0,1,0,0,0
40135,81903,8190384,フクオカケン,フクオカシニシク,タロウマル,福岡県,福岡市西区,太郎丸,0,0,1,0,0,0
40135,819,8190032,フクオカケン,フクオカシニシク,トギレ,福岡県,福岡市西区,戸切,0,0,1,0,0,0
40135,81903,8190375,フクオカケン,フクオカシニシク,トクナガ,福岡県,福岡市西区,徳永,0,0,0,0,0,0
40135,819,8190014,フクオカケン,フクオカシニシク,トヨハマ,福岡県,福岡市西区,豊浜,0,0,1,0,0,0
40135,819,8190039,フクオカケン,フクオカシニシク,ニシイリベ,福岡県,福岡市西区,西入部,0,0,0,0,0,0
40135,81902,8190202,フクオカケン,フクオカシニシク,ニシノウラ,福岡県,福岡市西区,西浦,0,0,0,0,0,0
40135,819,8190046,フクオカケン,フクオカシニシク,ニシノオカ,福岡県,福岡市西区,西の丘,0,0,1,0,0,0
40135,819,8190043,フクオカケン,フクオカシニシク,ノカタ,福岡県,福岡市西区,野方,0,0,1,0,0,0
40135,819,8190012,フクオカケン,フクオカシニシク,ノコ,福岡県,福岡市西区,能古,0,0,0,0,0,0
40135,819,8190033,フクオカケン,フクオカシニシク,ハシモト(オオアザ),福岡県,福岡市西区,橋本(大字),1,0,1,0,0,0
40135,819,8190031,フクオカケン,フクオカシニシク,ハシモト(チョウメ),福岡県,福岡市西区,橋本(丁目),1,0,1,0,0,0
40135,819,8190038,フクオカケン,フクオカシニシク,ハネド,福岡県,福岡市西区,羽根戸,0,0,0,0,0,0
40135,819,8190022,フクオカケン,フクオカシニシク,フクシゲ,福岡県,福岡市西区,福重,0,0,1,0,0,0
40135,819,8190023,フクオカケン,フクオカシニシク,フクシゲダンチ,福岡県,福岡市西区,福重団地,0,0,0,0,0,0
40135,81903,8190387,フクオカケン,フクオカシニシク,フジミ,福岡県,福岡市西区,富士見,0,0,1,0,0,0
40135,81902,8190201,フクオカケン,フクオカシニシク,ミヤノウラ,福岡県,福岡市西区,宮浦,0,0,0,0,0,0
40135,81903,8190376,フクオカケン,フクオカシニシク,ミョウバル,福岡県,福岡市西区,女原,0,0,0,0,0,0
40135,819,8190030,フクオカケン,フクオカシニシク,ムロミガオカ,福岡県,福岡市西区,室見が丘,0,0,1,0,0,0
40135,819,8190002,フクオカケン,フクオカシニシク,メイノハマ,福岡県,福岡市西区,姪の浜,0,0,1,0,0,0
40135,819,8190006,フクオカケン,フクオカシニシク,メイノハマエキミナミ,福岡県,福岡市西区,姪浜駅南,0,0,1,0,0,0
40135,81903,8190385,フクオカケン,フクオカシニシク,モトオカ,福岡県,福岡市西区,元岡,0,0,0,0,0,0
40135,81903,8190386,フクオカケン,フクオカシニシク,モトハマ,福岡県,福岡市西区,元浜,0,0,1,0,0,0
40135,81901,8190166,フクオカケン,フクオカシニシク,ヨコハマ(1-2チョウメ),福岡県,福岡市西区,横浜(1〜2丁目),1,0,1,0,0,0
40135,81903,8190366,フクオカケン,フクオカシニシク,ヨコハマ(3チョウメ),福岡県,福岡市西区,横浜(3丁目),1,0,1,0,0,0
40135,819,8190036,フクオカケン,フクオカシニシク,ヨシタケ,福岡県,福岡市西区,吉武,0,0,0,0,0,0
40136,81401,8140100,フクオカケン,フクオカシジョウナンク,イカニケイサイガナイバアイ,福岡県,福岡市城南区,以下に掲載がない場合,0,0,0,0,0,0
40136,81401,8140101,フクオカケン,フクオカシジョウナンク,アラエ,福岡県,福岡市城南区,荒江,0,0,1,0,0,0
40136,81401,8140102,フクオカケン,フクオカシジョウナンク,アラエダンチ,福岡県,福岡市城南区,荒江団地,0,0,0,0,0,0
40136,81401,8140134,フクオカケン,フクオカシジョウナンク,イイクラ,福岡県,福岡市城南区,飯倉,0,0,1,0,0,0
40136,81401,8140144,フクオカケン,フクオカシジョウナンク,ウメバヤシ,福岡県,福岡市城南区,梅林,0,0,1,0,0,0
40136,81401,8140142,フクオカケン,フクオカシジョウナンク,カタエ,福岡県,福岡市城南区,片江,0,0,0,0,0,0
40136,81401,8140114,フクオカケン,フクオカシジョウナンク,カナヤマダンチ,福岡県,福岡市城南区,金山団地,0,0,0,0,0,0
40136,81401,8140105,フクオカケン,フクオカシジョウナンク,ジョウセイダンチ,福岡県,福岡市城南区,城西団地,0,0,0,0,0,0
40136,81401,8140121,フクオカケン,フクオカシジョウナンク,シンショウジ,福岡県,福岡市城南区,神松寺,0,0,1,0,0,0
40136,81401,8140154,フクオカケン,フクオカシジョウナンク,タカラダイダンチ,福岡県,福岡市城南区,宝台団地,0,0,0,0,0,0
40136,81401,8140113,フクオカケン,フクオカシジョウナンク,タシマ,福岡県,福岡市城南区,田島,0,0,1,0,0,0
40136,81401,8140111,フクオカケン,フクオカシジョウナンク,チャヤマ,福岡県,福岡市城南区,茶山,0,0,1,0,0,0
40136,81401,8140151,フクオカケン,フクオカシジョウナンク,ツツミ,福岡県,福岡市城南区,堤,0,0,1,0,0,0
40136,81401,8140152,フクオカケン,フクオカシジョウナンク,ツツミダンチ,福岡県,福岡市城南区,堤団地,0,0,0,0,0,0
40136,81401,8140112,フクオカケン,フクオカシジョウナンク,トモオカ,福岡県,福岡市城南区,友丘,0,0,1,0,0,0
40136,81401,8140103,フクオカケン,フクオカシジョウナンク,トリカイ,福岡県,福岡市城南区,鳥飼,0,0,1,0,0,0
40136,81401,8140123,フクオカケン,フクオカシジョウナンク,ナガオ,福岡県,福岡市城南区,長尾,0,0,1,0,0,0
40136,81401,8140133,フクオカケン,フクオカシジョウナンク,ナナクマ,福岡県,福岡市城南区,七隈,0,0,0,0,0,0
40136,81401,8140141,フクオカケン,フクオカシジョウナンク,ニシカタエ,福岡県,福岡市城南区,西片江,0,0,1,0,0,0
40136,81401,8140153,フクオカケン,フクオカシジョウナンク,ヒイカワ,福岡県,福岡市城南区,樋井川,0,0,1,0,0,0
40136,81401,8140155,フクオカケン,フクオカシジョウナンク,ヒガシアブラヤマ,福岡県,福岡市城南区,東油山,0,0,1,0,0,0
40136,81401,8140104,フクオカケン,フクオカシジョウナンク,ベフ,福岡県,福岡市城南区,別府,0,0,1,0,0,0
40136,81401,8140106,フクオカケン,フクオカシジョウナンク,ベフダンチ,福岡県,福岡市城南区,別府団地,0,0,0,0,0,0
40136,81401,8140132,フクオカケン,フクオカシジョウナンク,ホシクマ,福岡県,福岡市城南区,干隈,0,0,1,0,0,0
40136,81401,8140131,フクオカケン,フクオカシジョウナンク,マツヤマ,福岡県,福岡市城南区,松山,0,0,1,0,0,0
40136,81401,8140143,フクオカケン,フクオカシジョウナンク,ミナミカタエ,福岡県,福岡市城南区,南片江,0,0,1,0,0,0
40136,81401,8140122,フクオカケン,フクオカシジョウナンク,ユウセンテイ,福岡県,福岡市城南区,友泉亭,0,0,0,0,0,0
40137,814,8140000,フクオカケン,フクオカシサワラク,イカニケイサイガナイバアイ,福岡県,福岡市早良区,以下に掲載がない場合,0,0,0,0,0,0
40137,814,8140004,フクオカケン,フクオカシサワラク,アケボノ,福岡県,福岡市早良区,曙,0,0,1,0,0,0
40137,814,8140021,フクオカケン,フクオカシサワラク,アラエ,福岡県,福岡市早良区,荒江,0,0,1,0,0,0
40137,814,8140033,フクオカケン,フクオカシサワラク,アリタ,福岡県,福岡市早良区,有田,0,0,1,0,0,0
40137,814,8140034,フクオカケン,フクオカシサワラク,アリタダンチ,福岡県,福岡市早良区,有田団地,0,0,0,0,0,0
40137,81401,8140161,フクオカケン,フクオカシサワラク,イイクラ,福岡県,福岡市早良区,飯倉,0,0,1,0,0,0
40137,81111,8111134,フクオカケン,フクオカシサワラク,イイバ,福岡県,福岡市早良区,飯場,0,0,0,0,0,0
40137,81111,8111132,フクオカケン,フクオカシサワラク,イシガマ,福岡県,福岡市早良区,石釜,0,0,0,0,0,0
40137,81111,8111113,フクオカケン,フクオカシサワラク,イタヤ,福岡県,福岡市早良区,板屋,0,0,0,0,0,0
40137,81111,8111123,フクオカケン,フクオカシサワラク,ウチノ,福岡県,福岡市早良区,内野,0,0,1,0,0,0
40137,81401,8140172,フクオカケン,フクオカシサワラク,ウメバヤシ,福岡県,福岡市早良区,梅林,0,0,1,0,0,0
40137,81111,8111112,フクオカケン,フクオカシサワラク,オカサギ,福岡県,福岡市早良区,小笠木,0,0,0,0,0,0
40137,81111,8111124,フクオカケン,フクオカシサワラク,カナタケ,福岡県,福岡市早良区,金武,0,0,0,0,0,0
40137,81401,8140164,フクオカケン,フクオカシサワラク,カモ,福岡県,福岡市早良区,賀茂,0,0,1,0,0,0
40137,814,8140032,フクオカケン,フクオカシサワラク,コタベ,福岡県,福岡市早良区,小田部,0,0,1,0,0,0
40137,81111,8111122,フクオカケン,フクオカシサワラク,サワラ,福岡県,福岡市早良区,早良,0,0,1,0,0,0
40137,81111,8111114,フクオカケン,フクオカシサワラク,シイバ,福岡県,福岡市早良区,椎原,0,0,0,0,0,0
40137,81111,8111103,フクオカケン,フクオカシサワラク,シカ,福岡県,福岡市早良区,四箇,0,0,1,0,0,0
40137,81401,8140176,フクオカケン,フクオカシサワラク,シカタダンチ,福岡県,福岡市早良区,四箇田団地,0,0,0,0,0,0
40137,81111,8111101,フクオカケン,フクオカシサワラク,シゲドメ,福岡県,福岡市早良区,重留,0,0,1,0,0,0
40137,814,8140003,フクオカケン,フクオカシサワラク,ジョウセイ,福岡県,福岡市早良区,城西,0,0,1,0,0,0
40137,814,8140012,フクオカケン,フクオカシサワラク,ショウダイ,福岡県,福岡市早良区,昭代,0,0,1,0,0,0
40137,81401,8140165,フクオカケン,フクオカシサワラク,ジロウマル,福岡県,福岡市早良区,次郎丸,0,0,1,0,0,0
40137,814,8140005,フクオカケン,フクオカシサワラク,ソハラ,福岡県,福岡市早良区,祖原,0,0,0,0,0,0
40137,81401,8140177,フクオカケン,フクオカシサワラク,タ,福岡県,福岡市早良区,田,0,0,0,0,0,0
40137,814,8140011,フクオカケン,フクオカシサワラク,タカトリ,福岡県,福岡市早良区,高取,0,0,1,0,0,0
40137,81401,8140174,フクオカケン,フクオカシサワラク,タグマ,福岡県,福岡市早良区,田隈,0,0,1,0,0,0
40137,81401,8140175,フクオカケン,フクオカシサワラク,タムラ,福岡県,福岡市早良区,田村,0,0,1,0,0,0
40137,81111,8111131,フクオカケン,フクオカシサワラク,ニシ,福岡県,福岡市早良区,西,0,0,0,0,0,0
40137,81401,8140173,フクオカケン,フクオカシサワラク,ニシアブラヤマ,福岡県,福岡市早良区,西油山,0,0,0,0,0,0
40137,81111,8111121,フクオカケン,フクオカシサワラク,ニシイルベ,福岡県,福岡市早良区,西入部,0,0,1,0,0,0
40137,814,8140002,フクオカケン,フクオカシサワラク,ニシジン,福岡県,福岡市早良区,西新,0,0,1,0,0,0
40137,81401,8140171,フクオカケン,フクオカシサワラク,ノケ,福岡県,福岡市早良区,野芥,0,0,1,0,0,0
40137,814,8140022,フクオカケン,フクオカシサワラク,ハラ,福岡県,福岡市早良区,原,0,0,1,0,0,0
40137,814,8140023,フクオカケン,フクオカシサワラク,ハラダンチ,福岡県,福岡市早良区,原団地,0,0,0,0,0,0
40137,81111,8111102,フクオカケン,フクオカシサワラク,ヒガシイルベ,福岡県,福岡市早良区,東入部,0,0,1,0,0,0
40137,814,8140013,フクオカケン,フクオカシサワラク,フジサキ,福岡県,福岡市早良区,藤崎,0,0,1,0,0,0
40137,81401,8140163,フクオカケン,フクオカシサワラク,ホシクマ,福岡県,福岡市早良区,干隈,0,0,1,0,0,0
40137,81401,8140162,フクオカケン,フクオカシサワラク,ホシノハラダンチ,福岡県,福岡市早良区,星の原団地,0,0,0,0,0,0
40137,81111,8111133,フクオカケン,フクオカシサワラク,マガリブチ,福岡県,福岡市早良区,曲渕,0,0,0,0,0,0
40137,814,8140031,フクオカケン,フクオカシサワラク,ミナミショウ,福岡県,福岡市早良区,南庄,0,0,1,0,0,0
40137,814,8140035,フクオカケン,フクオカシサワラク,ムロズミダンチ,福岡県,福岡市早良区,室住団地,0,0,0,0,0,0
40137,814,8140015,フクオカケン,フクオカシサワラク,ムロミ,福岡県,福岡市早良区,室見,0,0,1,0,0,0
40137,814,8140006,フクオカケン,フクオカシサワラク,モモチ,福岡県,福岡市早良区,百道,0,0,1,0,0,0
40137,814,8140001,フクオカケン,フクオカシサワラク,モモチハマ,福岡県,福岡市早良区,百道浜,0,0,1,0,0,0
40137,814,8140014,フクオカケン,フクオカシサワラク,ヤヨイ,福岡県,福岡市早良区,弥生,0,0,1,0,0,0
40137,81111,8111111,フクオカケン,フクオカシサワラク,ワキヤマ,福岡県,福岡市早良区,脇山,0,0,1,0,0,0
40217,818,8180000,フクオカケン,チクシノシ,イカニケイサイガナイバアイ,福岡県,筑紫野市,以下に掲載がない場合,0,0,0,0,0,0
40217,818,8180011,フクオカケン,チクシノシ,アシキ,福岡県,筑紫野市,阿志岐,0,0,0,0,0,0
40217,818,8180012,フクオカケン,チクシノシ,アマヤマ,福岡県,筑紫野市,天山,0,0,0,0,0,0
40217,818,8180068,フクオカケン,チクシノシ,イシザキ,福岡県,筑紫野市,石崎,0,0,1,0,0,0
40217,818,8180014,フクオカケン,チクシノシ,ウシジマ,福岡県,筑紫野市,牛島,0,0,0,0,0,0
40217,818,8180034,フクオカケン,チクシノシ,ウツクシガオカミナミ,福岡県,筑紫野市,美しが丘南,0,0,1,0,0,0
40217,818,8180035,フクオカケン,チクシノシ,ウツクシガオカキタ,福岡県,筑紫野市,美しが丘北,0,0,1,0,0,0
40217,818,8180033,フクオカケン,チクシノシ,ウマイチ,福岡県,筑紫野市,馬市,0,0,0,0,0,0
40217,818,8180006,フクオカケン,チクシノシ,オオイシ,福岡県,筑紫野市,大石,0,0,0,0,0,0
40217,818,8180013,フクオカケン,チクシノシ,オカダ,福岡県,筑紫野市,岡田,0,0,1,0,0,0
40217,818,8180041,フクオカケン,チクシノシ,カミコガ,福岡県,筑紫野市,上古賀,0,0,1,0,0,0
40217,818,8180031,フクオカケン,チクシノシ,クマ,福岡県,筑紫野市,隈,0,0,0,0,0,0
40217,818,8180002,フクオカケン,チクシノシ,コウゾノ,福岡県,筑紫野市,香園,0,0,0,0,0,0
40217,818,8180047,フクオカケン,チクシノシ,コガ,福岡県,筑紫野市,古賀,0,0,0,0,0,0
40217,818,8180063,フクオカケン,チクシノシ,サクラダイ,福岡県,筑紫野市,桜台,0,0,1,0,0,0
40217,818,8180021,フクオカケン,チクシノシ,シタミ,福岡県,筑紫野市,下見,0,0,0,0,0,0
40217,818,8180054,フクオカケン,チクシノシ,スギヅカ,福岡県,筑紫野市,杉塚,0,0,1,0,0,0
40217,818,8180067,フクオカケン,チクシノシ,ゾクミョウイン,福岡県,筑紫野市,俗明院,0,0,1,0,0,0
40217,818,8180025,フクオカケン,チクシノシ,チクシ,福岡県,筑紫野市,筑紫,0,0,0,0,0,0
40217,818,8180022,フクオカケン,チクシノシ,チクシエキマエドオリ,福岡県,筑紫野市,筑紫駅前通,0,0,1,0,0,0
40217,818,8180064,フクオカケン,チクシノシ,ツネマツ,福岡県,筑紫野市,常松,0,0,0,0,0,0
40217,818,8180053,フクオカケン,チクシノシ,テンパイザカ,福岡県,筑紫野市,天拝坂,0,0,1,0,0,0
40217,818,8180055,フクオカケン,チクシノシ,トウノハル,福岡県,筑紫野市,塔原,0,0,0,0,0,0
40217,818,8180059,フクオカケン,チクシノシ,トウノハルヒガシ,福岡県,筑紫野市,塔原東,0,0,1,0,0,0
40217,818,8180073,フクオカケン,チクシノシ,トウノハルニシ,福岡県,筑紫野市,塔原西,0,0,1,0,0,0
40217,818,8180074,フクオカケン,チクシノシ,トウノハルミナミ,福岡県,筑紫野市,塔原南,0,0,1,0,0,0
40217,818,8180066,フクオカケン,チクシノシ,ナガオカ,福岡県,筑紫野市,永岡,0,0,0,0,0,0
40217,818,8180032,フクオカケン,チクシノシ,ニシオダ,福岡県,筑紫野市,西小田,0,0,0,0,0,0
40217,818,8180044,フクオカケン,チクシノシ,ハギワラ,福岡県,筑紫野市,萩原,0,0,0,0,0,0
40217,818,8180062,フクオカケン,チクシノシ,ハリスリ,福岡県,筑紫野市,針摺,0,0,0,0,0,0
40217,818,8180085,フクオカケン,チクシノシ,ハリスリキタ,福岡県,筑紫野市,針摺北,0,0,1,0,0,0
40217,818,8180083,フクオカケン,チクシノシ,ハリスリチュウオウ,福岡県,筑紫野市,針摺中央,0,0,1,0,0,0
40217,818,8180084,フクオカケン,チクシノシ,ハリスリニシ,福岡県,筑紫野市,針摺西,0,0,1,0,0,0
40217,818,8180081,フクオカケン,チクシノシ,ハリスリヒガシ,福岡県,筑紫野市,針摺東,0,0,1,0,0,0
40217,818,8180082,フクオカケン,チクシノシ,ハリスリミナミ,福岡県,筑紫野市,針摺南,0,0,1,0,0,0
40217,818,8180005,フクオカケン,チクシノシ,ハル,福岡県,筑紫野市,原,0,0,0,0,0,0
40217,818,8180024,フクオカケン,チクシノシ,ハルダ,福岡県,筑紫野市,原田,0,0,1,0,0,0
40217,818,8180036,フクオカケン,チクシノシ,ヒカリガオカ,福岡県,筑紫野市,光が丘,0,0,1,0,0,0
40217,818,8180045,フクオカケン,チクシノシ,ビョウドウジ,福岡県,筑紫野市,平等寺,0,0,0,0,0,0
40217,818,8180051,フクオカケン,チクシノシ,フツカイチ,福岡県,筑紫野市,二日市,0,0,0,0,0,0
40217,818,8180056,フクオカケン,チクシノシ,フツカイチキタ,福岡県,筑紫野市,二日市北,0,0,1,0,0,0
40217,818,8180072,フクオカケン,チクシノシ,フツカイチチュウオウ,福岡県,筑紫野市,二日市中央,0,0,1,0,0,0
40217,818,8180071,フクオカケン,チクシノシ,フツカイチニシ,福岡県,筑紫野市,二日市西,0,0,1,0,0,0
40217,818,8180057,フクオカケン,チクシノシ,フツカイチミナミ,福岡県,筑紫野市,二日市南,0,0,1,0,0,0
40217,818,8180007,フクオカケン,チクシノシ,ホンドウジ,福岡県,筑紫野市,本道寺,0,0,0,0,0,0
40217,818,8180026,フクオカケン,チクシノシ,ミサキ,福岡県,筑紫野市,美咲,0,0,0,0,0,0
40217,818,8180052,フクオカケン,チクシノシ,ムサシ,福岡県,筑紫野市,武藏,0,0,1,0,0,0
40217,818,8180043,フクオカケン,チクシノシ,ムサシガオカ,福岡県,筑紫野市,むさしケ丘,0,0,1,0,0,0
40217,818,8180061,フクオカケン,チクシノシ,ムラサキ,福岡県,筑紫野市,紫,0,0,1,0,0,0
40217,818,8180065,フクオカケン,チクシノシ,モロタ,福岡県,筑紫野市,諸田,0,0,0,0,0,0
40217,818,8180003,フクオカケン,チクシノシ,ヤマエ,福岡県,筑紫野市,山家,0,0,0,0,0,0
40217,818,8180046,フクオカケン,チクシノシ,ヤマグチ,福岡県,筑紫野市,山口,0,0,0,0,0,0
40217,818,8180001,フクオカケン,チクシノシ,ユスバル,福岡県,筑紫野市,柚須原,0,0,0,0,0,0
40217,818,8180058,フクオカケン,チクシノシ,ユマチ,福岡県,筑紫野市,湯町,0,0,1,0,0,0
40217,818,8180004,フクオカケン,チクシノシ,ヨシキ,福岡県,筑紫野市,吉木,0,0,0,0,0,0
40217,818,8180042,フクオカケン,チクシノシ,リュウミョウジ,福岡県,筑紫野市,立明寺,0,0,0,0,0,0
40217,818,8180023,フクオカケン,チクシノシ,ワカエ,福岡県,筑紫野市,若江,0,0,0,0,0,0
40218,816,8160000,フクオカケン,カスガシ,イカニケイサイガナイバアイ,福岡県,春日市,以下に掲載がない場合,0,0,0,1,0,0
40218,816,8160853,フクオカケン,カスガシ,イズミ,福岡県,春日市,泉,0,0,1,0,0,0
40218,816,8160852,フクオカケン,カスガシ,イチノタニ,福岡県,春日市,一の谷,0,0,1,0,0,0
40218,816,8160831,フクオカケン,カスガシ,オオタニ,福岡県,春日市,大谷,0,0,1,0,0,0
40218,816,8160847,フクオカケン,カスガシ,オオドイ,福岡県,春日市,大土居,0,0,1,0,0,0
40218,816,8160861,フクオカケン,カスガシ,オカモト,福岡県,春日市,岡本,0,0,1,0,0,0
40218,816,8160814,フクオカケン,カスガシ,カスガ,福岡県,春日市,春日,0,0,1,0,0,0
40218,816,8160811,フクオカケン,カスガシ,カスガコウエン,福岡県,春日市,春日公園,0,0,1,0,0,0
40218,816,8160801,フクオカケン,カスガシ,カスガバルヒガシマチ,福岡県,春日市,春日原東町,0,0,1,0,0,0
40218,816,8160803,フクオカケン,カスガシ,カスガバルミナミマチ,福岡県,春日市,春日原南町,0,0,1,0,0,0
40218,816,8160802,フクオカケン,カスガシ,カスガバルキタマチ,福岡県,春日市,春日原北町,0,0,1,0,0,0
40218,816,8160844,フクオカケン,カスガシ,カミシロウズ,福岡県,春日市,上白水,0,0,1,0,0,0
40218,816,8160824,フクオカケン,カスガシ,コクラ,福岡県,春日市,小倉,0,0,1,0,0,0
40218,816,8160826,フクオカケン,カスガシ,コクラヒガシ,福岡県,春日市,小倉東,0,0,1,0,0,0
40218,816,8160872,フクオカケン,カスガシ,サクラガオカ,福岡県,春日市,桜ケ丘,0,0,1,0,0,0
40218,816,8160842,フクオカケン,カスガシ,シモシロウズ,福岡県,春日市,下白水,0,0,0,0,0,0
40218,816,8160854,フクオカケン,カスガシ,シモシロウズキタ,福岡県,春日市,下白水北,0,0,1,0,0,0
40218,816,8160846,フクオカケン,カスガシ,シモシロウズミナミ,福岡県,春日市,下白水南,0,0,1,0,0,0
40218,816,8160848,フクオカケン,カスガシ,シロウズイケ,福岡県,春日市,白水池,0,0,1,0,0,0
40218,816,8160845,フクオカケン,カスガシ,シロウズガオカ,福岡県,春日市,白水ケ丘,0,0,1,0,0,0
40218,816,8160871,フクオカケン,カスガシ,スグ,福岡県,春日市,須玖,0,0,0,0,0,0
40218,816,8160863,フクオカケン,カスガシ,スグミナミ,福岡県,春日市,須玖南,0,0,1,0,0,0
40218,816,8160864,フクオカケン,カスガシ,スグキタ,福岡県,春日市,須玖北,0,0,1,0,0,0
40218,816,8160813,フクオカケン,カスガシ,ソウリ,福岡県,春日市,惣利,0,0,1,0,0,0
40218,816,8160807,フクオカケン,カスガシ,タカラマチ,福岡県,春日市,宝町,0,0,1,0,0,0
40218,816,8160822,フクオカケン,カスガシ,チクシダイ,福岡県,春日市,ちくし台,0,0,1,0,0,0
40218,816,8160805,フクオカケン,カスガシ,チトセマチ,福岡県,春日市,千歳町,0,0,1,0,0,0
40218,816,8160841,フクオカケン,カスガシ,ツカハラダイ,福岡県,春日市,塚原台,0,0,1,0,0,0
40218,816,8160855,フクオカケン,カスガシ,テンジンヤマ,福岡県,春日市,天神山,0,0,1,0,0,0
40218,816,8160851,フクオカケン,カスガシ,ノボリマチ,福岡県,春日市,昇町,0,0,1,0,0,0
40218,816,8160825,フクオカケン,カスガシ,ハクゲンチョウ,福岡県,春日市,伯玄町,0,0,1,0,0,0
40218,816,8160804,フクオカケン,カスガシ,ハラマチ,福岡県,春日市,原町,0,0,1,0,0,0
40218,816,8160806,フクオカケン,カスガシ,ヒカリマチ,福岡県,春日市,光町,0,0,1,0,0,0
40218,816,8160873,フクオカケン,カスガシ,ヒノデマチ,福岡県,春日市,日の出町,0,0,1,0,0,0
40218,816,8160812,フクオカケン,カスガシ,ヒラタダイ,福岡県,春日市,平田台,0,0,1,0,0,0
40218,816,8160849,フクオカケン,カスガシ,ホシミガオカ,福岡県,春日市,星見ヶ丘,0,0,1,0,0,0
40218,816,8160843,フクオカケン,カスガシ,マツガオカ,福岡県,春日市,松ケ丘,0,0,1,0,0,0
40218,816,8160833,フクオカケン,カスガシ,モミジガオカヒガシ,福岡県,春日市,紅葉ケ丘東,0,0,1,0,0,0
40218,816,8160832,フクオカケン,カスガシ,モミジガオカニシ,福岡県,春日市,紅葉ケ丘西,0,0,1,0,0,0
40218,816,8160874,フクオカケン,カスガシ,ヤマトマチ,福岡県,春日市,大和町,0,0,1,0,0,0
40218,816,8160862,フクオカケン,カスガシ,ヤヨイ,福岡県,春日市,弥生,0,0,1,0,0,0
40218,816,8160821,フクオカケン,カスガシ,ワカバダイヒガシ,福岡県,春日市,若葉台東,0,0,1,0,0,0
40218,816,8160823,フクオカケン,カスガシ,ワカバダイニシ,福岡県,春日市,若葉台西,0,0,1,0,0,0
40219,816,8160000,フクオカケン,オオノジョウシ,イカニケイサイガナイバアイ,福岡県,大野城市,以下に掲載がない場合,0,0,0,1,0,0
40219,816,8160934,フクオカケン,オオノジョウシ,アケボノマチ,福岡県,大野城市,曙町,0,0,1,0,0,0
40219,816,8160953,フクオカケン,オオノジョウシ,アサヒガオカ,福岡県,大野城市,旭ケ丘,0,0,1,0,0,0
40219,816,8160971,フクオカケン,オオノジョウシ,ウシクビ,福岡県,大野城市,牛頸,0,0,1,0,0,0
40219,816,8160904,フクオカケン,オオノジョウシ,オオイケ,福岡県,大野城市,大池,0,0,1,0,0,0
40219,816,8160911,フクオカケン,オオノジョウシ,オオキ,福岡県,大野城市,大城,0,0,1,0,0,0
40219,816,8160902,フクオカケン,オオノジョウシ,オトガナ,福岡県,大野城市,乙金,0,0,1,0,0,0
40219,816,8160903,フクオカケン,オオノジョウシ,オトガナダイ,福岡県,大野城市,乙金台,0,0,1,0,0,0
40219,816,8160901,フクオカケン,オオノジョウシ,オトガナヒガシ,福岡県,大野城市,乙金東,0,0,1,0,0,0
40219,816,8160955,フクオカケン,オオノジョウシ,カミオオリ,福岡県,大野城市,上大利,0,0,1,0,0,0
40219,816,8160905,フクオカケン,オオノジョウシ,カワクボ,福岡県,大野城市,川久保,0,0,1,0,0,0
40219,816,8160932,フクオカケン,オオノジョウシ,カワラダ,福岡県,大野城市,瓦田,0,0,1,0,0,0
40219,816,8160924,フクオカケン,オオノジョウシ,サカエマチ,福岡県,大野城市,栄町,0,0,1,0,0,0
40219,816,8160923,フクオカケン,オオノジョウシ,ザツショノクママチ,福岡県,大野城市,雑餉隈町,0,0,1,0,0,0
40219,816,8160952,フクオカケン,オオノジョウシ,シモオオリ,福岡県,大野城市,下大利,0,0,1,0,0,0
40219,816,8160951,フクオカケン,オオノジョウシ,シモオオリダンチ,福岡県,大野城市,下大利団地,0,0,0,0,0,0
40219,816,8160943,フクオカケン,オオノジョウシ,シラキバル,福岡県,大野城市,白木原,0,0,1,0,0,0
40219,816,8160942,フクオカケン,オオノジョウシ,チュウオウ,福岡県,大野城市,中央,0,0,1,0,0,0
40219,816,8160983,フクオカケン,オオノジョウシ,ツキノウラ,福岡県,大野城市,月の浦,0,0,1,0,0,0
40219,816,8160931,フクオカケン,オオノジョウシ,ツツイ,福岡県,大野城市,筒井,0,0,1,0,0,0
40219,816,8160962,フクオカケン,オオノジョウシ,ツツジガオカ,福岡県,大野城市,つつじケ丘,0,0,1,0,0,0
40219,816,8160906,フクオカケン,オオノジョウシ,ナカ,福岡県,大野城市,中,0,0,1,0,0,0
40219,816,8160921,フクオカケン,オオノジョウシ,ナカハタ,福岡県,大野城市,仲畑,0,0,1,0,0,0
40219,816,8160935,フクオカケン,オオノジョウシ,ニシキマチ,福岡県,大野城市,錦町,0,0,1,0,0,0
40219,816,8160982,フクオカケン,オオノジョウシ,ハタガサカ,福岡県,大野城市,畑ケ坂,0,0,1,0,0,0
40219,816,8160941,フクオカケン,オオノジョウシ,ヒガシオオリ,福岡県,大野城市,東大利,0,0,1,0,0,0
40219,816,8160972,フクオカケン,オオノジョウシ,ヒラノダイ,福岡県,大野城市,平野台,0,0,1,0,0,0
40219,816,8160912,フクオカケン,オオノジョウシ,ミカサガワ,福岡県,大野城市,御笠川,0,0,1,0,0,0
40219,816,8160933,フクオカケン,オオノジョウシ,ミズホマチ,福岡県,大野城市,瑞穂町,0,0,1,0,0,0
40219,816,8160961,フクオカケン,オオノジョウシ,ミドリガオカ,福岡県,大野城市,緑ケ丘,0,0,1,0,0,0
40219,816,8160956,フクオカケン,オオノジョウシ,ミナミオオリ,福岡県,大野城市,南大利,0,0,1,0,0,0
40219,816,8160964,フクオカケン,オオノジョウシ,ミナミガオカ,福岡県,大野城市,南ケ丘,0,0,1,0,0,0
40219,816,8160963,フクオカケン,オオノジョウシ,ミヤノダイ,福岡県,大野城市,宮野台,0,0,0,0,0,0
40219,816,8160954,フクオカケン,オオノジョウシ,ムラサキダイ,福岡県,大野城市,紫台,0,0,0,0,0,0
40219,816,8160922,フクオカケン,オオノジョウシ,ヤマダ,福岡県,大野城市,山田,0,0,1,0,0,0
40219,816,8160973,フクオカケン,オオノジョウシ,ヨコミネ,福岡県,大野城市,横峰,0,0,1,0,0,0
40219,816,8160981,フクオカケン,オオノジョウシ,ワカクサ,福岡県,大野城市,若草,0,0,1,0,0,0
40220,81134,8113400,フクオカケン,ムナカタシ,イカニケイサイガナイバアイ,福岡県,宗像市,以下に掲載がない場合,0,0,0,0,0,0
40220,81141,8114162,フクオカケン,ムナカタシ,アオバダイ,福岡県,宗像市,青葉台,0,0,1,0,0,0
40220,81141,8114146,フクオカケン,ムナカタシ,アカマ,福岡県,宗像市,赤間,0,0,1,0,0,0
40220,81141,8114185,フクオカケン,ムナカタシ,アカマエキマエ,福岡県,宗像市,赤間駅前,0,0,1,0,0,0
40220,81141,8114176,フクオカケン,ムナカタシ,アカマガオカ,福岡県,宗像市,赤間ケ丘,0,0,0,0,0,0
40220,81141,8114148,フクオカケン,ムナカタシ,アカマブンキョウマチ,福岡県,宗像市,赤間文教町,0,0,0,0,0,0
40220,81134,8113415,フクオカケン,ムナカタシ,アサノ,福岡県,宗像市,朝野,0,0,0,0,0,0
40220,81141,8114161,フクオカケン,ムナカタシ,アサマチ,福岡県,宗像市,朝町,0,0,0,0,0,0
40220,81141,8114157,フクオカケン,ムナカタシ,アスティ,福岡県,宗像市,アスティ,0,0,1,0,0,0
40220,81134,8113401,フクオカケン,ムナカタシ,イケウラ,福岡県,宗像市,池浦,0,0,0,0,0,0
40220,81135,8113515,フクオカケン,ムナカタシ,イケダ,福岡県,宗像市,池田,0,0,0,0,0,0
40220,81141,8114147,フクオカケン,ムナカタシ,イシマル,福岡県,宗像市,石丸,0,0,1,0,0,0
40220,81141,8114142,フクオカケン,ムナカタシ,イズミガオカ,福岡県,宗像市,泉ケ丘,0,0,1,0,0,0
40220,81134,8113406,フクオカケン,ムナカタシ,イナモト,福岡県,宗像市,稲元,0,0,1,0,0,0
40220,81135,8113502,フクオカケン,ムナカタシ,エグチ,福岡県,宗像市,江口,0,0,0,0,0,0
40220,81141,8114177,フクオカケン,ムナカタシ,オウビダイ,福岡県,宗像市,桜美台,0,0,0,0,0,0
40220,81134,8113422,フクオカケン,ムナカタシ,オウマル,福岡県,宗像市,王丸,0,0,0,0,0,0
40220,81134,8113432,フクオカケン,ムナカタシ,オオイ,福岡県,宗像市,大井,0,0,0,0,0,0
40220,81134,8113433,フクオカケン,ムナカタシ,オオイダイ,福岡県,宗像市,大井台,0,0,0,0,0,0
40220,81134,8113440,フクオカケン,ムナカタシ,オオイミナミ,福岡県,宗像市,大井南,0,0,0,0,0,0
40220,81137,8113701,フクオカケン,ムナカタシ,オオシマ,福岡県,宗像市,大島,0,0,0,0,0,0
40220,81141,8114141,フクオカケン,ムナカタシ,オオタニ,福岡県,宗像市,大谷,0,0,0,0,0,0
40220,81134,8113421,フクオカケン,ムナカタシ,オオブ,福岡県,宗像市,大穂,0,0,0,0,0,0
40220,81134,8113402,フクオカケン,ムナカタシ,カトウ,福岡県,宗像市,河東,0,0,0,0,0,0
40220,81135,8113512,フクオカケン,ムナカタシ,カネザキ,福岡県,宗像市,鐘崎,0,0,0,0,0,0
40220,81134,8113437,フクオカケン,ムナカタシ,クバラ,福岡県,宗像市,久原,0,0,0,0,0,0
40220,81141,8114184,フクオカケン,ムナカタシ,クリエイト,福岡県,宗像市,くりえいと,0,0,1,0,0,0
40220,81135,8113516,フクオカケン,ムナカタシ,コウエンドオリ,福岡県,宗像市,公園通り,0,0,1,0,0,0
40220,81135,8113513,フクオカケン,ムナカタシ,コウジョウ,福岡県,宗像市,上八,0,0,0,0,0,0
40220,81135,8113501,フクオカケン,ムナカタシ,コウノミナト,福岡県,宗像市,神湊,0,0,0,0,0,0
40220,81141,8114165,フクオカケン,ムナカタシ,コウリョウダイ,福岡県,宗像市,広陵台,0,0,1,0,0,0
40220,81141,8114173,フクオカケン,ムナカタシ,サカエマチ,福岡県,宗像市,栄町,0,0,0,0,0,0
40220,81141,8114166,フクオカケン,ムナカタシ,サクラ,福岡県,宗像市,桜,0,0,1,0,0,0
40220,81141,8114143,フクオカケン,ムナカタシ,サブロウマル,福岡県,宗像市,三郎丸,0,0,1,0,0,0
40220,81135,8113511,フクオカケン,ムナカタシ,ジノシマ,福岡県,宗像市,地島,0,0,0,0,0,0
40220,81141,8114163,フクオカケン,ムナカタシ,ジユウガオカ,福岡県,宗像市,自由ケ丘,0,0,1,0,0,0
40220,81141,8114174,フクオカケン,ムナカタシ,ジユウガオカニシマチ,福岡県,宗像市,自由ケ丘西町,0,0,0,0,0,0
40220,81141,8114156,フクオカケン,ムナカタシ,ジユウガオカミナミ,福岡県,宗像市,自由ケ丘南,0,0,1,0,0,0
40220,81141,8114182,フクオカケン,ムナカタシ,ジョウガタニ,福岡県,宗像市,城ケ谷,0,0,0,0,0,0
40220,81134,8113404,フクオカケン,ムナカタシ,ジョウセイガオカ,福岡県,宗像市,城西ケ丘,0,0,1,0,0,0
40220,81141,8114151,フクオカケン,ムナカタシ,ジョウナンガオカ,福岡県,宗像市,城南ケ丘,0,0,0,0,0,0
40220,81141,8114181,フクオカケン,ムナカタシ,ジョウヤマニュータウン,福岡県,宗像市,城山ニュータウン,0,0,0,0,0,0
40220,81134,8113408,フクオカケン,ムナカタシ,ショウヨウダイ,福岡県,宗像市,樟陽台,0,0,1,0,0,0
40220,81134,8113405,フクオカケン,ムナカタシ,スエ,福岡県,宗像市,須恵,0,0,1,0,0,0
40220,81141,8114175,フクオカケン,ムナカタシ,タク,福岡県,宗像市,田久,0,0,1,0,0,0
40220,81134,8113431,フクオカケン,ムナカタシ,タグマ,福岡県,宗像市,田熊,0,0,1,0,0,0
40220,81141,8114152,フクオカケン,ムナカタシ,タケマル,福岡県,宗像市,武丸,0,0,0,0,0,0
40220,81135,8113505,フクオカケン,ムナカタシ,タシマ,福岡県,宗像市,田島,0,0,0,0,0,0
40220,81135,8113514,フクオカケン,ムナカタシ,タノ,福岡県,宗像市,田野,0,0,0,0,0,0
40220,81135,8113507,フクオカケン,ムナカタシ,タレ,福岡県,宗像市,多禮,0,0,0,0,0,0
40220,81141,8114183,フクオカケン,ムナカタシ,ツチアナ,福岡県,宗像市,土穴,0,0,1,0,0,0
40220,81134,8113407,フクオカケン,ムナカタシ,テンピョウダイ,福岡県,宗像市,天平台,0,0,0,0,0,0
40220,81134,8113436,フクオカケン,ムナカタシ,トウゴウ,福岡県,宗像市,東郷,0,0,0,0,0,0
40220,81141,8114164,フクオカケン,ムナカタシ,トクシゲ,福岡県,宗像市,徳重,0,0,1,0,0,0
40220,81141,8114155,フクオカケン,ムナカタシ,ナゴリ,福岡県,宗像市,名残,0,0,1,0,0,0
40220,81134,8113423,フクオカケン,ムナカタシ,ノサカ,福岡県,宗像市,野坂,0,0,0,0,0,0
40220,81141,8114171,フクオカケン,ムナカタシ,ハヤマ,福岡県,宗像市,葉山,0,0,1,0,0,0
40220,81134,8113424,フクオカケン,ムナカタシ,ハルマチ,福岡県,宗像市,原町,0,0,0,0,0,0
40220,81134,8113403,フクオカケン,ムナカタシ,ヒカリガオカ,福岡県,宗像市,ひかりケ丘,0,0,1,0,0,0
40220,81134,8113425,フクオカケン,ムナカタシ,ヒノサト,福岡県,宗像市,日の里,0,0,1,0,0,0
40220,81134,8113412,フクオカケン,ムナカタシ,ビョウドウジ,福岡県,宗像市,平等寺,0,0,0,0,0,0
40220,81134,8113430,フクオカケン,ムナカタシ,ヒライ,福岡県,宗像市,平井,0,0,1,0,0,0
40220,81135,8113504,フクオカケン,ムナカタシ,フカタ,福岡県,宗像市,深田,0,0,0,0,0,0
40220,81141,8114154,フクオカケン,ムナカタシ,フジワラ,福岡県,宗像市,冨地原,0,0,0,0,0,0
40220,81141,8114113,フクオカケン,ムナカタシ,マガリ(25、35),福岡県,宗像市,曲(25、35),1,0,0,0,0,0
40220,81134,8113413,フクオカケン,ムナカタシ,マガリ(ソノタ),福岡県,宗像市,曲(その他),1,0,0,0,0,0
40220,81134,8113439,フクオカケン,ムナカタシ,ミクラ,福岡県,宗像市,三倉,0,0,0,0,0,0
40220,81134,8113414,フクオカケン,ムナカタシ,ミツオカ,福岡県,宗像市,光岡,0,0,0,0,0,0
40220,81141,8114172,フクオカケン,ムナカタシ,ミドリマチ,福岡県,宗像市,緑町,0,0,0,0,0,0
40220,81134,8113416,フクオカケン,ムナカタシ,ミヤタ,福岡県,宗像市,宮田,0,0,1,0,0,0
40220,81135,8113503,フクオカケン,ムナカタシ,ムタジリ,福岡県,宗像市,牟田尻,0,0,0,0,0,0
40220,81134,8113434,フクオカケン,ムナカタシ,ムラヤマダ,福岡県,宗像市,村山田,0,0,0,0,0,0
40220,81134,8113435,フクオカケン,ムナカタシ,モチヤマ,福岡県,宗像市,用山,0,0,0,0,0,0
40220,81134,8113411,フクオカケン,ムナカタシ,ヤマダ,福岡県,宗像市,山田,0,0,0,0,0,0
40220,81135,8113506,フクオカケン,ムナカタシ,ヨシダ,福岡県,宗像市,吉田,0,0,0,0,0,0
40220,81141,8114153,フクオカケン,ムナカタシ,ヨシドメ,福岡県,宗像市,吉留,0,0,0,0,0,0
40220,81141,8114145,フクオカケン,ムナカタシ,リョウゲンジ,福岡県,宗像市,陵厳寺,0,0,1,0,0,0
40220,81134,8113438,フクオカケン,ムナカタシ,ワカミダイ,福岡県,宗像市,和歌美台,0,0,0,0,0,0
40221,81801,8180100,フクオカケン,ダザイフシ,イカニケイサイガナイバアイ,福岡県,太宰府市,以下に掲載がない場合,0,0,0,0,0,0
40221,81801,8180137,フクオカケン,ダザイフシ,アオバダイ,福岡県,太宰府市,青葉台,0,0,1,0,0,0
40221,81801,8180121,フクオカケン,ダザイフシ,アオヤマ,福岡県,太宰府市,青山,0,0,1,0,0,0
40221,81801,8180118,フクオカケン,ダザイフシ,イシザカ,福岡県,太宰府市,石坂,0,0,1,0,0,0
40221,81801,8180115,フクオカケン,ダザイフシ,ウチヤマ,福岡県,太宰府市,内山,0,0,0,0,0,0
40221,81801,8180123,フクオカケン,ダザイフシ,ウメガオカ,福岡県,太宰府市,梅ケ丘,0,0,1,0,0,0
40221,81801,8180134,フクオカケン,ダザイフシ,オオザノ,福岡県,太宰府市,大佐野,0,0,0,0,0,0
40221,81801,8180101,フクオカケン,ダザイフシ,カンゼオンジ,福岡県,太宰府市,観世音寺,0,0,1,0,0,0
40221,81801,8180114,フクオカケン,ダザイフシ,キタダニ,福岡県,太宰府市,北谷,0,0,0,0,0,0
40221,81801,8180132,フクオカケン,ダザイフシ,コクブ,福岡県,太宰府市,国分,0,0,1,0,0,0
40221,81801,8180125,フクオカケン,ダザイフシ,ゴジョウ,福岡県,太宰府市,五条,0,0,1,0,0,0
40221,81801,8180139,フクオカケン,ダザイフシ,サイト,福岡県,太宰府市,宰都,0,0,1,0,0,0
40221,81801,8180117,フクオカケン,ダザイフシ,サイフ,福岡県,太宰府市,宰府,0,0,1,0,0,0
40221,81801,8180133,フクオカケン,ダザイフシ,サカモト,福岡県,太宰府市,坂本,0,0,1,0,0,0
40221,81801,8180111,フクオカケン,ダザイフシ,サンジョウ,福岡県,太宰府市,三条,0,0,1,0,0,0
40221,81801,8180102,フクオカケン,ダザイフシ,シラカワ,福岡県,太宰府市,白川,0,0,0,0,0,0
40221,81801,8180103,フクオカケン,ダザイフシ,スザク,福岡県,太宰府市,朱雀,0,0,1,0,0,0
40221,81801,8180122,フクオカケン,ダザイフシ,タカオ,福岡県,太宰府市,高雄,0,0,1,0,0,0
40221,81801,8180104,フクオカケン,ダザイフシ,トオノコガ,福岡県,太宰府市,通古賀,0,0,1,0,0,0
40221,81801,8180105,フクオカケン,ダザイフシ,トフロウミナミ,福岡県,太宰府市,都府楼南,0,0,1,0,0,0
40221,81801,8180136,フクオカケン,ダザイフシ,ナガウラダイ,福岡県,太宰府市,長浦台,0,0,1,0,0,0
40221,81801,8180124,フクオカケン,ダザイフシ,バイコウエン,福岡県,太宰府市,梅香苑,0,0,1,0,0,0
40221,81801,8180110,フクオカケン,ダザイフシ,ミカサ,福岡県,太宰府市,御笠,0,0,1,0,0,0
40221,81801,8180131,フクオカケン,ダザイフシ,ミズキ,福岡県,太宰府市,水城,0,0,1,0,0,0
40221,81801,8180135,フクオカケン,ダザイフシ,ムカイザノ,福岡県,太宰府市,向佐野,0,0,0,0,0,0
40221,81801,8180138,フクオカケン,ダザイフシ,ヨシマツ,福岡県,太宰府市,吉松,0,0,1,0,0,0
40221,81801,8180119,フクオカケン,ダザイフシ,レンガヤ,福岡県,太宰府市,連歌屋,0,0,1,0,0,0
40223,81131,8113100,フクオカケン,コガシ,イカニケイサイガナイバアイ,福岡県,古賀市,以下に掲載がない場合,0,0,0,0,0,0
40223,81131,8113134,フクオカケン,コガシ,アオヤギ,福岡県,古賀市,青柳,0,0,0,0,0,0
40223,81131,8113133,フクオカケン,コガシ,アオヤギマチ,福岡県,古賀市,青柳町,0,0,0,0,0,0
40223,81131,8113136,フクオカケン,コガシ,イトガウラ,福岡県,古賀市,糸ケ浦,0,0,0,0,0,0
40223,81131,8113131,フクオカケン,コガシ,イマザイケ,福岡県,古賀市,今在家,0,0,0,0,0,0
40223,81131,8113117,フクオカケン,コガシ,イマノショウ,福岡県,古賀市,今の庄,0,0,1,0,0,0
40223,81131,8113102,フクオカケン,コガシ,エキヒガシ,福岡県,古賀市,駅東,0,0,1,0,0,0
40223,81131,8113135,フクオカケン,コガシ,オダケ,福岡県,古賀市,小竹,0,0,0,0,0,0
40223,81131,8113126,フクオカケン,コガシ,オヤマダ,福岡県,古賀市,小山田,0,0,0,0,0,0
40223,81131,8113104,フクオカケン,コガシ,カヅルガオカ,福岡県,古賀市,花鶴丘,0,0,1,0,0,0
40223,81131,8113132,フクオカケン,コガシ,カワバル,福岡県,古賀市,川原,0,0,0,0,0,0
40223,81131,8113115,フクオカケン,コガシ,クボ,福岡県,古賀市,久保,0,0,0,0,0,0
40223,81131,8113137,フクオカケン,コガシ,コガ,福岡県,古賀市,古賀,0,0,0,0,0,0
40223,81131,8113122,フクオカケン,コガシ,コモノ,福岡県,古賀市,薦野,0,0,0,0,0,0
40223,81131,8113105,フクオカケン,コガシ,シシブ,福岡県,古賀市,鹿部,0,0,0,0,0,0
40223,81131,8113116,フクオカケン,コガシ,ショウ,福岡県,古賀市,庄,0,0,0,0,0,0
40223,81131,8113118,フクオカケン,コガシ,シンクボ,福岡県,古賀市,新久保,0,0,1,0,0,0
40223,81131,8113127,フクオカケン,コガシ,シンバル,福岡県,古賀市,新原,0,0,0,0,0,0
40223,81131,8113125,フクオカケン,コガシ,タニヤマ,福岡県,古賀市,谷山,0,0,0,0,0,0
40223,81131,8113113,フクオカケン,コガシ,チドリ,福岡県,古賀市,千鳥,0,0,1,0,0,0
40223,81131,8113103,フクオカケン,コガシ,チュウオウ,福岡県,古賀市,中央,0,0,1,0,0,0
40223,81131,8113101,フクオカケン,コガシ,テンジン,福岡県,古賀市,天神,0,0,1,0,0,0
40223,81131,8113123,フクオカケン,コガシ,ネタビ,福岡県,古賀市,米多比,0,0,0,0,0,0
40223,81131,8113112,フクオカケン,コガシ,ハナミヒガシ,福岡県,古賀市,花見東,0,0,1,0,0,0
40223,81131,8113111,フクオカケン,コガシ,ハナミミナミ,福岡県,古賀市,花見南,0,0,1,0,0,0
40223,81131,8113106,フクオカケン,コガシ,ヒヨシ,福岡県,古賀市,日吉,0,0,1,0,0,0
40223,81131,8113114,フクオカケン,コガシ,マイノサト,福岡県,古賀市,舞の里,0,0,1,0,0,0
40223,81131,8113107,フクオカケン,コガシ,ミアケ,福岡県,古賀市,美明,0,0,1,0,0,0
40223,81131,8113121,フクオカケン,コガシ,ムシロウチ,福岡県,古賀市,筵内,0,0,0,0,0,0
40223,81131,8113124,フクオカケン,コガシ,ヤクオウジ,福岡県,古賀市,薬王寺,0,0,0,0,0,0
40224,81132,8113200,フクオカケン,フクツシ,イカニケイサイガナイバアイ,福岡県,福津市,以下に掲載がない場合,0,0,0,0,0,0
40224,81132,8113213,フクオカケン,フクツシ,フクツシノツギニバンチガクルバアイ,福岡県,福津市,福津市の次に番地がくる場合,0,0,0,0,0,0
40224,81132,8113220,フクオカケン,フクツシ,アケボノ,福岡県,福津市,あけぼの,0,0,0,0,0,0
40224,81132,8113202,フクオカケン,フクツシ,アゼマチ,福岡県,福津市,畦町,0,0,0,0,0,0
40224,81133,8113301,フクオカケン,フクツシ,アラジ,福岡県,福津市,在自,0,0,0,0,0,0
40224,81132,8113205,フクオカケン,フクツシ,ウチドノ,福岡県,福津市,内殿,0,0,0,0,0,0
40224,81133,8113302,フクオカケン,フクツシ,オオイシ,福岡県,福津市,大石,0,0,0,0,0,0
40224,81132,8113226,フクオカケン,フクツシ,オダケ,福岡県,福津市,小竹,0,0,1,0,0,0
40224,81135,8113521,フクオカケン,フクツシ,カツウラ,福岡県,福津市,勝浦,0,0,0,0,0,0
40224,81132,8113207,フクオカケン,フクツシ,カミサイゴウ,福岡県,福津市,上西郷,0,0,0,0,0,0
40224,81132,8113223,フクオカケン,フクツシ,コウヨウダイ,福岡県,福津市,光陽台,0,0,1,0,0,0
40224,81132,8113228,フクオカケン,フクツシ,コウヨウダイミナミ,福岡県,福津市,光陽台南,0,0,1,0,0,0
40224,81132,8113229,フクオカケン,フクツシ,サクラガワ,福岡県,福津市,桜川,0,0,0,0,0,0
40224,81132,8113204,フクオカケン,フクツシ,シャリクラ,福岡県,福津市,舎利蔵,0,0,0,0,0,0
40224,81133,8113303,フクオカケン,フクツシ,スダタ,福岡県,福津市,須多田,0,0,0,0,0,0
40224,81132,8113227,フクオカケン,フクツシ,タカヒラ,福岡県,福津市,高平,0,0,0,0,0,0
40224,81132,8113217,フクオカケン,フクツシ,チュウオウ,福岡県,福津市,中央,0,0,1,0,0,0
40224,81132,8113222,フクオカケン,フクツシ,ツマル,福岡県,福津市,津丸,0,0,0,0,0,0
40224,81133,8113304,フクオカケン,フクツシ,ツヤザキ,福岡県,福津市,津屋崎,0,0,1,0,0,0
40224,81132,8113224,フクオカケン,フクツシ,テビカ,福岡県,福津市,手光,0,0,0,0,0,0
40224,81132,8113218,フクオカケン,フクツシ,テビカミナミ,福岡県,福津市,手光南,0,0,1,0,0,0
40224,81132,8113219,フクオカケン,フクツシ,ニシフクマ,福岡県,福津市,西福間,0,0,1,0,0,0
40224,81135,8113522,フクオカケン,フクツシ,ヌヤマ,福岡県,福津市,奴山,0,0,0,0,0,0
40224,81132,8113214,フクオカケン,フクツシ,ハナミガオカ,福岡県,福津市,花見が丘,0,0,1,0,0,0
40224,81132,8113216,フクオカケン,フクツシ,ハナミガハマ,福岡県,福津市,花見が浜,0,0,1,0,0,0
40224,81132,8113215,フクオカケン,フクツシ,ハナミノサト,福岡県,福津市,花見の里,0,0,1,0,0,0
40224,81132,8113225,フクオカケン,フクツシ,ヒガシフクマ,福岡県,福津市,東福間,0,0,1,0,0,0
40224,81132,8113206,フクオカケン,フクツシ,ヒサスエ,福岡県,福津市,久末,0,0,0,0,0,0
40224,81132,8113208,フクオカケン,フクツシ,フクマエキヒガシ,福岡県,福津市,福間駅東,0,0,1,0,0,0
40224,81132,8113212,フクオカケン,フクツシ,フクマミナミ,福岡県,福津市,福間南,0,0,1,0,0,0
40224,81133,8113308,フクオカケン,フクツシ,ホシガオカ,福岡県,福津市,星ケ丘,0,0,0,0,0,0
40224,81133,8113305,フクオカケン,フクツシ,ミヤジ,福岡県,福津市,宮司,0,0,1,0,0,0
40224,81133,8113312,フクオカケン,フクツシ,ミヤジガオカ,福岡県,福津市,宮司ヶ丘,0,0,0,0,0,0
40224,81133,8113311,フクオカケン,フクツシ,ミヤジハマ,福岡県,福津市,宮司浜,0,0,1,0,0,0
40224,81133,8113309,フクオカケン,フクツシ,ミヤジモトマチ,福岡県,福津市,宮司元町,0,0,0,0,0,0
40224,81132,8113203,フクオカケン,フクツシ,モトギ,福岡県,福津市,本木,0,0,0,0,0,0
40224,81132,8113201,フクオカケン,フクツシ,ヤツナミ,福岡県,福津市,八並,0,0,0,0,0,0
40224,81133,8113306,フクオカケン,フクツシ,ユクエ,福岡県,福津市,生家,0,0,0,0,0,0
40224,81132,8113211,フクオカケン,フクツシ,ユミノサト,福岡県,福津市,有弥の里,0,0,1,0,0,0
40224,81132,8113221,フクオカケン,フクツシ,ワカギダイ,福岡県,福津市,若木台,0,0,1,0,0,0
40224,81133,8113307,フクオカケン,フクツシ,ワタリ,福岡県,福津市,渡,0,0,0,0,0,0
40230,81911,8191100,フクオカケン,イトシマシ,イカニケイサイガナイバアイ,福岡県,糸島市,以下に掲載がない場合,0,0,0,0,0,0
40230,81911,8191132,フクオカケン,イトシマシ,アリタ,福岡県,糸島市,有田,0,0,0,0,0,0
40230,81911,8191127,フクオカケン,イトシマシ,アリタチュウオウ,福岡県,糸島市,有田中央,0,0,1,0,0,0
40230,81911,8191152,フクオカケン,イトシマシ,イイバル,福岡県,糸島市,飯原,0,0,0,0,0,0
40230,81911,8191103,フクオカケン,イトシマシ,イケダ,福岡県,糸島市,池田,0,0,0,0,0,0
40230,81915,8191562,フクオカケン,イトシマシ,イタ,福岡県,糸島市,井田,0,0,0,0,0,0
40230,81911,8191101,フクオカケン,イトシマシ,イタモチ,福岡県,糸島市,板持,0,0,1,0,0,0
40230,81911,8191126,フクオカケン,イトシマシ,イワモト,福岡県,糸島市,岩本,0,0,0,0,0,0
40230,81915,8191582,フクオカケン,イトシマシ,イワラ,福岡県,糸島市,井原,0,0,0,0,0,0
40230,81911,8191112,フクオカケン,イトシマシ,ウラシ,福岡県,糸島市,浦志,0,0,1,0,0,0
40230,81911,8191105,フクオカケン,イトシマシ,ウルウ,福岡県,糸島市,潤,0,0,1,0,0,0
40230,81915,8191573,フクオカケン,イトシマシ,オウマル,福岡県,糸島市,王丸,0,0,0,0,0,0
40230,81911,8191135,フクオカケン,イトシマシ,オオウラ,福岡県,糸島市,大浦,0,0,0,0,0,0
40230,81911,8191121,フクオカケン,イトシマシ,オギノウラ,福岡県,糸島市,荻浦,0,0,0,0,0,0
40230,81911,8191124,フクオカケン,イトシマシ,カフリ,福岡県,糸島市,加布里,0,0,0,0,0,0
40230,81911,8191123,フクオカケン,イトシマシ,カミアリ,福岡県,糸島市,神在,0,0,0,0,0,0
40230,81911,8191155,フクオカケン,イトシマシ,カワツキ,福岡県,糸島市,川付,0,0,0,0,0,0
40230,81915,8191574,フクオカケン,イトシマシ,カワバル,福岡県,糸島市,川原,0,0,0,0,0,0
40230,81911,8191141,フクオカケン,イトシマシ,クラモチ,福岡県,糸島市,蔵持,0,0,0,0,0,0
40230,81915,8191563,フクオカケン,イトシマシ,コウライジ,福岡県,糸島市,高来寺,0,0,0,0,0,0
40230,81911,8191147,フクオカケン,イトシマシ,コウリキ,福岡県,糸島市,香力,0,0,0,0,0,0
40230,81911,8191106,フクオカケン,イトシマシ,シト,福岡県,糸島市,志登,0,0,0,0,0,0
40230,81911,8191131,フクオカケン,イトシマシ,シノワラ,福岡県,糸島市,篠原,0,0,0,0,0,0
40230,81911,8191129,フクオカケン,イトシマシ,シノワラニシ,福岡県,糸島市,篠原西,0,0,1,0,0,0
40230,81911,8191128,フクオカケン,イトシマシ,シノワラヒガシ,福岡県,糸島市,篠原東,0,0,1,0,0,0
40230,81913,8191301,フクオカケン,イトシマシ,シマイダハラ,福岡県,糸島市,志摩井田原,0,0,0,0,0,0
40230,81913,8191313,フクオカケン,イトシマシ,シマイナドメ,福岡県,糸島市,志摩稲留,0,0,0,0,0,0
40230,81913,8191315,フクオカケン,イトシマシ,シマイナバ,福岡県,糸島市,志摩稲葉,0,0,0,0,0,0
40230,81913,8191334,フクオカケン,イトシマシ,シマキシ,福岡県,糸島市,志摩岐志,0,0,0,0,0,0
40230,81913,8191331,フクオカケン,イトシマシ,シマクガ,福岡県,糸島市,志摩久家,0,0,0,0,0,0
40230,81913,8191335,フクオカケン,イトシマシ,シマケヤ,福岡県,糸島市,志摩芥屋,0,0,0,0,0,0
40230,81913,8191323,フクオカケン,イトシマシ,シマコガネマル,福岡県,糸島市,志摩小金丸,0,0,0,0,0,0
40230,81913,8191321,フクオカケン,イトシマシ,シマコフジ,福岡県,糸島市,志摩小富士,0,0,0,0,0,0
40230,81913,8191304,フクオカケン,イトシマシ,シマサクライ,福岡県,糸島市,志摩桜井,0,0,0,0,0,0
40230,81913,8191333,フクオカケン,イトシマシ,シマシンマチ,福岡県,糸島市,志摩新町,0,0,0,0,0,0
40230,81913,8191311,フクオカケン,イトシマシ,シマツワザキ,福岡県,糸島市,志摩津和崎,0,0,0,0,0,0
40230,81913,8191325,フクオカケン,イトシマシ,シマニシカイヅカ,福岡県,糸島市,志摩西貝塚,0,0,0,0,0,0
40230,81913,8191303,フクオカケン,イトシマシ,シマノギタ,福岡県,糸島市,志摩野北,0,0,0,0,0,0
40230,81913,8191312,フクオカケン,イトシマシ,シマハツ,福岡県,糸島市,志摩初,0,0,0,0,0,0
40230,81913,8191305,フクオカケン,イトシマシ,シマババ,福岡県,糸島市,志摩馬場,0,0,0,0,0,0
40230,81913,8191324,フクオカケン,イトシマシ,シマヒガシカイヅカ,福岡県,糸島市,志摩東貝塚,0,0,0,0,0,0
40230,81913,8191336,フクオカケン,イトシマシ,シマヒメシマ,福岡県,糸島市,志摩姫島,0,0,0,0,0,0
40230,81913,8191332,フクオカケン,イトシマシ,シマフナコシ,福岡県,糸島市,志摩船越,0,0,0,0,0,0
40230,81913,8191306,フクオカケン,イトシマシ,シママツグマ,福岡県,糸島市,志摩松隈,0,0,0,0,0,0
40230,81913,8191322,フクオカケン,イトシマシ,シマミトコ,福岡県,糸島市,志摩御床,0,0,0,0,0,0
40230,81913,8191314,フクオカケン,イトシマシ,シマモロヨシ,福岡県,糸島市,志摩師吉,0,0,0,0,0,0
40230,81913,8191302,フクオカケン,イトシマシ,シマヨシダ,福岡県,糸島市,志摩吉田,0,0,0,0,0,0
40230,81911,8191154,フクオカケン,イトシマシ,シライト,福岡県,糸島市,白糸,0,0,0,0,0,0
40230,81911,8191114,フクオカケン,イトシマシ,シンデン,福岡県,糸島市,新田,0,0,0,0,0,0
40230,81915,8191581,フクオカケン,イトシマシ,ズイバイジ,福岡県,糸島市,瑞梅寺,0,0,0,0,0,0
40230,81915,8191572,フクオカケン,イトシマシ,スエナガ,福岡県,糸島市,末永,0,0,0,0,0,0
40230,81911,8191156,フクオカケン,イトシマシ,セト,福岡県,糸島市,瀬戸,0,0,0,0,0,0
40230,81915,8191561,フクオカケン,イトシマシ,ソネ,福岡県,糸島市,曽根,0,0,0,0,0,0
40230,81915,8191564,フクオカケン,イトシマシ,ダイモン,福岡県,糸島市,大門,0,0,0,0,0,0
40230,81911,8191143,フクオカケン,イトシマシ,タカウエ,福岡県,糸島市,高上,0,0,0,0,0,0
40230,81915,8191571,フクオカケン,イトシマシ,タカス,福岡県,糸島市,高祖,0,0,0,0,0,0
40230,81911,8191102,フクオカケン,イトシマシ,タカタ,福岡県,糸島市,高田,0,0,1,0,0,0
40230,81911,8191134,フクオカケン,イトシマシ,タク,福岡県,糸島市,多久,0,0,0,0,0,0
40230,81911,8191125,フクオカケン,イトシマシ,チハヤシンデン,福岡県,糸島市,千早新田,0,0,0,0,0,0
40230,81911,8191111,フクオカケン,イトシマシ,トマリ,福岡県,糸島市,泊,0,0,0,0,0,0
40230,81911,8191133,フクオカケン,イトシマシ,トミ,福岡県,糸島市,富,0,0,0,0,0,0
40230,81911,8191153,フクオカケン,イトシマシ,ナガノ,福岡県,糸島市,長野,0,0,0,0,0,0
40230,81915,8191575,フクオカケン,イトシマシ,ニシノドウ,福岡県,糸島市,西堂,0,0,0,0,0,0
40230,81916,8191622,フクオカケン,イトシマシ,ニジョウイキサン,福岡県,糸島市,二丈一貴山,0,0,0,0,0,0
40230,81916,8191623,フクオカケン,イトシマシ,ニジョウイシザキ,福岡県,糸島市,二丈石崎,0,0,0,0,0,0
40230,81916,8191611,フクオカケン,イトシマシ,ニジョウカタヤマ,福岡県,糸島市,二丈片山,0,0,0,0,0,0
40230,81916,8191621,フクオカケン,イトシマシ,ニジョウカミフカエ,福岡県,糸島市,二丈上深江,0,0,0,0,0,0
40230,81916,8191642,フクオカケン,イトシマシ,ニジョウシカカ,福岡県,糸島市,二丈鹿家,0,0,0,0,0,0
40230,81916,8191616,フクオカケン,イトシマシ,ニジョウタケ,福岡県,糸島市,二丈武,0,0,0,0,0,0
40230,81916,8191615,フクオカケン,イトシマシ,ニジョウタナカ,福岡県,糸島市,二丈田中,0,0,0,0,0,0
40230,81916,8191625,フクオカケン,イトシマシ,ニジョウナガイシ,福岡県,糸島市,二丈長石,0,0,0,0,0,0
40230,81916,8191614,フクオカケン,イトシマシ,ニジョウハマクボ,福岡県,糸島市,二丈浜窪,0,0,0,0,0,0
40230,81916,8191626,フクオカケン,イトシマシ,ニジョウハロ,福岡県,糸島市,二丈波呂,0,0,0,0,0,0
40230,81916,8191601,フクオカケン,イトシマシ,ニジョウフカエ,福岡県,糸島市,二丈深江,0,0,0,0,0,0
40230,81916,8191631,フクオカケン,イトシマシ,ニジョウフクイ,福岡県,糸島市,二丈福井,0,0,0,0,0,0
40230,81916,8191612,フクオカケン,イトシマシ,ニジョウマスエ(シモマスエ),福岡県,糸島市,二丈松末(下松末),1,0,0,0,0,0
40230,81916,8191613,フクオカケン,イトシマシ,ニジョウマスエ(ソノタ),福岡県,糸島市,二丈松末(その他),1,0,0,0,0,0
40230,81916,8191627,フクオカケン,イトシマシ,ニジョウマツクニ,福岡県,糸島市,二丈松国,0,0,0,0,0,0
40230,81916,8191624,フクオカケン,イトシマシ,ニジョウミツヨシ,福岡県,糸島市,二丈満吉,0,0,0,0,0,0
40230,81916,8191641,フクオカケン,イトシマシ,ニジョウヨシイ,福岡県,糸島市,二丈吉井,0,0,0,0,0,0
40230,81911,8191104,フクオカケン,イトシマシ,ハタエ,福岡県,糸島市,波多江,0,0,0,0,0,0
40230,81911,8191107,フクオカケン,イトシマシ,ハタエエキキタ,福岡県,糸島市,波多江駅北,0,0,1,0,0,0
40230,81911,8191108,フクオカケン,イトシマシ,ハタエエキミナミ,福岡県,糸島市,波多江駅南,0,0,1,0,0,0
40230,81911,8191122,フクオカケン,イトシマシ,ヒガシ,福岡県,糸島市,東,0,0,0,0,0,0
40230,81911,8191151,フクオカケン,イトシマシ,ホン,福岡県,糸島市,本,0,0,0,0,0,0
40230,81911,8191113,フクオカケン,イトシマシ,マエバル,福岡県,糸島市,前原,0,0,0,0,0,0
40230,81911,8191138,フクオカケン,イトシマシ,マエバルエキミナミ,福岡県,糸島市,前原駅南,0,0,1,0,0,0
40230,81911,8191118,フクオカケン,イトシマシ,マエバルキタ,福岡県,糸島市,前原北,0,0,1,0,0,0
40230,81911,8191116,フクオカケン,イトシマシ,マエバルチュウオウ,福岡県,糸島市,前原中央,0,0,1,0,0,0
40230,81911,8191117,フクオカケン,イトシマシ,マエバルニシ,福岡県,糸島市,前原西,0,0,1,0,0,0
40230,81911,8191119,フクオカケン,イトシマシ,マエバルヒガシ,福岡県,糸島市,前原東,0,0,1,0,0,0
40230,81911,8191139,フクオカケン,イトシマシ,マエバルミナミ,福岡県,糸島市,前原南,0,0,1,0,0,0
40230,81915,8191583,フクオカケン,イトシマシ,ミクモ,福岡県,糸島市,三雲,0,0,0,0,0,0
40230,81911,8191146,フクオカケン,イトシマシ,ミサカ,福岡県,糸島市,三坂,0,0,0,0,0,0
40230,81911,8191136,フクオカケン,イトシマシ,ミサキガオカ,福岡県,糸島市,美咲が丘,0,0,1,0,0,0
40230,81911,8191137,フクオカケン,イトシマシ,ミナカゼダイ,福岡県,糸島市,南風台,0,0,1,0,0,0
40230,81911,8191142,フクオカケン,イトシマシ,ヤシマ,福岡県,糸島市,八島,0,0,0,0,0,0
40230,81911,8191144,フクオカケン,イトシマシ,ヤマギタ,福岡県,糸島市,山北,0,0,0,0,0,0
40230,81911,8191115,フクオカケン,イトシマシ,ユビ,福岡県,糸島市,油比,0,0,0,0,0,0
40230,81911,8191145,フクオカケン,イトシマシ,ライザン,福岡県,糸島市,雷山,0,0,0,0,0,0
40305,81112,8111200,フクオカケン,チクシグンナカガワマチ,イカニケイサイガナイバアイ,福岡県,筑紫郡那珂川町,以下に掲載がない場合,0,0,0,0,0,0
40305,81112,8111224,フクオカケン,チクシグンナカガワマチ,アントク,福岡県,筑紫郡那珂川町,安徳,0,0,0,0,0,0
40305,81112,8111233,フクオカケン,チクシグンナカガワマチ,イチノセ,福岡県,筑紫郡那珂川町,市ノ瀬,0,0,0,0,0,0
40305,81112,8111211,フクオカケン,チクシグンナカガワマチ,イマミツ,福岡県,筑紫郡那珂川町,今光,0,0,1,0,0,0
40305,81112,8111241,フクオカケン,チクシグンナカガワマチ,ウシロノ,福岡県,筑紫郡那珂川町,後野,0,0,0,0,0,0
40305,81112,8111232,フクオカケン,チクシグンナカガワマチ,ウメガネ,福岡県,筑紫郡那珂川町,埋金,0,0,0,0,0,0
40305,81112,8111255,フクオカケン,チクシグンナカガワマチ,エコ,福岡県,筑紫郡那珂川町,恵子,0,0,1,0,0,0
40305,81112,8111221,フクオカケン,チクシグンナカガワマチ,オウツカダイ,福岡県,筑紫郡那珂川町,王塚台,0,0,1,0,0,0
40305,81112,8111201,フクオカケン,チクシグンナカガワマチ,カタナワ,福岡県,筑紫郡那珂川町,片縄,0,0,1,0,0,0
40305,81112,8111203,フクオカケン,チクシグンナカガワマチ,カタナワキタ,福岡県,筑紫郡那珂川町,片縄北,0,0,1,0,0,0
40305,81112,8111202,フクオカケン,チクシグンナカガワマチ,カタナワニシ,福岡県,筑紫郡那珂川町,片縄西,0,0,1,0,0,0
40305,81112,8111204,フクオカケン,チクシグンナカガワマチ,カタナワヒガシ,福岡県,筑紫郡那珂川町,片縄東,0,0,1,0,0,0
40305,81112,8111223,フクオカケン,チクシグンナカガワマチ,カミカジワラ,福岡県,筑紫郡那珂川町,上梶原,0,0,0,0,0,0
40305,81112,8111234,フクオカケン,チクシグンナカガワマチ,ゴカヤマ,福岡県,筑紫郡那珂川町,五ケ山,0,0,0,0,0,0
40305,81112,8111252,フクオカケン,チクシグンナカガワマチ,ゴロウマル,福岡県,筑紫郡那珂川町,五郎丸,0,0,1,0,0,0
40305,81112,8111222,フクオカケン,チクシグンナカガワマチ,シモカジワラ,福岡県,筑紫郡那珂川町,下梶原,0,0,0,0,0,0
40305,81112,8111253,フクオカケン,チクシグンナカガワマチ,チュウ,福岡県,筑紫郡那珂川町,仲,0,0,1,0,0,0
40305,81112,8111256,フクオカケン,チクシグンナカガワマチ,チュウマル,福岡県,筑紫郡那珂川町,仲丸,0,0,1,0,0,0
40305,81112,8111254,フクオカケン,チクシグンナカガワマチ,ドウゼン,福岡県,筑紫郡那珂川町,道善,0,0,1,0,0,0
40305,81112,8111213,フクオカケン,チクシグンナカガワマチ,ナカバル,福岡県,筑紫郡那珂川町,中原,0,0,1,0,0,0
40305,81112,8111214,フクオカケン,チクシグンナカガワマチ,ナカバルヒガシ,福岡県,筑紫郡那珂川町,中原東,0,0,1,0,0,0
40305,81112,8111212,フクオカケン,チクシグンナカガワマチ,ナカバルミハルガオカ,福岡県,筑紫郡那珂川町,中原観晴が丘,0,0,0,0,0,0
40305,81112,8111236,フクオカケン,チクシグンナカガワマチ,ナメリ,福岡県,筑紫郡那珂川町,南面里,0,0,0,0,0,0
40305,81112,8111235,フクオカケン,チクシグンナカガワマチ,ナルタケ,福岡県,筑紫郡那珂川町,成竹,0,0,0,0,0,0
40305,81112,8111242,フクオカケン,チクシグンナカガワマチ,ニシグマ,福岡県,筑紫郡那珂川町,西隈,0,0,0,0,0,0
40305,81112,8111246,フクオカケン,チクシグンナカガワマチ,ニシハタ,福岡県,筑紫郡那珂川町,西畑,0,0,0,0,0,0
40305,81112,8111243,フクオカケン,チクシグンナカガワマチ,ヒガシグマ,福岡県,筑紫郡那珂川町,東隈,0,0,1,0,0,0
40305,81112,8111231,フクオカケン,チクシグンナカガワマチ,フニュウドウ,福岡県,筑紫郡那珂川町,不入道,0,0,0,0,0,0
40305,81112,8111245,フクオカケン,チクシグンナカガワマチ,ベッショ,福岡県,筑紫郡那珂川町,別所,0,0,0,0,0,0
40305,81112,8111251,フクオカケン,チクシグンナカガワマチ,マツノキ,福岡県,筑紫郡那珂川町,松木,0,0,1,0,0,0
40305,81112,8111215,フクオカケン,チクシグンナカガワマチ,マツバラ,福岡県,筑紫郡那珂川町,松原,0,0,0,0,0,0
40305,81112,8111216,フクオカケン,チクシグンナカガワマチ,ミハルガオカ,福岡県,筑紫郡那珂川町,観晴が丘,0,0,0,0,0,0
40305,81112,8111244,フクオカケン,チクシグンナカガワマチ,ヤマダ,福岡県,筑紫郡那珂川町,山田,0,0,0,0,0,0
40341,81121,8112100,フクオカケン,カスヤグンウミマチ,イカニケイサイガナイバアイ,福岡県,糟屋郡宇美町,以下に掲載がない場合,0,0,0,1,0,0
40341,81121,8112104,フクオカケン,カスヤグンウミマチ,イノ,福岡県,糟屋郡宇美町,井野,0,0,0,0,0,0
40341,81121,8112101,フクオカケン,カスヤグンウミマチ,ウミ,福岡県,糟屋郡宇美町,宇美,0,0,1,0,0,0
40341,81121,8112128,フクオカケン,カスヤグンウミマチ,ウミチュウオウ,福岡県,糟屋郡宇美町,宇美中央,0,0,1,0,0,0
40341,81121,8112125,フクオカケン,カスヤグンウミマチ,ウミヒガシ,福岡県,糟屋郡宇美町,宇美東,0,0,1,0,0,0
40341,81121,8112131,フクオカケン,カスヤグンウミマチ,キフネ,福岡県,糟屋郡宇美町,貴船,0,0,1,0,0,0
40341,81121,8112123,フクオカケン,カスヤグンウミマチ,コウショウジ,福岡県,糟屋郡宇美町,光正寺,0,0,1,0,0,0
40341,81121,8112109,フクオカケン,カスヤグンウミマチ,サクラバル,福岡県,糟屋郡宇美町,桜原,0,0,1,0,0,0
40341,81121,8112105,フクオカケン,カスヤグンウミマチ,シオウジ,福岡県,糟屋郡宇美町,四王寺,0,0,0,0,0,0
40341,81121,8112103,フクオカケン,カスヤグンウミマチ,シオウジザカ,福岡県,糟屋郡宇美町,四王寺坂,0,0,1,0,0,0
40341,81121,8112127,フクオカケン,カスヤグンウミマチ,ショウジダケ,福岡県,糟屋郡宇美町,障子岳,0,0,1,0,0,0
40341,81121,8112126,フクオカケン,カスヤグンウミマチ,ショウジダケミナミ,福岡県,糟屋郡宇美町,障子岳南,0,0,1,0,0,0
40341,81121,8112102,フクオカケン,カスヤグンウミマチ,スミヤキ,福岡県,糟屋郡宇美町,炭焼,0,0,0,0,0,0
40341,81121,8112107,フクオカケン,カスヤグンウミマチ,トビタケ,福岡県,糟屋郡宇美町,とびたけ,0,0,1,0,0,0
40341,81121,8112132,フクオカケン,カスヤグンウミマチ,ハルダ,福岡県,糟屋郡宇美町,原田,0,0,1,0,0,0
40341,81121,8112106,フクオカケン,カスヤグンウミマチ,ヒバリガオカ,福岡県,糟屋郡宇美町,ひばりが丘,0,0,1,0,0,0
40341,81121,8112121,フクオカケン,カスヤグンウミマチ,ヘイワ,福岡県,糟屋郡宇美町,平和,0,0,1,0,0,0
40341,81121,8112122,フクオカケン,カスヤグンウミマチ,ミョウジンザカ,福岡県,糟屋郡宇美町,明神坂,0,0,1,0,0,0
40341,81121,8112108,フクオカケン,カスヤグンウミマチ,ユリガオカ,福岡県,糟屋郡宇美町,ゆりが丘,0,0,1,0,0,0
40341,81121,8112124,フクオカケン,カスヤグンウミマチ,ワカクサ,福岡県,糟屋郡宇美町,若草,0,0,1,0,0,0
40342,81124,8112400,フクオカケン,カスヤグンササグリマチ,イカニケイサイガナイバアイ,福岡県,糟屋郡篠栗町,以下に掲載がない場合,0,0,0,0,0,0
40342,81124,8112412,フクオカケン,カスヤグンササグリマチ,オトイヌ,福岡県,糟屋郡篠栗町,乙犬,0,0,0,0,0,0
40342,81124,8112413,フクオカケン,カスヤグンササグリマチ,オナカ,福岡県,糟屋郡篠栗町,尾仲,0,0,0,0,0,0
40342,81124,8112402,フクオカケン,カスヤグンササグリマチ,カナイデ,福岡県,糟屋郡篠栗町,金出,0,0,0,0,0,0
40342,81124,8112405,フクオカケン,カスヤグンササグリマチ,ササグリ,福岡県,糟屋郡篠栗町,篠栗,0,0,0,0,0,0
40342,81124,8112401,フクオカケン,カスヤグンササグリマチ,タカタ,福岡県,糟屋郡篠栗町,高田,0,0,0,0,0,0
40342,81124,8112416,フクオカケン,カスヤグンササグリマチ,タナカ,福岡県,糟屋郡篠栗町,田中,0,0,0,0,0,0
40342,81124,8112415,フクオカケン,カスヤグンササグリマチ,ツバクロ,福岡県,糟屋郡篠栗町,津波黒,0,0,0,0,0,0
40342,81124,8112404,フクオカケン,カスヤグンササグリマチ,ナイジュウ,福岡県,糟屋郡篠栗町,内住,0,0,0,0,0,0
40342,81124,8112403,フクオカケン,カスヤグンササグリマチ,ハギノウ,福岡県,糟屋郡篠栗町,萩尾,0,0,0,0,0,0
40342,81124,8112411,フクオカケン,カスヤグンササグリマチ,ワカスギ,福岡県,糟屋郡篠栗町,若杉,0,0,0,0,0,0
40342,81124,8112414,フクオカケン,カスヤグンササグリマチ,ワダ,福岡県,糟屋郡篠栗町,和田,0,0,0,0,0,0
40343,81122,8112200,フクオカケン,カスヤグンシメマチ,イカニケイサイガナイバアイ,福岡県,糟屋郡志免町,以下に掲載がない場合,0,0,0,0,0,0
40343,81122,8112203,フクオカケン,カスヤグンシメマチ,イシバシダイ,福岡県,糟屋郡志免町,石橋台,0,0,0,0,0,0
40343,81122,8112209,フクオカケン,カスヤグンシメマチ,オウジ,福岡県,糟屋郡志免町,王子,0,0,1,0,0,0
40343,81122,8112245,フクオカケン,カスヤグンシメマチ,カタミネ,福岡県,糟屋郡志免町,片峰,0,0,1,0,0,0
40343,81122,8112246,フクオカケン,カスヤグンシメマチ,カタミネチュウオウ,福岡県,糟屋郡志免町,片峰中央,0,0,1,0,0,0
40343,81122,8112248,フクオカケン,カスヤグンシメマチ,サカセ,福岡県,糟屋郡志免町,坂瀬,0,0,0,0,0,0
40343,81122,8112201,フクオカケン,カスヤグンシメマチ,サクラガオカ,福岡県,糟屋郡志免町,桜丘,0,0,1,0,0,0
40343,81122,8112202,フクオカケン,カスヤグンシメマチ,シメ,福岡県,糟屋郡志免町,志免,0,0,0,0,0,0
40343,81122,8112244,フクオカケン,カスヤグンシメマチ,シメチュウオウ,福岡県,糟屋郡志免町,志免中央,0,0,1,0,0,0
40343,81122,8112243,フクオカケン,カスヤグンシメマチ,シメヒガシ,福岡県,糟屋郡志免町,志免東,0,0,1,0,0,0
40343,81122,8112204,フクオカケン,カスヤグンシメマチ,タドミ,福岡県,糟屋郡志免町,田富,0,0,0,0,0,0
40343,81122,8112241,フクオカケン,カスヤグンシメマチ,ヒガシコウエンダイ,福岡県,糟屋郡志免町,東公園台,0,0,1,0,0,0
40343,81122,8112205,フクオカケン,カスヤグンシメマチ,ベフ,福岡県,糟屋郡志免町,別府,0,0,0,0,0,0
40343,81122,8112233,フクオカケン,カスヤグンシメマチ,ベフキタ,福岡県,糟屋郡志免町,別府北,0,0,1,0,0,0
40343,81122,8112232,フクオカケン,カスヤグンシメマチ,ベフニシ,福岡県,糟屋郡志免町,別府西,0,0,1,0,0,0
40343,81122,8112231,フクオカケン,カスヤグンシメマチ,ベフヒガシ,福岡県,糟屋郡志免町,別府東,0,0,1,0,0,0
40343,81122,8112242,フクオカケン,カスヤグンシメマチ,マツガオカ,福岡県,糟屋郡志免町,松ケ丘,0,0,0,0,0,0
40343,81122,8112206,フクオカケン,カスヤグンシメマチ,ミタライ,福岡県,糟屋郡志免町,御手洗,0,0,1,0,0,0
40343,81122,8112207,フクオカケン,カスヤグンシメマチ,ミナミザト,福岡県,糟屋郡志免町,南里,0,0,1,0,0,0
40343,81122,8112247,フクオカケン,カスヤグンシメマチ,ムカイガオカ,福岡県,糟屋郡志免町,向ケ丘,0,0,1,0,0,0
40343,81122,8112208,フクオカケン,カスヤグンシメマチ,ヨシハラ,福岡県,糟屋郡志免町,吉原,0,0,0,0,0,0
40344,81121,8112100,フクオカケン,カスヤグンスエマチ,イカニケイサイガナイバアイ,福岡県,糟屋郡須惠町,以下に掲載がない場合,0,0,0,1,0,0
40344,81121,8112112,フクオカケン,カスヤグンスエマチ,ウエキ,福岡県,糟屋郡須惠町,植木,0,0,0,0,0,0
40344,81121,8112114,フクオカケン,カスヤグンスエマチ,カミスエ,福岡県,糟屋郡須惠町,上須惠,0,0,0,0,0,0
40344,81121,8112115,フクオカケン,カスヤグンスエマチ,サタニ,福岡県,糟屋郡須惠町,佐谷,0,0,0,0,0,0
40344,81121,8112111,フクオカケン,カスヤグンスエマチ,シンバル,福岡県,糟屋郡須惠町,新原,0,0,0,0,0,0
40344,81121,8112113,フクオカケン,カスヤグンスエマチ,スエ,福岡県,糟屋郡須惠町,須惠,0,0,0,0,0,0
40344,81122,8112221,フクオカケン,カスヤグンスエマチ,タビイシ,福岡県,糟屋郡須惠町,旅石,0,0,0,0,0,0
40345,81101,8110100,フクオカケン,カスヤグンシングウマチ,イカニケイサイガナイバアイ,福岡県,糟屋郡新宮町,以下に掲載がない場合,0,0,0,0,0,0
40345,81101,8110118,フクオカケン,カスヤグンシングウマチ,アイノシマ,福岡県,糟屋郡新宮町,相島,0,0,0,0,0,0
40345,81101,8110117,フクオカケン,カスヤグンシングウマチ,カミノフ,福岡県,糟屋郡新宮町,上府,0,0,0,0,0,0
40345,81101,8110113,フクオカケン,カスヤグンシングウマチ,サクラヤマテ,福岡県,糟屋郡新宮町,桜山手,0,0,1,0,0,0
40345,81101,8110112,フクオカケン,カスヤグンシングウマチ,シモノフ,福岡県,糟屋郡新宮町,下府,0,0,1,0,0,0
40345,81101,8110115,フクオカケン,カスヤグンシングウマチ,シングウ,福岡県,糟屋郡新宮町,新宮,0,0,0,0,0,0
40345,81101,8110102,フクオカケン,カスヤグンシングウマチ,タチバナグチ,福岡県,糟屋郡新宮町,立花口,0,0,0,0,0,0
40345,81101,8110120,フクオカケン,カスヤグンシングウマチ,チュウオウエキマエ,福岡県,糟屋郡新宮町,中央駅前,0,0,1,0,0,0
40345,81101,8110103,フクオカケン,カスヤグンシングウマチ,ハナタチバナ,福岡県,糟屋郡新宮町,花立花,0,0,1,0,0,0
40345,81101,8110101,フクオカケン,カスヤグンシングウマチ,ハルガミ,福岡県,糟屋郡新宮町,原上,0,0,0,0,0,0
40345,81101,8110104,フクオカケン,カスヤグンシングウマチ,マトノ,福岡県,糟屋郡新宮町,的野,0,0,0,0,0,0
40345,81101,8110121,フクオカケン,カスヤグンシングウマチ,ミサキ,福岡県,糟屋郡新宮町,美咲,0,0,1,0,0,0
40345,81101,8110111,フクオカケン,カスヤグンシングウマチ,ミシロ,福岡県,糟屋郡新宮町,三代,0,0,0,0,0,0
40345,81101,8110119,フクオカケン,カスヤグンシングウマチ,ミドリガハマ,福岡県,糟屋郡新宮町,緑ケ浜,0,0,1,0,0,0
40345,81101,8110116,フクオカケン,カスヤグンシングウマチ,ミナト,福岡県,糟屋郡新宮町,湊,0,0,0,0,0,0
40345,81101,8110114,フクオカケン,カスヤグンシングウマチ,ミナトザカ,福岡県,糟屋郡新宮町,湊坂,0,0,1,0,0,0
40345,81101,8110122,フクオカケン,カスヤグンシングウマチ,モリノミヤ,福岡県,糟屋郡新宮町,杜の宮,0,0,1,0,0,0
40345,81101,8110110,フクオカケン,カスヤグンシングウマチ,ユウス,福岡県,糟屋郡新宮町,夜臼,0,0,1,0,0,0
40348,81125,8112500,フクオカケン,カスヤグンヒサヤママチ,イカニケイサイガナイバアイ,福岡県,糟屋郡久山町,以下に掲載がない場合,0,0,0,0,0,0
40348,81125,8112503,フクオカケン,カスヤグンヒサヤママチ,イノ,福岡県,糟屋郡久山町,猪野,0,0,0,0,0,0
40348,81125,8112501,フクオカケン,カスヤグンヒサヤママチ,クバラ,福岡県,糟屋郡久山町,久原,0,0,0,0,0,0
40348,81125,8112502,フクオカケン,カスヤグンヒサヤママチ,ヤマダ,福岡県,糟屋郡久山町,山田,0,0,0,0,0,0
40349,81123,8112300,フクオカケン,カスヤグンカスヤマチ,イカニケイサイガナイバアイ,福岡県,糟屋郡粕屋町,以下に掲載がない場合,0,0,0,0,0,0
40349,81123,8112306,フクオカケン,カスヤグンカスヤマチ,アエ,福岡県,糟屋郡粕屋町,阿恵,0,0,0,0,0,0
40349,813,8130008,フクオカケン,カスヤグンカスヤマチ,ウチハシ790-1(タノツダンチ),福岡県,糟屋郡粕屋町,内橋790の1(多ノ津団地),1,0,0,0,0,0
40349,81123,8112308,フクオカケン,カスヤグンカスヤマチ,ウチハシ(ソノタ),福岡県,糟屋郡粕屋町,内橋(その他),1,0,0,0,0,0
40349,81123,8112313,フクオカケン,カスヤグンカスヤマチ,エツジ,福岡県,糟屋郡粕屋町,江辻,0,0,0,0,0,0
40349,81123,8112302,フクオカケン,カスヤグンカスヤマチ,オオクマ,福岡県,糟屋郡粕屋町,大隈,0,0,0,0,0,0
40349,81123,8112301,フクオカケン,カスヤグンカスヤマチ,カミオオクマ,福岡県,糟屋郡粕屋町,上大隈,0,0,0,0,0,0
40349,81123,8112309,フクオカケン,カスヤグンカスヤマチ,カヨイチョウ,福岡県,糟屋郡粕屋町,駕与丁,0,0,1,0,0,0
40349,81123,8112315,フクオカケン,カスヤグンカスヤマチ,コウナカバル,福岡県,糟屋郡粕屋町,甲仲原,0,0,1,0,0,0
40349,81123,8112303,フクオカケン,カスヤグンカスヤマチ,サカド,福岡県,糟屋郡粕屋町,酒殿,0,0,0,0,0,0
40349,81123,8112311,フクオカケン,カスヤグンカスヤマチ,チョウジャバル,福岡県,糟屋郡粕屋町,長者原,0,0,0,0,0,0
40349,81123,8112312,フクオカケン,カスヤグンカスヤマチ,トバラ,福岡県,糟屋郡粕屋町,戸原,0,0,0,0,0,0
40349,81123,8112304,フクオカケン,カスヤグンカスヤマチ,ナカバル,福岡県,糟屋郡粕屋町,仲原,0,0,0,0,0,0
40349,81123,8112310,フクオカケン,カスヤグンカスヤマチ,ハナガウラ,福岡県,糟屋郡粕屋町,花ヶ浦,0,0,1,0,0,0
40349,81123,8112307,フクオカケン,カスヤグンカスヤマチ,ハルマチ,福岡県,糟屋郡粕屋町,原町,0,0,1,0,0,0
40349,81123,8112305,フクオカケン,カスヤグンカスヤマチ,ユス,福岡県,糟屋郡粕屋町,柚須,0,0,0,0,0,0
40349,81123,8112314,フクオカケン,カスヤグンカスヤマチ,ワカミヤ,福岡県,糟屋郡粕屋町,若宮,0,0,1,0,0,0
40383,81142,8114200,フクオカケン,オンガグンオカガキマチ,イカニケイサイガナイバアイ,福岡県,遠賀郡岡垣町,以下に掲載がない場合,0,0,0,0,0,0
40383,81142,8114215,フクオカケン,オンガグンオカガキマチ,アサヒダイ,福岡県,遠賀郡岡垣町,旭台,0,0,1,0,0,0
40383,81142,8114216,フクオカケン,オンガグンオカガキマチ,アサヒミナミ,福岡県,遠賀郡岡垣町,旭南,0,0,0,0,0,0
40383,81142,8114203,フクオカケン,オンガグンオカガキマチ,ウツラ,福岡県,遠賀郡岡垣町,内浦,0,0,0,0,0,0
40383,81142,8114231,フクオカケン,オンガグンオカガキマチ,エビツ,福岡県,遠賀郡岡垣町,海老津,0,0,1,0,0,0
40383,81142,8114236,フクオカケン,オンガグンオカガキマチ,エビツエキマエ,福岡県,遠賀郡岡垣町,海老津駅前,0,0,0,0,0,0
40383,81142,8114238,フクオカケン,オンガグンオカガキマチ,エビツエキミナミ,福岡県,遠賀郡岡垣町,海老津駅南,0,0,1,0,0,0
40383,81142,8114212,フクオカケン,オンガグンオカガキマチ,クロヤマ,福岡県,遠賀郡岡垣町,黒山,0,0,0,0,0,0
40383,81142,8114235,フクオカケン,オンガグンオカガキマチ,コウエンドオリ,福岡県,遠賀郡岡垣町,公園通り,0,0,1,0,0,0
40383,81142,8114227,フクオカケン,オンガグンオカガキマチ,コウヨウダイ,福岡県,遠賀郡岡垣町,高陽台,0,0,1,0,0,0
40383,81142,8114217,フクオカケン,オンガグンオカガキマチ,サクラダイ,福岡県,遠賀郡岡垣町,桜台,0,0,0,0,0,0
40383,81142,8114232,フクオカケン,オンガグンオカガキマチ,ジョウハタ,福岡県,遠賀郡岡垣町,上畑,0,0,0,0,0,0
40383,81142,8114234,フクオカケン,オンガグンオカガキマチ,タカクラ,福岡県,遠賀郡岡垣町,高倉,0,0,0,0,0,0
40383,81142,8114218,フクオカケン,オンガグンオカガキマチ,チュウオウダイ,福岡県,遠賀郡岡垣町,中央台,0,0,1,0,0,0
40383,81142,8114204,フクオカケン,オンガグンオカガキマチ,テノ,福岡県,遠賀郡岡垣町,手野,0,0,0,0,0,0
40383,81142,8114222,フクオカケン,オンガグンオカガキマチ,トギリ,福岡県,遠賀郡岡垣町,戸切,0,0,0,0,0,0
40383,81142,8114224,フクオカケン,オンガグンオカガキマチ,ナベタ,福岡県,遠賀郡岡垣町,鍋田,0,0,1,0,0,0
40383,81142,8114213,フクオカケン,オンガグンオカガキマチ,ヌカヅカ,福岡県,遠賀郡岡垣町,糠塚,0,0,0,0,0,0
40383,81142,8114233,フクオカケン,オンガグンオカガキマチ,ノマ,福岡県,遠賀郡岡垣町,野間,0,0,1,0,0,0
40383,81142,8114239,フクオカケン,オンガグンオカガキマチ,ノマミナミ,福岡県,遠賀郡岡垣町,野間南,0,0,0,0,0,0
40383,81142,8114201,フクオカケン,オンガグンオカガキマチ,ハツ,福岡県,遠賀郡岡垣町,波津,0,0,0,0,0,0
40383,81142,8114202,フクオカケン,オンガグンオカガキマチ,ハラ,福岡県,遠賀郡岡垣町,原,0,0,0,0,0,0
40383,81142,8114225,フクオカケン,オンガグンオカガキマチ,ヒガシコウヨウ,福岡県,遠賀郡岡垣町,東高陽,0,0,1,0,0,0
40383,81142,8114237,フクオカケン,オンガグンオカガキマチ,ヒガシタカクラ,福岡県,遠賀郡岡垣町,東高倉,0,0,1,0,0,0
40383,81142,8114228,フクオカケン,オンガグンオカガキマチ,ヒガシマツバラ,福岡県,遠賀郡岡垣町,東松原,0,0,1,0,0,0
40383,81142,8114220,フクオカケン,オンガグンオカガキマチ,ヒガシヤマダ,福岡県,遠賀郡岡垣町,東山田,0,0,1,0,0,0
40383,81142,8114214,フクオカケン,オンガグンオカガキマチ,マツガダイ,福岡県,遠賀郡岡垣町,松ケ台,0,0,1,0,0,0
40383,81142,8114226,フクオカケン,オンガグンオカガキマチ,ミナミコウヨウ,福岡県,遠賀郡岡垣町,南高陽,0,0,0,0,0,0
40383,81142,8114205,フクオカケン,オンガグンオカガキマチ,ミヨシ,福岡県,遠賀郡岡垣町,三吉,0,0,0,0,0,0
40383,81142,8114221,フクオカケン,オンガグンオカガキマチ,ヤマダ,福岡県,遠賀郡岡垣町,山田,0,0,0,0,0,0
40383,81142,8114223,フクオカケン,オンガグンオカガキマチ,ヤマダトウゲ,福岡県,遠賀郡岡垣町,山田峠,0,0,1,0,0,0
40383,81142,8114229,フクオカケン,オンガグンオカガキマチ,ユリガオカ,福岡県,遠賀郡岡垣町,百合ケ丘,0,0,1,0,0,0
40383,81142,8114211,フクオカケン,オンガグンオカガキマチ,ヨシキ,福岡県,遠賀郡岡垣町,吉木,0,0,0,0,0,0
40383,81142,8114242,フクオカケン,オンガグンオカガキマチ,ヨシキニシ,福岡県,遠賀郡岡垣町,吉木西,0,0,1,0,0,0
40383,81142,8114241,フクオカケン,オンガグンオカガキマチ,ヨシキヒガシ,福岡県,遠賀郡岡垣町,吉木東,0,0,1,0,0,0
40384,81143,8114300,フクオカケン,オンガグンオンガチョウ,イカニケイサイガナイバアイ,福岡県,遠賀郡遠賀町,以下に掲載がない場合,0,0,0,0,0,0
40384,81143,8114312,フクオカケン,オンガグンオンガチョウ,アサギ,福岡県,遠賀郡遠賀町,浅木,0,0,1,0,0,0
40384,81143,8114303,フクオカケン,オンガグンオンガチョウ,イマコガ,福岡県,遠賀郡遠賀町,今古賀,0,0,0,0,0,0
40384,81143,8114311,フクオカケン,オンガグンオンガチョウ,オイラ,福岡県,遠賀郡遠賀町,老良,0,0,0,0,0,0
40384,81143,8114342,フクオカケン,オンガグンオンガチョウ,オザキ,福岡県,遠賀郡遠賀町,尾崎,0,0,0,0,0,0
40384,81143,8114341,フクオカケン,オンガグンオンガチョウ,オニヅ,福岡県,遠賀郡遠賀町,鬼津,0,0,0,0,0,0
40384,81143,8114307,フクオカケン,オンガグンオンガチョウ,オンガガワ,福岡県,遠賀郡遠賀町,遠賀川,0,0,1,0,0,0
40384,81143,8114332,フクオカケン,オンガグンオンガチョウ,カミベフ,福岡県,遠賀郡遠賀町,上別府,0,0,0,0,0,0
40384,81143,8114313,フクオカケン,オンガグンオンガチョウ,キモリ,福岡県,遠賀郡遠賀町,木守,0,0,0,0,0,0
40384,81143,8114306,フクオカケン,オンガグンオンガチョウ,キュウテイ,福岡県,遠賀郡遠賀町,旧停,0,0,1,0,0,0
40384,81143,8114301,フクオカケン,オンガグンオンガチョウ,シマヅ,福岡県,遠賀郡遠賀町,島津,0,0,0,0,0,0
40384,81143,8114333,フクオカケン,オンガグンオンガチョウ,シマド,福岡県,遠賀郡遠賀町,島門,0,0,0,0,0,0
40384,81143,8114343,フクオカケン,オンガグンオンガチョウ,デンエン,福岡県,遠賀郡遠賀町,田園,0,0,1,0,0,0
40384,81143,8114302,フクオカケン,オンガグンオンガチョウ,ヒロワタリ,福岡県,遠賀郡遠賀町,広渡,0,0,1,0,0,0
40384,81143,8114322,フクオカケン,オンガグンオンガチョウ,フヨウ,福岡県,遠賀郡遠賀町,芙蓉,0,0,1,0,0,0
40384,81143,8114331,フクオカケン,オンガグンオンガチョウ,ベフ,福岡県,遠賀郡遠賀町,別府,0,0,0,0,0,0
40384,81143,8114305,フクオカケン,オンガグンオンガチョウ,マツノモト,福岡県,遠賀郡遠賀町,松の本,0,0,1,0,0,0
40384,81143,8114321,フクオカケン,オンガグンオンガチョウ,ムショウヅ,福岡県,遠賀郡遠賀町,虫生津,0,0,0,0,0,0
40384,81143,8114324,フクオカケン,オンガグンオンガチョウ,ムショウヅミナミ,福岡県,遠賀郡遠賀町,虫生津南,0,0,0,0,0,0
40384,81143,8114334,フクオカケン,オンガグンオンガチョウ,レンガク,福岡県,遠賀郡遠賀町,蓮角,0,0,0,0,0,0
40384,81143,8114323,フクオカケン,オンガグンオンガチョウ,ワカバダイ,福岡県,遠賀郡遠賀町,若葉台,0,0,0,0,0,0
40384,81143,8114304,フクオカケン,オンガグンオンガチョウ,ワカマツ,福岡県,遠賀郡遠賀町,若松,0,0,0,0,0,0
42209,817,8170000,ナガサキケン,ツシマシ,イカニケイサイガナイバアイ,長崎県,対馬市,以下に掲載がない場合,0,0,0,0,0,0
42209,817,8170034,ナガサキケン,ツシマシ,イヅハラマチアガミ,長崎県,対馬市,厳原町安神,0,0,0,0,0,0
42209,81701,8170153,ナガサキケン,ツシマシ,イヅハラマチアザモ,長崎県,対馬市,厳原町浅藻,0,0,0,0,0,0
42209,81702,8170241,ナガサキケン,ツシマシ,イヅハラマチアレ,長崎県,対馬市,厳原町阿連,0,0,0,0,0,0
42209,817,8170021,ナガサキケン,ツシマシ,イヅハラマチイマヤシキ,長崎県,対馬市,厳原町今屋敷,0,0,0,0,0,0
42209,81701,8170157,ナガサキケン,ツシマシ,イヅハラマチウチヤマ,長崎県,対馬市,厳原町内山,0,0,0,0,0,0
42209,817,8170033,ナガサキケン,ツシマシ,イヅハラマチオウラ,長崎県,対馬市,厳原町尾浦,0,0,0,0,0,0
42209,817,8170024,ナガサキケン,ツシマシ,イヅハラマチオオテバシ,長崎県,対馬市,厳原町大手橋,0,0,0,0,0,0
42209,81702,8170243,ナガサキケン,ツシマシ,イヅハラマチカシネ,長崎県,対馬市,厳原町樫根,0,0,0,0,0,0
42209,817,8170006,ナガサキケン,ツシマシ,イヅハラマチキタザト,長崎県,対馬市,厳原町北里,0,0,0,0,0,0
42209,817,8170032,ナガサキケン,ツシマシ,イヅハラマチクタ,長崎県,対馬市,厳原町久田,0,0,0,0,0,0
42209,817,8170031,ナガサキケン,ツシマシ,イヅハラマチクタミチ,長崎県,対馬市,厳原町久田道,0,0,0,0,0,0
42209,81702,8170245,ナガサキケン,ツシマシ,イヅハラマチクネイナカ,長崎県,対馬市,厳原町久根田舎,0,0,0,0,0,0
42209,81702,8170244,ナガサキケン,ツシマシ,イヅハラマチクネハマ,長崎県,対馬市,厳原町久根浜,0,0,0,0,0,0
42209,817,8170035,ナガサキケン,ツシマシ,イヅハラマチクワ,長崎県,対馬市,厳原町久和,0,0,0,0,0,0
42209,81702,8170246,ナガサキケン,ツシマシ,イヅハラマチコウツキ,長崎県,対馬市,厳原町上槻,0,0,0,0,0,0
42209,817,8170001,ナガサキケン,ツシマシ,イヅハラマチコウラ,長崎県,対馬市,厳原町小浦,0,0,0,0,0,0
42209,817,8170022,ナガサキケン,ツシマシ,イヅハラマチコクブ,長崎県,対馬市,厳原町国分,0,0,0,0,0,0
42209,81702,8170248,ナガサキケン,ツシマシ,イヅハラマチコモダ,長崎県,対馬市,厳原町小茂田,0,0,0,0,0,0
42209,817,8170005,ナガサキケン,ツシマシ,イヅハラマチサジキバラ,長崎県,対馬市,厳原町桟原,0,0,0,0,0,0
42209,81701,8170156,ナガサキケン,ツシマシ,イヅハラマチサスセ,長崎県,対馬市,厳原町佐須瀬,0,0,0,0,0,0
42209,81702,8170247,ナガサキケン,ツシマシ,イヅハラマチシイネ,長崎県,対馬市,厳原町椎根,0,0,0,0,0,0
42209,81702,8170242,ナガサキケン,ツシマシ,イヅハラマチシモバル,長崎県,対馬市,厳原町下原,0,0,0,0,0,0
42209,817,8170023,ナガサキケン,ツシマシ,イヅハラマチタブチ,長崎県,対馬市,厳原町田渕,0,0,0,0,0,0
42209,81701,8170154,ナガサキケン,ツシマシ,イヅハラマチツツ,長崎県,対馬市,厳原町豆酘,0,0,0,0,0,0
42209,81701,8170155,ナガサキケン,ツシマシ,イヅハラマチツツセ,長崎県,対馬市,厳原町豆酘瀬,0,0,0,0,0,0
42209,81701,8170152,ナガサキケン,ツシマシ,イヅハラマチツツナイイン,長崎県,対馬市,厳原町豆酘内院,0,0,0,0,0,0
42209,817,8170014,ナガサキケン,ツシマシ,イヅハラマチテンドウシゲ,長崎県,対馬市,厳原町天道茂,0,0,0,0,0,0
42209,817,8170013,ナガサキケン,ツシマシ,イヅハラマチナカムラ,長崎県,対馬市,厳原町中村,0,0,0,0,0,0
42209,817,8170003,ナガサキケン,ツシマシ,イヅハラマチナムロ,長崎県,対馬市,厳原町南室,0,0,0,0,0,0
42209,817,8170015,ナガサキケン,ツシマシ,イヅハラマチニシザト,長崎県,対馬市,厳原町西里,0,0,0,0,0,0
42209,817,8170016,ナガサキケン,ツシマシ,イヅハラマチヒガシザト,長崎県,対馬市,厳原町東里,0,0,0,0,0,0
42209,817,8170012,ナガサキケン,ツシマシ,イヅハラマチヒヨシ,長崎県,対馬市,厳原町日吉,0,0,0,0,0,0
42209,817,8170002,ナガサキケン,ツシマシ,イヅハラマチマガリ,長崎県,対馬市,厳原町曲,0,0,0,0,0,0
42209,817,8170011,ナガサキケン,ツシマシ,イヅハラマチミヤダニ,長崎県,対馬市,厳原町宮谷,0,0,0,0,0,0
42209,81701,8170151,ナガサキケン,ツシマシ,イヅハラマチヨラナイイン,長崎県,対馬市,厳原町与良内院,0,0,0,0,0,0
42209,81715,8171532,ナガサキケン,ツシマシ,カミアガタマチイナ,長崎県,対馬市,上県町伊奈,0,0,0,0,0,0
42209,81715,8171524,ナガサキケン,ツシマシ,カミアガタマチイヌガウラ,長崎県,対馬市,上県町犬ケ浦,0,0,0,0,0,0
42209,81715,8171513,ナガサキケン,ツシマシ,カミアガタマチウナツラ,長崎県,対馬市,上県町女連,0,0,0,0,0,0
42209,81715,8171521,ナガサキケン,ツシマシ,カミアガタマチカイドコロ,長崎県,対馬市,上県町飼所,0,0,0,0,0,0
42209,81715,8171522,ナガサキケン,ツシマシ,カミアガタマチカシタキ,長崎県,対馬市,上県町樫滝,0,0,0,0,0,0
42209,81715,8171512,ナガサキケン,ツシマシ,カミアガタマチクバラ,長崎県,対馬市,上県町久原,0,0,0,0,0,0
42209,81715,8171531,ナガサキケン,ツシマシ,カミアガタマチコシタカ,長崎県,対馬市,上県町越高,0,0,0,0,0,0
42209,81716,8171603,ナガサキケン,ツシマシ,カミアガタマチサゴ,長崎県,対馬市,上県町佐護,0,0,0,0,0,0
42209,81716,8171602,ナガサキケン,ツシマシ,カミアガタマチサスナ,長崎県,対馬市,上県町佐須奈,0,0,0,0,0,0
42209,81715,8171511,ナガサキケン,ツシマシ,カミアガタマチシシミ,長崎県,対馬市,上県町鹿見,0,0,0,0,0,0
42209,81715,8171533,ナガサキケン,ツシマシ,カミアガタマチシタル,長崎県,対馬市,上県町志多留,0,0,0,0,0,0
42209,81715,8171523,ナガサキケン,ツシマシ,カミアガタマチセタ,長崎県,対馬市,上県町瀬田,0,0,0,0,0,0
42209,81716,8171601,ナガサキケン,ツシマシ,カミアガタマチニシツヤ,長崎県,対馬市,上県町西津屋,0,0,0,0,0,0
42209,81715,8171525,ナガサキケン,ツシマシ,カミアガタマチミソ,長崎県,対馬市,上県町御園,0,0,0,0,0,0
42209,81722,8172243,ナガサキケン,ツシマシ,カミツシママチアシミ,長崎県,対馬市,上対馬町芦見,0,0,0,0,0,0
42209,81717,8171704,ナガサキケン,ツシマシ,カミツシママチアジロ,長崎県,対馬市,上対馬町網代,0,0,0,0,0,0
42209,81717,8171725,ナガサキケン,ツシマシ,カミツシママチイズミ,長崎県,対馬市,上対馬町泉,0,0,0,0,0,0
42209,81717,8171722,ナガサキケン,ツシマシ,カミツシママチオオウラ,長崎県,対馬市,上対馬町大浦,0,0,0,0,0,0
42209,81717,8171715,ナガサキケン,ツシマシ,カミツシママチオオマス,長崎県,対馬市,上対馬町大増,0,0,0,0,0,0
42209,81722,8172241,ナガサキケン,ツシマシ,カミツシママチオシカ,長崎県,対馬市,上対馬町小鹿,0,0,0,0,0,0
42209,81717,8171721,ナガサキケン,ツシマシ,カミツシママチカワチ,長崎県,対馬市,上対馬町河内,0,0,0,0,0,0
42209,81723,8172331,ナガサキケン,ツシマシ,カミツシママチキン,長崎県,対馬市,上対馬町琴,0,0,0,0,0,0
42209,81717,8171714,ナガサキケン,ツシマシ,カミツシママチクス,長崎県,対馬市,上対馬町玖須,0,0,0,0,0,0
42209,81723,8172332,ナガサキケン,ツシマシ,カミツシママチゴネオ,長崎県,対馬市,上対馬町五根緒,0,0,0,0,0,0
42209,81723,8172333,ナガサキケン,ツシマシ,カミツシママチシュウシ,長崎県,対馬市,上対馬町舟志,0,0,0,0,0,0
42209,81717,8171712,ナガサキケン,ツシマシ,カミツシママチトウジュウシ,長崎県,対馬市,上対馬町唐舟志,0,0,0,0,0,0
42209,81717,8171711,ナガサキケン,ツシマシ,カミツシママチトミガウラ,長崎県,対馬市,上対馬町冨浦,0,0,0,0,0,0
42209,81717,8171724,ナガサキケン,ツシマシ,カミツシママチトヨ,長崎県,対馬市,上対馬町豊,0,0,0,0,0,0
42209,81717,8171703,ナガサキケン,ツシマシ,カミツシママチニシドマリ,長崎県,対馬市,上対馬町西泊,0,0,0,0,0,0
42209,81717,8171713,ナガサキケン,ツシマシ,カミツシママチハマグス,長崎県,対馬市,上対馬町浜久須,0,0,0,0,0,0
42209,81717,8171701,ナガサキケン,ツシマシ,カミツシママチヒタカツ,長崎県,対馬市,上対馬町比田勝,0,0,0,0,0,0
42209,81722,8172242,ナガサキケン,ツシマシ,カミツシママチヒトエ,長崎県,対馬市,上対馬町一重,0,0,0,0,0,0
42209,81717,8171702,ナガサキケン,ツシマシ,カミツシママチフルサト,長崎県,対馬市,上対馬町古里,0,0,0,0,0,0
42209,81717,8171723,ナガサキケン,ツシマシ,カミツシママチワニウラ,長崎県,対馬市,上対馬町鰐浦,0,0,0,0,0,0
42209,81712,8171231,ナガサキケン,ツシマシ,トヨタママチイトセ,長崎県,対馬市,豊玉町糸瀬,0,0,0,0,0,0
42209,81712,8171253,ナガサキケン,ツシマシ,トヨタママチウムギ,長崎県,対馬市,豊玉町卯麦,0,0,0,0,0,0
42209,81712,8171252,ナガサキケン,ツシマシ,トヨタママチオオツナ,長崎県,対馬市,豊玉町大綱,0,0,0,0,0,0
42209,81712,8171241,ナガサキケン,ツシマシ,トヨタママチカイグチ,長崎県,対馬市,豊玉町貝口,0,0,0,0,0,0
42209,81712,8171233,ナガサキケン,ツシマシ,トヨタママチカイフナ,長崎県,対馬市,豊玉町貝鮒,0,0,0,0,0,0
42209,81712,8171245,ナガサキケン,ツシマシ,トヨタママチカラス,長崎県,対馬市,豊玉町唐洲,0,0,0,0,0,0
42209,81712,8171256,ナガサキケン,ツシマシ,トヨタママチコヅナ,長崎県,対馬市,豊玉町小綱,0,0,0,0,0,0
42209,81712,8171232,ナガサキケン,ツシマシ,トヨタママチサガ,長崎県,対馬市,豊玉町嵯峨,0,0,0,0,0,0
42209,81712,8171234,ナガサキケン,ツシマシ,トヨタママチサシカ,長崎県,対馬市,豊玉町佐志賀,0,0,0,0,0,0
42209,81712,8171254,ナガサキケン,ツシマシ,トヨタママチサホ,長崎県,対馬市,豊玉町佐保,0,0,0,0,0,0
42209,81712,8171255,ナガサキケン,ツシマシ,トヨタママチシタノウラ,長崎県,対馬市,豊玉町志多浦,0,0,0,0,0,0
42209,81712,8171212,ナガサキケン,ツシマシ,トヨタママチソ,長崎県,対馬市,豊玉町曽,0,0,0,0,0,0
42209,81712,8171251,ナガサキケン,ツシマシ,トヨタママチタ,長崎県,対馬市,豊玉町田,0,0,0,0,0,0
42209,81712,8171213,ナガサキケン,ツシマシ,トヨタママチチロモ,長崎県,対馬市,豊玉町千尋藻,0,0,0,0,0,0
42209,81712,8171201,ナガサキケン,ツシマシ,トヨタママチニイ,長崎県,対馬市,豊玉町仁位,0,0,0,0,0,0
42209,81712,8171246,ナガサキケン,ツシマシ,トヨタママチマワリ,長崎県,対馬市,豊玉町廻,0,0,0,0,0,0
42209,81712,8171257,ナガサキケン,ツシマシ,トヨタママチメイ,長崎県,対馬市,豊玉町銘,0,0,0,0,0,0
42209,81712,8171214,ナガサキケン,ツシマシ,トヨタママチヤリカワ,長崎県,対馬市,豊玉町鑓川,0,0,0,0,0,0
42209,81712,8171223,ナガサキケン,ツシマシ,トヨタママチヨコウラ,長崎県,対馬市,豊玉町横浦,0,0,0,0,0,0
42209,81712,8171202,ナガサキケン,ツシマシ,トヨタママチワイタ,長崎県,対馬市,豊玉町和板,0,0,0,0,0,0
42209,81711,8171106,ナガサキケン,ツシマシ,ミツシママチイヌボエ,長崎県,対馬市,美津島町犬吠,0,0,0,0,0,0
42209,81704,8170432,ナガサキケン,ツシマシ,ミツシママチイマザト,長崎県,対馬市,美津島町今里,0,0,0,0,0,0
42209,81703,8170323,ナガサキケン,ツシマシ,ミツシママチオオフナコシ,長崎県,対馬市,美津島町大船越,0,0,0,0,0,0
42209,81703,8170325,ナガサキケン,ツシマシ,ミツシママチオカタ,長崎県,対馬市,美津島町緒方,0,0,0,0,0,0
42209,81704,8170431,ナガサキケン,ツシマシ,ミツシママチオサキ,長崎県,対馬市,美津島町尾崎,0,0,0,0,0,0
42209,81711,8171105,ナガサキケン,ツシマシ,ミツシママチオヤマ,長崎県,対馬市,美津島町大山,0,0,0,0,0,0
42209,81704,8170433,ナガサキケン,ツシマシ,ミツシママチカシ,長崎県,対馬市,美津島町加志,0,0,0,0,0,0
42209,81711,8171107,ナガサキケン,ツシマシ,ミツシママチカモイセ,長崎県,対馬市,美津島町鴨居瀬,0,0,0,0,0,0
42209,81711,8171103,ナガサキケン,ツシマシ,ミツシママチガヤ,長崎県,対馬市,美津島町賀谷,0,0,0,0,0,0
42209,81703,8170324,ナガサキケン,ツシマシ,ミツシママチクスボ,長崎県,対馬市,美津島町久須保,0,0,0,0,0,0
42209,81705,8170512,ナガサキケン,ツシマシ,ミツシママチクロセ,長崎県,対馬市,美津島町黒瀬,0,0,0,0,0,0
42209,81703,8170322,ナガサキケン,ツシマシ,ミツシママチケチ,長崎県,対馬市,美津島町鶏知,0,0,0,0,0,0
42209,81711,8171101,ナガサキケン,ツシマシ,ミツシママチコフナコシ,長崎県,対馬市,美津島町小船越,0,0,0,0,0,0
42209,81705,8170514,ナガサキケン,ツシマシ,ミツシママチシマヤマ,長崎県,対馬市,美津島町島山,0,0,0,0,0,0
42209,81703,8170321,ナガサキケン,ツシマシ,ミツシママチスモ,長崎県,対馬市,美津島町洲藻,0,0,0,0,0,0
42209,81705,8170511,ナガサキケン,ツシマシ,ミツシママチタケシキ,長崎県,対馬市,美津島町竹敷,0,0,0,0,0,0
42209,81703,8170326,ナガサキケン,ツシマシ,ミツシママチネオ,長崎県,対馬市,美津島町根緒,0,0,0,0,0,0
42209,81711,8171104,ナガサキケン,ツシマシ,ミツシママチノブ,長崎県,対馬市,美津島町濃部,0,0,0,0,0,0
42209,81705,8170513,ナガサキケン,ツシマシ,ミツシママチヒルガウラ,長崎県,対馬市,美津島町昼ケ浦,0,0,0,0,0,0
42209,81704,8170434,ナガサキケン,ツシマシ,ミツシママチフクザキ,長崎県,対馬市,美津島町吹崎,0,0,0,0,0,0
42209,81704,8170435,ナガサキケン,ツシマシ,ミツシママチミカタ,長崎県,対馬市,美津島町箕形,0,0,0,0,0,0
42209,81711,8171102,ナガサキケン,ツシマシ,ミツシママチヨシガウラ,長崎県,対馬市,美津島町芦浦,0,0,0,0,0,0
42209,81713,8171304,ナガサキケン,ツシマシ,ミネマチオウミ,長崎県,対馬市,峰町青海,0,0,0,0,0,0
42209,81713,8171307,ナガサキケン,ツシマシ,ミネマチカサ,長崎県,対馬市,峰町賀佐,0,0,0,0,0,0
42209,81713,8171302,ナガサキケン,ツシマシ,ミネマチカリオ,長崎県,対馬市,峰町狩尾,0,0,0,0,0,0
42209,81713,8171303,ナガサキケン,ツシマシ,ミネマチキサカ,長崎県,対馬市,峰町木坂,0,0,0,0,0,0
42209,81714,8171411,ナガサキケン,ツシマシ,ミネマチクシ,長崎県,対馬市,峰町櫛,0,0,0,0,0,0
42209,81714,8171412,ナガサキケン,ツシマシ,ミネマチサカ,長崎県,対馬市,峰町佐賀,0,0,0,0,0,0
42209,81714,8171413,ナガサキケン,ツシマシ,ミネマチシタカ,長崎県,対馬市,峰町志多賀,0,0,0,0,0,0
42209,81713,8171305,ナガサキケン,ツシマシ,ミネマチツヤナギ,長崎県,対馬市,峰町津柳,0,0,0,0,0,0
42209,81713,8171301,ナガサキケン,ツシマシ,ミネマチミネ,長崎県,対馬市,峰町三根,0,0,0,0,0,0
42209,81713,8171306,ナガサキケン,ツシマシ,ミネマチヨシダ,長崎県,対馬市,峰町吉田,0,0,0,0,0,0
42210,81151,8115100,ナガサキケン,イキシ,イカニケイサイガナイバアイ,長崎県,壱岐市,以下に掲載がない場合,0,0,0,0,0,0
42210,81153,8115301,ナガサキケン,イキシ,アシベチョウアシベウラ,長崎県,壱岐市,芦辺町芦辺浦,0,0,0,0,0,0
42210,81157,8115733,ナガサキケン,イキシ,アシベチョウコクブカワムカエフレ,長崎県,壱岐市,芦辺町国分川迎触,0,0,0,0,0,0
42210,81157,8115731,ナガサキケン,イキシ,アシベチョウコクブトウダフレ,長崎県,壱岐市,芦辺町国分当田触,0,0,0,0,0,0
42210,81157,8115732,ナガサキケン,イキシ,アシベチョウコクブヒガシフレ,長崎県,壱岐市,芦辺町国分東触,0,0,0,0,0,0
42210,81157,8115734,ナガサキケン,イキシ,アシベチョウコクブホンムラフレ,長崎県,壱岐市,芦辺町国分本村触,0,0,0,0,0,0
42210,81157,8115744,ナガサキケン,イキシ,アシベチョウスミヨシウシロフレ,長崎県,壱岐市,芦辺町住吉後触,0,0,0,0,0,0
42210,81157,8115742,ナガサキケン,イキシ,アシベチョウスミヨシヒガシフレ,長崎県,壱岐市,芦辺町住吉東触,0,0,0,0,0,0
42210,81157,8115743,ナガサキケン,イキシ,アシベチョウスミヨシマエフレ,長崎県,壱岐市,芦辺町住吉前触,0,0,0,0,0,0
42210,81157,8115741,ナガサキケン,イキシ,アシベチョウスミヨシヤマノブフレ,長崎県,壱岐市,芦辺町住吉山信触,0,0,0,0,0,0
42210,81154,8115461,ナガサキケン,イキシ,アシベチョウセトウラ,長崎県,壱岐市,芦辺町瀬戸浦,0,0,0,0,0,0
42210,81157,8115751,ナガサキケン,イキシ,アシベチョウナカノゴウナカフレ,長崎県,壱岐市,芦辺町中野郷仲触,0,0,0,0,0,0
42210,81157,8115757,ナガサキケン,イキシ,アシベチョウナカノゴウニシフレ,長崎県,壱岐市,芦辺町中野郷西触,0,0,0,0,0,0
42210,81157,8115752,ナガサキケン,イキシ,アシベチョウナカノゴウヒガシフレ,長崎県,壱岐市,芦辺町中野郷東触,0,0,0,0,0,0
42210,81157,8115756,ナガサキケン,イキシ,アシベチョウナカノゴウホンムラフレ,長崎県,壱岐市,芦辺町中野郷本村触,0,0,0,0,0,0
42210,81154,8115467,ナガサキケン,イキシ,アシベチョウハコザキエスミフレ,長崎県,壱岐市,芦辺町箱崎江角触,0,0,0,0,0,0
42210,81154,8115465,ナガサキケン,イキシ,アシベチョウハコザキクギノオフレ,長崎県,壱岐市,芦辺町箱崎釘ノ尾触,0,0,0,0,0,0
42210,81154,8115462,ナガサキケン,イキシ,アシベチョウハコザキタイソウフレ,長崎県,壱岐市,芦辺町箱崎大左右触,0,0,0,0,0,0
42210,81154,8115464,ナガサキケン,イキシ,アシベチョウハコザキタニエフレ,長崎県,壱岐市,芦辺町箱崎谷江触,0,0,0,0,0,0
42210,81154,8115463,ナガサキケン,イキシ,アシベチョウハコザキナカヤマフレ,長崎県,壱岐市,芦辺町箱崎中山触,0,0,0,0,0,0
42210,81154,8115466,ナガサキケン,イキシ,アシベチョウハコザキホンムラフレ,長崎県,壱岐市,芦辺町箱崎本村触,0,0,0,0,0,0
42210,81154,8115468,ナガサキケン,イキシ,アシベチョウハコザキモロツフレ,長崎県,壱岐市,芦辺町箱崎諸津触,0,0,0,0,0,0
42210,81153,8115321,ナガサキケン,イキシ,アシベチョウフカエサカエフレ,長崎県,壱岐市,芦辺町深江栄触,0,0,0,0,0,0
42210,81153,8115322,ナガサキケン,イキシ,アシベチョウフカエツルキフレ,長崎県,壱岐市,芦辺町深江鶴亀触,0,0,0,0,0,0
42210,81153,8115324,ナガサキケン,イキシ,アシベチョウフカエヒガシフレ,長崎県,壱岐市,芦辺町深江東触,0,0,0,0,0,0
42210,81153,8115323,ナガサキケン,イキシ,アシベチョウフカエヒラフレ,長崎県,壱岐市,芦辺町深江平触,0,0,0,0,0,0
42210,81153,8115326,ナガサキケン,イキシ,アシベチョウフカエホンムラフレ,長崎県,壱岐市,芦辺町深江本村触,0,0,0,0,0,0
42210,81153,8115325,ナガサキケン,イキシ,アシベチョウフカエミナミフレ,長崎県,壱岐市,芦辺町深江南触,0,0,0,0,0,0
42210,81153,8115316,ナガサキケン,イキシ,アシベチョウモロヨシオオイシフレ,長崎県,壱岐市,芦辺町諸吉大石触,0,0,0,0,0,0
42210,81153,8115313,ナガサキケン,イキシ,アシベチョウモロヨシナカフレ,長崎県,壱岐市,芦辺町諸吉仲触,0,0,0,0,0,0
42210,81153,8115314,ナガサキケン,イキシ,アシベチョウモロヨシヒガシフレ,長崎県,壱岐市,芦辺町諸吉東触,0,0,0,0,0,0
42210,81153,8115315,ナガサキケン,イキシ,アシベチョウモロヨシフタマタフレ,長崎県,壱岐市,芦辺町諸吉二亦触,0,0,0,0,0,0
42210,81153,8115311,ナガサキケン,イキシ,アシベチョウモロヨシホンムラフレ,長崎県,壱岐市,芦辺町諸吉本村触,0,0,0,0,0,0
42210,81153,8115312,ナガサキケン,イキシ,アシベチョウモロヨシミナミフレ,長崎県,壱岐市,芦辺町諸吉南触,0,0,0,0,0,0
42210,81157,8115755,ナガサキケン,イキシ,アシベチョウユタケコウフレ,長崎県,壱岐市,芦辺町湯岳興触,0,0,0,0,0,0
42210,81157,8115754,ナガサキケン,イキシ,アシベチョウユタケコンザカフレ,長崎県,壱岐市,芦辺町湯岳今坂触,0,0,0,0,0,0
42210,81157,8115753,ナガサキケン,イキシ,アシベチョウユタケホンムラフレ,長崎県,壱岐市,芦辺町湯岳本村触,0,0,0,0,0,0
42210,81152,8115222,ナガサキケン,イキシ,イシダチョウイケダナカフレ,長崎県,壱岐市,石田町池田仲触,0,0,0,0,0,0
42210,81152,8115224,ナガサキケン,イキシ,イシダチョウイケダニシフレ,長崎県,壱岐市,石田町池田西触,0,0,0,0,0,0
42210,81152,8115221,ナガサキケン,イキシ,イシダチョウイケダヒガシフレ,長崎県,壱岐市,石田町池田東触,0,0,0,0,0,0
42210,81152,8115215,ナガサキケン,イキシ,イシダチョウイシダニシフレ,長崎県,壱岐市,石田町石田西触,0,0,0,0,0,0
42210,81152,8115211,ナガサキケン,イキシ,イシダチョウイシダヒガシフレ,長崎県,壱岐市,石田町石田東触,0,0,0,0,0,0
42210,81152,8115214,ナガサキケン,イキシ,イシダチョウインドオジウラ,長崎県,壱岐市,石田町印通寺浦,0,0,0,0,0,0
42210,81152,8115223,ナガサキケン,イキシ,イシダチョウクキフレ,長崎県,壱岐市,石田町久喜触,0,0,0,0,0,0
42210,81152,8115202,ナガサキケン,イキシ,イシダチョウツツキナカフレ,長崎県,壱岐市,石田町筒城仲触,0,0,0,0,0,0
42210,81152,8115204,ナガサキケン,イキシ,イシダチョウツツキニシフレ,長崎県,壱岐市,石田町筒城西触,0,0,0,0,0,0
42210,81152,8115203,ナガサキケン,イキシ,イシダチョウツツキヒガシフレ,長崎県,壱岐市,石田町筒城東触,0,0,0,0,0,0
42210,81152,8115212,ナガサキケン,イキシ,イシダチョウホンムラフレ,長崎県,壱岐市,石田町本村触,0,0,0,0,0,0
42210,81152,8115213,ナガサキケン,イキシ,イシダチョウミナミフレ,長崎県,壱岐市,石田町南触,0,0,0,0,0,0
42210,81152,8115201,ナガサキケン,イキシ,イシダチョウヤマサキフレ,長崎県,壱岐市,石田町山崎触,0,0,0,0,0,0
42210,81152,8115226,ナガサキケン,イキシ,イシダチョウユタケイテヨシフレ,長崎県,壱岐市,石田町湯岳射手吉触,0,0,0,0,0,0
42210,81152,8115225,ナガサキケン,イキシ,イシダチョウユタケコウフレ,長崎県,壱岐市,石田町湯岳興触,0,0,0,0,0,0
42210,81155,8115543,ナガサキケン,イキシ,カツモトチョウウワバフレ,長崎県,壱岐市,勝本町上場触,0,0,0,0,0,0
42210,81155,8115532,ナガサキケン,イキシ,カツモトチョウオオクボフレ,長崎県,壱岐市,勝本町大久保触,0,0,0,0,0,0
42210,81155,8115523,ナガサキケン,イキシ,カツモトチョウカタヤマフレ,長崎県,壱岐市,勝本町片山触,0,0,0,0,0,0
42210,81155,8115501,ナガサキケン,イキシ,カツモトチョウカツモトウラ,長崎県,壱岐市,勝本町勝本浦,0,0,0,0,0,0
42210,81155,8115513,ナガサキケン,イキシ,カツモトチョウキタフレ,長崎県,壱岐市,勝本町北触,0,0,0,0,0,0
42210,81155,8115521,ナガサキケン,イキシ,カツモトチョウサイドフレ,長崎県,壱岐市,勝本町西戸触,0,0,0,0,0,0
42210,81155,8115531,ナガサキケン,イキシ,カツモトチョウサカモトフレ,長崎県,壱岐市,勝本町坂本触,0,0,0,0,0,0
42210,81155,8115533,ナガサキケン,イキシ,カツモトチョウシンジョウニシフレ,長崎県,壱岐市,勝本町新城西触,0,0,0,0,0,0
42210,81155,8115522,ナガサキケン,イキシ,カツモトチョウシンジョウヒガシフレ,長崎県,壱岐市,勝本町新城東触,0,0,0,0,0,0
42210,81155,8115554,ナガサキケン,イキシ,カツモトチョウタテイシナカフレ,長崎県,壱岐市,勝本町立石仲触,0,0,0,0,0,0
42210,81155,8115556,ナガサキケン,イキシ,カツモトチョウタテイシニシフレ,長崎県,壱岐市,勝本町立石西触,0,0,0,0,0,0
42210,81155,8115553,ナガサキケン,イキシ,カツモトチョウタテイシヒガシフレ,長崎県,壱岐市,勝本町立石東触,0,0,0,0,0,0
42210,81155,8115555,ナガサキケン,イキシ,カツモトチョウタテイシミナミフレ,長崎県,壱岐市,勝本町立石南触,0,0,0,0,0,0
42210,81155,8115511,ナガサキケン,イキシ,カツモトチョウナカフレ,長崎県,壱岐市,勝本町仲触,0,0,0,0,0,0
42210,81155,8115512,ナガサキケン,イキシ,カツモトチョウヒガシフレ,長崎県,壱岐市,勝本町東触,0,0,0,0,0,0
42210,81155,8115544,ナガサキケン,イキシ,カツモトチョウフケフレ,長崎県,壱岐市,勝本町布気触,0,0,0,0,0,0
42210,81155,8115546,ナガサキケン,イキシ,カツモトチョウホングウナカフレ,長崎県,壱岐市,勝本町本宮仲触,0,0,0,0,0,0
42210,81155,8115541,ナガサキケン,イキシ,カツモトチョウホングウニシフレ,長崎県,壱岐市,勝本町本宮西触,0,0,0,0,0,0
42210,81155,8115542,ナガサキケン,イキシ,カツモトチョウホングウヒガシフレ,長崎県,壱岐市,勝本町本宮東触,0,0,0,0,0,0
42210,81155,8115545,ナガサキケン,イキシ,カツモトチョウホングウミナミフレ,長崎県,壱岐市,勝本町本宮南触,0,0,0,0,0,0
42210,81155,8115551,ナガサキケン,イキシ,カツモトチョウユノモトウラ,長崎県,壱岐市,勝本町湯本浦,0,0,0,0,0,0
42210,81155,8115552,ナガサキケン,イキシ,カツモトチョウユリハタフレ,長崎県,壱岐市,勝本町百合畑触,0,0,0,0,0,0
42210,81151,8115107,ナガサキケン,イキシ,ゴウノウラチョウアリヤスフレ,長崎県,壱岐市,郷ノ浦町有安触,0,0,0,0,0,0
42210,81151,8115113,ナガサキケン,イキシ,ゴウノウラチョウウシカタフレ,長崎県,壱岐市,郷ノ浦町牛方触,0,0,0,0,0,0
42210,81151,8115112,ナガサキケン,イキシ,ゴウノウラチョウオオウラフレ,長崎県,壱岐市,郷ノ浦町大浦触,0,0,0,0,0,0
42210,81151,8115161,ナガサキケン,イキシ,ゴウノウラチョウオオシマ,長崎県,壱岐市,郷ノ浦町大島,0,0,0,0,0,0
42210,81151,8115136,ナガサキケン,イキシ,ゴウノウラチョウカタバルフレ,長崎県,壱岐市,郷ノ浦町片原触,0,0,0,0,0,0
42210,81151,8115115,ナガサキケン,イキシ,ゴウノウラチョウキダフレ,長崎県,壱岐市,郷ノ浦町木田触,0,0,0,0,0,0
42210,81151,8115123,ナガサキケン,イキシ,ゴウノウラチョウクギヤマフレ,長崎県,壱岐市,郷ノ浦町釘山触,0,0,0,0,0,0
42210,81151,8115135,ナガサキケン,イキシ,ゴウノウラチョウゴウノウラ,長崎県,壱岐市,郷ノ浦町郷ノ浦,0,0,0,0,0,0
42210,81151,8115105,ナガサキケン,イキシ,ゴウノウラチョウコマキニシフレ,長崎県,壱岐市,郷ノ浦町小牧西触,0,0,0,0,0,0
42210,81151,8115106,ナガサキケン,イキシ,ゴウノウラチョウコマキヒガシフレ,長崎県,壱岐市,郷ノ浦町小牧東触,0,0,0,0,0,0
42210,81151,8115104,ナガサキケン,イキシ,ゴウノウラチョウサトフレ,長崎県,壱岐市,郷ノ浦町里触,0,0,0,0,0,0
42210,81151,8115125,ナガサキケン,イキシ,ゴウノウラチョウシハラニシフレ,長崎県,壱岐市,郷ノ浦町志原西触,0,0,0,0,0,0
42210,81151,8115124,ナガサキケン,イキシ,ゴウノウラチョウシハラミナミフレ,長崎県,壱岐市,郷ノ浦町志原南触,0,0,0,0,0,0
42210,81151,8115134,ナガサキケン,イキシ,ゴウノウラチョウショウフレ,長崎県,壱岐市,郷ノ浦町庄触,0,0,0,0,0,0
42210,81151,8115103,ナガサキケン,イキシ,ゴウノウラチョウシンデンフレ,長崎県,壱岐市,郷ノ浦町新田触,0,0,0,0,0,0
42210,81151,8115122,ナガサキケン,イキシ,ゴウノウラチョウタイバルフレ,長崎県,壱岐市,郷ノ浦町大原触,0,0,0,0,0,0
42210,81151,8115117,ナガサキケン,イキシ,ゴウノウラチョウタナカフレ,長崎県,壱岐市,郷ノ浦町田中触,0,0,0,0,0,0
42210,81151,8115142,ナガサキケン,イキシ,ゴウノウラチョウツボフレ,長崎県,壱岐市,郷ノ浦町坪触,0,0,0,0,0,0
42210,81151,8115162,ナガサキケン,イキシ,ゴウノウラチョウナガシマ,長崎県,壱岐市,郷ノ浦町長島,0,0,0,0,0,0
42210,81151,8115131,ナガサキケン,イキシ,ゴウノウラチョウナガタフレ,長崎県,壱岐市,郷ノ浦町永田触,0,0,0,0,0,0
42210,81151,8115102,ナガサキケン,イキシ,ゴウノウラチョウナガミネヒガシフレ,長崎県,壱岐市,郷ノ浦町長峰東触,0,0,0,0,0,0
42210,81151,8115101,ナガサキケン,イキシ,ゴウノウラチョウナガミネホンムラフレ,長崎県,壱岐市,郷ノ浦町長峰本村触,0,0,0,0,0,0
42210,81151,8115143,ナガサキケン,イキシ,ゴウノウラチョウハツヤマニシフレ,長崎県,壱岐市,郷ノ浦町初山西触,0,0,0,0,0,0
42210,81151,8115144,ナガサキケン,イキシ,ゴウノウラチョウハツヤマヒガシフレ,長崎県,壱岐市,郷ノ浦町初山東触,0,0,0,0,0,0
42210,81151,8115163,ナガサキケン,イキシ,ゴウノウラチョウハルシマ,長崎県,壱岐市,郷ノ浦町原島,0,0,0,0,0,0
42210,81151,8115111,ナガサキケン,イキシ,ゴウノウラチョウハンセイホンムラフレ,長崎県,壱岐市,郷ノ浦町半城本村触,0,0,0,0,0,0
42210,81151,8115132,ナガサキケン,イキシ,ゴウノウラチョウヒガシフレ,長崎県,壱岐市,郷ノ浦町東触,0,0,0,0,0,0
42210,81151,8115121,ナガサキケン,イキシ,ゴウノウラチョウヒロウトフレ,長崎県,壱岐市,郷ノ浦町平人触,0,0,0,0,0,0
42210,81151,8115133,ナガサキケン,イキシ,ゴウノウラチョウホンムラフレ,長崎県,壱岐市,郷ノ浦町本村触,0,0,0,0,0,0
42210,81151,8115155,ナガサキケン,イキシ,ゴウノウラチョウムギヤフレ,長崎県,壱岐市,郷ノ浦町麦谷触,0,0,0,0,0,0
42210,81151,8115116,ナガサキケン,イキシ,ゴウノウラチョウモノベホンムラフレ,長崎県,壱岐市,郷ノ浦町物部本村触,0,0,0,0,0,0
42210,81151,8115114,ナガサキケン,イキシ,ゴウノウラチョウヤナギダフレ,長崎県,壱岐市,郷ノ浦町柳田触,0,0,0,0,0,0
42210,81151,8115141,ナガサキケン,イキシ,ゴウノウラチョウワカマツフレ,長崎県,壱岐市,郷ノ浦町若松触,0,0,0,0,0,0
42210,81151,8115151,ナガサキケン,イキシ,ゴウノウラチョウワタラウラ,長崎県,壱岐市,郷ノ浦町渡良浦,0,0,0,0,0,0
42210,81151,8115153,ナガサキケン,イキシ,ゴウノウラチョウワタラニシフレ,長崎県,壱岐市,郷ノ浦町渡良西触,0,0,0,0,0,0
42210,81151,8115154,ナガサキケン,イキシ,ゴウノウラチョウワタラヒガシフレ,長崎県,壱岐市,郷ノ浦町渡良東触,0,0,0,0,0,0
42210,81151,8115152,ナガサキケン,イキシ,ゴウノウラチョウワタラミナミフレ,長崎県,壱岐市,郷ノ浦町渡良南触,0,0,0,0,0,0 | [
"[email protected]"
] | |
70425764af9a4af7b00d9a87514deba1e28c8fda | 722af8e6fa81960a6119c2e45ba6795771bad595 | /agents/migrations/0043_veri.py | a9fbb71d81b2b140fcb68e8c4a02de3f0a744641 | [] | no_license | witty-technologies-empowerment/pmc | 85d21fa3c360d40adeec7ca93792b5bc68c258e5 | 201bee60197240eec911637e136cf14bc5814eec | refs/heads/master | 2023-05-27T12:37:48.894933 | 2021-06-13T04:34:57 | 2021-06-13T04:34:57 | 376,439,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | # Generated by Django 2.2.6 on 2020-02-04 15:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0042_auto_20191022_0303'),
]
operations = [
migrations.CreateModel(
name='Veri',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.CharField(max_length=20)),
('rcode', models.CharField(max_length=100)),
('count', models.CharField(default=1, max_length=2)),
('created', models.DateTimeField(auto_now_add=True)),
],
options={
'ordering': ['-created'],
},
),
]
| [
"[email protected]"
] | |
6bc8e6bda70fb29b075f4f3c8c40b9a6b2656fcf | 9c6e63eb1796bbf4c37d93fca941fb67b4cd4741 | /trunk/scarlett/app.py | 7f7179015d2a9cefbdbe4557f2fd080029521298 | [] | no_license | BGCX261/zizw-svn-to-git | ffc6636d8e0d91b24f124ba3d16c61af10d7441c | c8d068af7a36396ce707dc035b15330c77b02f2a | refs/heads/master | 2016-09-05T13:11:22.053860 | 2015-08-25T15:51:45 | 2015-08-25T15:51:45 | 41,585,036 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,466 | py |
import logging
import webob
import wsgiref.handlers
import simplejson.encoder
import simplejson.decoder
from google.appengine.ext import db
from google.appengine.api import users
from scarlett import model
from scarlett import utils
jsonEncoder = simplejson.encoder.JSONEncoder()
jsonDecoder = simplejson.decoder.JSONDecoder()
def scarlett(environ, start_response):
#
# create request & response objects
#
request = webob.Request(environ)
response = webob.Response()
#
# create session object
#
session = Session(request)
# do job
channel = session.message["channel"]
if channel == "refresh":
if session.isAdmin:
response.body = shell % ("Scarlett-Admin", "scarlett.Admin")
elif session.user:
response.body = shell % ("Scarlett", "scarlett.Main")
else:
response.body = shell % ("Login", "scarlett.Login")
elif channel == "locateservice":
fullName = str(session.message["fullName"])
service = utils.my_import(fullName)
simpleName = fullName.split('.')[-1]
response.body = generateServiceStub(service, fullName, simpleName)
response.content_type = "text/plain"
response.charset = "UTF-8"
elif channel == "rmi":
fullName = str(session.message["serviceName"])
methodName = str(session.message["methodName"])
        args = session.message["args"]
argList = ""
for i in range(len(args)):
argList += "args[%s], " % i
argList = argList[:-2]
service = utils.my_import(fullName)
outMessage = {
"result": eval("service."+methodName+"(session, "+argList+")")
}
if fullName == "scarlett.admin" and methodName == "login" and outMessage["result"]:
response.set_cookie("sid", userToSid(args[0]))
response.body = jsonEncoder.encode(outMessage)
response.content_type = "text/plain"
response.charset = "UTF-8"
elif channel == "admin":
user = users.get_current_user()
if not user:
response.body = users.create_login_url("/")
logging.info("admin: do login")
else:
response.body = "/"
logging.info("admin: do normal")
else:
response.body = "unknown channel: %s" % str(channel)
#
return response(environ, start_response)
#
# Tips:
# session.message
# session.message.channel
# session.isAdmin
# session.user
# session.user.alias
#
class Session():
def __init__(self, request):
#
# setting message
#
if request.method == "GET":
self.message = {"channel":"refresh"}
else:
self.message = jsonDecoder.decode(request.body)
#
# setting isAdmin & user
#
if users.is_current_user_admin():
self.isAdmin = True
self.user = None
elif "sid" not in request.cookies:
self.isAdmin = False
self.user = None
elif not request.cookies["sid"]:
self.isAdmin = False
self.user = None
else:
self.isAdmin = False
self.user = sidToUser(request.cookies["sid"])
def sidToUser(sid):
#
# TODO: a real sid should be used
#
return model.User.get(db.Key.from_path("User", "ID_"+sid, _app="scarlett"))
def userToSid(userName):
#
# TODO: a real sid should be used
#
return userName
def generateServiceStub(service, fullName, simpleName):
methodList= filter(lambda x : x[0:1]!= "_", dir(service))
stub = "var " + simpleName + " = function(){\n"
stub += "}\n\n"
for method in methodList:
stub += simpleName + ".prototype." + method + " = function() {\n"
stub += "\treturn jsloader.doRmi('%s', '%s', arguments);\n" % (fullName, method)
stub += "};\n"
return stub
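# For illustration (names taken from the "rmi" branch above): generating a stub for
# the "scarlett.admin" module with a "login" method would return roughly:
#
#   var admin = function(){
#   }
#
#   admin.prototype.login = function() {
#       return jsloader.doRmi('scarlett.admin', 'login', arguments);
#   };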
def main():
wsgiref.handlers.CGIHandler().run(scarlett)
shell = """
<html>
<head>
<title>%s</title>
<script>
var App = null;
var app = null;
function init() {
App = jsloader.resolve("%s")
app = new App(document.body);
var welcome = document.getElementById("welcome");
document.body.removeChild(welcome);
}
function destroy() {
app.destroy();
}
</script>
</head>
<body scroll="no" style="overflow: hidden; margin: 0px; padding: 0px" onload="init()" onunload="destroy()">
<span id="welcome">Loading ...</span>
</body>
<script src="js/lang/JSLoader.js"></script>
</html>
"""
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
388430234a19c8d3bb7df514027066b68cf8fc68 | 3507fdc5012e55f6a784d70a7ad6da11224e5bfe | /caesar_cipher.py | 2fa71c8cece652f7e97971c772561d702a65ad0c | [] | no_license | jonasthiel/100-days-of-code-python | 640be865bdba10cca17ba72c4923cf9961ed570c | 94ad366d10ed862c6c699ae1f242bd462f2ba597 | refs/heads/main | 2023-04-03T11:53:16.993098 | 2021-04-09T14:20:41 | 2021-04-09T14:20:41 | 330,404,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,924 | py | from os import system
alphabet = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
logo = """
,adPPYba, ,adPPYYba, ,adPPYba, ,adPPYba, ,adPPYYba, 8b,dPPYba,
a8" "" "" `Y8 a8P_____88 I8[ "" "" `Y8 88P' "Y8
8b ,adPPPPP88 8PP""""""" `"Y8ba, ,adPPPPP88 88
"8a, ,aa 88, ,88 "8b, ,aa aa ]8I 88, ,88 88
`"Ybbd8"' `"8bbdP"Y8 `"Ybbd8"' `"YbbdP"' `"8bbdP"Y8 88
88 88
"" 88
88
,adPPYba, 88 8b,dPPYba, 88,dPPYba, ,adPPYba, 8b,dPPYba,
a8" "" 88 88P' "8a 88P' "8a a8P_____88 88P' "Y8
8b 88 88 d8 88 88 8PP""""""" 88
"8a, ,aa 88 88b, ,a8" 88 88 "8b, ,aa 88
`"Ybbd8"' 88 88`YbbdP"' 88 88 `"Ybbd8"' 88
88
88
"""
end = False
while not end:
print(logo)
direction = input("Type 'encode' to encrypt, type 'decode' to decrypt:\n")
text = input("Type your message:\n").lower()
shift = int(input("Type the shift number:\n"))
def caesar(direction, text, shift):
output_text = ""
if shift > 26:
shift %= 26
if direction == "encode":
for i in text:
if i in alphabet:
index = alphabet.index(i)
if index + shift > 25:
output_text += alphabet[index + shift - 26]
else:
output_text += alphabet[index + shift]
else:
output_text += i
elif direction == "decode":
for i in text:
if i in alphabet:
index = alphabet.index(i)
if index - shift < 0:
output_text += alphabet[index - shift + 26]
else:
output_text += alphabet[index - shift]
else:
output_text += i
print(f"The {direction}d text is {output_text}")
caesar(direction, text, shift)
if input("Type 'yes' if you want to go again. Otherwise type 'no'.\n").lower() == "no":
end = True
else:
system('clear') | [
"[email protected]"
] | |
ceb0c0134cb3480fdab988077750fcef69ee298d | f8ea3582884df87172cb747e424ebd0c20223614 | /(sandbox,tobemerged)/setup.py | bfdf53b7357b1a52aaad77a7986bc61cc1b5ddd9 | [
"MIT"
] | permissive | karimbahgat/PythonGis | 94f52f800a769ee54b12c7277604ead011465321 | fb99148a15bcbe0438ddca67b484a15076bd961a | refs/heads/master | 2023-04-12T15:59:08.522464 | 2022-09-09T22:48:32 | 2022-09-09T22:48:32 | 47,153,255 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,260 | py | ############
### allow building the exe by simply running this script
import sys
sys.argv.append("py2exe")
############
### imports
from distutils.core import setup
import py2exe
###########
### options
WINDOWS = [{"script": "guitester.py",
"icon_resources": [(1,"pythongis/app/logo.ico")] }]
OPTIONS = {"skip_archive": True,
"dll_excludes": ["python26.dll","python27.so"]}
###########
### create the application icon
##import PIL, PIL.Image
##img = PIL.Image.open("icon.png")
##img.save("icon.ico", sizes=[(255,255),(128,128),(64,64),(48,48),(32,32),(16,16),(8,8)])
###########
### build
setup(windows=WINDOWS,
options={"py2exe": OPTIONS}
)
###########
### manually copy pythongis package to dist
### ...because py2exe may not copy all files
import os
import shutil
frompath = "pythongis"
topath = os.path.join("dist","pythongis")
shutil.rmtree(topath) # deletes the folder copied by py2exe
shutil.copytree(frompath, topath)
###########
### and same with dependencies
for dependname in os.listdir("dependencies"):
frompath = os.path.join("dependencies", dependname)
topath = os.path.join("dist", dependname)
shutil.rmtree(topath) # deletes the folder copied by py2exe
shutil.copytree(frompath, topath)
| [
"[email protected]"
] | |
d519581682c5b4acb68ab1878e3cda3a7b8c4ddd | 5e2655fb23e558c54695dea5c9456b5552570947 | /localdev/seed/management/commands/seed_data.py | f42ad2be00ea5d9f4f5111900de0d82b66bf4e16 | [
"BSD-3-Clause"
] | permissive | mitodl/bootcamp-ecommerce | 992cb23243462d82c75cfae6c115a27728491219 | 339c67b84b661a37ffe32580da72383d95666c5c | refs/heads/master | 2023-08-31T10:45:57.827990 | 2023-07-25T13:55:32 | 2023-07-25T13:55:32 | 82,849,185 | 6 | 3 | BSD-3-Clause | 2023-08-24T20:25:47 | 2017-02-22T20:27:24 | Python | UTF-8 | Python | false | false | 709 | py | """Management command to create or update seed data"""
from django.core.management.base import BaseCommand
from localdev.seed.api import create_seed_data
from localdev.seed.utils import get_raw_seed_data_from_file
class Command(BaseCommand):
"""Creates or updates seed data based on a raw seed data file"""
help = __doc__
def handle(self, *args, **options):
raw_seed_data = get_raw_seed_data_from_file()
results = create_seed_data(raw_seed_data)
if not results.has_results:
self.stdout.write(self.style.WARNING("No results logged."))
else:
self.stdout.write(self.style.SUCCESS("RESULTS"))
self.stdout.write(results.report)
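# Assuming the localdev seed app is listed in INSTALLED_APPS, this command would
# typically be run as: python manage.py seed_data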
| [
"[email protected]"
] | |
c7040497fddc70804c791aa8caffd6ee49621d0d | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_2/hbbirf001/question3.py | 86f1543deb5d0d08303893c1de5d53fe0d63e38e | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | import math
pi =2
denom = math.sqrt(2)
while denom != 2:
pi = pi*2/denom
denom = math.sqrt(2+denom)
print('Approximation of pi:',round(pi,3),sep=' ')
radius = eval(input('Enter the radius:\n'))
area = pi*radius**2
print('Area:', round(area,3)) | [
"[email protected]"
] | |
907107ef98f88293e5eab6076021cbe6900e6c7d | 44acca58155b0a5a2b46d6a9ed255befece4f5d1 | /api_vendas/api_vendas/wsgi.py | 298a0f3193ddd7ce468b07db9e5f06b15df79e98 | [] | no_license | GeovaneCavalcante/appHubVendas | 6f6c74cb2f94b2534ab1c3d0f241422fb88b81f4 | 068bb08e2a270d132e60502c35edc11a4526f671 | refs/heads/master | 2020-03-20T07:22:32.555287 | 2018-06-13T22:38:53 | 2018-06-13T22:38:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | """
WSGI config for api_vendas project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api_vendas.settings")
application = get_wsgi_application()
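# A WSGI server is pointed at this module, e.g. (assuming gunicorn is used):
#   gunicorn api_vendas.wsgi:application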
| [
"[email protected]"
] | |
65fc92a79bd813ef453b821d8a02b1a20e6cd577 | e588da296dd6ec3bedee9d24444dfca6e8780aef | /classroom examples/10.py | ab241aa1417f606aba6c9459a043d03a16b9e3e0 | [] | no_license | sujith1919/TCS-Python | 98eac61a02500a0e8f3139e431c98a509828c867 | c988cf078616540fe7f56e3ebdfd964aebd14519 | refs/heads/master | 2023-03-02T09:03:10.052633 | 2021-02-02T16:40:18 | 2021-02-02T16:40:18 | 335,355,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | import time
import os
starttime=time.time()
for x in range(1000):
x**x
endtime=time.time()
print(endtime-starttime)
time.sleep(1) #sleeps for 1 second
ts = os.path.getctime("10.py")
print(ts)
print(time.ctime(ts))
| [
"[email protected]"
] | |
22256ba682801c86d92e53c516104a2ac18db1fd | b27b26462524984951bfbab9250abd145ecfd4c8 | /Demoing/stage_two/bloomingtonnormal/craigslist_sample/craigslist_sample/spiders/craigslist_spider.py | 9ccd525099e5b2802a2344337a1293d1d28242f0 | [] | no_license | afcarl/fastTraffickingGrab | cb813d066f1f69f359598e0b55e632dafd273c89 | 9ff274cb7c9b6c7b60d1436c209b2bfc5907267d | refs/heads/master | 2020-03-26T06:21:21.404931 | 2014-08-16T12:38:29 | 2014-08-16T12:38:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,042 | py |
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from craigslist_sample.items import CraigslistSampleItem
class CraigslistSpider(CrawlSpider):
name = "craigslist"
allowed_domains = ["craigslist.org"]
start_urls = [
"http://bn.craigslist.org",
"http://bn.craigslist.org/cas/",
"http://bn.craigslist.org/cas/index100.html",
"http://bn.craigslist.org/cas/index200.html",
"http://bn.craigslist.org/cas/index300.html",
"http://bn.craigslist.org/cas/index400.html",
"http://bn.craigslist.org/cas/index500.html",
"http://bn.craigslist.org/cas/index600.html",
"http://bn.craigslist.org/cas/index700.html",
"http://bn.craigslist.org/cas/index800.html",
"http://bn.craigslist.org/cas/index900.html",
"http://bn.craigslist.org/cas/index1000.html",
"http://bn.craigslist.org/cas/index1100.html",
"http://bn.craigslist.org/cas/index1200.html",
"http://bn.craigslist.org/cas/index1300.html",
"http://bn.craigslist.org/cas/index1400.html",
"http://bn.craigslist.org/cas/index1500.html",
"http://bn.craigslist.org/cas/index1600.html",
"http://bn.craigslist.org/cas/index1700.html",
"http://bn.craigslist.org/cas/index1800.html",
"http://bn.craigslist.org/cas/index1900.html",
"http://bn.craigslist.org/cas/index2000.html",
"http://bn.craigslist.org/cas/index2100.html",
"http://bn.craigslist.org/cas/index2200.html",
"http://bn.craigslist.org/cas/index2300.html",
"http://bn.craigslist.org/cas/index2400.html",
"http://bn.craigslist.org/cas/index2500.html",
"http://bn.craigslist.org/cas/index2600.html",
"http://bn.craigslist.org/cas/index2700.html",
"http://bn.craigslist.org/cas/index2800.html",
"http://bn.craigslist.org/cas/index2900.html",
"http://bn.craigslist.org/cas/index3000.html",
"http://bn.craigslist.org/cas/index3100.html",
"http://bn.craigslist.org/cas/index3200.html",
"http://bn.craigslist.org/cas/index3300.html",
"http://bn.craigslist.org/cas/index3400.html",
"http://bn.craigslist.org/cas/index3500.html",
"http://bn.craigslist.org/cas/index3600.html",
"http://bn.craigslist.org/cas/index3700.html",
"http://bn.craigslist.org/cas/index3800.html",
"http://bn.craigslist.org/cas/index3900.html",
"http://bn.craigslist.org/cas/index4000.html",
"http://bn.craigslist.org/cas/index4100.html",
"http://bn.craigslist.org/cas/index4200.html",
"http://bn.craigslist.org/cas/index4300.html",
"http://bn.craigslist.org/cas/index4400.html",
"http://bn.craigslist.org/cas/index4500.html",
"http://bn.craigslist.org/cas/index4600.html",
"http://bn.craigslist.org/cas/index4700.html",
"http://bn.craigslist.org/cas/index4800.html",
"http://bn.craigslist.org/cas/index4900.html",
"http://bn.craigslist.org/cas/index5000.html",
"http://bn.craigslist.org/cas/index5100.html",
"http://bn.craigslist.org/cas/index5200.html",
"http://bn.craigslist.org/cas/index5300.html",
"http://bn.craigslist.org/cas/index5400.html",
"http://bn.craigslist.org/cas/index5500.html",
"http://bn.craigslist.org/cas/index5600.html",
"http://bn.craigslist.org/cas/index5700.html",
"http://bn.craigslist.org/cas/index5800.html",
"http://bn.craigslist.org/cas/index5900.html",
"http://bn.craigslist.org/cas/index6000.html",
"http://bn.craigslist.org/cas/index6100.html",
"http://bn.craigslist.org/cas/index6200.html",
"http://bn.craigslist.org/cas/index6300.html",
"http://bn.craigslist.org/cas/index6400.html",
"http://bn.craigslist.org/cas/index6500.html",
"http://bn.craigslist.org/cas/index6600.html",
"http://bn.craigslist.org/cas/index6700.html",
"http://bn.craigslist.org/cas/index6800.html",
"http://bn.craigslist.org/cas/index6900.html",
"http://bn.craigslist.org/cas/index7000.html",
"http://bn.craigslist.org/cas/index7100.html",
"http://bn.craigslist.org/cas/index7200.html",
"http://bn.craigslist.org/cas/index7300.html",
"http://bn.craigslist.org/cas/index7400.html",
"http://bn.craigslist.org/cas/index7500.html",
"http://bn.craigslist.org/cas/index7600.html",
"http://bn.craigslist.org/cas/index7700.html",
"http://bn.craigslist.org/cas/index7800.html",
"http://bn.craigslist.org/cas/index7900.html",
"http://bn.craigslist.org/cas/index8000.html",
"http://bn.craigslist.org/cas/index8100.html",
"http://bn.craigslist.org/cas/index8200.html",
"http://bn.craigslist.org/cas/index8300.html",
"http://bn.craigslist.org/cas/index8400.html",
"http://bn.craigslist.org/cas/index8500.html",
"http://bn.craigslist.org/cas/index8600.html",
"http://bn.craigslist.org/cas/index8700.html",
"http://bn.craigslist.org/cas/index8800.html",
"http://bn.craigslist.org/cas/index8900.html",
"http://bn.craigslist.org/cas/index9000.html",
"http://bn.craigslist.org/cas/index9100.html",
"http://bn.craigslist.org/cas/index9200.html",
"http://bn.craigslist.org/cas/index9300.html",
"http://bn.craigslist.org/cas/index9400.html",
"http://bn.craigslist.org/cas/index9500.html",
"http://bn.craigslist.org/cas/index9600.html",
"http://bn.craigslist.org/cas/index9700.html",
"http://bn.craigslist.org/cas/index9800.html",
"http://bn.craigslist.org/cas/index9900.html"
]
rules = (Rule(SgmlLinkExtractor(allow=(),restrict_xpaths=('//a')), callback="parse", follow= True),)
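    # The single rule follows every <a> link found on a page and sends each
    # fetched response through parse() below.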
def parse(self, response):
hxs = HtmlXPathSelector(response)
titles = hxs.select("//span[@class='pl']")
date_info = hxs.select("//h4[@class='ban']/span[@class='bantext']/text()")
items = []
        # Append the raw page body to a local file for later inspection.
        with open("things.txt", "a") as file_to:
            file_to.write(response.body)
        # Build one item per posting title found on the page.
        for title in titles:
            item = CraigslistSampleItem()
            item["title"] = title.select("a/text()").extract()
            item["link"] = title.select("a/@href").extract()
            item["date"] = date_info.extract()
            items.append(item)
return items
| [
"[email protected]"
] | |
07b8a5019433683f2a6f9216935aaa0a5caa2f35 | f0b75bd94f133a13f469f429a696f26be3be9862 | /week 2/.history/python_second_assignment_20200204163718.py | b9cd1fdfdd8aa3efdde2ac692d9c4aefc42371f3 | [] | no_license | dechavez4/Python_handin_assignments | 023350fabd212cdf2a4ee9cd301306dc5fd6bea0 | 82fd8c991e560c18ecb2152ea5a8fc35dfc3c608 | refs/heads/master | 2023-01-11T23:31:27.220757 | 2020-05-22T10:33:56 | 2020-05-22T10:33:56 | 237,179,899 | 0 | 0 | null | 2022-12-30T20:14:04 | 2020-01-30T09:30:16 | Python | UTF-8 | Python | false | false | 2,196 | py | import csv
from sys import argv
import platform
import argparse
import os.path
from os import path
# Create a python file with 3 functions:
# A. def print_file_content(file) that can print content of a csv file to the console
def print_file_content(file):
with open(file) as csv_file:
content = csv_file.readlines()
for line in content[:20]:
print(line.strip().split(','))
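# Example (hypothetical file name): print_file_content("data.csv") prints up to
# the first 20 rows, each as a list of its comma-separated fields.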
# Note: opening the output file in "w" mode below can overwrite the old file.
# B. def write_list_to_file(output_file, lst) that can take a list of tuple and write each element to a new line in file
def write_list_to_file(output_file, *lst):
if platform.system() == 'Windows':
newline=''
else:
newline=None
with open (output_file, 'w', newline=newline) as output_file:
output_writer = csv.writer(output_file)
for ele in lst:
output_writer.writerow(ele)
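# Example (hypothetical values): write_list_to_file("out.csv", (1, "a"), (2, "b"))
# writes one CSV row per tuple.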
# C. def read_csv(input_file) that take a csv file and read each row into a list
def read_line(file):
with open(file) as file_object:
lines = file_object.readlines()
print(lines)
for line in lines:
print(line.rstrip())
# 2. Add a functionality so that the file can be called from cli with 2 arguments
def run():
if args.print:
print_file_content(argv[2])
if args.write:
write_list_to_file(argv[2], argv[3:])
if args.read:
read_line(argv[2])
    if args.file:
        if path.exists(argv[2]):
            write_list_to_file(argv[2], argv[3:])
        else:
            print("file doesn't exist", argv[2])
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="this is my menu")
parser.add_argument("--print", help='function that can print content of a csv file to the console')
parser.add_argument("--write", nargs="*", help='function that can take a list of tuple and write each element to a new line in file')
parser.add_argument("--read", help='function that take a csv file and read each row into a list')
parser.add_argument("--
", nargs="*", help="an argument that if given will write the content to file_name or otherwise will print it to the console.")
args = parser.parse_args()
run() | [
"[email protected]"
] | |
8d1e0879923a18a294c104bbdfeb17dc5fd8e53f | ed63c99ccb0beebcfe9bff2ef68e9c86877fa7d8 | /synthesizer/train.py | 823dcd119ae7f939f68829aa6c221721e8806a3a | [
"MIT"
] | permissive | X-CCS/Real-Time-Voice-Cloning-1 | d25588a852b87849f9a517d587a3a36d086bbae0 | ae4aa2aa1605168d2f04275e1a45f6de2d88f3f0 | refs/heads/master | 2022-02-28T03:29:26.135339 | 2019-10-23T12:01:10 | 2019-10-23T12:01:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,152 | py | from synthesizer.utils.symbols import symbols
from synthesizer.utils.text import sequence_to_text
from synthesizer.hparams import hparams_debug_string
from synthesizer.feeder import Feeder
from synthesizer.models import create_model
from synthesizer.utils import ValueWindow, plot
from synthesizer import infolog, audio
from datetime import datetime
from tqdm import tqdm
import tensorflow as tf
import numpy as np
import traceback
import time
import os
log = infolog.log
def add_embedding_stats(summary_writer, embedding_names, paths_to_meta, checkpoint_path):
# Create tensorboard projector
config = tf.contrib.tensorboard.plugins.projector.ProjectorConfig()
config.model_checkpoint_path = checkpoint_path
for embedding_name, path_to_meta in zip(embedding_names, paths_to_meta):
# Initialize config
embedding = config.embeddings.add()
# Specifiy the embedding variable and the metadata
embedding.tensor_name = embedding_name
embedding.metadata_path = path_to_meta
# Project the embeddings to space dimensions for visualization
tf.contrib.tensorboard.plugins.projector.visualize_embeddings(summary_writer, config)
def add_train_stats(model, hparams):
with tf.variable_scope("stats"):
for i in range(hparams.tacotron_num_gpus):
tf.summary.histogram("mel_outputs %d" % i, model.tower_mel_outputs[i])
tf.summary.histogram("mel_targets %d" % i, model.tower_mel_targets[i])
tf.summary.scalar("before_loss", model.before_loss)
tf.summary.scalar("after_loss", model.after_loss)
if hparams.predict_linear:
tf.summary.scalar("linear_loss", model.linear_loss)
for i in range(hparams.tacotron_num_gpus):
tf.summary.histogram("mel_outputs %d" % i, model.tower_linear_outputs[i])
tf.summary.histogram("mel_targets %d" % i, model.tower_linear_targets[i])
tf.summary.scalar("regularization_loss", model.regularization_loss)
tf.summary.scalar("stop_token_loss", model.stop_token_loss)
tf.summary.scalar("loss", model.loss)
tf.summary.scalar("learning_rate", model.learning_rate) # Control learning rate decay speed
if hparams.tacotron_teacher_forcing_mode == "scheduled":
tf.summary.scalar("teacher_forcing_ratio", model.ratio) # Control teacher forcing
# ratio decay when mode = "scheduled"
gradient_norms = [tf.norm(grad) for grad in model.gradients]
tf.summary.histogram("gradient_norm", gradient_norms)
tf.summary.scalar("max_gradient_norm", tf.reduce_max(gradient_norms)) # visualize
# gradients (in case of explosion)
return tf.summary.merge_all()
def add_eval_stats(summary_writer, step, linear_loss, before_loss, after_loss, stop_token_loss,
loss):
values = [
tf.Summary.Value(tag="Tacotron_eval_model/eval_stats/eval_before_loss",
simple_value=before_loss),
tf.Summary.Value(tag="Tacotron_eval_model/eval_stats/eval_after_loss",
simple_value=after_loss),
tf.Summary.Value(tag="Tacotron_eval_model/eval_stats/stop_token_loss",
simple_value=stop_token_loss),
tf.Summary.Value(tag="Tacotron_eval_model/eval_stats/eval_loss", simple_value=loss),
]
if linear_loss is not None:
values.append(tf.Summary.Value(tag="Tacotron_eval_model/eval_stats/eval_linear_loss",
simple_value=linear_loss))
test_summary = tf.Summary(value=values)
summary_writer.add_summary(test_summary, step)
def time_string():
return datetime.now().strftime("%Y-%m-%d %H:%M")
def model_train_mode(args, feeder, hparams, global_step):
with tf.variable_scope("Tacotron_model", reuse=tf.AUTO_REUSE):
model = create_model("Tacotron", hparams)
model.initialize(feeder.inputs, feeder.input_lengths, feeder.speaker_embeddings,
feeder.mel_targets, feeder.token_targets,
targets_lengths=feeder.targets_lengths, global_step=global_step,
is_training=True, split_infos=feeder.split_infos)
model.add_loss()
model.add_optimizer(global_step)
stats = add_train_stats(model, hparams)
return model, stats
def model_test_mode(args, feeder, hparams, global_step):
with tf.variable_scope("Tacotron_model", reuse=tf.AUTO_REUSE):
model = create_model("Tacotron", hparams)
model.initialize(feeder.eval_inputs, feeder.eval_input_lengths,
feeder.eval_speaker_embeddings, feeder.eval_mel_targets,
feeder.eval_token_targets, targets_lengths=feeder.eval_targets_lengths,
global_step=global_step, is_training=False, is_evaluating=True,
split_infos=feeder.eval_split_infos)
model.add_loss()
return model
def train(log_dir, args, hparams):
log_dir = str(log_dir)
save_dir = os.path.join(log_dir, "taco_pretrained")
plot_dir = os.path.join(log_dir, "plots")
wav_dir = os.path.join(log_dir, "wavs")
mel_dir = os.path.join(log_dir, "mel-spectrograms")
eval_dir = os.path.join(log_dir, "eval-dir")
eval_plot_dir = os.path.join(eval_dir, "plots")
eval_wav_dir = os.path.join(eval_dir, "wavs")
tensorboard_dir = os.path.join(log_dir, "tacotron_events")
meta_folder = os.path.join(log_dir, "metas")
os.makedirs(save_dir, exist_ok=True)
os.makedirs(plot_dir, exist_ok=True)
os.makedirs(wav_dir, exist_ok=True)
os.makedirs(mel_dir, exist_ok=True)
os.makedirs(eval_dir, exist_ok=True)
os.makedirs(eval_plot_dir, exist_ok=True)
os.makedirs(eval_wav_dir, exist_ok=True)
os.makedirs(tensorboard_dir, exist_ok=True)
os.makedirs(meta_folder, exist_ok=True)
checkpoint_fpath = os.path.join(save_dir, "tacotron_model.ckpt")
metadat_fpath = os.path.join(str(args.synthesizer_root), "train.txt")
log("Checkpoint path: {}".format(checkpoint_fpath))
log("Loading training data from: {}".format(metadat_fpath))
log("Using model: Tacotron")
log(hparams_debug_string())
# Start by setting a seed for repeatability
tf.set_random_seed(hparams.tacotron_random_seed)
# Set up data feeder
coord = tf.train.Coordinator()
with tf.variable_scope("datafeeder"):
feeder = Feeder(coord, metadat_fpath, hparams)
# Set up model:
global_step = tf.Variable(0, name="global_step", trainable=False)
model, stats = model_train_mode(args, feeder, hparams, global_step)
eval_model = model_test_mode(args, feeder, hparams, global_step)
# Embeddings metadata
char_embedding_meta = os.path.join(meta_folder, "CharacterEmbeddings.tsv")
if not os.path.isfile(char_embedding_meta):
with open(char_embedding_meta, "w", encoding="utf-8") as f:
for symbol in symbols:
if symbol == " ":
symbol = "\\s" # For visual purposes, swap space with \s
f.write("{}\n".format(symbol))
char_embedding_meta = char_embedding_meta.replace(log_dir, "..")
# Book keeping
step = 0
time_window = ValueWindow(100)
loss_window = ValueWindow(100)
saver = tf.train.Saver(max_to_keep=50)
log("Tacotron training set to a maximum of {} steps".format(args.tacotron_train_steps))
# Memory allocation on the GPU as needed
config = tf.ConfigProto()
# config.gpu_options.per_process_gpu_memory_fraction = 0.9
# config.gpu_options.allow_growth = True
config.allow_soft_placement = True
# Train
with tf.Session(config=config) as sess:
try:
summary_writer = tf.summary.FileWriter(tensorboard_dir, sess.graph)
sess.run(tf.global_variables_initializer())
# saved model restoring
if args.restore:
# Restore saved model if the user requested it, default = True
try:
checkpoint_state = tf.train.get_checkpoint_state(save_dir)
if checkpoint_state and checkpoint_state.model_checkpoint_path:
log("Loading checkpoint {}".format(checkpoint_state.model_checkpoint_path),
slack=True)
saver.restore(sess, checkpoint_state.model_checkpoint_path)
else:
log("No model to load at {}".format(save_dir), slack=True)
saver.save(sess, checkpoint_fpath, global_step=global_step)
except tf.errors.OutOfRangeError as e:
log("Cannot restore checkpoint: {}".format(e), slack=True)
else:
log("Starting new training!", slack=True)
saver.save(sess, checkpoint_fpath, global_step=global_step)
# initializing feeder
feeder.start_threads(sess)
# Training loop
while not coord.should_stop() and step < args.tacotron_train_steps:
start_time = time.time()
step, loss, opt = sess.run([global_step, model.loss, model.optimize])
time_window.append(time.time() - start_time)
loss_window.append(loss)
message = "Step {:7d} [{:.3f} sec/step, loss={:.5f}, avg_loss={:.5f}]".format(
step, time_window.average, loss, loss_window.average)
log(message, end="\r", slack=(step % args.checkpoint_interval == 0))
print(message, flush=True)
if loss > 100 or np.isnan(loss):
log("Loss exploded to {:.5f} at step {}".format(loss, step))
raise Exception("Loss exploded")
if step % args.summary_interval == 0:
log("\nWriting summary at step {}".format(step))
summary_writer.add_summary(sess.run(stats), step)
if step % args.eval_interval == 0:
# Run eval and save eval stats
log("\nRunning evaluation at step {}".format(step))
eval_losses = []
before_losses = []
after_losses = []
stop_token_losses = []
linear_losses = []
linear_loss = None
if hparams.predict_linear:
for i in tqdm(range(feeder.test_steps)):
eloss, before_loss, after_loss, stop_token_loss, linear_loss, mel_p, \
mel_t, t_len, align, lin_p, lin_t = sess.run(
[
eval_model.tower_loss[0], eval_model.tower_before_loss[0],
eval_model.tower_after_loss[0],
eval_model.tower_stop_token_loss[0],
eval_model.tower_linear_loss[0],
eval_model.tower_mel_outputs[0][0],
eval_model.tower_mel_targets[0][0],
eval_model.tower_targets_lengths[0][0],
eval_model.tower_alignments[0][0],
eval_model.tower_linear_outputs[0][0],
eval_model.tower_linear_targets[0][0],
])
eval_losses.append(eloss)
before_losses.append(before_loss)
after_losses.append(after_loss)
stop_token_losses.append(stop_token_loss)
linear_losses.append(linear_loss)
linear_loss = sum(linear_losses) / len(linear_losses)
wav = audio.inv_linear_spectrogram(lin_p.T, hparams)
audio.save_wav(wav, os.path.join(eval_wav_dir,
"step-{}-eval-wave-from-linear.wav".format(
step)), sr=hparams.sample_rate)
else:
for i in tqdm(range(feeder.test_steps)):
eloss, before_loss, after_loss, stop_token_loss, mel_p, mel_t, t_len,\
align = sess.run(
[
eval_model.tower_loss[0], eval_model.tower_before_loss[0],
eval_model.tower_after_loss[0],
eval_model.tower_stop_token_loss[0],
eval_model.tower_mel_outputs[0][0],
eval_model.tower_mel_targets[0][0],
eval_model.tower_targets_lengths[0][0],
eval_model.tower_alignments[0][0]
])
eval_losses.append(eloss)
before_losses.append(before_loss)
after_losses.append(after_loss)
stop_token_losses.append(stop_token_loss)
eval_loss = sum(eval_losses) / len(eval_losses)
before_loss = sum(before_losses) / len(before_losses)
after_loss = sum(after_losses) / len(after_losses)
stop_token_loss = sum(stop_token_losses) / len(stop_token_losses)
log("Saving eval log to {}..".format(eval_dir))
# Save some log to monitor model improvement on same unseen sequence
wav = audio.inv_mel_spectrogram(mel_p.T, hparams)
audio.save_wav(wav, os.path.join(eval_wav_dir,
"step-{}-eval-wave-from-mel.wav".format(step)),
sr=hparams.sample_rate)
plot.plot_alignment(align, os.path.join(eval_plot_dir,
"step-{}-eval-align.png".format(step)),
title="{}, {}, step={}, loss={:.5f}".format("Tacotron",
time_string(),
step,
eval_loss),
max_len=t_len // hparams.outputs_per_step)
plot.plot_spectrogram(mel_p, os.path.join(eval_plot_dir,
"step-{"
"}-eval-mel-spectrogram.png".format(
step)),
title="{}, {}, step={}, loss={:.5f}".format("Tacotron",
time_string(),
step,
eval_loss),
target_spectrogram=mel_t,
max_len=t_len)
if hparams.predict_linear:
plot.plot_spectrogram(lin_p, os.path.join(eval_plot_dir,
"step-{}-eval-linear-spectrogram.png".format(
step)),
title="{}, {}, step={}, loss={:.5f}".format(
"Tacotron", time_string(), step, eval_loss),
target_spectrogram=lin_t,
max_len=t_len, auto_aspect=True)
log("Eval loss for global step {}: {:.3f}".format(step, eval_loss))
log("Writing eval summary!")
add_eval_stats(summary_writer, step, linear_loss, before_loss, after_loss,
stop_token_loss, eval_loss)
if step % args.checkpoint_interval == 0 or step == args.tacotron_train_steps or \
step == 300:
# Save model and current global step
saver.save(sess, checkpoint_fpath, global_step=global_step)
log("\nSaving alignment, Mel-Spectrograms and griffin-lim inverted waveform..")
input_seq, mel_prediction, alignment, target, target_length = sess.run([
model.tower_inputs[0][0],
model.tower_mel_outputs[0][0],
model.tower_alignments[0][0],
model.tower_mel_targets[0][0],
model.tower_targets_lengths[0][0],
])
# save predicted mel spectrogram to disk (debug)
mel_filename = "mel-prediction-step-{}.npy".format(step)
np.save(os.path.join(mel_dir, mel_filename), mel_prediction.T,
allow_pickle=False)
# save griffin lim inverted wav for debug (mel -> wav)
wav = audio.inv_mel_spectrogram(mel_prediction.T, hparams)
audio.save_wav(wav,
os.path.join(wav_dir, "step-{}-wave-from-mel.wav".format(step)),
sr=hparams.sample_rate)
# save alignment plot to disk (control purposes)
plot.plot_alignment(alignment,
os.path.join(plot_dir, "step-{}-align.png".format(step)),
title="{}, {}, step={}, loss={:.5f}".format("Tacotron",
time_string(),
step, loss),
max_len=target_length // hparams.outputs_per_step)
# save real and predicted mel-spectrogram plot to disk (control purposes)
plot.plot_spectrogram(mel_prediction, os.path.join(plot_dir,
"step-{}-mel-spectrogram.png".format(
step)),
title="{}, {}, step={}, loss={:.5f}".format("Tacotron",
time_string(),
step, loss),
target_spectrogram=target,
max_len=target_length)
log("Input at step {}: {}".format(step, sequence_to_text(input_seq)))
if step % args.embedding_interval == 0 or step == args.tacotron_train_steps or step == 1:
# Get current checkpoint state
checkpoint_state = tf.train.get_checkpoint_state(save_dir)
# Update Projector
log("\nSaving Model Character Embeddings visualization..")
add_embedding_stats(summary_writer, [model.embedding_table.name],
[char_embedding_meta],
checkpoint_state.model_checkpoint_path)
log("Tacotron Character embeddings have been updated on tensorboard!")
log("Tacotron training complete after {} global steps!".format(
args.tacotron_train_steps), slack=True)
return save_dir
except Exception as e:
log("Exiting due to exception: {}".format(e), slack=True)
traceback.print_exc()
coord.request_stop(e)
def tacotron_train(args, log_dir, hparams):
return train(log_dir, args, hparams)
| [
"[email protected]"
] | |
8f0ea1ddcb842afbdfefab10bdc1a50be19625f3 | a140b45f9f16b74353d15ed573ea765b3fef046d | /algorithms/leet.0693.src.1.py | 04b92c007caace7e60b187ff08050dfd9eefba49 | [] | no_license | fish-ball/leetcode | 258d4b37f05560d914bcd29f7c54820deeadb33f | 3dfd8f73c65d43cc2766c20700a619141acb927b | refs/heads/master | 2023-05-28T18:32:43.638675 | 2023-05-20T04:25:23 | 2023-05-20T04:25:23 | 31,968,994 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | class Solution:
def hasAlternatingBits(self, n: int) -> bool:
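# Added note: the check walks the bits two at a time from the least-significant
# end. If the two lowest bits are equal (0b00 or 0b11) the number cannot be
# alternating; otherwise drop one bit and recurse on n >> 1.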
if n <= 2:
return True
if n & 3 in (3, 0):
return False
return self.hasAlternatingBits(n>>1)
| [
"[email protected]"
] | |
5fa3c9d9bb0d62ebb1c3fba841f5fde8baeb38ba | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /tDswMNY7X9h7tyTS4_22.py | cf345fc278bf3cb0fa4a9810e75fe0ead3c22a1a | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,116 | py | """
**Mubashir** was reading about [Pascal's
triangle](https://en.wikipedia.org/wiki/Pascal's_triangle) on Wikipedia.
In mathematics, Pascal's triangle is a triangular array of the binomial
coefficients that arises in probability theory, combinatorics, and algebra.

Formula for Pascal's triangle is given by:

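In plain text the binomial coefficient is C(n, k) = n! / (k! * (n - k)!),
where n is the row and k is the position of the term in that row.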
where `n` denotes a row of the triangle, and `k` is the position of a term in
the row.
Create a function which takes a number `n` and returns **n top rows** of
Pascal's Triangle flattened into a one-dimensional list.
### Examples
pascals_triangle(1) ➞ [1]
pascals_triangle(2) ➞ [1, 1, 1]
pascals_triangle(4) ➞ [1, 1, 1, 1, 2, 1, 1, 3, 3, 1]
### Notes
N/A
"""
import math
def pascals_triangle(n):
triangle = []
for row in range(n):
new_row = []
for k in range(row+1):
new_row.append(math.factorial(row)//(math.factorial(k)*math.factorial(row-k)))
triangle += new_row
return triangle
| [
"[email protected]"
] | |
dc0795e8588404f2f441e385ff7792de19d21846 | f0e0c1637f3b49fd914410361c3f1f3948462659 | /Python/Math/integers_come_in_all_sizes.py | 067bf933bb0a96f4a2758091ba2df74899b1be13 | [] | no_license | georggoetz/hackerrank-py | 399bcd0599f3c96d456725471708068f6c0fc4b1 | a8478670fcc65ca034df8017083269cb37ebf8b0 | refs/heads/master | 2021-09-18T07:47:32.224981 | 2018-07-11T09:24:49 | 2018-07-11T09:24:49 | 111,611,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | # http://www.hackerrank.com/contests/python-tutorial/challenges/python-integers-come-in-all-sizes
def solve(a, b, c, d):
"""
>>> solve(9, 29, 7, 27)
4710194409608608369201743232
"""
print(a ** b + c ** d)
if __name__ == "__main__":
a = int(input())
b = int(input())
c = int(input())
d = int(input())
print(solve(a, b, c, d))
| [
"[email protected]"
] | |
e70f14eb83da74ee83dd9e8854f5f79da094837c | fb783dda8d0ca34ad95d0c3f7ebbb6794a4b0467 | /ball.py | a2d479dde631ec996cf01de0feb2431d739b6875 | [] | no_license | Loai17/Y--Project | 967ec843ccc033fcdfdb59bd676adcfbea397446 | 155e9820bfa42c13e3dc7a82976146b1b86505ce | refs/heads/master | 2020-04-12T16:29:00.322136 | 2018-12-27T10:34:39 | 2018-12-27T10:34:39 | 162,613,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,014 | py | from turtle import *
class Ball(Turtle):
def __init__(self,x,y,dx,dy,r,color):
Turtle.__init__(self)
self.dx=dx
self.dy=dy
self.r=r
self.pu()
self.goto(x,y)
self.shape("circle")
self.shapesize(r*r)
self.color(color)
print(self.xcor())
print(self.ycor())
def move(self,screen_width,screen_height):
current_x = self.xcor()
new_x = current_x + self.dx
current_y = self.ycor()
new_y = current_y + self.dy
right_side_ball = new_x + self.r
left_side_ball = new_x - self.r
top_side_ball = new_y + self.r
bottom_side_ball = new_y - self.r
self.goto(new_x,new_y)
# Bounce off the walls using the ball's edges at its new position
if right_side_ball >= screen_width/2 or left_side_ball <= -screen_width/2:
self.dx = -self.dx
if top_side_ball >= screen_height/2 or bottom_side_ball <= -screen_height/2:
self.dy = -self.dy
| [
"[email protected]"
] | |
aa2a8c8f570a1c0f44928db8d59780469b207993 | 4f97122844fb8cbaccf9ed9fa300a27a290d1a37 | /1/111.py | 0a46d7eb3eec2fe044cfcd027f9ffbf0dbd17e63 | [] | no_license | cq146637/Advanced | 52d97ab0f8e7ec85e6d81692e92bad967af066e6 | 18380e5c51124ef1e6d243ae216280b49edc7001 | refs/heads/master | 2020-03-22T03:05:02.960444 | 2018-07-02T08:30:27 | 2018-07-02T08:30:27 | 139,151,714 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 834 | py | __author__ = 'Cq'
from collections import deque
import pickle
from random import randint
import os
result = randint(1,100)
print("result is ",result)
deque1 = deque([],5)
if os.path.isfile("save.data"):
deque1 = pickle.load(open("save.data"))
while True:
k = input("\nplease input your guess number: ")
if k.isdigit():
k = int(k)
elif k == 'h' or k == 'H':
print("your input history is ",list(deque1))
else:
continue
if k != result:
if k > result:
print("your number is greater than result\n")
else:
print("your number is less than result\n")
deque1.append(k)
else:
print("It was good result...")
deque1.append(k)
break
if k == 100:
break
f = open("save.data",'w')
pickle.dump(deque1, f) | [
"[email protected]"
] | |
4148ba0011b8da0c23ac14048f68d96a7d5a144f | ed7f2c5c235d1a3beca2ad78f8ef6eecd9afeea6 | /src/motors.py | d32ecdccd49447815025fb0116e63b984bb1da0e | [] | no_license | mvwicky/roboSim | 8f48bdfa291cfe6abc1c6a7294c7ab59161e3304 | c4d5d5f641ed976c71a591085019fcedc2ec3a5a | refs/heads/master | 2016-09-10T10:40:11.235120 | 2014-08-19T18:51:06 | 2014-08-19T18:51:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,741 | py | import os
import sys
import random
import math
import utilFunctions as utlF
class motor(object):
"""Generic motor object"""
def __init__(self,port,wheelRad=0,ticks=1000,tolerance=0,sprite=None):
"""port:which port the motor is in
wheelRad:the radius of the attached wheel
if not a drive motor: wheelRad=0
ticks:number of ticks per revolution
tolerance:
sprite:path to the sprite
"""
self.port=port
self.position=0
self.wheelRad=wheelRad
self.ticks=ticks
self.lastSpeed=0
self.currentSpeed=0
#self.distPerTick
self.context=None
if sprite==None:
pass
elif sprite!=None and type(sprite)!=utlF.sprite:
print("Invalid sprite")
elif sprite!=None and type(sprite)==utlF.sprite:
self.sprite=sprite
self.tolerance=tolerance
def update(self):
if self.context==None:
print("Context not defined")
return -1
else:
pass
def draw(self):
pass
def moveAtVelocity(self,velocity):
self.currentSpeed=velocity
return 0
def moveRelativePosition(self,velocity,delta):
pass
def moveToPosition(self,velocity,position):
pass
def moveAngleDeg(self,velocity,theta):
pass
def moveAngleRad(self,velocity,theta):
pass
def getPosition(self):
pass
def forward(self):
pass
def off(self):
pass
def zeroMotor(self):
"""Sets the motor position back to zero"""
pass
def mav(self,velocity):
return self.moveAtVelocity(velocity)
def mrp(self,velocity,position):
return self.moveRelativePosition(velocity,position)
def mtp(self,velocity,position):
return self.moveToPosition(velocity,position)
def mad(self,velocity,theta):
return self.moveAngleDeg(velocity,theta)
def mar(self,velocity,theta):
return self.moveAngleRad(velocity,theta) | [
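# Minimal usage sketch (hypothetical values, not part of the original module):
# m = motor(port=0, wheelRad=2.5, ticks=1000)
# m.mav(500) # shorthand for moveAtVelocity(500)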
"[email protected]"
] | |
2ea42ed75506284aeaca6832127c5ac1f95139ab | c23b4c6253ca5a0d42822dd0d28ffa752c11ebf5 | /exercises/c3ec2a04-cbca-459a-951f-f17cc34310c7/skeletons/8fd3c5ac-35d2-40cd-9d21-77a4a6671d7c/skeleton4.py3 | e2a36f52de4511c924c13798bc533064cd0477c9 | [] | no_license | josepaiva94/e57d8867-6234-41a6-b239-2cd978ad1e70 | 803e2eb1e2db23c64409bc72ff00c4463875a82f | aa270941dd8cf7b2e1ec8ac89445b1ab3a47f89d | refs/heads/master | 2023-01-07T10:49:56.871378 | 2020-11-16T11:28:14 | 2020-11-16T11:28:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py3 | if largest != root_index:
nums[root_index], nums[largest] = nums[largest], nums[root_index]
heapify(nums, heap_size, largest)
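# Added note: this exercise skeleton is the sift-down step of heapify -- swap the
# root with its larger child and recurse into that subtree. heapify(), nums,
# heap_size, root_index and largest are supplied by the surrounding exercise code.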
| [
"[email protected]"
] | |
b8b5d53aedd215e4c38db5455b764f4b73bb83b5 | 3420aba3622faf2d4aede984c656f68ad24a1f3c | /backend/personal_care_22730/settings.py | 230da7088fe365290e5935afd842c015a2ea9d7d | [] | no_license | crowdbotics-apps/personal-care-22730 | bb81af122e64cb58f6d52df31df328b6dfa4b25d | 066d2cd5e890057df054ea7c5b3b5f061e872371 | refs/heads/master | 2023-01-11T06:30:05.971088 | 2020-11-18T16:23:30 | 2020-11-18T16:23:30 | 313,990,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,048 | py | """
Django settings for personal_care_22730 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
"healthcare",
]
LOCAL_APPS = [
"home",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
"storages",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "personal_care_22730.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "personal_care_22730.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID
and AWS_SECRET_ACCESS_KEY
and AWS_STORAGE_BUCKET_NAME
and AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = "/mediafiles/"
MEDIA_ROOT = os.path.join(BASE_DIR, "mediafiles")
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning(
"You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails."
)
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
52bc7632cb2fb0f992aefdbbb894875a1607ea42 | 508321d683975b2339e5292202f3b7a51bfbe22d | /Userset.vim/ftplugin/python/CompletePack/maya/app/renderSetup/model/collection.py | 6f5c78e3c5ec754621968564b253a3121787e876 | [] | no_license | cundesi/vimSetSa | 4947d97bcfe89e27fd2727423112bb37aac402e2 | 0d3f9e5724b471ab21aa1199cc3b4676e30f8aab | refs/heads/master | 2020-03-28T05:54:44.721896 | 2018-08-31T07:23:41 | 2018-08-31T07:23:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54,254 | py | """Collection node class and utility functions.
This module provides the collection class, as well as utility
functions to operate on collections.
The collection owns its associated selector node: on collection
delete, the selector is deleted as well.
Conceptually, a collection fulfills four roles in render setup:
1) It is a container of overrides. If enabled, the collection will
apply all its enabled overrides on nodes it selects (see (2)).
2) It selects nodes onto which overrides will be applied. These nodes
can be DAG or DG nodes.
3) It is a container of child collections. Child collections always
select nodes based on their parent's selected nodes (see (2)).
4) It defines render layer membership. Members of a render layer can
only be DAG nodes. These are always a subset of the nodes selected
by the collection (see (2)). The members of the render layer are the
union of the top-level collection members; children collections can
exclude or re-include members. See RenderLayer.getMembers for more
details (including the effect of isolate select mode).
The application of overrides only obeys enabled / disabled status.
Render layer membership is determined from enabled / disabled, in
conjunction with isolate select."""
import maya
maya.utils.loadStringResourcesForModule(__name__)
import re
import maya.cmds as cmds
import maya.api.OpenMaya as OpenMaya
import maya.app.renderSetup.model.nodeList as nodeList
import maya.app.renderSetup.model.utils as utils
import maya.app.renderSetup.model.plug as plug
import maya.app.renderSetup.model.typeIDs as typeIDs
import maya.app.renderSetup.model.selector as selector
import maya.app.renderSetup.model.undo as undo
import maya.app.renderSetup.model.override as override
import maya.app.renderSetup.model.overrideUtils as overrideUtils
import maya.app.renderSetup.model.childNode as childNode
import maya.app.renderSetup.model.enabled as computeEnabled
import maya.app.renderSetup.model.namespace as namespace
import maya.app.renderSetup.model.renderSettings as renderSettings
import maya.app.renderSetup.model.rendererCallbacks as rendererCallbacks
import maya.app.renderSetup.model.traverse as traverse
from maya.app.renderSetup.model.renderLayerSwitchObservable import RenderLayerSwitchObservable
import maya.app.renderSetup.model.clipboardData as clipboardData
import maya.app.renderSetup.common.utils as commonUtils
import maya.app.renderSetup.common.profiler as profiler
import maya.app.renderSetup.common.guard as guard
import maya.app.renderSetup.model.context as context
import maya.app.renderSetup.model.jsonTranslatorUtils as jsonTranslatorUtils
import maya.app.renderSetup.model.jsonTranslatorGlobals as jsonTranslatorGlobals
# List all error messages below
kInvalidChildName = maya.stringTable['y_collection.kInvalidChildName' ]
kUnknownChild = maya.stringTable['y_collection.kUnknownChild' ]
kOverrideCreationFailed = maya.stringTable['y_collection.kOverrideCreationFailed' ]
kCollectionMissingSelector = maya.stringTable['y_collection.kCollectionMissingSelector' ]
kRendererMismatch = maya.stringTable['y_collection.kRendererMismatch' ]
kIncorrectChildType = maya.stringTable['y_collection.kIncorrectChildType' ]
# List of undo messages
kChildAttached = maya.stringTable['y_collection.kChildAttached' ]
kChildDetached = maya.stringTable['y_collection.kChildDetached' ]
kSet = maya.stringTable['y_collection.kSet' ]
def collections(c):
return c.getCollections()
class Collection(nodeList.ListBase, childNode.TreeOrderedItem,
childNode.ChildNode):
"""
Collection node.
A collection has an ordered list of children, and a selector to
determine nodes to which the children apply.
MAYA-59277:
- When we start implementing proper hierarchical collections we
need to decide on the relationship between parent and child
selectors. Do we always consider a parent collection to be the
union of its child collections, and propagate the selector
information upwards when a child collection is added or changed?
Or do we go the opposite direction and restrict the child collection
to use the intersection between its selector and its parent's selector?
- Light child collections always have a single light source member.
We should utilize this and create a specific selector for such
use cases for better performance.
"""
kTypeId = typeIDs.collection
kTypeName = 'collection'
# Attributes for collection as list of children.
#
# Connections to lowest-priority and highest-priority child
# on children linked list. The lowest-priority child
# is considered to be the front of the list, and the highest-priority
# child the back of the list.
childLowest = OpenMaya.MObject()
childHighest = OpenMaya.MObject()
# Connection to all children in the list.
children = OpenMaya.MObject()
# Attribute for message connection to selector node associated with the
# collection. This attribute is a destination, as only one selector
# can be associated with each collection.
aSelector = OpenMaya.MObject()
# Enabled behavior. See enabled module for documentation.
enabled = OpenMaya.MObject()
selfEnabled = OpenMaya.MObject()
parentEnabled = OpenMaya.MObject()
# isolateSelected flag as attribute
isolateSelected = OpenMaya.MObject()
# The number of isolate selected children in a collection's subtree.
numIsolatedChildren = OpenMaya.MObject()
# The number of isolate selected ancestors of this collection.
numIsolatedAncestors = OpenMaya.MObject()
# the SimpleSelector is the default.
kDefaultSelectorTypeName = selector.SimpleSelector.kTypeName
@staticmethod
def creator():
return Collection()
@staticmethod
def initializer():
# A collection is a render layer list element.
# inheritAttributesFrom() must be called before adding any other
# attributes.
Collection.inheritAttributesFrom(nodeList.ListItem.kTypeName)
# A collection is a list of children.
Collection.children = Collection.initListItems()
Collection.childLowest = utils.createDstMsgAttr(
'childLowest', 'cl')
Collection.addAttribute(Collection.childLowest)
Collection.childHighest = utils.createDstMsgAttr(
'childHighest', 'ch')
Collection.addAttribute(Collection.childHighest)
Collection.aSelector = utils.createDstMsgAttr('selector', 'sel')
Collection.addAttribute(Collection.aSelector)
# Set up enabled attribute.
computeEnabled.initializeAttributes(Collection)
# Add isolateSelected attribute
Collection.numIsolatedChildren = computeEnabled.createNumIsolatedChildrenAttribute()
Collection.addAttribute(Collection.numIsolatedChildren)
Collection.numIsolatedAncestors = computeEnabled.createHiddenIntAttribute(
"numIsolatedAncestors", "nia")
Collection.addAttribute(Collection.numIsolatedAncestors)
# Add isolateSelected attribute
numAttrFn = OpenMaya.MFnNumericAttribute()
Collection.isolateSelected = numAttrFn.create("isolateSelected", "is", OpenMaya.MFnNumericData.kBoolean, 0)
numAttrFn.storable = True
numAttrFn.keyable = False
numAttrFn.readable = True
numAttrFn.writable = True
numAttrFn.hidden = True
OpenMaya.MPxNode.addAttribute(Collection.isolateSelected)
Collection.attributeAffects(Collection.numIsolatedChildren, Collection.enabled)
Collection.attributeAffects(Collection.numIsolatedAncestors, Collection.enabled)
Collection.attributeAffects(Collection.isolateSelected, Collection.enabled)
def __init__(self):
super(Collection, self).__init__()
self._enabledDirty = False
self._callbackIds = []
def postConstructor(self):
# Call parent class postConstructor
super(Collection, self).postConstructor()
# Listen to changes in the enabled attribute.
self._callbackIds = computeEnabled.addChangeCallbacks(self)
def typeId(self):
return Collection.kTypeId
def typeName(self):
return Collection.kTypeName
def _createSelector(self, parent=None, selArgs=None):
"""Create a selector node, and attach it to the collection.
parent is an optional parent collection. This method must be
overridden by derived classes."""
self.setSelectorType(parent.getSelector().kTypeName if parent else \
self.kDefaultSelectorTypeName)
if parent:
self.getSelector().minimalClone(parent.getSelector())
def _createAndConnectSelector(self, typeName, selArgs=None):
"""Engine method for _createSelector.
selArgs is an optional dictionary passed to _createSelectorNode."""
newSelector = self._createSelectorNode(
typeName, self.name()+'Selector', selArgs)
cmds.connectAttr(newSelector + '.c', self.name() + '.selector')
def _createSelectorNode(self, typeName, selectorName, selArgs):
"""Create the selector node.
Can be overridden by derived classes."""
return cmds.createNode(typeName, name=selectorName, skipSelect=True)
def getSelectorType(self):
try: return self.getSelector().kTypeName
except: return None
def setSelectorType(self, typeName):
'''Sets the selector type of this collection.'''
if self.getSelectorType() == typeName:
return
with undo.NotifyCtxMgr("Set selector type", self._selectorChanged):
children = [child for child in self.getChildren() if isinstance(child, Collection)]
# need to disconnect all selector children
# otherwise they get deleted along with their parent selector
for child in children:
child.getSelector().setParent(None)
try: self._deleteSelector()
except: pass
self._createAndConnectSelector(typeName)
parent = self.parent()
selector = self.getSelector()
if isinstance(parent, Collection):
selector.setParent(parent.getSelector())
for child in children:
child.getSelector().setParent(selector)
def _deleteSelector(self):
selector = self.getSelector()
cmds.disconnectAttr(selector.name() + '.c', self.name() + '.selector')
utils.deleteNode(selector)
def _getInputAttr(self, attr, dataBlock=None):
return dataBlock.inputValue(attr) if dataBlock else OpenMaya.MPlug(self.thisMObject(), attr)
def _getSelfEnabledPlug(self):
return OpenMaya.MPlug(self.thisMObject(), Collection.selfEnabled)
def _getIsolatePlug(self):
return OpenMaya.MPlug(self.thisMObject(), Collection.isolateSelected)
def hasIsolatedAncestors(self, dataBlock=None):
return self._getInputAttr(self.numIsolatedAncestors, dataBlock).asInt() > 0
def hasIsolatedChildren(self, dataBlock=None):
return self._getInputAttr(self.numIsolatedChildren, dataBlock).asInt() > 0
def compute(self, plug, dataBlock):
if plug == self.enabled:
# We are enabled if:
#
# o The normal enabled computation is true (self enabled is true AND
# parent enabled is true).
#
# AND
#
# o We're in batch mode OR
# o No node is isolated OR
# o This node is isolated OR
# o This node has isolate selected children OR
# o This node has isolate selected ancestors.
#
value = computeEnabled.computeEnabled(self, dataBlock) and \
(cmds.about(batch=True) or \
dataBlock.inputValue(self.layerNumIsolatedChildren).asInt()==0 or \
self.isIsolateSelected(dataBlock) or \
self.hasIsolatedAncestors(dataBlock) or \
self.hasIsolatedChildren(dataBlock))
computeEnabled.setEnabledOutput(self, dataBlock, value)
def enabledChanged(self):
layer = self.getRenderLayer()
if layer:
layer._enabledChanged(self)
self.itemChanged()
def isEnabled(self, dataBlock=None):
return self._getInputAttr(self.enabled, dataBlock).asBool()
def isSelfEnabled(self, dataBlock=None):
return self._getInputAttr(self.selfEnabled, dataBlock).asBool()
def setSelfEnabled(self, value):
if value != self.isSelfEnabled():
# pulling isEnabled will trigger enabledChanged
# (no matter if enable output value has changed or not)
with undo.NotifyCtxMgr("Set Override Enabled",self.isEnabled):
cmds.setAttr(self.name()+".selfEnabled", 1 if value else 0)
@guard.state(computeEnabled.isPulling, computeEnabled.setPulling, True)
def pullEnabled(self):
# This will force pulling the enabled plug on overrides. It solves
# the problem of connection overrides not being applied / unapplied
# when not visible in the RenderSetup window; being visible in the
# RenderSetup window causes enabled to be pulled.
#
# Connection overrides are not part of the network; they are a
# procedure that must be run on enable change to modify the
# network. Therefore, the enabled plug is not pulled, contrary to
# value overrides that get inserted in the network, and thus we
# need to force the plug to be pulled.
# Two phase procedure to avoid DG cycle check warnings. First,
# pull on enabled output of connection overrides.
needsUpdate = set()
for n in traverse.depthFirst(self, traverse.nodeListChildren):
if isinstance(n, override.Override) and n.updateOnEnabledChanged():
# Call isEnabled to force computation of the enabled output.
n.isEnabled()
needsUpdate.add(n)
# Second, update the connection override. This will iterate over
# the connection override apply nodes, which query the connection
# override enabled state we've finished computing above. Had we
# done the override enabled computation and the update in the same
# call, we would have gotten a DG evaluation cycle (compute
# enabled, cause update, which queries enabled).
for o in needsUpdate:
o.update()
def getRenderLayer(self):
# For hierarchical collections the parent
# could be another collection, otherwise
# the parent is always the render layer
parent = self.parent()
if isinstance(parent, Collection):
return parent.getRenderLayer()
return parent
def isolateSelectedChanged(self):
layer = self.getRenderLayer()
if layer:
layer._isolateSelectedChanged(self)
def isIsolateSelected(self, dataBlock=None):
""" Get if isolate selected. Will always return False in batch mode """
return False if cmds.about(batch=True) else self._getInputAttr(self.isolateSelected, dataBlock).asBool()
def setIsolateSelected(self, val):
if val!=self.isIsolateSelected() and not cmds.about(batch=True):
with undo.NotifyCtxMgr(kSet % (self.name(), 'isolateSelected', val), self.isolateSelectedChanged):
# Use a command to support the undo mechanism
cmds.setAttr(self._getIsolatePlug().name(), val)
self._updateIsolateSelected(1 if val else -1)
def _findSubcollectionForType(self, typeName):
'''Finds the subcollection of this collection that will handle that typeName
or creates it and returns it if it doesn't exist.'''
filterType, customFilter = selector.Filters.getFiltersFor(typeName)
def predicate(child):
if not isinstance(child, Collection):
return False
sel = child.getSelector()
return sel.kTypeName == selector.SimpleSelector.kTypeName and \
sel.getPattern() == "*" and \
len(sel.staticSelection) == 0 and \
sel.getFilterType() == filterType and \
(filterType != selector.Filters.kCustom or sel.getCustomFilterValue() == customFilter)
def creator():
name = self.name() + "_" + selector.Filters.names.get(filterType, customFilter)
col = create(name)
col.setSelectorType(selector.SimpleSelector.kTypeName)
sel = col.getSelector()
sel.setPattern('*')
sel.setFilterType(filterType)
sel.setCustomFilterValue(customFilter)
return col
return self.findChild(predicate, creator)
@undo.chunk('Create and append an override')
def createOverride(self, overrideName, overrideType):
""" Add an override to the Collection using its node type id or type name."""
# Note: No need to propagate the change notification
# as an empty override does not affect the collection
over = override.create(overrideName, overrideType)
if not over:
raise Exception(kOverrideCreationFailed % overrideName)
# special handle for shader override as they apply to shading engines
# => create subcollection of shading engines if we're in a dag only collection
from maya.app.renderSetup.model.connectionOverride import ShaderOverride
if over.typeId() != typeIDs.shaderOverride or \
self.getSelector().acceptsType('shadingEngine'):
self.appendChild(over)
else:
self._findSubcollectionForType('shadingEngine').appendChild(over)
return over
def _getOverrideType(self, plg, overrideType):
'''Returns the override type that should be created for the given
plg in the given collection (self). Overrides that can't be relative will become absolute.'''
return plg.overrideType(overrideType)
@undo.chunk('Create and append an override')
def _createOverride(self, plg, overrideType):
over = override.create(plg.attributeName, self._getOverrideType(plg, overrideType))
if not over:
raise Exception(kOverrideCreationFailed % plg.attributeName)
over.finalize(plg.name)
typeName = OpenMaya.MFnDependencyNode(plg.node()).typeName
collection = self if self.getSelector().acceptsType(typeName) else \
self._findSubcollectionForType(typeName)
collection.appendChild(over)
return over
@undo.chunk('Create and append an absolute override')
def createAbsoluteOverride(self, nodeName, attrName):
""" Add an absolute override to a collection """
return self._createOverride(plug.Plug(nodeName,attrName), typeIDs.absOverride)
@undo.chunk('Create and append a relative override')
def createRelativeOverride(self, nodeName, attrName):
""" Add a relative override to a collection """
return self._createOverride(plug.Plug(nodeName,attrName), typeIDs.relOverride)
@undo.chunk('Create and append a child collection')
def _createCollection(self, collectionName, typeName):
col = create(collectionName, typeName, parent=self)
self.appendChild(col)
return col
def createCollection(self, collectionName):
""" Add a child collection to the Collection. """
return self._createCollection(collectionName, Collection.kTypeName)
def _childAttached(self, child):
'''Perform work to attach a child.
The child has already been added to collection's list when this
method is called.'''
with undo.NotifyCtxMgr(kChildAttached % (self.name(), child.name()), self.itemChanged):
# Once inserted, hook up the child's parentEnabled input to our
# enabled output. Use existing command for undo / redo purposes.
cmds.connectAttr(self.name() + '.enabled',
child.name() + '.parentEnabled')
if isinstance(child, Collection):
child.getSelector().setParent(self.getSelector())
child._attach(self.getRenderLayer())
layer = self.getRenderLayer()
if layer:
layer.descendantAdded(child)
def _detachChild(self, child):
'''Perform work to detach a child.
The child has not yet been removed from the collection's list when
this method is called.'''
with undo.NotifyCtxMgr(kChildDetached % (self.name(), child.name()), self.itemChanged):
# Disconnect the child's parentEnabled input from our enabled
# output. Use existing command for undo / redo purposes.
childParentEnabled = child.name() + '.parentEnabled'
cmds.disconnectAttr(self.name() + '.enabled', childParentEnabled)
# Child parentEnabled will retain its last value, so set it
# to True in case the collection gets parented to the render layer.
cmds.setAttr(childParentEnabled, 1)
if isinstance(child, Collection):
child.getSelector().setParent(None)
child._detach(self.getRenderLayer())
def _attach(self, layer):
"""Attach this collection."""
self._connectLayerIsolatedChildren(layer)
# Number of isolated children doesn't change when we attach.
# Update isolated children of our ancestors.
self._updateAncestorsIsolatedChildren(
self.getNumIsolatedChildren(includeSelf=True))
# Update isolated ancestors of ourselves and our children.
self._updateChildrenIsolatedAncestors(
self.getNumIsolatedAncestors(), includeSelf=True)
def _detach(self, layer):
"""Detach this collection."""
self._disconnectLayerIsolatedChildren(layer)
# Number of isolated children doesn't change when we detach.
# Update isolated children of our ancestors.
self._updateAncestorsIsolatedChildren(
-self.getNumIsolatedChildren(includeSelf=True))
# Update isolated ancestors of ourselves and our children.
self._updateChildrenIsolatedAncestors(
-self.getNumIsolatedAncestors(), includeSelf=True)
@undo.chunk('Append to collection')
def appendChild(self, child):
""" Add a child as the highest-priority child."""
if child.typeId()==RenderSettingsCollection.kTypeId \
or child.typeId()==LightsCollection.kTypeId:
raise RuntimeError(kIncorrectChildType % child.typeName())
nodeList.append(self, child)
self._childAttached(child)
@undo.chunk('Attach to collection')
def attachChild(self, pos, child):
""" Attach a child at a specific position. """
if child.typeId()==RenderSettingsCollection.kTypeId \
or child.typeId()==LightsCollection.kTypeId:
raise RuntimeError(kIncorrectChildType % child.typeName())
nodeList.insert(self, pos, child)
self._childAttached(child)
@undo.chunk('Detach from collection')
def detachChild(self, child):
""" Detach a child whatever its position. """
unapply(child) # NoOp if not applied; otherwise commands are used
# Must perform detach operations before removing from list,
# otherwise parenting information is gone.
self._detachChild(child)
nodeList.remove(self, child)
def getChildren(self, cls=childNode.ChildNode):
""" Get the list of all children.
Optionally only the children matching the given class. """
return list(nodeList.forwardListNodeClassGenerator(self, cls))
def hasChildren(self):
return self.findChild(lambda child: True) is not None
def getCollections(self):
return self.getChildren(cls=Collection)
def getCollectionByName(self, collectionName, nested=False):
for collection in nodeList.forwardListNodeClassGenerator(self, cls=Collection):
if collection.name() == collectionName:
return collection
elif nested:
collection2 = collection.getCollectionByName(collectionName, True)
if collection2:
return collection2
return None
def findChild(self, predicate, creator=None):
'''Find the child of this collection satisfying the predicate function or creates it
with the creator function if not found and a creator function is specified.
Function signatures are:
predicate(childNode): returns boolean.
creator(void) : returns the created node.'''
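# Sketch (added): self.findChild(lambda c: c.name() == 'mattes', lambda: create('mattes'))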
for child in nodeList.forwardListNodeClassGenerator(self, childNode.ChildNode):
if predicate(child):
return child
if not creator:
return None
child = creator()
self.appendChild(child)
return child
def getChild(self, childName, cls=childNode.ChildNode):
""" Look for an existing child by name and optionally class.
@type childName: string
@param childName: Name of child to look for
@type cls: class name
@param cls: Class name for the type of class to look for
@rtype: Child model instance
@return: Found instance or throw an exception
"""
if not childName:
raise Exception(kInvalidChildName)
for child in nodeList.forwardListNodeClassGenerator(self, cls):
if child.name() == childName:
return child
raise Exception(kUnknownChild % (childName, self.name()))
def isAbstractClass(self):
# Override method inherited from base class: not an abstract class.
return False
def getSelector(self):
"""Return the selector user node for this collection."""
selector = utils.getSrcUserNode(
utils.findPlug(self, Collection.aSelector))
if (selector is None):
raise Exception(kCollectionMissingSelector % self.name())
return selector
@context.applyCollection
def apply(self):
""" Apply all children in this collection. """
with profiler.ProfilerMgr('Collection::apply'):
# Apply all our children to the selection
for child in nodeList.forwardListGenerator(self):
child.apply()
# UI Feedback (progressBar)
RenderLayerSwitchObservable.getInstance().notifyRenderLayerSwitchObserver()
@context.applyCollection
def postApply(self):
'''Post applies all children in this collection. This function may be called to apply a collection (with contained overrides)
after the layer was set visible. It allows inserting new overrides in the currently visible layer
without the need to toggle visibility.'''
with profiler.ProfilerMgr('Collection::postApply'):
# Post apply all our children
for child in nodeList.forwardListGenerator(self):
child.postApply()
@context.unapplyCollection
def unapply(self):
"""Unapply all children in this collection."""
with profiler.ProfilerMgr('Collection::unapply'):
for child in nodeList.reverseListGenerator(self):
child.unapply()
# UI Feedback (progressBar)
RenderLayerSwitchObservable.getInstance().notifyRenderLayerSwitchObserver()
def getOverrides(self):
return self.getChildren(cls=override.Override)
# Collection interface as list of children.
# These methods implement the list requirements for the nodeList module.
#
# The list front and back are destination plugs connected to the child
# node's message plug (which is a source).
def _getFrontAttr(self):
return Collection.childLowest
def _getBackAttr(self):
return Collection.childHighest
def _getListItemsAttr(self):
return Collection.children
def _preChildDelete(self, child):
# Private interface for child to inform its parent that it is
# about to be deleted. Remove the child from our list.
self.detachChild(child)
def _selectedNodesChanged(self):
""" Ownership of this collection or one of its children changed """
layer = self.getRenderLayer()
if layer:
layer._selectedNodesChanged(self)
self.itemChanged()
def _selectorChanged(self):
"""Selector of this collection changed.
Identical to _selectedNodesChanged(), except that the itemChanged()
notification is given with selectorChanged=True."""
layer = self.getRenderLayer()
if layer:
layer._selectedNodesChanged(self)
self.itemChanged(selectorChanged=True)
def _refreshRendering(self):
''' Some changes impose to refresh the rendering for the visible layer only. '''
parent = self.parent()
if parent:
parent._refreshRendering()
def getLayerNumIsolatedChildren(self):
return OpenMaya.MPlug(
self.thisMObject(), Collection.layerNumIsolatedChildren).asInt()
def _getNumIsolatedChildrenPlug(self):
return OpenMaya.MPlug(self.thisMObject(), Collection.numIsolatedChildren)
def getNumIsolatedChildren(self, includeSelf=False):
nic = self._getNumIsolatedChildrenPlug().asInt()
if includeSelf and self.isIsolateSelected():
nic += 1
return nic
def _getNumIsolatedAncestorsPlug(self):
return OpenMaya.MPlug(self.thisMObject(), Collection.numIsolatedAncestors)
def getNumIsolatedAncestors(self):
return self._getNumIsolatedAncestorsPlug().asInt()
# See comments in RenderLayer._updateIsolateSelected.
def _updateNumIsolatedChildren(self, val):
# Use a command to support the undo mechanism
if val != 0:
newVal = self.getNumIsolatedChildren() + val
cmds.setAttr(self._getNumIsolatedChildrenPlug().name(), newVal)
def _updateNumIsolatedAncestors(self, val):
# Use a command to support the undo mechanism
if val != 0:
newVal = self.getNumIsolatedAncestors() + val
cmds.setAttr(self._getNumIsolatedAncestorsPlug().name(), newVal)
def _updateIsolateSelected(self, val):
self._updateAncestorsIsolatedChildren(val)
self._updateChildrenIsolatedAncestors(val)
def _updateAncestorsIsolatedChildren(self, val):
layer = self.getRenderLayer()
if layer:
layer._updateIsolateSelected(val)
for c in self.ancestorCollections():
c._updateNumIsolatedChildren(val)
def _updateChildrenIsolatedAncestors(self, val, includeSelf=False):
# Tell descendants there has been a change in their ancestors'
# isolate select.
for c in traverse.depthFirst(self, collections):
if c is self and not includeSelf:
continue
c._updateNumIsolatedAncestors(val)
def _connectLayerIsolatedChildren(self, layer):
# Connect subtree to layer's isolated children attribute.
if layer:
for c in traverse.depthFirst(self, collections):
c._connectSelfLayerIsolatedChildren(layer)
def _disconnectLayerIsolatedChildren(self, layer):
# Disconnect subtree from layer's isolated children attribute.
if layer:
for c in traverse.depthFirst(self, collections):
c._disconnectSelfLayerIsolatedChildren(layer)
def _connectSelfLayerIsolatedChildren(self, layer):
if layer:
# Use existing command for undo / redo purposes.
cmds.connectAttr(layer.name() + '.numIsolatedChildren',
self.name() + '.parentNumIsolatedChildren')
def _disconnectSelfLayerIsolatedChildren(self, layer):
if layer:
# Use existing command for undo / redo purposes.
cmds.disconnectAttr(layer.name() + '.numIsolatedChildren',
self.name() + '.parentNumIsolatedChildren')
def _importChild(self, childName, nodeType, selArgs=None):
name = cmds.createNode(nodeType, name=childName, skipSelect=True)
child = utils.nameToUserNode(name)
if isinstance(child, Collection):
child._createSelector(None, selArgs)
self.appendChild(child)
return child
def activate(self):
'''
Called when this list item is inserted into the list.
Override this method to do any scene specific initialization.
'''
if len(self._callbackIds) == 0:
self._callbackIds = computeEnabled.addChangeCallbacks(self)
self.getSelector().activate()
def deactivate(self):
'''
Called when this list item is removed from the list.
Override this method to do any scene specific teardown.
'''
# Remove all callbacks.
OpenMaya.MMessage.removeCallbacks(self._callbackIds)
self._callbackIds = []
self.getSelector().deactivate()
def _encodeProperties(self, dict):
super(Collection, self)._encodeProperties(dict)
dict[self._getSelfEnabledPlug().partialName(useLongNames=True)] = self.isEnabled()
dict[self._getIsolatePlug().partialName(useLongNames=True)] = self.isIsolateSelected()
if self.getSelectorType() == selector.BasicSelector.kTypeName: # backward comp with 2016 R2
selectorDict = dict
else:
selectorDict = {}
dict[jsonTranslatorGlobals.SELECTOR_ATTRIBUTE_NAME] = { self.getSelectorType() : selectorDict }
self.getSelector()._encodeProperties(selectorDict)
dict[jsonTranslatorGlobals.CHILDREN_ATTRIBUTE_NAME] = jsonTranslatorUtils.encodeObjectArray(self.getChildren())
def _decodeChildren(self, children, mergeType, prependToName):
jsonTranslatorUtils.decodeObjectArray(children,
jsonTranslatorUtils.MergePolicy(self.getChild,
self._importChild,
mergeType,
prependToName))
def _decodeProperties(self, dict, mergeType, prependToName):
super(Collection, self)._decodeProperties(dict, mergeType, prependToName)
if self._getSelfEnabledPlug().partialName(useLongNames=True) in dict:
self.setSelfEnabled(dict[self._getSelfEnabledPlug().partialName(useLongNames=True)])
if self._getIsolatePlug().partialName(useLongNames=True) in dict:
self.setIsolateSelected(dict[self._getIsolatePlug().partialName(useLongNames=True)])
if jsonTranslatorGlobals.SELECTOR_ATTRIBUTE_NAME not in dict: # backward comp with 2016 R2
self.setSelectorType(selector.BasicSelector.kTypeName)
selectorProperties = dict
else:
selectorType = dict[jsonTranslatorGlobals.SELECTOR_ATTRIBUTE_NAME].keys()[0]
if self.getSelectorType() != selectorType:
self.setSelectorType(selectorType)
selectorProperties = dict[jsonTranslatorGlobals.SELECTOR_ATTRIBUTE_NAME].values()[0]
self.getSelector()._decodeProperties(selectorProperties)
if jsonTranslatorGlobals.CHILDREN_ATTRIBUTE_NAME in dict:
self._decodeChildren(dict[jsonTranslatorGlobals.CHILDREN_ATTRIBUTE_NAME],
mergeType,
prependToName)
def acceptImport(self):
super(Collection, self).acceptImport()
for child in self.getChildren():
child.acceptImport()
def isSelfAcceptableChild(self):
"""Overridden instances that return False, prevent copy/paste of the collection type to itself."""
return True
def isAcceptableChild(self, modelOrData):
""" Check if the model could be a child"""
if isinstance(modelOrData, clipboardData.ClipboardData):
isOverride = modelOrData.typeName() in _overrideTypes
parentTypeName = modelOrData.parentTypeName
else:
isOverride = isinstance(modelOrData, override.Override)
parentTypeName = modelOrData.parent().typeName()
return isOverride and parentTypeName == self.typeName() or (modelOrData.typeName() == self.typeName() and self.isSelfAcceptableChild())
def isTopLevel(self):
"""Is the collection's parent a render layer?"""
# Don't have access to renderLayer.RenderLayer, type check on
# Collection instead.
return not isinstance(self.parent(), Collection)
def ancestorCollections(self):
"""Return this collection's ancestors.
Neither the collection itself, nor the render layer, are included
in the ancestors. Therefore, a top-level collection has no
ancestors."""
parent = self.parent()
while isinstance(parent, Collection):
yield parent
parent = parent.parent()
class LightsCollection(Collection):
"""
LightsCollection node.
A collection node specific for grouping light sources
and overrides on those light sources.
This collection should have all light sources as member by default. All nodes
matching the light classification should be returned by the selector
on this collection.
"""
kTypeId = typeIDs.lightsCollection
kTypeName = 'lightsCollection'
@staticmethod
def creator():
return LightsCollection()
@staticmethod
def initializer():
# Inherit all attributes from parent class
LightsCollection.inheritAttributesFrom(Collection.kTypeName)
def __init__(self):
super(LightsCollection, self).__init__()
def typeId(self):
return LightsCollection.kTypeId
def typeName(self):
return LightsCollection.kTypeName
def _createSelector(self, parent=None, selArgs=None):
self._createAndConnectSelector(selector.SimpleSelector.kTypeName)
# Make it select all light sources in the scene
self.getSelector().setPattern("*")
self.getSelector().setFilterType(selector.Filters.kLights)
def setSelectorType(self, typeName):
raise RuntimeError('Illegal call to derived class method.')
def createCollection(self, collectionName):
""" Add a lights child collection to the Collection. """
return self._createCollection(collectionName, LightsChildCollection.kTypeName)
def isAcceptableChild(self, modelOrData):
"""Check if the argument can be a child of this collection.
We want to prevent copying LightsChildCollections in the same
LightsCollection at the expense of not being able to copy
LightsChildCollections between different LightsCollections.
"""
return False
def compute(self, plug, dataBlock):
computeEnabled.compute(self, plug, dataBlock)
class LightsChildCollection(Collection):
"""
LightsChildCollection node.
A child collection node specific for one single light source
and overrides on this light source.
"""
kTypeId = typeIDs.lightsChildCollection
kTypeName = 'lightsChildCollection'
@staticmethod
def creator():
return LightsChildCollection()
@staticmethod
def initializer():
# Inherit all attributes from parent class
LightsChildCollection.inheritAttributesFrom(Collection.kTypeName)
def __init__(self):
super(LightsChildCollection, self).__init__()
def typeId(self):
return LightsChildCollection.kTypeId
def typeName(self):
return LightsChildCollection.kTypeName
def _createSelector(self, parent=None, selArgs=None):
self._createAndConnectSelector(selector.SimpleSelector.kTypeName)
# Only accepts light sources.
self.getSelector().setFilterType(selector.Filters.kLights)
def setSelectorType(self, typeName):
raise RuntimeError('Illegal call to derived class method.')
def compute(self, plug, dataBlock):
computeEnabled.compute(self, plug, dataBlock)
def isAcceptableChild(self, modelOrData):
"""Check if the argument can be a child of this collection.
Pasting is prevented because the Light Editor considers only the
first override in the LightsChildCollection. Additionally dragging
is prevented between overrides in LightsChildCollections to prevent
dragging between incompatible LightsChildCollection types
        (i.e. point light, spot light)
"""
return False
class RenderSettingsCollection(Collection):
"""
Render Settings Collection node.
This collection has an ordered list of children, and a static & const selector
to determine nodes to which the children apply. The list of nodes is based
on the selected renderer at the time of creation.
MAYA-66757:
- A base collection will be needed to factorize commonalities and segregate differences.
- A static selector is needed which could be the existing static selection or an object set.
- The name is read-only.
- The selector content is read-only
- The render name should be part of the collection so that the settings are clearly linked
to the used renderer, or linked using a plug
"""
kTypeId = typeIDs.renderSettingsCollection
kTypeName = 'renderSettingsCollection'
# Type of selector created by this collection
kSelectorTypeName = selector.SimpleSelector.kTypeName
@staticmethod
def creator():
return RenderSettingsCollection()
@staticmethod
def initializer():
# A render settings collection is a render layer list element.
# inheritAttributesFrom() must be called before adding any other attributes.
RenderSettingsCollection.inheritAttributesFrom(Collection.kTypeName)
def __init__(self):
super(RenderSettingsCollection, self).__init__()
@staticmethod
def containsNodeName(nodeName):
return nodeName in renderSettings.getDefaultNodes()
def _createSelector(self, parent=None, selArgs=None):
self._createAndConnectSelector(self.kSelectorTypeName)
# Set the default nodes as static selection
# Note: Some renderers could return nodes which do not exist yet.
self.getSelector().staticSelection.setWithoutExistenceCheck(renderSettings.getDefaultNodes())
self.getSelector().setFilterType(selector.Filters.kAll)
def setSelectorType(self, typeName):
raise RuntimeError('Illegal call to derived class method.')
def typeId(self):
return RenderSettingsCollection.kTypeId
def typeName(self):
return RenderSettingsCollection.kTypeName
def appendChild(self, child):
if isinstance(child, Collection):
raise RuntimeError(kIncorrectChildType % child.typeName())
else:
super(RenderSettingsCollection, self).appendChild(child)
def attachChild(self, pos, child):
if isinstance(child, Collection):
raise RuntimeError(kIncorrectChildType % child.typeName())
else:
super(RenderSettingsCollection, self).attachChild(pos, child)
def _createCollection(self, collectionName, typeName):
raise RuntimeError(kIncorrectChildType % typeName)
def compute(self, plug, dataBlock):
computeEnabled.compute(self, plug, dataBlock)
def isAcceptableChild(self, modelOrData):
"""Check if the argument can be a child of this collection.
No collection of any kind can be a child of this collection."""
return modelOrData.typeName() not in _collectionTypes and \
super(RenderSettingsCollection, self).isAcceptableChild(modelOrData)
def _getOverrideType(self, plg, overrideType):
overrideType = super(RenderSettingsCollection, self)._getOverrideType(plg, overrideType)
return typeIDs.absUniqueOverride if overrideType == typeIDs.absOverride else typeIDs.relUniqueOverride
class AOVCollection(Collection):
"""
AOV (arbitrary output variable) parent collection node.
"""
kTypeId = typeIDs.aovCollection
kTypeName = 'aovCollection'
@staticmethod
def creator():
return AOVCollection()
@staticmethod
def initializer():
# An AOV collection is a render layer list element.
# inheritAttributesFrom() must be called before adding any other attributes.
AOVCollection.inheritAttributesFrom(Collection.kTypeName)
def __init__(self):
super(AOVCollection, self).__init__()
@staticmethod
def containsNodeName(nodeName):
callbacks = rendererCallbacks.getCallbacks(rendererCallbacks.CALLBACKS_TYPE_AOVS)
try:
callbacks.getAOVName(nodeName)
return True
except:
return False
def _createSelector(self, parent=None, selArgs=None):
# Selector type name argument is ignored.
self._createAndConnectSelector('')
def _createSelectorNode(self, typeName, selectorName, selArgs):
# Ignore the argument selector type name: get the AOV collection
# selector from the AOV renderer callback.
callbacks = rendererCallbacks.getCallbacks(rendererCallbacks.CALLBACKS_TYPE_AOVS)
return callbacks.getCollectionSelector(selectorName)
def setSelectorType(self, typeName):
raise RuntimeError('Illegal call to derived class method.')
def typeId(self):
return AOVCollection.kTypeId
def typeName(self):
return AOVCollection.kTypeName
def appendChild(self, child):
if isinstance(child, Collection) and not isinstance(child, AOVChildCollection):
raise RuntimeError(kIncorrectChildType % child.typeName())
else:
super(AOVCollection, self).appendChild(child)
def attachChild(self, pos, child):
if isinstance(child, Collection) and not isinstance(child, AOVChildCollection):
raise RuntimeError(kIncorrectChildType % child.typeName())
else:
super(AOVCollection, self).attachChild(pos, child)
# This should never be called, as AOVCollections are created in renderLayer.py in aovCollectionInstance()
def _createCollection(self, collectionName, typeName):
raise RuntimeError(kIncorrectChildType % typeName)
def compute(self, plug, dataBlock):
computeEnabled.compute(self, plug, dataBlock)
class AOVChildCollection(Collection):
"""
AOV (arbitrary output variable) Child Collection node.
"""
kTypeId = typeIDs.aovChildCollection
kTypeName = 'aovChildCollection'
@staticmethod
def creator():
return AOVChildCollection()
@staticmethod
def initializer():
# Inherit all attributes from parent class
AOVChildCollection.inheritAttributesFrom(Collection.kTypeName)
def __init__(self):
super(AOVChildCollection, self).__init__()
def containsNodeName(self, nodeName):
return nodeName in self.getSelector().getAbsoluteNames()
def typeId(self):
return AOVChildCollection.kTypeId
def typeName(self):
return AOVChildCollection.kTypeName
def _createSelector(self, parent=None, selArgs=None):
# Selector type name argument is ignored.
self._createAndConnectSelector('', selArgs)
def _createSelectorNode(self, typeName, selectorName, selArgs):
# Ignore the argument selector type name: get the AOV child
# collection selector from the AOV renderer callback.
#
# selArgs is a dictionary for selector argument
# construction. It must contain a value for 'aovName'.
callbacks = rendererCallbacks.getCallbacks(rendererCallbacks.CALLBACKS_TYPE_AOVS)
return callbacks.getChildCollectionSelector(selectorName, selArgs['aovName'])
def setSelectorType(self, typeName):
raise RuntimeError('Illegal call to derived class method.')
def compute(self, plug, dataBlock):
computeEnabled.compute(self, plug, dataBlock)
def isSelfAcceptableChild(self):
"""This code prevents copy/paste of AOV child collections to themselves/other AOV child collections."""
return False
@undo.chunk('Create collection')
@namespace.root
def create(name, nodeType=Collection.kTypeName, parent=None, **selArgs):
""" Create a collection.
Returns the MPxNode object corresponding to the created
collection node. A RuntimeError is raised in case of error.
The selArgs keyword arguments are passed along to the selector creation.
This function is undoable.
"""
# collection names should never contain namespace delimiter or other invalid characters
# collections belong to current namespace (i.e. root)
name = re.sub(r'[^a-zA-Z0-9_]', '_', name)
if isinstance(nodeType, basestring):
typeName = nodeType
else:
typeName = cmds.objectType(typeFromTag=nodeType.id())
# To avoid writing a command to implement collection creation,
# re-use existing name-based commands for undo / redo purposes, since
# collection creation is not performance-critical. If the name
# flag is specified, it cannot be an empty string.
returnCollectionName = cmds.createNode(
typeName, name=name, skipSelect=True) if name else \
cmds.createNode(typeName, skipSelect=True)
collection = utils.nameToUserNode(returnCollectionName)
collection._createSelector(parent=parent, selArgs=selArgs)
return collection
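# Usage sketch (illustrative names, assuming the render setup plugin is loaded):
#     col = create("myCollection")    # returns the Collection MPxNode wrapper
#     col.setIsolateSelected(True)    # toggle the flags exposed by the class above
#     delete(col)                     # removes the collection, its overrides and children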
@undo.chunk('Delete collection')
def delete(collection):
"""Remove the argument collection from the scene.
All overrides and sub-collections in the collection are removed."""
# Inform our parent (if any) of upcoming delete.
# This will remove the collection from its parent,
# and will trigger deactivation of the collection
# causing it and the selector to stop listening to scene and attribute changes.
# Need to call _preChildDelete before removing children, otherwise we lose the parenting information
# to the children which may be used by the parent (ex: renderLayers use that information
# to determine if they need to be refreshed).
parent = collection.parent()
if parent:
parent._preChildDelete(collection)
# Delete the children.
for child in collection.getChildren():
if isinstance(child, Collection):
delete(child)
else:
override.delete(child)
# Deleting the selector means unhooking the selector node
# from the collection and removing it from the scene.
collection._deleteSelector()
# Deleting the node will remove it from the scene.
utils.deleteNode(collection)
@undo.chunk('Unapply a collection')
def unapply(collection):
''' Command to unapply a collection '''
if isinstance(collection, Collection):
for c in collection.getChildren():
unapply(c)
else:
# End of recursion so unapply the override
# using a command
override.UnapplyCmd.execute(collection)
def getAllCollectionClasses():
""" Returns the list of Collection subclasses """
return commonUtils.getSubClasses(Collection)
_collectionTypes = { c.kTypeName for c in getAllCollectionClasses() }
_overrideTypes = { o.kTypeName for o in overrideUtils.getAllOverrideClasses() }
# ===========================================================================
# Copyright 2016 Autodesk, Inc. All rights reserved.
#
# Use of this software is subject to the terms of the Autodesk license
# agreement provided at the time of installation or download, or which
# otherwise accompanies this software in either electronic or hard copy form.
# ===========================================================================
| [
"[email protected]"
] | |
56c15e78ef411bada79abd374bd7d67e36ff9929 | 234c7fb0bdabdd696c8e4c6a449ac2c8e3f14ad5 | /build/PureCloudPlatformClientV2/models/workday_values_trend.py | 2cd0430e7aa02ba9e1bd12ac92f707c225c96002 | [
"Apache-2.0",
"MIT"
] | permissive | humano7/platform-client-sdk-python | 2a942c43cc2d69e8cb0c4113d998e6e0664fdedb | dd5b693b1fc90c9dcb36885d7227f11221db5980 | refs/heads/master | 2023-04-12T05:05:53.932393 | 2021-04-22T03:41:22 | 2021-04-22T03:41:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,713 | py | # coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class WorkdayValuesTrend(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
WorkdayValuesTrend - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'date_start_workday': 'date',
'date_end_workday': 'date',
'division': 'Division',
'user': 'UserReference',
'timezone': 'str',
'results': 'list[WorkdayValuesMetricItem]'
}
self.attribute_map = {
'date_start_workday': 'dateStartWorkday',
'date_end_workday': 'dateEndWorkday',
'division': 'division',
'user': 'user',
'timezone': 'timezone',
'results': 'results'
}
self._date_start_workday = None
self._date_end_workday = None
self._division = None
self._user = None
self._timezone = None
self._results = None
@property
def date_start_workday(self):
"""
Gets the date_start_workday of this WorkdayValuesTrend.
The start workday for the query range for the metric value trend. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd
:return: The date_start_workday of this WorkdayValuesTrend.
:rtype: date
"""
return self._date_start_workday
@date_start_workday.setter
def date_start_workday(self, date_start_workday):
"""
Sets the date_start_workday of this WorkdayValuesTrend.
The start workday for the query range for the metric value trend. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd
:param date_start_workday: The date_start_workday of this WorkdayValuesTrend.
:type: date
"""
self._date_start_workday = date_start_workday
@property
def date_end_workday(self):
"""
Gets the date_end_workday of this WorkdayValuesTrend.
The end workday for the query range for the metric value trend. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd
:return: The date_end_workday of this WorkdayValuesTrend.
:rtype: date
"""
return self._date_end_workday
@date_end_workday.setter
def date_end_workday(self, date_end_workday):
"""
Sets the date_end_workday of this WorkdayValuesTrend.
The end workday for the query range for the metric value trend. Dates are represented as an ISO-8601 string. For example: yyyy-MM-dd
:param date_end_workday: The date_end_workday of this WorkdayValuesTrend.
:type: date
"""
self._date_end_workday = date_end_workday
@property
def division(self):
"""
Gets the division of this WorkdayValuesTrend.
The targeted division for the query
:return: The division of this WorkdayValuesTrend.
:rtype: Division
"""
return self._division
@division.setter
def division(self, division):
"""
Sets the division of this WorkdayValuesTrend.
The targeted division for the query
:param division: The division of this WorkdayValuesTrend.
:type: Division
"""
self._division = division
@property
def user(self):
"""
Gets the user of this WorkdayValuesTrend.
The targeted user for the query
:return: The user of this WorkdayValuesTrend.
:rtype: UserReference
"""
return self._user
@user.setter
def user(self, user):
"""
Sets the user of this WorkdayValuesTrend.
The targeted user for the query
:param user: The user of this WorkdayValuesTrend.
:type: UserReference
"""
self._user = user
@property
def timezone(self):
"""
Gets the timezone of this WorkdayValuesTrend.
The time zone used for aggregating metric values
:return: The timezone of this WorkdayValuesTrend.
:rtype: str
"""
return self._timezone
@timezone.setter
def timezone(self, timezone):
"""
Sets the timezone of this WorkdayValuesTrend.
The time zone used for aggregating metric values
:param timezone: The timezone of this WorkdayValuesTrend.
:type: str
"""
self._timezone = timezone
@property
def results(self):
"""
Gets the results of this WorkdayValuesTrend.
The metric value trends
:return: The results of this WorkdayValuesTrend.
:rtype: list[WorkdayValuesMetricItem]
"""
return self._results
@results.setter
def results(self, results):
"""
Sets the results of this WorkdayValuesTrend.
The metric value trends
:param results: The results of this WorkdayValuesTrend.
:type: list[WorkdayValuesMetricItem]
"""
self._results = results
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_json(self):
"""
Returns the model as raw JSON
"""
return json.dumps(sanitize_for_serialization(self.to_dict()))
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
] | |
491a1f11b35ab27ff0eb2c2ce7bb95b422862b4a | ed7cd7760c708720f5a847a02b0c3a50cca0175e | /docs/conf.py | c6db3e446649d27013be9c86061f2f9677830789 | [
"MIT"
] | permissive | jcapriot/aurora | bf98b1236e7dc43e0189df71725f7f862d271984 | 08d5ccc671054a2b646a4effb412a2ed48314646 | refs/heads/main | 2023-09-05T00:07:16.984109 | 2021-10-27T02:49:41 | 2021-10-27T02:49:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,585 | py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import aurora
from sphinx_gallery.sorting import FileNameSortKey
# -- Project information -----------------------------------------------------
project = 'aurora'
copyright = '2021, Karl Kappler, Jared Peacock, Lindsey Heagy, Douglas Oldenburg'
author = 'Karl Kappler, Jared Peacock, Lindsey Heagy, Douglas Oldenburg'
# The full version, including alpha/beta/rc tags
release = '0.0.1'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.coverage",
"sphinx.ext.doctest",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
"sphinx.ext.mathjax",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"matplotlib.sphinxext.plot_directive",
"numpydoc",
# "nbsphinx",
"sphinx_gallery.gen_gallery"
]
# Autosummary pages will be generated by sphinx-autogen instead of sphinx-build
autosummary_generate = True
numpydoc_class_members_toctree = False
# API doc options
apidoc_module_dir = "../aurora"
apidoc_output_dir = "api/generated"
apidoc_toc_file = False
apidoc_excluded_paths = []
apidoc_separate_modules = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
pass
except Exception:
html_theme = "default"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Intersphinx
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"numpy": ("https://docs.scipy.org/doc/numpy/", None),
"scipy": ("https://docs.scipy.org/doc/scipy/reference/", None),
"matplotlib": ("https://matplotlib.org/", None),
}
# Sphinx Gallery
sphinx_gallery_conf = {
# path to your examples scripts
"examples_dirs": [
"../examples",
],
"gallery_dirs": [
"examples",
],
"within_subsection_order": FileNameSortKey,
"filename_pattern": "\.py",
"backreferences_dir": "api/generated/backreferences",
"doc_module": "aurora",
# 'reference_url': {'discretize': None},
}
| [
"[email protected]"
] | |
13f0735af7afa71669e0b00ec47e9d7d07d8bce0 | d5214b1331c9dae59d95ba5b3aa3e9f449ad6695 | /qPloneDropDownMenu/branches/0.2/skins/qPloneDropDownMenu/qpdm_reorder.py | e0bfe0d169c75bd9dae28edd63c26790aeb59ec2 | [] | no_license | kroman0/products | 1661ee25a224c4b5f172f98110944f56136c77cf | f359bb64db22f468db5d1e411638790e94d535a2 | refs/heads/master | 2021-01-10T07:58:04.579234 | 2014-06-11T12:05:56 | 2014-06-11T12:05:56 | 52,677,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | ## Script (Python) "qpdm_reorder"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters= submenu_path, idxs
##title=
##
from Products.CMFCore.utils import getToolByName
menu_tool = getToolByName(context, 'portal_dropdownmenu')
menuitem = menu_tool.manage_reorderItems(idxs, submenu_path)
return context.getSubmenu(submenu=menu_tool.getSubMenuByPath(submenu_path),submenu_path=submenu_path)
| [
"mylan@4df3d6c7-0a05-0410-9bee-ae8b7a76f946"
] | mylan@4df3d6c7-0a05-0410-9bee-ae8b7a76f946 |
432aae4837c6d251b61eb69326fd327cebce4c6c | a63d907ad63ba6705420a6fb2788196d1bd3763c | /src/api/resourcecenter/serializers/processing_metrics_serializers.py | bb72d5540d96efd33b60750a04d702611cbf0b03 | [
"MIT"
] | permissive | Tencent/bk-base | a38461072811667dc2880a13a5232004fe771a4b | 6d483b4df67739b26cc8ecaa56c1d76ab46bd7a2 | refs/heads/master | 2022-07-30T04:24:53.370661 | 2022-04-02T10:30:55 | 2022-04-02T10:30:55 | 381,257,882 | 101 | 51 | NOASSERTION | 2022-04-02T10:30:56 | 2021-06-29T06:10:01 | Python | UTF-8 | Python | false | false | 2,364 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import datetime
from django.utils.translation import ugettext as _
from rest_framework import serializers
from common.exceptions import ValidationError
class ProcessingMetricSummarySerializer(serializers.Serializer):
start_time = serializers.CharField(label=_("开始日期"))
end_time = serializers.CharField(label=_("结束日期"))
geog_area_code = serializers.CharField(required=False, label=_("地区"))
def validate_start_time(self, start_time):
try:
datetime.datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
except ValueError:
raise ValidationError(_("开始日期,格式为YYYY-MM-DD HH:mm:SS"))
return start_time
def validate_end_time(self, end_time):
try:
datetime.datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
except ValueError:
raise ValidationError(_("结束日期,格式为YYYY-MM-DD HH:mm:SS"))
return end_time
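# Example payload accepted by this serializer (sketch; the area code value is illustrative):
#     {"start_time": "2021-01-01 00:00:00", "end_time": "2021-01-02 00:00:00", "geog_area_code": "<area_code>"}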
| [
"[email protected]"
] | |
ff96ced9ce7021a3e0768e0e4493dcaaee8df6fd | a6086dcd794ee1419081761e473433081249059f | /app/api/errors.py | 9e92b5acf600ead372909b7faad5a3d73fe777ea | [] | no_license | billy0402/flask-stock-api | f1d6f51d7d67300eccc2d7621eacc41f3a8ec609 | 2d656c80b2a062f8dd4f7f8466ed3060f7d56477 | refs/heads/master | 2023-07-18T15:40:53.869479 | 2021-09-08T18:57:47 | 2021-09-08T18:57:47 | 402,569,924 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 632 | py | from flask import jsonify
from . import api
from ..exceptions import ValidationError
def bad_request(message):
response = jsonify({'error': 'bad request', 'message': message})
response.status_code = 400
return response
def unauthorized(message):
response = jsonify({'error': 'unauthorized', 'message': message})
response.status_code = 401
return response
def forbidden(message):
response = jsonify({'error': 'forbidden', 'message': message})
response.status_code = 403
return response
@api.errorhandler(ValidationError)
def validation_error(error):
return bad_request(error.args[0])
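# Example (sketch): a ValidationError('bad email') raised inside an API view is converted by the
# handler above into an HTTP 400 response whose body is {"error": "bad request", "message": "bad email"}.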
| [
"[email protected]"
] | |
95b9fdca571f3e098ef2c1ff21e6bd48597afc65 | f09dc121f213f2881df3572288b7ee5b39246d73 | /aliyun-python-sdk-cms/aliyunsdkcms/request/v20190101/ModifyHostInfoRequest.py | b5160544bcb63311836cf513c07824b15c12694d | [
"Apache-2.0"
] | permissive | hetw/aliyun-openapi-python-sdk | 2f31378ad6be0896fb8090423f607e9c7d3ae774 | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | refs/heads/master | 2023-01-19T22:42:36.214770 | 2020-12-04T10:55:14 | 2020-12-04T10:55:14 | 318,689,093 | 1 | 0 | NOASSERTION | 2020-12-05T03:03:03 | 2020-12-05T03:03:03 | null | UTF-8 | Python | false | false | 1,332 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class ModifyHostInfoRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cms', '2019-01-01', 'ModifyHostInfo','cms')
self.set_method('POST')
def get_HostName(self):
return self.get_query_params().get('HostName')
def set_HostName(self,HostName):
self.add_query_param('HostName',HostName)
def get_InstanceId(self):
return self.get_query_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_query_param('InstanceId',InstanceId) | [
"[email protected]"
] | |
d21c0896c06e1415355d55f1c6aa4eda00358cbc | 46559fa48bb8ae722149b600ecd5e05e558553ac | /RumourEval2019Models/Bert-MFajcik/data_preprocessing/text_preprocessing.py | 185113f8777ca3d00c738e17f3b504dde6cda8ea | [
"MIT"
] | permissive | isspek/veracity-detection | f84eeba6aceb8b2f3f753c5e856bb46d9581c0c5 | 9368309722bead209e49e52c206758e3d173092a | refs/heads/master | 2022-07-15T10:25:10.327352 | 2019-11-14T13:24:55 | 2019-11-14T13:24:55 | 214,429,773 | 0 | 0 | MIT | 2022-06-21T23:08:54 | 2019-10-11T12:23:39 | Python | UTF-8 | Python | false | false | 7,549 | py | import re
import string
import warnings
import preprocessor as twitter_preprocessor
import spacy
# See spacy tag_map.py for tag explanation
from nltk.corpus import stopwords
from spacy.symbols import PUNCT, SYM, ADJ, CCONJ, NUM, DET, ADV, ADP, VERB, NOUN, PROPN, PART, PRON, ORTH
from utils import DotDict
warnings.filterwarnings("ignore", category=UserWarning, module='bs4')
nlp = None
punctuation = list(string.punctuation) + ["``"]
stopWords = set(stopwords.words('english'))
validPOS = [PUNCT, SYM, ADJ, CCONJ, NUM, DET, ADV, ADP, VERB, NOUN, PROPN, PART, PRON]
POS_dict = {x: i + 2 for i, x in enumerate(validPOS)}
POS_dict['UNK'] = 0
POS_dict['EOS'] = 1
validNER = ["UNK",
"PERSON", # People, including fictional.
"NORP", # Nationalities or religious or political groups.
"FAC", # Buildings, airports, highways, bridges, etc.
"ORG", # Companies, agencies, institutions, etc.
"GPE", # Countries, cities, states.
"LOC", # Non-GPE locations, mountain ranges, bodies of water.
"PRODUCT", # Objects, vehicles, foods, etc. (Not services.)
"EVENT", # Named hurricanes, battles, wars, sports events, etc.
"WORK_OF_ART", # Titles of books, songs, etc.
"LAW", # Named documents made into laws.
"LANGUAGE", # Any named language.
"DATE", # Absolute or relative dates or periods.
"TIME", # Times smaller than a day.
"PERCENT", # Percentage, including "%".
"MONEY", # Monetary values, including unit.
"QUANTITY", # Measurements, as of weight or distance.
"ORDINAL", # "first", "second", etc.
"CARDINAL", # Numerals that do not fall under another type.
]
validDEPS = ['UNK',
'acl',
'acomp',
'advcl',
'advmod',
'agent',
'amod',
'appos',
'attr',
'aux',
'auxpass',
'case',
'cc',
'ccomp',
'complm',
'compound',
'conj',
'cop',
'csubj',
'csubjpass',
'dative',
'dep',
'det',
'dobj',
'expl',
'hmod',
'hyph',
'infmod',
'intj',
'iobj',
'mark',
'meta',
'neg',
'nmod',
'nn',
'npadvmod',
'nsubj',
'nsubjpass',
'num',
'number',
'nummod',
'obj',
'obl',
'oprd',
'parataxis',
'partmod',
'pcomp',
'pobj',
'poss',
'possessive',
'preconj',
'predet',
'prep',
'prt',
'punct',
'quantmod',
'rcmod',
'relcl',
'root',
'xcomp']
def preprocess_text(text: str, opts, nlpengine=None, lang='en', special_tags=["<pad>", "<eos>"],
use_tw_preprocessor=True):
if use_tw_preprocessor:
## ! There is a bug in original package for twitter preprocessing
        # Sometimes the regexp for link preprocessing freezes
# So we preprocess links separately
text = re.sub(r"(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?", "$URL$",
text.strip())
twitter_preprocessor.set_options('mentions')
text = twitter_preprocessor.tokenize(text)
# processed_chunk = twitter_preprocessor.clean(text)
if nlpengine is None:
global nlp
if nlp is None:
nlp = spacy.load(lang)
nlp.add_pipe(nlp.create_pipe('sentencizer'))
for x in ['URL', 'MENTION', 'HASHTAG', 'RESERVED', 'EMOJI', 'SMILEY', 'NUMBER', ]:
nlp.tokenizer.add_special_case(f'${x}$', [{ORTH: f'${x}$'}])
nlpengine = nlp
BLvec = []
POSvec = []
DEPvec = []
NERvec = []
processed_chunk = ""
doc = nlpengine(text)
doclen = 0
for sentence in doc.sents:
for w in sentence:
# Some phrases are automatically tokenized by Spacy
# i.e. New York, in that case we want New_York in our dictionary
word = "_".join(w.text.split())
if word.isspace() or word == "":
continue
if opts.remove_stop_words and word.lower() in stopWords:
continue
if opts.remove_puncuation and word in punctuation:
continue
            # Spacy lemmatizes I/He/She/It into the artificial
            # -PRON- lemma, which is unwanted
if opts.lemmatize_words:
output = w.lemma_ if w.lemma_ != '-PRON-' else w.lower_
else:
output = word
if opts.to_lowercase:
output = output.lower()
if opts.replace_nums and output.replace('.', '', 1).isdigit():
output = opts.num_replacement
output = output.replace("n't", "not")
doclen += 1
processed_chunk += "%s " % (output)
# Sometimes, when the word contains punctuation and we split it manually
# the output can contain multiple tokens
# In such case, just copy the features..., it happens rarely
if opts.returnbiglettervector:
BLvec.append(int(w.text[0].isupper()))
if opts.returnposvector:
POSvec.append(POS_dict.get(w.pos, POS_dict['UNK']))
if opts.returnDEPvector:
try:
DEPvec.append(validDEPS.index(w.dep_.lower()))
except ValueError:
DEPvec.append(validDEPS.index('UNK'))
if opts.returnNERvector:
try:
NERvec.append(validNER.index(w.ent_type_))
except ValueError:
NERvec.append(validNER.index('UNK'))
if opts.add_eos:
doclen += 1
processed_chunk += opts.eos + "\n"
if opts.returnbiglettervector:
BLvec.append(0)
if opts.returnposvector:
POSvec.append(POS_dict['EOS'])
if opts.returnDEPvector:
DEPvec.append(0)
if opts.returnNERvector:
NERvec.append(0)
else:
processed_chunk += "\n"
processed_chunk = processed_chunk.strip()
assert len(processed_chunk.split()) == len(BLvec) == len(POSvec) == len(DEPvec) == len(NERvec)
return processed_chunk, BLvec, POSvec, DEPvec, NERvec
def initopts():
o = DotDict()
o.stopwords_file = ""
o.remove_puncuation = False
o.remove_stop_words = False
o.lemmatize_words = False
o.num_replacement = "[NUM]"
o.to_lowercase = False
o.replace_nums = False # Nums are important, since rumour may be lying about count
o.eos = "[EOS]"
o.add_eos = True
o.returnNERvector = True
o.returnDEPvector = True
o.returnbiglettervector = True
o.returnposvector = True
return o
if __name__ == "__main__":
print(preprocess_text(
"Appalled by the attack on Charlie Hebdo in Paris, 10 - probably journalists - now confirmed dead. An attack on free speech everywhere.",
initopts()))
| [
"[email protected]"
] | |
eedc1a1a7b87294894b34aefd03488bb442339be | 33e5e4b883671f7f40a48e6e0a4b544b3f8f839a | /imageflow/apps.py | 2b8872cb5e0adfd69a6677056fd89db00b564baa | [
"MIT"
] | permissive | typpo/astrokit | ad7ee83664e3d920733d7e008aec4801c7aa84f2 | 59cea2e06c027e83dfa70defb4053820c79ccced | refs/heads/master | 2023-04-12T15:44:11.669710 | 2022-06-21T21:21:04 | 2022-06-21T21:21:04 | 47,933,931 | 9 | 7 | MIT | 2023-03-31T14:28:40 | 2015-12-13T19:52:01 | Python | UTF-8 | Python | false | false | 134 | py | from __future__ import unicode_literals
from django.apps import AppConfig
class ImageflowConfig(AppConfig):
name = 'imageflow'
| [
"[email protected]"
] | |
9b3ef03ef6d8de217adbc634e63f038ae42d5d52 | 0a3bf0a6f10eb143c9291090125946538ee73279 | /summarize/sumy/summarizers/edmundson_location.py | 406597f5a31d71b8b516c58c710328b273d06797 | [
"Apache-2.0"
] | permissive | AIPHES/live-blog-summarization | 19ec1c01b7e254f74b2de153ac3972780daa7506 | a5f899ea07a098e1e0b3ab92cd3d430776e6412a | refs/heads/master | 2022-11-24T09:39:25.750313 | 2019-02-12T13:53:12 | 2019-02-12T13:53:12 | 166,268,167 | 2 | 1 | Apache-2.0 | 2022-11-02T20:47:14 | 2019-01-17T17:34:10 | Python | UTF-8 | Python | false | false | 2,516 | py | # -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from operator import attrgetter
from .._compat import ffilter
from ._summarizer import AbstractSummarizer
class EdmundsonLocationMethod(AbstractSummarizer):
def __init__(self, stemmer, null_words):
super(EdmundsonLocationMethod, self).__init__(stemmer)
self._null_words = null_words
def __call__(self, document, sentences_count, w_h, w_p1, w_p2, w_s1, w_s2):
significant_words = self._compute_significant_words(document)
ratings = self._rate_sentences(document, significant_words, w_h, w_p1,
w_p2, w_s1, w_s2)
return self._get_best_sentences(document.sentences, sentences_count, ratings)
def _compute_significant_words(self, document):
headings = document.headings
significant_words = chain(*map(attrgetter("words"), headings))
significant_words = map(self.stem_word, significant_words)
significant_words = ffilter(self._is_null_word, significant_words)
return frozenset(significant_words)
def _is_null_word(self, word):
return word in self._null_words
def _rate_sentences(self, document, significant_words, w_h, w_p1, w_p2, w_s1, w_s2):
rated_sentences = {}
paragraphs = document.paragraphs
for paragraph_order, paragraph in enumerate(paragraphs):
sentences = paragraph.sentences
for sentence_order, sentence in enumerate(sentences):
rating = self._rate_sentence(sentence, significant_words)
rating *= w_h
if paragraph_order == 0:
rating += w_p1
elif paragraph_order == len(paragraphs) - 1:
rating += w_p2
if sentence_order == 0:
rating += w_s1
elif sentence_order == len(sentences) - 1:
rating += w_s2
rated_sentences[sentence] = rating
return rated_sentences
def _rate_sentence(self, sentence, significant_words):
words = map(self.stem_word, sentence.words)
return sum(w in significant_words for w in words)
def rate_sentences(self, document, w_h=1, w_p1=1, w_p2=1, w_s1=1, w_s2=1):
significant_words = self._compute_significant_words(document)
return self._rate_sentences(document, significant_words, w_h, w_p1, w_p2, w_s1, w_s2)
| [
"[email protected]"
] | |
3941489ec2a7e0de2b1adcec8caab3fafca2f3a0 | 4b4df51041551c9a855468ddf1d5004a988f59a2 | /leetcode_python/Array/corporate-flight-bookings.py | d6486593ea2dc4f37b79869a1f72ef71fc6dc067 | [] | no_license | yennanliu/CS_basics | 99b7ad3ef6817f04881d6a1993ec634f81525596 | 035ef08434fa1ca781a6fb2f9eed3538b7d20c02 | refs/heads/master | 2023-09-03T13:42:26.611712 | 2023-09-03T12:46:08 | 2023-09-03T12:46:08 | 66,194,791 | 64 | 40 | null | 2022-08-20T09:44:48 | 2016-08-21T11:11:35 | Python | UTF-8 | Python | false | false | 5,073 | py | """
1109. Corporate Flight Bookings
Medium
There are n flights that are labeled from 1 to n.
You are given an array of flight bookings bookings, where bookings[i] = [firsti, lasti, seatsi] represents a booking for flights firsti through lasti (inclusive) with seatsi seats reserved for each flight in the range.
Return an array answer of length n, where answer[i] is the total number of seats reserved for flight i.
Example 1:
Input: bookings = [[1,2,10],[2,3,20],[2,5,25]], n = 5
Output: [10,55,45,25,25]
Explanation:
Flight labels: 1 2 3 4 5
Booking 1 reserved: 10 10
Booking 2 reserved: 20 20
Booking 3 reserved: 25 25 25 25
Total seats: 10 55 45 25 25
Hence, answer = [10,55,45,25,25]
Example 2:
Input: bookings = [[1,2,10],[2,2,15]], n = 2
Output: [10,25]
Explanation:
Flight labels: 1 2
Booking 1 reserved: 10 10
Booking 2 reserved: 15
Total seats: 10 25
Hence, answer = [10,25]
Constraints:
1 <= n <= 2 * 104
1 <= bookings.length <= 2 * 104
bookings[i].length == 3
1 <= firsti <= lasti <= n
1 <= seatsi <= 104
"""
# V0
# V1
# IDEA : ARRAY + prefix sum
# https://leetcode.com/problems/corporate-flight-bookings/discuss/328856/JavaC%2B%2BPython-Sweep-Line
# IDEA :
# Set the change of seats for each day.
# If booking = [i, j, k],
# it needs k more seat on ith day,
# and we don't need these seats on j+1th day.
# We accumulate these changes then we have the result that we want.
# Complexity
# Time O(booking + N) for one pass on bookings
# Space O(N) for the result
class Solution:
def corpFlightBookings(self, bookings, n):
res = [0] * (n + 1)
for i, j, k in bookings:
res[i - 1] += k
res[j] -= k
for i in range(1, n):
res[i] += res[i - 1]
return res[:-1]
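# Worked trace for Example 1 (sketch): bookings = [[1,2,10],[2,3,20],[2,5,25]], n = 5
# difference array after the first loop:  [10, 45, -10, -20, 0, -25]
# prefix sums over the first five slots:  [10, 55, 45, 25, 25]  -> the expected answer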
# V1'
# IDEA : ARRAY + prefix sum
# https://leetcode.com/problems/corporate-flight-bookings/discuss/328949/Simple-Python-solution
class Solution:
def corpFlightBookings(self, bookings: List[List[int]], n: int) -> List[int]:
answer = n * [0]
lst = []
for i, j, num in bookings:
lst.append((i - 1, num))
lst.append((j, -num))
lst.sort()
curr_num = 0
prev_i = 0
for i, num in lst:
for j in range(prev_i, i):
answer[j] += curr_num
prev_i = i
curr_num += num
return answer
# V1''
# IDEA : ARRAY
# https://leetcode.com/problems/corporate-flight-bookings/discuss/328893/Short-python-solution
# IDEA : Simply use two arrays to keep track of how many bookings are added for every flight.
class Solution:
def corpFlightBookings(self, bookings: List[List[int]], n: int) -> List[int]:
opens = [0]*n
closes = [0]*n
for e in bookings:
opens[e[0]-1] += e[2]
closes[e[1]-1] += e[2]
ret, tmp = [0]*n, 0
for i in range(n):
tmp += opens[i]
ret[i] = tmp
tmp -= closes[i]
return ret
# V1'''
# https://leetcode.com/problems/corporate-flight-bookings/discuss/328986/Python-linear-solution
class Solution:
def corpFlightBookings(self, bookings: List[List[int]], n: int) -> List[int]:
res = [0] * (n + 2)
for booking in bookings:
start, end, seats = booking
res[start] += seats
res[end + 1] -= seats
for i in range(1, len(res)):
res[i] += res[i - 1]
# don't keep first because bookings are 1-based
# don't keep last because it's out of range
return res[1:-1]
# V1''''
# https://leetcode.com/problems/corporate-flight-bookings/discuss/328863/Python-concise-sum
class Solution:
def corpFlightBookings(self, bookings: List[List[int]], n: int) -> List[int]:
res = [0] * n
i = cur = 0
for j, val in sorted([[i - 1, k] for i, j, k in bookings] + [[j, -k] for i, j, k in bookings]):
while i < j:
res[i] = cur
i += 1
cur += val
return res
# V1''''''
# https://zxi.mytechroad.com/blog/math/leetcode-1109-corporate-flight-bookings/
# C++
# class Solution {
# public:
# vector<int> corpFlightBookings(vector<vector<int>>& bookings, int n) {
# vector<int> ans(n + 1);
# for (const auto& b : bookings) {
# ans[b[0] - 1] += b[2];
# ans[b[1]] -= b[2];
# }
# for (int i = 1; i < n; ++i)
# ans[i] += ans[i - 1];
# ans.pop_back();
# return ans;
# }
# };
# V1''''''''
# https://blog.51cto.com/u_15344287/3646723
class Solution:
def corpFlightBookings(self, bookings: List[List[int]], n: int) -> List[int]:
lst = [0] * (n + 1)
for j, k, l in bookings:
lst[j - 1] += l
lst[k] -= l
lst.pop()
ans = []
now = 0
for i in range(len(lst)):
now += lst[i]
ans.append(now)
return ans
# V2 | [
"[email protected]"
] | |
8da121d649ea828a915d2f8fee0f8d2f41569f13 | bfc25f1ad7bfe061b57cfab82aba9d0af1453491 | /data/external/repositories_2to3/120243/tradeshift-text-classification-master/src/online-model/tk7_solution.py | ffa812f783556c5f81ae943cd1fa4a0497105321 | [
"MIT"
] | permissive | Keesiu/meta-kaggle | 77d134620ebce530d183467202cf45639d9c6ff2 | 87de739aba2399fd31072ee81b391f9b7a63f540 | refs/heads/master | 2020-03-28T00:23:10.584151 | 2018-12-20T19:09:50 | 2018-12-20T19:09:50 | 147,406,338 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,176 | py | '''
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004
Copyright (C) 2004 Sam Hocevar <[email protected]>
Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. You just DO WHAT THE FUCK YOU WANT TO.
'''
from datetime import datetime
from math import log, exp, sqrt
# TL; DR
# the main learning process start at line 122
# parameters #################################################################
import sys
data_dir=sys.argv[1]
sub_dir=sys.argv[2]
train = data_dir+'train.csv' # path to training file
label = data_dir+'trainLabels.csv' # path to label file of training data
test = data_dir+'test.csv' # path to testing file
D = 2 ** 23 # number of weights use for each model, we have 32 of them
alpha = .1 # learning rate for sgd optimization
# function, generator definitions ############################################
# A. x, y generator
# INPUT:
# path: path to train.csv or test.csv
# label_path: (optional) path to trainLabels.csv
# YIELDS:
# ID: id of the instance (can also acts as instance count)
# x: a list of indices that its value is 1
# y: (if label_path is present) label value of y1 to y33
def data(path, label_path=None):
for t, line in enumerate(open(path)):
# initialize our generator
if t == 0:
# create a static x,
# so we don't have to construct a new x for every instance
            x = [0] * (146 + 13 * 14 // 2 + 1)  # integer division keeps the list length an int under Python 3
if label_path:
label = open(label_path)
label.readline() # we don't need the headers
continue
# parse x
for m, feat in enumerate(line.rstrip().split(',')):
if m == 0:
ID = int(feat)
else:
# one-hot encode everything with hash trick
# categorical: one-hotted
# boolean: ONE-HOTTED
# numerical: ONE-HOTTED!
                # note: the built-in hash(), although fast, is not stable,
# i.e., same value won't always have the same hash
# on different machines
x[m] = abs(hash(str(m) + '_' + feat)) % D
row=line.rstrip().split(',')
hash_cols = [64,65,61,62,91,92,142,3,4,61,34,91,94,95]
t = 146
for i in range(14):
for j in range(i+1,14):
t += 1
x[t] = abs(hash(str(i)+'_'+str(j)+'_'+row[hash_cols[i]]+"_x_"+row[hash_cols[j]])) % D
# parse y, if provided
if label_path:
# use float() to prevent future type casting, [1:] to ignore id
y = [float(y) for y in label.readline().split(',')[1:]]
yield (ID, x, y) if label_path else (ID, x)
# B. Bounded logloss
# INPUT:
# p: our prediction
# y: real answer
# OUTPUT
# bounded logarithmic loss of p given y
def logloss(p, y):
p = max(min(p, 1. - 10e-15), 10e-15)
return -log(p) if y == 1. else -log(1. - p)
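# e.g. logloss(0.9, 1.) == -log(0.9) ~= 0.105, while a totally wrong prediction is first
# clipped to [10e-15, 1 - 10e-15], so one instance contributes at most about 32.2 to the loss.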
# C. Get probability estimation on x
# INPUT:
# x: features
# w: weights
# OUTPUT:
# probability of p(y = 1 | x; w)
def predict(x, w):
wTx = 0.
for i in x: # do wTx
wTx += w[i] * 1. # w[i] * x[i], but if i in x we got x[i] = 1.
return 1. / (1. + exp(-max(min(wTx, 20.), -20.))) # bounded sigmoid
# D. Update given model
# INPUT:
# alpha: learning rate
# w: weights
# n: sum of previous absolute gradients for a given feature
# this is used for adaptive learning rate
# x: feature, a list of indices
# p: prediction of our model
# y: answer
# MODIFIES:
# w: weights
# n: sum of past absolute gradients
def update(alpha, w, n, x, p, y):
for i in x:
# alpha / sqrt(n) is the adaptive learning rate
# (p - y) * x[i] is the current gradient
# note that in our case, if i in x then x[i] = 1.
n[i] += abs(p - y)
w[i] -= (p - y) * 1. * alpha / sqrt(n[i])
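# e.g. with alpha = .1, a first update where p - y = 0.5 gives n[i] = 0.5 and a step of
# 0.5 * .1 / sqrt(0.5) ~= 0.071; as |p - y| accumulates in n[i], later steps keep shrinking.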
# training and testing #######################################################
start = datetime.now()
# a list for range(0, 33) - 13, no need to learn y14 since it is always 0
K = [k for k in range(33) if k != 13]
# initialize our model, all 32 of them, again ignoring y14
w = [[0.] * D if k != 13 else None for k in range(33)]
n = [[0.] * D if k != 13 else None for k in range(33)]
loss = 0.
loss_y14 = log(1. - 10**-15)
for ID, x, y in data(train, label):
# get predictions and train on all labels
for k in K:
p = predict(x, w[k])
update(alpha, w[k], n[k], x, p, y[k])
loss += logloss(p, y[k]) # for progressive validation
loss += loss_y14 # the loss of y14, logloss is never zero
# print out progress, so that we know everything is working
if ID % 100000 == 0:
print(('%s\tencountered: %d\tcurrent logloss: %f' % (
datetime.now(), ID, (loss/33.)/ID)))
for ID, x, y in data(train, label):
# get predictions and train on all labels
for k in K:
p = predict(x, w[k])
update(alpha, w[k], n[k], x, p, y[k])
loss += logloss(p, y[k]) # for progressive validation
loss += loss_y14 # the loss of y14, logloss is never zero
# print out progress, so that we know everything is working
if ID % 100000 == 0:
print(('%s\tencountered: %d\tcurrent logloss: %f' % (
datetime.now(), ID, (loss/33.)/ID)))
with open(sub_dir+'./submissiontk7.csv', 'w') as outfile:
outfile.write('id_label,pred\n')
for ID, x in data(test):
for k in K:
p = predict(x, w[k])
outfile.write('%s_y%d,%s\n' % (ID, k+1, str(p)))
if k == 12:
outfile.write('%s_y14,0.0\n' % ID)
print(('Done, elapsed time: %s' % str(datetime.now() - start)))
| [
"[email protected]"
] | |
7da0cdbd0ae336d14f7023b24a2e9169e58abf11 | 94fd1381adcfaa5ea64dc13123aef16697b0396a | /covid_dashboard/views/get_districts_daily_report_day_wise/request_response_mocks.py | 540280aed7e3719046e9b95366e4f81bc83ed4df | [] | no_license | bharathi151/covid_dashboard | 30ac9fe4720b8cd42028b33dcc1b620e0f1ebdb1 | 930bf3e46e8d7c56c682ce10f7f6e5fa7f50cab8 | refs/heads/master | 2022-11-14T20:18:24.648922 | 2020-06-27T10:08:53 | 2020-06-27T10:08:53 | 269,612,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py |
RESPONSE_200_JSON = """
[
{
"district_name": "string",
"district_id": 1,
"day_wise_statistics": [
{
"total_confirmed_cases": 1,
"total_deaths": 1,
"total_recovered_cases": 1,
"date": "string"
}
]
}
]
"""
| [
"[email protected]"
] | |
b90fbfd3c2d421fb70c9156499e70a3a7511340d | 4af090efabd08ef73c411a00ce4972a1c6f30a22 | /python_100days/7day/practice11.py | 82eb730e0a554302387bf8dc26b7ee42b67aaddd | [] | no_license | predatory123/byhytest | e52bca664f9461c9309aaa9bf779c02368ed937c | 578206c9ec9253d0d9325e72cdc13dde6eeb2fc1 | refs/heads/master | 2023-04-26T13:33:14.462408 | 2021-05-20T13:33:37 | 2021-05-20T14:26:22 | 369,213,148 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | # 综合案例2:约瑟夫环问题
"""
《幸运的基督徒》
有15个基督徒和15个非基督徒在海上遇险,为了能让一部分人活下来不得不将其中15个人扔到海里面去,
有个人想了个办法就是大家围成一个圈,由某个人开始从1报数,报到9的人就扔到海里面,他后面的人接着从1开始报数,
报到9的人继续扔到海里面,直到扔掉15个人。由于上帝的保佑,15个基督徒都幸免于难,问这些人最开始是怎么站的,
哪些位置是基督徒哪些位置是非基督徒。
"""
def main():
persons = [True] * 30
counter, index, number = 0, 0, 0
while counter < 15:
if persons[index]:
number += 1
if number == 9:
persons[index] = False
counter += 1
number = 0
index += 1
index %= 30
for person in persons:
print('基' if person else '非', end='')
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
3fce41e05b897b1b5f9cb8483bc9db41b2f751a0 | 3c8701e04900389adb40a46daedb5205d479016c | /oldboy-python18/day02-列表-字典/home-work-stu/购物车.py | 63b937b4063f23e586269f417564b2537968ebdd | [] | no_license | huboa/xuexi | 681300653b834eaf506f49987dcca83df48e8db7 | 91287721f188b5e24fbb4ccd63b60a80ed7b9426 | refs/heads/master | 2020-07-29T16:39:12.770272 | 2018-09-02T05:39:45 | 2018-09-02T05:39:45 | 73,660,825 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,584 | py | #coding:utf-8
goods = [
{"name": "电脑", "price": 1999},
{"name": "鼠标", "price": 10},
{"name": "游艇", "price": 20},
{"name": "美女", "price": 998},
]
shopping_car=[]
while True:
    # Get the user's total assets
total_assets = input('请输入你的总资产:').strip()
if len(total_assets) == 0:
continue
else:
if total_assets.isdigit():
total_assets = int(total_assets)
print('您的总资产:%d' % total_assets)
break
else:
print('您输入的不符合标准:')
continue
while True:
    # Display the list of goods
n=1
print('-----------商品信息-----------')
for good in goods:
good['id']=n
print('商品编号:%d ,商品名称:%s ,商品价格:%d' %(n,good['name'],good['price']))
n+=1
print('-----------------------------')
#
#
while True:
choice = input('请选择商品:').strip()
if len(choice) == 0:
continue
else:
if choice.isdigit():
n=0
for good in goods:
if int(choice) == good['id']:
                        # Add the selected item to the shopping cart
shopping_car.append((good['name'],good['price']))
n=1
if n == 0:
print('你选择的商品不存在:')
else:
                    # Show the shopping cart
print('-----------购物车信息-----------')
if len(shopping_car) == 0:
print('购物车为空')
else:
for value in shopping_car:
print('商品名称:%s ,商品价格:%d' % (value[0], value[1]))
print('-----------------------------')
break
    # Checkout
while True:
is_buy=input('结算请输入y,继续选择商品按任意键').strip()
if len(is_buy) != 0 and is_buy == 'y':
total_price=0
for i in shopping_car:
total_price+=i[1]
print('您购买的商品总价格为:%d' %total_price)
if total_price > total_assets:
print('余额不足。您的余额为%d' %total_assets)
break
else:
total_assets=total_assets-total_price
print('购买成功,余额为%d' %total_assets)
shopping_car.clear()
break
else:
break
| [
"[email protected]"
] | |
2372a02f129a67fbf7970e593aecdaeb2bdb38b5 | 55647a80c8b412af9df0ba3f50595cc2f29c25e6 | /res/scripts/client/messenger/doc_loaders/colors_schemes.py | 5d932c37ceee7ccf7724d9394a83e08eff0f0204 | [] | no_license | cnsuhao/WOT-0.9.17-CT | 0035eb6070fb4fab8d8ee9f8bbc676c10d511cfb | d1f932d8cabaf8aa21708622e87f83c8d24d6451 | refs/heads/master | 2021-06-08T18:11:07.039293 | 2016-11-19T19:12:37 | 2016-11-19T19:12:37 | null | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 2,484 | py | # 2016.11.19 19:53:40 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/messenger/doc_loaders/colors_schemes.py
from messenger.doc_loaders import _xml_helpers
def _readColors(xmlCtx, section, colorsNames, defName):
result = {}
notFound = colorsNames[:]
for tagName, subSec in section.items():
if tagName != 'color':
raise _xml_helpers.XMLError(xmlCtx, 'Tag "{0:>s}" is invalid'.format(tagName))
ctx = xmlCtx.next(subSec)
name = _xml_helpers.readNoEmptyStr(ctx, subSec, 'name', 'Section "name" is not defined')
if name not in colorsNames:
raise _xml_helpers.XMLError(ctx, 'Name of color {0:>s} is invalid'.format(name))
result[name] = _xml_helpers.readRGB(ctx, subSec, 'rgb', 'Color is invalid.')
notFound.remove(name)
if len(notFound):
defColor = 0
if defName in result:
defColor = result[defName]
for name in notFound:
result[name] = defColor
return result
def _readColorScheme(xmlCtx, section, colorScheme):
names = colorScheme.getColorsNames()
defName = colorScheme.getDefColorName()
for tagName, subSec in section.items():
if tagName == 'name':
continue
if tagName != 'item':
raise _xml_helpers.XMLError(xmlCtx, 'Tag "{0:>s}" is invalid'.format(tagName))
ctx = xmlCtx.next(subSec)
name = _xml_helpers.readNoEmptyStr(ctx, subSec, 'name', 'Section "name" is not defined')
colorsSec = subSec['colors']
if not colorsSec:
raise _xml_helpers.XMLError(ctx, 'Section "colors" is not defined')
colorScheme[name] = _readColors(ctx.next(colorsSec), colorsSec, names, defName)
def load(xmlCtx, section, messengerSettings):
for tagName, subSec in section.items():
if tagName != 'colorScheme':
raise _xml_helpers.XMLError(xmlCtx, 'Tag {0:>s} is invalid'.format(tagName))
ctx = xmlCtx.next(subSec)
name = _xml_helpers.readNoEmptyStr(ctx, subSec, 'name', 'Color scheme name is not defined')
colorScheme = messengerSettings.getColorScheme(name)
if colorScheme is not None:
_readColorScheme(ctx, subSec, colorScheme)
return
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\messenger\doc_loaders\colors_schemes.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.11.19 19:53:40 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
6aafd67487c0bd93b6877eceb974ad1a5b907767 | ec7ecc5abbdd03fb55f24e89dbbdfa23ebd7b60f | /evaluate postfix expression.py | 0287083b4698fdbb7abd669aeabc7e66044a9f3e | [] | no_license | poojithayadavalli/codekata | cd290e009cf3e2f504c99dd4f6de9171f217c6be | 1885c45a277cf1023e483bd77edf0c6edf8d95f3 | refs/heads/master | 2020-07-18T14:06:17.190229 | 2020-05-30T09:00:29 | 2020-05-30T09:00:29 | 206,259,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,541 | py | class Evaluate:
# Constructor to initialize the class variables
def __init__(self, capacity):
self.top = -1
self.capacity = capacity
        # This array is used as a stack
self.array = []
# check if the stack is empty
def isEmpty(self):
return True if self.top == -1 else False
# Return the value of the top of the stack
def peek(self):
return self.array[-1]
# Pop the element from the stack
def pop(self):
if not self.isEmpty():
self.top -= 1
return self.array.pop()
else:
return "$"
# Push the element to the stack
def push(self, op):
self.top += 1
self.array.append(op)
    # Evaluate the given postfix expression and return the result
    def evaluatePostfix(self, exp):
        # Iterate over the expression, one character at a time
for i in exp:
# If the scanned character is an operand
# (number here) push it to the stack
if i.isdigit():
self.push(i)
# If the scanned character is an operator,
# pop two elements from stack and apply it.
else:
val1 = self.pop()
val2 = self.pop()
self.push(str(eval(val2 + i + val1)))
return int(self.pop())
exp = input()
obj = Evaluate(len(exp))
print(obj.evaluatePostfix(exp))
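# Added illustrative check (not part of the original submission): the classic
# postfix expression "231*+9-" means 2 + (3 * 1) - 9 and evaluates to -4.
def _demo_evaluate_postfix():
    demo = Evaluate(len("231*+9-"))
    return demo.evaluatePostfix("231*+9-")  # -> -4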
| [
"[email protected]"
] | |
4be0a9347751505cc966aaaae4aa8a00df3626f7 | f13acd0d707ea9ab0d2f2f010717b35adcee142f | /AtCoder_Virtual_Contest/macle_20220825/c/main.py | 02c948ca2212d942ef5f1445c169292d56933fb5 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | KATO-Hiro/AtCoder | 126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7 | bf43320bc1af606bfbd23c610b3432cddd1806b9 | refs/heads/master | 2023-08-18T20:06:42.876863 | 2023-08-17T23:45:21 | 2023-08-17T23:45:21 | 121,067,516 | 4 | 0 | CC0-1.0 | 2023-09-14T21:59:38 | 2018-02-11T00:32:45 | Python | UTF-8 | Python | false | false | 5,236 | py | # -*- coding: utf-8 -*-
import math
from bisect import bisect_left, bisect_right, insort
from typing import Generic, Iterable, Iterator, TypeVar, Union, List
T = TypeVar('T')
class SortedMultiset(Generic[T]):
"""Sorted multi set (set) in C++.
See:
https://qiita.com/tatyam/items/492c70ac4c955c055602
https://github.com/tatyam-prime/SortedSet/blob/main/SortedMultiset.py
"""
BUCKET_RATIO = 50
REBUILD_RATIO = 170
def _build(self, a=None) -> None:
"Evenly divide `a` into buckets."
if a is None:
a = list(self)
size = self.size = len(a)
bucket_size = int(math.ceil(math.sqrt(size / self.BUCKET_RATIO)))
self.a = [a[size * i // bucket_size: size * (i + 1) // bucket_size] for i in range(bucket_size)]
def __init__(self, a: Iterable[T] = []) -> None:
"Make a new SortedMultiset from iterable. / O(N) if sorted / O(N log N)"
a = list(a)
if not all(a[i] <= a[i + 1] for i in range(len(a) - 1)): # type: ignore
a = sorted(a) # type: ignore
self._build(a)
def __iter__(self) -> Iterator[T]:
for i in self.a:
for j in i:
yield j # type: ignore
def __reversed__(self) -> Iterator[T]:
for i in reversed(self.a):
for j in reversed(i):
yield j
def __len__(self) -> int:
return self.size
def __repr__(self) -> str:
return "SortedMultiset" + str(self.a)
def __str__(self) -> str:
s = str(list(self))
return "{" + s[1: len(s) - 1] + "}"
def _find_bucket(self, x: T) -> List[T]:
"Find the bucket which should contain x. self must not be empty."
for a in self.a:
if x <= a[-1]: # type: ignore
return a
return a # type: ignore
def __contains__(self, x: T) -> bool:
if self.size == 0:
return False
a = self._find_bucket(x)
i = bisect_left(a, x) # type: ignore
return i != len(a) and a[i] == x
def count(self, x: T) -> int:
"Count the number of x."
return self.index_right(x) - self.index(x)
def add(self, x: T) -> None:
"Add an element. / O(√N)"
if self.size == 0:
self.a = [[x]]
self.size = 1
return
a = self._find_bucket(x)
insort(a, x) # type: ignore
self.size += 1
if len(a) > len(self.a) * self.REBUILD_RATIO:
self._build()
def discard(self, x: T) -> bool:
"Remove an element and return True if removed. / O(√N)"
if self.size == 0:
return False
a = self._find_bucket(x)
i = bisect_left(a, x) # type: ignore
if i == len(a) or a[i] != x:
return False
a.pop(i)
self.size -= 1
if len(a) == 0:
self._build()
return True
def lt(self, x: T) -> Union[T, None]:
"Find the largest element < x, or None if it doesn't exist."
for a in reversed(self.a):
if a[0] < x: # type: ignore
return a[bisect_left(a, x) - 1] # type: ignore
return None
def le(self, x: T) -> Union[T, None]:
"Find the largest element <= x, or None if it doesn't exist."
for a in reversed(self.a):
if a[0] <= x: # type: ignore
return a[bisect_right(a, x) - 1] # type: ignore
return None
def gt(self, x: T) -> Union[T, None]:
"Find the smallest element > x, or None if it doesn't exist."
for a in self.a:
if a[-1] > x: # type: ignore
return a[bisect_right(a, x)] # type: ignore
return None
def ge(self, x: T) -> Union[T, None]:
"Find the smallest element >= x, or None if it doesn't exist."
for a in self.a:
if a[-1] >= x: # type: ignore
return a[bisect_left(a, x)] # type: ignore
return None
def __getitem__(self, x: int) -> T:
"Return the x-th element, or IndexError if it doesn't exist."
if x < 0:
x += self.size
if x < 0:
raise IndexError
for a in self.a:
if x < len(a):
return a[x] # type: ignore
x -= len(a)
raise IndexError
def index(self, x: T) -> int:
"Count the number of elements < x."
ans = 0
for a in self.a:
if a[-1] >= x: # type: ignore
return ans + bisect_left(a, x) # type: ignore
ans += len(a)
return ans
def index_right(self, x: T) -> int:
"Count the number of elements <= x."
ans = 0
for a in self.a:
if a[-1] > x: # type: ignore
return ans + bisect_right(a, x) # type: ignore
ans += len(a)
return ans
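# Added usage sketch (not part of the original solution): exercises the methods
# used by main() below -- add(), lt() and gt() -- on a tiny hand-checked case.
def _sorted_multiset_demo():
    s = SortedMultiset([0, 10])
    s.add(4)
    # lt/gt return the nearest stored values strictly below/above the query.
    assert s.lt(7) == 4 and s.gt(7) == 10
    assert len(s) == 3 and 4 in s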
def main():
import sys
input = sys.stdin.readline
l, q = map(int, input().split())
s = SortedMultiset([0, l])
for i in range(q):
ci, xi = map(int, input().split())
if ci == 1:
s.add(xi)
else:
print(s.gt(xi) - s.lt(xi))
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
f3e2452d08102097b71299f1835a5000ecc6f07d | e4f8b14cead542586a96bcaa75993b0a29b3c3d0 | /pyNastran/f06/test/test_f06.py | 1bd6ea7db2cd64bd4ae4a058a7e38f9e763c9e81 | [] | no_license | afcarl/cyNastran | f1d1ef5f1f7cb05f435eac53b05ff6a0cc95c19b | 356ee55dd08fdc9880c5ffba47265125cba855c4 | refs/heads/master | 2020-03-26T02:09:00.350237 | 2014-08-07T00:00:29 | 2014-08-07T00:00:29 | 144,398,645 | 1 | 0 | null | 2018-08-11T15:56:50 | 2018-08-11T15:56:50 | null | UTF-8 | Python | false | false | 5,968 | py | import os
import sys
import time
from traceback import print_exc
import pyNastran
from pyNastran.f06.f06 import F06
#from pyNastran.op2.test.test_op2 import parseTableNamesFromF06, getFailedFiles
def run_lots_of_files(files, debug=True, saveCases=True, skipFiles=[],
stopOnFailure=False, nStart=0, nStop=1000000000):
n = ''
iSubcases = []
failedCases = []
nFailed = 0
nTotal = 0
nPassed = 0
t0 = time.time()
for i, f06file in enumerate(files[nStart:nStop], nStart): # 149
baseName = os.path.basename(f06file)
#if baseName not in skipFiles and not baseName.startswith('acms') and i not in nSkip:
if baseName not in skipFiles:
print("%" * 80)
print('file=%s\n' % f06file)
n = '%s ' % (i)
sys.stderr.write('%sfile=%s\n' % (n, f06file))
nTotal += 1
isPassed = run_f06(f06file, iSubcases=iSubcases, debug=debug,
stopOnFailure=stopOnFailure) # True/False
if not isPassed:
sys.stderr.write('**file=%s\n' % (f06file))
failedCases.append(f06file)
nFailed += 1
else:
nPassed += 1
#sys.exit('end of test...test_f06.py')
if saveCases:
f = open('failedCases.in', 'wb')
for f06file in failedCases:
f.write('%s\n' % (f06file))
f.close()
print("dt = %s seconds" % (time.time() - t0))
#f06 = F06('test_tet10_subcase_1.f06')
#f06.readF06()
sys.exit('-----done with all models %s/%s=%.2f%% nFailed=%s-----' % (nPassed, nTotal, 100. * nPassed / float(nTotal), nTotal - nPassed))
def run_f06(f06_filename, iSubcases=[], write_f06=True, debug=False,
stopOnFailure=True):
isPassed = False
#stopOnFailure = False
#debug = True
try:
f06 = F06(debug=debug)
#f06.set_subcases(iSubcases) # TODO not supported
#f06.readBDF(f06.bdf_filename,includeDir=None,xref=False)
f06.read_f06(f06_filename)
#tableNamesF06 = parseTableNamesFromF06(f06.f06FileName)
#tableNamesF06 = f06.getTableNamesFromF06()
assert write_f06 == True, write_f06
if write_f06:
(model, ext) = os.path.splitext(f06_filename)
f06.write_f06(model + '.test_f06.f06')
#print "subcases = ",f06.subcases
#assert tableNamesF06==tableNamesF06,'tableNamesF06=%s tableNamesF06=%s' %(tableNamesF06,tableNamesF06)
#f06.caseControlDeck.sol = f06.sol
#print f06.caseControlDeck.getF06Data()
#print f06.print_results()
#print f06.caseControlDeck.getF06Data()
isPassed = True
except KeyboardInterrupt:
sys.stdout.flush()
print_exc(file=sys.stdout)
        sys.stderr.write('**file=%r\n' % f06_filename)
sys.exit('keyboard stop...')
#except AddNewElementError:
# raise
#except IOError: # missing file
#pass
#except AssertionError:
# isPassed = True
#except InvalidFormatCodeError:
# isPassed = True
#except RuntimeError: #InvalidAnalysisCode
# isPassed = True
#except SyntaxError: #Invalid Markers
# isPassed = True
except SystemExit:
#print_exc(file=sys.stdout)
#sys.exit('stopping on sys.exit')
raise
#except NameError: # variable isnt defined
# if stopOnFailure:
# raise
# else:
# isPassed = True
#except AttributeError: # missing function
# if stopOnFailure:
# raise
# else:
# isPassed = True
#except KeyError:
# raise
#except TypeError: # numpy error
# isPassed = True
#except IndexError: # bad bdf
# isPassed = True
#except IOError: # missing bdf file
#isPassed = False
#raise
#except SyntaxError: #Invalid Subcase
# isPassed = True
#except SyntaxError: # Param Parse:
# isPassed = True
#except NotImplementedError:
#isPassed = True
#except InvalidFieldError: # bad bdf field
# isPassed = True
except:
#print e
print_exc(file=sys.stdout)
if stopOnFailure:
raise
else:
isPassed = False
print "isPassed =", isPassed
return isPassed
def main():
from docopt import docopt
msg = 'Tests to see if an F06 will work with pyNastran.\n'
msg += 'Usage:\n'
    msg += ' f06.py [-f] [-p] [-q] F06_FILENAME\n'
msg += ' f06.py -h | --help\n'
msg += ' f06.py -v | --version\n'
msg += '\n'
msg += 'Positional Arguments:\n'
msg += ' F06_FILENAME path to F06 file\n'
msg += '\n'
msg += 'Options:\n'
msg += ' -q, --quiet prints debug messages (default=False)\n'
msg += ' -f, --write_f06 writes the f06 to fem.f06.out (default=True)\n'
msg += ' -h, --help show this help message and exit\n'
msg += " -v, --version show program's version number and exit\n"
# disabled b/c the F06 doesn't support complex well
#msg += ' -z, --is_mag_phase F06 Writer writes Magnitude/Phase instead of\n'
#msg += ' Real/Imaginary (still stores Real/Imag)\n'
if len(sys.argv) == 1:
sys.exit(msg)
ver = str(pyNastran.__version__)
data = docopt(msg, version=ver)
for key, value in sorted(data.iteritems()):
print("%-12s = %r" % (key.strip('--'), value))
if os.path.exists('skippedCards.out'):
os.remove('skippedCards.out')
run_f06(data['F06_FILENAME'],
write_f06 = data['--write_f06'],
debug = not(data['--quiet']),
stopOnFailure = True
)
if __name__ == '__main__': # f06
main()
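# Added usage sketch (hypothetical F06 path, not part of the original module):
# run_f06 can also be driven directly from Python instead of going through docopt.
def _example_direct_call():
    return run_f06('model.f06', write_f06=True, debug=False, stopOnFailure=False)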
| [
"mesheb82@abe5364a-6225-a519-111c-932ebcde5b3b"
] | mesheb82@abe5364a-6225-a519-111c-932ebcde5b3b |
6142e7a74039e267ec08477e21952b9991b89888 | 4ee5affb8b16ff7d26df9b19ffee8d675df11e4e | /nested-loops/train_the_trainers.py | ce76aebb5569e2ac15837eb95cccaa5edc35603a | [] | no_license | ayk-dev/python-basics | f60849f6502d64445105a0d27272d9910ea1d509 | af6d04f9001d9a45e8474f9bd4fa2b3ebe380c97 | refs/heads/main | 2023-01-12T11:56:12.210880 | 2020-11-17T20:06:40 | 2020-11-17T20:06:40 | 311,747,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | n = int(input()) # number of people in jury
presentation_counter = 0
presentation = input()
all_presentations_grades = 0
while presentation != 'Finish':
    # One grade per juror; average them for this presentation.
    total = 0
    for pres in range(1, n + 1):
        grade = float(input())
        total += grade
    average_grade = total / n
    all_presentations_grades += average_grade
    print(f'{presentation} - {average_grade:.2f}.')
    presentation = input()
    presentation_counter += 1
final_average = all_presentations_grades / presentation_counter
print(f"Student's final assessment is {final_average:.2f}.")
| [
"[email protected]"
] | |
c994ba0a911d0bf5726934a74e94cc5b6ea8197c | da878a03674024f290775b2c10d745edf091a4dc | /Global Fires/venv/Scripts/pip3-script.py | d05b0aa101ecf28df5c3555bf979ec367071f105 | [
"MIT"
] | permissive | EnriqueGambra/Global-Fires | 1b3aa5670dbb69804c733b865c7906f6e9698995 | 652606ccd573e7bfd7a232876f0b59fcefc15f9b | refs/heads/master | 2020-08-03T00:44:38.156931 | 2019-09-28T23:30:43 | 2019-09-28T23:30:43 | 211,568,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | #!"C:\Users\Owner\github-Repos\Global-Fires\Global Fires\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
)
| [
"[email protected]"
] | |
74c3487b1ce6284d456f24c7a822e7f5b042c1b0 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp83_0.py | 55e6471489d774a44032f55978e0c9af8a653f9c | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,894 | py | ITEM: TIMESTEP
0
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
3.3480991349454570e-01 4.6865190086497961e+01
3.3480991349454570e-01 4.6865190086497961e+01
3.3480991349454570e-01 4.6865190086497961e+01
ITEM: ATOMS id type xs ys zs
8 1 0.130808 0.0685954 0.067749
35 1 0.0615812 0.131941 0.0620756
130 1 0.0673745 0.0640743 0.11748
165 1 0.131142 0.128914 0.121465
2 1 0.0695213 0.0667569 0.00435885
37 1 0.125561 0.133088 0.00372516
1 1 0.00214951 0.00360363 0.00137352
129 1 0.00721661 0.000399364 0.132507
133 1 0.12787 0.0091779 0.125678
3 1 0.0615281 0.00283245 0.0560259
33 1 0.00628241 0.122947 0.00199967
41 1 0.255074 0.120998 0.00247506
12 1 0.256702 0.0621165 0.0635116
39 1 0.18849 0.128713 0.0601789
43 1 0.314491 0.133976 0.0600459
134 1 0.190575 0.0721728 0.127107
138 1 0.312259 0.0636323 0.128498
169 1 0.249922 0.133413 0.117835
7 1 0.186992 0.00728301 0.0645151
137 1 0.250507 0.00351034 0.121993
6 1 0.189619 0.0663561 0.00165912
16 1 0.369832 0.065535 0.0613228
47 1 0.437339 0.134886 0.0575771
142 1 0.43311 0.0693917 0.124919
173 1 0.374931 0.129585 0.123094
145 1 0.49597 0.00509139 0.121231
20 1 0.490534 0.060222 0.0599609
15 1 0.433715 0.00178399 0.059005
14 1 0.429334 0.0650988 0.00372852
49 1 0.494926 0.120136 0.000961326
177 1 0.500933 0.127737 0.120011
24 1 0.618947 0.0656048 0.0659352
51 1 0.564982 0.123398 0.0535858
146 1 0.556652 0.0696211 0.120498
181 1 0.623493 0.127887 0.119896
149 1 0.621413 0.00203271 0.126088
19 1 0.55821 0.00671903 0.065014
28 1 0.744291 0.051325 0.0639339
55 1 0.682399 0.124839 0.0582936
59 1 0.812581 0.119231 0.0638001
150 1 0.688481 0.0611819 0.121978
154 1 0.799091 0.0663457 0.132579
185 1 0.746599 0.127579 0.125456
22 1 0.67589 0.0536424 0.00674597
57 1 0.74755 0.118531 0.00724268
26 1 0.819197 0.0635787 0.00609031
4 1 0.996363 0.0584481 0.0633073
161 1 0.996703 0.121693 0.122247
32 1 0.878943 0.0628986 0.0636405
63 1 0.931276 0.130829 0.0663827
158 1 0.933467 0.0579956 0.122245
189 1 0.877272 0.121874 0.130926
61 1 0.867437 0.135243 0.00464827
30 1 0.938778 0.0567226 0.000580732
40 1 0.125694 0.194609 0.0654106
67 1 0.0543099 0.249068 0.0724686
72 1 0.12366 0.312019 0.0666992
162 1 0.0688229 0.188212 0.127454
194 1 0.0574666 0.306123 0.127401
197 1 0.121783 0.247949 0.123449
36 1 0.000173399 0.183002 0.069021
69 1 0.113044 0.250965 0.00828519
34 1 0.06401 0.190304 0.00124161
44 1 0.242643 0.193217 0.0630084
71 1 0.186982 0.25364 0.0596591
75 1 0.306053 0.243772 0.0614634
76 1 0.254313 0.303096 0.0699722
166 1 0.187876 0.188445 0.125002
170 1 0.316137 0.184086 0.123622
198 1 0.189632 0.311759 0.129538
201 1 0.253681 0.249862 0.130996
202 1 0.321845 0.320619 0.13367
74 1 0.313776 0.313242 0.0055655
73 1 0.248241 0.25639 0.000483841
48 1 0.383878 0.190133 0.0542299
79 1 0.44665 0.247464 0.0529615
80 1 0.372873 0.311057 0.0600798
174 1 0.435719 0.19442 0.120077
205 1 0.374042 0.248904 0.115022
206 1 0.444925 0.299641 0.11928
84 1 0.504138 0.308064 0.0623404
52 1 0.50307 0.182235 0.0613968
209 1 0.5133 0.251198 0.12277
56 1 0.622931 0.183591 0.0613469
83 1 0.561503 0.242679 0.0566626
88 1 0.618381 0.316386 0.0585294
178 1 0.566574 0.184471 0.121372
210 1 0.568165 0.312496 0.125149
213 1 0.626406 0.250788 0.121887
60 1 0.748808 0.183019 0.0567971
87 1 0.685999 0.253104 0.0553365
91 1 0.804547 0.247052 0.0560679
92 1 0.747832 0.318432 0.0616899
182 1 0.69039 0.185775 0.117104
186 1 0.808283 0.182532 0.119399
214 1 0.68067 0.313849 0.121539
217 1 0.746881 0.247696 0.114411
218 1 0.811768 0.314322 0.114754
54 1 0.685855 0.18377 0.00494905
58 1 0.802232 0.184376 0.00288401
62 1 0.9406 0.191793 0.006952
93 1 0.873021 0.243789 0.00528555
193 1 0.999166 0.243232 0.135248
68 1 0.999754 0.30919 0.0644451
64 1 0.870868 0.18797 0.0687111
95 1 0.94118 0.253044 0.0627872
96 1 0.871386 0.31454 0.0528561
190 1 0.941147 0.187395 0.126725
221 1 0.881809 0.247341 0.122331
222 1 0.938404 0.310315 0.132704
94 1 0.94391 0.313816 0.00201044
1153 1 0.000742689 0.497468 0.119691
1027 1 0.0628977 0.491051 0.0670263
101 1 0.130759 0.379155 0.00167892
99 1 0.0673753 0.3724 0.0642353
104 1 0.132739 0.432169 0.06254
226 1 0.0654797 0.435363 0.125966
229 1 0.128378 0.36903 0.125893
1157 1 0.124968 0.497623 0.116302
97 1 1.37735e-05 0.371807 0.00414642
105 1 0.256089 0.371076 0.0117414
103 1 0.190901 0.369792 0.062353
107 1 0.317192 0.372 0.0646675
108 1 0.256967 0.440648 0.0749559
230 1 0.184297 0.431328 0.134146
233 1 0.254244 0.363377 0.125442
234 1 0.316812 0.442074 0.127498
1031 1 0.189927 0.497284 0.0727541
102 1 0.198861 0.437305 0.0100326
106 1 0.305361 0.444269 0.00570288
1169 1 0.495688 0.49476 0.129357
111 1 0.439739 0.370403 0.0596494
112 1 0.382055 0.432033 0.0645835
237 1 0.390137 0.373927 0.124025
238 1 0.445138 0.432259 0.129537
1165 1 0.381304 0.491683 0.12932
1039 1 0.438648 0.493656 0.0706543
116 1 0.503189 0.436567 0.0606158
241 1 0.500144 0.368258 0.121423
115 1 0.559084 0.376872 0.0574744
120 1 0.625103 0.441154 0.0575074
242 1 0.559678 0.442361 0.121917
245 1 0.623913 0.381813 0.116352
1043 1 0.559189 0.499218 0.0563459
117 1 0.622536 0.372631 7.60526e-05
113 1 0.503824 0.366053 0.00101907
114 1 0.556925 0.43937 0.000178973
119 1 0.690029 0.376238 0.0609137
123 1 0.815313 0.378998 0.0616236
124 1 0.759391 0.439723 0.0560861
246 1 0.685807 0.436367 0.126553
249 1 0.750717 0.380718 0.121318
250 1 0.819825 0.435973 0.123049
1047 1 0.694157 0.498194 0.057227
1177 1 0.753685 0.496646 0.118017
126 1 0.93435 0.439977 0.00627683
1053 1 0.882555 0.499621 0.00115897
225 1 0.996166 0.3724 0.125924
100 1 0.997643 0.433141 0.0620464
127 1 0.935941 0.368689 0.0662383
128 1 0.876206 0.441698 0.0601959
253 1 0.873727 0.365926 0.121554
254 1 0.935601 0.434055 0.120289
1055 1 0.939368 0.497967 0.0678297
1181 1 0.878863 0.499827 0.126726
259 1 0.0565572 0.000414809 0.308376
136 1 0.129436 0.064618 0.182635
163 1 0.0640827 0.123877 0.184253
258 1 0.0662541 0.0596461 0.251425
264 1 0.127049 0.0570362 0.306392
291 1 0.0732193 0.116105 0.319356
293 1 0.129167 0.123144 0.2474
289 1 0.00204297 0.125387 0.256579
131 1 0.0711838 0.000999719 0.194282
139 1 0.317293 0.000885977 0.18865
140 1 0.256016 0.055746 0.185537
167 1 0.190176 0.133277 0.185186
171 1 0.309518 0.125062 0.183755
262 1 0.187576 0.0652493 0.240892
266 1 0.317563 0.0575759 0.246528
268 1 0.249912 0.0540318 0.300983
295 1 0.187193 0.125175 0.315931
297 1 0.248099 0.116348 0.244811
299 1 0.310135 0.122105 0.308364
267 1 0.316216 0.00456555 0.306209
273 1 0.499105 0.00997211 0.246188
269 1 0.384073 0.000217788 0.247436
144 1 0.37568 0.0707162 0.182797
175 1 0.438743 0.133769 0.181454
270 1 0.436509 0.0691415 0.235673
272 1 0.380344 0.0652718 0.314133
301 1 0.373224 0.129184 0.243789
303 1 0.437185 0.120059 0.316603
305 1 0.495062 0.122584 0.250722
148 1 0.493762 0.0703723 0.182514
147 1 0.555132 0.00229531 0.185603
276 1 0.503994 0.0584305 0.313844
152 1 0.625503 0.0712222 0.182925
179 1 0.564569 0.123968 0.184759
274 1 0.569237 0.0610354 0.247504
280 1 0.629743 0.0629336 0.309739
307 1 0.56563 0.129756 0.306172
309 1 0.62913 0.123242 0.25323
281 1 0.747218 0.00396904 0.250125
155 1 0.813087 0.00253837 0.196412
283 1 0.812299 0.0109066 0.31619
156 1 0.74391 0.0670888 0.190519
183 1 0.684164 0.126009 0.180956
187 1 0.807814 0.126769 0.189073
278 1 0.690184 0.0685558 0.257577
282 1 0.809024 0.0675577 0.252296
284 1 0.750274 0.0698333 0.3201
311 1 0.685946 0.131985 0.312882
313 1 0.740982 0.135896 0.252441
315 1 0.80625 0.128088 0.31386
151 1 0.682924 0.000817487 0.185213
279 1 0.689308 0.00113855 0.31504
285 1 0.872135 0.000415968 0.257827
132 1 0.999423 0.065671 0.197055
159 1 0.934174 0.00318601 0.188882
260 1 0.997122 0.0699007 0.320016
160 1 0.876468 0.0687117 0.192343
191 1 0.936258 0.131905 0.189741
286 1 0.930376 0.0588509 0.253288
288 1 0.870897 0.0705589 0.314562
317 1 0.875712 0.127353 0.244116
319 1 0.930524 0.129582 0.315306
287 1 0.940206 0.00301296 0.315358
257 1 0.996595 0.00350416 0.250593
168 1 0.127209 0.191558 0.189696
195 1 0.0660123 0.251225 0.196061
200 1 0.125058 0.309582 0.187085
290 1 0.0712299 0.184039 0.258893
296 1 0.127664 0.183216 0.314672
322 1 0.0667037 0.312513 0.256821
323 1 0.0642226 0.250456 0.311865
325 1 0.130138 0.250571 0.257949
328 1 0.119002 0.314136 0.320616
321 1 0.000721527 0.241674 0.253799
172 1 0.251115 0.18914 0.185656
199 1 0.19344 0.247326 0.189114
203 1 0.31503 0.255798 0.181041
204 1 0.249209 0.309581 0.191835
294 1 0.190447 0.177052 0.254652
298 1 0.302164 0.175845 0.252911
300 1 0.248631 0.186913 0.321318
326 1 0.183707 0.313727 0.250178
327 1 0.188983 0.246045 0.312051
329 1 0.249613 0.251089 0.251748
330 1 0.313827 0.317204 0.247669
331 1 0.311272 0.248405 0.308032
332 1 0.243707 0.309753 0.311956
176 1 0.371865 0.192375 0.187245
207 1 0.438288 0.255202 0.19267
208 1 0.37923 0.317742 0.189826
302 1 0.431498 0.186668 0.245193
304 1 0.378788 0.180091 0.312252
333 1 0.370331 0.250343 0.248901
334 1 0.447412 0.311098 0.252938
335 1 0.436785 0.247291 0.30907
336 1 0.378604 0.313373 0.311762
308 1 0.493547 0.183053 0.307382
340 1 0.499381 0.309883 0.316631
212 1 0.505823 0.302948 0.18548
180 1 0.503677 0.188953 0.180575
337 1 0.507168 0.247042 0.249991
184 1 0.633089 0.196016 0.187141
211 1 0.569013 0.248835 0.186554
216 1 0.631089 0.312309 0.184568
306 1 0.558406 0.184284 0.243431
312 1 0.623391 0.19324 0.311526
338 1 0.562591 0.314372 0.260251
339 1 0.562076 0.243118 0.318261
341 1 0.623631 0.256272 0.25304
344 1 0.627891 0.29938 0.323897
188 1 0.745594 0.188846 0.179614
215 1 0.703908 0.259117 0.187684
219 1 0.81558 0.253431 0.179238
220 1 0.764444 0.316499 0.190309
310 1 0.684419 0.195572 0.247556
314 1 0.815778 0.188495 0.250109
316 1 0.748454 0.194287 0.318858
342 1 0.692404 0.314329 0.252913
343 1 0.689102 0.252227 0.309533
345 1 0.760707 0.249191 0.24599
346 1 0.813306 0.324464 0.262143
347 1 0.801085 0.257967 0.31788
348 1 0.743556 0.315792 0.315912
196 1 0.999119 0.306968 0.194388
292 1 0.993512 0.181872 0.32329
164 1 0.998136 0.182228 0.193998
324 1 0.994198 0.31042 0.312565
192 1 0.863492 0.187834 0.183061
223 1 0.931013 0.242174 0.191254
224 1 0.87258 0.312959 0.186686
318 1 0.934218 0.191 0.2583
320 1 0.868164 0.193348 0.310307
349 1 0.862555 0.25335 0.247697
350 1 0.929198 0.310755 0.250597
351 1 0.934351 0.253789 0.313127
352 1 0.869143 0.314194 0.3249
227 1 0.0641682 0.376947 0.19514
232 1 0.121093 0.441191 0.189849
354 1 0.0649897 0.43891 0.249511
355 1 0.0670658 0.376731 0.319004
357 1 0.11907 0.376049 0.250163
360 1 0.125581 0.436376 0.321527
228 1 0.000452675 0.439151 0.183139
356 1 0.00718559 0.434872 0.314016
1285 1 0.126234 0.498334 0.252558
1163 1 0.311303 0.492584 0.19305
231 1 0.184628 0.370412 0.193416
235 1 0.303967 0.383904 0.189288
236 1 0.245148 0.445977 0.192844
358 1 0.186834 0.430916 0.256134
359 1 0.186173 0.373662 0.320578
361 1 0.244455 0.372797 0.255943
362 1 0.315268 0.43998 0.259447
363 1 0.313867 0.373648 0.314304
364 1 0.242189 0.42989 0.317176
1287 1 0.193393 0.498441 0.307844
1289 1 0.248281 0.49779 0.251756
1167 1 0.438262 0.493268 0.191139
239 1 0.4461 0.369281 0.191217
240 1 0.380109 0.430089 0.18568
365 1 0.372631 0.376715 0.254974
366 1 0.43981 0.438263 0.249753
367 1 0.439216 0.376651 0.308024
368 1 0.375136 0.435768 0.315129
244 1 0.499482 0.432883 0.19302
1295 1 0.438602 0.49781 0.302718
372 1 0.502235 0.440118 0.312916
369 1 0.506102 0.376853 0.257459
243 1 0.566916 0.3698 0.190502
248 1 0.625246 0.442098 0.18351
370 1 0.566183 0.440842 0.248823
371 1 0.570289 0.383304 0.315865
373 1 0.63593 0.372603 0.25175
376 1 0.629336 0.443156 0.312789
1171 1 0.562188 0.496071 0.186107
1305 1 0.75248 0.498055 0.251257
247 1 0.698181 0.368625 0.180705
251 1 0.81892 0.372814 0.186165
252 1 0.752613 0.438241 0.176517
374 1 0.684833 0.435744 0.232467
375 1 0.68918 0.371733 0.312853
377 1 0.748556 0.383661 0.2448
378 1 0.812586 0.43996 0.240946
379 1 0.807743 0.384511 0.318592
380 1 0.74848 0.441809 0.307478
1175 1 0.693097 0.498991 0.180175
1179 1 0.814064 0.498886 0.178295
1307 1 0.815758 0.495645 0.314288
1303 1 0.691868 0.498188 0.307526
1183 1 0.939527 0.499632 0.177487
1311 1 0.930977 0.496889 0.313792
353 1 0.993963 0.373152 0.246172
255 1 0.929462 0.375471 0.18908
256 1 0.878582 0.441149 0.187155
381 1 0.877812 0.375232 0.250494
382 1 0.940345 0.441483 0.247686
383 1 0.937163 0.381519 0.316678
384 1 0.877079 0.437654 0.309054
386 1 0.0671931 0.0560101 0.378639
392 1 0.139051 0.0511576 0.434894
419 1 0.0695381 0.118189 0.443757
421 1 0.129883 0.119813 0.377369
417 1 0.00906949 0.117649 0.380875
388 1 0.00158545 0.0617551 0.443217
518 1 0.195112 0.0541287 0.495823
390 1 0.186618 0.0585888 0.370251
394 1 0.317515 0.0620401 0.363414
396 1 0.251887 0.0595268 0.437592
423 1 0.190646 0.118318 0.434899
425 1 0.250026 0.113863 0.372133
427 1 0.308295 0.117554 0.438646
395 1 0.318794 0.00149328 0.434646
398 1 0.442448 0.060856 0.373163
400 1 0.385455 0.0581187 0.432899
429 1 0.368808 0.120165 0.373746
431 1 0.432768 0.130943 0.437007
525 1 0.382653 0.000477807 0.488197
403 1 0.559799 0.00729634 0.438625
405 1 0.624702 0.00834647 0.370963
404 1 0.501251 0.0755657 0.437406
433 1 0.501471 0.12243 0.372744
402 1 0.562954 0.0681385 0.378609
408 1 0.624553 0.0661749 0.43386
435 1 0.568164 0.125738 0.439739
437 1 0.628638 0.117937 0.376083
401 1 0.504656 0.00313212 0.370533
565 1 0.633412 0.122192 0.496744
529 1 0.500518 0.00631837 0.499628
533 1 0.628427 0.00274921 0.499927
409 1 0.75091 0.00172483 0.375486
406 1 0.691995 0.0642952 0.378951
410 1 0.8104 0.0713467 0.375642
412 1 0.748857 0.0616365 0.440691
439 1 0.689425 0.129481 0.440864
441 1 0.7507 0.132929 0.379265
443 1 0.810692 0.129719 0.443751
569 1 0.749629 0.135791 0.492733
415 1 0.938783 0.00664016 0.436108
414 1 0.93898 0.0619269 0.37646
416 1 0.87034 0.0612477 0.432591
445 1 0.870868 0.129988 0.376086
447 1 0.937757 0.121259 0.436445
573 1 0.880569 0.121708 0.495782
418 1 0.0678268 0.185727 0.377017
424 1 0.133697 0.182642 0.442668
450 1 0.0612166 0.316289 0.386841
451 1 0.0629657 0.244691 0.447917
453 1 0.122826 0.249759 0.376897
456 1 0.126958 0.30679 0.441484
578 1 0.0617769 0.314399 0.499922
585 1 0.24715 0.25447 0.495774
422 1 0.185527 0.175578 0.376049
426 1 0.307385 0.177534 0.367406
428 1 0.246243 0.186573 0.437882
454 1 0.181544 0.305131 0.375893
455 1 0.186571 0.250063 0.438609
457 1 0.241276 0.244043 0.374309
458 1 0.319711 0.308124 0.374438
459 1 0.315047 0.252458 0.435654
460 1 0.246682 0.308974 0.434999
554 1 0.309818 0.184274 0.493833
586 1 0.308402 0.309881 0.493538
590 1 0.440778 0.303222 0.491464
430 1 0.434555 0.182258 0.375137
432 1 0.368079 0.17678 0.434922
461 1 0.372862 0.242914 0.372612
462 1 0.434146 0.302745 0.374125
463 1 0.43704 0.24154 0.435763
464 1 0.377722 0.311453 0.431851
468 1 0.497617 0.309256 0.428502
558 1 0.432604 0.188541 0.494337
436 1 0.505639 0.176088 0.44679
465 1 0.500957 0.243346 0.376107
467 1 0.565361 0.244054 0.448013
434 1 0.569407 0.184443 0.382102
469 1 0.627587 0.246635 0.383947
472 1 0.621607 0.313744 0.444152
466 1 0.567878 0.309661 0.377673
440 1 0.624173 0.19177 0.44737
594 1 0.563425 0.311455 0.496941
473 1 0.746324 0.255658 0.381248
475 1 0.806953 0.258056 0.449302
470 1 0.688583 0.318958 0.382257
442 1 0.808859 0.186448 0.379792
438 1 0.684901 0.192229 0.378514
476 1 0.753549 0.311547 0.442639
471 1 0.687759 0.256555 0.439059
444 1 0.754227 0.201552 0.442306
474 1 0.804879 0.315776 0.377931
449 1 0.993778 0.250126 0.381954
420 1 0.993694 0.185156 0.442256
452 1 0.994702 0.317014 0.439571
446 1 0.928157 0.197524 0.373125
448 1 0.877711 0.1887 0.432458
478 1 0.931088 0.315133 0.377649
480 1 0.874909 0.315179 0.446565
477 1 0.867167 0.253173 0.381455
479 1 0.933798 0.253872 0.439222
574 1 0.935019 0.1899 0.496134
613 1 0.126726 0.379082 0.495474
482 1 0.0603128 0.443197 0.375878
483 1 0.0626419 0.381061 0.442962
488 1 0.116745 0.445854 0.435294
485 1 0.121161 0.374819 0.379627
489 1 0.25395 0.380146 0.382088
486 1 0.187077 0.434374 0.379135
491 1 0.318201 0.371148 0.426344
490 1 0.311875 0.437118 0.367354
492 1 0.254021 0.444172 0.430995
1419 1 0.318808 0.498721 0.430363
617 1 0.254469 0.382272 0.488775
487 1 0.190331 0.371155 0.439157
614 1 0.183885 0.438609 0.498256
618 1 0.316014 0.433347 0.484262
621 1 0.372627 0.3724 0.497651
493 1 0.383378 0.375253 0.372097
497 1 0.495735 0.373691 0.38187
495 1 0.44011 0.36954 0.433977
494 1 0.442609 0.436531 0.37838
496 1 0.383347 0.442892 0.436895
625 1 0.497939 0.373702 0.499305
622 1 0.43304 0.429084 0.496445
500 1 0.504724 0.436732 0.434012
498 1 0.564148 0.445574 0.374327
501 1 0.628375 0.373421 0.375721
499 1 0.560876 0.375579 0.4411
504 1 0.629138 0.435543 0.437651
1427 1 0.567187 0.495566 0.441552
626 1 0.570155 0.434641 0.496437
503 1 0.68564 0.37307 0.441296
502 1 0.692414 0.428278 0.375312
505 1 0.747178 0.372143 0.382875
508 1 0.755072 0.438569 0.441064
506 1 0.814096 0.446775 0.371958
507 1 0.812354 0.383609 0.435667
1431 1 0.687781 0.491296 0.435966
634 1 0.815141 0.444945 0.4955
1433 1 0.744385 0.498243 0.373916
481 1 0.997047 0.374586 0.376238
1409 1 0.99439 0.493779 0.382113
512 1 0.879944 0.436258 0.44217
511 1 0.934131 0.376557 0.442888
510 1 0.939314 0.43681 0.377536
509 1 0.872807 0.380066 0.3705
484 1 0.990854 0.434178 0.445828
520 1 0.132482 0.056368 0.567599
547 1 0.071341 0.118291 0.557594
642 1 0.0639421 0.0590157 0.618476
677 1 0.120767 0.120947 0.620026
673 1 0.00928493 0.118228 0.618831
549 1 0.129357 0.108455 0.501716
514 1 0.0606581 0.0576103 0.502378
641 1 0.00390032 0.000386603 0.618468
524 1 0.25138 0.0595999 0.563012
551 1 0.189389 0.128261 0.558067
555 1 0.311577 0.120246 0.563421
646 1 0.199596 0.0615075 0.62415
650 1 0.30853 0.0569248 0.616628
681 1 0.242778 0.128274 0.619754
522 1 0.308078 0.0641344 0.50413
553 1 0.250453 0.122139 0.504919
528 1 0.373923 0.0580183 0.562554
559 1 0.436285 0.128057 0.567539
654 1 0.434464 0.0581554 0.628362
685 1 0.376234 0.12335 0.624459
532 1 0.492372 0.0576537 0.567989
557 1 0.375399 0.123509 0.507263
526 1 0.434199 0.0647741 0.505771
689 1 0.505234 0.119591 0.624169
536 1 0.625574 0.0654157 0.564643
563 1 0.560882 0.124958 0.566258
658 1 0.564378 0.0526167 0.6352
693 1 0.620881 0.125838 0.625667
661 1 0.630164 0.00432539 0.618959
531 1 0.564625 0.000641042 0.566246
530 1 0.562806 0.0624388 0.504075
561 1 0.503837 0.115487 0.507806
540 1 0.764578 0.0697048 0.558381
567 1 0.697633 0.12637 0.56686
571 1 0.818794 0.128966 0.561506
662 1 0.692404 0.0604863 0.618962
666 1 0.811552 0.0650363 0.62531
697 1 0.755682 0.130289 0.626598
539 1 0.811378 0.00198473 0.564049
534 1 0.696217 0.0690999 0.509214
538 1 0.815356 0.0628043 0.503181
516 1 0.996026 0.0635124 0.561896
544 1 0.87277 0.0569664 0.560937
575 1 0.942122 0.122931 0.557163
670 1 0.937802 0.0649534 0.627623
701 1 0.877259 0.122896 0.623291
669 1 0.87724 0.00595232 0.632447
542 1 0.933977 0.0610287 0.50089
545 1 0.99922 0.12351 0.502583
552 1 0.123382 0.179836 0.560693
579 1 0.0549391 0.249495 0.564604
584 1 0.119182 0.309567 0.562076
674 1 0.0630856 0.188664 0.62423
706 1 0.0587391 0.307644 0.632963
709 1 0.118698 0.250905 0.632382
546 1 0.0583156 0.188455 0.505324
548 1 0.00196756 0.186885 0.562425
581 1 0.125719 0.243896 0.503333
577 1 0.00211722 0.252193 0.501316
556 1 0.252907 0.184089 0.561511
583 1 0.178328 0.243964 0.564208
587 1 0.307677 0.244391 0.564534
588 1 0.248035 0.316658 0.558609
678 1 0.177605 0.187232 0.621663
682 1 0.3083 0.183021 0.620478
710 1 0.179472 0.319183 0.620958
713 1 0.247173 0.247915 0.61462
714 1 0.315179 0.313143 0.618755
550 1 0.188996 0.187693 0.503548
582 1 0.178631 0.319056 0.502421
560 1 0.372241 0.183126 0.562002
591 1 0.439857 0.247456 0.559834
592 1 0.368014 0.314231 0.555691
686 1 0.438044 0.194053 0.621975
717 1 0.366778 0.242384 0.617639
718 1 0.429128 0.308821 0.616883
564 1 0.498269 0.187286 0.561255
589 1 0.366949 0.244998 0.502101
597 1 0.632653 0.258114 0.503969
596 1 0.50216 0.316501 0.557816
721 1 0.501131 0.251153 0.621794
568 1 0.62651 0.187484 0.566356
595 1 0.568106 0.25667 0.558005
600 1 0.630321 0.314333 0.567664
690 1 0.562565 0.192933 0.63089
722 1 0.555978 0.316641 0.626321
725 1 0.622161 0.250843 0.626002
562 1 0.566745 0.186899 0.505129
593 1 0.504731 0.246932 0.500378
566 1 0.68617 0.194159 0.507507
598 1 0.69136 0.318972 0.505415
572 1 0.754183 0.19045 0.563445
599 1 0.68817 0.254251 0.568889
603 1 0.810959 0.258746 0.567872
604 1 0.747655 0.313936 0.565451
694 1 0.687222 0.179485 0.624728
698 1 0.811415 0.197592 0.626582
726 1 0.693989 0.316558 0.632612
729 1 0.746441 0.252035 0.628461
730 1 0.811543 0.31721 0.629391
570 1 0.810517 0.195477 0.503345
602 1 0.816186 0.313732 0.502867
601 1 0.747997 0.257038 0.506109
705 1 0.995392 0.248578 0.62238
580 1 0.999885 0.313103 0.561486
576 1 0.877128 0.190332 0.56383
607 1 0.945047 0.253742 0.564145
608 1 0.887607 0.316513 0.565135
702 1 0.943052 0.179133 0.619518
733 1 0.88536 0.257348 0.621749
734 1 0.936526 0.315971 0.629526
605 1 0.872407 0.251276 0.503077
606 1 0.942238 0.306094 0.503111
1541 1 0.124039 0.496917 0.501833
611 1 0.0583257 0.370699 0.564345
616 1 0.126191 0.441022 0.557561
738 1 0.0622368 0.442388 0.630611
741 1 0.116889 0.375147 0.627585
610 1 0.0646782 0.436537 0.507656
1547 1 0.316033 0.49751 0.562502
1545 1 0.254697 0.497237 0.502756
615 1 0.188206 0.379853 0.564208
619 1 0.310142 0.376213 0.567693
620 1 0.255298 0.434379 0.561729
742 1 0.179733 0.43427 0.624703
745 1 0.250687 0.381134 0.621385
746 1 0.309682 0.441325 0.633992
1551 1 0.440367 0.488348 0.558494
623 1 0.44146 0.369674 0.558723
624 1 0.369632 0.430302 0.567591
749 1 0.379423 0.366315 0.62758
750 1 0.440848 0.435179 0.626056
753 1 0.497386 0.374858 0.619552
1549 1 0.381859 0.499121 0.501535
1677 1 0.376847 0.489747 0.626038
1685 1 0.613752 0.499158 0.632723
628 1 0.500509 0.436401 0.567535
627 1 0.563826 0.375209 0.561513
632 1 0.620634 0.433046 0.565807
754 1 0.557046 0.434693 0.633606
757 1 0.618946 0.37912 0.629962
1555 1 0.553314 0.498264 0.575869
1553 1 0.507024 0.493564 0.506305
1557 1 0.62575 0.495738 0.513247
629 1 0.629331 0.371671 0.506681
631 1 0.689504 0.379675 0.572694
635 1 0.810213 0.373842 0.565441
636 1 0.752874 0.437738 0.562063
758 1 0.674584 0.442161 0.626577
761 1 0.745941 0.38189 0.62817
762 1 0.810054 0.430806 0.622838
1559 1 0.690099 0.499345 0.56465
633 1 0.751519 0.377224 0.504097
1561 1 0.750893 0.498819 0.505447
630 1 0.684609 0.436676 0.501766
609 1 0.995034 0.376015 0.50706
737 1 0.997707 0.379018 0.634066
612 1 0.993757 0.438554 0.571105
639 1 0.935103 0.381943 0.569422
640 1 0.867752 0.439414 0.563665
765 1 0.874936 0.378194 0.623276
766 1 0.93425 0.440362 0.631145
637 1 0.87002 0.380834 0.507667
638 1 0.930018 0.436579 0.502554
643 1 0.0654935 0.00510844 0.678605
773 1 0.123847 0.00868498 0.752024
648 1 0.132781 0.0622276 0.677449
675 1 0.0651296 0.115879 0.686133
770 1 0.0607574 0.0600819 0.746825
776 1 0.121566 0.0606807 0.815977
803 1 0.0650735 0.130197 0.806
805 1 0.126864 0.119568 0.745938
772 1 0.00531154 0.0645393 0.811635
652 1 0.250868 0.0651833 0.694998
679 1 0.193725 0.116039 0.68423
683 1 0.312087 0.118134 0.676875
774 1 0.192439 0.0592989 0.750766
778 1 0.31397 0.0664772 0.749962
780 1 0.245111 0.0557268 0.810522
807 1 0.185434 0.121681 0.812034
809 1 0.24968 0.12676 0.746731
811 1 0.299526 0.115861 0.811181
775 1 0.180097 0.0053913 0.820043
656 1 0.368644 0.055332 0.684148
687 1 0.438836 0.129767 0.686854
782 1 0.436124 0.0726336 0.748047
784 1 0.364022 0.057688 0.806643
813 1 0.36654 0.117435 0.745043
815 1 0.436212 0.135468 0.801063
660 1 0.494369 0.0615541 0.681535
788 1 0.495001 0.069678 0.798511
783 1 0.434702 0.00210318 0.807104
789 1 0.627961 0.008086 0.754316
787 1 0.560549 0.00540985 0.815863
817 1 0.500518 0.130181 0.746166
664 1 0.630862 0.0610571 0.689599
691 1 0.563308 0.126436 0.685856
786 1 0.561355 0.0702487 0.749482
792 1 0.623475 0.0667451 0.814124
819 1 0.554524 0.129899 0.813432
821 1 0.627088 0.126611 0.753559
793 1 0.755324 0.00146496 0.750426
791 1 0.690745 0.0032831 0.814177
668 1 0.743744 0.0692725 0.68451
695 1 0.674656 0.130546 0.690206
699 1 0.807763 0.125826 0.684891
790 1 0.695393 0.06685 0.751011
794 1 0.813101 0.0721961 0.760031
796 1 0.755597 0.0601523 0.810878
823 1 0.687631 0.123056 0.806698
825 1 0.747246 0.129825 0.75067
827 1 0.814041 0.12924 0.821644
667 1 0.813049 0.00455409 0.690862
663 1 0.691036 0.0017942 0.683721
799 1 0.938433 0.00199386 0.811744
644 1 0.999889 0.0651257 0.680902
797 1 0.876195 0.0027061 0.746412
801 1 0.999624 0.125968 0.745551
672 1 0.866917 0.0704702 0.695819
703 1 0.939225 0.133525 0.6865
798 1 0.941107 0.0636447 0.750567
800 1 0.878959 0.0612945 0.810759
829 1 0.878543 0.133138 0.750409
831 1 0.947115 0.128634 0.817372
671 1 0.936513 0.00195513 0.686559
707 1 0.0567452 0.248598 0.689017
680 1 0.122937 0.185624 0.689622
712 1 0.123162 0.311729 0.691704
802 1 0.0695885 0.19251 0.750192
808 1 0.133884 0.184693 0.805449
834 1 0.0641591 0.31576 0.757684
835 1 0.0635511 0.251371 0.814009
837 1 0.126435 0.253905 0.74794
840 1 0.128036 0.316702 0.814438
836 1 0.000639547 0.309102 0.813216
676 1 0.00268381 0.18402 0.690894
804 1 0.00164552 0.184861 0.808198
684 1 0.243197 0.191789 0.6813
711 1 0.182258 0.253506 0.6772
715 1 0.30871 0.260312 0.679244
716 1 0.235495 0.317081 0.674815
806 1 0.189299 0.186615 0.745976
810 1 0.302744 0.186836 0.74181
812 1 0.246652 0.18206 0.808054
838 1 0.190376 0.311499 0.755955
839 1 0.190261 0.246346 0.806219
841 1 0.244945 0.250858 0.742392
842 1 0.301467 0.310569 0.745331
843 1 0.302684 0.250335 0.812656
844 1 0.251458 0.311519 0.814651
688 1 0.373627 0.185534 0.683486
719 1 0.424117 0.256062 0.682183
720 1 0.366802 0.316031 0.693674
814 1 0.42883 0.185742 0.740604
816 1 0.361828 0.184847 0.81001
845 1 0.364855 0.247258 0.749175
846 1 0.427591 0.307711 0.754493
847 1 0.427621 0.246682 0.809567
848 1 0.362402 0.313782 0.807214
692 1 0.492697 0.199556 0.680468
820 1 0.497495 0.184286 0.812129
724 1 0.486976 0.305186 0.683221
849 1 0.501108 0.248168 0.752954
852 1 0.500979 0.309542 0.815048
696 1 0.617485 0.200218 0.68941
723 1 0.552288 0.256883 0.694884
728 1 0.625372 0.319453 0.685955
818 1 0.569516 0.185711 0.751344
824 1 0.627585 0.184865 0.822775
850 1 0.563434 0.30781 0.758464
851 1 0.557167 0.241495 0.822429
853 1 0.623929 0.255181 0.759316
856 1 0.628589 0.323673 0.81331
700 1 0.744682 0.187628 0.684949
727 1 0.68346 0.252739 0.692315
731 1 0.815175 0.255491 0.69226
732 1 0.751456 0.319781 0.693443
822 1 0.687066 0.190573 0.745839
826 1 0.803111 0.189799 0.743831
828 1 0.743736 0.189489 0.812922
854 1 0.695406 0.319635 0.755151
855 1 0.687462 0.248887 0.815172
857 1 0.746936 0.254446 0.753419
858 1 0.807485 0.316472 0.744502
859 1 0.821163 0.247643 0.811978
860 1 0.751906 0.317285 0.814434
708 1 0.998732 0.317619 0.696324
833 1 0.994911 0.248358 0.744842
704 1 0.869949 0.186584 0.682823
735 1 0.936199 0.246848 0.68608
736 1 0.878651 0.320177 0.685767
830 1 0.932131 0.190985 0.756269
832 1 0.878361 0.191364 0.821104
861 1 0.873528 0.24938 0.752941
862 1 0.935368 0.317579 0.757322
863 1 0.933071 0.251271 0.818414
864 1 0.872532 0.323961 0.815274
1797 1 0.119819 0.495765 0.749979
739 1 0.0557871 0.376562 0.696521
744 1 0.117949 0.441948 0.686819
866 1 0.0613815 0.438576 0.756331
867 1 0.0628723 0.370227 0.82298
869 1 0.132396 0.375497 0.75301
872 1 0.117499 0.440129 0.815094
1795 1 0.0590278 0.495924 0.821515
1801 1 0.254104 0.498527 0.763374
1671 1 0.186614 0.497783 0.687088
1803 1 0.315553 0.491356 0.818512
743 1 0.180321 0.37717 0.680092
747 1 0.304786 0.375852 0.69046
748 1 0.2383 0.433113 0.68472
870 1 0.181253 0.43863 0.750149
871 1 0.177135 0.382205 0.817079
873 1 0.237605 0.375871 0.754876
874 1 0.302595 0.432506 0.75101
875 1 0.303816 0.381996 0.81484
876 1 0.244151 0.436813 0.818081
1675 1 0.302741 0.493843 0.69488
1805 1 0.374363 0.499774 0.753094
751 1 0.434987 0.368721 0.695488
752 1 0.364937 0.436383 0.689937
877 1 0.367733 0.37635 0.754262
878 1 0.433026 0.434524 0.750872
879 1 0.441409 0.374049 0.810592
880 1 0.375868 0.441449 0.814873
756 1 0.497775 0.438367 0.69702
1807 1 0.441531 0.498731 0.811072
1809 1 0.498251 0.499382 0.749514
1679 1 0.438488 0.499836 0.696106
1683 1 0.561115 0.499278 0.697419
881 1 0.500171 0.363111 0.756621
884 1 0.500866 0.433355 0.807968
755 1 0.558252 0.369765 0.698138
760 1 0.621461 0.433296 0.692645
882 1 0.562465 0.438846 0.7568
883 1 0.567935 0.374283 0.816354
885 1 0.621531 0.373604 0.749323
888 1 0.629088 0.434371 0.815309
1813 1 0.626462 0.495017 0.758068
1815 1 0.688872 0.499893 0.81742
759 1 0.684132 0.380503 0.686143
763 1 0.816036 0.375035 0.69015
764 1 0.754122 0.446274 0.686556
886 1 0.695129 0.43908 0.748738
887 1 0.692105 0.376315 0.810285
889 1 0.756756 0.381363 0.747136
890 1 0.815592 0.444683 0.750149
891 1 0.814312 0.377616 0.812338
892 1 0.75675 0.439796 0.817781
1823 1 0.926352 0.498269 0.813468
865 1 0.998336 0.373242 0.757999
868 1 0.994865 0.436142 0.810721
740 1 0.996874 0.431382 0.698683
1695 1 0.937526 0.49618 0.691932
767 1 0.941544 0.37358 0.697815
768 1 0.868801 0.448728 0.68437
893 1 0.877606 0.382124 0.748619
894 1 0.927437 0.442764 0.750656
895 1 0.928803 0.382487 0.817631
896 1 0.865849 0.442888 0.817803
1793 1 0.992715 0.49394 0.750044
899 1 0.0612795 0.0050437 0.93913
901 1 0.122834 0.00619409 0.881508
898 1 0.0555217 0.0627826 0.88011
904 1 0.127045 0.0695294 0.942327
931 1 0.0675366 0.130796 0.943739
933 1 0.121583 0.121467 0.885682
900 1 0.000239848 0.0672502 0.943459
929 1 0.00657832 0.129185 0.874615
5 1 0.124175 0.00932353 0.999299
903 1 0.185749 0.00762248 0.944161
10 1 0.318646 0.0613694 0.996619
902 1 0.182632 0.072281 0.874397
906 1 0.306477 0.0550451 0.865734
908 1 0.255825 0.056344 0.939216
935 1 0.191646 0.128303 0.937916
937 1 0.252892 0.127557 0.873589
939 1 0.316603 0.121751 0.930763
913 1 0.49337 0.00747771 0.87778
909 1 0.371146 0.00202364 0.876048
910 1 0.425591 0.0642587 0.868861
912 1 0.37055 0.0631489 0.933092
941 1 0.370474 0.130808 0.866748
943 1 0.436461 0.12817 0.931482
916 1 0.493539 0.0632454 0.941896
945 1 0.494719 0.123936 0.879313
911 1 0.436892 0.00249549 0.937036
45 1 0.372443 0.132167 0.986635
915 1 0.555217 0.000179435 0.936282
18 1 0.566157 0.0574414 0.999473
914 1 0.554772 0.0631903 0.879396
920 1 0.622796 0.0520015 0.936866
947 1 0.557753 0.112002 0.942524
949 1 0.618704 0.121559 0.881963
53 1 0.621488 0.12342 0.999314
923 1 0.81448 0.00346725 0.943919
918 1 0.689557 0.067372 0.873443
922 1 0.806945 0.0559852 0.881171
924 1 0.750383 0.0641593 0.938431
951 1 0.684907 0.111254 0.948361
953 1 0.748408 0.120975 0.869739
955 1 0.818732 0.115409 0.941632
897 1 0.99564 0.000667212 0.874335
925 1 0.882378 0.0030247 0.877454
926 1 0.945754 0.0665551 0.876951
928 1 0.881867 0.0599224 0.938075
957 1 0.880393 0.12767 0.875955
959 1 0.934487 0.127987 0.94782
930 1 0.0691058 0.193806 0.876275
936 1 0.12738 0.193739 0.941599
962 1 0.0612751 0.309793 0.877416
963 1 0.0657896 0.251527 0.940751
965 1 0.135118 0.250128 0.872485
968 1 0.133897 0.309368 0.934351
961 1 0.000968458 0.246137 0.880344
964 1 0.0005994 0.310432 0.941728
65 1 0.00625446 0.249008 0.999912
66 1 0.060373 0.311345 0.998664
42 1 0.312157 0.187783 0.995361
934 1 0.188828 0.183102 0.869915
938 1 0.305607 0.19726 0.876845
940 1 0.250092 0.19753 0.93661
966 1 0.187973 0.311414 0.868865
967 1 0.19433 0.255331 0.933841
969 1 0.243571 0.250497 0.871267
970 1 0.307712 0.312554 0.876334
971 1 0.312475 0.25396 0.946099
972 1 0.248611 0.312668 0.938412
70 1 0.184843 0.31044 0.997095
38 1 0.19642 0.181047 0.999136
78 1 0.437051 0.309789 0.997555
973 1 0.364853 0.25026 0.871449
944 1 0.375552 0.190308 0.927104
942 1 0.434757 0.185244 0.87061
976 1 0.368445 0.313169 0.933939
975 1 0.431901 0.249903 0.930762
974 1 0.428396 0.309201 0.876241
980 1 0.496028 0.30875 0.92984
948 1 0.499708 0.182457 0.939856
977 1 0.490724 0.246779 0.871567
77 1 0.372748 0.247825 0.997365
46 1 0.437888 0.193998 0.993139
81 1 0.503062 0.251655 0.997471
50 1 0.575688 0.189328 0.995369
85 1 0.632944 0.249769 0.995987
979 1 0.55524 0.246817 0.930807
981 1 0.62727 0.245657 0.880664
978 1 0.567346 0.30684 0.867227
984 1 0.624846 0.30791 0.936352
946 1 0.556056 0.183486 0.878612
952 1 0.637268 0.185947 0.935206
82 1 0.566391 0.305975 0.995522
86 1 0.684082 0.315988 0.995014
983 1 0.690242 0.246219 0.93294
954 1 0.80881 0.179819 0.88401
982 1 0.68289 0.313519 0.881353
986 1 0.816286 0.319758 0.881813
988 1 0.748055 0.311906 0.934952
950 1 0.689995 0.183515 0.87065
985 1 0.750251 0.245582 0.877467
987 1 0.812794 0.253733 0.939021
956 1 0.741299 0.175413 0.945861
89 1 0.745369 0.256281 0.99555
90 1 0.807598 0.315304 0.996842
932 1 0.999088 0.184568 0.937798
960 1 0.875951 0.189843 0.930117
991 1 0.940705 0.254942 0.941136
958 1 0.939258 0.194667 0.877338
992 1 0.879362 0.308598 0.946532
989 1 0.874271 0.26029 0.878486
990 1 0.944903 0.319625 0.877512
1923 1 0.0629493 0.498388 0.949693
995 1 0.0643745 0.367894 0.937658
1000 1 0.123494 0.435176 0.944016
997 1 0.123709 0.37618 0.885466
994 1 0.0522784 0.437886 0.882706
98 1 0.060169 0.431699 0.999102
1931 1 0.310373 0.499541 0.942233
1002 1 0.319497 0.435279 0.880218
1003 1 0.307242 0.377449 0.942893
1001 1 0.246309 0.382931 0.876187
998 1 0.173894 0.437356 0.877726
1004 1 0.249458 0.440587 0.941329
999 1 0.191593 0.378553 0.941222
1935 1 0.44196 0.491107 0.931039
1008 1 0.376621 0.437559 0.942794
1005 1 0.376253 0.376141 0.868145
110 1 0.435379 0.439735 0.999144
1006 1 0.434894 0.43876 0.866698
1007 1 0.432921 0.376281 0.928174
109 1 0.379448 0.370589 0.993874
1013 1 0.625886 0.377723 0.875556
1009 1 0.504922 0.373442 0.876519
1012 1 0.504505 0.438053 0.929542
1010 1 0.562949 0.441458 0.872225
1011 1 0.564553 0.37875 0.940062
1016 1 0.617203 0.438811 0.931463
1045 1 0.620271 0.498017 0.992266
121 1 0.740964 0.378555 0.996591
118 1 0.676781 0.433312 0.998123
122 1 0.812088 0.440898 0.998616
1017 1 0.747302 0.377329 0.877122
1015 1 0.681871 0.380474 0.937883
1018 1 0.806437 0.437115 0.87789
1020 1 0.744073 0.444109 0.947695
1014 1 0.688398 0.450115 0.88317
1019 1 0.810499 0.379521 0.947365
1947 1 0.820099 0.499061 0.935386
1024 1 0.88645 0.444743 0.940644
1023 1 0.939616 0.379793 0.945378
1022 1 0.936705 0.440042 0.874711
1021 1 0.877186 0.376375 0.889309
1921 1 0.994599 0.498257 0.874248
996 1 0.996757 0.445371 0.942683
993 1 0.996594 0.380287 0.878188
125 1 0.872365 0.379008 0.999838
1032 1 0.123337 0.560573 0.0693952
1059 1 0.0548818 0.624202 0.0654376
1154 1 0.0585238 0.558605 0.12532
1189 1 0.119351 0.629147 0.117175
1061 1 0.123379 0.624243 0.00540681
1029 1 0.134463 0.501693 0.00449949
1026 1 0.0559648 0.565729 0.009638
1036 1 0.250205 0.563731 0.0630249
1063 1 0.185957 0.619091 0.0628658
1067 1 0.307039 0.629187 0.0689701
1158 1 0.191133 0.558362 0.123051
1162 1 0.313135 0.565336 0.122048
1193 1 0.24994 0.627089 0.123658
1161 1 0.256975 0.505698 0.129468
1065 1 0.253662 0.624555 0.00137982
1030 1 0.18577 0.567409 0.00300649
1035 1 0.317182 0.500085 0.066397
1034 1 0.31109 0.554253 0.00710569
1033 1 0.242567 0.504977 0.0047797
1040 1 0.381745 0.555144 0.0614741
1071 1 0.435204 0.624965 0.0560759
1166 1 0.437069 0.558318 0.120723
1197 1 0.379136 0.627755 0.118923
1044 1 0.497001 0.558807 0.061636
1069 1 0.368525 0.617787 0.00365692
1038 1 0.4418 0.561101 0.00529595
1201 1 0.500693 0.624859 0.123562
1048 1 0.626564 0.565937 0.0585019
1075 1 0.563995 0.627535 0.0597615
1170 1 0.558846 0.561432 0.121288
1205 1 0.622577 0.635273 0.115532
1173 1 0.620589 0.503645 0.118793
1046 1 0.691165 0.559063 0.00376413
1052 1 0.754942 0.568985 0.0640089
1079 1 0.693955 0.625543 0.0581288
1083 1 0.816361 0.628913 0.0642424
1174 1 0.682121 0.560094 0.118394
1178 1 0.816008 0.57044 0.125643
1209 1 0.755295 0.630581 0.124361
1050 1 0.822138 0.567503 0.00767455
1049 1 0.757514 0.506516 0.000234039
1051 1 0.81748 0.503434 0.0642058
1028 1 0.988746 0.565196 0.0660308
1185 1 0.99394 0.620864 0.124848
1056 1 0.877103 0.559194 0.0600475
1087 1 0.934302 0.631304 0.0579823
1182 1 0.930649 0.562194 0.125349
1213 1 0.875025 0.620688 0.114715
1085 1 0.880228 0.629998 0.00311791
1025 1 0.992823 0.503793 0.00186612
1054 1 0.937423 0.565909 0.00549838
1057 1 0.994527 0.625972 0.00277323
1090 1 0.0647806 0.817089 0.00684543
1064 1 0.125945 0.688864 0.0629464
1091 1 0.0629295 0.752662 0.0743194
1096 1 0.118986 0.816714 0.0675398
1186 1 0.0604977 0.685292 0.122167
1218 1 0.0586507 0.816509 0.1247
1221 1 0.126137 0.742291 0.124969
1217 1 0.00392153 0.755838 0.127758
1092 1 0.00128865 0.818743 0.0616656
1093 1 0.122084 0.748164 0.00498665
1089 1 0.00847242 0.75096 0.00647201
1068 1 0.250774 0.68441 0.0592011
1095 1 0.187939 0.74992 0.0618228
1099 1 0.317774 0.750041 0.0519665
1100 1 0.255952 0.807776 0.0604752
1190 1 0.190856 0.690892 0.119849
1194 1 0.314123 0.684161 0.130358
1222 1 0.188209 0.812017 0.12775
1225 1 0.249998 0.750498 0.120827
1226 1 0.321948 0.817074 0.128608
1062 1 0.184251 0.687674 0.00291969
1072 1 0.36816 0.683176 0.0606539
1103 1 0.439643 0.743757 0.0644488
1104 1 0.383221 0.810068 0.061197
1198 1 0.443413 0.682231 0.118436
1229 1 0.375144 0.749471 0.118775
1230 1 0.447237 0.809305 0.122678
1108 1 0.496283 0.808364 0.0590627
1102 1 0.436705 0.809285 0.00175194
1233 1 0.502945 0.746539 0.119987
1076 1 0.502189 0.68263 0.0557975
1080 1 0.623132 0.694751 0.060107
1107 1 0.562579 0.750535 0.0553849
1112 1 0.63305 0.810515 0.0524618
1202 1 0.565281 0.695815 0.118993
1234 1 0.560725 0.815786 0.119899
1237 1 0.625272 0.762301 0.119921
1084 1 0.755246 0.685803 0.0590482
1111 1 0.694456 0.753428 0.0593082
1115 1 0.815872 0.7506 0.066954
1116 1 0.753734 0.810691 0.0594453
1206 1 0.690584 0.685912 0.117782
1210 1 0.818246 0.689925 0.121913
1238 1 0.688487 0.814942 0.123439
1241 1 0.748962 0.75308 0.121402
1242 1 0.820529 0.819188 0.119622
1082 1 0.817592 0.690674 0.00643256
1113 1 0.760993 0.748965 0.000950661
1078 1 0.684794 0.682301 0.00119638
1086 1 0.93881 0.694332 0.00638842
1060 1 0.997063 0.690469 0.0618444
1117 1 0.878951 0.750338 0.00249407
1088 1 0.879684 0.688094 0.0655788
1119 1 0.943897 0.749302 0.0646652
1120 1 0.879587 0.814392 0.0644169
1214 1 0.936081 0.676247 0.129615
1245 1 0.882225 0.751399 0.121672
1246 1 0.947709 0.816244 0.117453
1118 1 0.940068 0.812634 0.00438242
1123 1 0.0677413 0.878388 0.0655694
1128 1 0.125563 0.955178 0.057296
1250 1 0.0679797 0.940717 0.12179
1253 1 0.127527 0.876221 0.124827
1249 1 0.00150694 0.875059 0.127521
1124 1 0.00854869 0.941952 0.0641185
1125 1 0.129905 0.875415 0.00513504
1127 1 0.18624 0.875684 0.069364
1131 1 0.309683 0.871316 0.0538618
1132 1 0.250094 0.936736 0.0573411
1254 1 0.188725 0.940444 0.117676
1257 1 0.255306 0.869266 0.128162
1258 1 0.314723 0.937474 0.118079
11 1 0.316806 0.999613 0.0581834
1126 1 0.186984 0.942793 0.008099
1130 1 0.316275 0.93743 0.000744087
1135 1 0.438513 0.881371 0.0595115
1136 1 0.376366 0.935899 0.06036
1261 1 0.381772 0.875393 0.116078
1262 1 0.438994 0.937151 0.11687
1140 1 0.49879 0.938957 0.0653509
141 1 0.377434 0.993892 0.123186
1137 1 0.496306 0.87282 0.00644141
1265 1 0.505732 0.883797 0.129153
1139 1 0.567206 0.878607 0.0609333
1144 1 0.622589 0.935601 0.0677653
1266 1 0.563319 0.946433 0.125065
1269 1 0.625858 0.870015 0.128669
21 1 0.614138 0.995617 0.00961819
1141 1 0.622847 0.877817 0.0020733
23 1 0.676844 0.998508 0.0640805
153 1 0.748202 0.9955 0.129274
1143 1 0.695188 0.875476 0.0573997
1147 1 0.817315 0.880493 0.0615676
1148 1 0.746437 0.942606 0.0583016
1270 1 0.683664 0.937222 0.129664
1273 1 0.753847 0.872635 0.118503
1274 1 0.809046 0.936992 0.125758
27 1 0.819132 0.997511 0.0685613
31 1 0.940555 0.996699 0.0665835
1151 1 0.947481 0.88064 0.0694714
1152 1 0.881046 0.933152 0.068424
1277 1 0.877047 0.880449 0.13084
1278 1 0.937095 0.942219 0.128505
157 1 0.877427 0.99894 0.126564
1283 1 0.062645 0.507792 0.312086
1281 1 0.00119332 0.500831 0.252934
1160 1 0.125633 0.571902 0.177126
1187 1 0.062069 0.620007 0.178533
1282 1 0.067425 0.564089 0.247112
1288 1 0.127493 0.56189 0.310566
1315 1 0.0628223 0.625265 0.309546
1317 1 0.121509 0.625784 0.248347
1156 1 0.00129715 0.562967 0.189192
1155 1 0.0727329 0.507684 0.189997
1291 1 0.310965 0.505053 0.314607
1159 1 0.18801 0.504046 0.190245
1164 1 0.248402 0.566876 0.191218
1191 1 0.184548 0.626177 0.176827
1195 1 0.310226 0.621098 0.188592
1286 1 0.18882 0.56143 0.247181
1290 1 0.306301 0.555207 0.248351
1292 1 0.250118 0.569428 0.315245
1319 1 0.182921 0.623097 0.31689
1321 1 0.243073 0.628021 0.245438
1323 1 0.310669 0.631173 0.312782
1293 1 0.376398 0.501478 0.250292
1168 1 0.374456 0.557056 0.183423
1199 1 0.431745 0.619663 0.180828
1294 1 0.429403 0.565352 0.246034
1296 1 0.378594 0.566707 0.311529
1325 1 0.368455 0.618721 0.246048
1327 1 0.442721 0.623158 0.315511
1300 1 0.496183 0.556978 0.308737
1329 1 0.500988 0.618486 0.249717
1172 1 0.501155 0.566548 0.182292
1176 1 0.631566 0.567055 0.17925
1203 1 0.567414 0.622867 0.183454
1298 1 0.565781 0.561374 0.243871
1304 1 0.623582 0.562956 0.313042
1331 1 0.5585 0.627329 0.304127
1333 1 0.630399 0.624742 0.244752
1297 1 0.504477 0.500712 0.243518
1299 1 0.564996 0.504687 0.312796
1301 1 0.63012 0.503788 0.249582
1180 1 0.748115 0.561939 0.174537
1207 1 0.695007 0.629948 0.180143
1211 1 0.808476 0.625184 0.188917
1302 1 0.694001 0.559756 0.24344
1306 1 0.804969 0.559161 0.24614
1308 1 0.749068 0.566021 0.305594
1335 1 0.6888 0.629895 0.300414
1337 1 0.750272 0.624721 0.243703
1339 1 0.809607 0.624482 0.310355
1309 1 0.874782 0.500318 0.247434
1284 1 0.999815 0.558539 0.316464
1313 1 0.999068 0.623475 0.253647
1184 1 0.872906 0.562584 0.188929
1215 1 0.934197 0.621704 0.191057
1310 1 0.937004 0.557836 0.246734
1312 1 0.869935 0.558739 0.306619
1341 1 0.866283 0.625832 0.248973
1343 1 0.930625 0.623874 0.303976
1192 1 0.121315 0.682144 0.182397
1219 1 0.0658642 0.74876 0.181109
1224 1 0.123441 0.806822 0.185771
1314 1 0.067567 0.680242 0.244918
1320 1 0.121387 0.684503 0.315661
1346 1 0.0657647 0.811514 0.251259
1347 1 0.0682318 0.749577 0.310185
1349 1 0.128515 0.748702 0.245367
1352 1 0.129667 0.803321 0.30593
1316 1 0.00889064 0.691355 0.309281
1348 1 0.00553849 0.805411 0.308502
1188 1 0.00555038 0.683911 0.189657
1220 1 0.00562829 0.815705 0.187104
1345 1 0.00252701 0.749641 0.251122
1196 1 0.250136 0.680218 0.18203
1223 1 0.187319 0.742312 0.186541
1227 1 0.303428 0.746647 0.181104
1228 1 0.256744 0.817229 0.194669
1318 1 0.182045 0.684368 0.246889
1322 1 0.309717 0.690109 0.247699
1324 1 0.24817 0.679983 0.311438
1350 1 0.187132 0.808167 0.242536
1351 1 0.194924 0.749163 0.308212
1353 1 0.254505 0.757832 0.252958
1354 1 0.32317 0.817407 0.248386
1355 1 0.309241 0.749002 0.320474
1356 1 0.253366 0.812762 0.31526
1200 1 0.375827 0.686961 0.188402
1231 1 0.434284 0.740784 0.178755
1232 1 0.378772 0.809647 0.18903
1326 1 0.429806 0.676622 0.246093
1328 1 0.373572 0.681097 0.306275
1357 1 0.376078 0.745004 0.247799
1358 1 0.44257 0.804676 0.235684
1359 1 0.438002 0.742709 0.305655
1360 1 0.381418 0.803988 0.316611
1364 1 0.498394 0.808406 0.309765
1332 1 0.497658 0.686486 0.301762
1204 1 0.496164 0.688917 0.188828
1361 1 0.502942 0.750365 0.247656
1236 1 0.503074 0.814209 0.181765
1208 1 0.626205 0.69269 0.17999
1235 1 0.556636 0.749701 0.186022
1330 1 0.561246 0.691831 0.245483
1336 1 0.622818 0.689496 0.311197
1363 1 0.566052 0.7559 0.305096
1365 1 0.624614 0.74734 0.247743
1368 1 0.632085 0.809917 0.308462
1240 1 0.636376 0.805363 0.195089
1362 1 0.567843 0.812755 0.249564
1212 1 0.743782 0.698521 0.184747
1239 1 0.688726 0.750976 0.186648
1243 1 0.813274 0.749533 0.181944
1244 1 0.748021 0.812946 0.181916
1334 1 0.687421 0.696684 0.25018
1338 1 0.805068 0.682694 0.250297
1340 1 0.745689 0.687716 0.30379
1366 1 0.698684 0.807087 0.252646
1367 1 0.685216 0.747834 0.311093
1369 1 0.762617 0.754917 0.250067
1370 1 0.813176 0.813915 0.254681
1371 1 0.814587 0.750045 0.318213
1372 1 0.746236 0.816894 0.313889
1216 1 0.873006 0.688916 0.185072
1247 1 0.938975 0.753552 0.182833
1248 1 0.874625 0.812647 0.184601
1342 1 0.936253 0.688833 0.242172
1344 1 0.872415 0.689927 0.295685
1373 1 0.871555 0.752639 0.251582
1374 1 0.945465 0.809778 0.243781
1375 1 0.939825 0.740297 0.30529
1376 1 0.880744 0.815444 0.316206
1251 1 0.0629316 0.883502 0.182283
1256 1 0.129947 0.943632 0.18093
1378 1 0.0680711 0.935206 0.249306
1379 1 0.0621226 0.872506 0.310709
1381 1 0.12377 0.875542 0.243287
1384 1 0.130905 0.933322 0.313781
1377 1 0.00271139 0.879014 0.245199
261 1 0.129025 0.991171 0.256219
135 1 0.192878 0.996585 0.185668
1255 1 0.190273 0.877806 0.17825
1259 1 0.318247 0.877422 0.184444
1260 1 0.264307 0.939578 0.17786
1382 1 0.188791 0.933783 0.239815
1383 1 0.190964 0.870725 0.306764
1385 1 0.251604 0.875399 0.248981
1386 1 0.310184 0.938797 0.248039
1387 1 0.313521 0.873651 0.311153
1388 1 0.252573 0.932599 0.312081
265 1 0.246237 0.986803 0.249918
263 1 0.197394 0.996624 0.309041
1263 1 0.443416 0.879912 0.182599
1264 1 0.374795 0.936869 0.181855
1389 1 0.381421 0.872497 0.249555
1390 1 0.436095 0.931075 0.245667
1391 1 0.433585 0.866596 0.314556
1392 1 0.383266 0.933989 0.306872
1268 1 0.499071 0.938318 0.183986
1393 1 0.499808 0.869319 0.251289
271 1 0.441281 0.999322 0.315895
143 1 0.443972 0.997772 0.187541
275 1 0.569112 0.998609 0.312686
277 1 0.630301 0.997724 0.255982
1396 1 0.500789 0.94428 0.304847
1267 1 0.563561 0.869175 0.192149
1272 1 0.623925 0.935229 0.185471
1394 1 0.571528 0.940555 0.248058
1395 1 0.565749 0.878778 0.308217
1397 1 0.628691 0.86774 0.247956
1400 1 0.630768 0.932615 0.316241
1271 1 0.685567 0.874085 0.188878
1275 1 0.81288 0.880242 0.190026
1276 1 0.742468 0.935408 0.190044
1398 1 0.69 0.936625 0.261557
1399 1 0.686085 0.872537 0.312539
1401 1 0.755178 0.875745 0.253877
1402 1 0.804275 0.937611 0.258994
1403 1 0.815481 0.87646 0.322073
1404 1 0.748483 0.941403 0.321674
1380 1 0.992871 0.935847 0.309239
1252 1 0.99976 0.938809 0.191236
1279 1 0.932064 0.869675 0.192263
1280 1 0.866105 0.936473 0.196878
1405 1 0.879528 0.871521 0.252609
1406 1 0.931259 0.937522 0.252014
1407 1 0.945321 0.871211 0.310519
1408 1 0.870071 0.940235 0.311838
1411 1 0.0554653 0.5025 0.443998
1413 1 0.12576 0.505361 0.374534
1410 1 0.057914 0.565158 0.384669
1416 1 0.124348 0.556287 0.440408
1443 1 0.0596079 0.630921 0.439129
1445 1 0.121513 0.624217 0.378413
1412 1 0.00306583 0.564244 0.442132
1538 1 0.0622683 0.572828 0.494632
1441 1 0.00255037 0.623518 0.370752
1573 1 0.131707 0.618626 0.496666
1417 1 0.255329 0.502375 0.371064
1449 1 0.250743 0.622499 0.373701
1451 1 0.306974 0.62328 0.441374
1420 1 0.245607 0.55697 0.433311
1447 1 0.192644 0.621207 0.437126
1414 1 0.189204 0.558252 0.373546
1418 1 0.312037 0.561892 0.378326
1415 1 0.187586 0.500534 0.438394
1546 1 0.310912 0.563565 0.497547
1423 1 0.442588 0.503032 0.441072
1421 1 0.378256 0.503607 0.369805
1581 1 0.372608 0.619 0.490808
1550 1 0.437702 0.559319 0.498891
1422 1 0.44423 0.559474 0.377783
1455 1 0.440558 0.623515 0.440004
1424 1 0.374968 0.561825 0.436399
1453 1 0.37836 0.622076 0.377875
1585 1 0.494005 0.623079 0.49968
1425 1 0.503399 0.502466 0.373554
1428 1 0.505261 0.562077 0.444596
1457 1 0.502611 0.617591 0.378046
1461 1 0.626716 0.625587 0.365975
1426 1 0.56657 0.555145 0.383209
1432 1 0.627727 0.561702 0.443909
1459 1 0.569385 0.62493 0.432676
1429 1 0.630111 0.501032 0.376533
1435 1 0.811004 0.507789 0.441036
1590 1 0.680059 0.664201 0.492166
1430 1 0.687126 0.566108 0.367509
1463 1 0.685381 0.623455 0.426036
1436 1 0.746217 0.555463 0.439463
1434 1 0.811435 0.565633 0.369177
1465 1 0.745865 0.624908 0.369557
1467 1 0.808891 0.622248 0.433172
1562 1 0.809755 0.567073 0.4958
1593 1 0.748606 0.62637 0.496593
1437 1 0.881821 0.508052 0.378138
1439 1 0.933556 0.505516 0.446432
1440 1 0.877457 0.572314 0.440259
1469 1 0.870947 0.635785 0.363719
1471 1 0.942682 0.622367 0.437492
1438 1 0.932776 0.576708 0.369848
1602 1 0.0665041 0.814699 0.497904
1480 1 0.121618 0.820358 0.438743
1477 1 0.128411 0.749664 0.374391
1442 1 0.0615653 0.68764 0.377119
1475 1 0.0642824 0.744337 0.438189
1448 1 0.128259 0.686715 0.443631
1474 1 0.0615422 0.823333 0.371228
1476 1 0.0057104 0.815158 0.442266
1473 1 0.00909688 0.751311 0.368636
1609 1 0.239927 0.748911 0.497697
1482 1 0.309355 0.807271 0.373507
1478 1 0.187462 0.819582 0.365618
1484 1 0.250812 0.817016 0.43772
1479 1 0.187238 0.752099 0.439291
1452 1 0.243464 0.688668 0.439954
1481 1 0.241248 0.75322 0.38629
1450 1 0.302461 0.691466 0.379904
1446 1 0.186081 0.683287 0.374191
1483 1 0.310059 0.748857 0.441749
1578 1 0.314095 0.681443 0.496152
1613 1 0.369102 0.744865 0.496484
1614 1 0.434518 0.813058 0.493987
1488 1 0.373143 0.812348 0.430071
1487 1 0.436061 0.737935 0.44268
1454 1 0.432242 0.684247 0.375743
1456 1 0.36648 0.682421 0.431675
1486 1 0.441088 0.808763 0.377365
1485 1 0.374263 0.74516 0.373837
1489 1 0.497436 0.745385 0.373701
1617 1 0.498596 0.748594 0.497043
1621 1 0.640303 0.746537 0.496996
1492 1 0.502427 0.807847 0.436257
1460 1 0.502814 0.677287 0.440215
1490 1 0.554542 0.810929 0.371185
1458 1 0.556962 0.68768 0.374916
1464 1 0.623086 0.689771 0.435457
1493 1 0.625082 0.750963 0.377121
1496 1 0.629568 0.809101 0.441104
1491 1 0.576186 0.748488 0.439619
1626 1 0.806915 0.819438 0.493199
1466 1 0.805691 0.681642 0.372171
1497 1 0.748231 0.747585 0.373
1498 1 0.82431 0.808521 0.377481
1495 1 0.688266 0.736482 0.434952
1499 1 0.809361 0.752132 0.439567
1462 1 0.679322 0.687345 0.368053
1468 1 0.748332 0.686705 0.436848
1500 1 0.746469 0.81479 0.431738
1494 1 0.683871 0.805598 0.381416
1622 1 0.693895 0.8108 0.498953
1444 1 0.996368 0.687442 0.438677
1629 1 0.868257 0.754915 0.496523
1501 1 0.884665 0.745234 0.370901
1504 1 0.875833 0.813402 0.440608
1502 1 0.94833 0.803593 0.373288
1470 1 0.943407 0.681572 0.370515
1472 1 0.882858 0.687956 0.441179
1503 1 0.940278 0.755686 0.438066
1637 1 0.121278 0.876871 0.49736
1634 1 0.0611565 0.939256 0.493272
385 1 0.00104268 0.9999 0.373703
389 1 0.126037 0.998054 0.371823
1506 1 0.0637327 0.942364 0.365029
1512 1 0.125257 0.941605 0.435469
1509 1 0.121765 0.876383 0.366645
1507 1 0.066598 0.884364 0.431216
1505 1 0.00115062 0.882692 0.372172
387 1 0.0689596 0.999496 0.438848
393 1 0.252246 0.999615 0.376489
391 1 0.196008 0.990443 0.43458
1516 1 0.263853 0.942546 0.436543
1511 1 0.187267 0.87515 0.428823
1513 1 0.252081 0.87568 0.372816
1515 1 0.318 0.88283 0.432063
1510 1 0.182287 0.931079 0.370886
1514 1 0.313257 0.942809 0.364878
521 1 0.26367 0.999715 0.494697
1641 1 0.254124 0.875067 0.496374
1638 1 0.185991 0.927032 0.497565
397 1 0.374869 0.998446 0.374766
1645 1 0.376261 0.873734 0.498182
1520 1 0.381843 0.934223 0.429925
1517 1 0.375263 0.867918 0.370586
1518 1 0.439026 0.937452 0.372089
1519 1 0.43683 0.872705 0.430627
399 1 0.446287 0.99637 0.437713
1646 1 0.43779 0.935337 0.494418
1649 1 0.503611 0.871541 0.49853
1523 1 0.56444 0.867779 0.435626
1525 1 0.623305 0.873466 0.376082
1528 1 0.622426 0.951184 0.439184
1650 1 0.566145 0.9279 0.497763
1521 1 0.500136 0.877009 0.36735
1522 1 0.557975 0.94399 0.378393
1524 1 0.501299 0.933146 0.439355
1653 1 0.620158 0.870916 0.495947
1654 1 0.679652 0.936545 0.49318
1529 1 0.747505 0.873558 0.374525
1532 1 0.749341 0.938255 0.436435
407 1 0.691863 0.999959 0.440626
1527 1 0.682407 0.879056 0.434397
1526 1 0.688684 0.938746 0.373565
1530 1 0.817367 0.940357 0.373027
1531 1 0.811776 0.875856 0.431446
411 1 0.80639 0.994811 0.442933
1657 1 0.750036 0.875606 0.497199
413 1 0.875476 0.998306 0.373839
1533 1 0.880145 0.87657 0.376295
1534 1 0.93864 0.942672 0.376262
1535 1 0.938028 0.867609 0.441721
1536 1 0.877031 0.928474 0.442575
1508 1 0.996913 0.939686 0.438
541 1 0.867608 0.998251 0.498125
1544 1 0.119034 0.558835 0.558959
1571 1 0.0622497 0.624514 0.555996
1666 1 0.0623058 0.561177 0.623441
1701 1 0.119241 0.626076 0.624157
1669 1 0.118194 0.50082 0.621859
1539 1 0.0564996 0.501978 0.563569
1569 1 0.00100256 0.628324 0.500892
1548 1 0.249173 0.556247 0.563001
1575 1 0.187547 0.618584 0.57098
1579 1 0.325862 0.622661 0.556394
1670 1 0.176815 0.562281 0.631317
1674 1 0.310627 0.566355 0.620664
1705 1 0.25216 0.623421 0.635799
1577 1 0.246053 0.618743 0.503303
1673 1 0.251988 0.5052 0.629215
1543 1 0.184152 0.500821 0.574027
1542 1 0.189115 0.559321 0.501209
1552 1 0.381908 0.558366 0.570188
1583 1 0.439386 0.632372 0.568168
1678 1 0.437038 0.558483 0.632116
1709 1 0.373042 0.629 0.626758
1556 1 0.496682 0.558214 0.557535
1713 1 0.498145 0.627534 0.633548
1681 1 0.499543 0.50003 0.635116
1560 1 0.614556 0.556281 0.566657
1587 1 0.559893 0.631758 0.560061
1682 1 0.555905 0.567391 0.621158
1717 1 0.62741 0.617741 0.626296
1589 1 0.617261 0.616516 0.50318
1554 1 0.562074 0.556583 0.501442
1563 1 0.811225 0.508185 0.560117
1564 1 0.747734 0.561416 0.568511
1591 1 0.678531 0.617738 0.560444
1595 1 0.810336 0.625436 0.566117
1686 1 0.688642 0.55909 0.637881
1690 1 0.813954 0.561745 0.618414
1721 1 0.747103 0.624709 0.631495
1689 1 0.755999 0.5052 0.629425
1558 1 0.689747 0.555419 0.505059
1693 1 0.872997 0.501304 0.61738
1697 1 0.992815 0.625613 0.634273
1540 1 0.997993 0.566601 0.558286
1568 1 0.87479 0.566887 0.560951
1599 1 0.93999 0.629393 0.566069
1694 1 0.932354 0.56247 0.624228
1725 1 0.88093 0.626316 0.620311
1597 1 0.875068 0.632727 0.50623
1567 1 0.930587 0.504879 0.555144
1537 1 0.996044 0.503128 0.502183
1665 1 0.995877 0.503905 0.6222
1565 1 0.872761 0.506204 0.501038
1566 1 0.93836 0.568262 0.504545
1570 1 0.0641096 0.689474 0.504569
1605 1 0.128333 0.755729 0.504609
1576 1 0.123313 0.688005 0.561624
1603 1 0.0637295 0.754081 0.565347
1608 1 0.119373 0.817291 0.560459
1698 1 0.0558616 0.687858 0.621818
1730 1 0.0653288 0.815964 0.630712
1733 1 0.123736 0.754145 0.624311
1729 1 0.00517334 0.753756 0.630001
1572 1 0.000624174 0.686709 0.563886
1601 1 0.000998443 0.743621 0.500798
1580 1 0.252475 0.679551 0.566297
1607 1 0.186525 0.753887 0.572127
1611 1 0.311721 0.744349 0.560013
1612 1 0.249538 0.811428 0.557614
1702 1 0.190241 0.684821 0.62826
1706 1 0.310456 0.680721 0.627007
1734 1 0.183218 0.820738 0.627
1737 1 0.256088 0.74138 0.624233
1738 1 0.308336 0.813387 0.621994
1574 1 0.185285 0.683019 0.503614
1606 1 0.182582 0.817984 0.500264
1610 1 0.312026 0.810989 0.501657
1584 1 0.374085 0.689632 0.558912
1615 1 0.437456 0.746392 0.564988
1616 1 0.374605 0.80291 0.556984
1710 1 0.433157 0.68523 0.630239
1741 1 0.373031 0.745513 0.626219
1742 1 0.430077 0.808817 0.626754
1620 1 0.499628 0.813417 0.56158
1745 1 0.495118 0.748997 0.623059
1582 1 0.436609 0.686037 0.505048
1586 1 0.560263 0.689836 0.500605
1588 1 0.502896 0.689405 0.560717
1592 1 0.623943 0.680723 0.556968
1619 1 0.562727 0.753175 0.56814
1624 1 0.627815 0.805504 0.561308
1714 1 0.561984 0.687005 0.617026
1746 1 0.579373 0.813032 0.629986
1749 1 0.622096 0.747815 0.624301
1618 1 0.566936 0.805463 0.5003
1596 1 0.752166 0.67996 0.562281
1623 1 0.693012 0.747008 0.57195
1627 1 0.81554 0.751219 0.562735
1628 1 0.750553 0.807019 0.559189
1718 1 0.684203 0.682665 0.624207
1722 1 0.819246 0.686438 0.621419
1750 1 0.673784 0.812397 0.625214
1753 1 0.753367 0.7511 0.626049
1754 1 0.813362 0.815413 0.627734
1594 1 0.806022 0.68406 0.500437
1625 1 0.75078 0.750653 0.506146
1604 1 0.995347 0.811066 0.569382
1600 1 0.880317 0.694439 0.562016
1631 1 0.938456 0.750143 0.563102
1632 1 0.881275 0.808861 0.564992
1726 1 0.941066 0.697449 0.62793
1757 1 0.87879 0.752309 0.634008
1758 1 0.937595 0.806078 0.628623
1630 1 0.934778 0.80325 0.502442
1598 1 0.939184 0.69032 0.50195
1635 1 0.0541817 0.870841 0.56846
1640 1 0.12635 0.93388 0.562746
1762 1 0.0681476 0.932663 0.620367
1765 1 0.122891 0.87666 0.622708
1636 1 0.00157425 0.938534 0.562022
515 1 0.0703673 0.999518 0.560968
645 1 0.131743 0.995555 0.625593
1633 1 0.00532228 0.873063 0.504375
517 1 0.130482 0.993892 0.503383
1639 1 0.188148 0.877234 0.56338
1643 1 0.31236 0.869711 0.563016
1644 1 0.252374 0.933924 0.559707
1766 1 0.18728 0.937412 0.622797
1769 1 0.25338 0.879961 0.621401
1770 1 0.313217 0.932796 0.620364
649 1 0.247137 0.992232 0.621698
523 1 0.310493 0.996307 0.565896
519 1 0.192141 0.995749 0.55773
1642 1 0.315905 0.938842 0.500896
527 1 0.442207 0.990902 0.558618
1647 1 0.435686 0.866802 0.56337
1648 1 0.376841 0.940462 0.567177
1773 1 0.374848 0.875037 0.623439
1774 1 0.449204 0.93334 0.620878
653 1 0.373752 0.995633 0.623835
657 1 0.501091 0.995658 0.625647
1652 1 0.508506 0.927614 0.557993
1777 1 0.514153 0.876984 0.622685
1651 1 0.565307 0.863759 0.563472
1656 1 0.623787 0.9373 0.557918
1778 1 0.568042 0.940677 0.633984
1781 1 0.627751 0.879667 0.623737
535 1 0.690702 0.99469 0.556191
1658 1 0.811953 0.936467 0.50156
1655 1 0.687288 0.868958 0.558591
1659 1 0.815921 0.865259 0.56601
1660 1 0.748102 0.941139 0.559396
1782 1 0.68932 0.937411 0.623969
1785 1 0.749303 0.871937 0.624402
1786 1 0.809934 0.92365 0.625135
537 1 0.749148 0.999248 0.502024
665 1 0.748864 0.997697 0.628126
1761 1 0.994217 0.883749 0.623713
1662 1 0.937157 0.937996 0.50414
1663 1 0.940811 0.87562 0.559837
1664 1 0.870262 0.930471 0.564493
1789 1 0.881728 0.876805 0.630772
1790 1 0.937116 0.94171 0.618211
543 1 0.932132 0.999901 0.557159
1661 1 0.874568 0.873138 0.507702
513 1 0.993735 0.996835 0.503275
1672 1 0.128661 0.556564 0.69278
1699 1 0.0626459 0.622614 0.684618
1794 1 0.0584561 0.565807 0.754422
1800 1 0.117255 0.560376 0.816762
1827 1 0.0581086 0.621906 0.815313
1829 1 0.122176 0.626201 0.746265
1825 1 0.000927646 0.623167 0.748832
1667 1 0.0644101 0.510196 0.691298
1676 1 0.250939 0.565924 0.693475
1703 1 0.186532 0.618494 0.688592
1707 1 0.316606 0.623236 0.693135
1798 1 0.184798 0.555719 0.753055
1802 1 0.308803 0.55985 0.753777
1804 1 0.242829 0.555955 0.812048
1831 1 0.178174 0.61642 0.808433
1833 1 0.246819 0.623384 0.757083
1835 1 0.314235 0.617847 0.808405
1799 1 0.176944 0.502566 0.818962
1680 1 0.366963 0.553971 0.691225
1711 1 0.440018 0.622556 0.686064
1806 1 0.434579 0.55732 0.74967
1808 1 0.378216 0.556348 0.807915
1837 1 0.380236 0.620338 0.748389
1839 1 0.429927 0.624142 0.810753
1841 1 0.496434 0.618979 0.738948
1812 1 0.497855 0.569108 0.812532
1811 1 0.555183 0.505299 0.81179
1684 1 0.500338 0.56212 0.687628
1688 1 0.619556 0.560397 0.689018
1715 1 0.561802 0.624637 0.684268
1810 1 0.560588 0.569353 0.754978
1816 1 0.627843 0.564667 0.812796
1843 1 0.560764 0.627658 0.815531
1845 1 0.626142 0.624854 0.75372
1691 1 0.812886 0.510429 0.68981
1819 1 0.813908 0.5072 0.807933
1687 1 0.682539 0.500192 0.694734
1692 1 0.745767 0.570186 0.687849
1719 1 0.691758 0.638048 0.691398
1723 1 0.815 0.629478 0.688214
1814 1 0.685206 0.566238 0.749605
1818 1 0.807231 0.573731 0.746857
1820 1 0.750576 0.561896 0.810396
1847 1 0.691083 0.628666 0.821795
1849 1 0.741565 0.625369 0.750561
1851 1 0.814918 0.633615 0.808603
1817 1 0.749462 0.50167 0.749182
1668 1 0.994123 0.564479 0.689767
1796 1 0.992341 0.556906 0.819162
1821 1 0.875112 0.519107 0.750626
1696 1 0.868303 0.566617 0.686306
1727 1 0.931006 0.624428 0.687636
1822 1 0.938765 0.565836 0.751024
1824 1 0.873004 0.570549 0.81468
1853 1 0.878055 0.627869 0.749892
1855 1 0.937233 0.629258 0.815238
1704 1 0.127357 0.688403 0.685922
1731 1 0.0656157 0.739667 0.689659
1736 1 0.126642 0.81402 0.68583
1826 1 0.0630989 0.681175 0.751552
1832 1 0.126434 0.683295 0.812578
1858 1 0.0685548 0.813385 0.748242
1859 1 0.0654836 0.742769 0.804829
1861 1 0.123546 0.743764 0.749121
1864 1 0.123814 0.814739 0.809601
1732 1 0.00410423 0.811858 0.686413
1857 1 0.00546487 0.75727 0.743508
1700 1 0.00337364 0.685107 0.685149
1708 1 0.246202 0.684647 0.694699
1735 1 0.18533 0.748488 0.687897
1739 1 0.314409 0.745686 0.683164
1740 1 0.248577 0.815179 0.684342
1830 1 0.186578 0.687984 0.747689
1834 1 0.308037 0.680154 0.758826
1836 1 0.244019 0.691593 0.808686
1862 1 0.185986 0.814852 0.752746
1863 1 0.189409 0.755641 0.808953
1865 1 0.25004 0.752794 0.749262
1866 1 0.314838 0.823347 0.749681
1867 1 0.31622 0.757218 0.796315
1868 1 0.255785 0.820876 0.809593
1712 1 0.373635 0.686638 0.692328
1743 1 0.441849 0.749204 0.6858
1744 1 0.375673 0.816271 0.692405
1838 1 0.439246 0.684541 0.743689
1840 1 0.373856 0.685276 0.805048
1869 1 0.379728 0.744766 0.751222
1870 1 0.44066 0.803871 0.747369
1871 1 0.442798 0.75053 0.808357
1872 1 0.376326 0.820104 0.806345
1844 1 0.495254 0.682664 0.803896
1873 1 0.501716 0.750363 0.747141
1716 1 0.504841 0.69062 0.688428
1876 1 0.501742 0.812484 0.809302
1748 1 0.500809 0.815223 0.68336
1720 1 0.616985 0.684974 0.685445
1747 1 0.566337 0.751423 0.680909
1752 1 0.627526 0.82043 0.694109
1842 1 0.555983 0.681574 0.753152
1848 1 0.623222 0.688778 0.812994
1874 1 0.564661 0.803745 0.748038
1875 1 0.56412 0.748311 0.808607
1877 1 0.623871 0.746396 0.74477
1880 1 0.621131 0.807044 0.807666
1724 1 0.752983 0.689397 0.691297
1751 1 0.692449 0.752581 0.682638
1755 1 0.810602 0.753414 0.686071
1756 1 0.751069 0.812528 0.68141
1846 1 0.692438 0.694763 0.75718
1850 1 0.812349 0.694788 0.752263
1852 1 0.751713 0.686402 0.810863
1878 1 0.68747 0.809029 0.745115
1879 1 0.690596 0.75519 0.819292
1881 1 0.746244 0.754285 0.754112
1882 1 0.809917 0.812979 0.752302
1883 1 0.812113 0.748382 0.809449
1884 1 0.749354 0.812123 0.812536
1860 1 0.995338 0.81524 0.811748
1828 1 0.998973 0.683127 0.812445
1728 1 0.872015 0.688281 0.693198
1759 1 0.937343 0.750166 0.69745
1760 1 0.874753 0.810894 0.694431
1854 1 0.939853 0.688355 0.749811
1856 1 0.880896 0.695499 0.813197
1885 1 0.874497 0.750009 0.753822
1886 1 0.93311 0.819712 0.74887
1887 1 0.941366 0.748159 0.811771
1888 1 0.868601 0.815179 0.809952
771 1 0.0652551 0.998573 0.817833
1763 1 0.0595545 0.875037 0.679436
1768 1 0.122077 0.933827 0.686508
1890 1 0.0754607 0.939243 0.757212
1891 1 0.0665838 0.879346 0.812965
1893 1 0.129874 0.87298 0.753244
1896 1 0.127682 0.943416 0.823455
1764 1 0.000737483 0.948535 0.688658
1889 1 0.00152018 0.870852 0.740848
1892 1 0.00956521 0.939134 0.816562
769 1 0.00646791 0.996925 0.751376
651 1 0.312254 0.995286 0.684397
777 1 0.248955 0.995976 0.745769
1767 1 0.186497 0.873788 0.688215
1771 1 0.315509 0.873705 0.686776
1772 1 0.246137 0.938839 0.691968
1894 1 0.178034 0.943885 0.760966
1895 1 0.188015 0.870944 0.820605
1897 1 0.249049 0.872571 0.748926
1898 1 0.314196 0.934689 0.74448
1899 1 0.312011 0.879898 0.813035
1900 1 0.244108 0.933743 0.812697
779 1 0.302316 0.996075 0.813684
647 1 0.182582 0.993628 0.693338
781 1 0.368634 0.999751 0.748338
655 1 0.433668 0.998089 0.682406
785 1 0.494596 0.993296 0.746944
1775 1 0.439646 0.868516 0.679036
1776 1 0.379029 0.934447 0.679424
1901 1 0.376592 0.87721 0.745309
1902 1 0.432611 0.942566 0.750461
1903 1 0.431779 0.87466 0.821221
1904 1 0.369699 0.944457 0.815061
1908 1 0.489023 0.937407 0.816359
1905 1 0.498196 0.875171 0.744337
1780 1 0.496238 0.936127 0.689293
659 1 0.561137 0.993709 0.695018
1779 1 0.566371 0.877202 0.689495
1784 1 0.630843 0.940965 0.685754
1906 1 0.554945 0.937221 0.750168
1907 1 0.557115 0.880611 0.813823
1909 1 0.615933 0.874299 0.749116
1912 1 0.618265 0.940706 0.813014
795 1 0.812359 0.99992 0.809992
1783 1 0.691266 0.873545 0.68353
1787 1 0.816168 0.872625 0.688535
1788 1 0.74734 0.935007 0.690563
1910 1 0.685529 0.932213 0.742827
1911 1 0.686367 0.864264 0.810475
1913 1 0.74911 0.877539 0.743126
1914 1 0.814855 0.937031 0.747482
1915 1 0.808865 0.877879 0.807432
1916 1 0.744332 0.940419 0.808937
1791 1 0.938941 0.871601 0.685237
1792 1 0.877685 0.933984 0.691598
1917 1 0.872895 0.876231 0.754815
1918 1 0.938216 0.938037 0.754049
1919 1 0.93273 0.879161 0.814623
1920 1 0.875082 0.945752 0.811891
1925 1 0.115442 0.50013 0.882325
1922 1 0.0624937 0.56336 0.881055
1928 1 0.124154 0.560157 0.945431
1955 1 0.0643197 0.624432 0.942539
1957 1 0.120807 0.619835 0.882251
1924 1 0.00691918 0.564055 0.943627
1927 1 0.189645 0.505098 0.940493
1929 1 0.249734 0.501064 0.878832
1930 1 0.309503 0.5591 0.875755
1926 1 0.184794 0.561382 0.881998
1963 1 0.310629 0.615005 0.942467
1961 1 0.248489 0.616534 0.874031
1932 1 0.248518 0.562332 0.938656
1959 1 0.187392 0.625902 0.939319
1041 1 0.497269 0.50332 0.99081
1933 1 0.381089 0.504555 0.876314
1073 1 0.498667 0.620253 0.997603
1037 1 0.382452 0.500998 0.997237
1965 1 0.370931 0.617818 0.878549
1934 1 0.438366 0.566883 0.869529
1936 1 0.370007 0.559655 0.940722
1967 1 0.429809 0.620207 0.945876
1969 1 0.496138 0.63353 0.872352
1940 1 0.495981 0.568499 0.927648
1042 1 0.553235 0.559515 0.991594
1939 1 0.558882 0.500234 0.934215
1077 1 0.622034 0.619021 0.999373
1937 1 0.501454 0.500098 0.870899
1938 1 0.559521 0.566101 0.871358
1944 1 0.62387 0.563557 0.933492
1971 1 0.558954 0.625778 0.937839
1973 1 0.628654 0.624055 0.876901
1941 1 0.61971 0.503522 0.869527
1943 1 0.687763 0.512576 0.937344
1945 1 0.751291 0.501339 0.883137
1942 1 0.697516 0.562156 0.87082
1946 1 0.811446 0.558847 0.86926
1977 1 0.768596 0.630878 0.87158
1979 1 0.817754 0.629393 0.941844
1948 1 0.750465 0.561053 0.932377
1975 1 0.69477 0.626106 0.931374
1081 1 0.754896 0.618411 0.996646
1949 1 0.875218 0.50395 0.87256
1953 1 0.992481 0.625938 0.87819
1950 1 0.939078 0.56467 0.874298
1981 1 0.873885 0.631321 0.877664
1983 1 0.937413 0.622494 0.939802
1952 1 0.872688 0.561735 0.935327
1951 1 0.93788 0.506483 0.937786
1986 1 0.0698715 0.806325 0.870003
1987 1 0.0632812 0.74858 0.938523
1954 1 0.0556766 0.683892 0.87875
1992 1 0.126331 0.813577 0.938953
1989 1 0.129027 0.743516 0.867178
1960 1 0.121693 0.693166 0.928875
1985 1 0.00475754 0.752623 0.869966
1058 1 0.0649677 0.684551 0.997604
1094 1 0.187194 0.805347 0.995163
1098 1 0.317555 0.808865 0.995948
1097 1 0.250086 0.744663 0.997966
1996 1 0.241803 0.815243 0.934471
1990 1 0.184851 0.810403 0.879641
1994 1 0.316597 0.808941 0.876513
1958 1 0.191184 0.679829 0.871486
1993 1 0.247906 0.74988 0.874902
1964 1 0.245311 0.676424 0.93429
1995 1 0.312599 0.746914 0.936445
1962 1 0.311306 0.688372 0.874594
1991 1 0.190793 0.742122 0.933117
1066 1 0.312434 0.681681 0.996786
2000 1 0.374037 0.816024 0.936316
1966 1 0.425765 0.68362 0.868846
1997 1 0.372853 0.74833 0.876708
1999 1 0.435871 0.744228 0.926123
1998 1 0.434721 0.809243 0.873897
1968 1 0.36785 0.685098 0.939323
2001 1 0.498688 0.737869 0.865196
1972 1 0.497389 0.687418 0.934671
2004 1 0.494638 0.806411 0.937733
1070 1 0.435601 0.684571 0.99748
1101 1 0.378405 0.752876 0.990688
1106 1 0.565719 0.807593 0.993464
1074 1 0.564017 0.683227 0.994829
1105 1 0.501508 0.750519 0.996474
2003 1 0.559888 0.747135 0.93119
1976 1 0.626957 0.691464 0.936545
2005 1 0.627255 0.751546 0.873488
2002 1 0.560135 0.804248 0.870269
2008 1 0.621512 0.811488 0.93675
1970 1 0.561633 0.688494 0.867445
1109 1 0.628932 0.751041 0.99706
2006 1 0.689639 0.810962 0.877862
2012 1 0.760923 0.817648 0.93498
2009 1 0.750874 0.752182 0.872264
2011 1 0.811676 0.743974 0.936279
2010 1 0.812295 0.81434 0.870188
1980 1 0.751407 0.686871 0.933274
1974 1 0.69116 0.689354 0.874778
2007 1 0.700375 0.750969 0.941062
1978 1 0.820917 0.693206 0.877639
1110 1 0.699962 0.818602 0.99564
1114 1 0.824946 0.815628 0.9991
1956 1 0.999436 0.686185 0.937307
1988 1 0.995585 0.81517 0.940553
1982 1 0.94266 0.692717 0.876111
1984 1 0.883811 0.691165 0.939776
2016 1 0.871872 0.80527 0.931034
2015 1 0.936095 0.757894 0.936308
2013 1 0.87302 0.754588 0.8684
2014 1 0.938512 0.818933 0.868913
2017 1 0.0104741 0.872219 0.875095
2018 1 0.0641637 0.940272 0.879881
2021 1 0.122058 0.875625 0.876234
2024 1 0.121978 0.943024 0.939036
2020 1 0.0130986 0.93323 0.942253
2019 1 0.0689774 0.870519 0.94127
1122 1 0.0692996 0.941594 0.993751
905 1 0.243662 0.997242 0.87654
907 1 0.312456 0.989022 0.93389
2023 1 0.181154 0.877381 0.936318
2028 1 0.240474 0.938732 0.947242
2025 1 0.250772 0.869018 0.872725
2022 1 0.184045 0.938206 0.881224
2027 1 0.305709 0.884342 0.939571
2026 1 0.298802 0.93523 0.873704
1129 1 0.243674 0.871934 0.998796
9 1 0.257176 0.998053 0.998817
17 1 0.49963 0.993594 0.996769
2036 1 0.497658 0.935594 0.932123
2031 1 0.433783 0.873561 0.937563
2032 1 0.371095 0.927622 0.93481
2030 1 0.434524 0.944204 0.880385
1133 1 0.373849 0.877495 0.99641
2033 1 0.495125 0.873016 0.877088
2029 1 0.36751 0.871011 0.877305
13 1 0.376928 0.996454 0.991166
1134 1 0.438165 0.934005 0.99125
2035 1 0.561437 0.866497 0.934565
2034 1 0.556522 0.938878 0.877989
2040 1 0.620703 0.93928 0.93445
2037 1 0.627134 0.869473 0.868351
1138 1 0.560755 0.930604 0.993915
917 1 0.629312 0.998096 0.871117
1146 1 0.812314 0.944641 0.994347
2038 1 0.681985 0.934376 0.865693
921 1 0.749994 0.996311 0.874173
2042 1 0.815743 0.936553 0.872318
2044 1 0.751111 0.936747 0.93767
2043 1 0.8185 0.876889 0.938709
1142 1 0.685408 0.940446 0.996802
2041 1 0.747362 0.878802 0.874908
2039 1 0.679463 0.878265 0.9325
919 1 0.683039 0.996018 0.933229
1145 1 0.759272 0.877068 0.998892
25 1 0.741738 0.995281 0.994121
1149 1 0.882261 0.883536 0.997016
927 1 0.942771 0.995405 0.940353
2045 1 0.875798 0.875155 0.866489
2046 1 0.940973 0.942026 0.873173
2047 1 0.934804 0.876119 0.931346
2048 1 0.879052 0.944519 0.934353
1121 1 0.992805 0.873387 0.996621
1150 1 0.943477 0.935311 0.993898
29 1 0.88387 0.98976 0.999217
| [
"[email protected]"
] | |
ff22176a2b050a193f1882462e0d36e591e42784 | cb0e7d6493b23e870aa625eb362384a10f5ee657 | /solutions/python3/0567.py | 65478b7cc2fb087117f7698fe743cdccb13f091a | [] | no_license | sweetpand/LeetCode-1 | 0acfa603af254a3350d457803449a91322f2d1a7 | 65f4ef26cb8b2db0b4bf8c42bfdc76421b479f94 | refs/heads/master | 2022-11-14T07:01:42.502172 | 2020-07-12T12:25:56 | 2020-07-12T12:25:56 | 279,088,171 | 1 | 0 | null | 2020-07-12T15:03:20 | 2020-07-12T15:03:19 | null | UTF-8 | Python | false | false | 500 | py | class Solution:
    def checkInclusion(self, s1: str, s2: str) -> bool:
        # Sliding-window check: return True if some window of s2 with length
        # len(s1) uses exactly the character counts of s1 (i.e. a permutation).
        import collections  # explicit import so the snippet also runs outside the LeetCode judge
        count1 = collections.Counter(s1)
        required = len(s1)
        for r, c in enumerate(s2):
            count1[c] -= 1
            if count1[c] >= 0:
                required -= 1  # c was still needed by the current window
            if r >= len(s1):
                left = s2[r - len(s1)]  # character that just slid out of the window
                count1[left] += 1
                if count1[left] > 0:
                    required += 1  # the window lost a needed character
            if required == 0:
                return True
        return False
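# --- Hedged usage sketch (added; not part of the original LeetCode submission) ---
# Quick manual check of the sliding-window behaviour; the example strings are illustrative.
if __name__ == "__main__":
    _sol = Solution()
    print(_sol.checkInclusion("ab", "eidbaooo"))  # expected True: "ba" occurs as a window of s2
    print(_sol.checkInclusion("ab", "eidboaoo"))  # expected False: no contiguous permutation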
| [
"[email protected]"
] | |
52564c55ce188af128e41cc3810567e62b0cb71c | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_wisecracked.py | df762aa40f8d90ebf7ab0b38869d1bab6c31eb7e | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py |
# class header
class _WISECRACKED():
    def __init__(self):
        self.name = "WISECRACKED"
        # the original line read `self.definitions = wisecrack`, an undefined name;
        # store the base form as a string instead so the module imports cleanly
        self.definitions = ['wisecrack']
        self.parents = []
        self.children = []
        self.properties = []
        self.jsondata = {}
        self.basic = ['wisecrack']
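# Hedged usage sketch (added for illustration; not in the original wordbase file):
if __name__ == "__main__":
    entry = _WISECRACKED()
    print(entry.name, entry.basic)  # -> WISECRACKED ['wisecrack']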
| [
"[email protected]"
] | |
c1f7f5a8fdcb8e87bf303027ecd2d3053561bdfd | abb64b652cf908aaa17257464a12395b014b6093 | /test/test_quantized_nn_mods.py | 7203fb371c6255be2b47c7441de524a677698d85 | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | beifangfazhanlu/pytorch | 8a1c5a4a11b29da26af4d3839aff0ca22e4a298a | b7d992eb46a1e085d2b8b7f0df9817bf569616d3 | refs/heads/master | 2020-07-13T15:43:26.647301 | 2019-08-29T05:18:56 | 2019-08-29T05:20:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,424 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
import torch.nn.quantized as nnq
import torch.nn.quantized.dynamic as nnqd
import torch.nn._intrinsic.quantized as nnq_fused
import torch.nn.quantized.functional as qF
from torch.nn.quantized.modules import Conv2d
from torch.nn._intrinsic.quantized import ConvReLU2d
import torch.quantization
from common_utils import run_tests, tempfile
from common_quantization import QuantizationTestCase, no_deadline, prepare_dynamic
from common_quantized import _calculate_dynamic_qparams
from hypothesis import given
from hypothesis import strategies as st
import unittest
'''
Note that the tests in this file are just API tests, meant to make sure we wrapped the
quantized operator implementations correctly in the user-facing APIs; these are
not correctness tests for the underlying quantized operators. For correctness
tests, please see `caffe2/test/test_quantized.py`.
'''
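# --- Illustrative sketch (added; not part of the upstream test suite) ---
# The module/functional wrappers exercised below simply forward to the same
# quantized kernels that can be called directly, e.g. nnq.ReLU applied to a
# quantized tensor should match torch.relu on that tensor.  API names follow
# the older torch version this file targets, exactly as used in the tests.
def _relu_wrapping_sketch():
    x = torch.arange(-5, 5, dtype=torch.float)
    qx = torch.quantize_linear(x, scale=1.0, zero_point=5, dtype=torch.quint8)
    assert torch.equal(nnq.ReLU()(qx).dequantize(), torch.relu(qx).dequantize())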
class FunctionalAPITest(QuantizationTestCase):
def test_relu_api(self):
X = torch.arange(-5, 5, dtype=torch.float)
scale = 2.0
zero_point = 1
qX = torch.quantize_linear(X, scale=scale, zero_point=zero_point, dtype=torch.quint8)
qY = torch.relu(qX)
qY_hat = qF.relu(qX)
self.assertEqual(qY, qY_hat)
@no_deadline
@unittest.skipIf(
not torch.fbgemm_is_cpu_supported(),
" Quantized operations require FBGEMM. FBGEMM is only optimized for CPUs"
" with instruction set support avx2 or newer.",
)
@given(
use_bias=st.booleans(),
)
def test_conv_api(self, use_bias):
"""Tests the correctness of the conv module.
The correctness is defined against the functional implementation.
"""
N, iC, H, W = 10, 10, 10, 3
oC, g, kH, kW = 16, 1, 3, 3
scale, zero_point = 1.0 / 255, 128
stride = (1, 1)
i_padding = (0, 0)
dilation = (1, 1)
X = torch.randn(N, iC, H, W, dtype=torch.float32)
X = X.permute([0, 2, 3, 1]).contiguous()
qX = torch.quantize_linear(X, scale=scale, zero_point=128, dtype=torch.quint8)
w = torch.randn(oC, iC // g, kH, kW, dtype=torch.float32)
qw = torch.quantize_linear(w, scale=scale, zero_point=0, dtype=torch.qint8)
b = torch.randn(oC, dtype=torch.float32) if use_bias else None
q_bias = torch.quantize_linear(b, scale=1.0 / 1024, zero_point=0, dtype=torch.qint32) if use_bias else None
q_filters_ref = torch.ops.quantized.fbgemm_conv_prepack(qw.permute([0, 2, 3, 1]),
stride,
i_padding,
dilation,
g)
requantized_bias = torch.quantize_linear(q_bias.dequantize(), scale * scale, 0 , torch.qint32) if use_bias else None
ref_result = torch.ops.quantized.fbgemm_conv2d(qX.permute([0, 2, 3, 1]), q_filters_ref,
requantized_bias, stride,
i_padding, dilation,
g, scale, zero_point).permute([0, 3, 1, 2])
q_result = torch.nn.quantized.functional.conv2d(qX,
qw,
bias=q_bias, scale=scale,
zero_point=zero_point,
stride=stride, padding=i_padding,
dilation=dilation, groups=g,
dtype=torch.quint8)
self.assertEqual(ref_result, q_result)
class DynamicModuleAPITest(QuantizationTestCase):
@no_deadline
@unittest.skipIf(
not torch.fbgemm_is_cpu_supported(),
" Quantized operations require FBGEMM. FBGEMM is only optimized for CPUs"
" with instruction set support avx2 or newer.",
)
@given(
batch_size=st.integers(1, 5),
in_features=st.integers(16, 32),
out_features=st.integers(4, 8),
use_bias=st.booleans(),
use_default_observer=st.booleans(),
)
def test_linear_api(self, batch_size, in_features, out_features, use_bias, use_default_observer):
"""test API functionality for nn.quantized.dynamic.Linear"""
W = torch.rand(out_features, in_features).float()
W_scale, W_zp = _calculate_dynamic_qparams(W, torch.qint8)
W_q = torch.quantize_linear(W, W_scale, W_zp, torch.qint8)
X = torch.rand(batch_size, in_features).float()
B = torch.rand(out_features).float() if use_bias else None
qlinear = nnqd.Linear(in_features, out_features)
# Run module with default-initialized parameters.
# This tests that the constructor is correct.
qlinear(X)
qlinear.set_weight(W_q)
# Simple round-trip test to ensure weight()/set_weight() API
self.assertEqual(qlinear.weight(), W_q)
W_pack = qlinear._packed_weight
qlinear.bias = B if use_bias else None
Z_dq = qlinear(X)
# Check if the module implementation matches calling the
# ops directly
Z_ref = torch.ops.quantized.fbgemm_linear_dynamic(X, W_pack, B)
self.assertEqual(Z_ref, Z_dq)
# Test serialization of dynamic quantized Linear Module using state_dict
model_dict = qlinear.state_dict()
self.assertEqual(model_dict['weight'], W_q)
if use_bias:
self.assertEqual(model_dict['bias'], B)
with tempfile.TemporaryFile() as f:
torch.save(model_dict, f)
f.seek(0)
loaded_dict = torch.load(f)
for key in model_dict:
self.assertEqual(model_dict[key], loaded_dict[key])
loaded_qlinear = nnqd.Linear(in_features, out_features)
loaded_qlinear.load_state_dict(loaded_dict)
linear_unpack = torch.ops.quantized.fbgemm_linear_unpack
self.assertEqual(linear_unpack(qlinear._packed_weight),
linear_unpack(loaded_qlinear._packed_weight))
if use_bias:
self.assertEqual(qlinear.bias, loaded_qlinear.bias)
self.assertTrue(dir(qlinear) == dir(loaded_qlinear))
self.assertTrue(hasattr(qlinear, '_packed_weight'))
self.assertTrue(hasattr(loaded_qlinear, '_packed_weight'))
self.assertTrue(hasattr(qlinear, 'weight'))
self.assertTrue(hasattr(loaded_qlinear, 'weight'))
self.assertEqual(qlinear.weight(), loaded_qlinear.weight())
self.assertEqual(qlinear.weight(), torch.ops.quantized.fbgemm_linear_unpack(qlinear._packed_weight))
Z_dq2 = qlinear(X)
self.assertEqual(Z_dq, Z_dq2)
# test serialization of module directly
with tempfile.TemporaryFile() as f:
torch.save(qlinear, f)
f.seek(0)
loaded = torch.load(f)
# This check is disabled pending an issue in PyTorch serialization:
# https://github.com/pytorch/pytorch/issues/24045
# self.assertEqual(qlinear.weight(), loaded.weight())
self.assertEqual(qlinear.zero_point, loaded.zero_point)
# Test JIT
self.checkScriptable(qlinear, list(zip([X], [Z_ref])), check_save_load=True)
# Test from_float
float_linear = torch.nn.Linear(in_features, out_features).float()
if use_default_observer:
float_linear.qconfig = torch.quantization.default_dynamic_qconfig
prepare_dynamic(float_linear)
float_linear(X.float())
quantized_float_linear = nnqd.Linear.from_float(float_linear)
# Smoke test to make sure the module actually runs
quantized_float_linear(X)
# Smoke test extra_repr
str(quantized_float_linear)
class ModuleAPITest(QuantizationTestCase):
def test_relu(self):
relu_module = nnq.ReLU()
relu6_module = nnq.ReLU6()
x = torch.arange(-10, 10, dtype=torch.float)
y_ref = torch.relu(x)
y6_ref = torch.nn.modules.ReLU6()(x)
qx = torch.quantize_linear(x, 1.0, 0, dtype=torch.qint32)
qy = relu_module(qx)
qy6 = relu6_module(qx)
self.assertEqual(y_ref, qy.dequantize(),
message="ReLU module API failed")
self.assertEqual(y6_ref, qy6.dequantize(),
message="ReLU6 module API failed")
@no_deadline
@unittest.skipIf(
not torch.fbgemm_is_cpu_supported(),
" Quantized operations require FBGEMM. FBGEMM is only optimized for CPUs"
" with instruction set support avx2 or newer.",
)
@given(
batch_size=st.integers(1, 5),
in_features=st.integers(16, 32),
out_features=st.integers(4, 8),
use_bias=st.booleans(),
use_fused=st.booleans(),
)
def test_linear_api(self, batch_size, in_features, out_features, use_bias, use_fused):
"""test API functionality for nn.quantized.linear and nn._intrinsic.quantized.linear_relu"""
W = torch.rand(out_features, in_features).float()
W_q = torch.quantize_linear(W, 0.1, 4, torch.qint8)
X = torch.rand(batch_size, in_features).float()
X_q = torch.quantize_linear(X, 0.2, 10, torch.quint8)
B = torch.rand(out_features).float() if use_bias else None
B_q = torch.quantize_linear(B, W_q.q_scale() * X_q.q_scale(), 0, torch.qint32) if use_bias else None
scale = 0.5
zero_point = 3
if use_fused:
qlinear = nnq_fused.LinearReLU(in_features, out_features)
else:
qlinear = nnq.Linear(in_features, out_features)
# Run module with default-initialized parameters.
# This tests that the constructor is correct.
qlinear(X_q)
qlinear.set_weight(W_q)
# Simple round-trip test to ensure weight()/set_weight() API
self.assertEqual(qlinear.weight(), W_q)
W_pack = qlinear._packed_weight
qlinear.bias = B_q if use_bias else None
qlinear.scale = float(scale)
qlinear.zero_point = int(zero_point)
Z_q = qlinear(X_q)
# Check if the module implementation matches calling the
# ops directly
if use_fused:
Z_ref = torch.ops.quantized.fbgemm_linear_relu(X_q, W_pack, B_q, scale, zero_point)
else:
Z_ref = torch.ops.quantized.fbgemm_linear(X_q, W_pack, B_q, scale, zero_point)
self.assertEqual(Z_ref, Z_q)
# Test serialization of quantized Linear Module using state_dict
model_dict = qlinear.state_dict()
self.assertEqual(model_dict['weight'], W_q)
if use_bias:
self.assertEqual(model_dict['bias'], B_q)
with tempfile.TemporaryFile() as f:
torch.save(model_dict, f)
f.seek(0)
loaded_dict = torch.load(f)
for key in model_dict:
self.assertEqual(model_dict[key], loaded_dict[key])
if use_fused:
loaded_qlinear = nnq_fused.LinearReLU(in_features, out_features)
else:
loaded_qlinear = nnq.Linear(in_features, out_features)
loaded_qlinear.load_state_dict(loaded_dict)
linear_unpack = torch.ops.quantized.fbgemm_linear_unpack
self.assertEqual(linear_unpack(qlinear._packed_weight),
linear_unpack(loaded_qlinear._packed_weight))
if use_bias:
self.assertEqual(qlinear.bias, loaded_qlinear.bias)
self.assertEqual(qlinear.scale, loaded_qlinear.scale)
self.assertEqual(qlinear.zero_point, loaded_qlinear.zero_point)
self.assertTrue(dir(qlinear) == dir(loaded_qlinear))
self.assertTrue(hasattr(qlinear, '_packed_weight'))
self.assertTrue(hasattr(loaded_qlinear, '_packed_weight'))
self.assertTrue(hasattr(qlinear, 'weight'))
self.assertTrue(hasattr(loaded_qlinear, 'weight'))
self.assertEqual(qlinear.weight(), loaded_qlinear.weight())
self.assertEqual(qlinear.weight(), torch.ops.quantized.fbgemm_linear_unpack(qlinear._packed_weight))
Z_q2 = loaded_qlinear(X_q)
self.assertEqual(Z_q, Z_q2)
# test serialization of module directly
with tempfile.TemporaryFile() as f:
torch.save(qlinear, f)
f.seek(0)
loaded = torch.load(f)
# This check is disabled pending an issue in PyTorch serialization:
# https://github.com/pytorch/pytorch/issues/24045
# self.assertEqual(qlinear.weight(), loaded.weight())
self.assertEqual(qlinear.bias, loaded.bias)
self.assertEqual(qlinear.scale, loaded.scale)
self.assertEqual(qlinear.zero_point, loaded.zero_point)
# Test JIT
self.checkScriptable(qlinear, list(zip([X_q], [Z_ref])), check_save_load=True)
# Test from_float
float_linear = torch.nn.Linear(in_features, out_features).float()
float_linear.qconfig = torch.quantization.default_qconfig
torch.quantization.prepare(float_linear)
float_linear(X.float())
quantized_float_linear = torch.quantization.convert(float_linear)
# Smoke test to make sure the module actually runs
quantized_float_linear(X_q)
# Smoke test extra_repr
str(quantized_float_linear)
def test_quant_dequant_api(self):
r = torch.tensor([[1., -1.], [1., -1.]], dtype=torch.float)
scale, zero_point, dtype = 1.0, 2, torch.qint8
# testing Quantize API
qr = torch.quantize_linear(r, scale, zero_point, dtype)
quant_m = nnq.Quantize(scale, zero_point, dtype)
qr2 = quant_m(r)
self.assertEqual(qr, qr2)
# testing Dequantize API
rqr = qr.dequantize()
dequant_m = nnq.DeQuantize()
rqr2 = dequant_m(qr2)
self.assertEqual(rqr, rqr2)
@no_deadline
@unittest.skipIf(
not torch.fbgemm_is_cpu_supported(),
" Quantized operations require FBGEMM. FBGEMM is only optimized for CPUs"
" with instruction set support avx2 or newer.",
)
@given(
use_bias=st.booleans(),
use_fused=st.booleans(),
)
def test_conv_api(self, use_bias, use_fused):
"""Tests the correctness of the conv module.
The correctness is defined against the functional implementation.
"""
N, iC, H, W = 10, 10, 10, 3
oC, g, kH, kW = 16, 1, 3, 3
scale, zero_point = 1.0 / 255, 128
X = torch.randn(N, iC, H, W, dtype=torch.float32)
X = X.permute([0, 2, 3, 1]).contiguous()
qX = torch.quantize_linear(X, scale=scale, zero_point=128, dtype=torch.quint8)
w = torch.randn(oC, iC // g, kH, kW, dtype=torch.float32)
qw = torch.quantize_linear(w, scale=scale, zero_point=0, dtype=torch.qint8)
b = torch.randn(oC, dtype=torch.float32) if use_bias else None
qb = torch.quantize_linear(b, scale=1.0 / 1024, zero_point=0, dtype=torch.qint32) if use_bias else None
if use_fused:
conv_under_test = ConvReLU2d(in_channels=iC,
out_channels=oC,
kernel_size=(kH, kW),
stride=1,
padding=0,
dilation=1,
groups=g,
bias=use_bias,
padding_mode='zeros')
else:
conv_under_test = Conv2d(in_channels=iC,
out_channels=oC,
kernel_size=(kH, kW),
stride=1,
padding=0,
dilation=1,
groups=g,
bias=use_bias,
padding_mode='zeros')
# Run module with default-initialized parameters.
# This tests that the constructor is correct.
conv_under_test(qX)
conv_under_test.set_weight(qw)
conv_under_test.bias = qb
conv_under_test.scale = scale
conv_under_test.zero_point = zero_point
# Test members
self.assertTrue(hasattr(conv_under_test, '_packed_weight'))
self.assertTrue(hasattr(conv_under_test, 'scale'))
self.assertTrue(hasattr(conv_under_test, 'zero_point'))
# Test properties
self.assertEqual(qw, conv_under_test.weight())
self.assertEqual(qb, conv_under_test.bias)
self.assertEqual(scale, conv_under_test.scale)
self.assertEqual(zero_point, conv_under_test.zero_point)
# Test forward
result_under_test = conv_under_test(qX)
result_reference = qF.conv2d(qX, qw, bias=qb,
scale=scale, zero_point=zero_point,
stride=1, padding=0,
dilation=1, groups=g, dtype=torch.quint8
)
if use_fused:
# result_reference < zero_point doesn't work for qtensor yet
# result_reference[result_reference < zero_point] = zero_point
MB, OC, OH, OW = result_reference.size()
for i in range(MB):
for j in range(OC):
for h in range(OH):
for w in range(OW):
if result_reference[i][j][h][w].int_repr() < zero_point:
# assign 0. that gets converted to zero_point
result_reference[i][j][h][w] = 0.
self.assertEqual(result_reference, result_under_test,
message="Tensors are not equal.")
# Test serialization of quantized Conv Module using state_dict
model_dict = conv_under_test.state_dict()
self.assertEqual(model_dict['weight'], qw)
if use_bias:
self.assertEqual(model_dict['bias'], qb)
with tempfile.NamedTemporaryFile() as f:
torch.save(model_dict, f)
f.seek(0)
loaded_dict = torch.load(f)
for key in model_dict:
self.assertEqual(loaded_dict[key], model_dict[key])
if use_fused:
loaded_conv_under_test = ConvReLU2d(in_channels=iC,
out_channels=oC,
kernel_size=(kH, kW),
stride=1,
padding=0,
dilation=1,
groups=g,
bias=use_bias,
padding_mode='zeros')
else:
loaded_conv_under_test = Conv2d(in_channels=iC,
out_channels=oC,
kernel_size=(kH, kW),
stride=1,
padding=0,
dilation=1,
groups=g,
bias=use_bias,
padding_mode='zeros')
loaded_conv_under_test.load_state_dict(loaded_dict)
self.assertEqual(loaded_conv_under_test.weight(), conv_under_test.weight())
if use_bias:
self.assertEqual(loaded_conv_under_test.bias, conv_under_test.bias)
self.assertEqual(loaded_conv_under_test.scale, conv_under_test.scale)
self.assertEqual(loaded_conv_under_test.zero_point, conv_under_test.zero_point)
self.assertTrue(dir(loaded_conv_under_test) == dir(conv_under_test))
self.assertTrue(hasattr(conv_under_test, '_packed_weight'))
self.assertTrue(hasattr(loaded_conv_under_test, '_packed_weight'))
self.assertTrue(hasattr(conv_under_test, 'weight'))
self.assertTrue(hasattr(loaded_conv_under_test, 'weight'))
self.assertEqual(loaded_conv_under_test.weight(), conv_under_test.weight())
self.assertEqual(loaded_conv_under_test.weight(), qw)
loaded_result = loaded_conv_under_test(qX)
self.assertEqual(loaded_result, result_reference)
with tempfile.NamedTemporaryFile() as f:
torch.save(conv_under_test, f)
f.seek(0)
loaded_conv = torch.load(f)
self.assertEqual(conv_under_test.bias, loaded_conv.bias)
self.assertEqual(conv_under_test.scale, loaded_conv.scale)
self.assertEqual(conv_under_test.zero_point, loaded_conv.zero_point)
# JIT testing
self.checkScriptable(conv_under_test, list(zip([qX], [result_reference])), check_save_load=True)
# Test from_float
float_conv = torch.nn.Conv2d(in_channels=iC,
out_channels=oC,
kernel_size=(kH, kW),
stride=1,
padding=0,
dilation=1,
groups=g,
bias=use_bias,
padding_mode='zeros').float()
float_conv.qconfig = torch.quantization.default_qconfig
torch.quantization.prepare(float_conv)
float_conv(X.float())
quantized_float_conv = torch.quantization.convert(float_conv)
# Smoke test to make sure the module actually runs
quantized_float_conv(qX)
# Check that bias is quantized based on output scale
if use_bias:
qbias = torch.quantize_linear(float_conv.bias, quantized_float_conv.scale / 2**16, 0, torch.qint32)
self.assertEqual(quantized_float_conv.bias.dequantize(), qbias.dequantize())
# Smoke test extra_repr
str(quantized_float_conv)
def test_pool_api(self):
"""Tests the correctness of the pool module.
The correctness is defined against the functional implementation.
"""
N, C, H, W = 10, 10, 10, 3
kwargs = {
'kernel_size': 2,
'stride': None,
'padding': 0,
'dilation': 1
}
scale, zero_point = 1.0 / 255, 128
X = torch.randn(N, C, H, W, dtype=torch.float32)
qX = torch.quantize_linear(X, scale=scale, zero_point=zero_point,
dtype=torch.quint8)
qX_expect = torch.nn.functional.max_pool2d(qX, **kwargs)
pool_under_test = torch.nn.quantized.MaxPool2d(**kwargs)
qX_hat = pool_under_test(qX)
self.assertEqual(qX_expect, qX_hat)
# JIT Testing
self.checkScriptable(pool_under_test, list(zip([X], [qX_expect])))
if __name__ == '__main__':
run_tests()
| [
"[email protected]"
] | |
28d7853629e519d31e6615eabe002706b6b08b38 | 4cb2bbd929ba3722d78cd6bd9feb2c5c0dd57025 | /olympic/forms.py | d27d0f59c4a933f98e12550b23203e2966edaad2 | [
"BSD-2-Clause"
] | permissive | mjtamlyn/tamlynscore | ebeebdc73feeab86995a2cb888e1bea203854553 | c6ac4e9a5e37dc3778b1f754b3143e44fa8dc0bc | refs/heads/master | 2023-08-24T11:00:16.153489 | 2023-08-08T11:30:24 | 2023-08-08T11:30:24 | 17,013,657 | 7 | 2 | BSD-3-Clause | 2023-08-05T19:52:51 | 2014-02-20T08:28:08 | Python | UTF-8 | Python | false | false | 3,102 | py | from django import forms
from .models import Result, SessionRound
class ResultForm(forms.ModelForm):
class Meta:
model = Result
exclude = ('match', 'seed')
class SetupForm(forms.Form):
SPREAD_CHOICES = (
('', 'No special options'),
('expanded', 'One target per archer'),
)
MATCH_CHOICES = (
('', 'All matches'),
('half', 'Only allocate half of the matches'),
('quarter', 'Only allocate 1/4 of the matches'),
('eighth', 'Only allocate 1/8 of the matches'),
('three-quarter', 'Only allocate 3/4 of the matches'),
('first-half', 'Only allocate first half of the matches / Final only'),
('second-half', 'Only allocate second half of the matches / Bronze only'),
)
LEVEL_CHOICES = (
(1, 'Finals'),
(2, 'Semis'),
(3, 'Quarters'),
(4, '1/8'),
(5, '1/16'),
(6, '1/32'),
(7, '1/64'),
(8, '1/128'),
)
TIMING_CHOICES = (
(1, 'Pass A'),
(2, 'Pass B'),
(3, 'Pass C'),
(4, 'Pass D'),
(5, 'Pass E'),
(6, 'Pass F'),
(7, 'Pass G'),
(8, 'Pass H'),
(9, 'Pass I'),
(10, 'Pass J'),
)
session_round = forms.ModelChoiceField(SessionRound.objects)
start = forms.IntegerField(label='Start target')
level = forms.TypedChoiceField(coerce=int, choices=LEVEL_CHOICES)
timing = forms.TypedChoiceField(label='Pass', coerce=int, choices=TIMING_CHOICES)
spread = forms.ChoiceField(label='Target spread', choices=SPREAD_CHOICES, required=False)
matches = forms.ChoiceField(label='Matches', choices=MATCH_CHOICES, required=False)
delete = forms.BooleanField(required=False)
def __init__(self, session_rounds, **kwargs):
self.session_rounds = session_rounds
super(SetupForm, self).__init__(**kwargs)
self.fields['session_round'].queryset = session_rounds
def save(self):
sr = self.cleaned_data['session_round']
kwargs = {
'level': self.cleaned_data['level'],
'start': self.cleaned_data['start'],
'timing': self.cleaned_data['timing'],
}
if sr.shot_round.team_type:
kwargs['expanded'] = True
if self.cleaned_data['spread'] == 'expanded':
kwargs['expanded'] = True
if self.cleaned_data['matches'] == 'half':
kwargs['half_only'] = True
if self.cleaned_data['matches'] == 'quarter':
kwargs['quarter_only'] = True
if self.cleaned_data['matches'] == 'eighth':
kwargs['eighth_only'] = True
if self.cleaned_data['matches'] == 'three-quarter':
kwargs['three_quarters'] = True
if self.cleaned_data['matches'] == 'first-half':
kwargs['first_half_only'] = True
if self.cleaned_data['matches'] == 'second-half':
kwargs['second_half_only'] = True
if self.cleaned_data['delete']:
sr.remove_matches(self.cleaned_data['level'])
else:
sr.make_matches(**kwargs)
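# --- Hedged usage sketch (added; not part of the original module) ---
# Typical flow in a view: bind the form to the competition's session rounds plus
# POST data, validate, then save() to allocate (or delete) the matches.  The
# variable names here are illustrative only.
def _setup_form_example(session_rounds, post_data):
    form = SetupForm(session_rounds, data=post_data)
    if form.is_valid():
        form.save()
    return form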
| [
"[email protected]"
] | |
641aacc8b6854764e829d6932d4d0627ea980786 | 19d03d646fcee318cca8078af27636732290d77b | /parlai/utils/flake8.py | 1170b4bbb4a717b201637e00678bf96a87614026 | [
"MIT"
] | permissive | yongkyung-oh/CMU-Studio-Project | 2d6fe6ef6fa30fda1a4f2d1fc45c5b85d6143775 | 448492f342e8157df2e736aa52825b66b1d66fd7 | refs/heads/master | 2022-10-24T16:56:46.763865 | 2020-07-01T10:03:00 | 2020-07-01T10:03:00 | 252,878,283 | 2 | 5 | MIT | 2021-03-25T23:50:27 | 2020-04-04T01:02:44 | Python | UTF-8 | Python | false | false | 3,424 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Custom flake8 rules for ParlAI.
Includes:
- Checks for python3 shebang
- Check for copyright message
- Docformatter issues (TODO)
"""
import docformatter
import difflib
PYTHON_SHEBANG = '#!/usr/bin/env python3'
WHITELIST_PHRASES = ['Moscow Institute of Physics and Technology.']
WHITELIST_FNS = ["mlb_vqa"]
COPYRIGHT = [
"Copyright (c) Facebook, Inc. and its affiliates.",
"This source code is licensed under the MIT license found in the",
"LICENSE file in the root directory of this source tree.",
]
class ParlAIChecker:
"""
Custom flake8 checker for some special ParlAI requirements.
"""
name = 'flake8-parlai'
version = '0.1'
def __init__(self, tree=None, filename=None, lines=None):
self.filename = filename
self.lines = lines
def run(self):
if self.lines is None:
with open(self.filename) as f:
self.lines = f.readlines()
if self.lines and PYTHON_SHEBANG not in self.lines[0]:
yield (
1,
0,
'PAI100 Missing python3 shebang. (`#!/usr/bin/env python3`)',
'',
)
# check doc formatting
source = "".join(self.lines)
formatted_source = docformatter.format_code(
source,
pre_summary_newline=True,
description_wrap_length=88,
summary_wrap_length=88,
make_summary_multi_line=True,
force_wrap=False,
)
if source != formatted_source:
diff = difflib.unified_diff(
source.split('\n'), # have to strip newlines
formatted_source.split('\n'),
f'before/{self.filename}',
f'after/{self.filename}',
n=0,
lineterm='',
)
for line in diff:
if line.startswith('@@'):
fields = line.split()
# find out the beginning line of the docstring reformat. Example:
# --- /path/to/original timestamp
# +++ /path/to/new timestamp
# @@ -1,3 +1,9 @@
# that -1 says the first line changed, and 3 lines were removed
# with a new offset belonging at the first line, and 9
# inserted lines.
line_no, *_ = fields[1].split(',')
line_no = -int(line_no)
yield (
line_no,
1,
f'PAI101 autoformat.sh would reformat the docstring',
'',
)
# the rest is checking copyright, but there are some exceptions
# copyright must appear in the first 16 lines of the file.
source = "".join(self.lines[:16])
if any(wl in source for wl in WHITELIST_PHRASES):
return
for i, msg in enumerate(COPYRIGHT, 1):
if any(wl in self.filename for wl in WHITELIST_FNS) and i < 3:
continue
if source and msg not in source:
yield (i, 0, f'PAI20{i} Missing copyright `{msg}`', '')
| [
"[email protected]"
] | |
57e19bf0eacc2c9dc6bfd1452ebf6c427e698494 | 311ce6fbe1b264f2b656ba235371e756695dca53 | /forcing/dot_in/aestus1_A1_ae1/make_dot_in.py | 3e296f6d56c6863053a5285cd0f5d84cb28cdf8f | [
"MIT"
] | permissive | parkermac/LiveOcean | 94bc9cb9fba1bdc2e206488e0e2afadfafeabb34 | 4bd2776cf95780a7965a18addac3c5e395703ce5 | refs/heads/master | 2022-11-30T10:21:50.568014 | 2022-11-21T16:32:55 | 2022-11-21T16:32:55 | 35,834,637 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,180 | py | """
This creates and populates directories for ROMS runs on gaggle. It is
designed to work with the "BLANK" version of the .in file,
replacing things like $whatever$ with meaningful values.
"""
import os
import sys
fpth = os.path.abspath('../../')
if fpth not in sys.path:
sys.path.append(fpth)
import forcing_functions as ffun
Ldir, Lfun = ffun.intro()
from datetime import datetime, timedelta
fdt = datetime.strptime(Ldir['date_string'], '%Y.%m.%d')
fdt_yesterday = fdt - timedelta(1)
print('- dot_in.py creating files for LiveOcean for ' + Ldir['date_string'])
#### USER DEFINED VALUES ####
gtag = Ldir['gtag']
gtagex = gtag + '_' + Ldir['ex_name']
EX_NAME = Ldir['ex_name'].upper()
multi_core = True # use more than one core
if Ldir['run_type'] == 'backfill':
days_to_run = 1.0
else:
days_to_run = 1.0
dtsec = 30 # time step in seconds INTEGER (should fit evenly into 3600 sec)
restart_nrrec = '-1' # '-1' for a non-crash restart file, otherwise '1' or '2'
his_interval = 3600 # seconds to define and write to history files
rst_interval = 1 # days between writing to the restart file (e.g. 5)
zqt_height = '2.0d0'
zw_height = '10.0d0'
#### END USER DEFINED VALUES ####
# DERIVED VALUES
if multi_core:
ntilei = '12' # number of tiles in I-direction (6)
ntilej = '6' # number of tiles in J-direction (12)
else:
ntilei = '1'
ntilej = '1'
if float(3600/dtsec) != 3600.0/dtsec:
print('** WARNING: dtsec does not fit evenly into 1 hour **')
dt = str(dtsec) + '.0d0' # a string version of dtsec, for the .in file
ninfo = int(his_interval/dtsec) # how often to write info to the log file (# of time steps)
nhis = int(his_interval/dtsec) # how often to write to the history files
ndefhis = int(nhis) # how often to create new history files
nrst = int(rst_interval*86400/dtsec)
ntimes = int(days_to_run*86400/dtsec)
# file location stuff
date_string = Ldir['date_string']
date_string_yesterday = fdt_yesterday.strftime('%Y.%m.%d')
dstart = str(int(Lfun.datetime_to_modtime(fdt) / 86400.))
f_string = 'f' + date_string
f_string_yesterday = 'f'+ date_string_yesterday
# where forcing files live (fjord, as seen from gaggle)
lo_dir = '/fjdata1/parker/LiveOcean/'
loo_dir = '/fjdata1/parker/LiveOcean_output/'
grid_dir = '/fjdata1/parker/LiveOcean_data/grids/' + Ldir['gridname'] + '/'
force_dir = loo_dir + gtag + '/' + f_string + '/'
roms_dir = '/pmr1/parker/LiveOcean_roms/'
roms_name = 'ROMS_820'
# the .in file
dot_in_name = 'liveocean.in' # name of the .in file
dot_in_dir0 = Ldir['roms'] + 'output/' + gtagex + '/'
Lfun.make_dir(dot_in_dir0) # make sure it exists
dot_in_dir = dot_in_dir0 + f_string +'/'
Lfun.make_dir(dot_in_dir, clean=True) # make sure it exists and is empty
# where to put the output files according to the .in file
out_dir0 = roms_dir + 'output/' + gtagex + '/'
out_dir = out_dir0 + f_string + '/'
atm_dir = 'atm/' # which atm forcing files to use
ocn_dir = 'ocnA/' # which ocn forcing files to use
riv_dir = 'riv1/' # which riv forcing files to use
tide_dir = 'tideA/' # which tide forcing files to use
if Ldir['start_type'] == 'continuation':
nrrec = '0' # '-1' for a hot restart
ininame = 'ocean_rst.nc' # for a hot perfect restart
#ininame = 'ocean_his_0025.nc' # for a hot restart
ini_fullname = out_dir0 + f_string_yesterday + '/' + ininame
elif Ldir['start_type'] == 'new':
nrrec = '0' # '0' for a history or ini file
ininame = 'ocean_ini.nc' # could be an ini or history file
ini_fullname = force_dir + ocn_dir + ininame
# END DERIVED VALUES
## create .in ##########################
f = open('BLANK.in','r')
f2 = open(dot_in_dir + dot_in_name,'w')
in_varlist = ['base_dir','ntilei','ntilej','ntimes','dt','nrrec','ninfo',
'nhis','dstart','ndefhis','nrst','force_dir','grid_dir','roms_dir',
'atm_dir','ocn_dir','riv_dir','tide_dir','dot_in_dir',
'zqt_height','zw_height','ini_fullname','out_dir','EX_NAME','roms_name']
for line in f:
for var in in_varlist:
if '$'+var+'$' in line:
line2 = line.replace('$'+var+'$', str(eval(var)))
line = line2
else:
line2 = line
f2.write(line2)
f.close()
f2.close()
| [
"[email protected]"
] | |
dff2b536322cbc8ac24cd00ed962fdad5d4bbba2 | 592961def9fe287a31e117649f1ac1e97b085a9b | /venv/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py | a6582e6dd5b515ec3783b7ecc3ed81adffc4f3cb | [] | no_license | Rushin95/The_Trip_Planner-Lyft_vs_Uber | 62f03a1df8c6a0268089f50f4e80ec3d9b6b9870 | 4eeea4029eb4df047471b92065455a6828232293 | refs/heads/master | 2021-01-19T11:52:47.766019 | 2018-05-03T23:59:58 | 2018-05-03T23:59:58 | 82,268,914 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | from __future__ import absolute_import, division, unicode_literals
from . import base
try:
from collections import OrderedDict
except ImportError:
# noinspection PyUnresolvedReferences
from ordereddict import OrderedDict
class Filter(base.Filter):
def __iter__(self):
for token in base.Filter.__iter__(self):
if token["type"] in ("StartTag", "EmptyTag"):
attrs = OrderedDict()
for name, value in sorted(token["data"].items(),
key=lambda x: x[0]):
attrs[name] = value
token["data"] = attrs
yield token
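# --- Hedged usage sketch (added; not part of the upstream module) ---
# The filter rewrites the walker's token stream so that StartTag/EmptyTag
# attributes come out sorted by name.  This assumes the standard public
# html5lib API for building a tree and walking it.
def _usage_sketch():
    import html5lib
    dom = html5lib.parse('<p b="2" a="1"></p>')
    walker = html5lib.getTreeWalker("etree")
    return list(Filter(walker(dom)))  # StartTag tokens now carry OrderedDict data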
| [
"[email protected]"
] | |
2e0a0431b921c67132029866d0dc9a2fe708b565 | e0268b6e868fcaaf6fc9c42b720e014c3ae41a20 | /scripts/make_bu_data.py | ee30a5f8470d550046a3ed6c5170a7e7aee29344 | [
"MIT"
] | permissive | gradio-app/ImageCaptioning.pytorch | 79208726dd09e1e532863af56c7a900b576cbca2 | 436d900d01139dc402b24425c60679409e0c9051 | refs/heads/master | 2022-11-15T03:27:38.775656 | 2020-07-12T22:44:30 | 2020-07-12T22:44:30 | 279,639,722 | 1 | 1 | MIT | 2020-07-14T16:37:47 | 2020-07-14T16:37:46 | null | UTF-8 | Python | false | false | 1,889 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import base64
import numpy as np
import csv
import sys
import zlib
import time
import mmap
import argparse
parser = argparse.ArgumentParser()
# output_dir
parser.add_argument('--downloaded_feats', default='data/bu_data', help='downloaded feature directory')
parser.add_argument('--output_dir', default='data/cocobu', help='output feature files')
args = parser.parse_args()
csv.field_size_limit(sys.maxsize)
FIELDNAMES = ['image_id', 'image_w','image_h','num_boxes', 'boxes', 'features']
infiles = ['trainval/karpathy_test_resnet101_faster_rcnn_genome.tsv',
'trainval/karpathy_val_resnet101_faster_rcnn_genome.tsv',\
'trainval/karpathy_train_resnet101_faster_rcnn_genome.tsv.0', \
'trainval/karpathy_train_resnet101_faster_rcnn_genome.tsv.1']
os.makedirs(args.output_dir+'_att')
os.makedirs(args.output_dir+'_fc')
os.makedirs(args.output_dir+'_box')
for infile in infiles:
print('Reading ' + infile)
with open(os.path.join(args.downloaded_feats, infile), "r+b") as tsv_in_file:
reader = csv.DictReader(tsv_in_file, delimiter='\t', fieldnames = FIELDNAMES)
for item in reader:
item['image_id'] = int(item['image_id'])
item['num_boxes'] = int(item['num_boxes'])
for field in ['boxes', 'features']:
item[field] = np.frombuffer(base64.decodestring(item[field]),
dtype=np.float32).reshape((item['num_boxes'],-1))
np.savez_compressed(os.path.join(args.output_dir+'_att', str(item['image_id'])), feat=item['features'])
np.save(os.path.join(args.output_dir+'_fc', str(item['image_id'])), item['features'].mean(0))
np.save(os.path.join(args.output_dir+'_box', str(item['image_id'])), item['boxes'])
| [
"[email protected]"
] | |
ce037214f60bd6c8975b5e9da15eaaa6acd30d83 | 685038d4be188fa72e9dba1d2213a47ee3aa00a2 | /ECOS2021/Demands/Inputs/Surveys/A/S3/Oct_S3_A.py | f3bb6b79446fe8f081e16398f9239662c9c7acc0 | [] | no_license | CIE-UMSS/Tradeoff-between-Installed-Capacity-and-Unserved-Energy | e5599e4e4ac60b97f0c4c57c5de95e493b1b5ac4 | 459f31552e3ab57a2e52167ab82f8f48558e173c | refs/heads/master | 2023-06-01T18:09:29.839747 | 2021-06-19T15:56:26 | 2021-06-19T15:56:26 | 343,720,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,968 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 31 14:33:07 2020
@author: alejandrosoto
Script for 2 class of household in Raqaypampa.
"""
# -*- coding: utf-8 -*-
"""
@author: Alejandro Soto
"""
from core import User, np
User_list = []
#User classes definition
HI = User("high income",1)
User_list.append(HI)
LI = User("low income",0)
User_list.append(LI)
'''
Base scenario (BSA): Indoor bulb (3), outdoor bulb (1), radio (1), tv (1), phone charger (2), Water Heater (1), Mixer (1)
Base scenario (BSB): Indoor bulb (3), outdoor bulb (1), radio (1), tv (1), phone charger (2)
A
Scenario 1: BSA + Fridge (1) + Freezer* (1).
Scenario 2: BSA + Fridge (1).
Scenario 3: BSA + Fridge (1)*.
Scenario 4: BSA + Freezer (1).
Scenario 5: BSA + Wheler (1).
Scenario 6: BSA + Grinder (1).
Scenario 7: Add + Dryer (1).
Scenario 9: All
B
Scenario 8: BSB + Water Heater** (1).
Scenario 10: BSA + Pump Water (1).
Scenario 11: BSA + DVD (1).
Scenario 12: BSA + Blender (1).
Scenario 13: BSA + Iron (1).
Scenario 14: BSA + Mill (1).
* With seasonal variation
** Occasional use
Cold Months: May-Aug Std Cycle 8:00-18:00 Above 10 degrees
Warm Months: Jan-Apr Std Cycle 0:00-23:59 Above 10 degrees
Hot Nonths: Sep-Dec Std Cycle 0:00-10:00; 15:01-23:59 Above 10 degrees
Int Cycle 10:01-15:00
'''
#High-Income
#indoor bulb
HI_indoor_bulb = HI.Appliance(HI,3,7,1,320,0.6,190)
HI_indoor_bulb.windows([1080,1440],[0,0])
#outdoor bulb
HI_outdoor_bulb = HI.Appliance(HI,1,13,1,340,0.1,300)
HI_outdoor_bulb.windows([1100,1440],[0,0])
HI_Radio = HI.Appliance(HI,1,7,1,280,0.3,110)
HI_Radio.windows([420,708],[0,0])
#tv
HI_TV = HI.Appliance(HI,1,60,3,300,0.38,114)
HI_TV.windows([1140,1440],[651,1139],0.35,[300,650])
#phone charger
HI_Phone_charger = HI.Appliance(HI,2,5,3,250,0.4,95)
HI_Phone_charger.windows([1190,1440],[0,420],0.35,[421,1189])
#water_heater
HI_Water_heater = HI.Appliance(HI,1,150,1,60,0.05,30)
HI_Water_heater.windows([0,1440],[0,0])
#mixer
HI_Mixer = HI.Appliance(HI,1,50,1,10,0.5,5,occasional_use = 0.3)
HI_Mixer.windows([420,560],[0,0])
#fridge
HI_Fridge = HI.Appliance(HI,1,200,1,1440,0,30,'yes',3)
HI_Fridge.windows([0,1440],[0,0])
HI_Fridge.specific_cycle_1(200,20,5,10)
HI_Fridge.specific_cycle_2(200,15,5,15)
HI_Fridge.specific_cycle_3(200,10,5,20)
HI_Fridge.cycle_behaviour([570,990],[0,0],[0,480],[1170,1440],[481,569],[991,1169])
#Lower Income
#indoor bulb
LI_indoor_bulb = LI.Appliance(LI,3,7,2,287,0.4,124)
LI_indoor_bulb.windows([1153,1440],[0,300],0.5)
#outdoor bulb
LI_outdoor_bulb = LI.Appliance(LI,1,13,1,243,0.3,71)
LI_outdoor_bulb.windows([1197,1440],[0,0])
#radio
LI_Radio = LI.Appliance(LI,1,7,2,160,0.3,49)
LI_Radio.windows([480,840],[841,1200],0.5)
#TV
LI_TV = LI.Appliance(LI,1,100,3,250,0.3,74)
LI_TV.windows([1170,1420],[551,1169],0.3,[300,550])
#phone charger
LI_Phone_charger = LI.Appliance(LI,2,5,3,200,0.4,82)
LI_Phone_charger.windows([1020,1440],[0,420],0.3,[720,1019])
| [
"[email protected]"
] | |
df585f561e1bd0f95edb526fd662fc99e5cba754 | f56fda98a93cedcec33a7d9fbb330e5cf78031e1 | /Leetcode/45. Jump Game II.py | b2d963b2956cda7d0acaeac20324868e1d0d0149 | [] | no_license | GuanzhouSong/Leetcode_Python | 7a2bac42203fb6c0b671153d9e300eb0c73d39d1 | dbb9be177c5e572eb72a79508bb6e24f357d54b3 | refs/heads/master | 2021-09-25T04:10:09.217565 | 2018-10-17T22:31:41 | 2018-10-17T22:31:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | import sys
class Solution:
def jump(self, nums):
mintimes = [0] + [sys.maxsize] * (len(nums) - 1)
for i in range(0, len(nums) - 1):
for j in range(1, min(nums[i] + 1, len(nums) - i)):
mintimes[i + j] = min(mintimes[i + j], mintimes[i] + 1)
return mintimes[-1]
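# Worked example (illustrative, not part of the original file): for nums = [2, 3, 1, 1, 4]
# the DP table mintimes evolves to [0, 1, 1, 2, 2], so jump() returns 2
# (one possible path is index 0 -> 1 -> 4, i.e. two jumps).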
s = Solution()
nums = [6, 2, 6, 1, 7, 9, 3, 5, 3, 7, 2, 8, 9, 4, 7, 7, 2, 2, 8, 4, 6, 6, 1, 3]
print(s.jump(nums))
| [
"[email protected]"
] | |
382ab283e99868eb7c25aae590e703339aa079d0 | f4b694982027ac362de1e9d6755f2943d0355a06 | /DECSKS-12 -- debugging the recast from DECSKS-09 by comparing with v1.2/v1.2/DECSKS/lib/diagnostics.py | e4234421bf17641907309887eb6c7504590672c3 | [] | no_license | dsirajud/IPython-notebooks | 55275e44191c16f5393571522787993f931cfd98 | 6ad9d978c611558525fc9d716af101dc841a393b | refs/heads/master | 2021-01-15T15:33:57.119172 | 2016-07-13T20:08:29 | 2016-07-13T20:08:29 | 35,054,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,472 | py | import DECSKS
import numpy as np
import sys # to retrieve smallest float for lower bound tolerance
def HighPrecisionE(number):
"""Converts a number into a string object
while retaining a chosen degree of precision. This
is designed to evade the truncation that is involved
with str() so that outputs can store numbers with high
precision
inputs:
number -- (number)
outputs:
string object with chosen precision in scientific notation
"""
return "%.22e" % number
def calcs_and_writeout(sim_params,f,n,x,v):
"""orchestrates the calculation of various quantities, e.g.
Lp norms, energy, electrostatic energy, ...
inputs:
files -- (dict) contains output filenames to be written to
f -- (ndarray, ndim=3), f(t,x,v)
n -- (int) time step number, t^n
x -- (instance) space variable
v -- (instance) velocity variable
outputs:
None
"""
#I = "invariant", I1 = L1 norm invariant, etc.
if sim_params['record_outputs'] == 'yes':
I1 = L1(f,n,x,v)
I2 = L2(f,n,x,v)
# electrostatic terms
E = DECSKS.lib.fieldsolvers.Poisson(sim_params['ni'], f, x, v, n)
IW = total_energy(f,n,x,v,E)
WE = electrostatic_energy(x,E)
S = entropy(f,n,x,v)
# write to files
sim_params['outfiles']['I1'].write(HighPrecisionE(I1) + '\n')
sim_params['outfiles']['I2'].write(HighPrecisionE(I2) + '\n')
sim_params['outfiles']['IW'].write(HighPrecisionE(IW) + '\n')
sim_params['outfiles']['WE'].write(HighPrecisionE(WE) + '\n')
sim_params['outfiles']['S'].write(HighPrecisionE(S) + '\n')
if n == sim_params['Nt']:
close_all_outfiles(sim_params)
return None
def L1(f,n,x,v):
"""computes the L1 norm
inputs:
f -- (ndarray, ndim=3), f(t,x,v)
n -- (int) time step number, t^n
x -- (instance) space variable
v -- (instance) velocity variable
outputs:
I1 -- (float) L1 norm
"""
return np.sum(f[n,:,:]) * x.width * v.width
def L2(f,n,x,v):
"""computes the square of the L2 norm. Note, the intended
purpose of this computation is to compare with its deviation
from the value at time zero. To minimize compounded errors
from redundant operations, a squareroot is not taken here
and should be applied later if desired,
e.g. np.sqrt( (L2[t] - L2[0]) / L2[0])
inputs:
f -- (ndarray, ndim=3), f(t,x,v)
n -- (int) time step number, t^n
x -- (instance) space variable
v -- (instance) velocity variable
outputs:
I2 -- (float) L2 norm
"""
# compute the square of the L2 norm below to minimize
# compounded error from repeated operations like squareroot
return np.sum(f[n,:,:]**2) * x.width * v.width
def total_energy(f,n,x,v,E):
"""computes the total energy for a Vlasov-Poisson system
IW = 1/2 sum_i sum_j f[n,i,j] dx dv + 1/2 sum_i E[i] dx
inputs:
f -- (ndarray, ndim=3), f(t,x,v)
n -- (int) time step number, t^n
x -- (instance) space variable
v -- (instance) velocity variable
E -- (ndarray, ndim=1), E(x) at t^n
outputs:
IW -- (float) total energy at time t^n in system
"""
return 1/2.*np.sum(f[n,:,:] * v.cells **2) * x.width * v.width \
+ 1/2. * np.sum(E**2) * x.width
def electrostatic_energy(x,E):
"""computes the electrostic energy WE = 1/2 sum_i E[i] dx
inputs:
E -- (ndarray, ndim=1) E(x) at t^n
x -- (instance) space variable
outputs:
WE -- (float) electrostatic energy at time t^n
"""
return 1/2.* np.sum(E**2)* x.width
def entropy(f,n,x,v):
"""computes the entropy S at time t^n,
S = sum_i sum_j f_[n,i,j] * ln (f[n,i,j] + eps) dxdv
inputs:
f -- (ndarray, ndim=3), f(t,x,v)
n -- (int) time step number, t^n
x -- (instance) space variable
v -- (instance) velocity variable
outputs:
S -- (float) entropy at time t^n
"""
eps = sys.float_info.min # to evade taking np.log(0)
return np.sum(f[n,:,:] * np.log(f[n,:,:] + eps)) * x.width * v.width
def close_all_outfiles(sim_params):
"""Closes all opened output files inside dictionary
sim_params['outfiles']
inputs:
sim_params -- (dict) simulation parameters, includes dict of outfiles
outputs:
None
"""
if sim_params['outfiles'] is not None:
for outfile in sim_params['outfiles'].itervalues():
outfile.close()
return None
| [
"[email protected]"
] | |
bfcab4cecd2a7d8e3946cf55d03659e839d25b3d | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/82/usersdata/165/44623/submittedfiles/decimal2bin.py | b07134dde6b2df7bd468626e44d12cc75e301ed4 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | # -*- coding: utf-8 -*-
n=int(input('digite n:'))
i=0
soma=0
while n>0:
resto=n%10
soma=soma+resto*(2**i)
n=n//10
i=i+1
print(soma)
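# Worked example (illustrative): for the input 1011 the digits are consumed from the
# least significant end, so soma = 1*1 + 1*2 + 0*4 + 1*8 = 11; i.e. the loop converts
# a binary number typed as decimal digits into its base-10 value.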
| [
"[email protected]"
] | |
9feacf0a85e2b4cb750a3f12f786d8971b96efc5 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/arc042/B/4081354.py | 8eea907c466a07c6b45bfcd05fcae80479294c1a | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | #!/usr/bin/env python3
p = complex(*list(map(int, input().split())))
N = int(input())
li = [complex(*list(map(int, input().split()))) for _ in range(N)]
li += [li[0]]
m = min(((p - a) / (b - a)).imag * abs(b - a) for a, b in zip(li, li[1:]))
print(m)
| [
"[email protected]"
] | |
55e546f6119a07a5c3ec899c648a6d4fb3a1b7f0 | 1b29279e1517cb331657539825d0b6259582d00d | /hdrstats.py | b64985b3ecdbe30431cf81221b380822baaa8439 | [
"Unlicense"
] | permissive | OCHA-DAP/dap-scrapers | 1671a0f52ce19150dacae724394f893f87508f5e | 3beb34acfe5bf5f2fd7d2a15857264a1e65bcf08 | refs/heads/master | 2022-01-18T17:38:23.097214 | 2021-12-22T20:19:56 | 2021-12-22T20:19:56 | 13,861,733 | 4 | 2 | null | 2016-03-31T17:18:08 | 2013-10-25T13:42:20 | Python | UTF-8 | Python | false | false | 5,440 | py | import re
import lxml.html
import requests
import xypath
import StringIO
import messytables
#from hamcrest import equal_to, is_in
from orm import session, Value, DataSet, Indicator
import orm
import dateutil.parser
#import re
indicator_list = """
100106
38906
68606
89006
101406
98706
57506
38006
69706
103006
105906""".strip().split('\n')
"""Value: dsID, region, indID, period, value, source, is_number
DataSet: dsID, last_updated, last_scraped, name
Indicator: indID, name, units
"""
def disasters():
baseurl = "http://hdrstats.undp.org/en/tables/displayByRow.cfm"
data = {"selectedCountries": "3,103,203,403,503,703,803,903,1103,1203,1303,1403,1503,1603,1703,1803,1903,2003,2103,2303,2403,2503,2603,2703,2903,3003,3103,3203,3303,3403,3503,3603,3803,3903,4003,4103,4203,4303,4403,4503,4703,4803,4903,5003,5103,5203,5303,5403,5503,5603,5703,5803,5903,6003,6103,6203,6303,6603,6703,6803,7103,7203,7303,7403,7503,7703,7903,8203,8303,8403,8503,8603,8803,8903,9003,9103,9203,9303,9403,9503,9603,9803,9903,10003,10103,10203,10303,10403,10503,10603,10703,10803,10903,11003,11103,11203,11303,11403,11503,11603,11703,11803,12103,12203,12303,12403,12503,12603,12703,12903,13003,13203,13303,13403,13503,13603,13703,13903,14003,14103,14203,14303,14403,14503,14803,14903,15003,15103,15503,15603,15703,15803,15903,16003,16103,16203,16303,16403,16603,16703,16903,17103,17203,17303,17503,17603,17803,17903,18003,18103,18203,18303,18403,18603,18703,18803,18903,19003,19103,19203,19303,19403,19503,19603,19703,19903,20003,20103,20203,20403,20503,20603,12003,20703,20803,21003,21103,21203,21303,21403,21603,21703,21803,21903,22003,22103,22203,22303,22403,22503,22603,23003,23103,23203,202,2,102,2602,302,402,602,702,502,902,802,1002,1202,1102,1402,1302,1502,1602,1702,2202,1802,2002,1902,2302,2102,2402,2502,2702,3402,3302,3502,3702,3602,3802,3902,4002,4102,",
"selectedIndicators": "98606,",
"selectedYears": "1960,1970,1980,1985,1990,1995,2000,2005,2006,2007,2008,2009,2010,2011,2012,",
"language": "en",
"displayIn": "row"}
html = requests.post(baseurl, data=data).content
return html, baseurl
def getindicator(ind="100106", overridefunction=None):
if not overridefunction:
baseurl = 'http://hdrstats.undp.org/en/indicators/display_cf_xls_indicator.cfm?indicator_id=%s&lang=en' % ind
html = requests.get(baseurl).content
else:
html, baseurl = overridefunction()
value = {'dsID': 'HDRStats',
'indID': "HDR:"+ind,
'source': baseurl,
'is_number': True}
dataset = {'dsID': 'HDRStats',
'last_scraped': orm.now(),
'name': 'Human Development Indicators, UNDP'}
indicator = {'indID': "HDR:"+ind}
hdi_indicator = {'indID': 'HDR:HDI Rank',
'name': 'Human Development Index rank',
'units': ''}
Indicator(**hdi_indicator).save()
DataSet(**dataset).save()
    #print html   # debug leftover, disabled so the scrape below actually runs
    #exit(3)
htmlio = StringIO.StringIO(html)
messy = messytables.html.HTMLTableSet(htmlio)
table = xypath.Table.from_messy(list(messy.tables)[0])
root = lxml.html.fromstring(html)
"get odd indicator / update time"
indicator_text = root.xpath("//h2/text()")[-1]
print indicator_text
try:
indicator_split, = re.findall("(.*)\(([^\(\)]+)\)", indicator_text)
except ValueError:
indicator_split = [indicator_text, ""]
indicator['name'], indicator['units'] = indicator_split
indicator['name'] = indicator['name'].strip()
access_text, = [x.tail.strip() for x in root.xpath("//br") if str(x.tail) != "None" and x.tail.strip()]
access_date_raw, = re.findall('Accessed:(.*)from', access_text)
dataset['last_updated'] = dateutil.parser.parse(access_date_raw).isoformat()
print dataset['last_updated'], indicator['name'], "*", indicator['units']
Indicator(**indicator).save()
country_cell = table.filter("Country").assert_one()
years = country_cell.fill(xypath.RIGHT).filter(lambda b: b.value != '')
countries = country_cell.fill(xypath.DOWN)
hdi_rank = table.filter("HDI Rank").assert_one()
max_year = max(year.value for year in years)
for i in countries.junction(hdi_rank):
newvalue = dict(value)
newvalue['indID'] = "HDR:HDI Rank"
newvalue['region'] = get_region(i[0])
newvalue['value'] = i[2].value.strip()
newvalue['period'] = 2012 # TODO Hard coded for now because year it pertains to is not clear
if newvalue['value'].strip() != '..':
Value(**newvalue).save()
for i in countries.junction(years):
newvalue = dict(value)
newvalue['region'] = get_region(i[0])
newvalue['value'] = i[2].value.strip()
newvalue['period'] =i[1].value.strip()
if newvalue['value'].strip() != '..':
Value(**newvalue).save()
print newvalue
session.commit()
def get_region(country):
region_el=lxml.html.fromstring(country.properties['html'])
try:
link, = region_el.xpath('//a/@href')
except ValueError: # non-countries don't have links.
niceregion = country.value.strip()
else:
niceregion, = re.findall("profiles/([^\.]*)\.html", link)
return niceregion
#getindicator("98606", disasters)
#exit()
for ind in indicator_list:
print ind
getindicator(ind)
| [
"[email protected]"
] | |
79f62a7ee6eb1f0d6df192c475af8fec47ca39a9 | ea5af064f6583c4dc244627f67bf51a9119347a9 | /crypto.py | 4c6a27ad97768b78070c68886cdd9f351d4f73f8 | [] | no_license | celiyan/PyPassManager | 034c10cfe594d365822dc836e0f0143e02ac25e3 | fda994b44b7a003825e16bbcaffd07cf094e04b7 | refs/heads/master | 2022-12-19T19:51:29.714559 | 2020-10-15T05:16:37 | 2020-10-15T05:16:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,530 | py | from Crypto.Cipher import AES
from os import urandom
def pad(txt):
"AES CBC requires the number of plaintext bytes to be a multiple of 16, so we pad it to the nearest multiple. Takes&Returns bytes object."
padding_length = AES.block_size - len(txt)%AES.block_size
# we pad with a character = to the padding length, to make unpadding easy
padding = chr(padding_length) * padding_length
return txt+padding.encode()
def unpad(txt):
"To get just the encrypted data back, we need to undo any meaningless padding we added to satisfy length requirements. Takes&Returns bytes object."
padding_length = txt[-1] # length is stored as the character code of the padding
return txt[:-padding_length]
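# Quick roundtrip sketch (illustrative, not part of the original module): pad() always
# appends between 1 and AES.block_size bytes, each equal to the padding length, so
#   padded = pad(b"secret")            # hypothetical plaintext
#   len(padded) % AES.block_size == 0  # True
#   unpad(padded) == b"secret"         # True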
def encrypt(raw, key):
"Encrypt bytes using AES CBC, and a random InitialVector that is stored at the start. Inputs two bytes objects: plaintext & key. Returns ciphertext as bytes object."
iv = urandom(AES.block_size)
key = key[:32] # key must be 32 bytes, masterpass hash is 64 bytes
cipher = AES.new(key, AES.MODE_CBC, iv)
return iv+cipher.encrypt(pad(raw)) # store iv so it can be decoded
def decrypt(data, key):
"Decrypt bytes using AES CBC, extracting the InitialVector from the start. Inputs two bytes objects: ciphertext & key. Returns plaintext as bytes object."
iv, data = data[:AES.block_size], data[AES.block_size:] # extract the iv from the start
key = key[:32] # key must be 32 bytes, masterpass hash is 64 bytes
cipher = AES.new(key, AES.MODE_CBC, iv)
    return unpad(cipher.decrypt(data))
| [
"[email protected]"
] | |
8284303e2d78a6089a9fd4c7ccbb37454b2e67c4 | 503d2f8f5f5f547acb82f7299d86886691966ca5 | /atcoder/abc200_c.py | e206350c17a0371913a9b0f7696b9550c9039895 | [] | no_license | Hironobu-Kawaguchi/atcoder | 3fcb649cb920dd837a1ced6713bbb939ecc090a9 | df4b55cc7d557bf61607ffde8bda8655cf129017 | refs/heads/master | 2023-08-21T14:13:13.856604 | 2023-08-12T14:53:03 | 2023-08-12T14:53:03 | 197,216,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | # https://atcoder.jp/contests/abc200/tasks/abc200_c
from collections import Counter
n = int(input())
a = list(map(int, (input().split())))
for i in range(n):
a[i] %= 200
cnt = Counter(a)
ans = 0
for i, v in cnt.items():
if v>=2:
ans += v*(v-1) // 2
print(ans)
| [
"[email protected]"
] | |
c344c8404ac954642b6f02f8f20bca296c731bae | 5fc6b5a420b9cb2a7d5102df55b0b5248f8199e1 | /pypykatz/commons/winapi/local/function_defs/live_reader_ctypes.py | aa2bae8f03b5ebc283d8a225b8ccda4bdf88894b | [
"MIT"
] | permissive | ASkyeye/pypykatz | 8e1c598d57017fd400b9a8d830ed314be7562b96 | 8ad07f2f6f0c4904f9a77c711f693d6c794a7fb4 | refs/heads/master | 2021-07-03T13:48:34.350145 | 2020-11-14T22:50:30 | 2020-11-14T22:50:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,565 | py | import os
import sys
import ctypes
import enum
import logging
from pypykatz import logger
from .ntdll import *
from .kernel32 import *
from .psapi import *
class WindowsMinBuild(enum.Enum):
WIN_XP = 2500
WIN_2K3 = 3000
WIN_VISTA = 5000
WIN_7 = 7000
WIN_8 = 8000
WIN_BLUE = 9400
WIN_10 = 9800
#utter microsoft bullshit commencing..
def getWindowsBuild():
class OSVersionInfo(ctypes.Structure):
_fields_ = [
("dwOSVersionInfoSize" , ctypes.c_int),
("dwMajorVersion" , ctypes.c_int),
("dwMinorVersion" , ctypes.c_int),
("dwBuildNumber" , ctypes.c_int),
("dwPlatformId" , ctypes.c_int),
("szCSDVersion" , ctypes.c_char*128)];
GetVersionEx = getattr( ctypes.windll.kernel32 , "GetVersionExA")
version = OSVersionInfo()
version.dwOSVersionInfoSize = ctypes.sizeof(OSVersionInfo)
GetVersionEx( ctypes.byref(version) )
return version.dwBuildNumber
DELETE = 0x00010000
READ_CONTROL = 0x00020000
WRITE_DAC = 0x00040000
WRITE_OWNER = 0x00080000
SYNCHRONIZE = 0x00100000
STANDARD_RIGHTS_REQUIRED = DELETE | READ_CONTROL | WRITE_DAC | WRITE_OWNER
STANDARD_RIGHTS_ALL = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE
if getWindowsBuild() >= WindowsMinBuild.WIN_VISTA.value:
PROCESS_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFFF
else:
PROCESS_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFF
PROCESS_QUERY_INFORMATION = 0x0400
PROCESS_VM_READ = 0x0010
#https://msdn.microsoft.com/en-us/library/windows/desktop/ms683217(v=vs.85).aspx
def enum_process_names():
pid_to_name = {}
for pid in EnumProcesses():
if pid == 0:
continue
pid_to_name[pid] = 'Not found'
try:
process_handle = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, False, pid)
except Exception as e:
continue
pid_to_name[pid] = QueryFullProcessImageNameW(process_handle)
return pid_to_name
def get_lsass_pid():
pid_to_name = enum_process_names()
for pid in pid_to_name:
if pid_to_name[pid].lower().find('lsass.exe') != -1:
return pid
raise Exception('Failed to find lsass.exe')
def enum_lsass_handles():
#searches for open LSASS process handles in all processes
# you should be having SE_DEBUG enabled at this point
RtlAdjustPrivilege(20)
lsass_handles = []
sysinfohandles = NtQuerySystemInformation(16)
for pid in sysinfohandles:
if pid == 4:
continue
#if pid != GetCurrentProcessId():
# continue
for syshandle in sysinfohandles[pid]:
#print(pid)
try:
pHandle = OpenProcess(PROCESS_DUP_HANDLE, False, pid)
except Exception as e:
logger.debug('Error opening process %s Reason: %s' % (pid, e))
continue
try:
dupHandle = NtDuplicateObject(pHandle, syshandle.Handle, GetCurrentProcess(), PROCESS_QUERY_INFORMATION|PROCESS_VM_READ)
#print(dupHandle)
except Exception as e:
logger.debug('Failed to duplicate object! PID: %s HANDLE: %s' % (pid, hex(syshandle.Handle)))
continue
oinfo = NtQueryObject(dupHandle, ObjectTypeInformation)
if oinfo.Name.getString() == 'Process':
try:
pname = QueryFullProcessImageNameW(dupHandle)
if pname.lower().find('lsass.exe') != -1:
logger.info('Found open handle to lsass! PID: %s HANDLE: %s' % (pid, hex(syshandle.Handle)))
#print('%s : %s' % (pid, pname))
lsass_handles.append((pid, dupHandle))
except Exception as e:
logger.debug('Failed to obtain the path of the process! PID: %s' % pid)
continue
return lsass_handles
| [
"[email protected]"
] | |
84be026c4a9decd8c8cbeb0044e6269de46348c9 | c383840367c09a4aa3762d224b17b742fe53eb31 | /GANs_Advanced/DiscoGAN/train_DiscoGAN_org.py | 081a29ab949a0e8e7a706e48f2d192a1060b2e74 | [] | no_license | qzq2514/GAN | 04f3f1ff6437d6805369f28b207a8f726a112d11 | a313deb08884c2ce60d4fc3834b79a8518e38f44 | refs/heads/master | 2020-09-21T17:32:00.913453 | 2020-01-17T05:02:18 | 2020-01-17T05:02:18 | 224,866,070 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,800 | py | from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.python.framework import graph_util
import tensorflow.contrib.slim as slim
from DataLoader import Pix2Pix_loader
from net.DiscoGAN import DiscoGAN
import tensorflow as tf
import numpy as np
import scipy.misc
import os
os.environ['CUDA_VISIBLE_DEVICES']='1'
image_height = 64
image_width = 64
batch_size = 64
sample_num = 10
Train_Step = 30005
starting_rate = 0.01
change_rate = 0.5
learning_rate = 0.0002
#读取未分开的成对数据
image_dir = "/media/cgim/data/GAN/data/edges2shoes/"
model_name = "DiscoGAN_1227"
model_path="/media/cgim/dataset/models/"+model_name
pb_path=os.path.join(model_path,"pb/")
ckpt_path=os.path.join(model_path,"ckpt/")
result_dir=model_path+"/result"
if not os.path.exists(result_dir):
os.makedirs(result_dir)
if not os.path.exists(pb_path):
os.makedirs(pb_path)
if not os.path.exists(ckpt_path):
os.makedirs(ckpt_path)
def train():
input_A_place = tf.placeholder(tf.float32,shape=[None,image_height,image_width, 3],name="input_A")
input_B_place = tf.placeholder(tf.float32, shape=[None, image_height,image_width, 3], name="input_B")
is_training_place = tf.placeholder_with_default(False, shape=(),name="is_training")
reconst_rate_place = tf.placeholder(tf.float32, shape=(),name="reconst_rate")
discoGan = DiscoGAN(is_training_place,reconst_rate_place)
G_loss,D_loss = discoGan.build_DiscoGAN(input_A_place,input_B_place)
g_vars,d_vars = discoGan.get_vars()
global_step = tf.Variable(-1, trainable=False,name="global_step")
global_step_increase = tf.assign(global_step, tf.add(global_step, 1))
train_op_D = tf.train.AdamOptimizer(learning_rate, beta1=0.5).minimize(D_loss, var_list=d_vars)
train_op_G = tf.train.AdamOptimizer(learning_rate, beta1=0.5).minimize(G_loss, var_list=g_vars)
A2B_out,ABA_out = discoGan.sample_generate(input_A_place, "A2B")
A2B_output = tf.identity(A2B_out, name="A2B_output")
B2A_out,BAB_out = discoGan.sample_generate(input_B_place, "B2A")
B2A_output = tf.identity(B2A_out, name="B2A_output")
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
ckpt = tf.train.get_checkpoint_state(ckpt_path)
if ckpt and ckpt.model_checkpoint_path:
ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
saver.restore(sess, os.path.join(ckpt_path, ckpt_name))
_global_step = sess.run(global_step_increase)
dataLoader = Pix2Pix_loader(image_dir, image_height, image_width,batch_size=batch_size,global_step=_global_step)
while _global_step<Train_Step:
if _global_step<10000:
reconst_rate = starting_rate
else:
reconst_rate = change_rate
images_A,images_B = dataLoader.next_batch() #0~255
feed_dict = {input_A_place:images_A,input_B_place:images_B,
is_training_place:True,reconst_rate_place:reconst_rate}
if _global_step%2==0:
sess.run(train_op_D,feed_dict=feed_dict)
sess.run(train_op_G, feed_dict=feed_dict)
_global_step,_D_loss,_G_loss = sess.run([global_step,D_loss,G_loss],
feed_dict=feed_dict)
if _global_step%50==0:
print("Step:{},Reconst_rate:{},D_loss:{},G_loss:{}".format(_global_step,reconst_rate, _D_loss, _G_loss,))
if _global_step%100==0:
test_images_A, test_images_B = dataLoader.random_next_test_batch()
#save result form A to B
_A2B_output,_ABA_out = sess.run([A2B_output,ABA_out],feed_dict={input_A_place:test_images_A})
_A2B_output = (_A2B_output + 1) / 2 * 255.0
_ABA_out = (_ABA_out + 1) / 2 * 255.0
for ind,trg_image in enumerate(_A2B_output[:sample_num]):
scipy.misc.imsave(result_dir + "/{}_{}_A.jpg".format(_global_step,ind),test_images_A[ind])
scipy.misc.imsave(result_dir + "/{}_{}_A2B.jpg".format(_global_step,ind), _A2B_output[ind])
scipy.misc.imsave(result_dir + "/{}_{}_ABA.jpg".format(_global_step, ind), _ABA_out[ind])
# save result form B to A
_B2A_output,_BAB_out = sess.run([B2A_output,BAB_out], feed_dict={input_B_place: test_images_B})
_B2A_output = (_B2A_output + 1) / 2 * 255.0
_BAB_out = (_BAB_out + 1) / 2 * 255.0
for ind,trg_image in enumerate(_B2A_output[:sample_num]):
scipy.misc.imsave(result_dir + "/{}_{}_B.jpg".format(_global_step,ind),test_images_B[ind])
scipy.misc.imsave(result_dir + "/{}_{}_B2A.jpg".format(_global_step,ind), _B2A_output[ind])
scipy.misc.imsave(result_dir + "/{}_{}_BAB.jpg".format(_global_step, ind), _BAB_out[ind])
if _global_step==Train_Step-5:
# 保存PB
constant_graph = graph_util.convert_variables_to_constants(sess, sess.graph_def,
["A2B_output","B2A_output"])
save_model_name = model_name + "-" + str(_global_step) + ".pb"
with tf.gfile.FastGFile(pb_path + save_model_name, mode="wb") as fw:
fw.write(constant_graph.SerializeToString())
# 保存CKPT
saver.save(sess, ckpt_path + model_name + ".ckpt", global_step=_global_step)
print("Successfully saved model {}".format(save_model_name))
return
_global_step = sess.run(global_step_increase)
if __name__ == '__main__':
    train()
| [
"[email protected]"
] | |
d235aec102d27ca4fae3b8e5d215f502675ae6fb | 17c366bf8aa9fed59fb3d91db06142860cb9ce38 | /nbs/examples/mnist_blocks.py | 14043be821f6c97c3bf782edb3b9b4b097f38029 | [
"Apache-2.0"
] | permissive | dienhoa/fastai | 3f4884f9fb96f9e5199e33b959478dfa0bbfa0d4 | fdce0330e05ae02db90c3456f9fc2827c3cf86a0 | refs/heads/master | 2022-04-14T06:27:52.994595 | 2022-04-13T21:24:27 | 2022-04-13T21:24:27 | 154,803,492 | 0 | 0 | Apache-2.0 | 2018-10-26T08:38:44 | 2018-10-26T08:38:43 | null | UTF-8 | Python | false | false | 422 | py | from fastai.vision.all import *
splitter = GrandparentSplitter(train_name='training', valid_name='testing')
mnist = DataBlock(blocks=(ImageBlock(PILImageBW), CategoryBlock),
get_items=get_image_files, splitter=splitter, get_y=parent_label)
if __name__ == '__main__':
data = mnist.dataloaders(untar_data(URLs.MNIST), bs=256)
learn = cnn_learner(data, resnet18)
learn.fit_one_cycle(1, 1e-2)
| [
"[email protected]"
] | |
cb4ed431777e8b10a7599b169d74a3f947751042 | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/ads/googleads/v5/googleads-py/tests/unit/gapic/googleads.v5/services/test_ad_service.py | 8c1b9a30e2d2d599c646bfa72dbe5b188716250f | [
"Apache-2.0"
] | permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,485 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from unittest import mock
import grpc
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.ads.googleads.v5.common.types import ad_asset
from google.ads.googleads.v5.common.types import ad_type_infos
from google.ads.googleads.v5.common.types import custom_parameter
from google.ads.googleads.v5.common.types import final_app_url
from google.ads.googleads.v5.common.types import url_collection
from google.ads.googleads.v5.enums.types import ad_type
from google.ads.googleads.v5.enums.types import app_url_operating_system_type
from google.ads.googleads.v5.enums.types import call_conversion_reporting_state
from google.ads.googleads.v5.enums.types import device
from google.ads.googleads.v5.enums.types import display_ad_format_setting
from google.ads.googleads.v5.enums.types import display_upload_product_type
from google.ads.googleads.v5.enums.types import legacy_app_install_ad_app_store
from google.ads.googleads.v5.enums.types import mime_type
from google.ads.googleads.v5.enums.types import response_content_type
from google.ads.googleads.v5.enums.types import served_asset_field_type
from google.ads.googleads.v5.enums.types import system_managed_entity_source
from google.ads.googleads.v5.resources.types import ad
from google.ads.googleads.v5.services.services.ad_service import AdServiceClient
from google.ads.googleads.v5.services.services.ad_service import transports
from google.ads.googleads.v5.services.types import ad_service
from google.api_core import client_options
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert AdServiceClient._get_default_mtls_endpoint(None) is None
assert AdServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert AdServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
assert AdServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
assert AdServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
assert AdServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
def test_ad_service_client_from_service_account_info():
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
factory.return_value = creds
info = {"valid": True}
client = AdServiceClient.from_service_account_info(info)
assert client.transport._credentials == creds
assert client.transport._host == 'googleads.googleapis.com:443'
def test_ad_service_client_from_service_account_file():
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
factory.return_value = creds
client = AdServiceClient.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
client = AdServiceClient.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert client.transport._host == 'googleads.googleapis.com:443'
def test_ad_service_client_get_transport_class():
transport = AdServiceClient.get_transport_class()
assert transport == transports.AdServiceGrpcTransport
transport = AdServiceClient.get_transport_class("grpc")
assert transport == transports.AdServiceGrpcTransport
@mock.patch.object(AdServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AdServiceClient))
def test_ad_service_client_client_options():
# Check that if channel is provided we won't create a new one.
with mock.patch('google.ads.googleads.v5.services.services.ad_service.AdServiceClient.get_transport_class') as gtc:
transport = transports.AdServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials()
)
client = AdServiceClient(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch('google.ads.googleads.v5.services.services.ad_service.AdServiceClient.get_transport_class') as gtc:
client = AdServiceClient(transport="grpc")
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = AdServiceClient(client_options=options)
grpc_transport.assert_called_once_with(
ssl_channel_credentials=None,
credentials=None,
host="squid.clam.whelk",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT
# is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = AdServiceClient()
grpc_transport.assert_called_once_with(
ssl_channel_credentials=None,
credentials=None,
host=client.DEFAULT_ENDPOINT,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = AdServiceClient()
grpc_transport.assert_called_once_with(
ssl_channel_credentials=None,
credentials=None,
host=client.DEFAULT_MTLS_ENDPOINT,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = AdServiceClient()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
with pytest.raises(ValueError):
client = AdServiceClient()
@mock.patch.object(AdServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AdServiceClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
@pytest.mark.parametrize("use_client_cert_env", ["true", "false"])
def test_ad_service_client_mtls_env_auto(use_client_cert_env):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
ssl_channel_creds = mock.Mock()
with mock.patch('grpc.ssl_channel_credentials', return_value=ssl_channel_creds):
grpc_transport.return_value = None
client = AdServiceClient(client_options=options)
if use_client_cert_env == "false":
expected_ssl_channel_creds = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_ssl_channel_creds = ssl_channel_creds
expected_host = client.DEFAULT_MTLS_ENDPOINT
grpc_transport.assert_called_once_with(
ssl_channel_credentials=expected_ssl_channel_creds,
credentials=None,
host=expected_host,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None):
with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock:
with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock:
if use_client_cert_env == "false":
is_mtls_mock.return_value = False
ssl_credentials_mock.return_value = None
expected_host = client.DEFAULT_ENDPOINT
expected_ssl_channel_creds = None
else:
is_mtls_mock.return_value = True
ssl_credentials_mock.return_value = mock.Mock()
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_ssl_channel_creds = ssl_credentials_mock.return_value
grpc_transport.return_value = None
client = AdServiceClient()
grpc_transport.assert_called_once_with(
ssl_channel_credentials=expected_ssl_channel_creds,
credentials=None,
host=expected_host,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None):
with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock:
is_mtls_mock.return_value = False
grpc_transport.return_value = None
client = AdServiceClient()
grpc_transport.assert_called_once_with(
ssl_channel_credentials=None,
credentials=None,
host=client.DEFAULT_ENDPOINT,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_ad_service_client_client_options_from_dict():
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = AdServiceClient(
client_options={'api_endpoint': 'squid.clam.whelk'}
)
grpc_transport.assert_called_once_with(
ssl_channel_credentials=None,
credentials=None,
host="squid.clam.whelk",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_get_ad(transport: str = 'grpc', request_type=ad_service.GetAdRequest):
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_ad),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = ad.Ad(
resource_name='resource_name_value',
id=205,
final_urls=['final_urls_value'],
final_mobile_urls=['final_mobile_urls_value'],
tracking_url_template='tracking_url_template_value',
final_url_suffix='final_url_suffix_value',
display_url='display_url_value',
type_=ad_type.AdTypeEnum.AdType.UNKNOWN,
added_by_google_ads=True,
device_preference=device.DeviceEnum.Device.UNKNOWN,
name='name_value',
system_managed_resource_source=system_managed_entity_source.SystemManagedResourceSourceEnum.SystemManagedResourceSource.UNKNOWN,
text_ad=ad_type_infos.TextAdInfo(headline='headline_value'),
)
response = client.get_ad(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == ad_service.GetAdRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, ad.Ad)
assert response.resource_name == 'resource_name_value'
assert response.id == 205
assert response.final_urls == ['final_urls_value']
assert response.final_mobile_urls == ['final_mobile_urls_value']
assert response.tracking_url_template == 'tracking_url_template_value'
assert response.final_url_suffix == 'final_url_suffix_value'
assert response.display_url == 'display_url_value'
assert response.type_ == ad_type.AdTypeEnum.AdType.UNKNOWN
assert response.added_by_google_ads is True
assert response.device_preference == device.DeviceEnum.Device.UNKNOWN
assert response.name == 'name_value'
assert response.system_managed_resource_source == system_managed_entity_source.SystemManagedResourceSourceEnum.SystemManagedResourceSource.UNKNOWN
def test_get_ad_from_dict():
test_get_ad(request_type=dict)
def test_get_ad_field_headers():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = ad_service.GetAdRequest()
request.resource_name = 'resource_name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_ad),
'__call__') as call:
call.return_value = ad.Ad()
client.get_ad(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'resource_name=resource_name/value',
) in kw['metadata']
def test_get_ad_flattened():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_ad),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = ad.Ad()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_ad(
resource_name='resource_name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].resource_name == 'resource_name_value'
def test_get_ad_flattened_error():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_ad(
ad_service.GetAdRequest(),
resource_name='resource_name_value',
)
def test_mutate_ads(transport: str = 'grpc', request_type=ad_service.MutateAdsRequest):
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.mutate_ads),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = ad_service.MutateAdsResponse(
)
response = client.mutate_ads(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == ad_service.MutateAdsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, ad_service.MutateAdsResponse)
def test_mutate_ads_from_dict():
test_mutate_ads(request_type=dict)
def test_mutate_ads_field_headers():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = ad_service.MutateAdsRequest()
request.customer_id = 'customer_id/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.mutate_ads),
'__call__') as call:
call.return_value = ad_service.MutateAdsResponse()
client.mutate_ads(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'customer_id=customer_id/value',
) in kw['metadata']
def test_mutate_ads_flattened():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.mutate_ads),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = ad_service.MutateAdsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.mutate_ads(
customer_id='customer_id_value',
operations=[ad_service.AdOperation(update_mask=field_mask_pb2.FieldMask(paths=['paths_value']))],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].customer_id == 'customer_id_value'
assert args[0].operations == [ad_service.AdOperation(update_mask=field_mask_pb2.FieldMask(paths=['paths_value']))]
def test_mutate_ads_flattened_error():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.mutate_ads(
ad_service.MutateAdsRequest(),
customer_id='customer_id_value',
operations=[ad_service.AdOperation(update_mask=field_mask_pb2.FieldMask(paths=['paths_value']))],
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.AdServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.AdServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = AdServiceClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.AdServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.AdServiceGrpcTransport,
)
@pytest.mark.parametrize("transport_class", [
transports.AdServiceGrpcTransport,
])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_ad_service_base_transport():
# Instantiate the base transport.
with mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceTransport.__init__') as Transport:
Transport.return_value = None
transport = transports.AdServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
'get_ad',
'mutate_ads',
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
def test_ad_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, 'default') as adc, mock.patch('google.ads.googleads.v5.services.services.ad_service.transports.AdServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.AdServiceTransport()
adc.assert_called_once()
def test_ad_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
AdServiceClient()
adc.assert_called_once_with(scopes=(
'https://www.googleapis.com/auth/adwords',
))
def test_ad_service_transport_auth_adc():
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transports.AdServiceGrpcTransport(host="squid.clam.whelk")
adc.assert_called_once_with(scopes=(
'https://www.googleapis.com/auth/adwords',
))
def test_ad_service_host_no_port():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='googleads.googleapis.com'),
)
assert client.transport._host == 'googleads.googleapis.com:443'
def test_ad_service_host_with_port():
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='googleads.googleapis.com:8000'),
)
assert client.transport._host == 'googleads.googleapis.com:8000'
def test_ad_service_grpc_transport_channel():
channel = grpc.insecure_channel('http://localhost/')
# Check that channel is used if provided.
transport = transports.AdServiceGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials == None
@pytest.mark.parametrize("transport_class", [transports.AdServiceGrpcTransport])
def test_ad_service_transport_channel_mtls_with_client_cert_source(
transport_class
):
with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=(
'https://www.googleapis.com/auth/adwords',
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize("transport_class", [transports.AdServiceGrpcTransport,])
def test_ad_service_transport_channel_mtls_with_adc(
transport_class
):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=(
'https://www.googleapis.com/auth/adwords',
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_ad_path():
customer = "squid"
ad = "clam"
expected = "customers/{customer}/ads/{ad}".format(customer=customer, ad=ad, )
actual = AdServiceClient.ad_path(customer, ad)
assert expected == actual
def test_parse_ad_path():
expected = {
"customer": "whelk",
"ad": "octopus",
}
path = AdServiceClient.ad_path(**expected)
# Check that the path construction is reversible.
actual = AdServiceClient.parse_ad_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "oyster"
expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
actual = AdServiceClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "nudibranch",
}
path = AdServiceClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = AdServiceClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "cuttlefish"
expected = "folders/{folder}".format(folder=folder, )
actual = AdServiceClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "mussel",
}
path = AdServiceClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = AdServiceClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "winkle"
expected = "organizations/{organization}".format(organization=organization, )
actual = AdServiceClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nautilus",
}
path = AdServiceClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = AdServiceClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "scallop"
expected = "projects/{project}".format(project=project, )
actual = AdServiceClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "abalone",
}
path = AdServiceClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = AdServiceClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "squid"
location = "clam"
expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
actual = AdServiceClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "whelk",
"location": "octopus",
}
path = AdServiceClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = AdServiceClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(transports.AdServiceTransport, '_prep_wrapped_messages') as prep:
client = AdServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(transports.AdServiceTransport, '_prep_wrapped_messages') as prep:
transport_class = AdServiceClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
f178b663d0ee93882d7f0f23f79762c86c9a62b3 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/ReverseRepo/YW_NHG_SHHG_019_GC028.py | 697b950c9b5b4c9f6d0da0feb24a47bcfb16928d | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,026 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_NHG_SHHG_019_GC028(xtp_test_case):
# YW_NHG_SHHG_019_GC028
def test_YW_NHG_SHHG_019_GC028(self):
title = '上海逆回购--数量(等于100万张)-28天'
        # Define the expected values for the current test case
        # Expected status: initial, unfilled, partially filled, fully filled, partial-cancel reported, partially cancelled, reported pending cancel, cancelled, rejected, cancel rejected, internally cancelled
        # xtp_ID and cancel_xtpID default to 0 and do not need to be changed
case_goal = {
'期望状态': '全成',
'errorID': 0,
'errorMSG': '',
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Parameters: security code, market, security type, security status, trading status, side (B = buy, S = sell), expected status, Api
stkparm = QueryStkPriceQty('204028', '1', '12', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_REPO'],
'order_client_id':2,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'price': stkparm['随机中间价'],
'quantity': 1000000,
'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('执行结果为' + str(rs['用例测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
self.assertEqual(rs['用例测试结果'], True) # 0
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
e3890f86efe95e867f60a04ad1fb1640b5b9c625 | 6a253ee7b47c5f70c826bbc97bb8e33cd1dab3b6 | /4.Working with Dask Bags for Unstructured Data/Filtering vetoed bills.py | f6f1b993c692dc6f8cda3afb05d26a40595ed1aa | [] | no_license | Mat4wrk/Parallel-Programming-with-Dask-in-Python-Datacamp | 19a646d6d16ff46173964c25639ff923407c8f32 | 535f69b78adb50cffc7f402f81ddff19f853eea1 | refs/heads/main | 2023-03-06T19:52:39.495066 | 2021-02-13T13:27:06 | 2021-02-13T13:27:06 | 338,565,569 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | py | # Filter the bills: overridden
overridden = bills_dicts.filter(veto_override)
# Print the number of bills retained
print(overridden.count().compute())
# Get the value of the 'title' key
titles = overridden.pluck('title')
# Compute and print the titles
print(titles.compute())
| [
"[email protected]"
] | |
ccd7c753cf3f1a7e04ca7b256c5f92fffcc69c25 | 3b2e30a6f082b4b21818eae44ea2f55fc25e7aa2 | /project/cart/views.py | 51a36c3411656d1a5ebb2b1e76ab2d20290d4d53 | [] | no_license | alekseykonotop/online_store_django | d9e9941ddedd783b38b5592ab2a3af5e35f0c2ee | 183cb3680b5b8f90457ea144dafaa96c13a3433d | refs/heads/master | 2020-07-30T09:13:57.449081 | 2019-11-07T19:46:58 | 2019-11-07T19:46:58 | 210,168,644 | 0 | 0 | null | 2020-06-05T23:07:09 | 2019-09-22T15:19:34 | JavaScript | UTF-8 | Python | false | false | 942 | py | from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from store.models import Product, Category
from .cart import Cart
from .forms import CartAddProductForm
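# Add the posted product and quantity to the session cart, then redirect to the cart detail view.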
@require_POST
def cart_add(request, product_id):
cart = Cart(request)
product = get_object_or_404(Product, id=product_id)
form = CartAddProductForm(request.POST)
if form.is_valid():
cd = form.cleaned_data
cart.add(product=product,
quantity=cd['quantity'],
update_quantity=cd['update'])
return redirect('cart:cart_detail')
def cart_remove(request, product_id):
cart = Cart(request)
product = get_object_or_404(Product, id=product_id)
cart.remove(product)
return redirect('cart:cart_detail')
def cart_detail(request):
context = {}
context['cart'] = Cart(request)
return render(request, 'cart/detail.html', context) | [
"[email protected]"
] | |
c8214a41a82f875f402de97e2db11c439208e33c | cf2ec51dfcb2d6777b5045137d2bcfe62afdec8c | /upvcarshare/core/templatetags/core_tags.py | 9e4b4acedbe5f435252e61b22be188f25d1f1041 | [] | no_license | morrme/upvcarshare | c4b8b1587370e7931d8b5d6c78b948188617795c | 189c91c608d0b61f6b68ef5c49a2546fdbbe38a2 | refs/heads/master | 2021-01-22T22:07:52.611880 | 2017-05-29T14:57:36 | 2017-05-29T14:57:36 | 88,732,669 | 0 | 0 | null | 2017-04-19T10:33:58 | 2017-04-19T10:33:58 | null | UTF-8 | Python | false | false | 1,295 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, absolute_import
from django import template
from django.conf import settings
from django.http import QueryDict
from journeys import DEFAULT_GOOGLE_MAPS_SRID
from journeys.helpers import make_point
register = template.Library()
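# Build a Google Static Maps image URL centred on the given point, converting it to the
# Google Maps SRID before assembling the query string.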
@register.simple_tag
def google_static_map(point, width=600, height=300, zoom=13):
google_maps_point = make_point(point, origin_coord_srid=point.srid, destiny_coord_srid=DEFAULT_GOOGLE_MAPS_SRID)
base_uri = "https://maps.googleapis.com/maps/api/staticmap"
args = {
"maptype": "roadmap",
"zoom": zoom,
"size": "{}x{}".format(width, height),
"key": settings.GOOGLE_MAPS_API_KEY,
"center": "{},{}".format(google_maps_point.coords[1], google_maps_point.coords[0]),
"markers": "color:red|{},{}".format(google_maps_point.coords[1], google_maps_point.coords[0]),
}
query_dict = QueryDict(mutable=True)
query_dict.update(args)
return "{}?{}".format(base_uri, query_dict.urlencode())
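# Return the given CSS class (default "active") when the current view name is in the
# comma-separated list of names; used to highlight navigation links.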
@register.simple_tag(takes_context=True)
def add_active_class(context, names, _class="active"):
request = context["request"]
names = names.split(",")
return _class if request.resolver_match.view_name in names else ""
| [
"[email protected]"
] | |
26cb68b12f6852ef885417963ed3f227dde4232b | ad6681ec221fddc78956d45182f22bd8f1aae8e1 | /基础班/python基础班作业/zuoye5.py | d7778e78f4774e78cf7432ba9bdc60433604db33 | [] | no_license | caoxp930/MyPythonCode | cb2428fd7078100df0b118f64713b7db76fe1e23 | 6b7e17b23fbaddcc69812ba7a14a0a5ad548ad4b | refs/heads/master | 2023-03-15T01:22:17.847582 | 2021-03-02T12:37:09 | 2021-03-02T12:37:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | # -*- coding: utf-8 -*-
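# Print the 9x9 multiplication table, one row per value of i.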
for i in range(1,10):
for j in range(1,i+1):
print(j,'*',i,'=',i*j,end='\t')
if i == j:
print() | [
"[email protected]"
] | |
fb4d4b99fef64675afb65af92c4e6b71f2d5ac46 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/M/markbrough/afd_1.py | 711852c3ee8cd7a769192a8717034ae07c1ec594 | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,542 | py | import scraperwiki
from lxml import html
from urllib2 import urlopen, Request, URLError
import re
import string
URL = "http://www.afd.fr/base-projets/listerProjets.action?page=%s"
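# Strip the ";jsessionid=..." session token that AFD inserts into links, keeping the path and query string.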
def cleanURL(data):
expression=re.compile("(\S*);jsessionid=(\S*)\?(\S*)")
d = expression.match(data)
return d.group(1)+"?"+d.group(3)
def cleandata(data):
if data:
newdata = string.strip(data)
else:
newdata=''
return newdata
def cleanamount(data):
eurosign = u"\u20AC"
commas = ','
spaces = '\r\n\t\t\t\t\t'
fixed = re.sub(eurosign, '', data)
fixed = re.sub(commas, '', fixed)
fixed = re.sub(spaces, '', fixed)
return fixed
def removeImage(data):
print "Trying to remove image from", data
fixed = re.sub('<img alt="" src="img/pdf.gif">', '', data)
    fixed = re.sub("\r", '', fixed)
    fixed = re.sub("\n", '', fixed)
    fixed = re.sub("\t", '', fixed)
    print "Final data after removing image is", fixed
return fixed
# utf8 : database_field_name
translations = {
u'Libell\xe9 du projet': 'name',
u'Num\xe9ro de projet': 'id',
u'Pays de r\xe9alisation': 'country',
u'B\xe9n\xe9ficiaire': 'beneficiary',
"Secteur d'intervention": 'aim',
'Agence de gestion': 'agency',
'Classement environnemental': 'environmental_impact',
'Classement social': 'social_impact',
u"Commentaire sur l'\xe9x\xe9cution du projet": 'comment',
'Execution': 'in progress',
'Etat du projet': 'status',
'Montant global du projet': 'funding_total_euros',
"Financement de l'AFD": 'funding_from_afd_euros',
'Forme de concours': 'funding_type',
'Cofinancement': 'is_co_financed',
u"Date d'identification valid\xe9e": 'date_validated',
"Date d'octroi du financement": 'date_funded',
'Chef de projet': 'project_manager',
'Responsable agence': 'responsible_agency',
'Structure responsable': 'responsible_structure',
'non': 'no',
'oui': 'yes',
}
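# Translate a French field label or value using the table above, optionally warning when no translation exists.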
def translate(french_str, warn_if_no_translation=False):
if not french_str:
return ''
if french_str in translations:
return translations[french_str].decode('utf8')
else:
if warn_if_no_translation:
print 'Could not translate: %s = %r' % (french_str, french_str)
return french_str
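# Fetch a single project page and copy each <th>/<td> row of its detail table into the data dict,
# translating labels via the table above; rows spanning both columns (section titles) are skipped,
# and any linked document name/URL is recorded before saving to sqlite.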
def scrape_project_page(data, project_url):
req = Request(project_url)
data['project_details'] = project_url
doc = html.parse(urlopen(req))
for tr in doc.findall('//table//tr'):
field = []
for cell_type in ('th', 'td'):
cells = tr.findall(cell_type)
if not cells:
# ignore row <th>Commentaire...</th> with no <td>
# TODO get the pdf links at this point
continue
warn_if_no_translation = cell_type == 'th'
if cells and cells[0].get('colspan') == '2':
# ignore section titles (they span both columns)
break
cells = [translate(cleanamount(cleandata(cell.text)),
warn_if_no_translation) \
for cell in cells]
field.append(' | '.join(cells))
if len(field) == 2:
if not field[0]:
# don't save a blank key
assert not field[1], 'Throwing away data without key: %r' % field[1]
continue
data[field[0]] = field[1]
#print 'SAVE %s : %s' % tuple(field)
document_field = doc.find('//tr//td//div/a')
if document_field is not None:
data["document_url"] = cleanURL("http://www.afd.fr"+document_field.get("href"))
data["document_name"] = document_field.text_content()
print "document name is", cleandata(document_field.text_content())
print "document url is", cleanURL("http://www.afd.fr"+document_field.get("href"))
scraperwiki.sqlite.save(unique_keys=["country", "description"],
data=data)
# loop over the pages of the "liste des projets"
page_number = 0
while True:
page_number += 1
req = Request(URL % (page_number))
try:
response = urlopen(req)
    except URLError, e:
        # AFD returns a 404 once we run past the last page of results
        if getattr(e, 'code', None) == 404:
            break
        raise
doc = html.parse(response)
if not(doc.findall('//tbody//tr')):
break
# loop over each project summary
for tr in doc.findall('//tbody//tr'):
cells = list(tr.findall('td'))
if not len(cells):
continue
amount = re.sub(',', '', cells[2].text)
project_url = 'http://www.afd.fr' + cells[1].find('a').get('href')
data = {
'country' : cleandata(cells[0].text),
'description' : cleandata(cells[1].find('a').text),
'project_url' : cleanURL(project_url),
'funding_total_euros' : cleanamount(cleandata(amount)),
'status' : cleandata(cells[3].text),
'date_updated' : cells[4].text
}
# drill down into the project page
try:
scrape_project_page(data, project_url)
except:
# if that fails, save what we have!
scraperwiki.sqlite.save(unique_keys=["country", "description"],
data=data)
import scraperwiki
from lxml import html
from urllib2 import urlopen, Request, URLError
import re
import string
URL = "http://www.afd.fr/base-projets/listerProjets.action?page=%s"
def cleanURL(data):
expression=re.compile("(\S*);jsessionid=(\S*)\?(\S*)")
d = expression.match(data)
return d.group(1)+"?"+d.group(3)
def cleandata(data):
if data:
newdata = string.strip(data)
else:
newdata=''
return newdata
def cleanamount(data):
eurosign = u"\u20AC"
commas = ','
spaces = '\r\n\t\t\t\t\t'
fixed = re.sub(eurosign, '', data)
fixed = re.sub(commas, '', fixed)
fixed = re.sub(spaces, '', fixed)
return fixed
def removeImage(data):
print "Trying to remove image from", data
fixed = re.sub('<img alt="" src="img/pdf.gif">', '', data)
    fixed = re.sub("\r", '', fixed)
    fixed = re.sub("\n", '', fixed)
    fixed = re.sub("\t", '', fixed)
    print "Final data after removing image is", fixed
return fixed
# utf8 : database_field_name
translations = {
u'Libell\xe9 du projet': 'name',
u'Num\xe9ro de projet': 'id',
u'Pays de r\xe9alisation': 'country',
u'B\xe9n\xe9ficiaire': 'beneficiary',
"Secteur d'intervention": 'aim',
'Agence de gestion': 'agency',
'Classement environnemental': 'environmental_impact',
'Classement social': 'social_impact',
u"Commentaire sur l'\xe9x\xe9cution du projet": 'comment',
'Execution': 'in progress',
'Etat du projet': 'status',
'Montant global du projet': 'funding_total_euros',
"Financement de l'AFD": 'funding_from_afd_euros',
'Forme de concours': 'funding_type',
'Cofinancement': 'is_co_financed',
u"Date d'identification valid\xe9e": 'date_validated',
"Date d'octroi du financement": 'date_funded',
'Chef de projet': 'project_manager',
'Responsable agence': 'responsible_agency',
'Structure responsable': 'responsible_structure',
'non': 'no',
'oui': 'yes',
}
def translate(french_str, warn_if_no_translation=False):
if not french_str:
return ''
if french_str in translations:
return translations[french_str].decode('utf8')
else:
if warn_if_no_translation:
print 'Could not translate: %s = %r' % (french_str, french_str)
return french_str
def scrape_project_page(data, project_url):
req = Request(project_url)
data['project_details'] = project_url
doc = html.parse(urlopen(req))
for tr in doc.findall('//table//tr'):
field = []
for cell_type in ('th', 'td'):
cells = tr.findall(cell_type)
if not cells:
# ignore row <th>Commentaire...</th> with no <td>
# TODO get the pdf links at this point
continue
warn_if_no_translation = cell_type == 'th'
if cells and cells[0].get('colspan') == '2':
# ignore section titles (they span both columns)
break
cells = [translate(cleanamount(cleandata(cell.text)),
warn_if_no_translation) \
for cell in cells]
field.append(' | '.join(cells))
if len(field) == 2:
if not field[0]:
# don't save a blank key
assert not field[1], 'Throwing away data without key: %r' % field[1]
continue
data[field[0]] = field[1]
#print 'SAVE %s : %s' % tuple(field)
document_field = doc.find('//tr//td//div/a')
if document_field is not None:
data["document_url"] = cleanURL("http://www.afd.fr"+document_field.get("href"))
data["document_name"] = document_field.text_content()
print "document name is", cleandata(document_field.text_content())
print "document url is", cleanURL("http://www.afd.fr"+document_field.get("href"))
scraperwiki.sqlite.save(unique_keys=["id"],
data=data)
# loop over the pages of the "liste des projets"
page_number = 0
while True:
page_number += 1
req = Request(URL % (page_number))
try:
response = urlopen(req)
    except URLError, e:
        # AFD returns a 404 once we run past the last page of results
        if getattr(e, 'code', None) == 404:
            break
        raise
doc = html.parse(response)
if not(doc.findall('//tbody//tr')):
break
# loop over each project summary
for tr in doc.findall('//tbody//tr'):
cells = list(tr.findall('td'))
if not len(cells):
continue
amount = re.sub(',', '', cells[2].text)
project_url = 'http://www.afd.fr' + cells[1].find('a').get('href')
data = {
'country' : cleandata(cells[0].text),
'description' : cleandata(cells[1].find('a').text),
'project_url' : cleanURL(project_url),
'funding_total_euros' : cleanamount(cleandata(amount)),
'status' : cleandata(cells[3].text),
'date_updated' : cells[4].text
}
# drill down into the project page
try:
scrape_project_page(data, project_url)
except:
# if that fails, save what we have!
scraperwiki.sqlite.save(unique_keys=["id"],
data=data)
| [
"[email protected]"
] | |
270d5dd6dc7d1ac5dfdf0eeb82eaa30901b3cb1c | 18dba2f82e17873e5e8161e74bc714ef88b09b36 | /realestate/estatebase/migrations/0044_auto__add_localitytype__add_field_locality_locality_type.py | a5aaef7a8313514f2c8f3e1d85d08d0dbfdeaacf | [] | no_license | sanchellius/estate-agent | 8013573624b62ea3b6362fa0c22edf8371ca6966 | 53c15c2f2c970bd432ae579b5aa6f76ab2fbac49 | refs/heads/master | 2021-01-17T21:15:35.988578 | 2016-07-25T21:51:24 | 2016-07-25T21:51:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45,365 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'LocalityType'
db.create_table('estatebase_localitytype', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255, db_index=True)),
('prep_name', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
))
db.send_create_signal('estatebase', ['LocalityType'])
# Adding field 'Locality.locality_type'
db.add_column('estatebase_locality', 'locality_type',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['estatebase.LocalityType'], null=True, on_delete=models.PROTECT, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting model 'LocalityType'
db.delete_table('estatebase_localitytype')
# Deleting field 'Locality.locality_type'
db.delete_column('estatebase_locality', 'locality_type_id')
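    # Frozen model definitions that South uses to reconstruct the ORM available to this migration.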
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'estatebase.appliance': {
'Meta': {'ordering': "['name']", 'object_name': 'Appliance'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.beside': {
'Meta': {'ordering': "['name']", 'object_name': 'Beside'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.bid': {
'Meta': {'ordering': "['-history__created']", 'object_name': 'Bid'},
'agency_price_max': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'agency_price_min': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'bid_status': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.BidStatus']", 'null': 'True', 'blank': 'True'}),
'broker': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'broker_list'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['auth.User']"}),
'brokers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'cleaned_filter': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bids'", 'to': "orm['estatebase.Client']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'estate_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.EstateTypeCategory']", 'null': 'True', 'blank': 'True'}),
'estate_filter': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'estate_types': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.EstateType']", 'null': 'True', 'blank': 'True'}),
'estates': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Estate']", 'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['estatebase.HistoryMeta']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'localities': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Locality']", 'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'regions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Region']", 'null': 'True', 'blank': 'True'})
},
'estatebase.bidevent': {
'Meta': {'object_name': 'BidEvent'},
'bid': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bid_events'", 'to': "orm['estatebase.Bid']"}),
'bid_event_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.BidEventCategory']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'estates': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Estate']", 'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['estatebase.HistoryMeta']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'estatebase.bideventcategory': {
'Meta': {'ordering': "['name']", 'object_name': 'BidEventCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.bidg': {
'Meta': {'ordering': "['id']", 'object_name': 'Bidg'},
'appliances': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Appliance']", 'null': 'True', 'blank': 'True'}),
'basic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ceiling': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Ceiling']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'ceiling_height': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'documents': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Document']", 'null': 'True', 'blank': 'True'}),
'elevator': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'estate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bidgs'", 'to': "orm['estatebase.Estate']"}),
'estate_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.EstateType']", 'on_delete': 'models.PROTECT'}),
'exterior_finish': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.ExteriorFinish']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'floor': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'floor_count': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '3', 'decimal_places': '1', 'blank': 'True'}),
'flooring': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Flooring']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'heating': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Heating']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interior': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Interior']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'roof': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Roof']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'room_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'room_number': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'total_area': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'used_area': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'wall_construcion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.WallConstrucion']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'wall_finish': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.WallFinish']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'window_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.WindowType']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'year_built': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'estatebase.bidstatus': {
'Meta': {'ordering': "['name']", 'object_name': 'BidStatus'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.ceiling': {
'Meta': {'ordering': "['name']", 'object_name': 'Ceiling'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.client': {
'Meta': {'ordering': "['-id']", 'object_name': 'Client'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'client_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.ClientType']", 'on_delete': 'models.PROTECT'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'history': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['estatebase.HistoryMeta']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'origin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Origin']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'})
},
'estatebase.clienttype': {
'Meta': {'ordering': "['name']", 'object_name': 'ClientType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.comstatus': {
'Meta': {'ordering': "['name']", 'object_name': 'ComStatus'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {})
},
'estatebase.contact': {
'Meta': {'ordering': "['contact_state__id', 'contact_type__id']", 'object_name': 'Contact'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contacts'", 'to': "orm['estatebase.Client']"}),
'contact': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'contact_state': ('django.db.models.fields.related.ForeignKey', [], {'default': '5', 'to': "orm['estatebase.ContactState']", 'on_delete': 'models.PROTECT'}),
'contact_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.ContactType']", 'on_delete': 'models.PROTECT'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'estatebase.contacthistory': {
'Meta': {'object_name': 'ContactHistory'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Contact']"}),
'contact_state': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.ContactState']", 'on_delete': 'models.PROTECT'}),
'event_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 21, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'})
},
'estatebase.contactstate': {
'Meta': {'ordering': "['name']", 'object_name': 'ContactState'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.contacttype': {
'Meta': {'ordering': "['name']", 'object_name': 'ContactType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.document': {
'Meta': {'ordering': "['name']", 'object_name': 'Document'},
'estate_type_category': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['estatebase.EstateTypeCategory']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.driveway': {
'Meta': {'ordering': "['name']", 'object_name': 'Driveway'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.electricity': {
'Meta': {'ordering': "['name']", 'object_name': 'Electricity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.estate': {
'Meta': {'ordering': "['-id']", 'object_name': 'Estate'},
'agency_price': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'beside': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Beside']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'beside_distance': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'broker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'client_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'clients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'estates'", 'symmetrical': 'False', 'through': "orm['estatebase.EstateClient']", 'to': "orm['estatebase.Client']"}),
'com_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.ComStatus']", 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Contact']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'driveway': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Driveway']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'driveway_distance': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'electricity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Electricity']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'electricity_distance': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'estate_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.EstateTypeCategory']", 'on_delete': 'models.PROTECT'}),
'estate_number': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'estate_params': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.EstateParam']", 'null': 'True', 'blank': 'True'}),
'estate_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.EstateStatus']", 'on_delete': 'models.PROTECT'}),
'gassupply': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Gassupply']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'gassupply_distance': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['estatebase.HistoryMeta']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Internet']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'locality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Locality']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'microdistrict': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Microdistrict']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'origin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Origin']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Region']", 'on_delete': 'models.PROTECT'}),
'saler_price': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'sewerage': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Sewerage']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'sewerage_distance': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'street': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Street']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'telephony': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Telephony']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'validity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Validity']", 'null': 'True', 'blank': 'True'}),
'watersupply': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Watersupply']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'watersupply_distance': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'estatebase.estateclient': {
'Meta': {'unique_together': "(('client', 'estate'),)", 'object_name': 'EstateClient'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Client']"}),
'estate': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Estate']"}),
'estate_client_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.EstateClientStatus']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'estatebase.estateclientstatus': {
'Meta': {'ordering': "['name']", 'object_name': 'EstateClientStatus'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.estateparam': {
'Meta': {'ordering': "['order']", 'object_name': 'EstateParam'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True', 'blank': 'True'})
},
'estatebase.estatephoto': {
'Meta': {'ordering': "['order']", 'object_name': 'EstatePhoto'},
'estate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'images'", 'to': "orm['estatebase.Estate']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True', 'blank': 'True'})
},
'estatebase.estateregister': {
'Meta': {'ordering': "['-id']", 'object_name': 'EstateRegister'},
'bids': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'estate_registers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['estatebase.Bid']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'estates': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'estate_registers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['estatebase.Estate']"}),
'history': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['estatebase.HistoryMeta']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'register_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.RegisterCategory']", 'null': 'True', 'blank': 'True'})
},
'estatebase.estatestatus': {
'Meta': {'ordering': "['name']", 'object_name': 'EstateStatus'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.estatetype': {
'Meta': {'ordering': "['estate_type_category__order', 'name']", 'object_name': 'EstateType'},
'estate_type_category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'types'", 'on_delete': 'models.PROTECT', 'to': "orm['estatebase.EstateTypeCategory']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True', 'blank': 'True'}),
'placeable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'template': ('django.db.models.fields.IntegerField', [], {})
},
'estatebase.estatetypecategory': {
'Meta': {'ordering': "['order']", 'object_name': 'EstateTypeCategory'},
'has_bidg': ('django.db.models.fields.IntegerField', [], {}),
'has_stead': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'independent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_commerce': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True', 'blank': 'True'})
},
'estatebase.exteriorfinish': {
'Meta': {'ordering': "['name']", 'object_name': 'ExteriorFinish'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.flooring': {
'Meta': {'ordering': "['name']", 'object_name': 'Flooring'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.furniture': {
'Meta': {'ordering': "['name']", 'object_name': 'Furniture'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.gassupply': {
'Meta': {'ordering': "['name']", 'object_name': 'Gassupply'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.geogroup': {
'Meta': {'ordering': "['name']", 'object_name': 'GeoGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.heating': {
'Meta': {'ordering': "['name']", 'object_name': 'Heating'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.historymeta': {
'Meta': {'object_name': 'HistoryMeta'},
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'creators'", 'on_delete': 'models.PROTECT', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modificated': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'updators'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': "orm['auth.User']"})
},
'estatebase.interior': {
'Meta': {'ordering': "['name']", 'object_name': 'Interior'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.internet': {
'Meta': {'ordering': "['name']", 'object_name': 'Internet'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.landtype': {
'Meta': {'ordering': "['name']", 'object_name': 'LandType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.layout': {
'Meta': {'ordering': "['id']", 'object_name': 'Layout'},
'area': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '7', 'decimal_places': '2', 'blank': 'True'}),
'furniture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Furniture']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interior': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Interior']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'layout_feature': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.LayoutFeature']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'layout_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.LayoutType']", 'on_delete': 'models.PROTECT'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Level']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'estatebase.layoutfeature': {
'Meta': {'ordering': "['name']", 'object_name': 'LayoutFeature'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.layouttype': {
'Meta': {'ordering': "['name']", 'object_name': 'LayoutType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.level': {
'Meta': {'ordering': "['level_name']", 'object_name': 'Level'},
'bidg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'levels'", 'to': "orm['estatebase.Bidg']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level_name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.LevelName']"})
},
'estatebase.levelname': {
'Meta': {'ordering': "['name']", 'object_name': 'LevelName'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.locality': {
'Meta': {'ordering': "['name']", 'unique_together': "(('name', 'region'),)", 'object_name': 'Locality'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locality_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.LocalityType']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Region']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'})
},
'estatebase.localitytype': {
'Meta': {'ordering': "['name']", 'object_name': 'LocalityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'prep_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'estatebase.microdistrict': {
'Meta': {'ordering': "['name']", 'unique_together': "(('name', 'locality'),)", 'object_name': 'Microdistrict'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Locality']", 'on_delete': 'models.PROTECT'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'estatebase.office': {
'Meta': {'ordering': "['name']", 'object_name': 'Office'},
'address': ('django.db.models.fields.TextField', [], {}),
'address_short': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'regions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['estatebase.Region']", 'symmetrical': 'False'})
},
'estatebase.origin': {
'Meta': {'ordering': "['name']", 'object_name': 'Origin'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.purpose': {
'Meta': {'ordering': "['name']", 'object_name': 'Purpose'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.region': {
'Meta': {'ordering': "['name']", 'object_name': 'Region'},
'geo_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.GeoGroup']", 'on_delete': 'models.PROTECT'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.registercategory': {
'Meta': {'ordering': "['name']", 'object_name': 'RegisterCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.roof': {
'Meta': {'ordering': "['name']", 'object_name': 'Roof'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.sewerage': {
'Meta': {'ordering': "['name']", 'object_name': 'Sewerage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.shape': {
'Meta': {'ordering': "['name']", 'object_name': 'Shape'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.stead': {
'Meta': {'object_name': 'Stead'},
'documents': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['estatebase.Document']", 'null': 'True', 'blank': 'True'}),
'estate': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'stead'", 'unique': 'True', 'to': "orm['estatebase.Estate']"}),
'estate_type': ('django.db.models.fields.related.ForeignKey', [], {'default': '15', 'to': "orm['estatebase.EstateType']", 'on_delete': 'models.PROTECT'}),
'face_area': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'land_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.LandType']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'purpose': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Purpose']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'shape': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Shape']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'total_area': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'})
},
'estatebase.street': {
'Meta': {'ordering': "['name']", 'unique_together': "(('name', 'locality'),)", 'object_name': 'Street'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Locality']", 'on_delete': 'models.PROTECT'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'estatebase.telephony': {
'Meta': {'ordering': "['name']", 'object_name': 'Telephony'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'geo_groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['estatebase.GeoGroup']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['estatebase.Office']", 'null': 'True', 'on_delete': 'models.PROTECT', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'estatebase.validity': {
'Meta': {'ordering': "['name']", 'object_name': 'Validity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.wallconstrucion': {
'Meta': {'ordering': "['name']", 'object_name': 'WallConstrucion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.wallfinish': {
'Meta': {'ordering': "['name']", 'object_name': 'WallFinish'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.watersupply': {
'Meta': {'ordering': "['name']", 'object_name': 'Watersupply'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'estatebase.windowtype': {
'Meta': {'ordering': "['name']", 'object_name': 'WindowType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
}
}
complete_apps = ['estatebase'] | [
"[email protected]"
] |