Dataset schema (each record below is one source file; fields appear pipe-separated, with `content` holding the full file text):

- blob_id: string (40 chars)
- directory_id: string (40 chars)
- path: string (3 to 616 chars)
- content_id: string (40 chars)
- detected_licenses: sequence (0 to 112 items)
- license_type: string (2 classes)
- repo_name: string (5 to 115 chars)
- snapshot_id: string (40 chars)
- revision_id: string (40 chars)
- branch_name: string (777 classes)
- visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
- revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
- committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
- github_id: int64 (4.92k to 681M, nullable)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (22 classes)
- gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable)
- gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable)
- gha_language: string (149 classes)
- src_encoding: string (26 classes)
- language: string (1 class)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (3 to 10.2M)
- extension: string (188 classes)
- content: string (3 to 10.2M chars)
- authors: sequence (1 item)
- author_id: string (1 to 132 chars)
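The records that follow can also be consumed programmatically; a minimal sketch, assuming the table is published through the Hugging Face `datasets` library (the dataset path below is a placeholder, not the real identifier):

```python
from datasets import load_dataset

# Placeholder path; substitute the actual dataset identifier.
ds = load_dataset("org/python-source-files", split="train", streaming=True)

for record in ds:
    # Each record mirrors the schema above; `content` is the raw file text.
    print(record["repo_name"], record["path"], record["length_bytes"])
    break
```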
22c9bd34bc65eeaa2531ecd17303521ed2520d03 | abd7504f6562babf79fb4e86af7529b2cb40fb54 | /pkg/p2/algebraic/Composite.py | f7484bcffe062e091103b6467f18fc30e7da1383 | [] | no_license | aivazis/p2 | 266c1728554b3f7a89e72f09ba2d9e5ff8d4447d | fd9a82d7dafa815dd68f679eb2b4b1a6287d02ea | refs/heads/main | 2022-01-08T12:45:16.646028 | 2022-01-01T17:31:10 | 2022-01-01T17:31:10 | 225,452,981 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,458 | py | # -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <[email protected]>
# (c) 1998-2022 all rights reserved
class Composite:
"""
Mix-in class that provides an implementation of the subset of the interface of {Node} that
requires traversal of the expression graph rooted at nodes with dependencies.
This class assumes that its instances provide {operands}, a tuple of their dependencies on
other nodes
"""
# types
from .exceptions import CircularReferenceError
# interface
@property
def operands(self):
"""
A sequence of my direct dependents
"""
# the default implementation stores my operands in a private member
return self._operands
@operands.setter
def operands(self, operands):
"""
Adjust my operands
"""
# process the incoming sequence and save
self._operands = self._store(self._ingest(operands=operands))
# all done
return
@property
def span(self):
"""
Return a sequence over my entire dependency graph
"""
# i am a node in my dependency graph
yield self
# go through my operands
for operand in self.operands:
# and ask them for their span
yield from operand.span
# all done
return
# classifiers
@property
def literals(self):
"""
Return a sequence over the nodes in my dependency graph that encapsulate foreign objects
"""
# go through my operands
for operand in self.operands:
# and ask them for literals in their span
yield from operand.literals
# all done
return
@property
def operators(self):
"""
Return a sequence over the composite nodes in my dependency graph
"""
# i am one
yield self
# go through my operands
for operand in self.operands:
# and ask them for operators in their span
yield from operand.operators
# all done
return
@property
def variables(self):
"""
Return a sequence over the variables in my dependency graph
"""
# go through my operands
for operand in self.operands:
# and ask them for variables in their span
yield from operand.variables
# all done
return
# structural classifiers
@property
def leaves(self):
"""
Return a sequence over the leaves in my dependency graph
"""
# go through my operands:
for operand in self.operands:
# and ask them for leaves in their span
yield from operand.leaves
# all done
return
@property
def composites(self):
"""
Return a sequence over the composites in my dependency graph
"""
# i am one
yield self
# go through my operands:
for operand in self.operands:
# and ask them for leaves in their span
yield from operand.composites
# all done
return
# metamethods
def __init__(self, operands, **kwds):
# chain up
super().__init__(**kwds)
# save my direct dependencies
self.operands = operands
# all done
return
# implementation details
def _ingest(self, operands):
"""
Convert {operands} into nodes
"""
# go through operands
for operand in operands:
# if this is not a node instance
if not isinstance(operand, self.node):
# make it a literal
operand = self.literal(value=operand)
# hand it off
yield operand
# all done
return
def _substitute(self, current, replacement, clean):
"""
Adjust the operands by substituting {replacement} for {current} in the sequence of operands
"""
# if i'm the one being replaced
if current is self:
# just return the {replacement}
return replacement
# if i'm among the {clean} nodes
if self in clean:
# do nothing
return self
# add me to the clean pile
clean.add(self)
# otherwise, make a pile for my potentially adjusted operands
operands = []
# initially, i am not known to have replaced any of my operands
modified = False
# go through my operands
for op in self.operands:
# if this one is marked {clean}
if op in clean:
# add it to the list of operands
operands.append(op)
# and carry on
continue
# otherwise, ask it to perform the substitution
r = op._substitute(current=current, replacement=replacement, clean=clean)
# add it or its replacement to the pile
operands.append(r)
# record whether an update was performed
modified |= (r is not op)
# if any substitutions were made
if modified:
# replace my operands
self.operands = operands
# all done
return self
# the default storage mechanism for operands
_store = tuple
# storage for the operands
_operands = ()
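# ---------------------------------------------------------------------------
# illustrative sketch only -- not part of the original p2 source
# it shows the contract {Composite} expects of its host class: operands that
# expose {span}, {literals}, {operators}, {variables}, {leaves}, {composites},
# plus the {node} and {literal} aliases consulted by {_ingest}
# ---------------------------------------------------------------------------
class _DemoLeaf:
    """a stand-in leaf node, used purely for demonstration"""
    # classifiers: a bare leaf has no literals, operators, or composites below it
    literals = ()
    operators = ()
    composites = ()

    @property
    def span(self):
        # a leaf's span is just itself
        yield self
        return

    @property
    def variables(self):
        yield self
        return

    @property
    def leaves(self):
        yield self
        return


class _DemoSum(Composite, _DemoLeaf):
    """a toy composite; operands that aren't {_DemoLeaf} instances get wrapped by {literal}"""
    node = _DemoLeaf
    literal = staticmethod(lambda value: value)

# usage sketch:
#     s = _DemoSum(operands=[_DemoLeaf(), _DemoLeaf()])
#     len(list(s.span))        # 3 -- the sum plus its two leaves
#     len(list(s.composites))  # 1 -- just the sum itself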
# end of file
| [
"[email protected]"
] | |
25b84c72c3109809a8cb05a72d74a669ff8fe826 | 4059573793d0ee5b74c9dd919aa2945dad2fe426 | /Searching Algorithms/binary_search.py | 4d3d525420e9d3930077f4a8fae78ac7b8ab65cb | [] | no_license | nayanika2304/DataStructuresPractice | 04ea6d9248a63983abdd2b983632ba5907eed9d4 | f3c815ff113ce3977cc743360b77fb21c9f9b383 | refs/heads/master | 2022-12-08T05:28:22.897414 | 2020-08-29T18:17:57 | 2020-08-29T18:17:57 | 282,513,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,299 | py | '''
Compare x with the middle element.
If x matches the middle element, we return the mid index.
Else if x is greater than the mid element, then x can only lie in the right half subarray after the mid element, so we recur for the right half.
Else (x is smaller) we recur for the left half.
'''
# Python3 Program for recursive binary search.
'''
takes the array, the search bounds, and the target x:
l = start index
r = end index (the last index, i.e. len(arr) - 1)
'''
# Returns index of x in arr if present, else -1
def binarySearch(arr, l, r, x):
print(l,r)
# Check base case
if r >= l:
mid = l + (r - l) // 2
# If element is present at the middle itself
if arr[mid] == x:
return mid
# If element is smaller than mid, then it
# can only be present in left subarray
elif arr[mid] > x:
return binarySearch(arr, l, mid - 1, x)
# Else the element can only be present
# in right subarray
else:
return binarySearch(arr, mid + 1, r, x)
else:
# Element is not present in the array
return -1
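# An iterative variant of the same search (added for illustration; not part of
# the original file). It follows the same compare-with-middle logic described
# in the docstring above, without recursion.
def binarySearchIterative(arr, x):
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = lo + (hi - lo) // 2
        if arr[mid] == x:
            return mid
        elif arr[mid] < x:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1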
# Driver Code
arr = [2, 3, 4, 10, 40]
x = 10
# Function call
result = binarySearch(arr, 0, len(arr) - 1, x)
if result != -1:
print("Element is present at index % d" % result)
else:
print("Element is not present in array") | [
"[email protected]"
] | |
85b3990a7902a3249be40266a3a934ab4f52f997 | 9cc6721acb439db2e7cff8eb4dbff4b6e14040d5 | /코딩테스트(책)/problems/3.dfs_bfs/6.감시피하기_x.py | 44a193abe9ce52b0c1122ad0e432887afc033494 | [] | no_license | young31/Algorithm | 35c6ec6b6d9b192f9d0e6e6f6484f33c92100232 | bfcccfa798d031a930490efa24d9b2263bd4b984 | refs/heads/master | 2021-11-04T14:01:53.827508 | 2021-10-25T06:35:24 | 2021-10-25T06:35:24 | 196,034,851 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,864 | py | # input
n1 = 5
arr1 = [
[0, 1, 0, 0, 2],
[2, 0, 1, 0, 0],
[0, 0, 0, 0, 0],
[0, 2, 0, 0, 0],
[0, 0, 2, 0, 0]
]
n2 = 4
arr2 = [
[1, 1, 1, 2],
[0, 0, 0, 0],
[0, 0, 0, 0],
[2, 2, 2, 0]
]
# answer: yes
# algo
def search(arr, x, y, move):
n = len(arr)
res = []
is_T = False
if move == 0:
dx, dy = 1, 0
elif move == 1:
dx, dy = -1, 0
elif move == 2:
dx, dy = 0, 1
elif move == 3:
dx, dy = 0, -1
tmp = []
while 1:
nx = x+dx
ny = y+dy
if not is_feasible(n, nx, ny):
break
else:
if arr[nx][ny] == 0:
tmp.append((nx, ny))
elif arr[nx][ny] == 2:
is_T = True
break
x, y = nx, ny
if is_T:
if tmp:
res += tmp
        else: # a teacher is visible with no empty cell to block the view -- emergency case, evasion is impossible
return 'FLAG'
return res
def is_feasible(n, x, y):
if 0 <= x < n and 0 <= y < n:
return True
return False
def main(n, arr):
students = []
for i in range(n):
for j in range(n):
if arr[i][j] == 1:
students.append((i, j))
    # build the segments that must be blocked; if every segment gets covered, evasion succeeds
to_block = []
for i, j in students:
for m in range(4):
tmp = search(arr, i, j, m)
if tmp == 'FLAG':
return 'NO'
if tmp:
to_block.append(tmp)
    # track which segments still remain to be blocked
remains = [True for _ in range(len(to_block))]
n_block = 0
for ib, block in enumerate(to_block):
if not block:
continue
hist_remove = []
        # treat this segment as handled up front; how many walls to place inside it is decided below
if remains[ib]:
remains[ib] = False
for i, xy in enumerate(block):
remove = False
for ib_, block_ in enumerate(to_block[ib+1:], ib+1):
                # if a coordinate is shared with another segment, always block that cell and mark the other segment as handled too
if xy in block_:
remains[ib_] = False
remove = True
block_.remove(xy)
if n_block > 3:
return 'NO'
if remove:
n_block += 1
hist_remove.append(remove)
        # if no coordinate was shared, only this segment gets resolved (counts as one wall)
if sum(hist_remove) == 0:
n_block += 1
if n_block <= 3 and sum(remains) == 0:
return 'YES'
else:
return 'NO'
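# Reference check, added for illustration (not part of the original solution):
# the textbook approach simply tries every combination of 3 walls among the
# empty cells and tests whether any teacher can still see a student.  It
# assumes walls block the line of sight and students do not, matching the
# behaviour of search() above.
from itertools import combinations

def brute_force(n, arr):
    teachers = [(i, j) for i in range(n) for j in range(n) if arr[i][j] == 2]
    empties = [(i, j) for i in range(n) for j in range(n) if arr[i][j] == 0]

    def caught(board):
        # True if some teacher sees some student
        for ti, tj in teachers:
            for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
                x, y = ti + di, tj + dj
                while 0 <= x < n and 0 <= y < n and board[x][y] != 3:  # 3 = wall
                    if board[x][y] == 1:
                        return True
                    x, y = x + di, y + dj
        return False

    for walls in combinations(empties, 3):
        board = [row[:] for row in arr]
        for x, y in walls:
            board[x][y] = 3
        if not caught(board):
            return 'YES'
    return 'NO'

# e.g. print(brute_force(n1, arr1)); print(brute_force(n2, arr2))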
print(main(n1, arr1))
print(main(n2, arr2)) | [
"[email protected]"
] | |
1921e361eac11d34257a95c03ac18db950c86452 | ac99fc4c74c6306cf23ebc3ddbcbd992b985387d | /tests/myapp/settings.py | 55161837d3bbd70265ce48e2ae0356a77b458656 | [
"MIT"
] | permissive | jamalex/django-nested-intervals | d7f010d13abd28efd0867fa683bfded04fb91931 | b2d80db554762e95b24c7b08217e5bcbed8f40b7 | refs/heads/master | 2020-03-28T07:31:37.486839 | 2018-09-09T21:14:06 | 2018-09-09T21:14:06 | 147,907,242 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,336 | py | from __future__ import unicode_literals
import os
import django
DIRNAME = os.path.dirname(__file__)
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.staticfiles',
'nested_intervals',
'myapp',
)
STATIC_URL = '/static/'
SECRET_KEY = 'abc123'
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
ROOT_URLCONF = 'myapp.urls'
| [
"[email protected]"
] | |
5be62ca99e20643780c83602ab8fb32b229abe89 | 5f0eeef355fa84b165d4e0707e8874755cc03259 | /chp03_oscillation/NOC_3_09_wave_b/NOC_3_09_wave_b.pyde | b43cee586b23ca1e67378fd57b2a37121536b205 | [] | no_license | kidult00/NatureOfCode-Examples-Python | 5835fbed114f3991b9986852f31d29a0a46d7e53 | 42461590deebbe305d5815ff0d207ff974335ad5 | refs/heads/master | 2021-05-11T04:47:53.999705 | 2018-03-07T15:54:12 | 2018-03-07T15:54:12 | 117,946,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | pyde | # The Nature of Code - Python Version
# [kidult00](https://github.com/kidult00)
def setup():
size(250, 200)
smooth()
global startAngle, angleVel
startAngle = 0.0
angleVel = 0.2
def draw():
background(255)
stroke(0)
fill(0, 50)
strokeWeight(2)
global startAngle, angleVel
startAngle += 0.015
angle = startAngle
for x in range(0, width+1, 24):
y = map(sin(angle), -1, 1, 0, height)
ellipse(x, y, 48, 48)
angle += angleVel | [
"[email protected]"
] | |
3ec8cdf29f18ab3b98ea97b5e714e0b0770ed1e6 | 148072ce210ca4754ea4a37d83057e2cf2fdc5a1 | /src/core/w3af/w3af/plugins/attack/db/sqlmap/lib/request/direct.py | dab662acd830c943a6bb530956ff9115e04cb2cb | [] | no_license | ycc1746582381/webfuzzer | 8d42fceb55c8682d6c18416b8e7b23f5e430c45f | 0d9aa35c3218dc58f81c429cae0196e4c8b7d51b | refs/heads/master | 2021-06-14T18:46:59.470232 | 2017-03-14T08:49:27 | 2017-03-14T08:49:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,668 | py | #!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import time
from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.common import Backend
from lib.core.common import calculateDeltaSeconds
from lib.core.common import extractExpectedValue
from lib.core.common import getCurrentThreadData
from lib.core.common import getUnicode
from lib.core.common import hashDBRetrieve
from lib.core.common import hashDBWrite
from lib.core.common import isListLike
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.dicts import SQL_STATEMENTS
from lib.core.enums import CUSTOM_LOGGING
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
from lib.core.settings import UNICODE_ENCODING
from lib.utils.timeout import timeout
def direct(query, content=True):
select = True
query = agent.payloadDirect(query)
query = agent.adjustLateValues(query)
threadData = getCurrentThreadData()
if Backend.isDbms(DBMS.ORACLE) and query.upper().startswith("SELECT ") and " FROM " not in query.upper():
query = "%s FROM DUAL" % query
for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
for sqlStatement in sqlStatements:
if query.lower().startswith(sqlStatement) and sqlTitle != "SQL SELECT statement":
select = False
break
if select and not query.upper().startswith("SELECT "):
query = "SELECT %s" % query
logger.log(CUSTOM_LOGGING.PAYLOAD, query)
output = hashDBRetrieve(query, True, True)
start = time.time()
if not select and "EXEC " not in query.upper():
_ = timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None)
elif not (output and "sqlmapoutput" not in query and "sqlmapfile" not in query):
output = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None)
hashDBWrite(query, output, True)
elif output:
infoMsg = "resumed: %s..." % getUnicode(output, UNICODE_ENCODING)[:20]
logger.info(infoMsg)
threadData.lastQueryDuration = calculateDeltaSeconds(start)
if not output:
return output
elif content:
if output and isListLike(output):
if len(output[0]) == 1:
output = [_[0] for _ in output]
retVal = getUnicode(output, noneToNull=True)
return safecharencode(retVal) if kb.safeCharEncode else retVal
else:
return extractExpectedValue(output, EXPECTED.BOOL)
| [
"[email protected]"
] | |
02d13497a04ea06e5cf6152aa3aec22d52d7d007 | 71764665e27f4b96bab44f38a4a591ffc2171c24 | /hhplt/productsuite/gs11/board.py | 7b57cb91d2ffc0019bfb0e4228b477749d1fc2ba | [] | no_license | kingdomjc/RSU_production_VAT | 693f8c504acc0cc88af92942734ccb85f7e7d7c0 | 9a3d6d3f5a5edfaf30afdff725661630aafe434c | refs/heads/master | 2020-07-31T05:03:46.699606 | 2019-09-24T02:09:53 | 2019-09-24T02:09:53 | 210,491,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,791 | py | #encoding:utf-8
u"单板测试,包括单板数字测试及射频测试。单板正常放置在夹具上,按下开始按钮进行测试"
suiteName = u'''单板测试'''
version = "1.0"
failWeightSum = 10 # overall fail threshold: the suite is judged failed once the summed fail weights exceed this value
from hhplt.productsuite.gs11 import board_digital,board_rf_conduct
from hhplt.testengine.testutil import multipleTest,checkBySenser
from hhplt.deviceresource import askForResource,GS11IOBoardDevice,GS11NuLink,VideoCaptureAndParser
from hhplt.testengine.server import serverParam as SP,serialCode
from hhplt.testengine.exceptions import TestItemFailException,AbortTestException
from hhplt.testengine.versionContainer import getVersionFile
import time,os
from hhplt.testengine.testcase import uiLog,superUiLog
from hhplt.parameters import SESSION,PARAM
from hhplt.testengine.manul import broadcastTestResult,closeAsynMessage
def __getIoBoard():
    '''Acquire the IO-board resource'''
return askForResource("GS11IOBoardDevice", GS11IOBoardDevice.GS11IOBoardDevice,)
def __getNuLink():
    '''Acquire the ICP download tool (NuLink)'''
return askForResource("GS11NuLink", GS11NuLink.GS11NuLink,)
def __getVideoCapture():
    '''Acquire the camera capture-and-parse tool'''
return askForResource("VideoCaptureAndParser", VideoCaptureAndParser.VideoCaptureAndParser,)
autoTrigger = GS11IOBoardDevice.IOBoardAutoTrigger
def setup(product):
    '''Initialization'''
    # check the fixture state: if the clamp is not closed, close it
iob = __getIoBoard()
iob.closeClap()
iob.releaseDemolishButton()
board_digital.setup(product)
SESSION["autoTrigger"].pause()
closeAsynMessage()
def finalFun(product):
    '''Automatically open the fixture, output the result signal, and power down the OBU'''
try:
board_digital.finalFun(product)
iob = __getIoBoard()
iob.openClap()
broadcastTestResult(product)
finally:
SESSION["autoTrigger"].resume()
def T_01_scanBarCode_A(product):
u"扫描条码-扫描单板条码,用于后续追溯"
try:
__getVideoCapture().captureBarcodeSnapshot()
barCode = __getVideoCapture().parseBarcode()
product.setTestingSuiteBarCode(barCode)
return {u"条码扫描结果":barCode}
finally:
__getIoBoard().closeLighting()
def T_02_testVersionDownload_A(product):
u"下载测试版本-下载用于测试的OBU版本"
vf = getVersionFile(SP("gs11.vatVersion.filename","GS11-VAT-09.00.00.version",str))
uiLog(u"版本文件:"+vf)
nul = __getNuLink()
try:
uiLog(u"切换至NuLink模式")
__getIoBoard().switchToNuLink()
nul.downloadVersion(vf,verify=False)
finally:
        # if the factory-info write item does not follow, reset the chip and switch back to serial mode; otherwise there is no need to exit
if "出厂信息写入" not in SESSION["seletedTestItemNameList"]:
uiLog(u"复位芯片...")
try:
nul.resetChip()
finally:
uiLog(u"切换至普通串口模式")
__getIoBoard().switchToNormalSerial()
def T_03_initFactorySetting_A(product):
u'''出厂信息写入-写入MAC地址,唤醒灵敏度参数等,通过ICP方式写入并自动判断信息一致'''
    # read the existing info-area content first
magicWord = "55555555"
try:
uiLog(u"切换至NuLink模式")
__getIoBoard().switchToNuLink()
nul = __getNuLink()
infos = None
try:
infos = nul.readInfo()
obuid = infos[8:16]
superUiLog(u"单板信息区内容:"+infos)
            if infos.startswith(magicWord) and obuid!="FFFFFFFF": # magic word present, so the factory info has already been written
uiLog(u"出厂信息已写入,原标识:%s"%obuid)
product.setTestingProductIdCode(obuid)
uiLog(u"复位芯片...")
nul.resetChip()
return {"OBUID":obuid,"原出厂信息区":infos[:36]+","+infos[128:200]}
except TestItemFailException,e:
            # if the read fails, also treat it as needing a write
uiLog(u"区域读取失败,开始写入出厂信息")
obuid = serialCode("mac") #分配新的MAC地址(obuid)
        displayDirect = SP("gs11.initParam.displayDirect","00",str) # display orientation
softwareVerionFile = SP("gs11.vatVersion.filename","GS11-VAT-09.00.00.version",str)
softwareVersion = "".join(softwareVerionFile.split("-")[2].split(".")[:3])+"00"
        hardwareVersion = SP("gs11.initParam.hardwareVersion","010000",str) # hardware version number
        initWankenSensi_high_grade = SP('gs11.initWanken.high.grade',"03",str) # high wakeup sensitivity - grade
        initWankenSensi_high_level = SP('gs11.initWanken.high.level',"0E",str) # high wakeup sensitivity - level
        initWankenSensi_low_grade = SP('gs11.initWanken.low.grade',"03",str) # low wakeup sensitivity - grade
        initWankenSensi_low_level = SP('gs11.initWanken.low.level',"0E",str) # low wakeup sensitivity - level
        wakeupMode = SP("gs11.initParam.wakeupMode","04",str) # wakeup mode
        amIndex = SP("gs11.initParam.amIndex","00",str) # AmIndex
        transPower = SP("gs11.initParam.transPower","02",str) # transmit power
        txFilter = SP("gs11.initParam.txFilter","06",str) # TxFilter
        sensitivity = SP("gs11.initParam.sensitivity","00",str) # sensitivity selection in use
CONFIG_BUILD_INFO = "".join((magicWord,obuid,displayDirect,softwareVersion,hardwareVersion))
CONFIG_RF_PARA = "".join((magicWord,initWankenSensi_high_grade,initWankenSensi_high_level,
initWankenSensi_low_grade,initWankenSensi_low_level,
wakeupMode,amIndex,transPower,txFilter,sensitivity))
uiLog(u"初始化config区配置")
nul.initCfg()
nul.writeToInfo(CONFIG_BUILD_INFO,CONFIG_RF_PARA)
uiLog(u"分配新的OBUID:%s"%obuid)
product.setTestingProductIdCode(obuid)
uiLog(u"复位芯片...")
nul.resetChip()
return {"OBUID":obuid,u"初始信息区":CONFIG_BUILD_INFO+","+CONFIG_RF_PARA}
finally:
uiLog(u"切换至普通串口模式")
__getIoBoard().switchToNormalSerial()
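# Explanatory note added here (derived from the code above and from T_18 below;
# not in the original source): the two info-area strings are hex-character fields
#   CONFIG_BUILD_INFO, 32 chars, read back later as oriData[:32]:
#       [0:8]   magic word "55555555"
#       [8:16]  OBUID / MAC address
#       [16:18] display direction
#       [18:26] software version
#       [26:32] hardware version
#   CONFIG_RF_PARA, 26 chars, read back later as oriData[128:154]:
#       [0:8]   magic word
#       [8:16]  wakeup sensitivity: high grade, high level, low grade, low level
#       [16:26] wakeup mode, AM index, transmit power, TxFilter, sensitivity select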
def T_04_rs232Test_A(product):
u'''RS232测试-自动判断RS232应答返回是否正确'''
__getIoBoard().closeLighting()
return board_digital.T_03_rs232Test_A(product)
def T_05_esam_A(product):
u'''ESAM测试-判断地区分散码是否正确'''
return board_digital.T_08_esam_A(product)
def T_06_transmittingPower_A(product):
u'''发射功率测试-判断发射功率'''
return multipleTest(board_rf_conduct.T_04_transmittingPower_A,product,3)
def T_07_receiveSensitivity_A(product):
u'''接收灵敏度测试-判断接收灵敏度是否满足标准'''
return multipleTest(board_rf_conduct.T_03_receiveSensitivity_A,product,3)
def T_08_reset_A(product):
u'''复位测试-单板上电后返回数据,系统自动判断是否正确'''
sc = board_digital.__askForPlateDeviceCom()
iob = __getIoBoard()
sc.asynSend("TestReset")
iob.triggerResetButton()
sc.asynReceiveAndAssert("PowerOnSuccess")
# try:
# sc.bslDevice.serial.setTimeout(2)
# sc.asynReceiveAndAssert("PowerOnSuccess")
# except TestItemFailException,e:
    #     # the second PowerOnSuccess may legitimately not arrive
# pass
# finally:
# sc.bslDevice.serial.setTimeout(15)
time.sleep(0.5)
def T_09_capacityVoltage_A(product):
u'''电容电路电压测试-根据电容电路电压值判断是否满足要求'''
return board_digital.T_05_capacityVoltage_A(product)
def T_10_solarVoltage_A(product):
u'''太阳能电路电压测试-判断太阳能电路电压是否满足要求'''
return board_digital.T_06_solarVoltage_A(product)
def T_11_batteryVoltage_A(product):
u'''电池电路电压测试-判断电池电路电压是否满足要求'''
return board_digital.T_07_batteryVoltage_A(product)
def T_12_testHFChip_A(product):
u'''测试高频芯片-测试高频芯片是否正常'''
return board_digital.T_09_testHFChip_A(product)
def T_13_readRfCard_A(product):
u'''测试高频读卡-测试高频读卡是否正常'''
return board_rf_conduct.T_01_readRfCard_A(product)
def T_14_redLight_A(product):
u'''红色LED灯检测-自动判定红LED灯是否正常亮起'''
iob = __getIoBoard()
sc = board_digital.__askForPlateDeviceCom()
checkBySenser(u"红色LED灯",1,lambda:sc.asynSend("TestRedLedPara 1000"),
lambda:sc.asynReceiveAndAssert("TestRedLedParaOK"),iob.checkLedLightState)
def T_15_greenLight_A(product):
u'''绿色LED灯检测-自动判定绿LED灯是否正常亮起'''
iob = __getIoBoard()
sc = board_digital.__askForPlateDeviceCom()
checkBySenser(u"红色LED灯",1,lambda:sc.asynSend("TestGreenLedPara 1000"),
lambda:sc.asynReceiveAndAssert("TestGreenLedParaOK"),iob.checkLedLightState)
def T_16_beep_A(product):
u'''蜂鸣器检测-自动判定蜂鸣器是否响起'''
iob = __getIoBoard()
sc = board_digital.__askForPlateDeviceCom()
checkBySenser(u"蜂鸣器",2,lambda:sc.asynSend("TestBeepPara 3000"),
lambda:sc.asynReceiveAndAssert("TestBeepParaOK"),iob.checkBeepState)
def T_17_oled_A(product):
u'''OLED屏幕测试-自动判断OLED屏幕是否全白'''
    pass # not checked at the bare-board stage for now
def T_18_wakeupSensitivity_A(product):
u'''唤醒灵敏度测试-判断高低灵敏度是否满足标准'''
    # low_level_power = SP('gs11.wakeup.power.low',-33.5) # low wakeup power
    # high_level_power = SP('gs11.wakeup.power.high',-32.5) # high wakeup power
    # for GS11 these two parameters come from the local configuration file rather than the server
    low_level_power = PARAM['gs11.wakeup.power.low'] # low wakeup power
    high_level_power = PARAM['gs11.wakeup.power.high'] # high wakeup power
uiLog(u"唤醒功率范围:%.2f-%.2f"%(low_level_power,high_level_power))
sc = board_rf_conduct.__askForPlateDeviceCom()
try:
lowWakenSensiResult = sc.adjustWakenSensi(low_level_power)
except TestItemFailException,e:
e.message = u"低灵敏度测试失败"
raise e
try:
highWakenSensiResult = sc.adjustWakenSensi(high_level_power)
except TestItemFailException,e:
e.message = u"高灵敏度测试失败"
raise e
    # write the wakeup sensitivity values back
uiLog(u"开始写入灵敏度值")
# writeSensiCmd = "D-WriteWakeSensiPara 4 0x%.2x 0x%.2x 0x%.2x 0x%.2x"%(highWakenSensiResult[0],highWakenSensiResult[1],
# lowWakenSensiResult[0],lowWakenSensiResult[1])
# sc.assertSynComm(request =writeSensiCmd,response = 'D-WriteWakeSensiParaOK')
iob = __getIoBoard()
nul = __getNuLink()
try:
iob.switchToNuLink()
oriData = nul.readInfo()
CONFIG_BUILD_INFO = oriData[:32]
CONFIG_RF_PARA = oriData[128:154]
CONFIG_RF_PARA = CONFIG_RF_PARA[:8]+"%.2X%.2X%.2X%.2X"%(highWakenSensiResult[0],highWakenSensiResult[1],
lowWakenSensiResult[0],lowWakenSensiResult[1]) + \
CONFIG_RF_PARA[16:]
nul.writeToInfo(CONFIG_BUILD_INFO,CONFIG_RF_PARA)
nul.resetChip()
finally:
uiLog(u"切换至普通串口模式")
__getIoBoard().switchToNormalSerial()
# iob.pressDemolishButton()
# time.sleep(1)
# iob.releaseDemolishButton()
time.sleep(0.5)
sc.asynSend("ResetObu")
sc.asynReceiveAndAssert("PowerOnSuccess")
time.sleep(1)
return {"低唤醒灵敏度粗调":lowWakenSensiResult[0],"低唤醒灵敏度细调":lowWakenSensiResult[1],
"高唤醒灵敏度粗调":highWakenSensiResult[0],"高唤醒灵敏度细调":highWakenSensiResult[1]}
def T_19_staticCurrent_A(product):
u'''静态电流测试-判断静态电流值是否在正常范围内'''
try:
sc = board_digital.__askForPlateDeviceCom()
iob = __getIoBoard()
# v = sc.testStaticCurrent()
try:
device = sc.bslDevice
device.make_obu_enter_sleep()
            iob.output(GS11IOBoardDevice.UART_TX_OUTPUT,True) # switch the OBU UART lines
iob.output(GS11IOBoardDevice.UART_RX_OUTPUT,True)
time.sleep(0.5)
device.set_small_current_switch(0)
current_val = device.read_adc_current()
if current_val > 10:
print "current_val=",current_val
superUiLog("small_current_switch = 0,current_val="+str(current_val))
raise
device.set_small_current_switch(1)
current_val = device.read_adc_current()
v = sc.convertAdcToCurrent(current_val)
finally:
iob.output(GS11IOBoardDevice.UART_TX_OUTPUT,False)
iob.output(GS11IOBoardDevice.UART_RX_OUTPUT,False)
device.set_small_current_switch(0)
resultMap = {u"静态电流":v}
if v < SP('gs11.staticCurrent.low',2) or v > SP('gs11.staticCurrent.high',18):
raise TestItemFailException(failWeight = 10,message = u'静态电流测试不通过,正常阈值4-18',output=resultMap)
return resultMap
except TestItemFailException,e:
raise e
except:
raise TestItemFailException(failWeight = 10,message = u'静态电流测试失败')
def T_20_deepStaticCurrent_A(product):
u'''深度静态电流测试-判断深度静态电流值是否在正常范围内'''
iob = __getIoBoard()
sc = board_digital.__askForPlateDeviceCom()
iob.pressDemolishButton()
# sc.assertSynComm(request ='CloseObuUart',response = 'CloseObuUartOK')
iob.output(GS11IOBoardDevice.UART_TX_OUTPUT,True)
iob.output(GS11IOBoardDevice.UART_RX_OUTPUT,True)
time.sleep(2)
try:
v = sc.testDeepStaticCurrent()
resultMap = {u"深度静态电流":v}
if v < SP('gs10.deepStaticCurrent.low',0) or v > SP('gs10.deepStaticCurrent.high',3):
raise TestItemFailException(failWeight = 10,message = u'深度静态电流测试不通过',output=resultMap)
return resultMap
except TestItemFailException,e:
raise e
except Exception,e:
print e
raise TestItemFailException(failWeight = 10,message = u'深度静态电流测试失败')
finally:
sc.assertSynComm(request ='OpenObuUart',response = 'OpenObuUartOK')
iob.releaseDemolishButton()
iob.output(GS11IOBoardDevice.UART_TX_OUTPUT,False)
iob.output(GS11IOBoardDevice.UART_RX_OUTPUT,False)
| [
"[email protected]"
] | |
82942fcabae1643320272fa31c19c206e0b8e146 | f6db8d85a3b41eed543959314d65927353a8229c | /.history/W5/mapsafood/settings_20201202154801.py | dd4788e75db51881d2911e9cc646c3ed5af86df3 | [] | no_license | NFEL/DjangoPaeez99 | d573cc8e36500f08bc104d76f7a2628062d86c2f | 621636bfb47d71f2a4f45037b7264dd5ebc7cdd7 | refs/heads/main | 2023-01-27T22:05:57.788049 | 2020-12-08T10:08:28 | 2020-12-08T10:08:28 | 304,553,353 | 1 | 2 | null | 2020-10-16T07:33:04 | 2020-10-16T07:33:03 | null | UTF-8 | Python | false | false | 3,954 | py | """
Django settings for mapsafood project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'wopis*_*o3rtlblge=mm)pb)*ynu66zc+wqt&bs8l2*v=z1g%$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
LOGGING = {
'version': 1,
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': '/tmp/django.log',
}
},
'loggers': {
'django.db.backends': {
'level': 'DEBUG',
'handlers': ['file'],
}
}
}
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
'django.contrib.gis',
'user_profile',
'geolocation',
'restaurant',
'crispy_form'
]
# DJANGO_ALLOW_ASYNC_UNSAFE = False
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
]
INTERNAL_IPS = [
'127.0.0.1',
]
ROOT_URLCONF = 'mapsafood.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mapsafood.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'mapsafood',
'USER': 'nfel',
'PASSWORD': '1',
'HOST': 'localhost',
'PORT': '5432'
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
BASE_DIR / "static",
]
MEDIA_ROOT = BASE_DIR / 'media/'
MEDIA_URL = '/media/'
| [
"[email protected]"
] | |
c8059431cc04d65e1bcc582461b779c051f7c2d4 | 5d2bc0efb0e457cfd55a90d9754d5ced9c009cae | /venv/lib/python2.7/site-packages/ibm_db_dbi.py | c83e13b87927c0a7203b0952875b5b9c305de8c9 | [] | no_license | michaelp1212/paxton | dafe08eca55557d036189d5242e47e89ec15bf2d | 0bd1da471c3a594c0765a4bc5cd1288404791caf | refs/heads/master | 2021-03-25T07:17:06.523340 | 2020-03-19T01:38:24 | 2020-03-19T01:38:24 | 247,598,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63,583 | py | # +--------------------------------------------------------------------------+
# | Licensed Materials - Property of IBM |
# | |
# | (C) Copyright IBM Corporation 2007-2015 |
# +--------------------------------------------------------------------------+
# | This module complies with SQLAlchemy and is |
# | Licensed under the Apache License, Version 2.0 (the "License"); |
# | you may not use this file except in compliance with the License. |
# | You may obtain a copy of the License at |
# | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable |
# | law or agreed to in writing, software distributed under the License is |
# | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
# | KIND, either express or implied. See the License for the specific |
# | language governing permissions and limitations under the License. |
# +--------------------------------------------------------------------------+
# | Authors: Swetha Patel, Abhigyan Agrawal, Tarun Pasrija, Rahul Priyadarshi,
# | Akshay Anand, Saba Kauser
# +--------------------------------------------------------------------------+
"""
This module implements the Python DB API Specification v2.0 for DB2 database.
"""
import types, string, time, datetime, decimal, sys
import weakref
if sys.version_info >= (3, ):
buffer = memoryview
if sys.version_info < (3, ):
import exceptions
exception = exceptions.StandardError
else:
exception = Exception
import ibm_db
__version__ = ibm_db.__version__
# Constants for specifying database connection options.
SQL_ATTR_AUTOCOMMIT = ibm_db.SQL_ATTR_AUTOCOMMIT
SQL_ATTR_CURRENT_SCHEMA = ibm_db.SQL_ATTR_CURRENT_SCHEMA
SQL_AUTOCOMMIT_OFF = ibm_db.SQL_AUTOCOMMIT_OFF
SQL_AUTOCOMMIT_ON = ibm_db.SQL_AUTOCOMMIT_ON
ATTR_CASE = ibm_db.ATTR_CASE
CASE_NATURAL = ibm_db.CASE_NATURAL
CASE_LOWER = ibm_db.CASE_LOWER
CASE_UPPER = ibm_db.CASE_UPPER
SQL_FALSE = ibm_db.SQL_FALSE
SQL_TRUE = ibm_db.SQL_TRUE
SQL_TABLE_STAT = ibm_db.SQL_TABLE_STAT
SQL_INDEX_CLUSTERED = ibm_db.SQL_INDEX_CLUSTERED
SQL_INDEX_OTHER = ibm_db.SQL_INDEX_OTHER
SQL_DBMS_VER = ibm_db.SQL_DBMS_VER
SQL_DBMS_NAME = ibm_db.SQL_DBMS_NAME
USE_WCHAR = ibm_db.USE_WCHAR
WCHAR_YES = ibm_db.WCHAR_YES
WCHAR_NO = ibm_db.WCHAR_NO
FIX_RETURN_TYPE = 1
# Module globals
apilevel = '2.0'
threadsafety = 0
paramstyle = 'qmark'
class Error(exception):
"""This is the base class of all other exception thrown by this
module. It can be use to catch all exceptions with a single except
statement.
"""
def __init__(self, message):
"""This is the constructor which take one string argument."""
self._message = message
super(Error, self).__init__(message)
def __str__(self):
"""Converts the message to a string."""
return 'ibm_db_dbi::'+str(self.__class__.__name__)+': '+str(self._message)
class Warning(exception):
"""This exception is used to inform the user about important
warnings such as data truncations.
"""
def __init__(self, message):
"""This is the constructor which take one string argument."""
self._message = message
super(Warning,self).__init__(message)
def __str__(self):
"""Converts the message to a string."""
return 'ibm_db_dbi::'+str(self.__class__.__name__)+': '+str(self._message)
class InterfaceError(Error):
"""This exception is raised when the module interface is being
used incorrectly.
"""
pass
class DatabaseError(Error):
"""This exception is raised for errors related to database."""
pass
class InternalError(DatabaseError):
"""This exception is raised when internal database error occurs,
such as cursor is not valid anymore.
"""
pass
class OperationalError(DatabaseError):
"""This exception is raised when database operation errors that are
not under the programmer control occur, such as unexpected
disconnect.
"""
pass
class ProgrammingError(DatabaseError):
"""This exception is raised for programming errors, such as table
not found.
"""
pass
class IntegrityError(DatabaseError):
"""This exception is thrown when errors occur when the relational
integrity of database fails, such as foreign key check fails.
"""
pass
class DataError(DatabaseError):
"""This exception is raised when errors due to data processing,
occur, such as divide by zero.
"""
pass
class NotSupportedError(DatabaseError):
"""This exception is thrown when a method in this module or an
database API is not supported.
"""
pass
def Date(year, month, day):
"""This method can be used to get date object from integers, for
inserting it into a DATE column in the database.
"""
return datetime.date(year, month, day)
def Time(hour, minute, second):
"""This method can be used to get time object from integers, for
inserting it into a TIME column in the database.
"""
return datetime.time(hour, minute, second)
def Timestamp(year, month, day, hour, minute, second):
"""This method can be used to get timestamp object from integers,
for inserting it into a TIMESTAMP column in the database.
"""
return datetime.datetime(year, month, day, hour, minute, second)
def DateFromTicks(ticks):
"""This method can be used to get date object from ticks seconds,
for inserting it into a DATE column in the database.
"""
time_tuple = time.localtime(ticks)
return datetime.date(time_tuple[0], time_tuple[1], time_tuple[2])
def TimeFromTicks(ticks):
"""This method can be used to get time object from ticks seconds,
for inserting it into a TIME column in the database.
"""
time_tuple = time.localtime(ticks)
return datetime.time(time_tuple[3], time_tuple[4], time_tuple[5])
def TimestampFromTicks(ticks):
"""This method can be used to get timestamp object from ticks
seconds, for inserting it into a TIMESTAMP column in the database.
"""
time_tuple = time.localtime(ticks)
return datetime.datetime(time_tuple[0], time_tuple[1], time_tuple[2],
time_tuple[3], time_tuple[4], time_tuple[5])
def Binary(string):
"""This method can be used to store binary information, for
inserting it into a binary type column in the database.
"""
if not isinstance( string, (types.StringType, types.BufferType) ):
raise InterfaceError("Binary function expects type string argument.")
return buffer(string)
class DBAPITypeObject(frozenset):
"""Class used for creating objects that can be used to compare
in order to determine the python type to provide in parameter
sequence argument of the execute method.
"""
def __new__(cls, col_types):
return frozenset.__new__(cls, col_types)
def __init__(self, col_types):
"""Constructor for DBAPITypeObject. It takes a tuple of
database column type as an argument.
"""
self.col_types = col_types
def __cmp__(self, cmp):
"""This method checks if the string compared with is in the
tuple provided to the constructor of this object. It takes
string as an argument.
"""
if cmp in self.col_types:
return 0
if sys.version_info < (3, ):
if cmp < self.col_types:
return 1
else:
return -1
else:
return 1
def __eq__(self, cmp):
"""This method checks if the string compared with is in the
tuple provided to the constructor of this object. It takes
string as an argument.
"""
return cmp in self.col_types
def __ne__(self, cmp):
"""This method checks if the string compared with is not in the
tuple provided to the constructor of this object. It takes
string as an argument.
"""
return cmp not in self.col_types
def __hash__(self):
return id(self)
# The user can use these objects to compare the database column types
# with in order to determine the python type to provide in the
# parameter sequence argument of the execute method.
STRING = DBAPITypeObject(("CHARACTER", "CHAR", "VARCHAR",
"CHARACTER VARYING", "CHAR VARYING", "STRING",))
TEXT = DBAPITypeObject(("CLOB", "CHARACTER LARGE OBJECT", "CHAR LARGE OBJECT","DBCLOB"))
XML = DBAPITypeObject(("XML",))
BINARY = DBAPITypeObject(("BLOB", "BINARY LARGE OBJECT",))
NUMBER = DBAPITypeObject(("INTEGER", "INT", "SMALLINT",))
BIGINT = DBAPITypeObject(("BIGINT",))
FLOAT = DBAPITypeObject(("FLOAT", "REAL", "DOUBLE", "DECFLOAT"))
DECIMAL = DBAPITypeObject(("DECIMAL", "DEC", "NUMERIC", "NUM",))
DATE = DBAPITypeObject(("DATE",))
TIME = DBAPITypeObject(("TIME",))
DATETIME = DBAPITypeObject(("TIMESTAMP",))
ROWID = DBAPITypeObject(())
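# Illustration (added note, not part of the original IBM module): per PEP 249,
# these singletons compare equal to any of the database type names they group,
# so application code can branch on broad categories when inspecting
# cursor.description, e.g.:
#
#     "INTEGER" == NUMBER      # True  -- "INTEGER" is one of NUMBER's col_types
#     "VARCHAR" == NUMBER      # False
#     "TIMESTAMP" == DATETIME  # True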
# This method is used to determine the type of error that was
# generated. It takes an exception instance as an argument, and
# returns exception object of the appropriate type.
def _get_exception(inst):
# These tuple are used to determine the type of exceptions that are
# thrown by the database. They store the SQLSTATE code and the
# SQLSTATE class code(the 2 digit prefix of the SQLSTATE code)
warning_error_tuple = ('01', )
data_error_tuple = ('02', '22', '10601', '10603', '10605', '10901', '10902',
'38552', '54')
operational_error_tuple = ( '08', '09', '10502', '10000', '10611', '38501',
'38503', '38553', '38H01', '38H02', '38H03', '38H04',
'38H05', '38H06', '38H07', '38H09', '38H0A')
integrity_error_tuple = ('23', )
internal_error_tuple = ('24', '25', '26', '2D', '51', '57')
programming_error_tuple = ('08002', '07', 'OD', 'OF','OK','ON','10', '27',
'28', '2E', '34', '36', '38', '39', '56', '42',
'3B', '40', '44', '53', '55', '58', '5U', '21')
not_supported_error_tuple = ('0A', '10509')
# These tuple are used to determine the type of exceptions that are
# thrown from the driver module.
interface_exceptions = ( "Supplied parameter is invalid",
"ATTR_CASE attribute must be one of "
"CASE_LOWER, CASE_UPPER, or CASE_NATURAL",
"Connection or statement handle must be passed in.",
"Param is not a tuple")
programming_exceptions = ( "Connection is not active",
"qualifier must be a string",
"unique must be a boolean",
"Parameters not bound",
"owner must be a string",
"table_name must be a string",
"table type must be a string",
"column_name must be a string",
"Column ordinal out of range",
"procedure name must be a string",
"Requested row number must be a positive value",
"Options Array must have string indexes")
database_exceptions = ( "Binding Error",
"Column information cannot be retrieved: ",
"Column binding cannot be done: ",
"Failed to Determine XML Size: ")
statement_exceptions = ( "Statement Execute Failed: ",
"Describe Param Failed: ",
"Sending data failed: ",
"Fetch Failure: ",
"SQLNumResultCols failed: ",
"SQLRowCount failed: ",
"SQLGetDiagField failed: ",
"Statement prepare Failed: ")
operational_exceptions = ( "Connection Resource cannot be found",
"Failed to Allocate Memory",
"Describe Param Failed: ",
"Statement Execute Failed: ",
"Sending data failed: ",
"Failed to Allocate Memory for XML Data",
"Failed to Allocate Memory for LOB Data")
# First check if the exception is from the database. If it is
# determine the SQLSTATE code which is used further to determine
# the exception type. If not check if the exception is thrown by
# by the driver and return the appropriate exception type. If it
# is not possible to determine the type of exception generated
# return the generic Error exception.
if inst is not None:
message = repr(inst)
if message.startswith("Exception('") and message.endswith("',)"):
message = message[11:]
message = message[:len(message)-3]
index = message.find('SQLSTATE=')
if( message != '') & (index != -1):
error_code = message[(index+9):(index+14)]
prefix_code = error_code[:2]
else:
for key in interface_exceptions:
if message.find(key) != -1:
return InterfaceError(message)
for key in programming_exceptions:
if message.find(key) != -1:
return ProgrammingError(message)
for key in operational_exceptions:
if message.find(key) != -1:
return OperationalError(message)
for key in database_exceptions:
if message.find(key) != -1:
return DatabaseError(message)
for key in statement_exceptions:
if message.find(key) != -1:
return DatabaseError(message)
return Error(message)
else:
return Error('An error has occured')
# First check if the SQLSTATE is in the tuples, if not check
# if the SQLSTATE class code is in the tuples to determine the
# exception type.
if ( error_code in warning_error_tuple or
prefix_code in warning_error_tuple ):
return Warning(message)
if ( error_code in data_error_tuple or
prefix_code in data_error_tuple ):
return DataError(message)
if ( error_code in operational_error_tuple or
prefix_code in operational_error_tuple ):
return OperationalError(message)
if ( error_code in integrity_error_tuple or
prefix_code in integrity_error_tuple ):
return IntegrityError(message)
if ( error_code in internal_error_tuple or
prefix_code in internal_error_tuple ):
return InternalError(message)
if ( error_code in programming_error_tuple or
prefix_code in programming_error_tuple ):
return ProgrammingError(message)
if ( error_code in not_supported_error_tuple or
prefix_code in not_supported_error_tuple ):
return NotSupportedError(message)
return DatabaseError(message)
def _server_connect(dsn, user='', password='', host=''):
"""This method create connection with server
"""
if dsn is None:
raise InterfaceError("dsn value should not be None")
if (not isinstance(dsn, basestring)) | \
(not isinstance(user, basestring)) | \
(not isinstance(password, basestring)) | \
(not isinstance(host, basestring)):
raise InterfaceError("Arguments should be of type string or unicode")
# If the dsn does not contain port and protocal adding database
# and hostname is no good. Add these when required, that is,
# if there is a '=' in the dsn. Else the dsn string is taken to be
# a DSN entry.
if dsn.find('=') != -1:
if dsn[len(dsn) - 1] != ';':
dsn = dsn + ";"
if host != '' and dsn.find('HOSTNAME=') == -1:
dsn = dsn + "HOSTNAME=" + host + ";"
else:
dsn = "DSN=" + dsn + ";"
# attach = true is not valid against IDS. And attach is not needed for connect currently.
#if dsn.find('attach=') == -1:
#dsn = dsn + "attach=true;"
if user != '' and dsn.find('UID=') == -1:
dsn = dsn + "UID=" + user + ";"
if password != '' and dsn.find('PWD=') == -1:
dsn = dsn + "PWD=" + password + ";"
try:
conn = ibm_db.connect(dsn, '', '')
except Exception, inst:
raise _get_exception(inst)
return conn
def createdb(database, dsn, user='', password='', host='', codeset='', mode=''):
"""This method creates a database by using the specified database name, code set, and mode
"""
if database is None:
raise InterfaceError("createdb expects a not None database name value")
if (not isinstance(database, basestring)) | \
(not isinstance(codeset, basestring)) | \
(not isinstance(mode, basestring)):
raise InterfaceError("Arguments sould be string or unicode")
conn = _server_connect(dsn, user=user, password=password, host=host)
try:
return_value = ibm_db.createdb(conn, database, codeset, mode)
except Exception, inst:
raise _get_exception(inst)
finally:
try:
ibm_db.close(conn)
except Exception, inst:
raise _get_exception(inst)
return return_value
def dropdb(database, dsn, user='', password='', host=''):
"""This method drops the specified database
"""
if database is None:
raise InterfaceError("dropdb expects a not None database name value")
if (not isinstance(database, basestring)):
raise InterfaceError("Arguments sould be string or unicode")
conn = _server_connect(dsn, user=user, password=password, host=host)
try:
return_value = ibm_db.dropdb(conn, database)
except Exception, inst:
raise _get_exception(inst)
finally:
try:
ibm_db.close(conn)
except Exception, inst:
raise _get_exception(inst)
return return_value
def recreatedb(database, dsn, user='', password='', host='', codeset='', mode=''):
"""This method drops and then recreate the database by using the specified database name, code set, and mode
"""
if database is None:
raise InterfaceError("recreatedb expects a not None database name value")
if (not isinstance(database, basestring)) | \
(not isinstance(codeset, basestring)) | \
(not isinstance(mode, basestring)):
raise InterfaceError("Arguments sould be string or unicode")
conn = _server_connect(dsn, user=user, password=password, host=host)
try:
return_value = ibm_db.recreatedb(conn, database, codeset, mode)
except Exception, inst:
raise _get_exception(inst)
finally:
try:
ibm_db.close(conn)
except Exception, inst:
raise _get_exception(inst)
return return_value
def createdbNX(database, dsn, user='', password='', host='', codeset='', mode=''):
"""This method creates a database if it not exist by using the specified database name, code set, and mode
"""
if database is None:
raise InterfaceError("createdbNX expects a not None database name value")
if (not isinstance(database, basestring)) | \
(not isinstance(codeset, basestring)) | \
(not isinstance(mode, basestring)):
raise InterfaceError("Arguments sould be string or unicode")
conn = _server_connect(dsn, user=user, password=password, host=host)
try:
return_value = ibm_db.createdbNX(conn, database, codeset, mode)
except Exception, inst:
raise _get_exception(inst)
finally:
try:
ibm_db.close(conn)
except Exception, inst:
raise _get_exception(inst)
return return_value
def connect(dsn, user='', password='', host='', database='', conn_options=None):
"""This method creates a non persistent connection to the database. It returns
a ibm_db_dbi.Connection object.
"""
if dsn is None:
raise InterfaceError("connect expects a not None dsn value")
if (not isinstance(dsn, basestring)) | \
(not isinstance(user, basestring)) | \
(not isinstance(password, basestring)) | \
(not isinstance(host, basestring)) | \
(not isinstance(database, basestring)):
raise InterfaceError("connect expects the first five arguments to"
" be of type string or unicode")
if conn_options is not None:
if not isinstance(conn_options, dict):
raise InterfaceError("connect expects the sixth argument"
" (conn_options) to be of type dict")
if not SQL_ATTR_AUTOCOMMIT in conn_options:
conn_options[SQL_ATTR_AUTOCOMMIT] = SQL_AUTOCOMMIT_OFF
else:
conn_options = {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_OFF}
# If the dsn does not contain port and protocal adding database
# and hostname is no good. Add these when required, that is,
# if there is a '=' in the dsn. Else the dsn string is taken to be
# a DSN entry.
if dsn.find('=') != -1:
if dsn[len(dsn) - 1] != ';':
dsn = dsn + ";"
if database != '' and dsn.find('DATABASE=') == -1:
dsn = dsn + "DATABASE=" + database + ";"
if host != '' and dsn.find('HOSTNAME=') == -1:
dsn = dsn + "HOSTNAME=" + host + ";"
else:
dsn = "DSN=" + dsn + ";"
if user != '' and dsn.find('UID=') == -1:
dsn = dsn + "UID=" + user + ";"
if password != '' and dsn.find('PWD=') == -1:
dsn = dsn + "PWD=" + password + ";"
try:
conn = ibm_db.connect(dsn, '', '', conn_options)
ibm_db.set_option(conn, {SQL_ATTR_CURRENT_SCHEMA : user}, 1)
except Exception, inst:
raise _get_exception(inst)
return Connection(conn)
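# Example usage (illustrative addition; the connection values below are
# placeholders, not real credentials):
#
#     import ibm_db_dbi
#     conn = ibm_db_dbi.connect("DATABASE=sample;HOSTNAME=localhost;PORT=50000;"
#                               "PROTOCOL=TCPIP;UID=db2inst1;PWD=secret;", "", "")
#     cur = conn.cursor()
#     cur.execute("SELECT 1 FROM SYSIBM.SYSDUMMY1")
#     print(cur.fetchone())
#     conn.close()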
def pconnect(dsn, user='', password='', host='', database='', conn_options=None):
"""This method creates persistent connection to the database. It returns
a ibm_db_dbi.Connection object.
"""
if dsn is None:
raise InterfaceError("connect expects a not None dsn value")
if (not isinstance(dsn, basestring)) | \
(not isinstance(user, basestring)) | \
(not isinstance(password, basestring)) | \
(not isinstance(host, basestring)) | \
(not isinstance(database, basestring)):
raise InterfaceError("connect expects the first five arguments to"
" be of type string or unicode")
if conn_options is not None:
if not isinstance(conn_options, dict):
raise InterfaceError("connect expects the sixth argument"
" (conn_options) to be of type dict")
if not SQL_ATTR_AUTOCOMMIT in conn_options:
conn_options[SQL_ATTR_AUTOCOMMIT] = SQL_AUTOCOMMIT_OFF
else:
conn_options = {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_OFF}
# If the dsn does not contain port and protocal adding database
# and hostname is no good. Add these when required, that is,
# if there is a '=' in the dsn. Else the dsn string is taken to be
# a DSN entry.
if dsn.find('=') != -1:
if dsn[len(dsn) - 1] != ';':
dsn = dsn + ";"
if database != '' and dsn.find('DATABASE=') == -1:
dsn = dsn + "DATABASE=" + database + ";"
if host != '' and dsn.find('HOSTNAME=') == -1:
dsn = dsn + "HOSTNAME=" + host + ";"
else:
dsn = "DSN=" + dsn + ";"
if user != '' and dsn.find('UID=') == -1:
dsn = dsn + "UID=" + user + ";"
if password != '' and dsn.find('PWD=') == -1:
dsn = dsn + "PWD=" + password + ";"
try:
conn = ibm_db.pconnect(dsn, '', '', conn_options)
ibm_db.set_option(conn, {SQL_ATTR_CURRENT_SCHEMA : user}, 1)
except Exception, inst:
raise _get_exception(inst)
return Connection(conn)
class Connection(object):
"""This class object represents a connection between the database
and the application.
"""
def __init__(self, conn_handler):
"""Constructor for Connection object. It takes ibm_db
connection handler as an argument.
"""
self.conn_handler = conn_handler
# Used to identify close cursors for generating exceptions
# after the connection is closed.
self._cursor_list = []
self.__dbms_name = ibm_db.get_db_info(conn_handler, SQL_DBMS_NAME)
self.__dbms_ver = ibm_db.get_db_info(conn_handler, SQL_DBMS_VER)
self.FIX_RETURN_TYPE = 1
# This method is used to get the DBMS_NAME
def __get_dbms_name( self ):
return self.__dbms_name
# This attribute specifies the DBMS_NAME
# It is a read only attribute.
dbms_name = property(__get_dbms_name, None, None, "")
# This method is used to get the DBMS_ver
def __get_dbms_ver( self ):
return self.__dbms_ver
# This attribute specifies the DBMS_ver
# It is a read only attribute.
dbms_ver = property(__get_dbms_ver, None, None, "")
def close(self):
"""This method closes the Database connection associated with
the Connection object. It takes no arguments.
"""
self.rollback()
try:
if self.conn_handler is None:
raise ProgrammingError("Connection cannot be closed; "
"connection is no longer active.")
else:
return_value = ibm_db.close(self.conn_handler)
except Exception, inst:
raise _get_exception(inst)
self.conn_handler = None
for index in range(len(self._cursor_list)):
if (self._cursor_list[index]() != None):
tmp_cursor = self._cursor_list[index]()
tmp_cursor.conn_handler = None
tmp_cursor.stmt_handler = None
tmp_cursor._all_stmt_handlers = None
self._cursor_list = []
return return_value
def commit(self):
"""This method commits the transaction associated with the
Connection object. It takes no arguments.
"""
try:
return_value = ibm_db.commit(self.conn_handler)
except Exception, inst:
raise _get_exception(inst)
return return_value
def rollback(self):
"""This method rollbacks the transaction associated with the
Connection object. It takes no arguments.
"""
try:
return_value = ibm_db.rollback(self.conn_handler)
except Exception, inst:
raise _get_exception(inst)
return return_value
def cursor(self):
"""This method returns a Cursor object associated with the
Connection. It takes no arguments.
"""
if self.conn_handler is None:
raise ProgrammingError("Cursor cannot be returned; "
"connection is no longer active.")
cursor = Cursor(self.conn_handler, self)
self._cursor_list.append(weakref.ref(cursor))
return cursor
# Sets connection attribute values
def set_option(self, attr_dict):
"""Input: connection attribute dictionary
Return: True on success or False on failure
"""
return ibm_db.set_option(self.conn_handler, attr_dict, 1)
# Retrieves connection attributes values
def get_option(self, attr_key):
"""Input: connection attribute key
Return: current setting of the resource attribute requested
"""
return ibm_db.get_option(self.conn_handler, attr_key, 1)
# Sets FIX_RETURN_TYPE. Added for performance improvement
def set_fix_return_type(self, is_on):
try:
if is_on:
self.FIX_RETURN_TYPE = 1
else:
self.FIX_RETURN_TYPE = 0
except Exception, inst:
raise _get_exception(inst)
return self.FIX_RETURN_TYPE
# Sets connection AUTOCOMMIT attribute
def set_autocommit(self, is_on):
"""Input: connection attribute: true if AUTOCOMMIT ON, false otherwise (i.e. OFF)
Return: True on success or False on failure
"""
try:
if is_on:
is_set = ibm_db.set_option(self.conn_handler, {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_ON}, 1)
else:
is_set = ibm_db.set_option(self.conn_handler, {SQL_ATTR_AUTOCOMMIT : SQL_AUTOCOMMIT_OFF}, 1)
except Exception, inst:
raise _get_exception(inst)
return is_set
# Sets connection attribute values
def set_current_schema(self, schema_name):
"""Input: connection attribute dictionary
Return: True on success or False on failure
"""
self.current_schema = schema_name
try:
is_set = ibm_db.set_option(self.conn_handler, {SQL_ATTR_CURRENT_SCHEMA : schema_name}, 1)
except Exception, inst:
raise _get_exception(inst)
return is_set
# Retrieves connection attributes values
def get_current_schema(self):
"""Return: current setting of the schema attribute
"""
try:
conn_schema = ibm_db.get_option(self.conn_handler, SQL_ATTR_CURRENT_SCHEMA, 1)
if conn_schema is not None and conn_schema != '':
self.current_schema = conn_schema
except Exception, inst:
raise _get_exception(inst)
return self.current_schema
# Retrieves the IBM Data Server version for a given Connection object
def server_info(self):
"""Return: tuple (DBMS_NAME, DBMS_VER)
"""
try:
server_info = []
server_info.append(self.dbms_name)
server_info.append(self.dbms_ver)
except Exception, inst:
raise _get_exception(inst)
return tuple(server_info)
def set_case(self, server_type, str_value):
return str_value.upper()
# Retrieves the tables for a specified schema (and/or given table name)
def tables(self, schema_name=None, table_name=None):
"""Input: connection - ibm_db.IBM_DBConnection object
Return: sequence of table metadata dicts for the specified schema
"""
result = []
if schema_name is not None:
schema_name = self.set_case("DB2_LUW", schema_name)
if table_name is not None:
table_name = self.set_case("DB2_LUW", table_name)
try:
stmt = ibm_db.tables(self.conn_handler, None, schema_name, table_name)
row = ibm_db.fetch_assoc(stmt)
i = 0
while (row):
result.append( row )
i += 1
row = ibm_db.fetch_assoc(stmt)
ibm_db.free_result(stmt)
except Exception, inst:
raise _get_exception(inst)
return result
# Retrieves metadata pertaining to index for specified schema (and/or table name)
def indexes(self, unique=True, schema_name=None, table_name=None):
"""Input: connection - ibm_db.IBM_DBConnection object
Return: sequence of index metadata dicts for the specified table
Example:
Index metadata retrieved from schema 'PYTHONIC.TEST_TABLE' table
{
'TABLE_SCHEM': 'PYTHONIC', 'TABLE_CAT': None,
'TABLE_NAME': 'ENGINE_USERS', 'PAGES': None,
'COLUMN_NAME': 'USER_ID' 'FILTER_CONDITION': None,
'INDEX_NAME': 'SQL071201150750170', 'CARDINALITY': None,
'ORDINAL_POSITION': 1, 'INDEX_QUALIFIER': 'SYSIBM',
'TYPE': 3,
'NON_UNIQUE': 0,
'ASC_OR_DESC': 'A'
}
"""
result = []
if schema_name is not None:
schema_name = self.set_case("DB2_LUW", schema_name)
if table_name is not None:
table_name = self.set_case("DB2_LUW", table_name)
try:
stmt = ibm_db.statistics(self.conn_handler, None, schema_name, table_name, unique)
row = ibm_db.fetch_assoc(stmt)
i = 0
while (row):
if row['TYPE'] == SQL_INDEX_OTHER:
result.append( row )
i += 1
row = ibm_db.fetch_assoc(stmt)
ibm_db.free_result(stmt)
except Exception, inst:
raise _get_exception(inst)
return result
# Retrieves metadata pertaining to primary keys for specified schema (and/or table name)
def primary_keys(self, unique=True, schema_name=None, table_name=None):
"""Input: connection - ibm_db.IBM_DBConnection object
Return: sequence of PK metadata dicts for the specified table
Example:
PK metadata retrieved from 'PYTHONIC.ORDERS' table
{
'TABLE_SCHEM': 'PYTHONIC', 'TABLE_CAT': None,
'TABLE_NAME': 'ORDERS',
'COLUMN_NAME': 'ORDER_ID'
'PK_NAME': 'SQL071128122038680',
'KEY_SEQ': 1
}
"""
result = []
if schema_name is not None:
schema_name = self.set_case("DB2_LUW", schema_name)
if table_name is not None:
table_name = self.set_case("DB2_LUW", table_name)
try:
stmt = ibm_db.primary_keys(self.conn_handler, None, schema_name, table_name)
row = ibm_db.fetch_assoc(stmt)
i = 0
while (row):
result.append( row )
i += 1
row = ibm_db.fetch_assoc(stmt)
ibm_db.free_result(stmt)
except Exception, inst:
raise _get_exception(inst)
return result
# Retrieves metadata pertaining to foreign keys for specified schema (and/or table name)
def foreign_keys(self, unique=True, schema_name=None, table_name=None):
"""Input: connection - ibm_db.IBM_DBConnection object
Return: sequence of FK metadata dicts for the specified table
Example:
FK metadata retrieved from 'PYTHONIC.ENGINE_EMAIL_ADDRESSES' table
{
'PKTABLE_SCHEM': 'PYTHONIC', 'PKTABLE_CAT': None,
'PKTABLE_NAME': 'ENGINE_USERS', 'FKTABLE_CAT': None,
'PKCOLUMN_NAME': 'USER_ID', 'UPDATE_RULE': 3,
'PK_NAME': 'SQL071205090958680', 'DELETE_RULE': 3
'KEY_SEQ': 1, 'DEFERRABILITY': 7,
'FK_NAME': 'SQL071205091000160',
'FKCOLUMN_NAME': 'REMOTE_USER_ID',
'FKTABLE_NAME': 'ENGINE_EMAIL_ADDRESSES',
'FKTABLE_SCHEM': 'PYTHONIC'
}
"""
result = []
if schema_name is not None:
schema_name = self.set_case("DB2_LUW", schema_name)
if table_name is not None:
table_name = self.set_case("DB2_LUW", table_name)
try:
stmt = ibm_db.foreign_keys(self.conn_handler, None, None, None, None, schema_name, table_name)
row = ibm_db.fetch_assoc(stmt)
i = 0
while (row):
result.append( row )
i += 1
row = ibm_db.fetch_assoc(stmt)
ibm_db.free_result(stmt)
except Exception, inst:
raise _get_exception(inst)
return result
# Retrieves the columns for a specified schema (and/or table name and column name)
def columns(self, schema_name=None, table_name=None, column_names=None):
"""Input: connection - ibm_db.IBM_DBConnection object
Return: sequence of column metadata dicts for the specified schema
Example:
Column metadata retrieved from schema 'PYTHONIC.FOO' table, column 'A'
{
'TABLE_NAME': 'FOO', 'NULLABLE': 1,
'ORDINAL_POSITION': 2L, 'REMARKS': None,
'COLUMN_NAME': 'A', 'BUFFER_LENGTH': 30L,
'TYPE_NAME': 'VARCHAR', 'SQL_DATETIME_SUB': None,
'COLUMN_DEF': None, 'DATA_TYPE': 12,
'IS_NULLABLE': 'YES', 'SQL_DATA_TYPE': 12,
'COLUMN_SIZE': 30L, 'TABLE_CAT': None,
'CHAR_OCTET_LENGTH': 30L, 'TABLE_SCHEM': 'PYTHONIC',
'NUM_PREC_RADIX': None,
'DECIMAL_DIGITS': None
}
"""
result = []
if schema_name is not None:
schema_name = self.set_case("DB2_LUW", schema_name)
if table_name is not None:
table_name = self.set_case("DB2_LUW", table_name)
try:
stmt = ibm_db.columns(self.conn_handler, None, schema_name, table_name)
row = ibm_db.fetch_assoc(stmt)
i = 0
while (row):
result.append( row )
i += 1
row = ibm_db.fetch_assoc(stmt)
ibm_db.free_result(stmt)
col_names_lower = []
if column_names is not None:
for name in column_names:
col_names_lower.append(name.lower())
include_columns = []
if column_names and column_names != '':
for column in result:
if column['COLUMN_NAME'].lower() in col_names_lower:
column['COLUMN_NAME'] = column['COLUMN_NAME'].lower()
include_columns.append(column)
result = include_columns
except Exception, inst:
raise _get_exception(inst)
return result
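# Illustrative sketch: fetching column metadata for one table and restricting the
# result to a couple of columns (schema, table and column names are placeholders):
#
#   cols = conn.columns(schema_name="PYTHONIC", table_name="FOO", column_names=["A", "B"])
#   for col in cols:
#       print col["COLUMN_NAME"], col["TYPE_NAME"], col["COLUMN_SIZE"]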
# Defines a cursor for the driver connection
class Cursor(object):
"""This class represents a cursor of the connection. It can be
used to process an SQL statement.
"""
# This method is used to get the description attribute.
def __get_description(self):
""" If this method has already been called, after executing a select statement,
return the stored information in the self.__description.
"""
if self.__description is not None:
return self.__description
if self.stmt_handler is None:
return None
self.__description = []
try:
num_columns = ibm_db.num_fields(self.stmt_handler)
""" If the execute statement did not produce a result set return None.
"""
if num_columns == False:
self.__description = None
return None
for column_index in range(num_columns):
column_desc = []
column_desc.append(ibm_db.field_name(self.stmt_handler,
column_index))
type = ibm_db.field_type(self.stmt_handler, column_index)
type = type.upper()
if STRING == type:
column_desc.append(STRING)
elif TEXT == type:
column_desc.append(TEXT)
elif XML == type:
column_desc.append(XML)
elif BINARY == type:
column_desc.append(BINARY)
elif NUMBER == type:
column_desc.append(NUMBER)
elif BIGINT == type:
column_desc.append(BIGINT)
elif FLOAT == type:
column_desc.append(FLOAT)
elif DECIMAL == type:
column_desc.append(DECIMAL)
elif DATE == type:
column_desc.append(DATE)
elif TIME == type:
column_desc.append(TIME)
elif DATETIME == type:
column_desc.append(DATETIME)
elif ROWID == type:
column_desc.append(ROWID)
column_desc.append(ibm_db.field_display_size(
self.stmt_handler, column_index))
column_desc.append(ibm_db.field_display_size(
self.stmt_handler, column_index))
column_desc.append(ibm_db.field_precision(
self.stmt_handler, column_index))
column_desc.append(ibm_db.field_scale(self.stmt_handler,
column_index))
column_desc.append(ibm_db.field_nullable(
self.stmt_handler, column_index))
self.__description.append(column_desc)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
return self.__description
# This attribute provides the metadata information of the columns
# in the result set produced by the last execute function. It is
# a read only attribute.
description = property(fget = __get_description)
# This method is used to get the rowcount attribute.
def __get_rowcount( self ):
return self.__rowcount
def __iter__( self ):
return self
def next( self ):
row = self.fetchone()
if row == None:
raise StopIteration
return row
# This attribute specifies the number of rows the last executeXXX()
# produced or affected. It is a read only attribute.
rowcount = property(__get_rowcount, None, None, "")
# This method is used to get the Connection object
def __get_connection( self ):
return self.__connection
# This attribute specifies the connection object.
# It is a read only attribute.
connection = property(__get_connection, None, None, "")
def __init__(self, conn_handler, conn_object=None):
"""Constructor for Cursor object. It takes ibm_db connection
handler as an argument.
"""
# This attribute is used to determine the fetch size for fetchmany
# operation. It is a read/write attribute
self.arraysize = 1
self.__rowcount = -1
self._result_set_produced = False
self.__description = None
self.conn_handler = conn_handler
self.stmt_handler = None
self._is_scrollable_cursor = False
self.__connection = conn_object
self.messages = []
self.FIX_RETURN_TYPE = conn_object.FIX_RETURN_TYPE
# This method closes the statement associated with the cursor object.
# It takes no arguments.
def close(self):
"""This method closes the cursor object. After this method is
called the cursor object is no longer usable. It takes no
arguments.
"""
messages = []
if self.conn_handler is None:
self.messages.append(ProgrammingError("Cursor cannot be closed; connection is no longer active."))
raise self.messages[len(self.messages) - 1]
try:
return_value = ibm_db.free_stmt(self.stmt_handler)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
self.stmt_handler = None
self.conn_handler = None
self._all_stmt_handlers = None
if self.__connection is not None:
try:
self.__connection._cursor_list.remove(weakref.ref(self))
except:
pass
return return_value
# helper for calling procedure
def _callproc_helper(self, procname, parameters=None):
if parameters is not None:
buff = []
CONVERT_STR = (buffer)
# Convert date/time and binary objects to string for
# inserting into the database.
for param in parameters:
if isinstance(param, CONVERT_STR):
param = str(param)
buff.append(param)
parameters = tuple(buff)
try:
result = ibm_db.callproc(self.conn_handler, procname,parameters)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
else:
try:
result = ibm_db.callproc(self.conn_handler, procname)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
return result
def callproc(self, procname, parameters=None):
"""This method can be used to execute a stored procedure.
It takes the name of the stored procedure and the parameters to
the stored procedure as arguments.
"""
self.messages = []
if not isinstance(procname, basestring):
self.messages.append(InterfaceError("callproc expects the first argument to be of type String or Unicode."))
raise self.messages[len(self.messages) - 1]
if parameters is not None:
if not isinstance(parameters, (types.ListType, types.TupleType)):
self.messages.append(InterfaceError("callproc expects the second argument to be of type list or tuple."))
raise self.messages[len(self.messages) - 1]
result = self._callproc_helper(procname, parameters)
return_value = None
self.__description = None
self._all_stmt_handlers = []
if isinstance(result, types.TupleType):
self.stmt_handler = result[0]
return_value = result[1:]
else:
self.stmt_handler = result
self._result_set_produced = True
return return_value
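# Illustrative sketch: invoking a stored procedure through the cursor.  The
# procedure name and arguments are placeholders; as implemented above, callproc
# returns the (possibly updated) parameter values when the procedure has any.
#
#   cur = conn.cursor()
#   out_params = cur.callproc("MY_PROC", (42, ""))
#   rows = cur.fetchall()   # first result set, if the procedure produced one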
# Helper for preparing an SQL statement.
def _prepare_helper(self, operation, parameters=None):
try:
ibm_db.free_stmt(self.stmt_handler)
except:
pass
try:
self.stmt_handler = ibm_db.prepare(self.conn_handler, operation)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
# Helper for preparing an SQL statement.
def _set_cursor_helper(self):
if (ibm_db.get_option(self.stmt_handler, ibm_db.SQL_ATTR_CURSOR_TYPE, 0) != ibm_db.SQL_CURSOR_FORWARD_ONLY):
self._is_scrollable_cursor = True
else:
self._is_scrollable_cursor = False
self._result_set_produced = False
try:
num_columns = ibm_db.num_fields(self.stmt_handler)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
if not num_columns:
return True
self._result_set_produced = True
return True
# Helper for executing an SQL statement.
def _execute_helper(self, parameters=None):
if parameters is not None:
buff = []
CONVERT_STR = (buffer)
# Convert date/time and binary objects to string for
# inserting into the database.
for param in parameters:
if isinstance(param, CONVERT_STR):
param = str(param)
buff.append(param)
parameters = tuple(buff)
try:
return_value = ibm_db.execute(self.stmt_handler, parameters)
if not return_value:
if ibm_db.conn_errormsg() is not None:
self.messages.append(Error(str(ibm_db.conn_errormsg())))
raise self.messages[len(self.messages) - 1]
if ibm_db.stmt_errormsg() is not None:
self.messages.append(Error(str(ibm_db.stmt_errormsg())))
raise self.messages[len(self.messages) - 1]
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
else:
try:
return_value = ibm_db.execute(self.stmt_handler)
if not return_value:
if ibm_db.conn_errormsg() is not None:
self.messages.append(Error(str(ibm_db.conn_errormsg())))
raise self.messages[len(self.messages) - 1]
if ibm_db.stmt_errormsg() is not None:
self.messages.append(Error(str(ibm_db.stmt_errormsg())))
raise self.messages[len(self.messages) - 1]
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
return return_value
# This method is used to set the rowcount after executing an SQL
# statement.
def _set_rowcount(self):
self.__rowcount = -1
if not self._result_set_produced:
try:
counter = ibm_db.num_rows(self.stmt_handler)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
self.__rowcount = counter
elif self._is_scrollable_cursor:
try:
counter = ibm_db.get_num_result(self.stmt_handler)
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
if counter >= 0:
self.__rowcount = counter
return True
# Retrieves the last generated identity value from the DB2 catalog
def _get_last_identity_val(self):
"""
The result of the IDENTITY_VAL_LOCAL function is not affected by the following:
- A single row INSERT statement with a VALUES clause for a table without an
identity column
- A multiple row INSERT statement with a VALUES clause
- An INSERT statement with a fullselect
"""
operation = 'SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1'
try:
stmt_handler = ibm_db.prepare(self.conn_handler, operation)
if ibm_db.execute(stmt_handler):
row = ibm_db.fetch_tuple(stmt_handler)
if row[0] is not None:
identity_val = int(row[0])
else:
identity_val = None
else:
if ibm_db.conn_errormsg() is not None:
self.messages.append(Error(str(ibm_db.conn_errormsg())))
raise self.messages[len(self.messages) - 1]
if ibm_db.stmt_errormsg() is not None:
self.messages.append(Error(str(ibm_db.stmt_errormsg())))
raise self.messages[len(self.messages) - 1]
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
return identity_val
last_identity_val = property(_get_last_identity_val, None, None, "")
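# Illustrative sketch: reading the last generated identity value right after an
# INSERT into a table with an identity column (table/column names are placeholders):
#
#   cur.execute("INSERT INTO ORDERS (CUSTOMER) VALUES (?)", ("ACME",))
#   new_id = cur.last_identity_val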
def execute(self, operation, parameters=None):
"""
This method can be used to prepare and execute an SQL
statement. It takes the SQL statement(operation) and a
sequence of values to substitute for the parameter markers in
the SQL statement as arguments.
"""
self.messages = []
if not isinstance(operation, basestring):
self.messages.append(InterfaceError("execute expects the first argument [%s] to be of type String or Unicode." % operation ))
raise self.messages[len(self.messages) - 1]
if parameters is not None:
if not isinstance(parameters, (types.ListType, types.TupleType, types.DictType)):
self.messages.append(InterfaceError("execute parameters argument should be sequence."))
raise self.messages[len(self.messages) - 1]
self.__description = None
self._all_stmt_handlers = []
self._prepare_helper(operation)
self._set_cursor_helper()
self._execute_helper(parameters)
return self._set_rowcount()
def executemany(self, operation, seq_parameters):
"""
This method can be used to prepare, and then execute an SQL
statement many times. It takes the SQL statement(operation)
and sequence of sequence of values to substitute for the
parameter markers in the SQL statement as its argument.
"""
self.messages = []
if not isinstance(operation, basestring):
self.messages.append(InterfaceError("executemany expects the first argument to be of type String or Unicode."))
raise self.messages[len(self.messages) - 1]
if seq_parameters is None:
self.messages.append(InterfaceError("executemany expects a not None seq_parameters value"))
raise self.messages[len(self.messages) - 1]
if not isinstance(seq_parameters, (types.ListType, types.TupleType)):
self.messages.append(InterfaceError("executemany expects the second argument to be of type list or tuple of sequence."))
raise self.messages[len(self.messages) - 1]
CONVERT_STR = (buffer)
# Convert date/time and binary objects to string for
# inserting into the database.
buff = []
seq_buff = []
for index in range(len(seq_parameters)):
buff = []
for param in seq_parameters[index]:
if isinstance(param, CONVERT_STR):
param = str(param)
buff.append(param)
seq_buff.append(tuple(buff))
seq_parameters = tuple(seq_buff)
self.__description = None
self._all_stmt_handlers = []
self.__rowcount = -1
self._prepare_helper(operation)
try:
autocommit = ibm_db.autocommit(self.conn_handler)
if autocommit != 0:
ibm_db.autocommit(self.conn_handler, 0)
self.__rowcount = ibm_db.execute_many(self.stmt_handler, seq_parameters)
if autocommit != 0:
ibm_db.commit(self.conn_handler)
ibm_db.autocommit(self.conn_handler, autocommit)
if self.__rowcount == -1:
if ibm_db.conn_errormsg() is not None:
self.messages.append(Error(str(ibm_db.conn_errormsg())))
raise self.messages[len(self.messages) - 1]
if ibm_db.stmt_errormsg() is not None:
self.messages.append(Error(str(ibm_db.stmt_errormsg())))
raise self.messages[len(self.messages) - 1]
except Exception, inst:
self._set_rowcount()
self.messages.append(Error(inst))
raise self.messages[len(self.messages) - 1]
return True
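# Illustrative sketch of execute()/executemany() with parameter markers
# (table name and values are placeholders):
#
#   cur.execute("INSERT INTO T (C1, C2) VALUES (?, ?)", (1, "a"))
#   cur.executemany("INSERT INTO T (C1, C2) VALUES (?, ?)", [(2, "b"), (3, "c"), (4, "d")])
#   print cur.rowcount   # rows affected by the last call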
def _fetch_helper(self, fetch_size=-1):
"""
This method is a helper function for fetching fetch_size number of
rows, after executing an SQL statement which produces a result set.
It takes the number of rows to fetch as an argument.
If this is not provided it fetches all the remaining rows.
"""
if self.stmt_handler is None:
self.messages.append(ProgrammingError("Please execute an SQL statement in order to get a row from result set."))
raise self.messages[len(self.messages) - 1]
if self._result_set_produced == False:
self.messages.append(ProgrammingError("The last call to execute did not produce any result set."))
raise self.messages[len(self.messages) - 1]
row_list = []
rows_fetched = 0
while (fetch_size == -1) or \
(fetch_size != -1 and rows_fetched < fetch_size):
try:
row = ibm_db.fetch_tuple(self.stmt_handler)
except Exception, inst:
if ibm_db.stmt_errormsg() is not None:
self.messages.append(Error(str(ibm_db.stmt_errormsg())))
else:
self.messages.append(_get_exception(inst))
if len(row_list) == 0:
raise self.messages[len(self.messages) - 1]
else:
return row_list
if row != False:
if self.FIX_RETURN_TYPE == 1:
row_list.append(self._fix_return_data_type(row))
else:
row_list.append(row)
else:
return row_list
rows_fetched = rows_fetched + 1
return row_list
def fetchone(self):
"""This method fetches one row from the database, after
executing an SQL statement which produces a result set.
"""
row_list = self._fetch_helper(1)
if len(row_list) == 0:
return None
else:
return row_list[0]
def fetchmany(self, size=0):
"""This method fetches size number of rows from the database,
after executing an SQL statement which produces a result set.
It takes the number of rows to fetch as an argument. If this
is not provided it fetches self.arraysize number of rows.
"""
if not isinstance(size, (int, long)):
self.messages.append(InterfaceError( "fetchmany expects argument type int or long."))
raise self.messages[len(self.messages) - 1]
if size == 0:
size = self.arraysize
if size < -1:
self.messages.append(ProgrammingError("fetchmany argument size expected to be positive."))
raise self.messages[len(self.messages) - 1]
return self._fetch_helper(size)
def fetchall(self):
"""This method fetches all remaining rows from the database,
after executing an SQL statement which produces a result set.
"""
return self._fetch_helper()
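# Illustrative sketch of the three fetch styles after a SELECT (placeholder query):
#
#   cur.execute("SELECT C1, C2 FROM T")
#   first = cur.fetchone()    # a single row tuple, or None when exhausted
#   some = cur.fetchmany(10)  # up to 10 rows
#   rest = cur.fetchall()     # whatever remains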
def nextset(self):
"""This method can be used to get the next result set after
executing a stored procedure, which produces multiple result sets.
"""
self.messages = []
if self.stmt_handler is None:
self.messages.append(ProgrammingError("Please execute an SQL statement in order to get result sets."))
raise self.messages[len(self.messages) - 1]
if self._result_set_produced == False:
self.messages.append(ProgrammingError("The last call to execute did not produce any result set."))
raise self.messages[len(self.messages) - 1]
try:
# Store all the stmt handler that were created. The
# handler was the one created by the execute method. It
# should be used to get next result set.
self.__description = None
self._all_stmt_handlers.append(self.stmt_handler)
self.stmt_handler = ibm_db.next_result(self._all_stmt_handlers[0])
except Exception, inst:
self.messages.append(_get_exception(inst))
raise self.messages[len(self.messages) - 1]
if self.stmt_handler == False:
self.stmt_handler = None
if self.stmt_handler == None:
return None
return True
def setinputsizes(self, sizes):
"""This method currently does nothing."""
pass
def setoutputsize(self, size, column=-1):
"""This method currently does nothing."""
pass
# This method is used to convert a string representing decimal
# and binary data in a row tuple fetched from the database
# to decimal and binary objects, for returning it to the user.
def _fix_return_data_type(self, row):
row_list = None
for index in range(len(row)):
if row[index] is not None:
type = ibm_db.field_type(self.stmt_handler, index)
type = type.upper()
try:
if type == 'BLOB':
if row_list is None:
row_list = list(row)
row_list[index] = buffer(row[index])
elif type == 'DECIMAL':
if row_list is None:
row_list = list(row)
row_list[index] = decimal.Decimal(str(row[index]).replace(",", "."))
except Exception, inst:
self.messages.append(DataError("Data type format error: "+ str(inst)))
raise self.messages[len(self.messages) - 1]
if row_list is None:
return row
else:
return tuple(row_list)
| [
"[email protected]"
] | |
2f8e37dec004ca49835f8b8bc04602ffbeae85b3 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/green.py | 3d1d36e0a8a5ded3f2b6dab641fd0c4ffb2aeb76 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 2,588 | py | ii = [('BentJDO2.py', 1), ('EmerRN.py', 3), ('CookGHP3.py', 4), ('LyelCPG2.py', 3), ('MarrFDI.py', 4), ('RogePAV2.py', 17), ('CoolWHM2.py', 11), ('KembFFF.py', 1), ('GodwWSL2.py', 2), ('RogePAV.py', 2), ('SadlMLP.py', 1), ('FerrSDO3.py', 12), ('WilbRLW.py', 11), ('WilbRLW4.py', 1), ('RennJIT.py', 43), ('ProuWCM.py', 2), ('AubePRP2.py', 2), ('MartHSI2.py', 17), ('LeakWTI2.py', 10), ('KembFJ1.py', 23), ('WilkJMC3.py', 25), ('WilbRLW5.py', 5), ('LeakWTI3.py', 5), ('PettTHE.py', 29), ('MarrFDI3.py', 4), ('TennAP.py', 26), ('PeckJNG.py', 35), ('KnowJMM.py', 1), ('BailJD2.py', 6), ('ChalTPW2.py', 1), ('GellWPT.py', 10), ('AdamWEP.py', 22), ('FitzRNS3.py', 9), ('WilbRLW2.py', 7), ('ClarGE2.py', 12), ('GellWPT2.py', 7), ('WilkJMC2.py', 4), ('CarlTFR.py', 43), ('SeniNSP.py', 2), ('LyttELD.py', 8), ('CoopJBT2.py', 1), ('TalfTAC.py', 1), ('RoscTTI3.py', 10), ('AinsWRR3.py', 16), ('CookGHP2.py', 1), ('KiddJAE.py', 3), ('BailJD1.py', 3), ('RoscTTI2.py', 8), ('CoolWHM.py', 13), ('MarrFDI2.py', 1), ('CrokTPS.py', 51), ('ClarGE.py', 9), ('LandWPA.py', 6), ('BuckWGM.py', 6), ('IrviWVD.py', 7), ('LyelCPG.py', 8), ('GilmCRS.py', 22), ('DaltJMA.py', 6), ('WestJIT2.py', 26), ('DibdTRL2.py', 7), ('AinsWRR.py', 6), ('CrocDNL.py', 4), ('MedwTAI.py', 16), ('LandWPA2.py', 2), ('WadeJEB.py', 8), ('FerrSDO2.py', 2), ('GodwWLN.py', 2), ('CoopJBT.py', 1), ('KirbWPW2.py', 6), ('SoutRD2.py', 9), ('BackGNE.py', 25), ('LeakWTI4.py', 9), ('LeakWTI.py', 28), ('MedwTAI2.py', 14), ('BachARE.py', 15), ('SoutRD.py', 20), ('DickCSG.py', 2), ('BuckWGM2.py', 8), ('WheeJPT.py', 29), ('MereHHB3.py', 1), ('HowiWRL2.py', 72), ('BailJD3.py', 3), ('MereHHB.py', 2), ('WilkJMC.py', 3), ('HogaGMM.py', 4), ('MartHRW.py', 35), ('MackCNH.py', 2), ('WestJIT.py', 34), ('BabbCEM.py', 1), ('FitzRNS4.py', 97), ('CoolWHM3.py', 10), ('FitzRNS.py', 20), ('EdgeMHT.py', 2), ('BowrJMM.py', 5), ('LyttELD3.py', 6), ('HallFAC.py', 20), ('FerrSDO.py', 6), ('RoscTTI.py', 7), ('ThomGLG.py', 2), ('KembFJ2.py', 23), ('LewiMJW.py', 18), ('BellCHM.py', 8), ('JacoWHI2.py', 1), ('SomeMMH.py', 4), ('AinsWRR2.py', 14), ('MereHHB2.py', 4), ('BrewDTO.py', 186), ('JacoWHI.py', 1), ('ClarGE3.py', 7), ('RogeSIP.py', 21), ('MartHRW2.py', 34), ('DibdTRL.py', 24), ('FitzRNS2.py', 30), ('HogaGMM2.py', 5), ('MartHSI.py', 50), ('EvarJSP.py', 2), ('NortSTC.py', 38), ('BowrJMM2.py', 4), ('LyelCPG3.py', 56), ('BowrJMM3.py', 7), ('BeckWRE.py', 8), ('TaylIF.py', 1), ('WordWYR.py', 23), ('DibdTBR.py', 2), ('ThomWEC.py', 5), ('KeigTSS.py', 7), ('KirbWPW.py', 9), ('WaylFEP.py', 2), ('BentJDO.py', 2), ('ClarGE4.py', 57), ('HowiWRL.py', 52)] | [
"[email protected]"
] | |
973af6afd92942add0a57a3941085fd6e2d66174 | 5e84763c16bd6e6ef06cf7a129bb4bd29dd61ec5 | /blimgui/dist/pyglet/font/__init__.py | 18313153ed4a927df01f0629f5dc04a01d122ae6 | [
"MIT"
] | permissive | juso40/bl2sdk_Mods | 8422a37ca9c2c2bbf231a2399cbcb84379b7e848 | 29f79c41cfb49ea5b1dd1bec559795727e868558 | refs/heads/master | 2023-08-15T02:28:38.142874 | 2023-07-22T21:48:01 | 2023-07-22T21:48:01 | 188,486,371 | 42 | 110 | MIT | 2022-11-20T09:47:56 | 2019-05-24T20:55:10 | Python | UTF-8 | Python | false | false | 7,514 | py | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2022 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Load fonts.
pyglet will automatically load any system-installed fonts. You can add additional fonts
(for example, from your program resources) using :meth:`add_file` or
:meth:`add_directory`. These fonts are then available in the same way as system-installed fonts::
from pyglet import font
font.add_file('action_man.ttf')
action_man = font.load('Action Man', 16)
# or
from pyglet import resource
resource.add_font('action_man.ttf')
action_man = font.load('Action Man')
See the :mod:`pyglet.font.base` module for documentation on the base classes used
by this package.
"""
import os
import sys
import weakref
import pyglet
from pyglet import gl
if not getattr(sys, 'is_pyglet_doc_run', False):
if pyglet.compat_platform == 'darwin':
from pyglet.font.quartz import QuartzFont
_font_class = QuartzFont
elif pyglet.compat_platform in ('win32', 'cygwin'):
from pyglet.libs.win32.constants import WINDOWS_7_OR_GREATER
if WINDOWS_7_OR_GREATER:
if pyglet.options["advanced_font_features"] is True:
from pyglet.font.directwrite import Win32DirectWriteFont
_font_class = Win32DirectWriteFont
else:
from pyglet.font.win32 import GDIPlusFont
_font_class = GDIPlusFont
else:
from pyglet.font.win32 import GDIPlusFont
_font_class = GDIPlusFont
else:
from pyglet.font.freetype import FreeTypeFont
_font_class = FreeTypeFont
def have_font(name):
"""Check if specified system font name is available."""
return _font_class.have_font(name)
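# Illustrative sketch (not part of pyglet itself): fall back to a stock face when
# the preferred font is not installed.  The font names are placeholders.
#
#   face = 'Action Man' if have_font('Action Man') else 'Times New Roman'
#   action_man = load(face, 16)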
def load(name=None, size=None, bold=False, italic=False, stretch=False, dpi=None):
"""Load a font for rendering.
:Parameters:
`name` : str, or list of str
Font family, for example, "Times New Roman". If a list of names
is provided, the first one matching a known font is used. If no
font can be matched to the name(s), a default font is used. In
pyglet 1.1, the name may be omitted.
`size` : float
Size of the font, in points. The returned font may be an exact
match or the closest available. In pyglet 1.1, the size may be
omitted, and defaults to 12pt.
`bold` : bool
If True, a bold variant is returned, if one exists for the given
family and size.
`italic` : bool
If True, an italic variant is returned, if one exists for the given
family and size.
`dpi` : float
The assumed resolution of the display device, for the purposes of
determining the pixel size of the font. Defaults to 96.
:rtype: `Font`
"""
# Arbitrary default size
if size is None:
size = 12
if dpi is None:
dpi = 96
# Find first matching name
if type(name) in (tuple, list):
for n in name:
if _font_class.have_font(n):
name = n
break
else:
name = None
# Locate or create font cache
shared_object_space = gl.current_context.object_space
if not hasattr(shared_object_space, 'pyglet_font_font_cache'):
shared_object_space.pyglet_font_font_cache = weakref.WeakValueDictionary()
shared_object_space.pyglet_font_font_hold = []
font_cache = shared_object_space.pyglet_font_font_cache
font_hold = shared_object_space.pyglet_font_font_hold
# Look for font name in font cache
descriptor = (name, size, bold, italic, stretch, dpi)
if descriptor in font_cache:
return font_cache[descriptor]
# Not in cache, create from scratch
font = _font_class(name, size, bold=bold, italic=italic, stretch=stretch, dpi=dpi)
# Save parameters for new-style layout classes to recover
# TODO: add properties to the Font classes, so these can be queried:
font.size = size
font.bold = bold
font.italic = italic
font.stretch = stretch
font.dpi = dpi
# Cache font in weak-ref dictionary to avoid reloading while still in use
font_cache[descriptor] = font
# Hold onto refs of last three loaded fonts to prevent them being
# collected if momentarily dropped.
del font_hold[3:]
font_hold.insert(0, font)
return font
def add_file(font):
"""Add a font to pyglet's search path.
In order to load a font that is not installed on the system, you must
call this method to tell pyglet that it exists. You can supply
either a filename or any file-like object.
The font format is platform-dependent, but is typically a TrueType font
file containing a single font face. Note that to use a font added with this method,
you should pass the face name (not the file name) to :py:func:`pyglet.font.load` or any
other place where you normally specify a font.
:Parameters:
`font` : str or file-like object
Filename or file-like object to load fonts from.
"""
if isinstance(font, str):
font = open(font, 'rb')
if hasattr(font, 'read'):
font = font.read()
_font_class.add_font_data(font)
def add_directory(directory):
"""Add a directory of fonts to pyglet's search path.
This function simply calls :meth:`pyglet.font.add_file` for each file with a ``.ttf``
extension in the given directory. Subdirectories are not searched.
:Parameters:
`dir` : str
Directory that contains font files.
"""
for file in os.listdir(directory):
if file[-4:].lower() == '.ttf':
add_file(os.path.join(directory, file))
__all__ = ('add_file', 'add_directory', 'load', 'have_font')
| [
"[email protected]"
] | |
144d632a68c20edf5f33c6a21eba24ba413b5060 | 61eae81a1780141ba1323adb93d3d41c4ad0cc3e | /src/model_bank/dataset_2018_7_13_lcp_recognition_model.py | 66b9a61b10443213dac0a710aa181f548137a8c5 | [] | no_license | lsy125/AE-signal-model | 0783cda7eee972cce3ab1abf682047baf3d739a1 | da11717ffc8bf74dbb2d6f818e73f0b839f0ab5a | refs/heads/master | 2020-04-02T08:46:05.140848 | 2018-10-22T02:01:48 | 2018-10-22T02:01:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,219 | py | from keras.layers import *
from keras.models import Sequential, Model
from keras.utils import plot_model
from keras import regularizers
# self lib
from src.utils.helpers import direct_to_dir
def lcp_recognition_binary_model():
visible_in = Input(shape=(6000, 1))
conv_1 = Conv1D(5, kernel_size=5, activation='relu')(visible_in)
maxpool_1 = MaxPooling1D(pool_size=3, strides=2)(conv_1)
dropout_1 = Dropout(0.4)(maxpool_1)
conv_2 = Conv1D(20, kernel_size=5, activation='relu')(dropout_1)
maxpool_2 = MaxPooling1D(pool_size=3, strides=2)(conv_2)
conv_3 = Conv1D(32, kernel_size=5, activation='relu')(maxpool_2)
maxpool_3 = MaxPooling1D(pool_size=3, strides=2)(conv_3)
flatten = Flatten()(maxpool_3)
dropout_2 = Dropout(0.5)(flatten)
dense_1 = Dense(10, activation='relu')(dropout_2)
# dense_2 = Dense(20, activation='relu')(dense_1)
# dense_3 = Dense(80, activation='relu')(dense_2)
visible_out = Dense(1, activation='sigmoid')(dense_1)
model = Model(inputs=visible_in, outputs=visible_out)
print(model.summary())
return model
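# Illustrative training sketch (not from the original project).  The optimizer and
# loss are assumptions chosen to match the sigmoid output; x_train is assumed to be
# an array of shape (n_samples, 6000, 1) and y_train the matching 0/1 labels.
#
#   model = lcp_recognition_binary_model()
#   model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
#   model.fit(x_train, y_train, batch_size=32, epochs=10, validation_split=0.2)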
def lcp_recognition_binary_model_2():
'''
Reference: a VGG-style 1D CNN adapted from an online example:
model = Sequential()
model.add(Conv1D(64, 3, activation='relu', input_shape=(6000, 1)))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(GlobalAveragePooling1D())
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))
:return:
'''
model = Sequential()
model.add(Conv1D(64, 3, activation='relu', input_shape=(6000, 1)))
# model.add(Conv1D(16, 3, activation='relu'))
model.add(MaxPooling1D(3, strides=2))
model.add(Dropout(0.3))
# model.add(Conv1D(32, 3, activation='relu'))
# model.add(Conv1D(32, 3, activation='relu'))
# model.add(MaxPooling1D(3, strides=2))
# model.add(Dropout(0.3))
# model.add(Conv1D(64, 3, activation='relu'))
# model.add(Conv1D(64, 3, activation='relu'))
# model.add(MaxPooling1D(3, strides=2))
# model.add(Dropout(0.3))
#
# model.add(Conv1D(128, 3, activation='relu'))
# model.add(Conv1D(128, 3, activation='relu'))
# model.add(MaxPooling1D(3, strides=2))
# model.add(Dropout(0.3))
# model.add(Conv1D(256, 3, activation='relu'))
# model.add(Conv1D(256, 3, activation='relu'))
model.add(GlobalAveragePooling1D())
# model.add(Dropout(0.5))
model.add(Dense(50, activation='relu'))
model.add(Dense(10, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
print(model.summary())
return model
def lcp_recognition_binary_model_3():
'''
Dual-branch model: two parallel convolutional paths whose pooled features are concatenated before the dense layers.
'''
visible_in = Input(shape=(6000, 1))
# Part a
conv_a_1 = Conv1D(32, kernel_size=5, activation='relu', name='conv_a_1')(visible_in)
conv_a_2 = Conv1D(32, kernel_size=5, activation='relu', name='conv_a_2')(conv_a_1)
maxpool_a_1 = MaxPooling1D(pool_size=3, strides=2, name='maxp_a_1')(conv_a_2)
drop_a_1 = Dropout(0.3, name='drop_a_1')(maxpool_a_1)
conv_a_3 = Conv1D(64, kernel_size=5, activation='relu', name='conv_a_3')(drop_a_1)
conv_a_4 = Conv1D(128, kernel_size=5, activation='relu', name='conv_a_4', use_bias=False)(conv_a_3)
maxpool_a_2 = MaxPooling1D(pool_size=3, strides=2, name='maxp_a_2')(conv_a_4)
gap_a_1 = GlobalAveragePooling1D(name='gap_a_1')(maxpool_a_2)
# Part b
conv_b_1 = Conv1D(32, kernel_size=5, activation='relu', name='conv_b_1')(visible_in)
conv_b_2 = Conv1D(32, kernel_size=5, activation='relu', name='conv_b_2')(conv_b_1)
maxpool_b_1 = MaxPooling1D(pool_size=3, strides=2, name='maxp_b_1')(conv_b_2)
drop_b_1 = Dropout(0.3, name='drop_b_1')(maxpool_b_1)
conv_b_3 = Conv1D(128, kernel_size=5, activation='relu', name='conv_b_3')(drop_b_1)
# drop_b_2 = Dropout(0.3, name='drop_b_2')(conv_b_3)
# conv_b_4 = Conv1D(128, kernel_size=5, activation='relu', name='conv_b_4')(drop_b_2)
# maxpool_b_2 = MaxPooling1D(pool_size=3, strides=2, name='maxp_b_2')(conv_b_4)
gap_b_1 = GlobalAveragePooling1D(name='gap_b_1')(conv_b_3)
# Layer 2
merge_1 = concatenate([gap_a_1, gap_b_1])
dense_1 = Dense(50, activation='relu', name='dense_1')(merge_1)
drop_1 = Dropout(0.2, name='drop_1')(dense_1)
visible_out = Dense(1, activation='sigmoid', name='dense_2')(drop_1)
model = Model(inputs=visible_in, outputs=visible_out)
print(model.summary())
save_model_plot = direct_to_dir(where='result') + 'lcp_recognition_binary_model_3.png'
plot_model(model, to_file=save_model_plot)
return model
# TESTING AT LAPTOP
def model_1():
visible_in = Input(shape=(6000, 1))
conv_1 = Conv1D(filters=32, kernel_size=5, strides=1, activation='relu', name='Conv_a_1')(visible_in)
gap = GlobalAveragePooling1D()(conv_1)
visible_out = Dense(1, activation='sigmoid')(gap)
model = Model(inputs=visible_in, outputs=visible_out)
print(model.summary())
# lcp_recognition_binary_model_2()
| [
"[email protected]"
] | |
8afd85abc6361f6690923f4902b20607b778ad70 | 87b7d7948aa51fdb4a27540240579788896369ea | /code/runs_sacred/model_data_random/_sources/data_set_file_8dbb1c73ef6f7d6c76679b005f0b994b.py | 492729d1daa74ff2dbc6c7004f3cccb8805d5d13 | [] | no_license | Samuel-Levesque/Projet_GLO7030 | 6f13accd63b52107ec3e3a0b9b5f52edccda7c8d | 557bce3235f09723900f65c6e3b44a0ed9d2b519 | refs/heads/master | 2022-01-16T12:49:22.884798 | 2019-05-05T18:38:35 | 2019-05-05T18:38:35 | 177,038,991 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,055 | py | import random
import warnings
warnings.filterwarnings('ignore') # to suppress some matplotlib deprecation warnings
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import ast
import cv2
import matplotlib.pyplot as plt
import os
from torch.utils.data import Dataset, DataLoader, ConcatDataset
from utility import save_object
def create_encoding_deconding_dict(path_data):
'''
Create a label-encoding dictionary and the matching label-decoding dictionary.
:param path_data: directory containing one csv file per class
:return: (encoding_dict, decoding_dict)
'''
filenames = os.listdir(path_data)
filenames=sorted(filenames)
en_dict = {}
counter = 0
for fn in filenames:
en_dict[fn[:-4].split('/')[-1].replace(' ', '_')] = counter
counter += 1
dec_dict = {v: k for k, v in en_dict.items()}
save_object(en_dict,"saves_obj/en_dict.pk")
save_object(dec_dict, "saves_obj/dec_dict.pk")
return en_dict,dec_dict
pass
# Dataset for a single class
class DoodlesDataset(Dataset):
"""Doodles csv dataset.
Adapted from https://www.kaggle.com/leighplt/pytorch-starter-kit/notebook
PyTorch dataset for a single category. To build the complete dataset, several of these datasets are concatenated.
"""
def __init__(self, csv_file, root_dir,nrows,encoding_dict=None, mode='train', skiprows=None, size=224, transform=None):
"""
Args:
csv_file (string): Path to the csv file with annotations. ex :airplane.csv
root_dir (string): Directory with all the csv.
mode (string): Train or test mode.
nrows (int): Number of rows of file to read. Useful for reading pieces of large files.
skiprows (list-like or integer or callable):
Line numbers to skip (0-indexed) or number of lines to skip (int) at the start of the file.
size (int): Size of output image.
transform (callable, optional): Optional transform to be applied (not used for now)
on a sample.
"""
self.root_dir = root_dir
file = os.path.join(self.root_dir, csv_file)
self.size = size
self.mode = mode
self.doodle = pd.read_csv(file, usecols=['drawing'], nrows=nrows, skiprows=skiprows)  # pandas DataFrame of raw strokes
# self.transform = transform
if self.mode == 'train':
self.txt_label= csv_file.replace(' ', '_')[:-4]
self.label = encoding_dict[self.txt_label]
@staticmethod
def _draw(raw_strokes, size=256, largeur_trait=6):
BASE_SIZE = 256
img = np.full((BASE_SIZE, BASE_SIZE), 255,dtype=np.uint8)
for t, stroke in enumerate(raw_strokes):
for i in range(len(stroke[0]) - 1):
color = 0
_ = cv2.line(img, (stroke[0][i], stroke[1][i]),
(stroke[0][i + 1], stroke[1][i + 1]), color, largeur_trait)
if size != BASE_SIZE:
return cv2.resize(img, (size, size))
else:
return img
def __len__(self):
return len(self.doodle)
def __getitem__(self, idx):
raw_strokes = ast.literal_eval(self.doodle.drawing[idx])
sample = self._draw(raw_strokes, size=self.size, largeur_trait=6)
# if self.transform:
# sample = self.transform(sample)
if self.mode == 'train':
return (sample[None] / 255).astype('float32'), self.label
else:
return (sample[None] / 255).astype('float32')
# For all classes, nb_rows per class
def create_huge_data_set(path,nb_rows=1000,size_image=224,encoding_dict=None,skip_rows=None,filenames=None,mode="train"):
'''
Concatenate the datasets of several classes.
:param path: path of the folder containing the csv files
:param nb_rows: number of rows per class
:param size_image:
:param filenames: to restrict to specific classes, e.g. [airplane.csv, angel.csv]
:return:
'''
if filenames==None:
filenames = os.listdir(path)
doodles = ConcatDataset([DoodlesDataset(fn,path,nrows=nb_rows, size=size_image,
skiprows=skip_rows,encoding_dict=encoding_dict,mode=mode)
for fn in filenames])
return doodles
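# Illustrative usage sketch (the path is a placeholder): build the concatenated
# dataset for every class csv found in `path` and wrap it in a DataLoader.
#
#   en_dict, dec_dict = create_encoding_deconding_dict(path)
#   train_set = create_huge_data_set(path, nb_rows=1000, size_image=224, encoding_dict=en_dict)
#   train_loader = DataLoader(train_set, batch_size=32, shuffle=True)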
def generate_random_dataset( path, nb_row_valid,nb_rows_test,nb_rows,dict_nb_lignes, size_image=224, encoding_dict=None,filenames=None):
'''
For each class in filenames, sample nb_rows random rows from the corresponding file (the first rows are reserved for validation/test).
:param path:
:param nb_row_valid:
:param nb_rows_test:
:param nb_rows:
:param size_image:
:param encoding_dict:
:param filenames:
:return:
'''
if filenames==None:
filenames = os.listdir(path)
nb_lignes_skip = nb_row_valid + nb_rows_test
list_dataset=[]
for fn in filenames:
n = dict_nb_lignes[fn]
skip =list(range(1,nb_lignes_skip)) +sorted(random.sample(range(nb_lignes_skip,n), n - nb_rows-nb_lignes_skip))
data_set=DoodlesDataset(fn, path, nrows=nb_rows, size=size_image,
skiprows=skip, encoding_dict=encoding_dict, mode="train")
list_dataset.append(data_set)
doodles = ConcatDataset(list_dataset)
return doodles
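# Illustrative usage sketch: the first nb_row_valid + nb_rows_test lines of each csv
# are reserved, and nb_rows training rows are sampled at random from the remainder.
#
#   dict_nb_lignes = create_dict_nb_ligne(path)
#   random_train = generate_random_dataset(path, nb_row_valid=100, nb_rows_test=100,
#                                          nb_rows=1000, dict_nb_lignes=dict_nb_lignes,
#                                          encoding_dict=en_dict)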
def create_dict_nb_ligne(path,filenames=None):
'''
Dictionary of the number of lines in each csv file.
:param path:
:return:
'''
if filenames==None:
filenames = os.listdir(path)
dict_nb_ligne={}
for fn in filenames:
n = sum(1 for line in open(path + fn)) - 1
dict_nb_ligne[fn]=n
save_object(dict_nb_ligne,"saves_obj/dict_nb_ligne.pk")
return dict_nb_ligne
def imshow(img_tensor):
npimg = img_tensor.numpy()
# print(npimg)
plt.imshow(npimg,cmap="gray")
plt.show()
if __name__ == "__main__":
path = 'D:/User/William/Documents/Devoir/Projet Deep/data/mini_train/'
# path = 'D:/User/William/Documents/Devoir/Projet Deep/data/train_simplified/'
filenames = os.listdir(path)
filenames = [path + x for x in filenames]
size_image = 224
select_nrows = 1000
csv_file=filenames[0].split('/')[-1]
# Create a dataset for one particular csv file
# essai=DoodlesDataset(csv_file, path,nrows=select_nrows, size=size_image,skiprows=range(1,10))
# loader=DataLoader(essai,batch_size=10)
# for image, label in loader:
# print(image)
# t1=image[0,0,:,:]
# #imshow(t1)
# print(label)
doodles = ConcatDataset([DoodlesDataset(fn.split('/')[-1], path,
nrows=select_nrows, size=size_image) for fn in filenames])
loader = DataLoader(doodles, batch_size=2,shuffle=True)
i=0
for image, label in loader:
# print(image)
t1 = image[0, 0, :, :]
t2=image[1,0,:,:]
# imshow(t1)
# imshow(t2)
i+=2
print(i)
print(label)
print("end") | [
"[email protected]"
] | |
8a307d78726f1ec71e12b3c0c22f5ac21db4bcf9 | dd38578f9622b1ea54838340711a96d57fcfbbc6 | /kbm/migrations/0004_auto_20200629_0017.py | 29157ee0c71fb1bf849f5a93c6227cfca901170a | [] | no_license | nabaman/sistem-informasi-akademik | 544d3563c922f105d310bb6377d236e6022fcb2c | 1ffb46e86ab76c6c4b98a10862acf01a5676d574 | refs/heads/master | 2022-11-17T05:54:07.808086 | 2020-07-01T06:02:28 | 2020-07-01T06:02:28 | 257,820,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | # Generated by Django 3.0.5 on 2020-06-28 17:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kbm', '0003_auto_20200609_2156'),
]
operations = [
migrations.AlterField(
model_name='data_jurusan',
name='jurusan',
field=models.CharField(max_length=30, null=True),
),
]
| [
"[email protected]"
] | |
373405fdc1325b6237b78fad3cdd074864c92bc5 | 45f6c7f6e7160c5535d74582c6ba165ee21ab56e | /test/test_van/test_saved_lists.py | 740bb5997167057c1ca394ba2c8000ab159b836b | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | move-coop/parsons | 1b0bcfcbb8c0e1edb26d06664c6ecb7370eae864 | 553b0ede3e41cc9811c48d768d05953f8cf75312 | refs/heads/main | 2023-08-21T12:34:34.004784 | 2023-08-17T17:46:09 | 2023-08-17T17:46:09 | 204,343,221 | 240 | 122 | NOASSERTION | 2023-09-14T20:55:15 | 2019-08-25T19:56:21 | Python | UTF-8 | Python | false | false | 7,500 | py | import unittest
import os
import requests_mock
import unittest.mock as mock
from parsons import VAN, Table
from test.utils import validate_list
from parsons.utilities import cloud_storage
class TestSavedLists(unittest.TestCase):
def setUp(self):
self.van = VAN(os.environ["VAN_API_KEY"], db="MyVoters", raise_for_status=False)
def tearDown(self):
pass
@requests_mock.Mocker()
def test_get_saved_lists(self, m):
json = {
"count": 1,
"items": [
{
"savedListId": 517612,
"listCount": 974656,
"name": "LikelyParents(16andunder)_DWID_S... - MN",
"doorCount": 520709,
"description": "null",
}
],
"nextPageLink": None,
}
m.get(self.van.connection.uri + "savedLists", json=json)
expected = ["savedListId", "listCount", "name", "doorCount", "description"]
self.assertTrue(validate_list(expected, self.van.get_saved_lists()))
@requests_mock.Mocker()
def test_get_saved_list(self, m):
saved_list_id = 517612
json = {
"savedListId": 517612,
"listCount": 974656,
"name": "LikelyParents(16andunder)_DWID_S... - MN",
"doorCount": 520709,
"description": "null",
}
m.get(self.van.connection.uri + f"savedLists/{saved_list_id}", json=json)
# expected = ['savedListId', 'listCount', 'name', 'doorCount', 'description']
self.assertEqual(self.van.get_saved_list(saved_list_id), json)
def test_upload_saved_list(self):
cloud_storage.post_file = mock.MagicMock()
cloud_storage.post_file.return_value = "https://box.com/my_file.zip"
self.van.connection._soap_client = mock.MagicMock()
self.van.get_folders = mock.MagicMock()
self.van.get_folders.return_value = [{"folderId": 1}]
tbl = Table([["VANID"], ["1"], ["2"], ["3"]])
self.van.upload_saved_list(
tbl, "GOTV List", 1, replace=True, url_type="S3", bucket="tmc-scratch"
)
assert self.van.connection._soap_client.service.CreateAndStoreSavedList.called
@requests_mock.Mocker()
def test_upload_saved_list_rest(self, m):
cloud_storage.post_file = mock.MagicMock()
cloud_storage.post_file.return_value = "https://box.com/my_file.zip"
self.van.get_folders = mock.MagicMock()
self.van.get_folders.return_value = [{"folderId": 1}]
tbl = Table([["VANID"], ["1"], ["2"], ["3"]])
response = self.van.upload_saved_list_rest(
tbl=tbl,
url_type="S3",
folder_id=1,
list_name="GOTV List",
description="parsons test list",
callback_url="https://webhook.site/69ab58c3-a3a7-4ed8-828c-1ea850cb4160",
columns=["VANID"],
id_column="VANID",
bucket="tmc-scratch",
overwrite=517612,
)
self.assertIn("jobId", response)
@requests_mock.Mocker()
def test_get_folders(self, m):
json = {
"count": 2,
"items": [
{"folderId": 5046, "name": "#2018_MN_active_universe"},
{"folderId": 2168, "name": "API Generated Lists"},
],
"nextPageLink": None,
}
m.get(self.van.connection.uri + "folders", json=json)
expected = ["folderId", "name"]
self.assertTrue(validate_list(expected, self.van.get_folders()))
@requests_mock.Mocker()
def test_get_folder(self, m):
folder_id = 5046
json = {"folderId": 5046, "name": "#2018_MN_active_universe"}
m.get(self.van.connection.uri + f"folders/{folder_id}", json=json)
self.assertEqual(json, self.van.get_folder(folder_id))
@requests_mock.Mocker()
def test_export_job_types(self, m):
json = {
"count": 1,
"items": [{"exportJobTypeId": 4, "name": "SavedListExport"}],
"nextPageLink": None,
}
m.get(self.van.connection.uri + "exportJobTypes", json=json)
expected = ["exportJobTypeId", "name"]
self.assertTrue(validate_list(expected, self.van.get_export_job_types()))
@requests_mock.Mocker()
def test_export_job_create(self, m):
saved_list_id = 517612
json = {
"status": "Completed",
"errorCode": "null",
"exportJobGuid": "bf4d1297-1c77-3fb2-03bd-f0acda122d37",
"activistCodes": "null",
"canvassFileRequestId": 448,
"dateExpired": "2018-09-08T16:04:00Z",
"surveyQuestions": "null",
"webhookUrl": "https://www.nothing.com/",
"downloadUrl": "https://ngpvan.blob.core.windows.net/canvass-files-savedlistexport/bf4d1297-1c77-3fb2-03bd-f0acda122d37_2018-09-08T13:03:27.7191831-04:00.csv", # noqa: E501
"savedListId": 517612,
"districtFields": "null",
"canvassFileRequestGuid": "bf4d1297-1c77-3fb2-03bd-f0acda122d37",
"customFields": "null",
"type": 4,
"exportJobId": 448,
}
m.post(self.van.connection.uri + "exportJobs", json=json, status_code=201)
# expected = [
# 'status',
# 'errorCode',
# 'exportJobGuid',
# 'activistCodes',
# 'canvassFileRequestId',
# 'dateExpired',
# 'surveyQuestions',
# 'webhookUrl',
# 'downloadUrl',
# 'savedListId',
# 'districtFields',
# 'canvassFileRequestGuid',
# 'customFields',
# 'type',
# 'exportJobId']
self.assertEqual(json, self.van.export_job_create(saved_list_id))
@requests_mock.Mocker()
def test_get_export_job(self, m):
export_job_id = 448
json = {
"status": "Completed",
"errorCode": "null",
"exportJobGuid": "bf4d1297-1c77-3fb2-03bd-f0acda122d37",
"activistCodes": "null",
"canvassFileRequestId": 448,
"dateExpired": "2018-09-08T16:04:00Z",
"surveyQuestions": "null",
"webhookUrl": "https://www.nothing.com/",
"downloadUrl": "https://ngpvan.blob.core.windows.net/canvass-files-savedlistexport/bf4d1297-1c77-3fb2-03bd-f0acda122d37_2018-09-08T13:03:27.7191831-04:00.csv", # noqa: E501
"savedListId": 517612,
"districtFields": "null",
"canvassFileRequestGuid": "bf4d1297-1c77-3fb2-03bd-f0acda122d37",
"customFields": "null",
"type": 4,
"exportJobId": 448,
}
# expected = [
# 'status',
# 'errorCode',
# 'exportJobGuid',
# 'activistCodes',
# 'canvassFileRequestId',
# 'dateExpired',
# 'surveyQuestions',
# 'webhookUrl',
# 'downloadUrl',
# 'savedListId',
# 'districtFields',
# 'canvassFileRequestGuid',
# 'customFields',
# 'type',
# 'exportJobId']
m.get(self.van.connection.uri + f"exportJobs/{export_job_id}", json=json)
self.assertEqual(json, self.van.get_export_job(export_job_id))
| [
"[email protected]"
] | |
24ea8739a59fecf67cf4b899f035e7f803deef1c | 9b420b88924f8b5120f76a319504a59edf4c7810 | /fluent_pages/tests/urldispatcher.py | 63bd6de5c48706b2232d40a699daba12adb96999 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | techdragon/django-fluent-pages | 3384a10b231e10f7d290e70cf18d6dcbddcea76a | 1e38bb2fe1db0b376098ba35df7a7286ad3e2794 | refs/heads/master | 2021-01-18T10:56:12.763397 | 2013-08-03T08:32:55 | 2013-08-03T08:32:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,456 | py | from fluent_pages.models import Page
from fluent_pages.tests.utils import AppTestCase, script_name, override_settings
from fluent_pages.tests.testapp.models import SimpleTextPage, PlainTextFile, WebShopPage
class UrlDispatcherTests(AppTestCase):
"""
Tests for URL resolving.
"""
@classmethod
def setUpTree(cls):
SimpleTextPage.objects.create(title="Home", slug="home", status=SimpleTextPage.PUBLISHED, author=cls.user, override_url='/')
SimpleTextPage.objects.create(title="Text1", slug="sibling1", status=SimpleTextPage.PUBLISHED, author=cls.user, contents="TEST_CONTENTS")
SimpleTextPage.objects.create(title="Text1", slug="unpublished", status=SimpleTextPage.DRAFT, author=cls.user)
WebShopPage.objects.create(title="Shop1", slug="shop", status=SimpleTextPage.PUBLISHED, author=cls.user)
PlainTextFile.objects.create(slug='README', status=PlainTextFile.PUBLISHED, author=cls.user, content="This is the README")
def test_get_for_path(self):
"""
The testdata should be found under the expected URLs.
"""
# Test basic state
sibling1 = Page.objects.get_for_path('/sibling1/')
self.assertEquals(sibling1.get_absolute_url(), '/sibling1/', "Page at {0} has invalid absolute URL".format('/sibling1/'))
self.assert200('/')
self.assert200('/sibling1/')
# Test exceptions
self.assertRaises(SimpleTextPage.DoesNotExist, lambda: SimpleTextPage.objects.get_for_path('/not-found/'))
def test_get_append_slash_redirect(self):
"""
The dispatcher should implement APPEND_SLASH handling,
because ``fluent_pages.urls`` is a catch-all for ALL URLs, including those without a trailing slash.
"""
with override_settings(APPEND_SLASH=True):
self.assertRedirects(self.client.get('/sibling1'), '/sibling1/', status_code=302)
with override_settings(APPEND_SLASH=False):
self.assert404('/sibling1', 'APPEND_SLASH=False: ')
# However, non existing pages should not get an APPEND_SLASH redirect
self.assert404('/not-found')
def test_hide_unpublished(self):
"""
Unpublished pages should not appear
"""
self.assertTrue(SimpleTextPage.objects.filter(slug='unpublished').exists(), "page /unpublished/ should exist in the database.")
self.assert404('/unpublished/')
self.assert404('/unpublished') # With default APPEND_SLASH=True
def test_get_for_path_script_name(self):
"""
The dispatcher should support a different WSGIScriptAlias prefix.
"""
with script_name('/_test_subdir_/'):
sibling1 = Page.objects.get_for_path('/sibling1/')
self.assertEquals(sibling1.get_absolute_url(), '/_test_subdir_/sibling1/', "UrlNode.get_absolute_url() should take changes to SCRIPT_NAME into account (got: {0}).".format(sibling1.get_absolute_url()))
# Note the test client always operates relative to the Django script_name root.
self.assert200('/')
self.assert200('/sibling1/')
def test_page_output(self):
"""
Pages should render output via the ``render_template``.
"""
# Test initial state
from fluent_pages.tests.testapp.page_type_plugins import SimpleTextPagePlugin # Import here as it needs an existing DB
self.assertEquals(SimpleTextPagePlugin.render_template, 'testapp/simpletextpage.html')
# Test how a normal page is rendered
response = self.client.get('/sibling1/')
self.assertTemplateUsed(response, 'testapp/simpletextpage.html')
self.assertContains(response, '<div id="test_contents">TEST_CONTENTS</div>')
def test_app_page_output(self):
"""
The resolver should detect that the plugin has an URLconf that overlays the CMS page index url.
"""
# Test initial state
from fluent_pages.tests.testapp.page_type_plugins import WebShopPagePlugin
self.assertEquals(WebShopPagePlugin.urls, 'fluent_pages.tests.testapp.urls_webshop')
response = self.client.get('/shop/')
self.assertContains(response, 'test_webshop: index_page') # The URLconf is an overlay over the standard get_response()
def test_app_page_url(self):
"""
The URL that is a mix of DB page + URLconf should match and return.
"""
response = self.client.get('/shop/foobar/')
self.assertContains(response, 'test_webshop: article: foobar')
def test_app_page_append_slash(self):
"""
The APPEND_SLASH setting should also work for app page URLs
"""
with override_settings(APPEND_SLASH=True):
self.assertRedirects(self.client.get('/shop'), '/shop/', status_code=302)
self.assertRedirects(self.client.get('/shop/article1'), '/shop/article1/', status_code=302)
with override_settings(APPEND_SLASH=False):
self.assert404('/shop', 'APPEND_SLASH=False')
self.assert404('/shop/article1', 'APPEND_SLASH=False')
# However, non resolvable app pages should not get an APPEND_SLASH redirect
self.assert404('/shop/article1/foo')
def test_plain_text_file(self):
"""
URLs that point to files should return properly.
"""
response = self.client.get('/README')
self.assertEqual(response.content, 'This is the README')
self.assertEqual(response['Content-Type'], 'text/plain')
def test_unicode_404(self):
"""
Urls with unicode characters should return proper 404 pages, not crash on it.
"""
url = u'/foo/\xe9\u20ac\xdf\xed\xe0\xf8\xeb\xee\xf1\xfc/'
self.assert404(url)
def test_admin_redirect(self):
"""
Urls can end with @admin to be redirected to the admin.
"""
self.assertRedirects(self.client.get('/@admin'), 'http://testserver/admin/fluent_pages/page/1/', status_code=302)
self.assertRedirects(self.client.get('/sibling1/@admin'), 'http://testserver/admin/fluent_pages/page/2/', status_code=302)
self.assertRedirects(self.client.get('/shop/@admin'), 'http://testserver/admin/fluent_pages/page/4/', status_code=302)
        # Anything that doesn't match is redirected to the URL without the @admin suffix
self.assertRedirects(self.client.get('/unpublished/@admin'), 'http://testserver/unpublished/', status_code=302, target_status_code=404)
self.assertRedirects(self.client.get('/non-existent/@admin'), 'http://testserver/non-existent/', status_code=302, target_status_code=404)
        # The same also applies to application URLs. This could be extended in the future to resolve
        # to the app page or the actual object; currently this is not supported.
self.assertRedirects(self.client.get('/shop/foobar/@admin'), 'http://testserver/shop/foobar/', status_code=302)
class UrlDispatcherNonRootTests(AppTestCase):
"""
Tests for URL resolving with a non-root URL include.
"""
urls = 'fluent_pages.tests.testapp.urls_nonroot'
@classmethod
def setUpTree(cls):
SimpleTextPage.objects.create(title="Text1", slug="sibling1", status=SimpleTextPage.PUBLISHED, author=cls.user, contents="TEST_CONTENTS")
def test_urlconf_root(self):
"""
        The dispatcher should support a URLconf where fluent_pages.url is not at the root.
"""
sibling1 = Page.objects.get_for_path('/sibling1/') # Stored path is always relative to ROOT
self.assert200('/pages/sibling1/')
self.assert404('/sibling1/')
        self.assertEquals(sibling1.get_absolute_url(), '/pages/sibling1/', "UrlNode.get_absolute_url() should take the other URLconf root into account (got: {0}).".format(sibling1.get_absolute_url()))
sibling1.save()
self.assertEquals(sibling1._cached_url, '/sibling1/', "UrlNode keeps paths relative to the include()")
# NOTE: admin needs to be tested elsewhere for this too.
def test_admin_redirect(self):
"""
Urls can end with @admin to be redirected to the admin.
"""
self.assertRedirects(self.client.get('/pages/sibling1/@admin'), 'http://testserver/admin/fluent_pages/page/1/', status_code=302)
self.assertRedirects(self.client.get('/pages/non-existent/@admin'), 'http://testserver/pages/non-existent/', status_code=302, target_status_code=404)
| [
"[email protected]"
] | |
73cc2b03cd8658a38a334360c792ef745c6ead8b | b94c4c88aa3661696a8f8a6677c845a882d4091e | /src/main.py | b0ed8bfe850e44f49de124164adce4397b6cf652 | [
"MIT"
] | permissive | pestefo/Coon | b4f1e62b97a330844140f00057a5bda184cf774e | 8caa8b683b54b0c0047cbe1095ccf5576352c6ac | refs/heads/master | 2021-01-02T21:45:19.393150 | 2020-02-11T16:49:36 | 2020-02-11T16:49:36 | 239,814,056 | 0 | 0 | MIT | 2020-02-11T16:52:15 | 2020-02-11T16:47:54 | Python | UTF-8 | Python | false | false | 654 | py | """Script for Default keyword"""
import sys
import currency
from currency.workflow import Workflow3
def main(workflow):
"""The main workflow entry function"""
method = str(workflow.args.pop(0))
if method in currency.__all__:
workflow.run(getattr(currency, method))
else:
workflow.run(currency.help_me)
if __name__ == "__main__":
WF = Workflow3(
default_settings={
"favorites": ["EUR", "CNY", "JPY", "GBP"]
},
update_settings={
"github_slug": "tomy0000000/coon",
"frequency": 7
},
help_url="https://git.io/fjD6M")
sys.exit(WF.run(main))
| [
"[email protected]"
] | |
7f0a093f4f97b501f6e1600dd3dd4537b88a9ef0 | 375e5bca82843647941068bd7634cf7adf2015ca | /tests/test_transform_affine3.py | d2f6651ed72b7052691ec6a9f54e6165a33fa6de | [
"MIT"
] | permissive | civodlu/trw | cd57e7bded7fdb0a9d623ed9cd50645fab96583b | 11c59dea0072d940b036166be22b392bb9e3b066 | refs/heads/master | 2023-02-08T09:56:39.203340 | 2023-02-07T14:22:16 | 2023-02-07T14:22:16 | 195,147,670 | 12 | 2 | MIT | 2020-10-19T15:24:11 | 2019-07-04T01:19:31 | Python | UTF-8 | Python | false | false | 4,555 | py | import os
import math
import unittest
import trw
import torch
import numpy as np
class TestTransformsAffine(unittest.TestCase):
def test_2d_identity_nn(self):
matrix2 = [
[1, 0, 0],
[0, 1, 0],
]
matrix2 = torch.FloatTensor(matrix2)
images = torch.arange(2 * 5 * 10, dtype=torch.float32).view((2, 1, 5, 10))
images_tfm2 = trw.transforms.affine_transform(images, matrix2, interpolation='nearest')
assert int((images == images_tfm2).all()) == 1
def test_3d_identity_nn(self):
matrix = [
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
]
matrix = torch.FloatTensor(matrix)
images = torch.arange(2 * 5 * 10 * 3, dtype=torch.float32).view((2, 1, 5, 10, 3))
images_tfm = trw.transforms.affine_transform(images, matrix)
assert torch.max((images - images_tfm).abs()) < 1e-4
def test_2d_translation_nn(self):
images = torch.arange(2 * 5 * 10, dtype=torch.float).view((2, 1, 5, 10))
        m = [
            [1, 0, -1],
            [0, 1, -2],
            [0, 0, 1]
        ]
        m = torch.FloatTensor(m)
m = trw.transforms.to_voxel_space_transform(m, images[0].shape)
images_tfm = trw.transforms.affine_transform(images, torch.cat((m.unsqueeze(0), m.unsqueeze(0))), interpolation='nearest')
assert torch.max(torch.abs(images[:, :, 2:, 1:] - images_tfm[:, :, :-2, :-1])) < 1e-4
def test_2d_image(self):
matrix = trw.transforms.affine_transformation_translation([80, 0])
matrix = torch.mm(matrix, trw.transforms.affine_transformation_rotation2d(1 * math.pi / 4))
from PIL import Image
image_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'tutorials', 'input_images', '2007_008764.jpg')
images = Image.open(image_path)
images = np.asarray(images).transpose((2, 0, 1))
images = images.reshape([1] + list(images.shape))
images = torch.from_numpy(images).float()
images_tfm = trw.transforms.affine_transform(
images,
trw.transforms.to_voxel_space_transform(matrix, images[0].shape),
interpolation='nearest')
i = np.uint8(images_tfm.numpy())[0, 0]
options = trw.train.Options()
root = options.workflow_options.logging_directory
Image.fromarray(np.stack((i, i, i), axis=2)).save(os.path.join(root, 'transformed.png'))
def test_affine_2d_joint(self):
options = trw.train.Options()
root = options.workflow_options.logging_directory
from PIL import Image
image_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'tutorials', 'input_images', '2007_008764.jpg')
images = Image.open(image_path)
images.save(os.path.join(root, 'affine_original.png'))
images = np.asarray(images).transpose((2, 0, 1))
images = images.reshape([1] + list(images.shape))
images = torch.from_numpy(images).float()
batch = {
'images': images,
'images_joint': images
}
tfm = trw.transforms.TransformAffine([45, 50], [0.7, 1.3], .5, padding_mode='reflection')
for n in range(10):
transformed_batch = tfm(batch)
i = np.uint8(transformed_batch['images'].numpy())[0, 0]
Image.fromarray(np.stack((i, i, i), axis=2)).save(os.path.join(root, f'affine_transformed_{n}.png'))
assert (transformed_batch['images'] == transformed_batch['images_joint']).all()
def test_affine_3d_joint(self):
options = trw.train.Options()
root = options.workflow_options.logging_directory
shape = [32, 64, 96]
shape2 = [32 // 2, 64 // 2, 96 // 2]
images = torch.ones(shape, dtype=torch.float32)
images[shape2[0]-5:shape2[0]+5, shape2[1]-10:shape2[1]+10, shape2[2]-15:shape2[2]+15] = 3.0
images = images.unsqueeze(0).unsqueeze(0) # add N, C components
batch = {
'images': images,
'images_joint': images
}
i = images.numpy()[0, 0]
np.save(os.path.join(root, f'affine_transformed_3d_original.png'), i)
tfm = trw.transforms.TransformAffine(0, 1, 0.9)
for n in range(10):
transformed_batch = tfm(batch)
i = transformed_batch['images'].numpy()[0, 0]
np.save(os.path.join(root, f'affine_transformed_3d_{n}.png'), i)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
a80d0ebfec737cf90fc2e079f2ebe80f10496421 | 562cc46d23d69f399a5f807d11ac567d8f30b567 | /env/bin/symilar | b0ebfe4bd208743b9437624b9eda91935d85e19b | [] | no_license | JahanzebNawaz/DJANGO-CRUD | ab77a31c84134f10aee43b4fdc1900b4223b36ba | b3e848f8752c3755bcd26aeefe59eaedbcc06e8d | refs/heads/master | 2022-01-26T13:29:30.218067 | 2020-05-03T11:43:58 | 2020-05-03T11:43:58 | 220,016,612 | 0 | 0 | null | 2020-05-03T11:43:59 | 2019-11-06T14:32:25 | Python | UTF-8 | Python | false | false | 258 | #!/home/jk/JKROOT/GITHUB_REPO/DJANGO-CRUD/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pylint import run_symilar
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run_symilar())
| [
"[email protected]"
] | ||
f6e12a2fecf32423abd6f204856a0263e6998195 | f0526130407daf1484ba0a228fb16683aa0fa509 | /muchopper/common/queries.py | c45db0eec3bcf31cd446399bd333b29bb42491fa | [
"Apache-2.0"
] | permissive | ericschdt/muchopper | f8538f9e58b3512a3341731bc983fa2fbe5a749f | d621b8853360cfdc9f745aefbb9f9ec5d02d1a06 | refs/heads/master | 2020-08-06T21:10:18.316372 | 2019-10-06T11:30:17 | 2019-10-06T11:30:17 | 213,155,935 | 0 | 0 | NOASSERTION | 2019-10-06T11:30:18 | 2019-10-06T11:27:11 | null | UTF-8 | Python | false | false | 2,611 | py | import shlex
import sqlalchemy
from . import model
def base_filter(q, include_closed=False):
if not include_closed:
q = q.filter(
model.MUC.is_open == True # NOQA
)
return q.filter(
model.MUC.is_hidden == False # NOQA
)
def base_query(session, *,
include_closed=False,
with_avatar_flag=False):
if with_avatar_flag:
q = session.query(
model.MUC,
model.PubliclyListedMUC,
model.Avatar.address != None, # NOQA
).join(
model.PubliclyListedMUC,
).outerjoin(
model.Avatar,
)
else:
q = session.query(
model.MUC,
model.PubliclyListedMUC
).join(
model.PubliclyListedMUC
)
return base_filter(q, include_closed=include_closed)
def common_query(session, *,
min_users=1,
**kwargs):
q = base_query(session, **kwargs)
if min_users > 0:
q = q.filter(
model.MUC.nusers_moving_average > min_users
)
return q.order_by(
model.MUC.nusers_moving_average.desc(),
model.MUC.address.asc(),
)
def chain_condition(conditional, new):
if conditional is None:
return new
return sqlalchemy.or_(conditional, new)
def filter_keywords(keywords, min_length):
keywords = set(
keyword
for keyword in (
keyword.strip()
for keyword in keywords
)
if len(keyword) >= min_length
)
return keywords
def prepare_keywords(query_string, min_length=3):
keywords = shlex.split(query_string)
return filter_keywords(keywords, min_length)
def apply_search_conditions(q,
keywords,
search_address,
search_description,
search_name):
for keyword in keywords:
conditional = None
if search_address:
conditional = chain_condition(
conditional,
model.PubliclyListedMUC.address.ilike("%" + keyword + "%")
)
if search_description:
conditional = chain_condition(
conditional,
model.PubliclyListedMUC.description.ilike("%" + keyword + "%")
)
if search_name:
conditional = chain_condition(
conditional,
model.PubliclyListedMUC.name.ilike("%" + keyword + "%")
)
q = q.filter(conditional)
return q
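

# A minimal usage sketch (not part of the original module): it shows how the
# helpers above are meant to compose into a keyword search. The `session`
# argument and this wrapper function are assumptions for illustration; only the
# functions defined above are real.
def example_keyword_search(session, query_string):
    # Split the query string and drop keywords below the default minimum length.
    keywords = prepare_keywords(query_string)
    # Start from the common ordered query (open, non-hidden, enough users).
    q = common_query(session)
    # Narrow it down by matching every keyword against address, description and name.
    q = apply_search_conditions(q, keywords,
                                search_address=True,
                                search_description=True,
                                search_name=True)
    return q.all()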
| [
"[email protected]"
] | |
ef7700a3b968cac227b909d38a28fa784053110b | 36e4a3581877736a501a74bfdfc10bbbd4386b8a | /tests/conftest.py | 23314e52187708a98fe75147a8829b79b72f0019 | [
"Apache-2.0"
] | permissive | valmac/lean-cli | 1620300e7bf9428df269bae26b47a4615525144a | 88a191afadf7bfe766665fa67c552390cb2e3951 | refs/heads/main | 2023-03-08T10:40:15.541980 | 2021-02-11T00:52:47 | 2021-02-11T00:52:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,500 | py | import os
from pathlib import Path
import pytest
from pyfakefs.fake_filesystem import FakeFilesystem
from responses import RequestsMock
from lean.container import container
# conftest.py is run by pytest before loading each testing module
# Fixtures defined here are therefore available in all testing modules
@pytest.fixture(autouse=True)
def mock_filesystem(fs: FakeFilesystem) -> FakeFilesystem:
"""A pytest fixture which mocks the filesystem before each test."""
# The "fs" argument triggers pyfakefs' own pytest fixture to register
# After pyfakefs has started all filesystem actions will happen on a fake in-memory filesystem
# Create a fake home directory and set the cwd to an empty directory
fs.create_dir(Path.home() / "testing")
os.chdir(Path.home() / "testing")
# Reset singletons so that fresh Path instances get created
container.reset_singletons()
return fs
@pytest.fixture(autouse=True)
def requests_mock() -> RequestsMock:
"""A pytest fixture which mocks the requests library before each test.
If a test makes an HTTP request which hasn't been mocked, the request will fail.
"""
with RequestsMock() as mock:
yield mock
@pytest.fixture(autouse=True)
def reset_container_overrides() -> None:
"""A pytest fixture which makes sure all container and provider overrides are reset before each test."""
for provider in container.traverse():
provider.reset_override()
container.reset_override()
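

# A minimal usage sketch (not part of the original conftest): because the
# fixtures above are autouse, an ordinary test function automatically runs
# against the fake in-memory filesystem and the mocked requests layer. The file
# name below is an assumption made purely for illustration.
def _example_test_runs_on_fake_filesystem() -> None:
    marker = Path.home() / "testing" / "marker.txt"
    marker.write_text("written to pyfakefs, never to the real disk")
    assert marker.exists()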
| [
"[email protected]"
] | |
ef4de32906a268554e3e0baa7f814fe784103ba9 | cb5f0731e84797ceaa295b840f24b7aa094b66ea | /lib/hypercorn/trio/h2.py | 60f43a10c2aabdae56a4047548e23d9b542b4a7e | [] | no_license | guruprasaad123/onet | da9de96675a0e38e09d972dcdf17c54a40acfeec | c5c580e4080160d2e3336663b2393b9b1c4245ba | refs/heads/master | 2020-05-19T05:59:39.744509 | 2019-05-06T15:10:32 | 2019-05-06T15:10:32 | 184,861,014 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,893 | py | from functools import partial
from itertools import chain
from typing import Callable, Dict, Iterable, Optional, Tuple, Type
import h2.config
import h2.connection
import h2.events
import h2.exceptions
import h11
import trio
import wsproto.connection
import wsproto.events
from .base import HTTPServer
from ..asgi.h2 import (
Data,
EndStream,
H2Event,
H2HTTPStreamMixin,
H2WebsocketStreamMixin,
Response,
ServerPush,
)
from ..asgi.utils import ASGIHTTPState, ASGIWebsocketState
from ..config import Config
from ..typing import ASGIFramework, H2AsyncStream
MAX_RECV = 2 ** 16
class MustCloseError(Exception):
pass
class H2HTTPStream(H2HTTPStreamMixin):
"""A HTTP Stream."""
def __init__(self, app: Type[ASGIFramework], config: Config, asend: Callable) -> None:
self.app = app
self.config = config
self.response: Optional[dict] = None
self.scope: Optional[dict] = None
self.state = ASGIHTTPState.REQUEST
self.asend = asend # type: ignore
self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10)
async def data_received(self, data: bytes) -> None:
await self.app_send_channel.send({"type": "http.request", "body": data, "more_body": True})
async def ended(self) -> None:
await self.app_send_channel.send({"type": "http.request", "body": b"", "more_body": False})
async def reset(self) -> None:
await self.app_send_channel.send({"type": "http.disconnect"})
async def close(self) -> None:
await self.app_send_channel.send({"type": "http.disconnect"})
async def asgi_receive(self) -> dict:
return await self.app_receive_channel.receive()
class H2WebsocketStream(H2WebsocketStreamMixin):
"""A Websocket Stream."""
def __init__(self, app: Type[ASGIFramework], config: Config, asend: Callable) -> None:
self.app = app
self.config = config
self.response: Optional[dict] = None
self.scope: Optional[dict] = None
self.state = ASGIWebsocketState.CONNECTED
self.connection: Optional[wsproto.connection.Connection] = None
self.asend = asend # type: ignore
self.app_send_channel, self.app_receive_channel = trio.open_memory_channel(10)
async def data_received(self, data: bytes) -> None:
self.connection.receive_data(data)
for event in self.connection.events():
if isinstance(event, wsproto.events.TextMessage):
await self.app_send_channel.send({"type": "websocket.receive", "text": event.data})
elif isinstance(event, wsproto.events.BytesMessage):
await self.app_send_channel.send({"type": "websocket.receive", "bytes": event.data})
elif isinstance(event, wsproto.events.Ping):
await self.asend(Data(self.connection.send(event.response())))
elif isinstance(event, wsproto.events.CloseConnection):
if self.connection.state == wsproto.connection.ConnectionState.REMOTE_CLOSING:
await self.asend(Data(self.connection.send(event.response())))
await self.app_send_channel.send({"type": "websocket.disconnect"})
break
async def ended(self) -> None:
await self.app_send_channel.send({"type": "websocket.disconnect"})
async def reset(self) -> None:
await self.app_send_channel.send({"type": "websocket.disconnect"})
async def close(self) -> None:
await self.app_send_channel.send({"type": "websocket.disconnect"})
async def asgi_put(self, message: dict) -> None:
await self.app_send_channel.send(message)
async def asgi_receive(self) -> dict:
return await self.app_receive_channel.receive()
class H2Server(HTTPServer):
def __init__(
self,
app: Type[ASGIFramework],
config: Config,
stream: trio.abc.Stream,
*,
upgrade_request: Optional[h11.Request] = None,
received_data: Optional[bytes] = None,
) -> None:
super().__init__(stream, "h2")
self.app = app
self.config = config
self.streams: Dict[int, H2AsyncStream] = {} # type: ignore
self.flow_control: Dict[int, trio.Event] = {}
self.send_lock = trio.Lock()
self.upgrade_request = upgrade_request
self.received_data = received_data
self.connection = h2.connection.H2Connection(
config=h2.config.H2Configuration(client_side=False, header_encoding=None)
)
self.connection.DEFAULT_MAX_INBOUND_FRAME_SIZE = config.h2_max_inbound_frame_size
self.connection.local_settings = h2.settings.Settings(
client=False,
initial_values={
h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: config.h2_max_concurrent_streams,
h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: config.h2_max_header_list_size,
h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1,
},
)
async def initiate(self) -> None:
if self.upgrade_request is None:
self.connection.initiate_connection()
if self.received_data:
await self.process_data(self.received_data)
else:
settings = ""
headers = []
for name, value in self.upgrade_request.headers:
if name.lower() == b"http2-settings":
settings = value.decode()
elif name.lower() == b"host":
headers.append((b":authority", value))
headers.append((name, value))
headers.append((b":method", self.upgrade_request.method))
headers.append((b":path", self.upgrade_request.target))
self.connection.initiate_upgrade_connection(settings)
event = h2.events.RequestReceived()
event.stream_id = 1
event.headers = headers
await self.create_stream(event, complete=True)
await self.send()
async def create_stream(
self, event: h2.events.RequestReceived, *, complete: bool = False
) -> None:
method: str
for name, value in event.headers:
if name == b":method":
method = value.decode("ascii").upper()
if method == "CONNECT":
self.streams[event.stream_id] = H2WebsocketStream(
self.app, self.config, partial(self.asend, event.stream_id)
)
else:
self.streams[event.stream_id] = H2HTTPStream(
self.app, self.config, partial(self.asend, event.stream_id)
)
if complete:
await self.streams[event.stream_id].ended()
self.nursery.start_soon(self.handle_request, event)
async def handle_request(self, event: h2.events.RequestReceived) -> None:
await self.streams[event.stream_id].handle_request(
event, self.scheme, self.client, self.server
)
if (
self.connection.state_machine.state is not h2.connection.ConnectionState.CLOSED
and event.stream_id in self.connection.streams
and not self.connection.streams[event.stream_id].closed
):
# The connection is not closed and there has been an error
# preventing the stream from closing correctly.
self.connection.reset_stream(event.stream_id)
await self.streams[event.stream_id].close()
del self.streams[event.stream_id]
async def handle_connection(self) -> None:
try:
async with trio.open_nursery() as nursery:
self.nursery = nursery
await self.initiate()
await self.read_data()
except trio.TooSlowError:
self.connection.close_connection()
await self.send()
except MustCloseError:
await self.send()
except (trio.BrokenResourceError, trio.ClosedResourceError):
pass
finally:
for stream in self.streams.values():
await stream.close()
await self.aclose()
async def read_data(self) -> None:
while True:
try:
with trio.fail_after(self.config.keep_alive_timeout):
data = await self.stream.receive_some(MAX_RECV)
except trio.TooSlowError:
if len(self.streams) == 0:
raise
else:
continue # Keep waiting
if data == b"":
return
await self.process_data(data)
async def process_data(self, data: bytes) -> None:
try:
events = self.connection.receive_data(data)
except h2.exceptions.ProtocolError:
raise MustCloseError()
else:
for event in events:
if isinstance(event, h2.events.RequestReceived):
await self.create_stream(event)
elif isinstance(event, h2.events.DataReceived):
await self.streams[event.stream_id].data_received(event.data)
self.connection.acknowledge_received_data(
event.flow_controlled_length, event.stream_id
)
elif isinstance(event, h2.events.StreamReset):
await self.streams[event.stream_id].reset()
elif isinstance(event, h2.events.StreamEnded):
await self.streams[event.stream_id].ended()
elif isinstance(event, h2.events.WindowUpdated):
self.window_updated(event.stream_id)
elif isinstance(event, h2.events.ConnectionTerminated):
raise MustCloseError()
await self.send()
async def asend(self, stream_id: int, event: H2Event) -> None:
connection_state = self.connection.state_machine.state
stream_state = self.connection.streams[stream_id].state_machine.state
if (
connection_state == h2.connection.ConnectionState.CLOSED
or stream_state == h2.stream.StreamState.CLOSED
):
return
if isinstance(event, Response):
self.connection.send_headers(
stream_id, event.headers + self.response_headers() # type: ignore
)
await self.send()
elif isinstance(event, EndStream):
self.connection.end_stream(stream_id)
await self.send()
elif isinstance(event, Data):
await self.send_data(stream_id, event.data)
elif isinstance(event, ServerPush):
await self.server_push(stream_id, event.path, event.headers)
async def send_data(self, stream_id: int, data: bytes) -> None:
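        # HTTP/2 flow control: block until the peer has granted window credit
        # for this stream, send at most min(credit, max outbound frame size)
        # bytes, and repeat until the whole payload has been written.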
while True:
while not self.connection.local_flow_control_window(stream_id):
await self.wait_for_flow_control(stream_id)
chunk_size = min(len(data), self.connection.local_flow_control_window(stream_id))
chunk_size = min(chunk_size, self.connection.max_outbound_frame_size)
if chunk_size < 1: # Pathological client sending negative window sizes
continue
self.connection.send_data(stream_id, data[:chunk_size])
await self.send()
data = data[chunk_size:]
if not data:
break
async def send(self) -> None:
data = self.connection.data_to_send()
if data == b"":
return
async with self.send_lock:
try:
await self.stream.send_all(data)
except trio.BrokenResourceError:
pass
async def wait_for_flow_control(self, stream_id: int) -> None:
event = trio.Event()
self.flow_control[stream_id] = event
await event.wait()
def window_updated(self, stream_id: Optional[int]) -> None:
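        # A WINDOW_UPDATE without a stream id (or for stream 0) enlarges the
        # connection-wide window, so every stream parked in
        # wait_for_flow_control is released; otherwise only the named stream
        # is woken up.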
if stream_id is None or stream_id == 0:
# Unblock all streams
stream_ids = list(self.flow_control.keys())
for stream_id in stream_ids:
event = self.flow_control.pop(stream_id)
event.set()
elif stream_id is not None:
if stream_id in self.flow_control:
event = self.flow_control.pop(stream_id)
event.set()
async def server_push(
self, stream_id: int, path: str, headers: Iterable[Tuple[bytes, bytes]]
) -> None:
push_stream_id = self.connection.get_next_available_stream_id()
stream = self.streams[stream_id]
for name, value in stream.scope["headers"]:
if name == b":authority":
authority = value
request_headers = [
(name, value)
for name, value in chain(
[
(b":method", b"GET"),
(b":path", path.encode()),
(b":scheme", stream.scope["scheme"].encode()),
(b":authority", authority),
],
headers,
self.response_headers(),
)
]
try:
self.connection.push_stream(
stream_id=stream_id,
promised_stream_id=push_stream_id,
request_headers=request_headers,
)
except h2.exceptions.ProtocolError:
# Client does not accept push promises or we are trying to
# push on a push promises request.
pass
else:
event = h2.events.RequestReceived()
event.stream_id = push_stream_id
event.headers = request_headers
await self.create_stream(event, complete=True)
@property
def scheme(self) -> str:
return "https" if self._is_ssl else "http"
| [
"[email protected]"
] | |
d92546f21afa2d9f3d90a90399e84e3bb189d0eb | 1966d4ee937abc2febb80af14ea37b3316428ee9 | /HackerRank/By Challenge/Charging the Batteries/python/solution.py | 5159ded20b702117fd27463677e5700549ec4b56 | [] | no_license | spanktastic2120/fun | 8083ea33c014062ef791192f1d25d2c3bc45c7fa | de23f404a0505576e00730d06b32aac4ae1e7e75 | refs/heads/master | 2021-06-03T23:39:31.305198 | 2019-08-05T07:55:13 | 2019-08-05T07:55:13 | 18,019,488 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,027 | py | #!/bin/python
import sys
if __name__ == "__main__":
n, m, k = map(int, raw_input().strip().split(' '))
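    # Every socket lies on the boundary of the n-by-n square, so it can be
    # mapped to a single scalar position along the unrolled perimeter of length
    # 4*n (left edge, then top, then right, then bottom). For example, with
    # n = 2 a socket at (2, 1) on the right edge maps to 3*n - y = 5. After
    # sorting these positions, the answer is the smallest span of any window of
    # k consecutive sockets, including windows that wrap past 4*n back to 0.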
sockets = []
for _ in xrange(m):
x, y = map(int, raw_input().strip().split(' '))
# left
if not x:
sockets.append(y)
# bottom
elif not y:
sockets.append((4*n) - x)
# right
elif x == n:
sockets.append((3*n) - y)
# top
else:
sockets.append(n + x)
#print sockets[-1]
sockets = sorted(sockets)
best = n*4
# check contiguous sockets
    for i in xrange(m - k + 1):  # include the last non-wrapping window
if sockets[i+k-1] - sockets[i] < best:
best = sockets[i+k-1] - sockets[i]
if not best:
break
# check wrapped sockets
if best:
for i in xrange(k-1):
if ((sockets[i] + (4 * n)) - sockets[i + 1 - k]) < best:
best = (sockets[i] + (4 * n)) - sockets[i + 1 - k]
if not best:
break
print best | [
"[email protected]"
] | |
f1ecf9ada5f4de3b1dc3427fc4476c252663ee76 | fab14fae2b494068aa793901d76464afb965df7e | /benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/3-sender_receiver_19.py | 10d2eb6c1a1a4703d74ec66ea4081c7a6e92abee | [
"MIT"
] | permissive | teodorov/F3 | 673f6f9ccc25acdfdecbfc180f439253474ba250 | c863215c318d7d5f258eb9be38c6962cf6863b52 | refs/heads/master | 2023-08-04T17:37:38.771863 | 2021-09-16T07:38:28 | 2021-09-16T07:38:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,986 | py | from typing import FrozenSet
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def make_enum(menv, v_name: str, enum_size: int):
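    # Encodes an enum with `enum_size` values using ceil(log2(enum_size))
    # Boolean state variables; vals[i] (and x_vals[i] for the next-state copy)
    # is the conjunction of bit literals matching the binary representation of
    # i. For example, with enum_size = 3 there are 2 bits and value 2 ('10')
    # becomes bit1 & !bit0.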
bool_type = msat_get_bool_type(menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(menv, b_vars[idx][0]),
msat_make_not(menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(menv, pred, it[0])
x_pred = msat_make_and(menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
r2s, x_r2s = decl_consts(menv, "r2s", int_type)
s2r, x_s2r = decl_consts(menv, "s2r", int_type)
delta, x_delta = decl_consts(menv, delta_name, real_type)
sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
for comp in [sender, receiver]:
for s, x_s in comp.symb2next.items():
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
init = msat_make_and(menv, receiver.init, sender.init)
trans = msat_make_and(menv, receiver.trans, sender.trans)
# invar delta >= 0
init = msat_make_and(menv, init,
msat_make_geq(menv, delta, zero))
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> (r2s' = r2s & s2r' = s2r)
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_equal(menv, x_r2s, r2s),
msat_make_equal(menv, x_s2r, s2r))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, lhs, rhs))
# (G F !s.stutter) -> G (s.wait_ack -> F s.send)
lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
enc.make_F(sender.send)))
ltl = msat_make_impl(menv, lhs, rhs)
return TermMap(curr2next), init, trans, ltl
class Module:
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
c_name = "{}_{}".format(self.name, v_name)
return make_enum(self.menv, c_name, enum_size)
class Sender(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
loc, x_loc = self._symb("l", bool_type)
evt, x_evt = self._symb("evt", bool_type)
msg_id, x_msg_id = self._symb("msg_id", int_type)
timeout, x_timeout = self._symb("timeout", real_type)
c, x_c = self._symb("c", real_type)
self.move = evt
self.stutter = msat_make_not(menv, evt)
self.x_move = x_evt
self.x_stutter = msat_make_not(menv, x_evt)
self.send = loc
self.wait_ack = msat_make_not(menv, loc)
self.x_send = x_loc
self.x_wait_ack = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
timeout: x_timeout, c: x_c}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
base_timeout = one
# send & c = 0 & msg_id = 0
self.init = msat_make_and(menv,
msat_make_and(menv, self.send,
msat_make_equal(menv, c,
zero)),
msat_make_equal(menv, msg_id, zero))
# invar: wait_ack -> c <= timeout
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.wait_ack,
msat_make_leq(menv, c, timeout)))
self.trans = msat_make_impl(menv, self.x_wait_ack,
msat_make_leq(menv, x_c, x_timeout))
# delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
# c' = c + delta & out_c' = out_c
lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_msg_id, msg_id)),
msat_make_and(menv,
msat_make_equal(menv, x_timeout, timeout),
msat_make_equal(menv, x_c,
msat_make_plus(menv, c, delta))))
rhs = msat_make_and(menv, rhs,
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, self.move,
msat_make_equal(menv, delta, zero))
# (send & send') ->
# (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_send))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id, msg_id),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (send & wait_ack') ->
# (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_wait_ack))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id,
msat_make_plus(menv, msg_id, one)),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
        # (wait_ack) -> (c' = 0 & out_c' = out_c &
        #               (wait_ack' <-> (in_c != msg_id & c >= timeout)))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs_iff = msat_make_and(menv,
msat_make_not(menv,
msat_make_equal(menv, in_c,
msg_id)),
msat_make_geq(menv, c, timeout))
rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
rhs = msat_make_and(menv,
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c,
out_c)),
rhs_iff)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & wait_ack') -> (timeout' > timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack,
self.x_wait_ack))
rhs = msat_make_gt(menv, x_timeout, timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs = msat_make_iff(menv, self.x_send,
msat_make_and(menv,
msat_make_equal(menv, in_c, msg_id),
msat_make_lt(menv, c, timeout)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & send') -> (timeout' = base_timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack, self.x_send))
rhs = msat_make_equal(menv, x_timeout, base_timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
loc, x_loc = self._symb("l", bool_type)
self.wait = loc
self.work = msat_make_not(menv, loc)
self.x_wait = x_loc
self.x_work = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc}
zero = msat_make_number(menv, "0")
# wait
self.init = self.wait
# delta > 0 -> loc' = loc & out_c' = out_c
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_impl(menv, lhs, rhs)
disc_t = msat_make_equal(menv, delta, zero)
# wait -> (wait' <-> in_c = out_c)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_iff(menv, self.x_wait,
msat_make_equal(menv, in_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & wait') -> (out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_wait))
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & work') -> out_c' = in_c
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_work))
rhs = msat_make_equal(menv, x_out_c, in_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# work -> out_c' = out_c
lhs = msat_make_and(menv, disc_t, self.work)
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
delta = mgr.Symbol(delta_name, types.REAL)
r2s = mgr.Symbol("r2s", types.INT)
    s2r = mgr.Symbol("s2r", types.INT)
s_l = mgr.Symbol("s_l", types.BOOL)
s_evt = mgr.Symbol("s_evt", types.BOOL)
s_msg_id = mgr.Symbol("s_msg_id", types.INT)
s_timeout = mgr.Symbol("s_timeout", types.REAL)
s_c = mgr.Symbol("s_c", types.REAL)
r_l = mgr.Symbol("r_l", types.BOOL)
symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
r_l])
x_delta = symb_to_next(mgr, delta)
x_r2s = symb_to_next(mgr, r2s)
x_s2r = symb_to_next(mgr, s2r)
x_s_l = symb_to_next(mgr, s_l)
x_s_evt = symb_to_next(mgr, s_evt)
x_s_msg_id = symb_to_next(mgr, s_msg_id)
x_s_timeout = symb_to_next(mgr, s_timeout)
x_s_c = symb_to_next(mgr, s_c)
x_r_l = symb_to_next(mgr, r_l)
res = []
r0 = mgr.Real(0)
r1 = mgr.Real(1)
i0 = mgr.Int(0)
i1 = mgr.Int(1)
loc0 = Location(env, s_evt)
loc0.set_progress(0, x_s_evt)
hint = Hint("h_s_evt0", env, frozenset([s_evt]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_msg_id, i0))
loc0.set_progress(0, mgr.Equals(x_s_msg_id, i0))
hint = Hint("h_s_msg_id0", env, frozenset([s_msg_id]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(0, x_r_l)
hint = Hint("h_r_l0", env, frozenset([r_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
return frozenset(res)
| [
"[email protected]"
] | |
22137631ea36c7cc351b63715fcd7365718497c2 | 1729f98dcd81506ef5e5e6ded8b539894a647680 | /tensorflow/python/layers/convolutional.py | fbb13bb72c435ad3675a8f3f31c568952c043743 | [
"Apache-2.0"
] | permissive | PipelineAI/tensorflow | f539227fd5d3f304b4f246877e35303dbd388a0c | a0b68c666b8a06d237cc6776183ab8cd31055fcb | refs/heads/r1.5 | 2021-05-05T21:54:02.830548 | 2018-01-21T08:48:53 | 2018-01-21T08:48:53 | 115,791,564 | 0 | 1 | Apache-2.0 | 2018-01-15T05:38:46 | 2017-12-30T11:08:37 | C++ | UTF-8 | Python | false | false | 77,620 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Contains the convolutional layer classes and their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
class _Conv(base.Layer):
"""Abstract nD convolution layer (private, used as implementation base).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of n integers, specifying the
length of the convolution window.
strides: An integer or tuple/list of n integers,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, ..., channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, ...)`.
dilation_rate: An integer or tuple/list of n integers, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, rank,
filters,
kernel_size,
strides=1,
padding='valid',
data_format='channels_last',
dilation_rate=1,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(_Conv, self).__init__(trainable=trainable, name=name,
activity_regularizer=activity_regularizer,
**kwargs)
self.rank = rank
self.filters = filters
self.kernel_size = utils.normalize_tuple(kernel_size, rank, 'kernel_size')
self.strides = utils.normalize_tuple(strides, rank, 'strides')
self.padding = utils.normalize_padding(padding)
self.data_format = utils.normalize_data_format(data_format)
self.dilation_rate = utils.normalize_tuple(
dilation_rate, rank, 'dilation_rate')
self.activation = activation
self.use_bias = use_bias
self.kernel_initializer = kernel_initializer
self.bias_initializer = bias_initializer
self.kernel_regularizer = kernel_regularizer
self.bias_regularizer = bias_regularizer
self.kernel_constraint = kernel_constraint
self.bias_constraint = bias_constraint
self.input_spec = base.InputSpec(ndim=self.rank + 2)
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis].value is None:
raise ValueError('The channel dimension of the inputs '
'should be defined. Found `None`.')
input_dim = input_shape[channel_axis].value
kernel_shape = self.kernel_size + (input_dim, self.filters)
self.kernel = self.add_variable(name='kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
self.bias = None
self.input_spec = base.InputSpec(ndim=self.rank + 2,
axes={channel_axis: input_dim})
self._convolution_op = nn_ops.Convolution(
input_shape,
filter_shape=self.kernel.get_shape(),
dilation_rate=self.dilation_rate,
strides=self.strides,
padding=self.padding.upper(),
data_format=utils.convert_data_format(self.data_format,
self.rank + 2))
self.built = True
def call(self, inputs):
outputs = self._convolution_op(inputs, self.kernel)
if self.use_bias:
if self.data_format == 'channels_first':
if self.rank == 1:
# nn.bias_add does not accept a 1D input tensor.
bias = array_ops.reshape(self.bias, (1, self.filters, 1))
outputs += bias
if self.rank == 2:
outputs = nn.bias_add(outputs, self.bias, data_format='NCHW')
if self.rank == 3:
# As of Mar 2017, direct addition is significantly slower than
# bias_add when computing gradients. To use bias_add, we collapse Z
# and Y into a single dimension to obtain a 4D input tensor.
outputs_shape = outputs.shape.as_list()
outputs_4d = array_ops.reshape(outputs,
[outputs_shape[0], outputs_shape[1],
outputs_shape[2] * outputs_shape[3],
outputs_shape[4]])
outputs_4d = nn.bias_add(outputs_4d, self.bias, data_format='NCHW')
outputs = array_ops.reshape(outputs_4d, outputs_shape)
else:
outputs = nn.bias_add(outputs, self.bias, data_format='NHWC')
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_last':
space = input_shape[1:-1]
new_space = []
for i in range(len(space)):
new_dim = utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i])
new_space.append(new_dim)
return tensor_shape.TensorShape([input_shape[0]] + new_space +
[self.filters])
else:
space = input_shape[2:]
new_space = []
for i in range(len(space)):
new_dim = utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i])
new_space.append(new_dim)
return tensor_shape.TensorShape([input_shape[0], self.filters] +
new_space)
class Conv1D(_Conv):
"""1D convolution layer (e.g. temporal convolution).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of a single integer, specifying the
length of the 1D convolution window.
strides: An integer or tuple/list of a single integer,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
dilation_rate: An integer or tuple/list of a single integer, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=1,
padding='valid',
data_format='channels_last',
dilation_rate=1,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
    super(Conv1D, self).__init__(
rank=1,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
def conv1d(inputs,
filters,
kernel_size,
strides=1,
padding='valid',
data_format='channels_last',
dilation_rate=1,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for 1D convolution layer (e.g. temporal convolution).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of a single integer, specifying the
length of the 1D convolution window.
strides: An integer or tuple/list of a single integer,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
dilation_rate: An integer or tuple/list of a single integer, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = Conv1D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
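

# A hedged usage sketch (not part of the original source): a typical call to the
# functional interface above through the public `tf.layers.conv1d` alias,
# assuming `import tensorflow as tf` and a float32 input of shape
# [batch, length, channels]. The tensor names are illustrative only.
#
#   signal = tf.placeholder(tf.float32, shape=[None, 128, 1])
#   features = tf.layers.conv1d(signal, filters=32, kernel_size=5,
#                               padding='same', activation=tf.nn.relu)
#   # With padding='same' and stride 1, `features` has shape [None, 128, 32].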
class Conv2D(_Conv):
"""2D convolution layer (e.g. spatial convolution over images).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv2D, self).__init__(
rank=2,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
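# Example (illustrative sketch, not part of the original module): the
# object-oriented form of the layer documented above. Building the layer once
# and applying it to two tensors shares a single kernel/bias pair. Assumes
# TF 1.x graph mode and hypothetical float32 NHWC tensors `x1` and `x2`.
#
#   import tensorflow as tf
#   layer = tf.layers.Conv2D(filters=32, kernel_size=3, padding='same',
#                            activation=tf.nn.relu)
#   y1 = layer.apply(x1)   # creates the kernel and bias variables
#   y2 = layer.apply(x2)   # reuses the same variables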
def conv2d(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the 2D convolution layer.
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = Conv2D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
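# Example (illustrative sketch, not part of the original module): a small
# convolutional stack built with the functional `conv2d` wrapper above.
# Assumes TF 1.x graph mode and a hypothetical float32 NHWC placeholder
# `images`; `tf.layers.conv2d` is the public name of `conv2d`.
#
#   import tensorflow as tf
#   images = tf.placeholder(tf.float32, [None, 28, 28, 1])
#   net = tf.layers.conv2d(images, filters=32, kernel_size=3, padding='same',
#                          activation=tf.nn.relu, name='conv1')
#   net = tf.layers.conv2d(net, filters=64, kernel_size=3, strides=2,
#                          padding='same', activation=tf.nn.relu, name='conv2')
#   # A second tower sharing the 'conv1' weights via the `reuse` argument:
#   net2 = tf.layers.conv2d(images, filters=32, kernel_size=3, padding='same',
#                           activation=tf.nn.relu, name='conv1', reuse=True)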
class Conv3D(_Conv):
"""3D convolution layer (e.g. spatial convolution over volumes).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the convolution along the depth,
height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
dilation_rate: An integer or tuple/list of 3 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv3D, self).__init__(
rank=3,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
def conv3d(inputs,
filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the 3D convolution layer.
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the convolution along the depth,
height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
dilation_rate: An integer or tuple/list of 3 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = Conv3D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
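# Example (illustrative sketch, not part of the original module): the 3D
# functional wrapper above applied to volumetric data. Assumes TF 1.x graph
# mode and a hypothetical channels_last placeholder `volumes` shaped
# (batch, depth, height, width, channels).
#
#   import tensorflow as tf
#   volumes = tf.placeholder(tf.float32, [None, 16, 64, 64, 1])
#   feats = tf.layers.conv3d(volumes, filters=8, kernel_size=3,
#                            padding='same', activation=tf.nn.relu)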
class SeparableConv2D(Conv2D):
"""Depthwise separable 2D convolution.
This layer performs a depthwise convolution that acts separately on
channels, followed by a pointwise convolution that mixes channels.
If `use_bias` is True and a bias initializer is provided,
it adds a bias vector to the output.
It then optionally applies an activation function to produce the final output.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any `stride` value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
depth_multiplier: The number of depthwise convolution output channels for
each input channel. The total number of depthwise convolution output
channels will be equal to `num_filters_in * depth_multiplier`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
depthwise_initializer: An initializer for the depthwise convolution kernel.
pointwise_initializer: An initializer for the pointwise convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
depthwise_regularizer: Optional regularizer for the depthwise
convolution kernel.
pointwise_regularizer: Optional regularizer for the pointwise
convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
depthwise_constraint: Optional projection function to be applied to the
depthwise kernel after being updated by an `Optimizer` (e.g. used for
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
pointwise_constraint: Optional projection function to be applied to the
pointwise kernel after being updated by an `Optimizer`.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
depth_multiplier=1,
activation=None,
use_bias=True,
depthwise_initializer=None,
pointwise_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
depthwise_regularizer=None,
pointwise_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
depthwise_constraint=None,
pointwise_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(SeparableConv2D, self).__init__(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
        use_bias=use_bias,
        bias_initializer=bias_initializer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
**kwargs)
self.data_format = data_format
self.depth_multiplier = depth_multiplier
self.depthwise_initializer = depthwise_initializer
self.pointwise_initializer = pointwise_initializer
self.depthwise_regularizer = depthwise_regularizer
self.pointwise_regularizer = pointwise_regularizer
self.depthwise_constraint = depthwise_constraint
self.pointwise_constraint = pointwise_constraint
def build(self, input_shape):
if len(input_shape) < 4:
raise ValueError('Inputs to `SeparableConv2D` should have rank 4. '
'Received input shape:', str(input_shape))
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = 3
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs to '
'`SeparableConv2D` '
'should be defined. Found `None`.')
input_dim = int(input_shape[channel_axis])
self.input_spec = base.InputSpec(ndim=4, axes={channel_axis: input_dim})
depthwise_kernel_shape = (self.kernel_size[0],
self.kernel_size[1],
input_dim,
self.depth_multiplier)
pointwise_kernel_shape = (1, 1,
self.depth_multiplier * input_dim,
self.filters)
self.depthwise_kernel = self.add_variable(
name='depthwise_kernel',
shape=depthwise_kernel_shape,
initializer=self.depthwise_initializer,
regularizer=self.depthwise_regularizer,
constraint=self.depthwise_constraint,
trainable=True,
dtype=self.dtype)
self.pointwise_kernel = self.add_variable(
name='pointwise_kernel',
shape=pointwise_kernel_shape,
initializer=self.pointwise_initializer,
regularizer=self.pointwise_regularizer,
constraint=self.pointwise_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
self.bias = None
self.built = True
def call(self, inputs):
# Apply the actual ops.
if self.data_format == 'channels_last':
strides = (1,) + self.strides + (1,)
else:
strides = (1, 1) + self.strides
outputs = nn.separable_conv2d(
inputs,
self.depthwise_kernel,
self.pointwise_kernel,
strides=strides,
padding=self.padding.upper(),
rate=self.dilation_rate,
data_format=utils.convert_data_format(self.data_format, ndim=4))
if self.use_bias:
outputs = nn.bias_add(
outputs,
self.bias,
data_format=utils.convert_data_format(self.data_format, ndim=4))
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
rows = input_shape[2]
cols = input_shape[3]
else:
rows = input_shape[1]
cols = input_shape[2]
rows = utils.conv_output_length(rows, self.kernel_size[0],
self.padding, self.strides[0])
cols = utils.conv_output_length(cols, self.kernel_size[1],
self.padding, self.strides[1])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape(
[input_shape[0], self.filters, rows, cols])
else:
return tensor_shape.TensorShape(
[input_shape[0], rows, cols, self.filters])
def separable_conv2d(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
depth_multiplier=1,
activation=None,
use_bias=True,
depthwise_initializer=None,
pointwise_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
depthwise_regularizer=None,
pointwise_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
depthwise_constraint=None,
pointwise_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the depthwise separable 2D convolution layer.
This layer performs a depthwise convolution that acts separately on
channels, followed by a pointwise convolution that mixes channels.
If `use_bias` is True and a bias initializer is provided,
it adds a bias vector to the output.
It then optionally applies an activation function to produce the final output.
Arguments:
inputs: Input tensor.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any `stride` value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
depth_multiplier: The number of depthwise convolution output channels for
each input channel. The total number of depthwise convolution output
channels will be equal to `num_filters_in * depth_multiplier`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
depthwise_initializer: An initializer for the depthwise convolution kernel.
pointwise_initializer: An initializer for the pointwise convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
depthwise_regularizer: Optional regularizer for the depthwise
convolution kernel.
pointwise_regularizer: Optional regularizer for the pointwise
convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
depthwise_constraint: Optional projection function to be applied to the
depthwise kernel after being updated by an `Optimizer` (e.g. used for
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
pointwise_constraint: Optional projection function to be applied to the
pointwise kernel after being updated by an `Optimizer`.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = SeparableConv2D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
depth_multiplier=depth_multiplier,
activation=activation,
use_bias=use_bias,
depthwise_initializer=depthwise_initializer,
pointwise_initializer=pointwise_initializer,
bias_initializer=bias_initializer,
depthwise_regularizer=depthwise_regularizer,
pointwise_regularizer=pointwise_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
depthwise_constraint=depthwise_constraint,
pointwise_constraint=pointwise_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
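# Example (illustrative sketch, not part of the original module): a
# depthwise-separable convolution via the functional wrapper above. With
# depth_multiplier=2 the depthwise stage emits 2 channels per input channel
# before the 1x1 pointwise stage maps them to `filters` outputs. Assumes
# TF 1.x graph mode and a hypothetical float32 NHWC placeholder `images`.
#
#   import tensorflow as tf
#   images = tf.placeholder(tf.float32, [None, 32, 32, 3])
#   feats = tf.layers.separable_conv2d(images, filters=64, kernel_size=3,
#                                      depth_multiplier=2, padding='same')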
class Conv2DTranspose(Conv2D):
"""Transposed 2D convolution layer (sometimes called 2D Deconvolution).
The need for transposed convolutions generally arises
from the desire to use a transformation going in the opposite direction
of a normal convolution, i.e., from something that has the shape of the
output of some convolution to something that has the shape of its input
while maintaining a connectivity pattern that is compatible with
said convolution.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 positive integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
padding: one of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv2DTranspose, self).__init__(
filters,
kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
**kwargs)
self.input_spec = base.InputSpec(ndim=4)
def build(self, input_shape):
if len(input_shape) != 4:
raise ValueError('Inputs should have rank 4. Received input shape: ' +
str(input_shape))
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs '
'should be defined. Found `None`.')
input_dim = input_shape[channel_axis]
self.input_spec = base.InputSpec(ndim=4, axes={channel_axis: input_dim})
kernel_shape = self.kernel_size + (self.filters, input_dim)
self.kernel = self.add_variable(name='kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
self.bias = None
self.built = True
def call(self, inputs):
inputs_shape = array_ops.shape(inputs)
batch_size = inputs_shape[0]
if self.data_format == 'channels_first':
c_axis, h_axis, w_axis = 1, 2, 3
else:
c_axis, h_axis, w_axis = 3, 1, 2
height, width = inputs_shape[h_axis], inputs_shape[w_axis]
kernel_h, kernel_w = self.kernel_size
stride_h, stride_w = self.strides
# Infer the dynamic output shape:
out_height = utils.deconv_output_length(height,
kernel_h,
self.padding,
stride_h)
out_width = utils.deconv_output_length(width,
kernel_w,
self.padding,
stride_w)
if self.data_format == 'channels_first':
output_shape = (batch_size, self.filters, out_height, out_width)
strides = (1, 1, stride_h, stride_w)
else:
output_shape = (batch_size, out_height, out_width, self.filters)
strides = (1, stride_h, stride_w, 1)
output_shape_tensor = array_ops.stack(output_shape)
outputs = nn.conv2d_transpose(
inputs,
self.kernel,
output_shape_tensor,
strides,
padding=self.padding.upper(),
data_format=utils.convert_data_format(self.data_format, ndim=4))
if context.in_graph_mode():
# Infer the static output shape:
out_shape = inputs.get_shape().as_list()
out_shape[c_axis] = self.filters
out_shape[h_axis] = utils.deconv_output_length(out_shape[h_axis],
kernel_h,
self.padding,
stride_h)
out_shape[w_axis] = utils.deconv_output_length(out_shape[w_axis],
kernel_w,
self.padding,
stride_w)
outputs.set_shape(out_shape)
if self.use_bias:
outputs = nn.bias_add(
outputs,
self.bias,
data_format=utils.convert_data_format(self.data_format, ndim=4))
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = list(input_shape)
if self.data_format == 'channels_first':
c_axis, h_axis, w_axis = 1, 2, 3
else:
c_axis, h_axis, w_axis = 3, 1, 2
kernel_h, kernel_w = self.kernel_size
stride_h, stride_w = self.strides
output_shape[c_axis] = self.filters
output_shape[h_axis] = utils.deconv_output_length(
output_shape[h_axis], kernel_h, self.padding, stride_h)
output_shape[w_axis] = utils.deconv_output_length(
output_shape[w_axis], kernel_w, self.padding, stride_w)
return tensor_shape.TensorShape(output_shape)
def conv2d_transpose(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for transposed 2D convolution layer.
The need for transposed convolutions generally arises
from the desire to use a transformation going in the opposite direction
of a normal convolution, i.e., from something that has the shape of the
output of some convolution to something that has the shape of its input
while maintaining a connectivity pattern that is compatible with
said convolution.
Arguments:
inputs: Input tensor.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 positive integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
padding: one of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
activation: Activation function. Set it to `None` to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If `None`, then no
bias will be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = Conv2DTranspose(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
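# Example (illustrative sketch, not part of the original module): upsampling a
# feature map with the transposed-convolution wrapper above. With
# padding='same' and strides=2 the spatial dimensions double (see
# utils.deconv_output_length). Assumes TF 1.x graph mode and a hypothetical
# float32 NHWC placeholder `feats`.
#
#   import tensorflow as tf
#   feats = tf.placeholder(tf.float32, [None, 8, 8, 64])
#   up = tf.layers.conv2d_transpose(feats, filters=32, kernel_size=3,
#                                   strides=2, padding='same')
#   # up has static shape (None, 16, 16, 32)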
class Conv3DTranspose(Conv3D):
"""Transposed 3D convolution layer (sometimes called 3D Deconvolution).
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for all spatial
dimensions.
strides: An integer or tuple/list of 3 integers, specifying the strides
of the convolution along the depth, height and width.
Can be a single integer to specify the same value for all spatial
dimensions.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
activation: Activation function. Set it to `None` to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If `None`, then no
bias will be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self,
filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv3DTranspose, self).__init__(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
**kwargs)
self.input_spec = base.InputSpec(ndim=5)
def build(self, input_shape):
if len(input_shape) != 5:
raise ValueError('Inputs should have rank 5, received input shape:',
str(input_shape))
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs '
'should be defined, found None: ' + str(input_shape))
input_dim = input_shape[channel_axis]
kernel_shape = self.kernel_size + (self.filters, input_dim)
self.kernel = self.add_variable(
'kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(
'bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
      self.bias = None
    self.built = True
def call(self, inputs):
inputs_shape = array_ops.shape(inputs)
batch_size = inputs_shape[0]
if self.data_format == 'channels_first':
c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4
else:
c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3
self.input_spec = base.InputSpec(ndim=5,
axes={c_axis: inputs_shape[c_axis]})
depth = inputs_shape[d_axis]
height = inputs_shape[h_axis]
width = inputs_shape[w_axis]
kernel_d, kernel_h, kernel_w = self.kernel_size
stride_d, stride_h, stride_w = self.strides
# Infer the dynamic output shape:
out_depth = utils.deconv_output_length(depth,
kernel_d,
self.padding,
stride_d)
out_height = utils.deconv_output_length(height,
kernel_h,
self.padding,
stride_h)
out_width = utils.deconv_output_length(width,
kernel_w,
self.padding,
stride_w)
if self.data_format == 'channels_first':
output_shape = (batch_size, self.filters, out_depth, out_height,
out_width)
strides = (1, 1, stride_d, stride_h, stride_w)
else:
output_shape = (batch_size, out_depth, out_height, out_width,
self.filters)
strides = (1, stride_d, stride_h, stride_w, 1)
output_shape_tensor = array_ops.stack(output_shape)
outputs = nn.conv3d_transpose(
inputs,
self.kernel,
output_shape_tensor,
strides,
data_format=utils.convert_data_format(self.data_format, ndim=5),
padding=self.padding.upper())
if context.in_graph_mode():
# Infer the static output shape:
out_shape = inputs.get_shape().as_list()
out_shape[c_axis] = self.filters
out_shape[d_axis] = utils.deconv_output_length(out_shape[d_axis],
kernel_d,
self.padding,
stride_d)
out_shape[h_axis] = utils.deconv_output_length(out_shape[h_axis],
kernel_h,
self.padding,
stride_h)
out_shape[w_axis] = utils.deconv_output_length(out_shape[w_axis],
kernel_w,
self.padding,
stride_w)
outputs.set_shape(out_shape)
if self.use_bias:
outputs_shape = outputs.shape.as_list()
if self.data_format == 'channels_first':
outputs_4d = array_ops.reshape(outputs, [
outputs_shape[0], outputs_shape[1],
outputs_shape[2] * outputs_shape[3], outputs_shape[4]
])
else:
outputs_4d = array_ops.reshape(outputs, [
outputs_shape[0], outputs_shape[1] * outputs_shape[2],
outputs_shape[3], outputs_shape[4]
])
outputs_4d = nn.bias_add(
outputs_4d,
self.bias,
data_format=utils.convert_data_format(self.data_format, ndim=4))
outputs = array_ops.reshape(outputs_4d, outputs_shape)
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = list(input_shape)
if self.data_format == 'channels_first':
c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4
else:
c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3
kernel_d, kernel_h, kernel_w = self.kernel_size
stride_d, stride_h, stride_w = self.strides
output_shape[c_axis] = self.filters
    output_shape[d_axis] = utils.deconv_output_length(
        output_shape[d_axis], kernel_d, self.padding, stride_d)
    output_shape[h_axis] = utils.deconv_output_length(
        output_shape[h_axis], kernel_h, self.padding, stride_h)
    output_shape[w_axis] = utils.deconv_output_length(
        output_shape[w_axis], kernel_w, self.padding, stride_w)
return tensor_shape.TensorShape(output_shape)
def conv3d_transpose(inputs,
filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for transposed 3D convolution layer.
Arguments:
inputs: Input tensor.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 3 positive integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 3 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
padding: one of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = Conv3DTranspose(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
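# Example (illustrative sketch, not part of the original module): the 3D
# transposed-convolution wrapper above, e.g. for upsampling volumetric feature
# maps. Assumes TF 1.x graph mode and a hypothetical channels_last placeholder
# `vol`.
#
#   import tensorflow as tf
#   vol = tf.placeholder(tf.float32, [None, 4, 8, 8, 32])
#   up = tf.layers.conv3d_transpose(vol, filters=16, kernel_size=3,
#                                   strides=2, padding='same')
#   # up has static shape (None, 8, 16, 16, 16)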
# Aliases
Convolution1D = Conv1D
Convolution2D = Conv2D
Convolution3D = Conv3D
SeparableConvolution2D = SeparableConv2D
Convolution2DTranspose = Deconvolution2D = Deconv2D = Conv2DTranspose
Convolution3DTranspose = Deconvolution3D = Deconv3D = Conv3DTranspose
convolution1d = conv1d
convolution2d = conv2d
convolution3d = conv3d
separable_convolution2d = separable_conv2d
convolution2d_transpose = deconvolution2d = deconv2d = conv2d_transpose
convolution3d_transpose = deconvolution3d = deconv3d = conv3d_transpose
| [
"[email protected]"
] | |
5ea2d8e12d59b152aba52323d7c529c2a61cbda6 | c6fbf6df12cb7de82b3060a7bc12fee9b46e5990 | /Chapter10/ch10-9.ML_ann.py | 6c23988b2637bec8bdfe79e24d198ebb675f9bea | [
"MIT"
] | permissive | tschoi6712/pythonDataAnalysis2nd | 5f0d07493bb835c76aa9fbe7100834e5a8341912 | 63e366d4dee52f7e4df6cf4d988a85d6de5b00e4 | refs/heads/master | 2020-08-09T14:42:55.831842 | 2019-10-10T06:53:24 | 2019-10-10T06:53:24 | 214,108,757 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | """
Predictive Analytics and Machine Learning - (9) Neural Networks: pip install theanets nose_parameterized
conda install m2w64-toolchain
"""
import numpy as np
import theanets
import multiprocessing
from sklearn import datasets
from sklearn.metrics import accuracy_score
rain = .1 * np.load('ch10.rain.npy')
rain[rain < 0] = .05/2
dates = np.load('ch10.doy.npy')
x = np.vstack((dates[:-1], np.sign(rain[:-1])))
x = x.T
y = np.vstack(np.sign(rain[1:]),)
N = int(.9 * len(x))
train = [x[:N], y[:N]]
valid = [x[N:], y[N:]]
net = theanets.Regressor(layers=[2, 3, 1])
net.train(train, valid, learning_rate=0.1, momentum=0.5)
pred = net.predict(x[N:]).ravel()
print("Pred Min", pred.min(), "Max", pred.max())
print("Y Min", y.min(), "Max", y.max())
print("Accuracy", accuracy_score(y[N:], pred >= .5))
| [
"[email protected]"
] | |
77c20440aaf6a3fb84a2337d4e42929b8d240a79 | 6d1bf00636259c1a65842a8dd49ea2037218cc8d | /Admin_console/Summary_Report.py | 017d7ef82d692280cd82d33e66e200171626829a | [] | no_license | chetandg123/Release_1.2 | efb4b0844b57638d23ac09783e0cd751893058ad | f9ff8add5930c7779ab1954c779f8f0e8cd1e908 | refs/heads/master | 2022-12-05T21:12:14.671813 | 2020-08-20T21:33:50 | 2020-08-20T21:33:50 | 288,701,130 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,632 | py | import time
import unittest
from selenium.webdriver.support.select import Select
from Data.parameters import Data
from get_dir import pwd
from reuse_func import GetData
class Test_summaryreport(unittest.TestCase):
@classmethod
def setUpClass(self):
self.data = GetData()
self.p = pwd()
self.driver = self.data.get_driver()
self.data.open_cqube_appln(self.driver)
self.data.page_loading(self.driver)
self.data.login_to_adminconsole(self.driver)
def test_summary_icon(self):
count = 0
self.data.page_loading(self.driver)
self.driver.find_element_by_xpath("//*[@id='summary']/img").click()
self.data.page_loading(self.driver)
if 'summary-statistics' in self.driver.current_url:
print("Summmary statistics report page is present ")
else:
print('Summary report page is not displayed')
count = count + 1
self.assertEqual(0,count,msg='Summary report page is not working')
self.driver.find_element_by_id('homeBtn').click()
self.data.page_loading(self.driver)
def test_dashboard_summary(self):
count = 0
self.driver.find_element_by_id(Data.Dashboard).click()
time.sleep(2)
self.driver.find_element_by_xpath("//*[@id='summary']/div/td[2]").click()
self.data.page_loading(self.driver)
if 'summary-statistics' in self.driver.current_url:
print("Summmary statistics report page is present ")
else:
print('Summary report page is not displayed')
count = count + 1
self.assertEqual(0,count,msg='Summary report page is not working')
self.driver.find_element_by_id('homeBtn').click()
self.data.page_loading(self.driver)
def test_check_summary(self):
self.data.page_loading(self.driver)
self.driver.find_element_by_id(Data.Dashboard).click()
time.sleep(2)
self.driver.find_element_by_xpath("//*[@id='summary']/div/td[2]").click()
self.data.page_loading(self.driver)
reports =self.driver.find_elements_by_tag_name('h2')
count = len(reports)
for i in range(len(reports)):
print(reports[i].text)
self.assertNotEqual(0,count,msg='All summary reports are not present')
if count > 6:
print("summary report of all files to be updated")
self.data.page_loading(self.driver)
self.driver.find_element_by_id('homeBtn').click()
self.data.page_loading(self.driver)
@classmethod
def tearDownClass(cls):
cls.driver.close() | [
"[email protected]"
] | |
fbd63bd85e48a99cbd17501c5e7a58f779e502b2 | a55756b40e0d14b6a4c366b55be65966e1934ec2 | /Recursion ADD(1).py | 8362065f9df3c9311c4efe9b885631a0f0d57111 | [] | no_license | rajeevj0909/FunFiles | 97323135bb5120d2d3a2458847a73f91dfa944bf | 85f4a430f0e71d1fee9c753bceb566ec319e5da0 | refs/heads/master | 2022-07-02T15:43:11.495874 | 2020-05-16T23:29:02 | 2020-05-16T23:29:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | def add(list1,counter,total):
    # Recursively sum list1[counter:]; total carries the running sum.
    n = len(list1)
    if n == counter:
        # Base case: every element has been added.
        return total
    else:
        # Add the current element and recurse on the next index.
        total = add(list1, counter + 1, total + list1[counter])
        return total
list1=[1,2,3,4]
counter=0
total=0
print(add(list1, counter, total))
| [
"[email protected]"
] | |
a0b302a8a168a912fbece6444aa9e7f4710361c3 | c15a28ae62eb94dbf3ed13e2065195e572a9988e | /Cook book/src/11/passing_a_socket_file_descriptor_between_processes/server.py | 05d6b07b5f08832aaaca3ea0ad4f14ffc9c19a1c | [] | no_license | xuyuchends1/python | 10798c92840a1a59d50f5dc5738b2881e65f7865 | 545d950a3d2fee799902658e8133e3692939496b | refs/heads/master | 2021-01-25T07:07:04.812140 | 2020-02-28T09:25:15 | 2020-02-28T09:25:15 | 93,647,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,098 | py | # server.py
import socket
import struct
def send_fd(sock, fd):
'''
Send a single file descriptor.
'''
sock.sendmsg([b'x'],
[(socket.SOL_SOCKET, socket.SCM_RIGHTS, struct.pack('i', fd))])
ack = sock.recv(2)
assert ack == b'OK'
def server(work_address, port):
# Wait for the worker to connect
work_serv = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
work_serv.bind(work_address)
work_serv.listen(1)
worker, addr = work_serv.accept()
# Now run a TCP/IP server and send clients to worker
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
s.bind(('', port))
s.listen(1)
while True:
client, addr = s.accept()
print('SERVER: Got connection from', addr)
send_fd(worker, client.fileno())
client.close()
if __name__ == '__main__':
import sys
if len(sys.argv) != 3:
print('Usage: server.py server_address port', file=sys.stderr)
raise SystemExit(1)
server(sys.argv[1], int(sys.argv[2]))
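# Sketch of the matching receive side (not part of this file): the worker
# process that connected over `work_address` would unpack the descriptor from
# the ancillary data and acknowledge it, e.g.:
#
#   def recv_fd(sock):
#       msg, ancdata, flags, addr = sock.recvmsg(
#           1, socket.CMSG_LEN(struct.calcsize('i')))
#       cmsg_level, cmsg_type, cmsg_data = ancdata[0]
#       assert cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS
#       sock.sendall(b'OK')
#       return struct.unpack('i', cmsg_data)[0]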
| [
"[email protected]"
] | |
069de552aee7711007a0052316643c96cc5c88fd | 5facb7b4baccff021b357ad67c966e2d92d665d1 | /stemming.py | 3a6a5d2215913259aacdefe3190af4f7568fe7da | [] | no_license | sadirahman/NLP | 73eeb1e32e7027c35b79ba3f6953b9ec51098cb7 | 844419ffd7bbb41217ab273753249757a4433865 | refs/heads/master | 2020-04-09T21:58:21.130129 | 2018-12-06T13:57:18 | 2018-12-06T13:57:18 | 160,617,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | from nltk.stem import PorterStemmer
from nltk.tokenize import word_tokenize
ps = PorterStemmer()
example_words =["python","pythoner","pythoning","pythoned","pythonly"]
for w in example_words:
print(ps.stem(w))
new_text = "It is very important to be pythonly while you are pythoning with python ."
word =word_tokenize(new_text)
for i in word:
print(ps.stem(i)) | [
"[email protected]"
] | |
18c9a3ba254ee73daa90283860550d2af14d7777 | 242da8865e037f9fffb76269c3acddb73ce9fa14 | /packages/pyright-internal/src/tests/samples/typeGuard1.py | ebeda0031c0154c1009d096010bb082e40aa8e7f | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | khyveasna11111908/pyright | f42eceae044f6fbc27552c1765b03ebd345a451c | 493d47807b96137995e4bb6ca341930e4de911f9 | refs/heads/main | 2023-08-30T00:08:36.191799 | 2021-09-25T19:17:13 | 2021-09-25T19:17:13 | 410,361,483 | 1 | 1 | NOASSERTION | 2021-09-25T19:15:23 | 2021-09-25T19:15:22 | null | UTF-8 | Python | false | false | 1,635 | py | # This sample tests the TypeGuard functionality
# that allows user-defined functions to perform
# conditional type narrowing.
# pyright: reportMissingModuleSource=false
import os
from typing import Any, List, Literal, Tuple, TypeVar, Union
from typing_extensions import TypeGuard
_T = TypeVar("_T")
def is_two_element_tuple(a: Tuple[_T, ...]) -> TypeGuard[Tuple[_T, _T]]:
return True
def func1(a: Tuple[int, ...]):
if is_two_element_tuple(a):
t1: Literal["Tuple[int, int]"] = reveal_type(a)
else:
t2: Literal["Tuple[int, ...]"] = reveal_type(a)
def is_string_list(val: List[Any], allow_zero_entries: bool) -> TypeGuard[List[str]]:
if allow_zero_entries and len(val) == 0:
return True
return all(isinstance(x, str) for x in val)
def func2(a: List[Union[str, int]]):
if is_string_list(a, True):
t1: Literal["List[str]"] = reveal_type(a)
else:
t2: Literal["List[str | int]"] = reveal_type(a)
# This should generate an error because TypeGuard
# has no type argument.
def bad1(a: int) -> TypeGuard:
return True
# This should generate an error because TypeGuard
# has too many type arguments.
def bad2(a: int) -> TypeGuard[str, int]:
return True
# This should generate an error because TypeGuard
# does not accept an ellipsis.
def bad3(a: int) -> TypeGuard[...]:
return True
# This should generate an error because TypeGuard
# does not accept a module.
def bad4(a: int) -> TypeGuard[os]:
return True
def bad5(a: int) -> TypeGuard[int]:
# This should generate an error because only
# bool values can be returned.
return 3
| [
"[email protected]"
] | |
72655be7c8a19587230e487b7743481687a52f88 | fe1e601b67a30e9215fe858e16b9c2965f8a3576 | /bloomFilter.py | 02f8a1f3bd5e7973d83bc77df4793f64c425fd90 | [] | no_license | z-o-e/algoFun | 8a53b0804d9ddb019133981ebd677b768d25cee6 | 539f89bece5791a1aadfafbddc3ce7c48fbf12ac | refs/heads/master | 2020-04-19T00:49:53.957430 | 2014-10-22T20:32:17 | 2014-10-22T20:32:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,571 | py | # bloom filter is a space-efficient probabilistic data structure to check whether an element is a member of a very large set.
# False positives are possible, false negatives are impossible (100% recall)
# probability of a false positive depends on the density of 1's in the array and the number of hash functions
# (the number of 1's is approximately the number of elements inserted times the number of hash functions)
# suppose bitArraySize: b, hashFunctionSize: h, streamElementSize: n
# throwing d darts at t targets at random: (1-1/t)^d = (1-1/t)^(t*d/t) ~= e^(-d/t)
# for example: suppose b=1b, h=5, n=100m, that is t = 10^9, d = 5*10^8
# the fraction of 0's e^(-1/2), fraction of 1's: 1-e^(-1/2), fraction of false positive (1-e^(-1/2))^5
import math
# @param funcs: a list of hash functions
# @param filterSize: an int representing the size of bloom filter -- a bit vector
class bloomFilter:
def __init__(self, funcs, filterSize):
self.funcs = funcs
self.bitArray = [0]*filterSize
def _dec2Binary(self, dec):
if dec==0:
return [0]
res = []
while dec:
res = [dec%2] + res
dec //= 2
return res
def set(self, streamElem):
elem = self._dec2Binary(streamElem)
for func in self.funcs:
idx = func(elem, len(self.bitArray))
self.bitArray[idx] |= 1
def lookup(self, newElem):
elem = self._dec2Binary(newElem)
for func in self.funcs:
idx = func(elem, len(self.bitArray))
if self.bitArray[idx]==0:
return False
return True
def estimateFP(self,streamSize):
zeros = math.exp(-len(self.funcs)*streamSize/len(self.bitArray))
fp = (1-zeros)**(len(self.funcs))
return fp
# h1, h2 take the odd-numbered and even-numbered bits, starting from the right, of the binary representation of x
def h1(x,modula):
odds = []
for i in range(len(x)-1,-1,-2):
odds = [x[i]]+odds
res = 0
i = 0
while odds:
t = odds.pop()
if t==1:
res += 2**i
i+=1
return res%modula
def h2(x,modula):
evens = []
for i in range(len(x)-2,-1,-2):
evens = [x[i]]+evens
res = 0
i = 0
while evens:
t = evens.pop()
if t==1:
res += 2**i
i+=1
return res%modula
funcs = [h1, h2]
filterSize = 11
test = bloomFilter(funcs, filterSize)
test.set(25)
test.set(159)
print(test.lookup(25))
print(test.lookup(159))
print(test.lookup(30))
print(test.bitArray)
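# Rough false-positive estimate for the two inserted elements, exercising the
# estimateFP formula above (illustrative addition):
print(test.estimateFP(2))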
| [
"[email protected]"
] | |
a4781f355e0c7465556c9f5f554524e357c8f228 | 0e722188b0a0c2302fab5b2917e9c39394664db3 | /gshs_auth/decorators.py | 768fdc4efcb9a5bed86fed80905ca350361d8498 | [] | no_license | Qwaz/GSHSmeal | 6df1d773a883b729c4d87c6f6b1c3990ed0e4d3c | 6f2af5f538a1b9b9db8f791041ff65a5c97b96d8 | refs/heads/master | 2020-04-17T09:12:47.694626 | 2015-03-04T10:27:56 | 2015-03-04T10:27:56 | 20,253,318 | 0 | 1 | null | 2014-08-20T01:56:00 | 2014-05-28T10:14:37 | JavaScript | UTF-8 | Python | false | false | 305 | py | from functools import wraps
from django.shortcuts import redirect
def admin_login(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
if request.user.is_staff:
return func(request, *args, **kwargs)
else:
return redirect('home')
return wrapper
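# Typical usage (an illustrative sketch; the view name is made up):
#
#   @admin_login
#   def manage(request):
#       ...
#
# Non-staff users are redirected to the 'home' URL instead of reaching the view.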
| [
"[email protected]"
] | |
59576815e0896221abafb383026b4ff38ea0df21 | f4ec787e34b74823875a8074d2f7c10d6207f206 | /tests/util/alert_server.py | 2e17d1448e570ccf024752181c78bbba66ce8f5b | [
"Apache-2.0"
] | permissive | Sweety-Network/sweety-blockchain-test | d0fc0fbd27e5b6970597824de43db3fb9e3bceed | 44373ff1b6a7a2a1dcb39d3d6fa01cd15e3e5fb6 | refs/heads/master | 2023-09-04T15:57:19.853659 | 2021-10-08T04:00:10 | 2021-10-08T04:00:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,143 | py | import argparse
import asyncio
import logging
from pathlib import Path
from typing import Any
from aiohttp import web
log = logging.getLogger(__name__)
class AlertServer:
shut_down: bool
shut_down_event: asyncio.Event
log: Any
app: Any
alert_file_path: Path
port: int
@staticmethod
async def create_alert_server(alert_file_path: Path, port):
self = AlertServer()
self.log = log
self.shut_down = False
self.app = web.Application()
self.shut_down_event = asyncio.Event()
self.port = port
routes = [
web.get("/status", self.status),
]
self.alert_file_path = alert_file_path
self.app.add_routes(routes)
return self
async def status(self, request):
file_text = self.alert_file_path.read_text()
return web.Response(body=file_text, content_type="text/plain")
async def stop(self):
self.shut_down_event.set()
async def run(self):
runner = web.AppRunner(self.app, access_log=None)
await runner.setup()
site = web.TCPSite(runner, None, self.port)
await site.start()
async def run_and_wait(file_path, port):
server = await AlertServer.create_alert_server(Path(file_path), port)
await server.run()
await server.shut_down_event.wait()
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-file_path", type=str, dest="file_path")
parser.add_argument("-port", type=str, dest="port")
port = None
file_path = None
for key, value in vars(parser.parse_args()).items():
if key == "port":
port = value
elif key == "file_path":
file_path = value
else:
print(f"Invalid argument {key}")
if port is None or file_path is None:
print(
"Missing arguments, example usage:\n\n"
"python sweety/util/alert_server.py -p 4000 -file_path /home/user/alert.txt\n"
)
quit()
return asyncio.get_event_loop().run_until_complete(run_and_wait(file_path, port))
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
62dcbc0c418cf6583d090a0a4451f0b217365320 | 4b44a299bafbd4ca408ce1c89c9fe4a449632783 | /python3/06_Collections/02_Tuples/01_tuples.py | 490cc61a2d1d027663715e7fff988af2cc75274f | [] | no_license | umunusb1/PythonMaterial | ecd33d32b2de664eaaae5192be7c3f6d6bef1d67 | 1e0785c55ccb8f5b9df1978e1773365a29479ce0 | refs/heads/master | 2023-01-23T23:39:35.797800 | 2020-12-02T19:29:00 | 2020-12-02T19:29:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Purpose: Working with tuples
"""
mylist = [12, 34.8, 500000, [6, 8], (5,)]
print('type(mylist)', type(mylist))
print('len(mylist)', len(mylist))
mytuple = (12, 34.8, 500000, [6, 8], (5,))
print('\ntype(mytuple)', type(mytuple))
print('len(mytuple)', len(mytuple))
print(mytuple)
print()
another_tuple = (99.9,)
print('type(another_tuple)', type(another_tuple))
print('len(another_tuple) ', len(another_tuple))
# NOTE: For a tuple with a single element, place a comma after the element
# so it is recognized as a tuple; otherwise it is treated as the bare element itself
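# e.g. type((99.9,)) is tuple, while type((99.9)) is float -- the parentheses alone
# do not create a tuple.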
print()
empty_tuple = tuple() # ()
print('type(empty_tuple):', type(empty_tuple))
print('len(empty_tuple) :', len(empty_tuple))
print()
mytuple = 1, 2, 3
print('type(mytuple) :', type(mytuple))
print('len(mytuple) :', len(mytuple))
print(mytuple)
| [
"[email protected]"
] | |
d50ee8a96e31ec7c4404e0ea515495d6127da418 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AlipayUserAuthZhimaperInnerApplyResponse.py | aa55cb52b9d5249d124fbd30a25e134c9e2a5866 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,403 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayUserAuthZhimaperInnerApplyResponse(AlipayResponse):
def __init__(self):
super(AlipayUserAuthZhimaperInnerApplyResponse, self).__init__()
self._access_token = None
self._auth_token_type = None
self._refresh_token = None
@property
def access_token(self):
return self._access_token
@access_token.setter
def access_token(self, value):
self._access_token = value
@property
def auth_token_type(self):
return self._auth_token_type
@auth_token_type.setter
def auth_token_type(self, value):
self._auth_token_type = value
@property
def refresh_token(self):
return self._refresh_token
@refresh_token.setter
def refresh_token(self, value):
self._refresh_token = value
def parse_response_content(self, response_content):
response = super(AlipayUserAuthZhimaperInnerApplyResponse, self).parse_response_content(response_content)
if 'access_token' in response:
self.access_token = response['access_token']
if 'auth_token_type' in response:
self.auth_token_type = response['auth_token_type']
if 'refresh_token' in response:
self.refresh_token = response['refresh_token']
| [
"[email protected]"
] | |
52acd0e9645a8b753223b9adf1ec37f385a0421b | 51090d3c72894b8db606319bb71772672bb179d5 | /feature/getfeatures_ch.py | dca8a16f62118b5c7d206d1962b60d70ee044848 | [] | no_license | Miyayx/XLore_Feature | c00143b448a9367cf02435c520ddf73921a0ffd8 | 696e5399e9b0cb54f257815cdd4eb2ef986be702 | refs/heads/master | 2020-06-05T03:02:06.842902 | 2014-09-20T15:49:34 | 2014-09-20T15:49:34 | 21,727,105 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,418 | py | #!/usr/bin/env python2.7
#encoding=utf-8
import codecs
from CHSingleSent import *
import FileIO
import threading
DATA_PATH = "/home/lmy/data/origin/"
PARSER_PATH = "/home/lmy/data/parser/"
FEATURE_PATH = "/home/lmy/data/feature/"
HEADWORD_PATH = "./headword/"
#FILE_NAME = "zhwiki-instance-concept-1v1.dat"
FILE_NAME = "zhwiki-concept-sub-all-1v1.dat"
DATAFILE= DATA_PATH + FILE_NAME
SUPER_FILE = PARSER_PATH+FILE_NAME.split(".")[0]+"-0column.dat"
SUB_FILE = PARSER_PATH+FILE_NAME.split(".")[0]+"-1column.dat"
FEATURE13=FEATURE_PATH+FILE_NAME.split(".")[0]+"-feature13.dat"
SUPER_HEADWORD_FILE = HEADWORD_PATH+FILE_NAME.split(".")[0]+"-superheadword.dat"
SUB_HEADWORD_FILE = HEADWORD_PATH+FILE_NAME.split(".")[0]+"-subheadword.dat"
DATA_DELIMITER = '\t\t'
FEATURE_ITEM_DELIMITER = '\t'
relations = ['equal','contain','contained','no relation','other']
def getStr2ObjectDict(filename, items):
"""
str(filename) -> dict mapping each item's key (first column) to its CHSingleSent
"""
l = FileIO.readDataFromFile(filename)
i2css = {}
for item in l:
if not item[0] in items:
continue
try:
i2css[item[0]] = CHSingleSent(item)
except:
print "String Error:",item[0]
return i2css
def getRelationshipOfTwoSets(l1,l2):
"""
get the relationship between two classes' headword collections
two lists of headwords -> int(relationshipID)
relations = ['equal','contain','contained','no relation','other']
"""
oldLen1 = len(set(l1))
oldLen2 = len(set(l2))
mergeList = list(l1)  # copy first so extend() does not mutate the caller's headword list
mergeList.extend(l2)
mergeLen = len(set(mergeList))
if oldLen1 == mergeLen and oldLen2 == mergeLen:
#return relations[0]
return 0
elif oldLen1 == mergeLen:
#return relations[1]
return 1
elif oldLen2 == mergeLen:
#return relations[2]
return 2
elif oldLen1+oldLen2 == mergeLen:
#return relations[3]
return 3
else:
return 4
#return relations[4]
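# For instance (illustrative): getRelationshipOfTwoSets(['a', 'b'], ['a']) returns 1
# ('contain'), because merging the two collections adds nothing new to the first one.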
def getRalationshipOfTwoSets2(l1,l2):
"""
get the relationship between one class's headword collection and the other's non-headword collection
two lists of headwords -> int(relationshipID)
only the relations 'no relation' and 'other' are distinguished here
"""
if len(l1) == 0 or len(l2) == 0:
#return relations[3]
return 0
oldLen1 = len(set(l1))
oldLen2 = len(set(l2))
mergeList = list(l1)  # copy first so extend() does not mutate the caller's list
mergeList.extend(l2)
mergeLen = len(set(mergeList))
if oldLen1+oldLen2 == mergeLen:
#return relations[3]
return 0
else:
#return relations[4]
return 1
def recordInputtedFile(newfile):
inputted_lines = []
try:
for line in codecs.open(newfile,'r','utf-8'):
inputted_lines.append(line.strip('\n').split(DATA_DELIMITER)[0])
except:
pass
return inputted_lines
def writeFeatureToFile(superD,subD,super_sub_freD,sub_super_freD,featurefile,items):
"""
(dict, dict, dict, dict, str, list) -> NoneType
featurefile is the output feature-record file; items is the list of "super\tsub" pairs to process
Order of features:
1. Relationship of super headword and subclass headword (look at the relation list )
2. If sub(the whole string) starts with super(the whole string)
3. If sub(the whole string) ends with super(the whole string)
4. If super(the whole string) starts with sub(the whole string)
5. If super(the whole string) ends with sub(the whole string)
6. Length of superclass headword set
7. Length of subclass headword set
8. Length of superclass string
9. Length of subclass string
10.Relationship of superclass headwords set and subclass unheadword set
11.Relationship of subclass headwords set and superclass unheadword set
12.Frequency ratio of the sub string's words among all subs of the same super
13.Frequency ratio of the super string's words among all supers of the same sub
Output format:
superclass\tsubclass\t\tf1,f2,f3....\n
"""
fwrite = codecs.open(featurefile,'a','utf-8')
for line in items:
item = line.strip('\n').split(FEATURE_ITEM_DELIMITER)
superStr = item[0]
subStr = item[1]
if len(subStr) == 0:
print "subStr",subStr
print "no sub"
continue
try:
superS = superD[superStr]
subS = subD[subStr]
except:
print "Super:",superStr
print "Sub:",subStr
continue
relation = getRelationshipOfTwoSets(superS.headword,subS.headword)
relation8 = getRalationshipOfTwoSets2(superS.headword,subS.unHeadword)
relation9 = getRalationshipOfTwoSets2(subS.headword,superS.unHeadword)
fwrite.write('%s\t%s\t\t%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%.3f,%.3f\n'%(
superStr,subStr,relation,
1 if subStr.startswith(superStr) else 0,
1 if subStr.endswith(superStr) else 0,
1 if superStr.startswith(subStr) else 0,
1 if superStr.endswith(subStr) else 0,
superS.hwLen,subS.hwLen,superS.wordLen,subS.wordLen,
relation8,relation9,
super_sub_freD[superStr][subStr],
sub_super_freD[subStr][superStr]))
fwrite.flush()
fwrite.close()
def calculateWordFrequency(d):
"""
dict(k:str v:list of related string) -> dict(k:str v:dict of [k:str v:ratio])
example:
d['aaa'] = ['dwd','fgse','sdf']
for key == 'aaa' in d:
totalWord = 3+4+3 = 10
freD = {'d':3,'w':1,'f':2,'g':1,'s':2,'e':1}
when key=='aaa', i =='dwd'
fre = 3+1+3 = 7
allFreD['aaa']['dwd']=7/10*3
"""
#for k,v in d.items():
# print k,v
allFreD = {}
for key in d:
freD = {}
totalWord = 0
allFreD[key] = {}
for i in d[key]:
ss = list(i)
for s in ss:
totalWord = totalWord+1
if freD.has_key(s):
freD[s] = freD[s]+1
else: freD[s] = 1
for i in d[key]:
ss = list(i)
fre = 0
for s in ss:
fre = fre+freD[s]
try:
allFreD[key][i] =float(fre)/totalWord*len(d[unicode(key)])
except:
continue
return allFreD
if __name__ == '__main__':
finished = []
import os
if os.path.isfile(FEATURE13):
for line in codecs.open(FEATURE13,'r','utf-8'):
finished.append(line.split(DATA_DELIMITER)[0])
all_ = []
for line in codecs.open(DATAFILE,'r','utf-8'):
all_.append(line.strip("\n").replace(DATA_DELIMITER,FEATURE_ITEM_DELIMITER))
unfinished = list(set(all_) - set(finished))
print "ALL:",len(all_)
print "Finished",len(finished)
print "Unfinished",len(unfinished)
del finished
del all_
super_items = set([i.split(FEATURE_ITEM_DELIMITER)[0] for i in unfinished])
sub_items = set([i.split(FEATURE_ITEM_DELIMITER)[1] for i in unfinished])
superD = {}
#def fun_a():
print "Calculating Object of super..."
#global superD
superD = getStr2ObjectDict(SUPER_FILE, super_items)
print "Len of superD",len(superD)
#superHeadword = dict((k,v.headword) for k,v in superD.iteritems())
#FileIO.recordHeadword(SUPER_HEADWORD_FILE,superHeadword)
#del superHeadword
subD = {}
#def fun_b():
print "Calculating Object of sub..."
#global subD
subD = getStr2ObjectDict(SUB_FILE, sub_items)
print "Len of subD",len(subD)
#subHeadword = dict((k,v.headword) for k,v in subD.iteritems())
#FileIO.recordHeadword(SUB_HEADWORD_FILE,subHeadword)
#del subHeadword
super_sub_freD = {}
sub_super_freD = {}
#def fun_c():
print "Calculating frequency..."
#global super_sub_freD,sub_super_freD
ddict = FileIO.readTwoColumnsToDict(DATAFILE,delimiter=DATA_DELIMITER)
super_sub_freD = calculateWordFrequency(ddict)
dddict = FileIO.readTwoColumnsToDict(DATAFILE,True,delimiter=DATA_DELIMITER)
sub_super_freD = calculateWordFrequency(dddict)
del ddict
del dddict
#threads = []
#threads.append(threading.Thread(target=fun_a))
#threads.append(threading.Thread(target=fun_b))
#threads.append(threading.Thread(target=fun_c))
#for t in threads:
# t.start()
#for t in threads:
# t.join()
print "Writing to file..."
writeFeatureToFile(superD,subD,super_sub_freD,sub_super_freD,FEATURE13,unfinished)
| [
"[email protected]"
] | |
bb28f5fb6011fac3c82ea587eebe8da09c4224ef | d2c4151eff768af64946ababc2e41c13d8973cd3 | /ARC104/b.py | 626f3f54124699e2c5c7d4b325852a552aa235d2 | [] | no_license | Intel-out-side/AtCoder | 2de19b71981247135432aed2d6d9c2a16c3ab7f0 | 0c419d2df15fff02032432cb1b1323612484e16e | refs/heads/master | 2022-06-23T04:21:12.886072 | 2022-06-13T14:39:07 | 2022-06-13T14:39:07 | 235,240,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | I = input().split()
N = int(I[0])
S = I[1]
class AccumulatedSum:
"""
Computes the prefix (accumulated) sums automatically.
Note that indexing is 0-based.
"""
def __init__(self, A:list):
self.N = len(A)
self.accumulated_sum = A.copy()
for i in range(1, self.N):
self.accumulated_sum[i] += self.accumulated_sum[i-1]
def getSumIn(self, left:int, right:int) -> int:
"""
Computes the sum over the interval [l, r], inclusive of both endpoints.
"""
if left == 0:
return self.accumulated_sum[right]
return self.accumulated_sum[right] - self.accumulated_sum[left-1]
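# Example of the intended usage (hypothetical values, not part of the actual input):
#   acc = AccumulatedSum([1, 2, 3, 4])
#   acc.getSumIn(1, 3)   # -> 2 + 3 + 4 = 9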
A = [0] * N
T = [0] * N
C = [0] * N
G = [0] * N
for i in range(N):
if S[i] == "A":
A[i] = 1
if S[i] == "T":
T[i] = 1
if S[i] == "C":
C[i] = 1
if S[i] == "G":
G[i] = 1
Aacc = AccumulatedSum(A)
Tacc = AccumulatedSum(T)
Cacc = AccumulatedSum(C)
Gacc = AccumulatedSum(G)
ans = 0
for i in range(N):
for j in range(i, N):
if Aacc.getSumIn(i, j) == Tacc.getSumIn(i, j) and Cacc.getSumIn(i, j) == Gacc.getSumIn(i, j):
ans += 1
print(ans)
| [
"[email protected]"
] | |
294ca33e70eb3070b97b28d0251b424150e1b996 | d7e41697c8d82f5ef3f9555af7d3d204803f0a99 | /test/mitmproxy/utils/test_strutils.py | 4759f1db47ceb9e0a34ee9a9ca78e176b3d0be98 | [
"MIT"
] | permissive | shitongzhu/mitmproxy | aba9681765b71c385b2b20fda303c71090a04376 | 08f35286d3af9ad39046b0b3663913701086be9a | refs/heads/master | 2023-08-19T07:03:52.226411 | 2021-09-04T10:56:33 | 2021-09-04T10:56:33 | 316,844,544 | 0 | 0 | MIT | 2020-11-29T00:02:33 | 2020-11-29T00:02:33 | null | UTF-8 | Python | false | false | 4,828 | py | import pytest
from mitmproxy.utils import strutils
def test_always_bytes():
assert strutils.always_bytes(bytes(range(256))) == bytes(range(256))
assert strutils.always_bytes("foo") == b"foo"
with pytest.raises(ValueError):
strutils.always_bytes("\u2605", "ascii")
with pytest.raises(TypeError):
strutils.always_bytes(42, "ascii")
def test_always_str():
with pytest.raises(TypeError):
strutils.always_str(42)
assert strutils.always_str("foo") == "foo"
assert strutils.always_str(b"foo") == "foo"
assert strutils.always_str(None) is None
def test_escape_control_characters():
assert strutils.escape_control_characters("one") == "one"
assert strutils.escape_control_characters("\00ne") == ".ne"
assert strutils.escape_control_characters("\nne") == "\nne"
assert strutils.escape_control_characters("\nne", False) == ".ne"
assert strutils.escape_control_characters("\u2605") == "\u2605"
assert (
strutils.escape_control_characters(bytes(bytearray(range(128))).decode()) ==
'.........\t\n..\r.................. !"#$%&\'()*+,-./0123456789:;<'
'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.'
)
assert (
strutils.escape_control_characters(bytes(bytearray(range(128))).decode(), False) ==
'................................ !"#$%&\'()*+,-./0123456789:;<'
'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.'
)
with pytest.raises(ValueError):
strutils.escape_control_characters(b"foo")
def test_bytes_to_escaped_str():
assert strutils.bytes_to_escaped_str(b"foo") == "foo"
assert strutils.bytes_to_escaped_str(b"\b") == r"\x08"
assert strutils.bytes_to_escaped_str(br"&!?=\)") == r"&!?=\\)"
assert strutils.bytes_to_escaped_str(b'\xc3\xbc') == r"\xc3\xbc"
assert strutils.bytes_to_escaped_str(b"'") == r"'"
assert strutils.bytes_to_escaped_str(b'"') == r'"'
assert strutils.bytes_to_escaped_str(b"'", escape_single_quotes=True) == r"\'"
assert strutils.bytes_to_escaped_str(b'"', escape_single_quotes=True) == r'"'
assert strutils.bytes_to_escaped_str(b"\r\n\t") == "\\r\\n\\t"
assert strutils.bytes_to_escaped_str(b"\r\n\t", True) == "\r\n\t"
assert strutils.bytes_to_escaped_str(b"\n", True) == "\n"
assert strutils.bytes_to_escaped_str(b"\\n", True) == "\\ \\ n".replace(" ", "")
assert strutils.bytes_to_escaped_str(b"\\\n", True) == "\\ \\ \n".replace(" ", "")
assert strutils.bytes_to_escaped_str(b"\\\\n", True) == "\\ \\ \\ \\ n".replace(" ", "")
with pytest.raises(ValueError):
strutils.bytes_to_escaped_str("such unicode")
def test_escaped_str_to_bytes():
assert strutils.escaped_str_to_bytes("foo") == b"foo"
assert strutils.escaped_str_to_bytes("\x08") == b"\b"
assert strutils.escaped_str_to_bytes("&!?=\\\\)") == br"&!?=\)"
assert strutils.escaped_str_to_bytes("\\x08") == b"\b"
assert strutils.escaped_str_to_bytes("&!?=\\\\)") == br"&!?=\)"
assert strutils.escaped_str_to_bytes("\u00fc") == b'\xc3\xbc'
with pytest.raises(ValueError):
strutils.escaped_str_to_bytes(b"very byte")
def test_is_mostly_bin():
assert not strutils.is_mostly_bin(b"foo\xFF")
assert strutils.is_mostly_bin(b"foo" + b"\xFF" * 10)
assert not strutils.is_mostly_bin("")
def test_is_xml():
assert not strutils.is_xml(b"foo")
assert strutils.is_xml(b"<foo")
assert strutils.is_xml(b" \n<foo")
def test_clean_hanging_newline():
s = "foo\n"
assert strutils.clean_hanging_newline(s) == "foo"
assert strutils.clean_hanging_newline("foo") == "foo"
def test_hexdump():
assert list(strutils.hexdump(b"one\0" * 10))
ESCAPE_QUOTES = [
"'" + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + "'",
'"' + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + '"'
]
def test_split_special_areas():
assert strutils.split_special_areas("foo", ESCAPE_QUOTES) == ["foo"]
assert strutils.split_special_areas("foo 'bar' baz", ESCAPE_QUOTES) == ["foo ", "'bar'", " baz"]
assert strutils.split_special_areas(
"""foo 'b\\'a"r' baz""",
ESCAPE_QUOTES
) == ["foo ", "'b\\'a\"r'", " baz"]
assert strutils.split_special_areas(
"foo\n/*bar\nbaz*/\nqux",
[r'/\*[\s\S]+?\*/']
) == ["foo\n", "/*bar\nbaz*/", "\nqux"]
assert strutils.split_special_areas(
"foo\n//bar\nbaz",
[r'//.+$']
) == ["foo\n", "//bar", "\nbaz"]
def test_escape_special_areas():
assert strutils.escape_special_areas('foo "bar" baz', ESCAPE_QUOTES, "*") == 'foo "bar" baz'
esc = strutils.escape_special_areas('foo "b*r" b*z', ESCAPE_QUOTES, "*")
assert esc == 'foo "b\ue02ar" b*z'
assert strutils.unescape_special_areas(esc) == 'foo "b*r" b*z'
| [
"[email protected]"
] | |
b81de21a3b245370ce540d63715dc874f1e5a1f5 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/timber/testcase/firstcases/testcase1_023.py | bd7a990dcc4d20bd7e9c133c55b4eed72482d212 | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,842 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'com.naman14.timber',
'appActivity' : 'com.naman14.timber.activities.MainActivity',
'resetKeyboard' : True,
'androidCoverage' : 'com.naman14.timber/com.naman14.timber.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
# testcase023
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
driver.press_keycode(82)
driver.press_keycode(82)
driver.press_keycode(82)
element = getElememtBack(driver, "new UiSelector().text(\"Sort by\")", "new UiSelector().className(\"android.widget.TextView\").instance(1)")
TouchAction(driver).tap(element).perform()
driver.press_keycode(82)
element = getElememt(driver, "new UiSelector().resourceId(\"com.naman14.timber:id/albumArt\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"She\")", "new UiSelector().className(\"android.widget.TextView\").instance(10)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"com.naman14.timber:id/albumArt\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageView\").description(\"More options\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageView\").description(\"More options\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageButton\").description(\"Navigate up\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Library\")", "new UiSelector().className(\"android.widget.CheckedTextView\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageButton\").description(\"Navigate up\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Heartbeat\")", "new UiSelector().className(\"android.widget.TextView\").instance(17)")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Report any bugs here\")", "new UiSelector().className(\"android.widget.TextView\").instance(5)")
TouchAction(driver).tap(element).perform()
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"1_023\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'com.naman14.timber'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage) | [
"[email protected]"
] | |
235f9be2e5351b3f4d2ed4d1505881be688d90e7 | 76d7e8f11f0cf1c0e8e0f90ac603eb297e063c1a | /private/templates/DRRPP/controllers.py | 0ba33102ef7e030aae20e3ee442c9f8b84172849 | [
"MIT"
] | permissive | carlisip/eden | afc8778071c854e95ddbe1655a6176a8c9eea12c | cf9c25afd48d63953976e5103e82a858f5027b5f | refs/heads/master | 2021-01-23T21:19:54.463890 | 2012-07-16T08:04:34 | 2012-07-16T08:04:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,743 | py | # -*- coding: utf-8 -*-
from os import path
from gluon import current
from gluon.html import *
# =============================================================================
def INPUT_BTN(**attributes):
"""
Utility function to create a styled button
"""
return SPAN(INPUT(_class = "button-right",
**attributes),
_class = "button-left")
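# e.g. the login form at the bottom of index() swaps in
# INPUT_BTN(_type="submit", _value=T("Login")) for the stock submit input.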
# =============================================================================
class index():
""" Custom Home Page """
def __call__(self):
T = current.T
request = current.request
appname = request.application
response = current.response
response.title = current.deployment_settings.get_system_name()
view = path.join(request.folder, "private", "templates",
"DRRPP", "views", "index.html")
try:
# Pass view as file not str to work in compiled mode
response.view = open(view, "rb")
except IOError:
from gluon.http import HTTP
raise HTTP("404", "Unable to open Custom View: %s" % view)
home_img = IMG(_src="/%s/static/themes/DRRPP/img/home_img.jpg" % appname,
_id="home_img")
home_page_img = IMG(_src="/%s/static/themes/DRRPP/img/home_page_img.png" % appname,
_id="home_page_img")
home_map_img = IMG(_src="/%s/static/themes/DRRPP/img/home_map_img.png" % appname,
_id="home_map_img")
list_img = A(IMG(_src="/%s/static/themes/DRRPP/img/list_img.png" % appname,
_id="list_img"),
_href=URL(c="project", f="project", args=["list"]),
_title="Project List")
matrix_img = A(IMG(_src="/%s/static/themes/DRRPP/img/matrix_img.png" % appname,
_id="matrix_img"),
_href=URL(c="project", f="project", args=["matrix"]),
_title="Project Matrix Report")
map_img = A(IMG(_src="/%s/static/themes/DRRPP/img/map_img.png" % appname,
_id="map_img"),
_href=URL( f="project", args=["map"]),
_title="Project Map")
graph_img = A(IMG(_src="/%s/static/themes/DRRPP/img/graph_img.png" % appname,
_id="graph_img"),
_href=URL(c="project", f="project", args=["graphs"]),
_title="Project Graph")
add_pipeline_project_link = URL(c="project",
f="project",
args=["create"],
vars=dict(set_status_id = "1"))
add_current_project_link = URL(c="project",
f="project",
args=["create"],
vars=dict(set_status_id = "2"))
add_completed_project_link = URL(c="project",
f="project",
args=["create"],
vars=dict(set_status_id = "3"))
add_offline_project_link = URL(c="static",
f="DRR_Project_Portal_New_Project_Form.doc")
add_framework_link = URL(c="project",
f="framework",
args=["create"])
project_captions = {
1:"DRR projects which will be being implemented in the future, and for which funding has been secured in the Asia and Pacific region.",
2:"DRR projects which are currently being implemented in one or more country in the Asia and Pacific region.",
3:"DRR projects which have been completed and are no longer being implemented in the Asia and Pacific region."
}
framework_caption = "Frameworks, action plans, road maps, strategies, declarations, statements and action agendas on DRR or DRR related themes, which are documents or instruments for guiding stakeholders on DRR planning, programming and implementation."
add_div = DIV(A(DIV("ADD ", SPAN("CURRENT", _class="white_text"), " PROJECT"),
_href=add_current_project_link,
_title=project_captions[2]),
A(DIV("ADD ", SPAN("PROPOSED", _class="white_text"), " PROJECT" ),
_href=add_pipeline_project_link,
_title=project_captions[1]),
A(DIV("ADD ", SPAN("COMPLETED", _class="white_text"), " PROJECT" ),
_href=add_completed_project_link,
_title=project_captions[3]),
A(DIV("ADD PROJECT OFFLINE" ),
_href=add_offline_project_link,
_title="Download a form to enter a DRR projects off-line and submit by Email"),
A(DIV("ADD ", SPAN("DRR FRAMEWORK", _class="white_text")),
_href=add_framework_link,
_title=framework_caption),
_id="add_div"
)
why_box = DIV(H1("WHY THIS PORTAL?"),
UL("Share information on implementation of DRR: Who? What? Where?",
"Collectively identify gaps, improve planning and programming on DRR",
"Identify areas of cooperation on implementation of DRR"
),
_id="why_box")
what_box = DIV(H1("WHAT CAN WE GET FROM THIS PORTAL?"),
UL("List of completed and ongoing DRR projects - by country, hazards, themes, partners and donors.",
"List of planned/proposed projects - better planning of future projects.",
"Quick analysis - on number and types of completed and ongoing DRR projects",
"Generate customised graphs and maps.",
"Know more on the DRR frameworks/action plans guiding the region - identify priority areas for providing support and implementation.",
"List of organisations implementing DRR projects at regional level.",
"Archive of periodic meetings of regional DRR mechanisms."
),
_id="what_box")
how_help_box = DIV(H1("HOW WOULD THIS INFORMATION HELP?"),
H2("National Government"),
UL("Gain clarity on types of support that may be accessed from regional level and thus receive coherent regional assistance"),
H2("Organisation Implementing DRR Projects"),
UL("Plan better-knowing who does what, and where; Find partners and scale up implementation; and Learn from past and ongoing work of partners"),
H2("Donor Agencies"),
UL("Identify priorities to match your policy and programmatic imperatives; and minimise overlap; maximise resources"),
_id="how_help_box")
how_start_box = DIV(H1("HOW DO WE GET STARTED?"),
UL("Add information on current / proposed / completed DRR projects",
"Search for information - project list, project analysis, DRR frameworks",
"Log in to add and edit your data",
"Link to this portal from your organisation website"
),
_id="how_start_box")
help = A(DIV("USER MANUAL",
_id="help_div"),
_href=URL(c="static", f="DRR_Portal_User_Manual.pdf"),
_target="_blank"
)
tour = A(DIV("VIDEO TOUR",
_id="tour_div"),
_href=URL(c="default", f="index", args="video"),
_target="_blank"
)
db = current.db
s3db = current.s3db
table = s3db.project_project
query = (table.deleted == False)
#approved = & (table.approved == True)
#current = & (table.status_id == 2)
#proposed = & (table.status_id == 1)
#completed = & (table.status_id == 1)
projects = db(query).count()
ftable = s3db.project_framework
query = (ftable.deleted == False)
#approved = & (table.approved == True)
frameworks = db(query).count()
stats = DIV(DIV("Currently the DRR Projects Portal has information on:"),
TABLE(TR(projects,
A("Projects",
_href=URL(c="project", f="project",
args=["list"]))
),
TR(TD(),
TABLE(TR(projects,
A("Current Projects",
_href=URL(c="project", f="project",
args=["list"],
vars={"status_id":2}))
)
)
),
TR(TD(),
TABLE(TR(projects,
A("Proposed Projects",
_href=URL(c="project", f="project",
args=["list"],
vars={"status_id":1}))
)
)
),
TR(TD(),
TABLE(TR(projects,
A("Completed Projects",
_href=URL(c="project", f="project",
args=["list"],
vars={"status_id":3}))
)
)
),
TR(frameworks,
A("Frameworks",
_href=URL(c="project", f="framework"))
),
),
_id="stats_div")
market = DIV(DIV(I("Under Development...")),
H2("DRR Project Marketplace"),
DIV("A platform to coordinate and collaborate on future DRR Projects."),
_id = "market_div")
auth = current.auth
_table_user = auth.settings.table_user
_table_user.language.label = T("Language")
_table_user.language.default = "en"
_table_user.language.comment = DIV(_class="tooltip",
_title=T("Language|The language to use for notifications."))
#_table_user.language.requires = IS_IN_SET(s3_languages)
languages = current.deployment_settings.get_L10n_languages()
_table_user.language.represent = lambda opt: \
languages.get(opt, current.messages.UNKNOWN_OPT)
request.args = ["login"]
login = auth()
login[0][-1][1][0] = INPUT_BTN(_type = "submit",
_value = T("Login"))
return dict(title = T("Home"),
home_img = home_img,
add_div = add_div,
login = login,
why_box = why_box,
home_page_img = home_page_img,
what_box = what_box,
how_help_box = how_help_box,
home_map_img = home_map_img,
how_start_box = how_start_box,
tour = tour,
help = help,
stats = stats,
market = market,
list_img = list_img,
matrix_img = matrix_img,
map_img = map_img,
graph_img = graph_img,
)
# END =========================================================================
| [
"[email protected]"
] | |
23b386e881e81a9c5f3259e0c00bdcf53cd206af | ed0f9eb0c1cb4858d91ef7e2d435db307f23a5a5 | /dist/manage/django/conf/__init__.py | 819f76dac03455205168724f054bb56819d714e2 | [] | no_license | hjlhehehe123/ATC_Data | 81b4622e7279aa9cc2013db8cc5a71d33561e768 | ad35e61afb8e87d8bab2d2b3aeea08e9409d56c0 | refs/heads/master | 2023-07-13T16:23:45.951584 | 2021-08-20T12:37:34 | 2021-08-20T12:37:34 | 256,994,694 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,444 | py | """
Settings and configuration for Django.
Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global_settings.py
for a list of all possible variables.
"""
import importlib
import os
import traceback
import warnings
from pathlib import Path
import django
import time
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import LazyObject, empty
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
PASSWORD_RESET_TIMEOUT_DAYS_DEPRECATED_MSG = (
'The PASSWORD_RESET_TIMEOUT_DAYS setting is deprecated. Use '
'PASSWORD_RESET_TIMEOUT instead.'
)
DEFAULT_HASHING_ALGORITHM_DEPRECATED_MSG = (
'The DEFAULT_HASHING_ALGORITHM transitional setting is deprecated. '
'Support for it and tokens, cookies, sessions, and signatures that use '
'SHA-1 hashing algorithm will be removed in Django 4.0.'
)
class SettingsReference(str):
"""
String subclass which references a current settings value. It's treated as
the value in memory but serializes to a settings.NAME attribute reference.
"""
def __new__(self, value, setting_name):
return str.__new__(self, value)
def __init__(self, value, setting_name):
self.setting_name = setting_name
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
is used the first time settings are needed, if the user hasn't
configured settings manually.
"""
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
if not settings_module:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
% (desc, ENVIRONMENT_VARIABLE))
self._wrapped = Settings(settings_module)
def __repr__(self):
# Hardcode the class name as otherwise it yields 'Settings'.
if self._wrapped is empty:
return '<LazySettings [Unevaluated]>'
return '<LazySettings "%(settings_module)s">' % {
'settings_module': self._wrapped.SETTINGS_MODULE,
}
def __getattr__(self, name):
"""Return the value of a setting and cache it in self.__dict__."""
if self._wrapped is empty:
self._setup(name)
val = getattr(self._wrapped, name)
self.__dict__[name] = val
return val
def __setattr__(self, name, value):
"""
Set the value of setting. Clear all cached values if _wrapped changes
(@override_settings does this) or clear single values when set.
"""
if name == '_wrapped':
self.__dict__.clear()
else:
self.__dict__.pop(name, None)
super().__setattr__(name, value)
def __delattr__(self, name):
"""Delete a setting and clear it from cache if needed."""
super().__delattr__(name)
self.__dict__.pop(name, None)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError('Settings already configured.')
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
if not name.isupper():
raise TypeError('Setting %r must be uppercase.' % name)
setattr(holder, name, value)
self._wrapped = holder
@staticmethod
def _add_script_prefix(value):
"""
Add SCRIPT_NAME prefix to relative paths.
Useful when the app is being served at a subpath and manually prefixing
subpath to STATIC_URL and MEDIA_URL in settings is inconvenient.
"""
# Don't apply prefix to absolute paths and URLs.
if value.startswith(('http://', 'https://', '/')):
return value
from django.urls import get_script_prefix
return '%s%s' % (get_script_prefix(), value)
@property
def configured(self):
"""Return True if the settings have already been configured."""
return self._wrapped is not empty
@property
def PASSWORD_RESET_TIMEOUT_DAYS(self):
stack = traceback.extract_stack()
# Show a warning if the setting is used outside of Django.
# Stack index: -1 this line, -2 the caller.
filename, _, _, _ = stack[-2]
if not filename.startswith(os.path.dirname(django.__file__)):
warnings.warn(
PASSWORD_RESET_TIMEOUT_DAYS_DEPRECATED_MSG,
RemovedInDjango40Warning,
stacklevel=2,
)
return self.__getattr__('PASSWORD_RESET_TIMEOUT_DAYS')
@property
def STATIC_URL(self):
return self._add_script_prefix(self.__getattr__('STATIC_URL'))
@property
def MEDIA_URL(self):
return self._add_script_prefix(self.__getattr__('MEDIA_URL'))
class Settings:
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting.isupper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
mod = importlib.import_module(self.SETTINGS_MODULE)
tuple_settings = (
"INSTALLED_APPS",
"TEMPLATE_DIRS",
"LOCALE_PATHS",
)
self._explicit_settings = set()
for setting in dir(mod):
if setting.isupper():
setting_value = getattr(mod, setting)
if (setting in tuple_settings and
not isinstance(setting_value, (list, tuple))):
raise ImproperlyConfigured("The %s setting must be a list or a tuple. " % setting)
setattr(self, setting, setting_value)
self._explicit_settings.add(setting)
if not self.SECRET_KEY:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
if self.is_overridden('PASSWORD_RESET_TIMEOUT_DAYS'):
if self.is_overridden('PASSWORD_RESET_TIMEOUT'):
raise ImproperlyConfigured(
'PASSWORD_RESET_TIMEOUT_DAYS/PASSWORD_RESET_TIMEOUT are '
'mutually exclusive.'
)
setattr(self, 'PASSWORD_RESET_TIMEOUT', self.PASSWORD_RESET_TIMEOUT_DAYS * 60 * 60 * 24)
warnings.warn(PASSWORD_RESET_TIMEOUT_DAYS_DEPRECATED_MSG, RemovedInDjango40Warning)
if self.is_overridden('DEFAULT_HASHING_ALGORITHM'):
warnings.warn(DEFAULT_HASHING_ALGORITHM_DEPRECATED_MSG, RemovedInDjango40Warning)
if hasattr(time, 'tzset') and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = Path('/usr/share/zoneinfo')
zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split('/'))
if zoneinfo_root.exists() and not zone_info_file.exists():
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = self.TIME_ZONE
time.tzset()
def is_overridden(self, setting):
return setting in self._explicit_settings
def __repr__(self):
return '<%(cls)s "%(settings_module)s">' % {
'cls': self.__class__.__name__,
'settings_module': self.SETTINGS_MODULE,
}
class UserSettingsHolder:
"""Holder for user configured settings."""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__['_deleted'] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if not name.isupper() or name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
if name == 'PASSWORD_RESET_TIMEOUT_DAYS':
setattr(self, 'PASSWORD_RESET_TIMEOUT', value * 60 * 60 * 24)
warnings.warn(PASSWORD_RESET_TIMEOUT_DAYS_DEPRECATED_MSG, RemovedInDjango40Warning)
if name == 'DEFAULT_HASHING_ALGORITHM':
warnings.warn(DEFAULT_HASHING_ALGORITHM_DEPRECATED_MSG, RemovedInDjango40Warning)
super().__setattr__(name, value)
def __delattr__(self, name):
self._deleted.add(name)
if hasattr(self, name):
super().__delattr__(name)
def __dir__(self):
return sorted(
s for s in [*self.__dict__, *dir(self.default_settings)]
if s not in self._deleted
)
def is_overridden(self, setting):
deleted = (setting in self._deleted)
set_locally = (setting in self.__dict__)
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
return deleted or set_locally or set_on_default
def __repr__(self):
return '<%(cls)s>' % {
'cls': self.__class__.__name__,
}
settings = LazySettings()
| [
"[email protected]"
] | |
abf4e6394243a39be16d129035ad1e8ef4a0593f | 65f863c73f6a3cd1cfdc70c94ab972a6ddac863f | /tests/benchmarks.py | c9f2c93bb951251d7afeb8fe8c816562d2b3d156 | [
"MIT"
] | permissive | marksagal/enaml-native | a266a631fd78ba9654412ae811bdf758566a4375 | 1606f895b0718e223cb4296fe3ead8df8018cad9 | refs/heads/master | 2021-09-01T04:04:59.020078 | 2017-12-12T20:43:13 | 2017-12-12T20:43:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,329 | py | '''
Copyright (c) 2017, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on Oct 4, 2017
@author: jrm
'''
import re
import os
import sh
import time
import pytest
import json
import requests
from os.path import exists, join
from utils import cd, source_activated
#: Stats
config = {
'app_built':False,
'stats': {},
}
def prepare_new_app(config):
""" Init a new app, build it, and launch it on a connected device.
:param config:
:return:
"""
app_dir = 'tmp/test_benchmarks/'
config['app_dir'] = app_dir
#: Create an app to to test
if exists(app_dir):
#: If using an emulator enable forwarding
if "emulator-" in sh.adb('devices'):
sh.adb("forward", "tcp:8888", "tcp:8888")
return # App already made
#if config['app_built']:
# return # App already made
#else:
# #: Cleanup the old app
# cleanup_app(config)
enamlnative = sh.Command('./enaml-native')
print(enamlnative('init', 'Benchmarks', 'com.codelv.enamlnative.benchmarks',
'tmp/test_benchmarks/'))
config['app_built'] = True
with cd(join(app_dir,'Benchmarks')):
with source_activated('venv', 'enaml-native') as enamlnative:
#: Now build python
print(enamlnative('build-python'))
#: Build and do a gradle sync, this will NOT include jni and native libs!
print(enamlnative('build-android'))
#: Now build python (again) to put them in the correct spot
print(enamlnative('build-python'))
#: Now try to run it and see if it crashes
#: Requires emulator or device
assert len(sh.adb('devices').strip().split("\n")) > 0, "No device is connected, " \
"can't test the build!"
#: Flush logcat
sh.adb('logcat', '--clear')
#: Do a build and run
print(enamlnative('run-android'))
#: Wait a few seconds
#: If using an emulator enable forwarding
if "emulator-" in sh.adb('devices'):
sh.adb("forward", "tcp:8888", "tcp:8888")
def cleanup_app(config):
if os.path.exists(config['app_dir']):
sh.rm('-R', config['app_dir'])
@pytest.mark.parametrize("platforms, path", [
(["android"], 'activity_indicator.enaml'),
(["android"], 'auto_complete_text_view.enaml'),
(["android"], 'block.enaml'),
(["android"], 'button.enaml'),
(["android"], 'calendar_view.enaml'),
(["android"], 'card_view.enaml'),
(["android"], 'clocks.enaml'),
(["android"], 'checkbox.enaml'),
(["android"], 'chronometer.enaml'),
(["android"], 'date_picker.enaml'),
(["android"], 'dialog.enaml'),
(["android"], 'drawer_layout.enaml'),
(["android"], 'edit_text.enaml'),
(["android"], 'flexbox.enaml'),
(["android"], 'icon.enaml'),
(["android"], 'mapview.enaml'),
(["android"], 'pager_tab_strip.enaml'),
(["android"], 'picker.enaml'),
(["android"], 'progress_bar.enaml'),
(["android"], 'radio_buttons.enaml'),
(["android"], 'rating_bar.enaml'),
(["android"], 'seekbar.enaml'),
(["android"], 'snackbar.enaml'),
(["android"], 'spacer.enaml'),
(["android"], 'spinner.enaml'),
(["android"], 'switch.enaml'),
(["android"], 'swipe_refresh.enaml'),
(["android"], 'tabs.enaml'),
(["android"], 'toast.enaml'),
(["android"], 'view_pager.enaml'),
(["android"], 'webview.enaml'),
])
def test_examples_for_real(platforms, path):
""" This builds an actuall app and does full system benchmarks on loading app examples
"""
if 'TRAVIS' in os.environ:
return #: Doesn't work on travis
#: Pretty hackish but whatever
prepare_new_app(config)
#: Load the code
dir_path = os.path.abspath(os.path.split(os.path.dirname(__file__))[0])
enaml_file = os.path.join(dir_path, 'examples', os.path.normpath(path))
with open(enaml_file, 'rb') as f:
source = f.read()
#: Trigger a reload
r = requests.post("http://localhost:8888/", json={
"type": "reload",
"files": {'view.enaml': source},
}).json()
assert r['ok'], "Failed to reload {}!".format(enaml_file)
#: TODO need a way to know when everything is done...
#: should read the log unil it stops
time.sleep(5)
#: Flush logcat
#: Save it
stats = parse_stats(sh.adb('logcat', '-d'))
config['stats'][enaml_file] = stats
#: Save it
data = json.dumps(config,indent=2)
with open('tmp/stats.json', 'w') as f:
f.write(data)
#: TODO: Now compare it to the baseline
def parse_stats(output):
""" Parses logcat output and returns the stats """
lines = [line for line in output if "[Stats]" in line]
stats = {
'totals': {'time': 0, 'tasks': 0, 'avg': 0}
}
for line in lines:
m = re.search(r'\((\d+) ms\).+\((\d+)\).+\((\d+) us.+\)', line)
if not m:
continue
dt, tasks, avg = map(int, m.groups())
if 'totals' in line:
stats['totals'] = {'time': dt, 'tasks': tasks, 'avg': avg}
return stats
| [
"[email protected]"
] | |
0c17e9bf063a7efc0d9893b30f9499564e3969f5 | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/gui/impl/gen/view_models/views/lobby/marathon/__init__.py | 71f0b031ebeac46c2d890ceb67264bafff348eac | [] | no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 151 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/impl/gen/view_models/views/lobby/marathon/__init__.py
pass
| [
"[email protected]"
] | |
ceb44fc8f169f591ebbdf0b4d2e0a1e4b500d9fc | 43ff15a7989576712d0e51f0ed32e3a4510273c0 | /chtscan/migrations/0006_auto_20160411_0800.py | f59e435e4885549c1b59a6379cce6ccd44b7e5cb | [] | no_license | v1cker/kekescan | f2b51d91a9d6496e2cdc767eb6a600171f513449 | 3daa1775648439ba9e0003a376f90b601820290e | refs/heads/master | 2020-09-19T16:26:56.522453 | 2017-06-15T02:55:24 | 2017-06-15T02:55:24 | 94,495,007 | 6 | 3 | null | null | null | null | UTF-8 | Python | false | false | 783 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-11 08:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('chtscan', '0005_auto_20160408_0723'),
]
operations = [
migrations.RemoveField(
model_name='vulnerability',
name='requestid',
),
migrations.AddField(
model_name='assessment',
name='requestid',
field=models.IntegerField(null=True, verbose_name='REQUESTID'),
),
migrations.AddField(
model_name='assessment',
name='taskid',
field=models.CharField(default='', max_length=50, verbose_name='TASKID'),
),
]
| [
"[email protected]"
] | |
1745f883f6d00b6642ba7d9496082c2fc1e47628 | a54c6117cf2bb8b33f7a1e1ce92dffa1ffa4fe94 | /demos/mismip/plot-result.py | 36106cbdafa2092b5925b31aca55d5b70fbaa846 | [] | no_license | barionleg/icepack-paper | f74d5ea9722f2bc1d51ddbb959297353bf32ecb9 | 3554c618468320c06c25bca46fd2f97c5d1e860c | refs/heads/master | 2023-06-08T01:05:13.074850 | 2021-01-12T00:04:18 | 2021-01-12T00:04:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,496 | py | import os
import argparse
import firedrake
import icepack.plot
parser = argparse.ArgumentParser()
parser.add_argument('--input')
parser.add_argument('--level', type=int)
parser.add_argument('--output')
args = parser.parse_args()
Lx, Ly = 640e3, 80e3
ny = 20
nx = int(Lx/Ly) * ny
coarse_mesh = firedrake.RectangleMesh(nx, ny, Lx, Ly)
mesh_hierarchy = firedrake.MeshHierarchy(coarse_mesh, args.level)
mesh = mesh_hierarchy[args.level]
Q = firedrake.FunctionSpace(mesh, family='CG', degree=1)
V = firedrake.VectorFunctionSpace(mesh, family='CG', degree=1)
h = firedrake.Function(Q)
u = firedrake.Function(V)
input_name = os.path.splitext(args.input)[0]
with firedrake.DumbCheckpoint(input_name, mode=firedrake.FILE_READ) as chk:
timesteps, indices = chk.get_timesteps()
chk.set_timestep(timesteps[-1], idx=indices[-1])
chk.load(h, name='h')
chk.load(u, name='u')
fig, axes = icepack.plot.subplots(
nrows=2, sharex=True, sharey=True, figsize=(6.4, 2.8)
)
axes[0].get_xaxis().set_visible(False)
for ax in axes:
ax.set_xlim(0, 640e3)
ax.set_ylim(0, 80e3)
ax.get_yaxis().set_visible(False)
colors_h = icepack.plot.tripcolor(h, axes=axes[0])
fig.colorbar(colors_h, ax=axes[0], fraction=0.0075, pad=0.04, label='m')
axes[0].set_title('Thickness')
colors_u = icepack.plot.tripcolor(u, axes=axes[1])
fig.colorbar(colors_u, ax=axes[1], fraction=0.0075, pad=0.04, label='m/year')
axes[1].set_title('Velocity')
fig.savefig(args.output, dpi=300, bbox_inches='tight')
| [
"[email protected]"
] | |
8954446c00e5428d2dacb756f5a86bb94bf80784 | 73832d62f4e982517f36265617c17bea3b715719 | /06cem/Translation_Methods/lab02/semantic.py | 66e0fa0e4feaccf2895327713a2780d2abd8acd3 | [] | no_license | keipa/bsuir-labs | fa9018a121ec1b040bae27326607cd82ac7652a4 | a7a3eca34d095568c7f8cbbe976cc91e515dc756 | refs/heads/master | 2021-06-15T23:37:27.249024 | 2020-06-09T09:33:32 | 2020-06-09T09:33:32 | 34,018,391 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,792 | py | from mylexer import data, lexer
from token_rules import tokens
import ply.yacc as yacc
from operator import add, sub, mul, truediv, eq, ne, le, ge, gt, lt
from ply.yacc import YaccSymbol
from checker import check
consts = dict()
identifiers = dict()
semantic_errors = []
interpret_index = 0
separators = [";", "do", "then", "end"]
interpret_mas = [[]]
interpret_dict = {"string": "= str()",
"integer": "= int()",
"writeln": "print",
"readln": "= input()",
":=": "=",
"length": "len",
"inc": "+= 1"
}
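# For example (illustrative): a Pascal writeln(...) call becomes a Python print call and
# ":=" becomes "=" once the collected tokens are substituted through this table and
# re-joined in interpret_pascal_mas_to_python().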
def tree_print(l, i):
for item in l:
if type(item) is list:
tree_print(item, i)
else:
i += 1
if item:
print('\t' * i + str(item))
def interpret_tree(l, i):
global interpret_index
for item in l:
if type(item) is list:
interpret_tree(item, i)
else:
i += 1
if item and item in separators:
interpret_index += 1
interpret_mas.append([])
if item is "end":
interpret_index += 1
interpret_mas.append([])
elif item:
if item in interpret_dict.keys():
item = interpret_dict[item]
interpret_mas[interpret_index].append(item)
def interpret_pascal_mas_to_python(mas):
interpret_code = []
for m in mas:
line = str()
for item in m:
line += " {0} ".format(item)
interpret_code.append(line[1:-1])
return check_conditions(interpret_code)
def check_conditions(mas):
for line in mas:
if line.startswith("for"):
index = mas.index(line)
new_line = line.replace("for", "")
if new_line.count(".0") > 0:
new_line = new_line.replace(".0", "")
split_line = new_line.split("=")
identifier_name, condition = split_line[0], split_line[1]
down_to_way = new_line.index("downto") > 0
if down_to_way:
result_line = "for {0} in range({1}, {2}, -1)"\
.format(identifier_name, condition.split("downto")[0], condition.split("downto")[1])
else:
result_line = "for {0} in range({1}, {2})" \
.format(identifier_name, new_line.split("to")[0], new_line.split("to")[1])
mas[index] = result_line + ":"
elif line.startswith("while") or line.startswith("if") or line.startswith("else"):
index = mas.index(line)
mas[index] += ":"
elif line.count(".0") > 0:
index = mas.index(line)
mas[index] = mas[index].replace(".0", "")
return mas
def get_full_python_code(mas):
tab_count = 0
python_code = ""
for line in mas:
python_code += ("\t" * tab_count) + line + "\n"
if line.startswith("for") or line.startswith("while") or line.startswith("if") or line.startswith("else"):
tab_count += 1
if line is "":
tab_count -= 1
return python_code
def get_lines_python_code(mas):
tab_count = 0
python_code = []
for line in mas:
python_code.append(("\t" * tab_count) + line)
if line.startswith("for") or line.startswith("while") or line.startswith("if") or line.startswith("else"):
tab_count += 1
if line is "":
tab_count -= 1
return python_code
def start_pascal(text):
exec(text)
def start_pascal_lines(lines):
for line in lines:
exec(line)
class Node:
def parts_str(self):
st = []
for part in self.parts:
st.append(part.__str__())
return "\n".join(st)
def __str__(self):
return self.type + ":\n\t" + self.parts_str().replace("\n", "\n\t")
def add_child(self, parts):
if isinstance(parts, Node):
if parts.parts:
self.parts.extend(parts.parts)
return self
def __init__(self, type, parts):
self.type = type
self.parts = parts
def p_consts(p):
'''
consts : CONST IDENTIFIER EQUALITY NUMBER SEMICOLON begin_program
| CONST IDENTIFIER EQUALITY STRING SEMICOLON begin_program
| CONST IDENTIFIER EQUALITY matrix SEMICOLON begin_program
| begin_program
'''
if len(p) == 2:
p[0] = p[1]
else:
p[0] = [[p[2], p[3], p[4]], p[5], p[6]]
def p_matrix(p):
'''
matrix : OPEN_SQUARE_BRACKET identifiers CLOSE_SQUARE_BRACKET
'''
p[0] = [p[1], p[2], p[3]]
def p_identifiers(p):
'''
identifiers : IDENTIFIER ZAPYATAYA identifiers
| NUMBER ZAPYATAYA identifiers
| STRING ZAPYATAYA identifiers
| IDENTIFIER
| NUMBER
| STRING
'''
if len(p) == 2:
p[0] = p[1]
else:
p[0] = [[p[1]], p[2], p[3]]
def p_begin_program(p):
'''
begin_program : VAR declarations BEGIN body END POINT
'''
p[0] = [p[2], [p[4]]]
def p_block(p):
'''
block : BEGIN body END SEMICOLON
'''
p[0] = [p[2], p[3]]
def p_body(p):
'''
body : expression
'''
p[0] = [p[1]]
def p_identifier(p):
'''
identifier : IDENTIFIER
| IDENTIFIER SEMICOLON
'''
if list(identifiers.keys()).count(p[1]) == 0:
semantic_errors.append('{0}) There is no {1} variable!'.format(p.lexer.lineno, p[1]))
if len(p) > 2:
p[0] = [p[1], p[2]]
else:
p[0] = p[1]
def p_expression(p):
'''
expression : assignment expression
| if expression
| function expression
| empty
| while expression
| for expression
| break
'''
# empty
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = [p[1], p[2]]
def p_break(p):
'''
break : BREAK SEMICOLON
'''
p[0] = [p[1], p[2]]
def p_declarations(p):
'''
declarations : declaration declarations
| empty
'''
if len(p) == 2:
p[0] = p[0]
else:
p[0] = [p[1], p[2]]
def p_declaration(p):
'''
declaration : IDENTIFIER another_identifiers COLON type SEMICOLON
'''
if type(p[2]) is list:
p[0] = [[p[1], p[4], p[5]]] + [[item, p[4], p[5]] for item in p[2]]
elif p[2] is None:
p[0] = [p[1], p[4], p[5]]
else:
p[0] = [[p[1], p[4], p[5]], [p[2], p[4], p[5]]]
def p_another_identifiers(p):
'''
another_identifiers : ZAPYATAYA IDENTIFIER another_identifiers
| empty
'''
if len(p) == 2:
p[0] = p[0]
else:
if p[3] is None:
p[0] = p[2]
else:
p[0] = [p[2], p[3]]
def p_type(p):
'''
type : TYPE_STRING
| TYPE_INTEGER
| TYPE_REAL
'''
p[0] = [p[1]]
def p_empty(p):
'''
empty :
'''
    p[0] = None  # an empty production carries no value
def p_assignment(p):
'''
assignment : identifier ASSIGNMENT arithmetic_expression SEMICOLON
| identifier ASSIGNMENT function SEMICOLON
| identifier ASSIGNMENT function
| identifier ASSIGNMENT arithmetic_expression
'''
if len(p) > 4:
p[0] = [p[1], p[2], p[3], p[4]]
else:
p[0] = [p[1], p[2], p[3]]
def p_arithmetic_expression(p):
'''
arithmetic_expression : NUMBER
| identifier
| STRING
| function
| identifier PLUS arithmetic_expression
| identifier MINUS arithmetic_expression
| identifier MUL arithmetic_expression
| identifier DIV arithmetic_expression
| NUMBER PLUS arithmetic_expression
| NUMBER MINUS arithmetic_expression
| NUMBER MUL arithmetic_expression
| NUMBER DIV arithmetic_expression
| function PLUS arithmetic_expression
| function MINUS arithmetic_expression
| function MUL arithmetic_expression
| function DIV arithmetic_expression
| identifier OPEN_SQUARE_BRACKET arithmetic_expression CLOSE_SQUARE_BRACKET
| arithmetic_expression PLUS arithmetic_expression
| arithmetic_expression MINUS arithmetic_expression
| arithmetic_expression MUL arithmetic_expression
| arithmetic_expression DIV arithmetic_expression
| arithmetic_expression SEMICOLON
'''
if len(p) == 3:
p[0] = [p[1], p[2]]
elif len(p) == 5:
p[0] = [p[1], p[2], p[3], p[4]]
elif len(p) == 2:
p[0] = p[1]
else:
if p[2] in ("+", "-", "/", "*"):
if type(p[3]) is list and type(p[1]) is list:
if list(identifiers.keys()).count(p[1][0]) > 0 and list(identifiers.keys()).count(p[3][0]) > 0:
if identifiers[p[3][0]][1] in (list, str) and identifiers[p[1][0]][1] in (list, str) and\
identifiers[p[3][0]][0] != identifiers[p[1][0]][0]:
semantic_errors.append("{3}) Operations with different types! {0} {1} {2}"
.format(p[1], p[2], p[3], p.lexer.lineno))
elif list(identifiers.keys()).count(p[1]) > 0 and list(identifiers.keys()).count(p[3]) > 0:
if identifiers[p[3]][1] != identifiers[p[1]][1]:
semantic_errors.append("{3}) Operations with different types! {0} {1} {2}"
.format(p[1], p[2], p[3], p.lexer.lineno))
elif list(identifiers.keys()).count(p[1]) > 0:
if ((type(p[3]) is list and p[3][1] == "[]" and identifiers[p[3][0]][1] != identifiers[p[1]][1]) or\
(type(p[3]) is not list and (not (type(p[3]) in (int, float) and identifiers[p[1]][1]
in (int, float)) and not (type(p[3]) is str and identifiers[p[1]][1] is str)))) and p[3] != "0":
semantic_errors.append("{3}) Operations with different types! {0} {1} {2}"
.format(p[1], p[2], p[3], p.lexer.lineno))
elif list(identifiers.keys()).count(p[3]) > 0:
if (type(p[1]) is list and p[1][1] == "[]" and identifiers[p[1][0]][1] != identifiers[p[3]][1]) or \
(type(p[1]) is not list and (not (type(p[1]) in (int, float) and identifiers[p[3]][1]
in (int, float)) and not (type(p[1]) is str and identifiers[p[3]][1] is str))):
semantic_errors.append("{0}) Operations with different types! {1} {2} {3}"
.format(p.lexer.lineno, p[1], p[2], p[3]))
if p[2] == "/" and ((type(p[3]) in (int, float) and p[3] == 0) or p[3] == "0"):
semantic_errors.append("{3}) Division by 0 exception. {0} {1} {2}".format(p[1], p[2], p[3], p.lexer.lineno))
p[0] = [p[1], p[2], p[3]]
def perform_operation(operation, v1, v2):
return operation(v1, v2)
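# Editor's sketch (illustration only, never called by the translator): perform_operation
# simply applies the callable it is handed to the two operands, so the standard
# operator functions work directly.
def _example_perform_operation():
    import operator
    assert perform_operation(operator.add, 2, 3) == 5
    assert perform_operation(operator.mul, 4, 2) == 8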
def p_function(p):
'''
function : WRITE OPEN_BRACKET arithmetic_expression CLOSE_BRACKET SEMICOLON
| WRITELN OPEN_BRACKET arithmetic_expression CLOSE_BRACKET SEMICOLON
| READ OPEN_BRACKET identifier CLOSE_BRACKET SEMICOLON
| READLN OPEN_BRACKET identifier CLOSE_BRACKET SEMICOLON
| LENGTH OPEN_BRACKET identifier CLOSE_BRACKET
| INC OPEN_BRACKET identifier CLOSE_BRACKET SEMICOLON
| LENGTH OPEN_BRACKET identifier CLOSE_BRACKET SEMICOLON
'''
if len(p) > 5:
if p[1] in ("readln", "inc", "read"):
p[0] = [p[3], p[1], p[5]]
else:
p[0] = [p[1], p[2], p[3], p[4], p[5]]
else:
if p[1] in ("readln", "inc", "read"):
p[0] = [p[3], p[1]]
else:
p[0] = [p[1], p[2], p[3], p[4]]
def invert_operation(operation):
invertor = {'>': '<', '<': '>', '<=': '>=',
'>=': '<=', '=': '<>', '<>': '=', 'in': 'in'}
return invertor[operation]
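# Editor's sketch (illustration only, never called): the invertor table swaps a Pascal
# comparison for its logical negation, which is useful when a predicate has to be
# emitted in negated form.
def _example_invert_operation():
    assert invert_operation('>') == '<'
    assert invert_operation('=') == '<>'
    assert invert_operation('in') == 'in'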
def p_predicate(p):
'''
predicate : arithmetic_expression COMPARISON arithmetic_expression
| arithmetic_expression EQUALITY arithmetic_expression
| arithmetic_expression NON_EQUALITY arithmetic_expression
| arithmetic_expression NON_STRICT_COMPARISON arithmetic_expression
| arithmetic_expression IN identifier
| arithmetic_expression IN matrix
'''
if p[3] in identifiers.keys() and p[2] == "in" and identifiers[p[3]][1] is not list:
semantic_errors.append("{0}) You can pass operator \"in\" only for matrix.".format(p.lexer.lineno))
old_len = len(semantic_errors)
check_comparation(p[1], p[2], p[3], p.lexer.lineno)
if old_len == len(semantic_errors):
check_comparation(p[3], p[2], p[1], p.lexer.lineno)
if type(p[1]) is list and p[1][1] == '[]':
p[0] = p[1] + [p[2], p[3]]
else:
p[0] = [p[1], p[2], p[3]]
def check_comparation(p1, p2, p3, line):
if type(p1) is list and p2 != "in":
tp = None
for _ in p1:
if list(identifiers.keys()).count(_) > 0:
tp = identifiers[_][1]
break
tp = tp if tp is not None else type(p1[0])
if type(p3) is not list:
if (type(p3) is not tp) or (tp is str and type(p3) in (int, float)):
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
elif list(identifiers.keys()).count(p3) > 0 and identifiers[p3][1] != tp:
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
else:
tp1 = None
for _ in p3:
if list(identifiers.keys()).count(_) > 0:
tp1 = identifiers[_][1]
break
tp1 = tp1 if tp1 is not None else type(p3[0])
if tp != tp1:
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
else:
if list(identifiers.keys()).count(p1) > 0:
if list(identifiers.keys()).count(p3) > 0:
if identifiers[p3][1] != identifiers[p1][1]:
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
else:
if identifiers[p1][1] != type(p3):
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
else:
if list(identifiers.keys()).count(p3) > 0:
if type(p1) != identifiers[p3][1]:
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
else:
if type(p1) != type(p3):
semantic_errors.append("{0}) You can't pass different types to {1} operation.".format(line, p2))
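# Editor's note (hedged illustration, not in the original file): check_comparation
# consults the module-level identifiers table, so if a string variable s and an
# integer variable n were declared, a predicate such as  s > n  appends
# "You can't pass different types to > operation." to semantic_errors, while a
# comparison between two integer identifiers passes silently.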
def p_some_predicates(p):
'''
some_predicates : OPEN_BRACKET predicate CLOSE_BRACKET AND some_predicates
| OPEN_BRACKET predicate CLOSE_BRACKET OR some_predicates
| OPEN_BRACKET predicate CLOSE_BRACKET
| OPEN_BRACKET NOT OPEN_BRACKET predicate CLOSE_BRACKET CLOSE_BRACKET AND some_predicates
| OPEN_BRACKET NOT OPEN_BRACKET predicate CLOSE_BRACKET CLOSE_BRACKET OR some_predicates
| OPEN_BRACKET NOT OPEN_BRACKET predicate CLOSE_BRACKET CLOSE_BRACKET
'''
if len(p) == 9:
p[0] = [[p[2], p[3], p[4], p[5], p[7]], p[8]]
elif len(p) == 7:
p[0] = [p[2], p[4]]
elif len(p) == 4:
p[0] = p[2]
else:
p[0] = [[p[2], p[4]], p[5]]
def p_while(p):
'''
while : WHILE OPEN_BRACKET predicate CLOSE_BRACKET DO block
| WHILE some_predicates DO block
'''
if len(p) == 5:
p[0] = [['while', p[2]], ['do', p[4]]]
else:
p[0] = [['while', p[3]], ['do', p[6]]]
def p_for(p):
'''
for : FOR assignment TO arithmetic_expression DO block
| FOR assignment DOWNTO arithmetic_expression DO block
'''
p[0] = [['for', [p[2], [p[3], p[4]]]], p[5], p[6]]
def p_if(p):
'''
if : IF OPEN_BRACKET predicate CLOSE_BRACKET THEN block
| IF OPEN_BRACKET predicate CLOSE_BRACKET THEN block else
'''
if len(p) == 8:
p[0] = [['if', p[3]], ['then', p[6]], ['else', p[7]]]
else:
p[0] = [['if', p[3]], ['then', p[6]]]
def p_else(p):
'''
else : ELSE block
'''
p[0] = [p[1], p[2]]
def p_error(p):
print('Unexpected token {0}'.format(p))
if __name__ == '__main__':
with open('input.txt', 'r') as fin:
data = ''.join(fin.readlines())
identifiers = check(data)[0]
parser = yacc.yacc(method='LALR')
result = parser.parse(data, lexer=lexer, debug=False, tracking=True)
if result and len(semantic_errors) == 0:
interpret_tree(result, 0)
start_pascal(get_full_python_code(interpret_pascal_mas_to_python(interpret_mas)))
if len(semantic_errors) > 0:
for _ in semantic_errors:
print(_)
| [
"[email protected]"
] | |
9c315f3e10b630f08fda544fd7dabd316ebaed05 | 1f98ccf9ef52d3adab704676480c85fe22c9542d | /simpledb/test/TestBlk.py | 0ebcd6e00f84ea455edce878943380b79c66d26e | [] | no_license | 61515/simpleDB_Python | 234c671cbbf57f3e8fc5489ec4c292365085b7a8 | b6846da4a78369838f5b3c7a704de704e18f7be7 | refs/heads/master | 2023-02-22T14:07:52.660633 | 2021-01-24T02:25:40 | 2021-01-24T02:25:40 | 332,343,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | from simpledb.file.BlockId import BlockId
if __name__ == '__main__':
blk = BlockId("file", 1)
buffers = {blk: 1}
pins = [blk]
# pins.remove(blk)
if blk in pins:
print(1)
print(pins.count(blk))
# print(pins)
# print(buffers.get(blk))
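    # Editor's note (assumption, not in the original test): the dict key lookup and the
    # list membership/count checks above exercise BlockId's __hash__/__eq__ behaviour;
    # that class lives in simpledb.file.BlockId and is not shown in this snippet.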
| [
"[email protected]"
] | |
9b43fa6a64fe1365c949bf00ef1b7be04e6c5852 | 1cfcfa686489885843b9a142c8ba980ebd5d5ffd | /tests/optim/test_weight_average.py | a421cf0328fcef62c31051e3eef5aa29b3acfded | [
"MIT"
] | permissive | qyz-thu/gnn_vae | 9d2d8e984a96d0f22f74362889fdd1c0613df46d | 278aeb7038216812a94c7f7acd2ca425696f986b | refs/heads/master | 2023-02-05T20:07:24.097968 | 2020-12-18T06:34:20 | 2020-12-18T06:34:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,265 | py | import copy
import numpy as np
import pytest
import tensorkit as tk
from tensorkit import tensor as T
from tests.helper import *
def stepwise_average_check(ctx, factory, update_fn, get_fn):
def clone_state(val):
if isinstance(val, dict):
return {k: clone_state(v) for k, v in val.items()}
elif isinstance(val, list):
return [clone_state(v) for v in val]
elif isinstance(val, (T.Tensor, T.Variable)):
return T.copy(val)
elif isinstance(val, np.ndarray):
return np.copy(val)
else:
return copy.copy(val)
T.random.seed(1234)
weights = [
T.variable(shape=[4], initializer=tk.init.zeros, requires_grad=False),
T.variable(shape=[3], initializer=tk.init.zeros, requires_grad=False),
]
answers = [clone_state(w) for w in weights]
inputs_1 = T.random.randn([7, 4])
inputs_2 = T.random.randn([7, 3])
# do a scan
avg = factory(weights)
the_states = []
the_outputs = []
num_updates = 0
for batch_vals in zip(inputs_1, inputs_2):
for weight, val in zip(weights, batch_vals):
T.assign(weight, val)
the_states.append(clone_state(avg.get_state_dict()))
avg.update()
with avg.temporarily_commit():
the_outputs.extend(clone_state(w) for w in weights)
for i, val in enumerate(batch_vals):
answers[i] = update_fn(answers[i], val, num_updates)
num_updates += 1
for weight, ans in zip(weights, answers):
assert_allclose(weight, get_fn(ans, num_updates), rtol=1e-4, atol=1e-6)
for weight, val in zip(weights, batch_vals):
assert_allclose(weight, val, rtol=1e-4, atol=1e-6)
# test enabled = False
avg = factory(weights, enabled=False)
for x1, x2, state, output in zip(inputs_1, inputs_2, the_states, the_outputs):
batch_vals = [x1, x2]
for weight, val in zip(weights, batch_vals):
T.assign(weight, val)
avg.update()
avg.commit() # should still affect weights even if enabled is False
for avg_val in avg.get_state_dict()['averages']:
assert_allclose(avg_val, T.zeros_like(avg_val), rtol=1e-4, atol=1e-6)
for weight in weights:
assert_allclose(weight, T.zeros_like(weight), rtol=1e-4, atol=1e-6)
# do another scan using backup states
avg = factory(weights, enabled=False)
avg.set_enabled(True)
for x1, x2, state, output in zip(inputs_1, inputs_2, the_states, the_outputs):
batch_vals = [x1, x2]
for weight, val in zip(weights, batch_vals):
T.assign(weight, val)
avg.set_state_dict(state)
avg.update()
with avg.temporarily_commit():
the_outputs.extend(clone_state(w) for w in weights)
for weight, val in zip(weights, batch_vals):
assert_allclose(weight, val, rtol=1e-4, atol=1e-6)
# try set bad state
avg = factory(weights)
state = dict(avg.get_state_dict())
state['averages'] = []
with pytest.raises(ValueError, match='Bad state'):
avg.set_state_dict(state)
def full_scan_average_check(ctx, factory, input_x, expected):
weight = T.variable(T.shape(input_x)[1:], initializer=tk.init.zeros,
requires_grad=False)
avg = factory([weight])
for x in input_x:
T.assign(weight, x)
avg.update()
avg.commit()
assert_allclose(weight, expected, atol=1e-4, rtol=1e-6)
class WeightAveragingTestCase(TestCase):
def test_MeanAveraging(self):
# step-wise check
factory = tk.optim.WeightMeanAveraging
def update_fn(old_val, new_val, num_updates):
return (old_val * num_updates + new_val) / (num_updates + 1.)
def get_fn(val, num_updates):
return val
stepwise_average_check(self, factory, update_fn, get_fn)
# overall check
input_x = T.random.randn([7, 4])
full_scan_average_check(
self, factory, input_x, T.reduce_mean(input_x, axis=[0]))
def test_MovingAveraging(self):
# step-wise check
for decay in (0.9, 0.99):
for zero_debias in (True, False):
factory = lambda weights, **kwargs: tk.optim.WeightMovingAveraging(
weights, decay=decay, zero_debias=zero_debias, **kwargs)
def update_fn(old_val, new_val, num_updates):
return decay * old_val + (1. - decay) * new_val
if zero_debias:
def get_fn(val, num_updates):
if num_updates > 0:
return val / (1. - decay ** num_updates)
else:
return val
else:
def get_fn(val, num_updates):
return val
stepwise_average_check(self, factory, update_fn, get_fn)
# overall check
input_x = T.expand(T.random.randn([4]), [7, 4])
factory = lambda weights, **kwargs: tk.optim.WeightMovingAveraging(
weights, decay=0.9, zero_debias=True, **kwargs)
full_scan_average_check(self, factory, input_x, input_x[0])
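        # Editor's note (worked illustration, not part of the original test): with
        # zero_debias the raw average m_t = decay * m_{t-1} + (1 - decay) * x_t is
        # divided by (1 - decay ** t). For a constant input x and decay = 0.9 this
        # gives m_t = (1 - 0.9 ** t) * x, so the debiased value is exactly x, which
        # is what the full-scan check above asserts against input_x[0].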
| [
"[email protected]"
] | |
908ab077984dc561d27e833fbb86e6d280225929 | c34308d9e283d3689baeade246b69dad13eea0c1 | /homework/week5/study201736.py | 015a5575dfb0de380ed5acf9ed7422c0ab64d7a2 | [] | no_license | michaelChen07/studyPython | d19fe5762cfbccdff17248d7d5574939296d3954 | 11a2d9dd0b730cad464393deaf733b4a0903401f | refs/heads/master | 2021-01-19T00:20:27.347088 | 2017-05-13T08:43:44 | 2017-05-13T08:43:44 | 73,004,133 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 354 | py | #coding:utf-8
def findMaxNum(x,y,z):
if x >=y and x >= z:
return x
elif y > z:
return y
else:
return z
if __name__=="__main__":
    numList = raw_input(u"Please enter 3 numbers, separated by commas: ").split(",")
print numList
maxNum = findMaxNum(int(numList[0]),int(numList[1]),int(numList[2]))
print maxNum
| [
"[email protected]"
] | |
0b66b2d868d6ec5a557304ec44f1c3585252aa9c | 9ea18a9a52f6fe9077a6073dac72d19e21b2b5d6 | /setup.py | d2deac46ae69778452128ea1d6305c8bf86fe655 | [] | no_license | barneygale/twisted-enttec | 2e826e09159b82d3bc1fc281f8973b43341746cb | dec065475ab08ad4cc7a1fef301c904f2c033953 | refs/heads/master | 2020-04-04T14:44:32.010652 | 2018-11-03T18:24:59 | 2018-11-03T18:24:59 | 156,010,781 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | from setuptools import setup
setup(
name='twisted-enttec',
version='0.1',
author='Barney Gale',
author_email='[email protected]',
url='https://github.com/barneygale/twisted-enttec',
license='MIT',
description='Python/Twisted support for the Enttec DMX USB Pro',
long_description=open('README.rst').read(),
py_modules=['twisted_enttec'],
install_requires=[
'twisted',
'pyserial'
],
) | [
"[email protected]"
] | |
5fb9500044bc1c606e8a9670801610d19b190611 | c56807b801c887b8707611100efa3a0d7befea50 | /DeepHumanPrediction/Code/DeepHumanPrediction/Motion_Prediction_encoding_decoding/bvh_reader.py | 03027b871993c575cf4406b54021653e8f9eaf5e | [] | no_license | wy-luke/DeepHumanPrediction | 9e3777f6d66d4ab34bf16a935ded19422385f724 | 2e131b4b365e6f565c7ed8075a61f2b3c7b2d53a | refs/heads/master | 2021-08-30T19:26:57.537964 | 2017-12-19T05:47:36 | 2017-12-19T05:47:36 | 266,292,568 | 1 | 0 | null | 2020-05-23T08:13:36 | 2020-05-23T08:13:36 | null | UTF-8 | Python | false | false | 3,861 | py | # -*-coding: utf-8-*-
import numpy as np
import glob
from tqdm import *
import os
import time
def Motion_Data_Preprocessing(time_step = 100 , seed_timestep=20 , batch_Frame=5):
np.set_printoptions(threshold=1000000)
files = glob.glob("Data/ACCAD/Transform_Male1_bvh/Short_data/*.bvh")
time_step = time_step
seed_timestep = seed_timestep
batch_Frame = batch_Frame
xyz_position=3
complexity = False
Data = []
train_label_motion=[]
file_directory=[]
'''data normalization'''
Normalization_factor=1
dtype="int"
#Extract only file names, not path names.
for i in range(len(files)):
file_directory.append(os.path.basename(files[i]))
for file_name, i in tqdm(zip(files, range(len(files)))):
# time.sleep(0.01)
Raw = []
Mdata = []
MOTION = False
'''1.basic - Motion data preprocessing'''
print('Processed Data : {}'.format(i + 1))
try:
with open(file_name, 'r') as f:
while True:
line = f.readline()
if line == 'MOTION' + "\n" or MOTION:
MOTION = True
Raw.append(line)
if not line:
break
for raw in Raw[3:]:
                # exclude the Xposition, Yposition, Zposition channels
if dtype=="int":
temp=raw.split()[xyz_position:]
if complexity :
temp = [np.float32(i) * Normalization_factor for i in temp]
else : # complexity = False
temp=[np.floor(np.float32(i))*Normalization_factor for i in temp]
else:# dtype="str"
temp=raw.split()[xyz_position:]
Mdata.append(temp)
#Remove the blank line..
Mdata.pop()
'''2. Motion data preprocessing - easy for deeplearning'''
#data padding
if len(Mdata) < time_step:
frame = np.zeros(shape=(time_step - len(Mdata), len(Mdata[0])))
for i in range(time_step - len(Mdata)):
frame[i] = Mdata[-1]
Mdata = np.concatenate((Mdata, frame), axis=0)
else:
Mdata = Mdata[:time_step]
Data.append(Mdata)
except Exception as e:
raise e
'''3.final - Motion data preprocessing'''
for i in range(len(files)):
train_label_motion.append(Data[i][seed_timestep:])
print("train_motion shape = {}".format(np.shape(Data)))
print("train_label_motion shape = {}".format(np.shape(train_label_motion)))
train_motion = np.reshape(Data,(len(files),int(time_step/batch_Frame),len(Data[0][0])*batch_Frame))
train_label_motion = np.reshape(train_label_motion,(len(files),int(time_step-seed_timestep)*len(Data[0][0])))
print("-------------------Transform data shape--------------------")
print("transform_motion shape = {}".format(np.shape(train_motion)))
print("transform_label_motion shape = {}".format(np.shape(train_label_motion)))
return Normalization_factor , train_motion , train_label_motion , int(seed_timestep/batch_Frame) , int((time_step-seed_timestep)/batch_Frame) , len(train_motion[0][0]) , file_directory
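# Editor's note (worked example, not in the original reader): with time_step=140,
# seed_timestep=10 and batch_Frame=5, each clip becomes 140/5 = 28 grouped frames whose
# feature width is the per-frame rotation count times 5, the label keeps the last
# 140-10 = 130 frames flattened into one vector per file, and the returned seed and
# prediction lengths are 10/5 = 2 and 130/5 = 26 grouped frames respectively.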
if __name__ == "__main__":
print('Motion_Data_Preprocessing_Starting In Main')
Normalization_factor, train_motion, train_label_motion ,seed_timestep , pre_timestep , column , file_directory = Motion_Data_Preprocessing(time_step = 140, seed_timestep=10 , batch_Frame=5)
print("new seed_timestep : {}".format(seed_timestep))
print("new prediction_timestep : {}".format(pre_timestep))
print("new Motion_rotation_data : {}".format(column))
else:
print("Motion_Data_Preprocessing_Imported")
| [
"[email protected]"
] | |
6ed8248672240fa151a633c9e5ec55c98f340d87 | 5d20c3d81baef9cdb56f4bcff63d3f9248e2ac77 | /dist/Cython/Cython/Compiler/Symtab.py | 0a6e429364a46203fe604e6d0fd357c4e571f098 | [
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] | permissive | VuisterLab/cing | 225b76476d3b60367a4205d9c4fce7c539299701 | 718178f5c9ea2a3c70fc753fe73fb17cc9acac6c | refs/heads/master | 2022-11-14T01:37:31.020772 | 2016-06-21T14:00:33 | 2016-06-21T14:00:33 | 34,669,624 | 2 | 4 | null | 2022-10-27T19:24:20 | 2015-04-27T13:53:47 | Pascal | UTF-8 | Python | false | false | 58,945 | py | #
# Pyrex - Symbol Table
#
import re
import bisect
from Errors import warning, error, InternalError
import Options
import Naming
import PyrexTypes
from PyrexTypes import *
import TypeSlots
from TypeSlots import \
pyfunction_signature, pymethod_signature, \
get_special_method_signature, get_property_accessor_signature
import ControlFlow
import __builtin__
identifier_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*$")
class Entry:
# A symbol table entry in a Scope or ModuleNamespace.
#
# name string Python name of entity
# cname string C name of entity
# type PyrexType Type of entity
# doc string Doc string
# init string Initial value
# visibility 'private' or 'public' or 'extern'
# is_builtin boolean Is an entry in the Python builtins dict
# is_cglobal boolean Is a C global variable
# is_pyglobal boolean Is a Python module-level variable
# or class attribute during
# class construction
# is_member boolean Is an assigned class member
# is_variable boolean Is a variable
# is_cfunction boolean Is a C function
# is_cmethod boolean Is a C method of an extension type
# is_unbound_cmethod boolean Is an unbound C method of an extension type
# is_type boolean Is a type definition
# is_const boolean Is a constant
# is_property boolean Is a property of an extension type:
# doc_cname string or None C const holding the docstring
# getter_cname string C func for getting property
# setter_cname string C func for setting or deleting property
# is_self_arg boolean Is the "self" arg of an exttype method
# is_arg boolean Is the arg of a method
# is_local boolean Is a local variable
# is_readonly boolean Can't be assigned to
# func_cname string C func implementing Python func
# pos position Source position where declared
# namespace_cname string If is_pyglobal, the C variable
# holding its home namespace
# pymethdef_cname string PyMethodDef structure
# signature Signature Arg & return types for Python func
# init_to_none boolean True if initial value should be None
# as_variable Entry Alternative interpretation of extension
# type name or builtin C function as a variable
# xdecref_cleanup boolean Use Py_XDECREF for error cleanup
# in_cinclude boolean Suppress C declaration code
# enum_values [Entry] For enum types, list of values
# qualified_name string "modname.funcname" or "modname.classname"
# or "modname.classname.funcname"
# is_declared_generic boolean Is declared as PyObject * even though its
# type is an extension type
# as_module None Module scope, if a cimported module
# is_inherited boolean Is an inherited attribute of an extension type
# #interned_cname string C name of interned name string
# pystring_cname string C name of Python version of string literal
# is_interned boolean For string const entries, value is interned
# used boolean
# is_special boolean Is a special method or property accessor
# of an extension type
# defined_in_pxd boolean Is defined in a .pxd file (not just declared)
# api boolean Generate C API for C class or function
# utility_code string Utility code needed when this entry is used
borrowed = 0
init = ""
visibility = 'private'
is_builtin = 0
is_cglobal = 0
is_pyglobal = 0
is_member = 0
is_variable = 0
is_cfunction = 0
is_cmethod = 0
is_unbound_cmethod = 0
is_type = 0
is_const = 0
is_property = 0
doc_cname = None
getter_cname = None
setter_cname = None
is_self_arg = 0
is_arg = 0
is_local = 0
is_declared_generic = 0
is_readonly = 0
func_cname = None
doc = None
init_to_none = 0
as_variable = None
xdecref_cleanup = 0
in_cinclude = 0
as_module = None
is_inherited = 0
#interned_cname = None
pystring_cname = None
is_interned = 0
used = 0
is_special = 0
defined_in_pxd = 0
api = 0
utility_code = None
is_overridable = 0
def __init__(self, name, cname, type, pos = None, init = None):
self.name = name
self.cname = cname
self.type = type
self.pos = pos
self.init = init
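# Editor's sketch (illustrative, not part of the original Pyrex sources): an Entry
# simply binds a Python-level name to a C name and a PyrexType. The helper below is
# never called by the compiler and the chosen cname is hypothetical.
def _example_entry():
    e = Entry(name="spam", cname="__pyx_v_spam", type=py_object_type)
    e.is_variable = 1
    return e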
class Scope:
# name string Unqualified name
# outer_scope Scope or None Enclosing scope
# entries {string : Entry} Python name to entry, non-types
# const_entries [Entry] Constant entries
# type_entries [Entry] Struct/union/enum/typedef/exttype entries
# sue_entries [Entry] Struct/union/enum entries
# arg_entries [Entry] Function argument entries
# var_entries [Entry] User-defined variable entries
# pyfunc_entries [Entry] Python function entries
# cfunc_entries [Entry] C function entries
# c_class_entries [Entry] All extension type entries
# temp_entries [Entry] Temporary variable entries
# free_temp_entries [Entry] Temp variables currently unused
# temp_counter integer Counter for naming temp vars
# cname_to_entry {string : Entry} Temp cname to entry mapping
# int_to_entry {int : Entry} Temp cname to entry mapping
# pow_function_used boolean The C pow() function is used
# return_type PyrexType or None Return type of function owning scope
# is_py_class_scope boolean Is a Python class scope
# is_c_class_scope boolean Is an extension type scope
# scope_prefix string Disambiguator for C names
# in_cinclude boolean Suppress C declaration code
# qualified_name string "modname" or "modname.classname"
# pystring_entries [Entry] String const entries newly used as
# Python strings in this scope
# control_flow ControlFlow Used for keeping track of environment state
is_py_class_scope = 0
is_c_class_scope = 0
is_module_scope = 0
scope_prefix = ""
in_cinclude = 0
def __init__(self, name, outer_scope, parent_scope):
# The outer_scope is the next scope in the lookup chain.
# The parent_scope is used to derive the qualified name of this scope.
self.name = name
self.outer_scope = outer_scope
self.parent_scope = parent_scope
mangled_name = "%d%s_" % (len(name), name)
qual_scope = self.qualifying_scope()
if qual_scope:
self.qualified_name = qual_scope.qualify_name(name)
self.scope_prefix = qual_scope.scope_prefix + mangled_name
else:
self.qualified_name = name
self.scope_prefix = mangled_name
self.entries = {}
self.const_entries = []
self.type_entries = []
self.sue_entries = []
self.arg_entries = []
self.var_entries = []
self.pyfunc_entries = []
self.cfunc_entries = []
self.c_class_entries = []
self.defined_c_classes = []
self.imported_c_classes = {}
self.temp_entries = []
self.free_temp_entries = []
#self.pending_temp_entries = [] # TEMPORARY
self.temp_counter = 1
self.cname_to_entry = {}
self.pow_function_used = 0
self.string_to_entry = {}
self.num_to_entry = {}
self.obj_to_entry = {}
self.pystring_entries = []
self.control_flow = ControlFlow.LinearControlFlow()
def start_branching(self, pos):
self.control_flow = self.control_flow.start_branch(pos)
def next_branch(self, pos):
self.control_flow = self.control_flow.next_branch(pos)
def finish_branching(self, pos):
self.control_flow = self.control_flow.finish_branch(pos)
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.qualified_name)
def intern(self, name):
return self.global_scope().intern(name)
def qualifying_scope(self):
return self.parent_scope
def mangle(self, prefix, name = None):
if name:
return "%s%s%s" % (prefix, self.scope_prefix, name)
else:
return self.parent_scope.mangle(prefix, self.name)
def mangle_internal(self, name):
# Mangle an internal name so as not to clash with any
# user-defined name in this scope.
prefix = "%s%s_" % (Naming.pyrex_prefix, name)
return self.mangle(prefix)
#return self.parent_scope.mangle(prefix, self.name)
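    # Editor's note (hedged illustration, not in the original source): scope_prefix is
    # built from length-prefixed scope names, so mangling a name "x" declared inside a
    # scope called "foo" yields roughly "<prefix>3foo_x", while mangle_internal reserves
    # the "<pyrex_prefix><name>_" family of prefixes for generated helpers so they can
    # never collide with user identifiers.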
def global_scope(self):
# Return the module-level scope containing this scope.
return self.outer_scope.global_scope()
def builtin_scope(self):
# Return the module-level scope containing this scope.
return self.outer_scope.builtin_scope()
def declare(self, name, cname, type, pos):
# Create new entry, and add to dictionary if
# name is not None. Reports a warning if already
# declared.
if not self.in_cinclude and cname and re.match("^_[_A-Z]+$", cname):
# See http://www.gnu.org/software/libc/manual/html_node/Reserved-Names.html#Reserved-Names
warning(pos, "'%s' is a reserved name in C." % cname, -1)
dict = self.entries
if name and dict.has_key(name):
warning(pos, "'%s' redeclared " % name, 0)
entry = Entry(name, cname, type, pos = pos)
entry.in_cinclude = self.in_cinclude
if name:
entry.qualified_name = self.qualify_name(name)
dict[name] = entry
entry.scope = self
return entry
def qualify_name(self, name):
return "%s.%s" % (self.qualified_name, name)
def declare_const(self, name, type, value, pos, cname = None):
# Add an entry for a named constant.
if not cname:
if self.in_cinclude:
cname = name
else:
cname = self.mangle(Naming.enum_prefix, name)
entry = self.declare(name, cname, type, pos)
entry.is_const = 1
entry.value = value
return entry
def declare_type(self, name, type, pos,
cname = None, visibility = 'private', defining = 1):
# Add an entry for a type definition.
if not cname:
cname = name
entry = self.declare(name, cname, type, pos)
entry.visibility = visibility
entry.is_type = 1
if defining:
self.type_entries.append(entry)
return entry
def declare_typedef(self, name, base_type, pos, cname = None,
visibility = 'private'):
if not cname:
if self.in_cinclude or visibility == 'public':
cname = name
else:
cname = self.mangle(Naming.type_prefix, name)
type = PyrexTypes.CTypedefType(cname, base_type)
entry = self.declare_type(name, type, pos, cname, visibility)
type.qualified_name = entry.qualified_name
return entry
def declare_struct_or_union(self, name, kind, scope,
typedef_flag, pos, cname = None, visibility = 'private'):
# Add an entry for a struct or union definition.
if not cname:
if self.in_cinclude or visibility == 'public':
cname = name
else:
cname = self.mangle(Naming.type_prefix, name)
entry = self.lookup_here(name)
if not entry:
type = CStructOrUnionType(name, kind, scope, typedef_flag, cname)
entry = self.declare_type(name, type, pos, cname,
visibility = visibility, defining = scope is not None)
self.sue_entries.append(entry)
else:
if not (entry.is_type and entry.type.is_struct_or_union):
warning(pos, "'%s' redeclared " % name, 0)
elif scope and entry.type.scope:
warning(pos, "'%s' already defined (ignoring second definition)" % name, 0)
else:
self.check_previous_typedef_flag(entry, typedef_flag, pos)
self.check_previous_visibility(entry, visibility, pos)
if scope:
entry.type.scope = scope
self.type_entries.append(entry)
if not scope and not entry.type.scope:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
return entry
def check_previous_typedef_flag(self, entry, typedef_flag, pos):
if typedef_flag != entry.type.typedef_flag:
error(pos, "'%s' previously declared using '%s'" % (
entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
def check_previous_visibility(self, entry, visibility, pos):
if entry.visibility != visibility:
error(pos, "'%s' previously declared as '%s'" % (
entry.name, entry.visibility))
def declare_enum(self, name, pos, cname, typedef_flag,
visibility = 'private'):
if name:
if not cname:
if self.in_cinclude or visibility == 'public':
cname = name
else:
cname = self.mangle(Naming.type_prefix, name)
type = CEnumType(name, cname, typedef_flag)
else:
type = PyrexTypes.c_anon_enum_type
entry = self.declare_type(name, type, pos, cname = cname,
visibility = visibility)
entry.enum_values = []
self.sue_entries.append(entry)
return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a variable.
if not cname:
if visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.var_prefix, name)
entry = self.declare(name, cname, type, pos)
entry.is_variable = 1
entry.visibility = visibility
self.control_flow.set_state((), (name, 'initalized'), False)
return entry
def declare_builtin(self, name, pos):
return self.outer_scope.declare_builtin(name, pos)
def declare_pyfunction(self, name, pos):
# Add an entry for a Python function.
entry = self.declare_var(name, py_object_type, pos)
entry.signature = pyfunction_signature
self.pyfunc_entries.append(entry)
return entry
def register_pyfunction(self, entry):
self.pyfunc_entries.append(entry)
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
# Add an entry for a C function.
entry = self.lookup_here(name)
if entry:
if visibility != 'private' and visibility != entry.visibility:
warning(pos, "Function '%s' previously declared as '%s'" % (name, entry.visibility), 1)
if not entry.type.same_as(type):
warning(pos, "Function signature does not match previous declaration", 1)
entry.type = type
else:
if not cname:
if api or visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.func_prefix, name)
entry = self.add_cfunction(name, type, pos, cname, visibility)
entry.func_cname = cname
if in_pxd and visibility != 'extern':
entry.defined_in_pxd = 1
if api:
entry.api = 1
if not defining and not in_pxd and visibility != 'extern':
error(pos, "Non-extern C function declared but not defined")
return entry
def add_cfunction(self, name, type, pos, cname, visibility):
# Add a C function entry without giving it a func_cname.
entry = self.declare(name, cname, type, pos)
entry.is_cfunction = 1
entry.visibility = visibility
self.cfunc_entries.append(entry)
return entry
def find(self, name, pos):
# Look up name, report error if not found.
entry = self.lookup(name)
if entry:
return entry
else:
error(pos, "'%s' is not declared" % name)
def lookup(self, name):
# Look up name in this scope or an enclosing one.
# Return None if not found.
return (self.lookup_here(name)
or (self.outer_scope and self.outer_scope.lookup(name))
or None)
def lookup_here(self, name):
# Look up in this scope only, return None if not found.
return self.entries.get(name, None)
def lookup_target(self, name):
# Look up name in this scope only. Declare as Python
# variable if not found.
entry = self.lookup_here(name)
if not entry:
entry = self.declare_var(name, py_object_type, None)
return entry
def add_string_const(self, value):
# Add an entry for a string constant.
cname = self.new_const_cname()
entry = Entry("", cname, c_char_array_type, init = value)
entry.used = 1
self.const_entries.append(entry)
return entry
def get_string_const(self, value):
# Get entry for string constant. Returns an existing
# one if possible, otherwise creates a new one.
genv = self.global_scope()
entry = genv.string_to_entry.get(value)
if not entry:
entry = self.add_string_const(value)
genv.string_to_entry[value] = entry
return entry
def add_py_string(self, entry):
# If not already done, allocate a C name for a Python version of
# a string literal, and add it to the list of Python strings to
# be created at module init time. If the string resembles a
# Python identifier, it will be interned.
if not entry.pystring_cname:
value = entry.init
if identifier_pattern.match(value) and isinstance(value, str):
entry.pystring_cname = self.intern(value)
entry.is_interned = 1
else:
entry.pystring_cname = entry.cname + "p"
self.pystring_entries.append(entry)
self.global_scope().all_pystring_entries.append(entry)
def add_py_num(self, value):
# Add an entry for an int constant.
cname = "%s%s" % (Naming.interned_num_prefix, value)
cname = cname.replace('-', 'neg_').replace('.','_')
entry = Entry("", cname, py_object_type, init = value)
entry.used = 1
entry.is_interned = 1
self.const_entries.append(entry)
self.interned_nums.append(entry)
return entry
def get_py_num(self, value):
# Get entry for int constant. Returns an existing
# one if possible, otherwise creates a new one.
genv = self.global_scope()
entry = genv.num_to_entry.get(value)
if not entry:
entry = genv.add_py_num(value)
genv.num_to_entry[value] = entry
genv.pynum_entries.append(entry)
return entry
def add_py_obj(self, obj, c_prefix=''):
obj.check_const()
cname = self.new_const_cname(c_prefix)
entry = Entry("", cname, py_object_type, init = value)
entry.used = 1
entry.is_interned = 1
self.const_entries.append(entry)
self.interned_objs.append(entry)
return entry
def get_py_obj(self, obj, c_prefix=''):
# Get entry for a generic constant. Returns an existing
# one if possible, otherwise creates a new one.
genv = self.global_scope()
entry = genv.obj_to_entry.get(obj)
if not entry:
            entry = genv.add_py_obj(obj, c_prefix)
genv.obj_to_entry[obj] = entry
return entry
def new_const_cname(self):
# Create a new globally-unique name for a constant.
return self.global_scope().new_const_cname()
def allocate_temp(self, type):
# Allocate a temporary variable of the given type from the
# free list if available, otherwise create a new one.
# Returns the cname of the variable.
for entry in self.free_temp_entries:
if entry.type == type:
self.free_temp_entries.remove(entry)
return entry.cname
n = self.temp_counter
self.temp_counter = n + 1
cname = "%s%d" % (Naming.pyrex_prefix, n)
entry = Entry("", cname, type)
entry.used = 1
if type.is_pyobject or type == c_py_ssize_t_type:
entry.init = "0"
self.cname_to_entry[entry.cname] = entry
self.temp_entries.append(entry)
return entry.cname
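    # Editor's sketch (illustration only): temporaries are pooled per C type, so a
    # sequence like  cname = scope.allocate_temp(py_object_type); ...;
    # scope.release_temp(cname)  lets the next allocate_temp(py_object_type) reuse the
    # same cname from free_temp_entries instead of minting a new one from temp_counter.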
def allocate_temp_pyobject(self):
# Allocate a temporary PyObject variable.
return self.allocate_temp(py_object_type)
def release_temp(self, cname):
# Release a temporary variable for re-use.
if not cname: # can happen when type of an expr is void
return
entry = self.cname_to_entry[cname]
if entry in self.free_temp_entries:
raise InternalError("Temporary variable %s released more than once"
% cname)
self.free_temp_entries.append(entry)
def temps_in_use(self):
# Return a new list of temp entries currently in use.
return [entry for entry in self.temp_entries
if entry not in self.free_temp_entries]
#def recycle_pending_temps(self):
# # Obsolete
# pass
def use_utility_code(self, new_code):
self.global_scope().use_utility_code(new_code)
def generate_library_function_declarations(self, code):
# Generate extern decls for C library funcs used.
#if self.pow_function_used:
# code.putln("%s double pow(double, double);" % Naming.extern_c_macro)
pass
def defines_any(self, names):
# Test whether any of the given names are
# defined in this scope.
for name in names:
if name in self.entries:
return 1
return 0
class PreImportScope(Scope):
def __init__(self):
Scope.__init__(self, Options.pre_import, None, None)
def declare_builtin(self, name, pos):
entry = self.declare(name, name, py_object_type, pos)
entry.is_variable = True
entry.is_pyglobal = True
entry.namespace_cname = Naming.preimport_cname
return entry
class BuiltinScope(Scope):
# The builtin namespace.
def __init__(self):
if Options.pre_import is None:
Scope.__init__(self, "__builtin__", None, None)
else:
Scope.__init__(self, "__builtin__", PreImportScope(), None)
for name, definition in self.builtin_entries.iteritems():
cname, type = definition
self.declare_var(name, type, None, cname)
def declare_builtin(self, name, pos):
if not hasattr(__builtin__, name):
if self.outer_scope is not None:
return self.outer_scope.declare_builtin(name, pos)
else:
error(pos, "undeclared name not builtin: %s"%name)
def declare_builtin_cfunction(self, name, type, cname, python_equiv = None,
utility_code = None):
# If python_equiv == "*", the Python equivalent has the same name
# as the entry, otherwise it has the name specified by python_equiv.
entry = self.declare_cfunction(name, type, None, cname)
entry.utility_code = utility_code
if python_equiv:
if python_equiv == "*":
python_equiv = name
var_entry = Entry(python_equiv, python_equiv, py_object_type)
var_entry.is_variable = 1
var_entry.is_builtin = 1
entry.as_variable = var_entry
return entry
def builtin_scope(self):
return self
builtin_entries = {
"int": ["((PyObject*)&PyInt_Type)", py_object_type],
"long": ["((PyObject*)&PyLong_Type)", py_object_type],
"float": ["((PyObject*)&PyFloat_Type)", py_object_type],
"str": ["((PyObject*)&PyString_Type)", py_object_type],
"unicode":["((PyObject*)&PyUnicode_Type)", py_object_type],
"tuple": ["((PyObject*)&PyTuple_Type)", py_object_type],
"list": ["((PyObject*)&PyList_Type)", py_object_type],
"dict": ["((PyObject*)&PyDict_Type)", py_object_type],
"set": ["((PyObject*)&PySet_Type)", py_object_type],
"frozenset": ["((PyObject*)&PyFrozenSet_Type)", py_object_type],
"type": ["((PyObject*)&PyType_Type)", py_object_type],
"slice": ["((PyObject*)&PySlice_Type)", py_object_type],
"file": ["((PyObject*)&PyFile_Type)", py_object_type],
"None": ["Py_None", py_object_type],
"False": ["Py_False", py_object_type],
"True": ["Py_True", py_object_type],
}
class ModuleScope(Scope):
# module_name string Python name of the module
# module_cname string C name of Python module object
# #module_dict_cname string C name of module dict object
# method_table_cname string C name of method table
# doc string Module doc string
# doc_cname string C name of module doc string
# const_counter integer Counter for naming constants
# utility_code_used [string] Utility code to be included
# default_entries [Entry] Function argument default entries
# python_include_files [string] Standard Python headers to be included
# include_files [string] Other C headers to be included
# string_to_entry {string : Entry} Map string const to entry
# context Context
# parent_module Scope Parent in the import namespace
# module_entries {string : Entry} For cimport statements
# type_names {string : 1} Set of type names (used during parsing)
# pxd_file_loaded boolean Corresponding .pxd file has been processed
# cimported_modules [ModuleScope] Modules imported with cimport
# intern_map {string : string} Mapping from Python names to interned strs
# interned_names [string] Interned names pending generation of declarations
# interned_nums [int/long] Interned numeric constants
# all_pystring_entries [Entry] Python string consts from all scopes
# types_imported {PyrexType : 1} Set of types for which import code generated
is_module_scope = 1
def __init__(self, name, parent_module, context):
self.parent_module = parent_module
outer_scope = context.find_submodule("__builtin__")
Scope.__init__(self, name, outer_scope, parent_module)
self.module_name = name
self.context = context
self.module_cname = Naming.module_cname
self.module_dict_cname = Naming.moddict_cname
self.method_table_cname = Naming.methtable_cname
self.doc = ""
self.doc_cname = Naming.moddoc_cname
self.const_counter = 1
self.utility_code_used = []
self.default_entries = []
self.module_entries = {}
self.python_include_files = ["Python.h", "structmember.h"]
self.include_files = []
self.type_names = {}
self.pxd_file_loaded = 0
self.cimported_modules = []
self.intern_map = {}
self.interned_names = []
self.interned_nums = []
self.interned_objs = []
self.all_pystring_entries = []
self.types_imported = {}
self.pynum_entries = []
self.has_extern_class = 0
self.cached_builtins = []
self.undeclared_cached_builtins = []
def qualifying_scope(self):
return self.parent_module
def global_scope(self):
return self
def declare_builtin(self, name, pos):
if not hasattr(__builtin__, name):
if self.outer_scope is not None:
return self.outer_scope.declare_builtin(name, pos)
else:
error(pos, "undeclared name not builtin: %s"%name)
if Options.cache_builtins:
for entry in self.cached_builtins:
if entry.name == name:
return entry
entry = self.declare(None, None, py_object_type, pos)
if Options.cache_builtins:
entry.is_builtin = 1
entry.is_const = 1
entry.name = name
entry.cname = Naming.builtin_prefix + name
self.cached_builtins.append(entry)
self.undeclared_cached_builtins.append(entry)
else:
entry.is_builtin = 1
return entry
def intern(self, name):
intern_map = self.intern_map
cname = intern_map.get(name)
if not cname:
cname = Naming.interned_prefix + name
intern_map[name] = cname
self.interned_names.append(name)
return cname
def find_module(self, module_name, pos):
# Find a module in the import namespace, interpreting
# relative imports relative to this module's parent.
# Finds and parses the module's .pxd file if the module
# has not been referenced before.
return self.global_scope().context.find_module(
module_name, relative_to = self.parent_module, pos = pos)
def find_submodule(self, name):
# Find and return scope for a submodule of this module,
# creating a new empty one if necessary. Doesn't parse .pxd.
scope = self.lookup_submodule(name)
if not scope:
scope = ModuleScope(name,
parent_module = self, context = self.context)
self.module_entries[name] = scope
return scope
def lookup_submodule(self, name):
# Return scope for submodule of this module, or None.
return self.module_entries.get(name, None)
def add_include_file(self, filename):
if filename not in self.python_include_files \
and filename not in self.include_files:
self.include_files.append(filename)
def add_imported_module(self, scope):
if scope not in self.cimported_modules:
self.cimported_modules.append(scope)
def add_imported_entry(self, name, entry, pos):
if entry not in self.entries:
self.entries[name] = entry
else:
warning(pos, "'%s' redeclared " % name, 0)
def declare_module(self, name, scope, pos):
# Declare a cimported module. This is represented as a
# Python module-level variable entry with a module
# scope attached to it. Reports an error and returns
# None if previously declared as something else.
entry = self.lookup_here(name)
if entry:
if entry.is_pyglobal and entry.as_module is scope:
return entry # Already declared as the same module
if not (entry.is_pyglobal and not entry.as_module):
# SAGE -- I put this here so Pyrex
# cimport's work across directories.
# Currently it tries to multiply define
# every module appearing in an import list.
# It shouldn't be an error for a module
# name to appear again, and indeed the generated
# code compiles fine.
return entry
warning(pos, "'%s' redeclared " % name, 0)
return None
else:
entry = self.declare_var(name, py_object_type, pos)
entry.as_module = scope
self.cimported_modules.append(scope)
return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a global variable. If it is a Python
# object type, and not declared with cdef, it will live
# in the module dictionary, otherwise it will be a C
# global variable.
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
if not visibility in ('private', 'public', 'extern'):
error(pos, "Module-level variable cannot be declared %s" % visibility)
if not is_cdef:
if not (type.is_pyobject and not type.is_extension_type):
raise InternalError(
"Non-cdef global variable is not a generic Python object")
entry.is_pyglobal = 1
entry.namespace_cname = self.module_cname
#if Options.intern_names:
# entry.interned_cname = self.intern(name)
else:
entry.is_cglobal = 1
self.var_entries.append(entry)
return entry
def declare_global(self, name, pos):
entry = self.lookup_here(name)
if not entry:
self.declare_var(name, py_object_type, pos)
def add_default_value(self, type):
# Add an entry for holding a function argument
# default value.
cname = self.new_const_cname()
entry = Entry("", cname, type)
self.default_entries.append(entry)
return entry
def new_const_cname(self, prefix=''):
# Create a new globally-unique name for a constant.
n = self.const_counter
self.const_counter = n + 1
return "%s%s_%d" % (Naming.const_prefix, prefix, n)
def use_utility_code(self, new_code):
# Add string to list of utility code to be included,
# if not already there (tested using 'is').
for old_code in self.utility_code_used:
if old_code is new_code:
return
self.utility_code_used.append(new_code)
def declare_c_class(self, name, pos, defining, implementing,
module_name, base_type, objstruct_cname, typeobj_cname,
visibility, typedef_flag, api):
#
# Look for previous declaration as a type
#
entry = self.lookup_here(name)
if entry:
type = entry.type
if not (entry.is_type and type.is_extension_type):
entry = None # Will cause an error when we redeclare it
else:
self.check_previous_typedef_flag(entry, typedef_flag, pos)
if base_type != type.base_type:
error(pos, "Base type does not match previous declaration")
#
# Make a new entry if needed
#
if not entry:
type = PyExtensionType(name, typedef_flag, base_type)
type.pos = pos
if visibility == 'extern':
type.module_name = module_name
else:
type.module_name = self.qualified_name
type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
entry = self.declare_type(name, type, pos, visibility = visibility,
defining = 0)
if objstruct_cname:
type.objstruct_cname = objstruct_cname
elif not entry.in_cinclude:
type.objstruct_cname = self.mangle(Naming.objstruct_prefix, name)
else:
error(entry.pos,
"Object name required for 'public' or 'extern' C class")
self.attach_var_entry_to_c_class(entry)
self.c_class_entries.append(entry)
#
# Check for re-definition and create scope if needed
#
if not type.scope:
if defining or implementing:
scope = CClassScope(name = name, outer_scope = self,
visibility = visibility)
if base_type:
scope.declare_inherited_c_attributes(base_type.scope)
type.set_scope(scope)
self.type_entries.append(entry)
else:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
else:
if defining and type.scope.defined:
error(pos, "C class '%s' already defined" % name)
elif implementing and type.scope.implemented:
error(pos, "C class '%s' already implemented" % name)
#
# Fill in options, checking for compatibility with any previous declaration
#
if defining:
entry.defined_in_pxd = 1
if implementing: # So that filenames in runtime exceptions refer to
entry.pos = pos # the .pyx file and not the .pxd file
if visibility != 'private' and entry.visibility != visibility:
error(pos, "Class '%s' previously declared as '%s'"
% (name, entry.visibility))
if api:
entry.api = 1
if objstruct_cname:
if type.objstruct_cname and type.objstruct_cname != objstruct_cname:
error(pos, "Object struct name differs from previous declaration")
type.objstruct_cname = objstruct_cname
if typeobj_cname:
if type.typeobj_cname and type.typeobj_cname != typeobj_cname:
error(pos, "Type object name differs from previous declaration")
type.typeobj_cname = typeobj_cname
#
# Return new or existing entry
#
return entry
def check_for_illegal_incomplete_ctypedef(self, typedef_flag, pos):
if typedef_flag and not self.in_cinclude:
error(pos, "Forward-referenced type must use 'cdef', not 'ctypedef'")
def allocate_vtable_names(self, entry):
# If extension type has a vtable, allocate vtable struct and
# slot names for it.
type = entry.type
if type.base_type and type.base_type.vtabslot_cname:
#print "...allocating vtabslot_cname because base type has one" ###
type.vtabslot_cname = "%s.%s" % (
Naming.obj_base_cname, type.base_type.vtabslot_cname)
elif type.scope and type.scope.cfunc_entries:
#print "...allocating vtabslot_cname because there are C methods" ###
type.vtabslot_cname = Naming.vtabslot_cname
if type.vtabslot_cname:
#print "...allocating other vtable related cnames" ###
type.vtabstruct_cname = self.mangle(Naming.vtabstruct_prefix, entry.name)
type.vtabptr_cname = self.mangle(Naming.vtabptr_prefix, entry.name)
def check_c_classes(self):
# Performs post-analysis checking and finishing up of extension types
# being implemented in this module. This is called only for the main
# .pyx file scope, not for cimported .pxd scopes.
#
# Checks all extension types declared in this scope to
# make sure that:
#
# * The extension type is implemented
# * All required object and type names have been specified or generated
# * All non-inherited C methods are implemented
#
# Also allocates a name for the vtable if needed.
#
debug_check_c_classes = 0
if debug_check_c_classes:
print("Scope.check_c_classes: checking scope " + self.qualified_name)
for entry in self.c_class_entries:
if debug_check_c_classes:
print("...entry %s %s" % (entry.name, entry))
print("......type = " + entry.type)
print("......visibility = " + entry.visibility)
type = entry.type
name = entry.name
visibility = entry.visibility
# Check defined
if not type.scope:
error(entry.pos, "C class '%s' is declared but not defined" % name)
# Generate typeobj_cname
if visibility != 'extern' and not type.typeobj_cname:
type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
## Generate typeptr_cname
#type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
# Check C methods defined
if type.scope:
for method_entry in type.scope.cfunc_entries:
if not method_entry.is_inherited and not method_entry.func_cname:
error(method_entry.pos, "C method '%s' is declared but not defined" %
method_entry.name)
# Allocate vtable name if necessary
if type.vtabslot_cname:
#print "ModuleScope.check_c_classes: allocating vtable cname for", self ###
type.vtable_cname = self.mangle(Naming.vtable_prefix, entry.name)
def attach_var_entry_to_c_class(self, entry):
# The name of an extension class has to serve as both a type
# name and a variable name holding the type object. It is
# represented in the symbol table by a type entry with a
# variable entry attached to it. For the variable entry,
# we use a read-only C global variable whose name is an
# expression that refers to the type object.
var_entry = Entry(name = entry.name,
type = py_object_type,
pos = entry.pos,
cname = "((PyObject*)%s)" % entry.type.typeptr_cname)
var_entry.is_variable = 1
var_entry.is_cglobal = 1
var_entry.is_readonly = 1
entry.as_variable = var_entry
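        # Editor's note (hedged, not in the original source): the net effect is that an
        # extension type appears twice in the symbol table, once as a type entry and
        # once, through as_variable, as a read-only PyObject* whose cname is merely a
        # cast of typeptr_cname, so the class name works in cdef declarations and as an
        # ordinary Python object at runtime.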
class LocalScope(Scope):
def __init__(self, name, outer_scope):
Scope.__init__(self, name, outer_scope, outer_scope)
def mangle(self, prefix, name):
return prefix + name
def declare_arg(self, name, type, pos):
# Add an entry for an argument of a function.
cname = self.mangle(Naming.var_prefix, name)
entry = self.declare(name, cname, type, pos)
entry.is_variable = 1
if type.is_pyobject:
entry.init = "0"
entry.is_arg = 1
#entry.borrowed = 1 # Not using borrowed arg refs for now
self.arg_entries.append(entry)
self.control_flow.set_state((), (name, 'source'), 'arg')
return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a local variable.
if visibility in ('public', 'readonly'):
error(pos, "Local variable cannot be declared %s" % visibility)
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
if type.is_pyobject and not Options.init_local_none:
entry.init = "0"
entry.init_to_none = type.is_pyobject and Options.init_local_none
entry.is_local = 1
self.var_entries.append(entry)
return entry
def declare_global(self, name, pos):
# Pull entry from global scope into local scope.
if self.lookup_here(name):
warning(pos, "'%s' redeclared ", 0)
else:
entry = self.global_scope().lookup_target(name)
self.entries[name] = entry
class StructOrUnionScope(Scope):
# Namespace of a C struct or union.
def __init__(self):
Scope.__init__(self, "?", None, None)
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0, allow_pyobject = 0):
# Add an entry for an attribute.
if not cname:
cname = name
if type.is_cfunction:
type = CPtrType(type)
entry = self.declare(name, cname, type, pos)
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_pyobject and not allow_pyobject:
error(pos,
"C struct/union member cannot be a Python object")
if visibility != 'private':
error(pos,
"C struct/union member cannot be declared %s" % visibility)
return entry
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
self.declare_var(name, type, pos, cname, visibility)
class ClassScope(Scope):
# Abstract base class for namespace of
# Python class or extension type.
#
# class_name string Pyrex name of the class
# scope_prefix string Additional prefix for names
# declared in the class
# doc string or None Doc string
def __init__(self, name, outer_scope):
Scope.__init__(self, name, outer_scope, outer_scope)
self.class_name = name
self.doc = None
def add_string_const(self, value):
return self.outer_scope.add_string_const(value)
def lookup(self, name):
if name == "classmethod":
# We don't want to use the builtin classmethod here 'cause it won't do the
# right thing in this scope (as the class memebers aren't still functions).
# Don't want to add a cfunction to this scope 'cause that would mess with
# the type definition, so we just return the right entry.
self.use_utility_code(classmethod_utility_code)
entry = Entry("classmethod",
"__Pyx_Method_ClassMethod",
CFuncType(py_object_type, [CFuncTypeArg("", py_object_type, None)], 0, 0))
entry.is_cfunction = 1
return entry
else:
return Scope.lookup(self, name)
class PyClassScope(ClassScope):
# Namespace of a Python class.
#
# class_dict_cname string C variable holding class dict
# class_obj_cname string C variable holding class object
is_py_class_scope = 1
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
entry.is_pyglobal = 1
entry.namespace_cname = self.class_obj_cname
#if Options.intern_names:
# entry.interned_cname = self.intern(name)
return entry
def allocate_temp(self, type):
return self.outer_scope.allocate_temp(type)
def release_temp(self, cname):
self.outer_scope.release_temp(cname)
#def recycle_pending_temps(self):
# self.outer_scope.recycle_pending_temps()
def add_default_value(self, type):
return self.outer_scope.add_default_value(type)
class CClassScope(ClassScope):
# Namespace of an extension type.
#
# parent_type CClassType
# #typeobj_cname string or None
# #objstruct_cname string
# method_table_cname string
# member_table_cname string
# getset_table_cname string
# has_pyobject_attrs boolean Any PyObject attributes?
# public_attr_entries boolean public/readonly attrs
# property_entries [Entry]
# defined boolean Defined in .pxd file
# implemented boolean Defined in .pyx file
# inherited_var_entries [Entry] Adapted var entries from base class
is_c_class_scope = 1
def __init__(self, name, outer_scope, visibility):
ClassScope.__init__(self, name, outer_scope)
if visibility != 'extern':
self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
self.member_table_cname = outer_scope.mangle(Naming.memtab_prefix, name)
self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
self.has_pyobject_attrs = 0
self.public_attr_entries = []
self.property_entries = []
self.inherited_var_entries = []
self.defined = 0
self.implemented = 0
def needs_gc(self):
# If the type or any of its base types have Python-valued
# C attributes, then it needs to participate in GC.
return self.has_pyobject_attrs or \
(self.parent_type.base_type and \
self.parent_type.base_type.scope.needs_gc())
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
if is_cdef:
# Add an entry for an attribute.
if self.defined:
error(pos,
"C attributes cannot be added in implementation part of"
" extension type")
if get_special_method_signature(name):
error(pos,
"The name '%s' is reserved for a special method."
% name)
if not cname:
cname = name
entry = self.declare(name, cname, type, pos)
entry.visibility = visibility
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_pyobject:
self.has_pyobject_attrs = 1
if visibility not in ('private', 'public', 'readonly'):
error(pos,
"Attribute of extension type cannot be declared %s" % visibility)
if visibility in ('public', 'readonly'):
if type.pymemberdef_typecode:
self.public_attr_entries.append(entry)
if name == "__weakref__":
error(pos, "Special attribute __weakref__ cannot be exposed to Python")
else:
error(pos,
"C attribute of type '%s' cannot be accessed from Python" % type)
if visibility == 'public' and type.is_extension_type:
error(pos,
"Non-generic Python attribute cannot be exposed for writing from Python")
return entry
else:
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
entry.is_member = 1
entry.is_pyglobal = 1 # xxx: is_pyglobal changes behaviour in so many places that
# I keep it in for now. is_member should be enough
# later on
entry.namespace_cname = "(PyObject *)%s" % self.parent_type.typeptr_cname
if Options.intern_names:
entry.interned_cname = self.intern(name)
return entry
def declare_pyfunction(self, name, pos):
# Add an entry for a method.
if name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'):
error(pos, "Special method %s must be implemented via __richcmp__" % name)
if name == "__new__":
warning(pos, "__new__ method of extension type will change semantics "
"in a future version of Pyrex and Cython. Use __cinit__ instead.")
name = "__cinit__"
entry = self.declare_var(name, py_object_type, pos)
special_sig = get_special_method_signature(name)
if special_sig:
# Special methods get put in the method table with a particular
# signature declared in advance.
entry.signature = special_sig
entry.is_special = 1
else:
entry.signature = pymethod_signature
entry.is_special = 0
self.pyfunc_entries.append(entry)
return entry
def lookup_here(self, name):
if name == "__new__":
name = "__cinit__"
return ClassScope.lookup_here(self, name)
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
if get_special_method_signature(name):
error(pos, "Special methods must be declared with 'def', not 'cdef'")
args = type.args
if not args:
error(pos, "C method has no self argument")
elif not args[0].type.same_as(self.parent_type):
error(pos, "Self argument of C method does not match parent type")
entry = self.lookup_here(name)
if entry:
if not entry.is_cfunction:
warning(pos, "'%s' redeclared " % name, 0)
else:
if defining and entry.func_cname:
error(pos, "'%s' already defined" % name)
#print "CClassScope.declare_cfunction: checking signature" ###
if type.same_c_signature_as(entry.type, as_cmethod = 1):
pass
elif type.compatible_signature_with(entry.type, as_cmethod = 1):
if type.optional_arg_count and not type.original_sig.optional_arg_count:
# Need to put a wrapper taking no optional arguments
# into the method table.
wrapper_func_cname = self.mangle(Naming.func_prefix, name) + Naming.no_opt_args
wrapper_func_name = name + Naming.no_opt_args
if entry.type.optional_arg_count:
old_entry = self.lookup_here(wrapper_func_name)
old_entry.func_cname = wrapper_func_cname
else:
entry.func_cname = wrapper_func_cname
entry.name = wrapper_func_name
entry = self.add_cfunction(name, type, pos, cname or name, visibility)
defining = 1
entry.type = type
# if type.narrower_c_signature_than(entry.type, as_cmethod = 1):
# entry.type = type
else:
error(pos, "Signature not compatible with previous declaration")
else:
if self.defined:
error(pos,
"C method '%s' not previously declared in definition part of"
" extension type" % name)
entry = self.add_cfunction(name, type, pos, cname or name, visibility)
if defining:
entry.func_cname = self.mangle(Naming.func_prefix, name)
return entry
def add_cfunction(self, name, type, pos, cname, visibility):
# Add a cfunction entry without giving it a func_cname.
entry = ClassScope.add_cfunction(self, name, type, pos, cname, visibility)
entry.is_cmethod = 1
return entry
def declare_property(self, name, doc, pos):
entry = self.declare(name, name, py_object_type, pos)
entry.is_property = 1
entry.doc = doc
entry.scope = PropertyScope(name,
outer_scope = self.global_scope(), parent_scope = self)
entry.scope.parent_type = self.parent_type
self.property_entries.append(entry)
return entry
def declare_inherited_c_attributes(self, base_scope):
# Declare entries for all the C attributes of an
# inherited type, with cnames modified appropriately
# to work with this type.
def adapt(cname):
return "%s.%s" % (Naming.obj_base_cname, base_entry.cname)
for base_entry in \
base_scope.inherited_var_entries + base_scope.var_entries:
entry = self.declare(base_entry.name, adapt(base_entry.cname),
base_entry.type, None)
entry.is_variable = 1
self.inherited_var_entries.append(entry)
for base_entry in base_scope.cfunc_entries:
entry = self.add_cfunction(base_entry.name, base_entry.type, None,
adapt(base_entry.cname), base_entry.visibility)
entry.is_inherited = 1
def allocate_temp(self, type):
return Scope.allocate_temp(self.global_scope(), type)
def release_temp(self, cname):
return Scope.release_temp(self.global_scope(), cname)
class PropertyScope(Scope):
# Scope holding the __get__, __set__ and __del__ methods for
# a property of an extension type.
#
# parent_type PyExtensionType The type to which the property belongs
def declare_pyfunction(self, name, pos):
# Add an entry for a method.
signature = get_property_accessor_signature(name)
if signature:
entry = self.declare(name, name, py_object_type, pos)
entry.is_special = 1
entry.signature = signature
return entry
else:
error(pos, "Only __get__, __set__ and __del__ methods allowed "
"in a property declaration")
return None
# Should this go elsewhere (and then get imported)?
#------------------------------------------------------------------------------------
classmethod_utility_code = [
"""
#include "descrobject.h"
static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
""","""
static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
/* It appears that PyMethodDescr_Type is not anywhere exposed in the Python/C API */
/* if (!PyObject_TypeCheck(method, &PyMethodDescr_Type)) { */
if (strcmp(method->ob_type->tp_name, "method_descriptor") == 0) { /* cdef classes */
PyMethodDescrObject *descr = (PyMethodDescrObject *)method;
return PyDescr_NewClassMethod(descr->d_type, descr->d_method);
}
else if (PyMethod_Check(method)) { /* python classes */
return PyClassMethod_New(PyMethod_GET_FUNCTION(method));
}
PyErr_Format(PyExc_TypeError, "Class-level classmethod() can only be called on a method_descriptor or instance method.");
return NULL;
}
"""
]
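# How the utility code above gets used: ClassScope.lookup() injects it whenever the
# name "classmethod" is looked up inside a class body, and the C helper then wraps a
# method_descriptor (cdef class) or an instance method (Python class) in a real
# classmethod object.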
| [
"jurgenfd@e0b8d8eb-cb4c-0410-9204-db9c3ed855ff"
] | jurgenfd@e0b8d8eb-cb4c-0410-9204-db9c3ed855ff |
6b55090216aeb866ba417c914c071c4a2a7a7054 | 257bd63361aa846ffdacdc15edaecf84c6364e78 | /psou2/pyanal2_Tensorflow/pack1/tensor13_linear3.py | 9774a149e61814165285e2e138c00a80426e6f28 | [] | no_license | gom4851/hcjeon | 86dcfd05ce47a13d066f13fe187d6a63142fb9fe | 59a00ca9499f30e50127bb16eb510553e88ace43 | refs/heads/master | 2020-06-04T23:16:08.632278 | 2019-01-15T09:54:08 | 2019-01-15T09:54:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 987 | py | '''
Created on 2018. 12. 13.
Predict the total exam score. Uses multiple independent variables.
'''
import tensorflow as tf
x1_data = [73., 93., 89., 96., 73.]
x2_data = [80., 88., 91., 98., 66.]
x3_data = [75., 93., 90., 100., 70.]
y_data = [152., 185., 180., 196., 142.]
x1 = tf.placeholder(tf.float32)
x2 = tf.placeholder(tf.float32)
x3 = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)
w1 = tf.Variable(tf.random_normal([1]))
w2 = tf.Variable(tf.random_normal([1]))
w3 = tf.Variable(tf.random_normal([1]))
b = tf.Variable(tf.random_normal([1]))
h = x1 * w1 + x2 * w2 + x3 * w3 + b
cost = tf.reduce_mean(tf.square(h - y))  # compare against the y placeholder fed below
optimizer = tf.train.GradientDescentOptimizer(0.00001)
train = optimizer.minimize(cost)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
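# Note: the inputs above are raw scores in the 60-100 range (not normalized), which is
# presumably why such a small learning rate (0.00001) is used; a noticeably larger rate
# tends to make this plain gradient descent diverge.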
for s in range(5000):
cost_v, h_v, _ = sess.run([cost, h, train], feed_dict={x1: x1_data, x2: x2_data, x3: x3_data, y: y_data})
if s % 500 == 0:
print(s, 'cost : ', cost_v, 'pred : ', h_v) | [
"[email protected]"
] | |
efbec588a9aeeb513cd86ea9ad278914f7c0bd82 | 542f898adea1b36d627d4bf437731022f242d2dd | /detectron2/evaluation/fast_eval_api.py | 2eb202bd5efa3ec3d366027b1debffc269ae8b17 | [
"Apache-2.0"
] | permissive | facebookresearch/detectron2 | 24bf508e374a98a5e5d1bd4cc96556d5914215f4 | 80307d2d5e06f06a8a677cc2653f23a4c56402ac | refs/heads/main | 2023-08-30T17:00:01.293772 | 2023-08-25T22:10:24 | 2023-08-25T22:10:24 | 206,660,580 | 27,469 | 8,047 | Apache-2.0 | 2023-09-13T09:25:57 | 2019-09-05T21:30:20 | Python | UTF-8 | Python | false | false | 5,078 | py | # Copyright (c) Facebook, Inc. and its affiliates.
import copy
import logging
import numpy as np
import time
from pycocotools.cocoeval import COCOeval
from detectron2 import _C
logger = logging.getLogger(__name__)
class COCOeval_opt(COCOeval):
"""
This is a slightly modified version of the original COCO API, where the functions evaluateImg()
    and accumulate() are implemented in C++ to speed up evaluation
"""
def evaluate(self):
"""
Run per image evaluation on given images and store results in self.evalImgs_cpp, a
        data structure that isn't readable from Python but is used by a C++ implementation of
        accumulate(). Unlike the original COCO PythonAPI, we don't populate the data structure
        self.evalImgs because this data structure is a computational bottleneck.
:return: None
"""
tic = time.time()
p = self.params
# add backward compatibility if useSegm is specified in params
if p.useSegm is not None:
p.iouType = "segm" if p.useSegm == 1 else "bbox"
logger.info("Evaluate annotation type *{}*".format(p.iouType))
p.imgIds = list(np.unique(p.imgIds))
if p.useCats:
p.catIds = list(np.unique(p.catIds))
p.maxDets = sorted(p.maxDets)
self.params = p
self._prepare() # bottleneck
# loop through images, area range, max detection number
catIds = p.catIds if p.useCats else [-1]
if p.iouType == "segm" or p.iouType == "bbox":
computeIoU = self.computeIoU
elif p.iouType == "keypoints":
computeIoU = self.computeOks
self.ious = {
(imgId, catId): computeIoU(imgId, catId) for imgId in p.imgIds for catId in catIds
} # bottleneck
maxDet = p.maxDets[-1]
# <<<< Beginning of code differences with original COCO API
def convert_instances_to_cpp(instances, is_det=False):
# Convert annotations for a list of instances in an image to a format that's fast
# to access in C++
instances_cpp = []
for instance in instances:
instance_cpp = _C.InstanceAnnotation(
int(instance["id"]),
instance["score"] if is_det else instance.get("score", 0.0),
instance["area"],
bool(instance.get("iscrowd", 0)),
bool(instance.get("ignore", 0)),
)
instances_cpp.append(instance_cpp)
return instances_cpp
# Convert GT annotations, detections, and IOUs to a format that's fast to access in C++
ground_truth_instances = [
[convert_instances_to_cpp(self._gts[imgId, catId]) for catId in p.catIds]
for imgId in p.imgIds
]
detected_instances = [
[convert_instances_to_cpp(self._dts[imgId, catId], is_det=True) for catId in p.catIds]
for imgId in p.imgIds
]
ious = [[self.ious[imgId, catId] for catId in catIds] for imgId in p.imgIds]
if not p.useCats:
# For each image, flatten per-category lists into a single list
ground_truth_instances = [[[o for c in i for o in c]] for i in ground_truth_instances]
detected_instances = [[[o for c in i for o in c]] for i in detected_instances]
# Call C++ implementation of self.evaluateImgs()
self._evalImgs_cpp = _C.COCOevalEvaluateImages(
p.areaRng, maxDet, p.iouThrs, ious, ground_truth_instances, detected_instances
)
self._evalImgs = None
self._paramsEval = copy.deepcopy(self.params)
toc = time.time()
logger.info("COCOeval_opt.evaluate() finished in {:0.2f} seconds.".format(toc - tic))
# >>>> End of code differences with original COCO API
def accumulate(self):
"""
Accumulate per image evaluation results and store the result in self.eval. Does not
support changing parameter settings from those used by self.evaluate()
"""
logger.info("Accumulating evaluation results...")
tic = time.time()
assert hasattr(
self, "_evalImgs_cpp"
), "evaluate() must be called before accmulate() is called."
self.eval = _C.COCOevalAccumulate(self._paramsEval, self._evalImgs_cpp)
# recall is num_iou_thresholds X num_categories X num_area_ranges X num_max_detections
self.eval["recall"] = np.array(self.eval["recall"]).reshape(
self.eval["counts"][:1] + self.eval["counts"][2:]
)
# precision and scores are num_iou_thresholds X num_recall_thresholds X num_categories X
# num_area_ranges X num_max_detections
self.eval["precision"] = np.array(self.eval["precision"]).reshape(self.eval["counts"])
self.eval["scores"] = np.array(self.eval["scores"]).reshape(self.eval["counts"])
toc = time.time()
logger.info("COCOeval_opt.accumulate() finished in {:0.2f} seconds.".format(toc - tic))
| [
"[email protected]"
] | |
958c86a3c3755e7c41208db86b0cfacf371bd9fc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_028/ch40_2020_05_04_14_38_41_592304.py | bb131b1a8c53321e179a9a8c32bbde7f8aaea50d | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | def soma_valores(lista):
i = 0
soma = 0
while i < len(lista):
soma += lista[i]
i += 1
return soma
lista = [1, 2, 3]  # illustrative input; no 'lista' is defined elsewhere in this exercise file
print(soma_valores(lista)) | [
"[email protected]"
] | |
9cef99f3e88b40bf782aea1a88cb3f12a81a1997 | 3e713a67f370d1cc1ba0882159a03b673bd22f9a | /PYTHON/[hackerrank]- Power - Mod Power.py | 536c0081d1c4363908466e17fb5e5605f36cc315 | [] | no_license | s-abhishek2399/competitive-progamming--PYTHON | 739797ffea0b92cc2781559e7d4eed1d274678a6 | 29f9e63cfc05c01fa605c14fb8a3a55920296d43 | refs/heads/master | 2023-03-08T02:40:00.962109 | 2021-02-16T15:07:52 | 2021-02-16T15:07:52 | 328,732,345 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | # Enter your code here. Read input from STDIN. Print output to STDOUT
n1=int(input())
n2=int(input())
n3=int(input())
print(pow(n1,n2))
print(pow(n1,n2,n3))
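# Note: the three-argument form pow(n1, n2, n3) computes (n1 ** n2) % n3 with modular
# exponentiation, so it stays fast even when the exponent is very large.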
| [
"[email protected]"
] | |
8bb28d524fdd7f33329d9757f1e2b63eaa1be891 | 496bb2067635e391b60d645700e419ec0ae28a9d | /firmware/legacy/stereoVisionMintsFinal.py | 908f5b0bc3d23a2d5819591b1bd1bf3580b2ad69 | [] | no_license | 18030410071/stereoVisionCalibration | c4b4518fcb010390d8d4c0df309b250af14bdf72 | 394968c39919aa56530c8c877eae6a1d5e0521cc | refs/heads/master | 2022-02-16T18:23:47.944983 | 2019-08-22T15:47:02 | 2019-08-22T15:47:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,861 | py | import numpy as np
import cv2
import glob
import argparse
def resizeImage(imgOrig,scalePercent):
width = int(imgOrig.shape[1] * scalePercent / 100)
height = int(imgOrig.shape[0] * scalePercent / 100)
dim = (width, height)
Orig = cv2.resize(imgOrig, dim, interpolation = cv2.INTER_AREA)
    return Orig
class StereoCalibration(object):
def __init__(self, filepath):
# termination criteria
self.criteria = (cv2.TERM_CRITERIA_EPS +
cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
self.criteria_cal = (cv2.TERM_CRITERIA_EPS +
cv2.TERM_CRITERIA_MAX_ITER, 100, 1e-5)
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
self.objp = np.zeros((6*9, 3), np.float32)
self.objp[:, :2] = np.mgrid[0:6, 0:9].T.reshape(-1, 2)
# Arrays to store object points and image points from all the images.
self.objpoints = [] # 3d point in real world space
self.imgpoints_l = [] # 2d points in image plane.
self.imgpoints_r = [] # 2d points in image plane.
# Additions by Lakitha
# Have a Generic Size
self.img_size = []
self.rotational_matrix = []
self.translational_matrix = []
self.img_size = []
self.leftImagesAll = []
self.rightImagesAll = []
self.corners_l = []
self.corners_r = []
self.cal_path = filepath
self.read_images(self.cal_path)
def read_images(self, cal_path):
print("Reading Images")
images_right = glob.glob(cal_path + 'thermal/*.jpg')
images_left = glob.glob(cal_path + 'webCam/*.jpg')
images_left.sort()
images_right.sort()
for i, fname in enumerate(images_right):
img_l = resizeImage(cv2.imread(images_left[i]),100)
img_r = resizeImage(cv2.imread(images_right[i]),100)
gray_l = cv2.cvtColor(img_l, cv2.COLOR_BGR2GRAY)
gray_r = cv2.cvtColor(img_r, cv2.COLOR_BGR2GRAY)
# Find the chess board corners
ret_l, corners_l = cv2.findChessboardCorners(gray_l, (6, 9), None)
ret_r, corners_r = cv2.findChessboardCorners(gray_r, (6, 9), None)
# imgCor = cv2.drawChessboardCorners(gray_l, (6,9), corners_l,ret_l)
#
# cv2.imshow('left',imgCor)
# cv2.waitKey(5000)
#
# imgCor = cv2.drawChessboardCorners(gray_r, (6,9), corners_r,ret_r)
# print(imgCor)
# cv2.imshow('left',imgCor)
# cv2.waitKey(5000)
            # If found, add object points and image points (after refining them).
            # Append the object points only when both boards were detected, so that
            # objpoints and the two imgpoints lists keep matching lengths for
            # cv2.calibrateCamera / cv2.stereoCalibrate.
            if (ret_l is True) and (ret_r is True):
                self.objpoints.append(self.objp)
self.leftImagesAll.append(img_l)
self.rightImagesAll.append(img_r)
self.corners_l.append(corners_l)
self.corners_r.append(corners_r)
rt = cv2.cornerSubPix(gray_l, corners_l, (11, 11),
(-1, -1), self.criteria)
self.imgpoints_l.append(corners_l)
# Draw and display the corners
ret_l = cv2.drawChessboardCorners(img_l, (6, 9),
corners_l, ret_l)
cv2.imshow(images_left[i], img_l)
# cv2.waitKey(10000)
# if ret_r is True:
rt = cv2.cornerSubPix(gray_r, corners_r, (11, 11),
(-1, -1), self.criteria)
self.imgpoints_r.append(corners_r)
# Draw and display the corners
ret_r = cv2.drawChessboardCorners(img_r, (6, 9),
corners_r, ret_r)
cv2.imshow(images_right[i], img_r)
# cv2.waitKey(10000)
img_shape = gray_l.shape[::-1]
self.image_size = img_shape
cv2.waitKey(0)
print(self.image_size)
print(ret_l)
print(ret_r)
print("Here")
rt, self.M1, self.d1, self.r1, self.t1 = cv2.calibrateCamera(
self.objpoints, self.imgpoints_l, img_shape, None, None)
rt, self.M2, self.d2, self.r2, self.t2 = cv2.calibrateCamera(
self.objpoints, self.imgpoints_r, img_shape, None, None)
self.camera_model = self.stereo_calibrate(img_shape)
def stereo_calibrate(self, dims):
flags = 0
flags |= cv2.CALIB_FIX_INTRINSIC
# flags |= cv2.CALIB_FIX_PRINCIPAL_POINT
flags |= cv2.CALIB_USE_INTRINSIC_GUESS
flags |= cv2.CALIB_FIX_FOCAL_LENGTH
# flags |= cv2.CALIB_FIX_ASPECT_RATIO
flags |= cv2.CALIB_ZERO_TANGENT_DIST
# flags |= cv2.CALIB_RATIONAL_MODEL
# flags |= cv2.CALIB_SAME_FOCAL_LENGTH
# flags |= cv2.CALIB_FIX_K3
# flags |= cv2.CALIB_FIX_K4
# flags |= cv2.CALIB_FIX_K5
print("M1:")
print(self.M1)
print("----------------")
print("M2:")
print(self.M2)
print("----------------")
print(dims)
stereocalib_criteria = (cv2.TERM_CRITERIA_MAX_ITER +
cv2.TERM_CRITERIA_EPS, 100, 1e-5)
ret, M1, d1, M2, d2, R, T, E, F = cv2.stereoCalibrate(
self.objpoints, self.imgpoints_l,
self.imgpoints_r, self.M1, self.d1, self.M2,
self.d2, dims,
criteria=stereocalib_criteria, flags=flags)
print("M1Result:")
print(M1)
print("----------------")
print("M2Result:")
print(M2)
print("----------------")
print("-------------------")
print('Intrinsic_mtx_1', M1)
print('dist_1', d1)
print('Intrinsic_mtx_2', M2)
print('dist_2', d2)
print('R', R)
print('T', T)
print('E', E)
print('F', F)
print('')
self.rotational_matrix = R
self.translational_matrix = T
camera_model = dict([('M1', M1), ('M2', M2), ('dist1', d1),
('dist2', d2), ('rvecs1', self.r1),
('rvecs2', self.r2), ('R', R), ('T', T),
('E', E), ('F', F)])
cv2.destroyAllWindows()
return camera_model;
def four_point_transform(image, pts):
# obtain a consistent order of the points and unpack them
# individually
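    # NOTE: order_points() is not defined or imported in this file; this assumes a
    # helper such as imutils.perspective.order_points (top-left, top-right,
    # bottom-right, bottom-left ordering) is available when this function is called.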
rect = order_points(pts)
(tl, tr, br, bl) = rect
# compute the width of the new image, which will be the
# maximum distance between bottom-right and bottom-left
# x-coordiates or the top-right and top-left x-coordinates
widthA = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
widthB = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
maxWidth = max(int(widthA), int(widthB))
# compute the height of the new image, which will be the
# maximum distance between the top-right and bottom-right
# y-coordinates or the top-left and bottom-left y-coordinates
heightA = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
heightB = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
maxHeight = max(int(heightA), int(heightB))
# now that we have the dimensions of the new image, construct
# the set of destination points to obtain a "birds eye view",
# (i.e. top-down view) of the image, again specifying points
# in the top-left, top-right, bottom-right, and bottom-left
# order
dst = np.array([
[0, 0],
[maxWidth - 1, 0],
[maxWidth - 1, maxHeight - 1],
[0, maxHeight - 1]], dtype = "float32")
# compute the perspective transform matrix and then apply it
M = cv2.getPerspectiveTransform(rect, dst)
warped = cv2.warpPerspective(image, M, (maxWidth, maxHeight))
# return the warped image
return warped
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('filepath', help='String Filepath')
args = parser.parse_args()
cal_data = StereoCalibration(args.filepath)
print("--------------------")
print(cal_data.corners_l[0][0])
print(cal_data.corners_r[0][0])
# print(cal_data.M1)
# print(cal_data.d1)
#
# #
# (leftRectification, rightRectification, leftProjection, rightProjection,
# dispartityToDepthMap, leftROI, rightROI) = cv2.stereoRectify(
# cal_data.M1,cal_data.d1,
# cal_data.M2,cal_data.d2,
# cal_data.image_size,
# cal_data.rotational_matrix,
# cal_data.translational_matrix,
# None,
# None,
# None,
# None,
# None,
# cv2.CALIB_ZERO_DISPARITY, 0)
#
# leftMapX, leftMapY = cv2.initUndistortRectifyMap(
# cal_data.M1, cal_data.d1, leftRectification,
# leftProjection, cal_data.image_size, cv2.CV_32FC1)
# rightMapX, rightMapY = cv2.initUndistortRectifyMap(
# cal_data.M2, cal_data.d2, rightRectification,
# rightProjection, cal_data.image_size, cv2.CV_32FC1)
#
#
#
# ## Get Images
#
# for i, fname in enumerate(cal_data.leftImagesAll):
#
# fixedLeft = cv2.remap(cal_data.leftImagesAll[i], leftMapX, leftMapY,cv2.INTER_LINEAR)
# fixedRight = cv2.remap(cal_data.rightImagesAll[i], rightMapX, rightMapY,cv2.INTER_LINEAR)
#
# cv2.imshow("Fixed Left", fixedLeft)
# cv2.waitKey(1000)
#
# cv2.imshow("Fixed Right", fixedRight)
# cv2.waitKey(1000)
#
#
#
#
# stereoMatcher = cv2.StereoBM_create()
#
# grayLeft = cv2.cvtColor(fixedLeft, cv2.COLOR_BGR2GRAY)
# grayRight = cv2.cvtColor(fixedRight, cv2.COLOR_BGR2GRAY)
# depth = stereoMatcher.compute(grayLeft, grayRight)
| [
"[email protected]"
] | |
a9247a4ab26d1108aacf1f62fa5a92520e837613 | eb937d6c7e10b451390007868df8de4912b1d098 | /AutoInerface_project/Day11_Pytest/plugins/test_plugin_06.py | 5dcecf8909521ba54473980deb165062b3000fdf | [] | no_license | chenbaoshun/AutomationTesting | 01bbc3dc84c5ce26a75909a60bb304f7a06253b5 | 98882c3599d0eb9ac84e74193c584ba7b78ecfab | refs/heads/master | 2023-03-14T01:44:07.163998 | 2021-02-24T15:35:01 | 2021-02-24T15:35:01 | 290,236,883 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# @File : test_plugin_06.py
# @Author : Baoshun.Chin
# @Time : 2021-01-07 22:04
# @Site :
# @version : V1.0
import pytest
@pytest.mark.dependency()
def test_a():
assert False
@pytest.mark.dependency()
def test_b():
assert True
@pytest.mark.dependency(depends=['test_a'])
def test_c():
pass
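# Expected behaviour with the pytest-dependency plugin installed: test_a fails, so
# test_c (which depends on it) is reported as skipped; test_b passes, so test_d below
# runs normally.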
@pytest.mark.dependency(depends=['test_b'])
def test_d():
pass | [
"[email protected]"
] | |
7b181876bac13acb503e5f2cfa17bd79be20980c | 36978086cf5f34e16ceac7c2649b49ccb4c5ac90 | /config/munin/mongo_indexsize | 15898bb14f1297f7e488deac7f1647d64ca39769 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | aragilar/NewsBlur | 04e754093cd52bc2d9957ea767747d6d604dfbba | 64ecd83bf4cea175f1bdeeb6e475fd5cadb679c9 | refs/heads/master | 2021-08-28T17:39:50.734396 | 2013-06-06T01:52:20 | 2013-06-06T01:52:37 | 10,520,281 | 0 | 0 | MIT | 2021-08-13T05:35:33 | 2013-06-06T06:26:24 | Objective-C | UTF-8 | Python | false | false | 3,371 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set sts=4 sw=4 encoding=utf-8
# Copyright (c) 2010, Rene Jochum
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Rene Jochum nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from pymongo import Connection
import os
settings_host = os.environ.get("host", "127.0.0.1")
settings_port = 27017
settings_db = 'newsblur'
# Default to no authentication; these may be overridden from the environment in __main__ below.
settings_user = None
settings_password = None
def getCollstats():
global settings_host, settings_port, settings_db, settings_user, settings_password
    # print locals(), settings_host, settings_port  # debug only; stray output would corrupt the munin protocol
if settings_user and settings_password:
settings_host = "%s:%s@%s" % (settings_user, settings_password, settings_host)
con = Connection(settings_host, int(settings_port), slave_okay=True)
db = con[settings_db]
for coll in db.collection_names():
if coll.startswith('system.'):
continue
stats = db.command("collstats", coll)
yield ("%s_size" % coll.replace('.', '_'), long(stats['totalIndexSize']),)
con.disconnect()
def doData():
for coll, stats in getCollstats():
print "%s.value %s" % (coll, stats)
def doConfig():
print "graph_title MongoDB collection index sizes"
print "graph_args --base 1024 -l 0"
print "graph_vlabel Kb"
print "graph_category MongoDB"
print "graph_total total"
for k,v in getCollstats():
print "%s.label %s" % (k, k)
print "%s.min 0" % k
print "%s.draw LINE1" % k
if __name__ == "__main__":
from sys import argv
from os import environ
# Could be done by a for loop
# but i think if's are faster
if 'HOST' in environ:
settings_host = environ['HOST']
if 'PORT' in environ:
settings_port = environ['PORT']
if 'DB' in environ:
settings_db = environ['DB']
if 'user' in environ:
settings_user = environ['user']
if 'password' in environ:
settings_password = environ['password']
    # print locals()  # debug only; munin expects just the config/fetch output below
if len(argv) > 1 and argv[1] == "config":
doConfig()
else:
doData()
| [
"[email protected]"
] | ||
35e0c88ae6ffeb513481031097e2e553bf772f79 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/application_gateway_probe_py3.py | 60baab8984853cbcc41461da947ca52283319939 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 4,676 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource_py3 import SubResource
class ApplicationGatewayProbe(SubResource):
"""Probe of the application gateway.
:param id: Resource ID.
:type id: str
:param protocol: The protocol used for the probe. Possible values are
'Http' and 'Https'. Possible values include: 'Http', 'Https'
:type protocol: str or
~azure.mgmt.network.v2018_11_01.models.ApplicationGatewayProtocol
:param host: Host name to send the probe to.
:type host: str
:param path: Relative path of probe. Valid path starts from '/'. Probe is
sent to <Protocol>://<host>:<port><path>
:type path: str
:param interval: The probing interval in seconds. This is the time
interval between two consecutive probes. Acceptable values are from 1
second to 86400 seconds.
:type interval: int
:param timeout: the probe timeout in seconds. Probe marked as failed if
valid response is not received with this timeout period. Acceptable values
are from 1 second to 86400 seconds.
:type timeout: int
:param unhealthy_threshold: The probe retry count. Backend server is
marked down after consecutive probe failure count reaches
UnhealthyThreshold. Acceptable values are from 1 second to 20.
:type unhealthy_threshold: int
:param pick_host_name_from_backend_http_settings: Whether the host header
should be picked from the backend http settings. Default value is false.
:type pick_host_name_from_backend_http_settings: bool
:param min_servers: Minimum number of servers that are always marked
healthy. Default value is 0.
:type min_servers: int
:param match: Criterion for classifying a healthy probe response.
:type match:
~azure.mgmt.network.v2018_11_01.models.ApplicationGatewayProbeHealthResponseMatch
:param provisioning_state: Provisioning state of the backend http settings
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: Name of the probe that is unique within an Application
Gateway.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'host': {'key': 'properties.host', 'type': 'str'},
'path': {'key': 'properties.path', 'type': 'str'},
'interval': {'key': 'properties.interval', 'type': 'int'},
'timeout': {'key': 'properties.timeout', 'type': 'int'},
'unhealthy_threshold': {'key': 'properties.unhealthyThreshold', 'type': 'int'},
'pick_host_name_from_backend_http_settings': {'key': 'properties.pickHostNameFromBackendHttpSettings', 'type': 'bool'},
'min_servers': {'key': 'properties.minServers', 'type': 'int'},
'match': {'key': 'properties.match', 'type': 'ApplicationGatewayProbeHealthResponseMatch'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, *, id: str=None, protocol=None, host: str=None, path: str=None, interval: int=None, timeout: int=None, unhealthy_threshold: int=None, pick_host_name_from_backend_http_settings: bool=None, min_servers: int=None, match=None, provisioning_state: str=None, name: str=None, etag: str=None, type: str=None, **kwargs) -> None:
super(ApplicationGatewayProbe, self).__init__(id=id, **kwargs)
self.protocol = protocol
self.host = host
self.path = path
self.interval = interval
self.timeout = timeout
self.unhealthy_threshold = unhealthy_threshold
self.pick_host_name_from_backend_http_settings = pick_host_name_from_backend_http_settings
self.min_servers = min_servers
self.match = match
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
self.type = type
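# Illustrative construction sketch (not from the SDK docs; the field values are made up):
#
#     probe = ApplicationGatewayProbe(
#         name="health-probe",
#         protocol="Http",
#         host="backend.example.com",
#         path="/health",
#         interval=30,
#         timeout=30,
#         unhealthy_threshold=3,
#     )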
| [
"[email protected]"
] | |
0b4f9c988a9ef428a461575ec540592e80e59fd4 | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /squiggles/space_mappings_test.py | f5d551b4c3ff6cc51b21faff408be9d498e03d92 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 10,598 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for space_mappings."""
import math
from absl.testing import absltest
import numpy as np
from squiggles import space_mappings
class DerivsToPathPointsTest(absltest.TestCase):
def test_shape(self):
dummy_input = np.zeros(shape=(11, 2, 5), dtype=np.float32)
expected_output_shape = (11, 17, 2)
output = space_mappings.derivs_to_path_points(dummy_input, num_points=17)
self.assertSequenceEqual(expected_output_shape, output.shape)
def test_entries(self):
exp_derivs = np.ones(shape=(16,), dtype=np.float32)
sin_derivs = np.array(4 * [1, 0, -1, 0], dtype=np.float32)
input_array = np.stack([exp_derivs, sin_derivs], axis=0)
input_array = input_array[np.newaxis, :]
output = space_mappings.derivs_to_path_points(input_array, num_points=7)
expected_output = np.array(
[[
[math.exp(-3) - 1, math.sin(-3)], # autoformat: \n
[math.exp(-2) - 1, math.sin(-2)],
[math.exp(-1) - 1, math.sin(-1)],
[math.exp(0) - 1, math.sin(0)],
[math.exp(1) - 1, math.sin(1)],
[math.exp(2) - 1, math.sin(2)],
[math.exp(3) - 1, math.sin(3)]
]],
dtype=np.float32)
np.testing.assert_allclose(expected_output, output, atol=1e-6, rtol=1e-6)
def _sine_net_to_path_points_test_function_to_approximate(t):
"""Function whose Fourier series over [-1, 1] converges reasonably quickly."""
# The Fourier series converges reasonably quickly on the interval [-1, 1]
# since
#
# f(1) = f(-1)
# f'(1) = f'(-1)
# f''(1) = f''(-1)
# f'''(1) = f'''(-1).
#
# Multiplying by (2 - t) makes the function neither even nor odd, ensuring
# that the Fourier series will contain both sine and cosine terms. Since we
# use phases rather than separate sine and cosine terms, this ensures the
# phases will be nontrivial.
return (1 - t)**4 * (1 + t)**4 * (2 - t)
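# (1 - t)**4 * (1 + t)**4 has a zero of order four at both t = 1 and t = -1, so f and
# its first three derivatives all vanish at the endpoints; that is what makes the
# periodic extension C^3 and the Fourier coefficients decay quickly.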
class SinNetToPathPointsTest(absltest.TestCase):
def test_shape(self):
dummy_input = np.zeros(shape=(11, 31, 4), dtype=np.float32)
expected_output_shape = (11, 17, 2)
output = space_mappings.sine_net_to_path_points(dummy_input, num_points=17)
self.assertSequenceEqual(expected_output_shape, output.shape)
def test_entries_x(self):
num_points = 7
t_scale = 2
# `const_coeff`, `x_amplitudes` and `phases` construct the (first few terms
# of the) Fourier series for
# _sine_net_to_path_points_test_function_to_approximate. They were computed
# using SageMath; for the computations and verifications see the notebook at
# https://sagecell.sagemath.org/?z=eJydk9tugzAMhu_zFNZuGrf0AKp2x5OgDmXUbNEgyZKgtW8_F2gp0jpN44IEH77fJk4NOThNFX3pQLKQcp0mkGICMoU1nPBlD8vLfnXbZ70dDyiE6doykm8DQ7KdCNqUlaW6vnwX9aa2ndfky0C8hJLdNPh1pclEqZmaItTWgwZtwCvzRvIGxYOobPiNyO7_ME2IQw5D_8LcIWwhE0K3zvoIDHNnUAGMm7ds3EZ5r85ysiZwjGdHObvqxqr4vMd5U7ecyfpTjlCta3TsjjQmhU8fZb92ytOdIvIPmOwTFVE4r7mbu_6vpgnOSu5dhV6lF5dObzPkI-8LraIy2ay9O4ERNuQzSLrGzuS4stC1spjkCn3gkWKelHrFx7bkWeTXiSMHDAeggEfP41PGZMySpwTGkR4tlW2szxeejosEWuU_yOdP9ollWPRScc3Df70FY_Br09ECBX4DtRYGjQ==&lang=sage&interacts=eJyLjgUAARUAuQ==
const_coeff = 256 / 315
x_amplitudes = np.array([
1.03251546e+00, 2.01003369e-01, 1.93362046e-02, 3.75423077e-03,
1.08763061e-03, 4.04024912e-04, 1.77361643e-04, 8.77096053e-05,
4.74116246e-05, 2.74572061e-05, 1.67990298e-05, 1.07490437e-05,
7.13888030e-06, 4.89272148e-06, 3.44479004e-06, 2.48254596e-06,
1.82592189e-06, 1.36732661e-06, 1.04039452e-06, 8.03023841e-07
],
dtype=np.float32)
y_amplitudes = np.zeros_like(x_amplitudes)
phases = np.array([
1.72361702, 1.99433456, -1.53750914, 1.32483942, 4.26165659, 0.9658349,
3.98821988, 0.75230197, 3.81770253, 0.61345077, 3.70270494, 0.51679847,
3.62042536, 0.4459663, 3.55884523, 0.39196216, 3.51111752, 0.34948904,
3.47308523, 0.31524202
],
dtype=np.float32)
radian_frequencies = np.array([
(i + 1) * math.pi for i in range(len(phases))
])
input_array = np.stack(
[radian_frequencies, phases, x_amplitudes, y_amplitudes], axis=1)
input_array = input_array[np.newaxis, :]
expected_output_x = np.array([
_sine_net_to_path_points_test_function_to_approximate(t)
for t in np.linspace(-1.0, 1.0, 7)
],
dtype=np.float32)
expected_output_y = np.zeros_like(expected_output_x)
expected_output = np.stack([expected_output_x, expected_output_y], axis=1)
expected_output = expected_output[np.newaxis, :]
# The const_coeff could theoretically have been included as a coefficient of
# sin(0*t + pi/2), but that was more effort than it was worth.
output = (
space_mappings.sine_net_to_path_points(
input_array, num_points=num_points, t_scale=t_scale) +
np.array([const_coeff, 0.0]))
np.testing.assert_allclose(expected_output, output, atol=1e-5)
class RescalePointsTest(absltest.TestCase):
def test_shape(self):
dummy_input = np.zeros(shape=(7, 11, 2), dtype=np.float32)
expected_output_shape = (7, 11, 2)
output = space_mappings.rescale_points(dummy_input)
self.assertSequenceEqual(expected_output_shape, output.shape)
def test_x_dominates(self):
input_array = np.array([[(-1, 0), (0, 0), (1, 1)]], dtype=np.float32)
output = space_mappings.rescale_points(input_array, margin=0.0) # pytype: disable=wrong-arg-types # jax-ndarray
# Scale everything by factor of 0.5. (Also translate.)
expected_output = np.array([[(0, 0.25), (0.5, 0.25), (1, 0.75)]],
dtype=np.float32)
np.testing.assert_allclose(expected_output, output)
def test_y_dominates(self):
input_array = np.array([[(0, 0), (0, 2), (1, 1)]], dtype=np.float32)
output = space_mappings.rescale_points(input_array, margin=0.0) # pytype: disable=wrong-arg-types # jax-ndarray
# Scale everything by factor of 0.5. (Also translate.)
expected_output = np.array([[(0.25, 0), (0.25, 1), (0.75, 0.5)]],
dtype=np.float32)
np.testing.assert_allclose(expected_output, output)
def test_margin(self):
input_array = np.array([[(0, 0), (1, 1), (2, 2)]], dtype=np.float32)
output = space_mappings.rescale_points(input_array) # default margin is 0.1 # pytype: disable=wrong-arg-types # jax-ndarray
# Scale everything to fit inside [0.1, 0.9] x [0.1, 0.9].
expected_output = np.array([[(0.1, 0.1), (0.5, 0.5), (0.9, 0.9)]],
dtype=np.float32)
np.testing.assert_allclose(expected_output, output)
def test_two_curves(self):
"""Distinct items in a batch should scale independently."""
input_array = np.array(
[
[(0, 0), (0.5, 0), (1, 1)], #
[(0, 0), (1, 1), (2, 2)]
],
dtype=np.float32)
output = space_mappings.rescale_points(input_array, margin=0.0) # pytype: disable=wrong-arg-types # jax-ndarray
expected_output = np.array(
[
[(0, 0), (0.5, 0), (1, 1)], #
[(0, 0), (0.5, 0.5), (1, 1)]
],
dtype=np.float32)
np.testing.assert_allclose(expected_output, output)
class GaussianActivationTest(absltest.TestCase):
def test_shape(self):
dummy_input = np.zeros(shape=(3, 5, 7, 1), dtype=np.float32)
expected_output_shape = (3, 5, 7, 1)
output = space_mappings.gaussian_activation(dummy_input)
self.assertSequenceEqual(expected_output_shape, output.shape)
def test_entries(self):
input_array = np.array([0.0, 1 / 11, -1 / 11, -1.0, 1.0], dtype=np.float32)
output = space_mappings.gaussian_activation(input_array, spread=1 / 11)
expected_output = np.array([1.0] + 2 * [np.exp(-1)] + 2 * [np.exp(-121.0)])
np.testing.assert_allclose(expected_output, output, atol=1e-6)
def test_computed_spread(self):
input_array = (
np.random.RandomState(seed=328773902) # Make test deterministic
.standard_normal(size=(3, 13, 7, 2)) #
.astype(np.float32, casting='same_kind'))
expected_output = np.exp(-np.square(13 * input_array))
output = space_mappings.gaussian_activation(
input_array, spread=None
) # When spread is *explicitly* set to None, it defaults to 1 / sidelength.
np.testing.assert_allclose(expected_output, output, atol=1e-6)
class NearestPointDistanceTest(absltest.TestCase):
def test_one_point(self):
one_point_input = np.array([[(0.0, 1.0)]], dtype=np.float32)
expected_output = np.array([[
[1.0, 0.0], #
[math.sqrt(2.0), 1.0]
]])
output = space_mappings.nearest_point_distance(
one_point_input, x_pixels=2, y_pixels=2)
np.testing.assert_allclose(expected_output, output)
def test_two_points(self):
two_points_input = np.array([[(0, 0), (0.5, 1.0)]], dtype=np.float32)
expected_output = np.array([[
[0.0, 0.5], #
[1.0, 0.5]
]])
output = space_mappings.nearest_point_distance(
two_points_input, x_pixels=2, y_pixels=2)
np.testing.assert_allclose(expected_output, output)
def test_shape(self):
dummy_input = np.zeros(shape=(17, 19, 2), dtype=np.float32)
expected_output_shape = (17, 11, 13)
output = space_mappings.nearest_point_distance(
dummy_input, x_pixels=11, y_pixels=13)
self.assertSequenceEqual(expected_output_shape, output.shape)
class CoordsToPixelsTest(absltest.TestCase):
def test_two_points(self):
two_points_input = np.array([[(0, 0), (0.5, 1.0)]], dtype=np.float32)
expected_output = np.array(
[[
[[1.0], [0.36787944]], #
[[0.01831563888], [0.36787944]]
]],
dtype=np.float32)
output = space_mappings.coords_to_pixels(
two_points_input, 2, 2, spread=1 / 2)
np.testing.assert_allclose(expected_output, output)
if __name__ == '__main__':
absltest.main()
| [
"[email protected]"
] | |
e6eba346ebf55540f84c30d63ab8d3f50f3fdb65 | 96e0dd08563b1f579992c14207d103ee80222b1b | /Algorithm-master/find_max_crossing_subarray_brute_force.py | f9d6443d36f700b3418debb05985ef52be6aee92 | [] | no_license | tonygodspeed/pytest | 4030e21f3206e3c5cb58aac870e3a1a57cd6943d | 2e87b91c148ff6966096bb8b197c0a84f5a1e7e2 | refs/heads/master | 2020-04-02T13:14:20.811887 | 2018-10-24T09:00:57 | 2018-10-24T09:00:57 | 154,472,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | def find_max_subarray(A, low, high):
left = 0
right = 0
sum = float("-inf")
for i in xrange(low, high):
current_sum = 0
for j in xrange(i, high):
current_sum += A[j]
if sum < current_sum:
sum = current_sum
left = i
right = j
return (left, right, sum)
A = [0, -1, 3, -4, 6, -1, 4]
print find_max_subarray(A, 0, 7)
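# For the sample list above this prints (4, 6, 9): the best subarray is [6, -1, 4].
# The double loop tries every (i, j) pair, so this brute force runs in O(n^2) time.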
| [
"[email protected]"
] | |
b011d6a3cdd94bed52a26b5f0ccaf26cf8e75541 | 9e271a3bc1bf388d82bc5a01d275d910c00f315c | /event/templatetags/event_tags.py | 78506192f1ce6a67fb4e2104e5a7dc4ca144e7cb | [
"MIT"
] | permissive | kthaisociety/website | 36f11b704f9c38414e0999b55db4513444b53f9e | 4c4efb8a93218ae128d203b15c4340f90fe9f6a6 | refs/heads/master | 2023-08-09T19:44:16.968356 | 2023-05-20T20:33:05 | 2023-05-20T20:33:05 | 218,593,606 | 2 | 3 | MIT | 2023-05-20T20:33:06 | 2019-10-30T18:17:10 | Python | UTF-8 | Python | false | false | 626 | py | from django import template
from django.utils import timezone
from event.consts import SCHEDULE_EMOJIS
from event.enums import ScheduleType
register = template.Library()
ctz = timezone.get_current_timezone()
@register.filter
def display_clock(time: timezone.datetime):
time = time.astimezone(ctz)
base = int("1F54F", 16)
hour = time.hour % 12
if hour == 0:
hour = 12
return chr(base + hour)
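# display_clock above relies on U+1F550 (CLOCK FACE ONE OCLOCK) through U+1F55B
# (CLOCK FACE TWELVE OCLOCK) being consecutive code points, so 0x1F54F + hour maps
# hours 1-12 to the matching clock-face emoji.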
@register.filter
def one_year(time: timezone.datetime):
return time.replace(year=time.year - 1)
@register.filter
def schedule_emoji(type: ScheduleType):
return SCHEDULE_EMOJIS.get(type, "")
| [
"[email protected]"
] | |
5c68cc918c51596991219a74028a7f98ead9732c | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_18481.py | 6952ed857165fc15e31ad3ccc712f250d5d2242b | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | # Unix cat function (cat * > merged.txt) in Python?
import os
os.system("cd mydir;cat * > merged.txt")
| [
"[email protected]"
] | |
32447f30f4cc21c660188aee5e87920ec6663c17 | 59a688e68421794af64bfe69a74f64b2c80cd79d | /utils_all.py | 932ceb655f16948c0aec5628b7ec1358379f78e7 | [] | no_license | hearues-zueke-github/python_programs | f23469b306e057512aadecad0ca0a02705667a15 | d24f04ca143aa93f172210a4b9dfdd9bf1b79a15 | refs/heads/master | 2023-07-26T00:36:56.512635 | 2023-07-17T12:35:16 | 2023-07-17T12:35:16 | 117,093,746 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,666 | py | import datetime
import string
from time import time
import numpy as np
from PIL import Image, ImageTk
from tkinter import Tk, Label, BOTH
from tkinter.ttk import Frame, Style
all_symbols_16 = np.array(list("0123456789ABCDEF"))
def get_random_str_base_16(n):
l = np.random.randint(0, 16, (n, ))
return "".join(all_symbols_16[l])
all_symbols_64 = np.array(list(string.ascii_lowercase+string.ascii_uppercase+string.digits+"-_"))
def get_random_str_base_64(n):
l = np.random.randint(0, 64, (n, ))
return "".join(all_symbols_64[l])
def get_date_time_str_full():
dt = datetime.datetime.now()
dt_params = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond)
return "Y{:04}_m{:02}_d{:02}_H{:02}_M{:02}_S{:02}_f{:06}".format(*dt_params)
def get_date_time_str_full_short():
dt = datetime.datetime.now()
dt_params = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond)
return "{:04}_{:02}_{:02}_{:02}_{:02}_{:02}_{:06}".format(*dt_params)
def time_measure(f, args):
start_time = time()
ret = f(*args)
end_time = time()
diff_time = end_time-start_time
return ret, diff_time
class ShowImg(Frame, object):
def __init__(self, img):
parent = Tk()
Frame.__init__(self, parent)
self.pack(fill=BOTH, expand=1)
label1 = Label(self)
label1.photo= ImageTk.PhotoImage(img)
label1.config(image=label1.photo)
label1.pack(fill=BOTH, expand=1)
parent.mainloop()
def int_sqrt(n):
x_prev = n
x_now = (n//1+1)//2
while x_now<x_prev:
t = (n//x_now+x_now)//2
x_prev = x_now
x_now = t
    # x_prev holds floor(sqrt(n)); the iteration can end on an oscillation between
    # two values (e.g. n=3 alternates 1 and 2), and the smaller iterate is correct.
    return x_prev
def get_current_datetime_str():
return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
| [
"[email protected]"
] | |
355f9aba0beb6cce657ff0a0adfe78d8f056baeb | 07622a0fb38e843ab0eef4f69bb8fb25d107c06d | /pretrained_mol_sim/Theano-master/theano/tensor/tests/test_type_other.py | 436dcdb243a457abb91f7885ffe9150030f48544 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | andreeadeac22/graph_coattention | fa59d77252625e4bee1cb9670e4a0fd0fec98135 | 23781fedaa942ca5614054f965cb7b6543e533fa | refs/heads/master | 2023-08-08T01:51:51.368457 | 2020-02-19T04:56:59 | 2020-02-19T04:56:59 | 207,414,336 | 15 | 4 | MIT | 2023-07-22T15:47:39 | 2019-09-09T22:13:34 | Python | UTF-8 | Python | false | false | 1,424 | py | """ This file don't test everything. It only test one past crash error."""
from __future__ import absolute_import, print_function, division
import theano
from theano.gof import Constant
from theano.tensor.type_other import MakeSlice, make_slice, NoneTypeT, NoneConst
def test_make_slice_merge():
    # In the past, this was crashing during compilation.
i = theano.tensor.iscalar()
s1 = make_slice(0, i)
s2 = make_slice(0, i)
f = theano.function([i], [s1, s2])
nodes = f.maker.fgraph.apply_nodes
assert len([n for n in nodes if isinstance(n.op, MakeSlice)]) == 1
theano.printing.debugprint(f)
def test_none_Constant():
""" Tests equals
We had an error in the past with unpickling
"""
o1 = Constant(NoneTypeT(), None, name='NoneConst')
o2 = Constant(NoneTypeT(), None, name='NoneConst')
assert o1.equals(o2)
assert NoneConst.equals(o1)
assert o1.equals(NoneConst)
assert NoneConst.equals(o2)
assert o2.equals(NoneConst)
    # This triggers the equals comparison that returned the wrong answer in the past.
import six.moves.cPickle as pickle
import theano
from theano import tensor
x = tensor.vector('x')
y = tensor.argmax(x)
kwargs = {}
# We can't pickle DebugMode
if theano.config.mode in ["DebugMode", "DEBUG_MODE"]:
kwargs = {'mode': 'FAST_RUN'}
f = theano.function([x], [y], **kwargs)
pickle.loads(pickle.dumps(f))
| [
"[email protected]"
] | |
d6172bb361027fd1a364a8217aeb5faa9291df2f | 2584c50ff47765db9df565a2254f762a15821fe0 | /relationshipmvt/app/forms.py | 77fae4641a90bdd6dc4ef45ae365ba6399268ecc | [] | no_license | vipuldhandre/Django | f75cb135761e54eadec57c59c052f676eae1469e | 51db8a59e068deb855e39c8bcc79e819b135f7d8 | refs/heads/master | 2020-10-01T19:50:11.317032 | 2020-01-13T17:00:19 | 2020-01-13T17:00:19 | 227,611,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | from django.forms import ModelForm
from app.models import (Company,Languages,Programmer)
class ProgrammerForm(ModelForm):
class Meta:
model = Programmer
fields = '__all__' # or ['pname','company','languages']
| [
"[email protected]"
] | |
0e493ff5100b18ede19dccc8f1f910fcdfea6413 | 75b289e20c24c07b64a89935f3f671d19b15d387 | /0x02-python-import_modules/100-my_calculator.py | cb22700f9ceae5482fd430d778134408e6c6c0e4 | [] | no_license | luroto/holbertonschool-higher_level_programming | 33c5a2c3b323e89391b9e110da846876085f3b8e | 63efcc1f91207dee9fc095884551333b91674587 | refs/heads/master | 2021-07-08T12:17:25.378993 | 2020-09-02T05:20:41 | 2020-09-02T05:20:41 | 184,124,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 786 | py | #!/usr/bin/python3
if __name__ == "__main__":
from sys import argv
from calculator_1 import add, sub, mul, div
total = len(argv)
if total != 4:
print("Usage: ./100-my_calculator.py <a> <operator> <b>")
exit(1)
opera = argv[2]
if opera != "+" and opera != "-" and opera != "*" and opera != "/":
print("Unknown operator. Available operators: +, -, * and /")
exit(1)
a = int(argv[1])
b = int(argv[3])
if opera == "+":
print("{:d} + {:d} = {:d}".format(a, b, add(a, b)))
if opera == "-":
print("{:d} - {:d} = {:d}".format(a, b, sub(a, b)))
if opera == "*":
print("{:d} * {:d} = {:d}".format(a, b, mul(a, b)))
if opera == "/":
print("{:d} / {:d} = {:d}".format(a, b, div(a, b)))
| [
"[email protected]"
] | |
bf4c983ab86fc148223b793eef9b2d6247bb60e6 | f7a718425de1447836b547f831a120937f1fcf40 | /plumbum/core.py | 94ae4b1f55d439ace21402d8d93e42a6468797b5 | [
"BSD-3-Clause"
] | permissive | coyotevz/plumbum-old-1 | ad8ce697ffb4cbd0a6f238f66a1c546800e47024 | c0f769ca525298ab190592d0997575d917a4bed4 | refs/heads/master | 2021-01-20T10:50:32.516766 | 2016-11-18T04:20:32 | 2016-11-18T04:20:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,879 | py | # -*- coding: utf-8 -*-
__all__ = ['Component', 'ExtensionPoint', 'implements', 'Interface',
'PlumbumBaseError', 'PlumbumError']
class PlumbumBaseError(Exception):
"""Base class for all exceptions defined in Plumbum."""
title = "Plumbum Error"
class PlumbumError(PlumbumBaseError):
"""Standard exception for errors in Plumbum."""
def __init__(self, message, title=None, show_traceback=False):
super(PlumbumError, self).__init__(message)
self._message = message
self.title = title
self.show_traceback = show_traceback
message = property(lambda x: x._message,
lambda x, v: setattr(x, '_message', v))
def __str__(self):
return self.message
class Interface(object):
"""Marker base class for extension point interfaces."""
class ExtensionPoint(property):
"""Marker class for extension points in components."""
def __init__(self, interface):
"""Create the extension point.
        @param interface: the `Interface` subclass that defines the protocol
for the extension point.
"""
property.__init__(self, self.extensions)
self.interface = interface
self.__doc__ = ("List of components that implement: `%s.%s`" %
(self.interface.__module__, self.interface.__name__))
def extensions(self, component):
"""Return a list of components that declare to implement the extension
point interface.
"""
classes = ComponentMeta._registry.get(self.interface, ())
components = [component.compmgr[cls] for cls in classes]
return [c for c in components if c]
def __repr__(self):
"""Return a textual representation of the extension point."""
return "<ExtensionPoint %s>" % self.interface.__name__
class ComponentMeta(type):
"""Meta class for components.
Takes care of component and extension point registration.
"""
_components = []
_registry = {}
def __new__(mcs, name, bases, d):
"""Create the component class."""
new_class = type.__new__(mcs, name, bases, d)
if name == 'Component':
# Don't put the Component base class in the registry
return new_class
if d.get('abstract'):
# Don't put abstract component classes in the registry
return new_class
ComponentMeta._components.append(new_class)
registry = ComponentMeta._registry
for cls in new_class.__mro__:
for interface in cls.__dict__.get('_implements', ()):
classes = registry.setdefault(interface, [])
if new_class not in classes:
classes.append(new_class)
return new_class
def __call__(cls, *args, **kwargs):
"""Return an existing instance of the component if it has already been
activated, otherwise create a new instance.
"""
# If this component is also the component manager, just invoke that
if issubclass(cls, ComponentManager):
self = cls.__new__(cls)
self.compmgr = self
self.__init__(*args, **kwargs)
return self
# The normal case where the component is not also the component manager
assert len(args) >= 1 and isinstance(args[0], ComponentManager), \
"First argument must be a ComponentManager instance"
compmgr = args[0]
self = compmgr.components.get(cls)
# Note that this check is racy, we intentionally don't use a lock in
# order to keep things simple and avoid the risk of deadlocks, as the
        # impact of having temporarily two (or more) instances for a given `cls`
# is negligible.
if self is None:
self = cls.__new__(cls)
self.compmgr = compmgr
compmgr.component_activated(self)
self.__init__()
# Only register the instance once it is fully initialized (#9418)
compmgr.components[cls] = self
return self
class Component(object, metaclass=ComponentMeta):
"""Base class for components.
Every component can declare what extension points it provides, as well as
    what extension points of other components it extends.
"""
@staticmethod
def implements(*interfaces):
"""Can be used in the class definition of `Component` subclasses to
declare the extension points that are extended.
"""
import sys
frame = sys._getframe(1)
locals_ = frame.f_locals
# Some sanity checks
assert locals_ is not frame.f_globals and '__module__' in locals_, \
'implements() can only be used in a class definition'
locals_.setdefault('_implements', []).extend(interfaces)
def __repr__(self):
"""Return a textus representantion of the component."""
return '<Component {}.{}>'.format(self.__class__.__module__,
self.__class__.__name__)
implements = Component.implements
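# Illustrative usage sketch (not part of this module): an interface, a component that
# exposes an extension point, and a component that plugs into it.
#
#     class ITodoObserver(Interface):
#         def todo_added(name, description):
#             """Called when a to-do item is added."""
#
#     class TodoList(Component):
#         observers = ExtensionPoint(ITodoObserver)
#
#         def add(self, name, description):
#             for observer in self.observers:
#                 observer.todo_added(name, description)
#
#     class TodoPrinter(Component):
#         implements(ITodoObserver)
#
#         def todo_added(self, name, description):
#             print('to-do:', name, '-', description)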
class ComponentManager(object):
"""The component manager keeps a pool of active components."""
def __init__(self):
"""Initialize the component manager."""
self.components = {}
self.enabled = {}
if isinstance(self, Component):
self.components[self.__class__] = self
def __contains__(self, cls):
"""Return wether the given class is in the list of active components"""
return cls in self.components
def __getitem__(self, cls):
"""Activate the component instance for the given class, or return the
existing instance if the component has already been activated.
Note that `ComponentManager` components can't be activated that way.
"""
if not self.is_enabled(cls):
return None
component = self.components.get(cls)
if not component and not issubclass(cls, ComponentManager):
if cls not in ComponentMeta._components:
raise PlumbumError('Component "{}" not registered'
.format(cls.__name__))
try:
component = cls(self)
except TypeError as e:
raise PlumbumError('Unable to instantiate component {!r} ({})'
.format(cls, e))
return component
def is_enabled(self, cls):
"""Return whether the given component class is enabled."""
if cls not in self.enabled:
self.enabled[cls] = self.is_component_enabled(cls)
return self.enabled[cls]
def disable_component(self, component):
"""Force a component to be disabled.
@param component: can be a class or an instance.
"""
if not isinstance(component, type):
component = component.__class__
self.enabled[component] = False
self.components[component] = None
def enable_component(self, component):
"""Force a component to be enabled.
@param component: can be a class or an instance.
"""
if not isinstance(component, type):
component = component.__class__
self.enabled[component] = True
def component_activated(self, component):
"""Can be overridden by sub-classes so that special initialization for
        components can be provided.
"""
def is_component_enabled(self, cls):
"""Can be overridden by sub-classes to veto the activation of a
component.
If this method returns `False`, the component was disabled explicitly.
If it returns `None`, the component was neither enabled nor disabled
explicitly. In both cases, the component with the given class will not
be available.
"""
return True
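# --------------------------------------------------------------------------
# Minimal usage sketch (illustrative only; every name below is hypothetical
# and not part of the module above). It shows the intended wiring: an
# `Interface` subclass defines a protocol, one component exposes an
# `ExtensionPoint` for it, other components register via `implements()`,
# and activation happens lazily through a `ComponentManager`.
if __name__ == '__main__':
    class ITodoObserver(Interface):
        """Hypothetical protocol: notified whenever a todo item is added."""
        def todo_added(name):
            """Called with the name of the new todo item."""
    class TodoList(Component):
        observers = ExtensionPoint(ITodoObserver)
        def add(self, name):
            # Fan the event out to every enabled implementer.
            for observer in self.observers:
                observer.todo_added(name)
    class TodoPrinter(Component):
        implements(ITodoObserver)
        def todo_added(self, name):
            print('TODO:', name)
    mgr = ComponentManager()
    TodoList(mgr).add('buy milk')   # prints "TODO: buy milk"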
| [
"[email protected]"
] | |
e61ea84cae50e126560594d6977dfe14b17266b9 | 1e528494a929deada984822438b3ab569762e6c6 | /rx/testing/recorded.py | d8ab083f222f404edc7a4af700ecdfcb0f8686bf | [
"MIT"
] | permissive | Sprytile/Sprytile | a0233a00a243f263691921d7e1f6af05c5eb5442 | 6b68d0069aef5bfed6ab40d1d5a94a3382b41619 | refs/heads/master | 2022-07-10T06:54:01.003723 | 2020-09-26T07:25:35 | 2020-09-26T07:25:35 | 72,276,917 | 860 | 91 | MIT | 2022-07-07T23:37:19 | 2016-10-29T09:47:09 | Python | UTF-8 | Python | false | false | 597 | py | from rx.internal.basic import default_comparer
class Recorded(object):
def __init__(self, time, value, comparer=None):
self.time = time
self.value = value
self.comparer = comparer or default_comparer
def __eq__(self, other):
"""Returns true if a recorded value matches another recorded value"""
time_match = self.time == other.time
return time_match and self.comparer(self.value, other.value)
equals = __eq__
def __repr__(self):
return str(self)
def __str__(self):
return "%s@%s" % (self.value, self.time)
| [
"[email protected]"
] | |
f99c83d2712349c0946f6e3b580ce1a637ca20d9 | f444eede3cd341afc969756b00a34816f949238a | /encode.py | 02cc03704c672bb641575cb3f6609f413ad7a7eb | [
"MIT"
] | permissive | dcbriccetti/StegaPy | 28ce6007c0c8a2dbb38de76e52344ec621c4a8ac | a20bb263737ae445e65a602c728acc4e3602baed | refs/heads/master | 2021-07-10T03:28:28.214664 | 2020-10-03T21:11:52 | 2020-10-03T21:11:52 | 204,844,779 | 7 | 4 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | from stegapy import create_image
message = '''Steganography is the practice of concealing a file,
message, image, or video within another file, message, image,
or video. The word steganography combines the Greek words
steganos, meaning "covered or concealed", and graphe meaning
"writing".'''
create_image(message, 'original-image.png', 'secret-image.png')
| [
"[email protected]"
] | |
1fa31a59bd30ae57f8c0e2ffec65c5ef13811b6d | 528dd70727c0da10483323ae0ef2db6d01124e2d | /scrape all quotes from website/scrape all quotes from website with bs4.py | 37f07196dd6e399837b02c997a7e89f5c2b5f56c | [] | no_license | aadarshraj4321/Simple-Scraping-With-BeautifulSoup- | 051b764cb256a9b44ef73cbc1aacdb20ba764add | 79ef2d748af2df28f8cb6a1d02ed0ec76605cb14 | refs/heads/master | 2022-12-12T09:14:34.080825 | 2020-09-10T13:54:04 | 2020-09-10T13:54:04 | 293,579,101 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | import requests
from bs4 import BeautifulSoup
## url of site which we scrape
url = "https://parade.com/937586/parade/life-quotes/"
## request the url to get data from that website and check status code
page = requests.get(url)
#print(page.status_code)
## call BeautifulSoup and store in soup variable
soup = BeautifulSoup(page.text,"html.parser")
#print(soup)
quote = soup.find(class_= "page_content")
#print(quote)
p_class = soup.find_all("p")
#print(p_class)
#print(len(p_class))
#print(p_class[8])
## keep only paragraphs 8 through 160, which hold the quotes
main_p = p_class[8:161]
#print(main_p)
for i in main_p:
print(i.text,end="\n\n")
#### Done ####
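## Optional follow-up (illustrative sketch, not part of the original script):
## the same paragraphs could be saved to a text file instead of printed.
# with open("quotes.txt", "w", encoding="utf-8") as f:
#     for quote in main_p:
#         f.write(quote.text + "\n\n")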
| [
"[email protected]"
] | |
3e7c3413828b2ba8defb1d25594e2f29762dd7ea | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2333/60716/257539.py | d12c33899ad0ff7c29ed096997fe168b11f5d990 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | import math
x = int(input())
y = int(input())
bound = int(input())
lists = list()
# exponent upper bounds; guard against base 1, where math.log is undefined
# and x**0 == 1 already covers every power of that base
imax = int(math.log(bound, x)) + 1 if x > 1 else 1
jmax = int(math.log(bound, y)) + 1 if y > 1 else 1
for i in range(imax):
    for j in range(jmax):
        temp = x**i + y**j
        if temp <= bound:
            lists.append(temp)
alist = list(set(lists))
alist.sort()
print(alist) | [
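# Worked example (inputs are read one per line from stdin):
#   x = 2, y = 3, bound = 10  ->  prints [2, 3, 4, 5, 7, 9, 10]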
"[email protected]"
] | |
a53241dd468a92f36d1d3775f8261a5146840ffa | 6eec2948c0907b5377de51e61014a48dff3d5ce7 | /CODIGOS PYTHON/screenlets-pack-basic/Trash/TrashScreenlet.py | 350be6935fd1e5c67071eb8861f59dd25aad37e0 | [] | no_license | clcneogeek325/Script_python | 4da937cb2caee93a2e0eb945e77ccac8e88ec4bc | 87607c97fa738b3e64aefbe0e8c4425724ecff73 | refs/heads/master | 2021-01-17T07:44:07.124077 | 2016-06-04T03:26:44 | 2016-06-04T03:26:44 | 15,943,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,271 | py | #!/usr/bin/env python
#
# TrashScreenlet (C) 2008 Natan Yellin
# Based on the original screenlet (C) 2007 Helder Fraga
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import screenlets
from screenlets.options import BoolOption,IntOption
import cairo
import datetime
import pango
import os
import gtk
import gobject
import urllib
# use gettext for translation
import gettext
_ = screenlets.utils.get_translator(__file__)
def tdoc(obj):
obj.__doc__ = _(obj.__doc__)
return obj
@tdoc
class TrashScreenlet(screenlets.Screenlet):
"""A Screenlet that shows information about your trash folder"""
# default meta-info for Screenlets
__name__ = 'TrashScreenlet'
__version__ = '0.2.2+'
__author__ = 'Helder Fraga aka Whise'
__desc__ = __doc__
TRASH_DIRS = [os.environ['HOME'] + '/.local/share/Trash/files',
os.environ['HOME'] + '/.Trash']
style = False
show_count = True
auto_empty = False
auto_empty_size = 1000
if os.path.exists(os.environ['HOME'] +'/.local/share/Trash/files') and os.path.isdir(os.environ['HOME'] +'/.local/share/Trash/files'):
trash_folder = os.environ['HOME'] +'/.local/share/Trash/files'
else:
trash_folder = os.environ['HOME'] + '/.Trash'
item_count = 0
def __init__(self, **keyword_args):
screenlets.Screenlet.__init__(self, width=128, height=160,
drag_drop=True, **keyword_args)
# set default theme name
self.theme_name = "default"
# init other attributes
self.item_count = 0
self.add_options_group(_('Options'), _('Options'))
self.add_option(BoolOption(_('Options'), 'style', False,
_('Use gtk style'), _('Use gtk icon'), callback=self.redraw_canvas_and_update_shape))
self.add_option(BoolOption(_('Options'), 'show_count', True,
_('Show item count'), _('Show item count')))
self.add_option(BoolOption(_('Options'), 'auto_empty', False,
_('Auto empty trash'),
_('Automatically empty trash when the limit is exceded')))
self.add_option(IntOption(_('Options'), 'auto_empty_size', 1000,
_('Auto empty limit'), _('Automatically empty trash when there\
are this many items in the trash. (Only if the above option\
is checked.)'), min=1,max = 100000))
# TODO: Monitor the trash directories and call self.update when
# they change instead of calling self.update once every second
self.update()
self.refresh_timeout = gobject.timeout_add(1000, self.update)
# Redraw and update the shape if the icon theme changes
# TODO: Only redraw if self.style is True
# screenlets.drawing.icon_theme.connect("changed",
# self.redraw_canvas_and_update_shape)
def on_init (self):
print "Screenlet has been initialized."
# add default menuitems
self.add_menuitem("Empty", _("Empty Trash"))
self.add_menuitem("Open", _("Examine Trash"))
self.add_default_menuitems()
# callback for when an item is dragged and then dropped on the applet
def on_drop (self, x, y, sel_data, timestamp):
# If the trash folder doesn't exist then just return
# TODO: Create the trash folder when it doesn't exist.
if self.trash_folder is None:
screenlets.show_error(None, _('File(s) could not be moved to trash.'))
return
filename = ''
# get text from selection data
try:
txt = unicode.encode(sel_data.get_text(), 'utf-8')
except:
txt = sel_data.get_text()
txta = urllib.unquote(txt)
txta = str(txta).split('\n')
for txt in txta:
if txt and txt != '':
# if it is a filename, use it
if txt.startswith('file://'):
filename = txt[7:]
else:
screenlets.show_error(self, _('Invalid string: %s.') % txt)
else:
# else get uri-part of selection
uris = sel_data.get_uris()
if uris and len(uris)>0:
filename = uris[0][7:]
if filename != '':
if self.trash_folder==self.TRASH_DIRS[0]:
infofile=os.environ['HOME'] + '/.local/share/Trash/info/'+ os.path.basename(filename)+'.trashinfo'
count=1
while os.path.exists(infofile):
count=count+1
infofile=os.environ['HOME'] + '/.local/share/Trash/info/'+ os.path.basename(filename)+'.'+str(count)+'.trashinfo'
f=open(infofile, 'w')
f.write('[Trash Info]\n')
f.write('Path='+filename+'\n')
now=datetime.datetime.now()
f.write('DeletionDate='+ str(now.strftime("%Y-%m-%dT%H:%M:%S")))
f.close()
if count>1:
os.system('mv ' + chr(34)+ filename + chr(34) + ' ' + chr(34) + self.trash_folder + '/' + os.path.basename(filename)+'.'+str(count) + chr(34))
else:
os.system('mv ' + chr(34)+ filename + chr(34) + ' ' + self.trash_folder)
filename = ''
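	# For reference (illustrative; follows directly from the code above, which
	# writes entries in the freedesktop.org trash format): a generated
	# ~/.local/share/Trash/info/<name>.trashinfo file looks like
	#
	#   [Trash Info]
	#   Path=/home/user/example.txt
	#   DeletionDate=2008-01-01T12:00:00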
def update(self):
# find the correct trash directory or return if no trash directory exists
if os.path.exists(self.TRASH_DIRS[0]) and os.path.isdir(self.TRASH_DIRS[0]):
self.trash_folder = self.TRASH_DIRS[0]
elif os.path.exists(self.TRASH_DIRS[1]) and os.path.isdir(self.TRASH_DIRS[1]):
self.trash_folder = self.TRASH_DIRS[1]
else:
self.trash_folder = None
self.item_count = 0
return
old_item_count = self.item_count
self.item_count = len(os.listdir(self.trash_folder))
# if the auto empty feature is enabled then check if the trash needs to be emptied
if self.auto_empty and self.item_count >= self.auto_empty_size:
if self.trash_folder==self.TRASH_DIRS[0]:
os.system('rm -rf ' + os.environ['HOME'] + '/.local/share/Trash/info/*')
os.system('rm -rf ' + self.trash_folder + '/*')
os.system('rm -rf ' + self.trash_folder + '/*.*')
os.system('rm -rf ' + self.trash_folder + '/.*')
self.item_count = len(os.listdir(self.trash_folder))
# if the number of items in the trash is drawn on the icon then check if it changed
if self.show_count and self.item_count != old_item_count:
self.redraw_canvas()
return True
def on_mouse_down(self, event):
if event.type == gtk.gdk._2BUTTON_PRESS:
if event.button == 1:
os.system('xdg-open trash:/// &')
def menuitem_callback(self, widget, id):
screenlets.Screenlet.menuitem_callback(self, widget, id)
if id=="Empty":
if self.trash_folder is None:
screenlets.show_error(None, _("No trash folder found."))
elif screenlets.show_question(self,_('Do you want to permanently remove all the items in your Trash folder?')):
if self.trash_folder==self.TRASH_DIRS[0]:
os.system('rm -rf ' + os.environ['HOME'] + '/.local/share/Trash/info/*')
os.system('rm -rf ' + self.trash_folder + '/*')
os.system('rm -rf ' + self.trash_folder + '/*.*')
os.system('rm -rf ' + self.trash_folder + '/.*')
self.update()
elif id=="Open":
os.system('xdg-open trash:/// &')
def on_draw(self, ctx):
if self.theme:
ctx.set_operator(cairo.OPERATOR_OVER)
ctx.scale(self.scale, self.scale)
# find the right icon name to use
if self.item_count == 0:
ico = 'user-trash-empty'
if self.style and not self.check_for_icon(ico):
ico = 'emptytrash'
else:
ico = 'user-trash-full'
if self.style and not self.check_for_icon(ico):
ico = 'trashcan_full'
# draw the icon
if self.style == True:
self.draw_icon(ctx, 0, 0, ico, 128, 128)
else:
self.theme.render(ctx, ico)
# draw the item count
if self.show_count:
ctx.set_source_rgba(1,1,1,0.65)
self.draw_rounded_rectangle(ctx,20,128,5,self.width-40,23)
ctx.set_source_rgba(0,0,0,1)
#item counter
self.draw_text(ctx,str(self.item_count) + _(' items'), 0, 132,
"FreeSans", 10, self.width, pango.ALIGN_CENTER)
def on_draw_shape(self,ctx):
if self.theme:
self.on_draw(ctx)
if __name__ == "__main__":
import screenlets.session
screenlets.session.create_session(TrashScreenlet)
| [
"[email protected]"
] | |
15ffc7de7646ab1686857ece673ee18639100744 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp80_18500.py | bdf0609e00110d1aa1b8fd8d5e0f586bb238cfb5 | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,871 | py | ITEM: TIMESTEP
18500
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
4.1898586015657457e-01 4.6781014139841972e+01
4.1898586015657457e-01 4.6781014139841972e+01
4.1898586015657457e-01 4.6781014139841972e+01
ITEM: ATOMS id type xs ys zs
8 1 0.12863 0.0578786 0.0545008
35 1 0.0714952 0.126436 0.0592095
130 1 0.0658708 0.0665836 0.115658
165 1 0.126339 0.121896 0.127018
1415 1 0.18006 0.495626 0.433083
1545 1 0.254214 0.502221 0.494181
1427 1 0.566646 0.498376 0.442307
143 1 0.440758 0.00279837 0.1876
98 1 0.065403 0.44119 0.00251722
12 1 0.251314 0.0599336 0.058193
39 1 0.185226 0.120372 0.0535522
43 1 0.31569 0.121368 0.053695
134 1 0.186861 0.0691723 0.119051
138 1 0.30654 0.0664482 0.124596
169 1 0.252195 0.131632 0.114245
550 1 0.184185 0.179778 0.497749
23 1 0.68829 0.00884979 0.067619
16 1 0.375001 0.0650384 0.0701746
47 1 0.440896 0.124762 0.0712492
142 1 0.439324 0.0652147 0.127436
173 1 0.366145 0.12743 0.124186
20 1 0.500187 0.0613008 0.0698224
177 1 0.501206 0.133362 0.126334
1307 1 0.817198 0.500531 0.318725
46 1 0.433718 0.185343 0.00368843
113 1 0.502534 0.382036 0.00778717
102 1 0.183762 0.442594 -0.000645236
24 1 0.629641 0.0677989 0.0678286
51 1 0.567514 0.124795 0.0620831
146 1 0.564629 0.0711151 0.124599
181 1 0.62508 0.129638 0.12374
110 1 0.435082 0.445038 0.00814298
1425 1 0.506671 0.501616 0.37698
28 1 0.752629 0.0659012 0.0676995
55 1 0.694675 0.129259 0.0667039
59 1 0.813149 0.129126 0.062058
150 1 0.689324 0.0645103 0.127836
154 1 0.815001 0.0681734 0.122494
185 1 0.75336 0.128136 0.126499
137 1 0.251073 0.000129504 0.118471
90 1 0.817223 0.31344 0.00427757
4 1 0.00162167 0.0672573 0.0662244
161 1 1.00233 0.133603 0.124172
141 1 0.377784 -0.000750775 0.122383
32 1 0.875897 0.0588383 0.0642111
63 1 0.939949 0.127233 0.0623594
158 1 0.934593 0.0629485 0.12469
189 1 0.877638 0.130747 0.127321
565 1 0.631064 0.121935 0.499436
638 1 0.945134 0.442318 0.503331
40 1 0.123679 0.190193 0.0513771
67 1 0.0652086 0.253319 0.0636196
72 1 0.127516 0.313357 0.0601657
162 1 0.0664972 0.185128 0.120452
194 1 0.0587427 0.305993 0.123633
197 1 0.121526 0.245723 0.121396
155 1 0.812495 0.00640063 0.184989
1433 1 0.749248 0.497294 0.381284
44 1 0.245972 0.194516 0.055729
71 1 0.187266 0.245794 0.0592779
75 1 0.311404 0.248177 0.0571664
76 1 0.255289 0.319194 0.0632399
166 1 0.184795 0.191371 0.11917
170 1 0.312562 0.189732 0.125663
198 1 0.194256 0.305292 0.118376
201 1 0.245801 0.2453 0.124849
202 1 0.306751 0.308881 0.129815
281 1 0.758581 0.0025176 0.241122
48 1 0.373748 0.187861 0.0607087
79 1 0.441629 0.245284 0.0668735
80 1 0.370975 0.309839 0.0582946
174 1 0.431336 0.187807 0.125327
205 1 0.367079 0.257817 0.124203
206 1 0.438614 0.315439 0.120761
84 1 0.498273 0.314468 0.0634711
209 1 0.506337 0.252297 0.123704
147 1 0.559215 0.00173385 0.18526
52 1 0.503752 0.187331 0.0668195
56 1 0.629386 0.185058 0.0561069
83 1 0.562323 0.260022 0.0653855
88 1 0.627433 0.32546 0.0667923
178 1 0.559864 0.193281 0.122671
210 1 0.560234 0.315651 0.125334
213 1 0.627384 0.245628 0.123345
151 1 0.683178 -0.000246262 0.191895
17 1 0.498887 0.004899 0.0116087
26 1 0.811733 0.0630042 -0.00031692
60 1 0.752134 0.186729 0.0652373
87 1 0.692483 0.247971 0.0527772
91 1 0.819449 0.249968 0.0612067
92 1 0.744863 0.313026 0.0654736
182 1 0.68233 0.193067 0.126795
186 1 0.820449 0.185048 0.128591
214 1 0.688951 0.316057 0.130584
217 1 0.749047 0.258938 0.122566
218 1 0.812344 0.314508 0.118563
1157 1 0.121704 0.495898 0.123365
1169 1 0.499734 0.502307 0.133899
1281 1 0.999683 0.50415 0.245967
10 1 0.310483 0.0536949 -0.00482182
36 1 0.00396184 0.187144 0.0628696
193 1 0.994818 0.257106 0.122444
68 1 -0.00104012 0.311887 0.0567009
64 1 0.880068 0.185439 0.0608582
95 1 0.934849 0.247023 0.0618698
96 1 0.877796 0.312923 0.0613065
190 1 0.933742 0.191887 0.126277
221 1 0.875755 0.24965 0.122731
222 1 0.939377 0.31351 0.117867
637 1 0.885685 0.374395 0.498839
1299 1 0.564227 0.49424 0.315032
1429 1 0.626948 0.498302 0.376203
1431 1 0.689306 0.500962 0.444375
283 1 0.8197 -0.00186732 0.309551
99 1 0.0628482 0.379095 0.0724835
104 1 0.122255 0.434623 0.0677942
226 1 0.057255 0.443505 0.133306
229 1 0.12229 0.376194 0.122458
45 1 0.380977 0.130511 0.000264664
1051 1 0.815946 0.501612 0.0629591
159 1 0.935642 0.0019368 0.193175
1181 1 0.874191 0.5025 0.123992
585 1 0.245297 0.246198 0.502094
598 1 0.686393 0.306332 0.497168
103 1 0.186189 0.375838 0.0562563
107 1 0.316672 0.391584 0.0600581
108 1 0.25407 0.440975 0.06438
230 1 0.186695 0.434685 0.119543
233 1 0.24998 0.375891 0.125507
234 1 0.307813 0.444353 0.123962
263 1 0.180237 -0.00104724 0.316369
478 1 0.939432 0.312243 0.371869
13 1 0.376577 0.00354499 0.0104473
22 1 0.691544 0.0666264 0.00757064
111 1 0.437955 0.381227 0.0629977
112 1 0.373957 0.450738 0.0686193
237 1 0.365822 0.376699 0.124721
238 1 0.443028 0.444104 0.124977
241 1 0.497105 0.376957 0.120935
484 1 0.000392737 0.434529 0.436129
129 1 0.997156 0.00143936 0.131627
116 1 0.501603 0.445423 0.0652284
115 1 0.563748 0.376597 0.0665184
120 1 0.6335 0.434031 0.0634422
242 1 0.565561 0.440613 0.126599
245 1 0.632749 0.379197 0.126304
477 1 0.872677 0.251225 0.374122
119 1 0.693129 0.378985 0.0631808
123 1 0.806933 0.37302 0.0548052
124 1 0.75561 0.43495 0.0694804
246 1 0.697962 0.440001 0.122105
249 1 0.753372 0.37911 0.125342
250 1 0.819438 0.441098 0.126312
448 1 0.874339 0.189171 0.443059
269 1 0.368559 0.0100163 0.246547
512 1 0.881828 0.428263 0.436739
420 1 0.00105872 0.177413 0.428907
271 1 0.434073 0.00090474 0.303262
118 1 0.69266 0.444696 0.00188504
452 1 0.00126296 0.31509 0.432639
1173 1 0.629565 0.496156 0.131219
100 1 0.00301745 0.436042 0.0591336
225 1 0.993118 0.367191 0.115757
127 1 0.930489 0.373672 0.0568808
128 1 0.877522 0.444243 0.0660341
253 1 0.876339 0.375464 0.12504
254 1 0.940382 0.435867 0.117775
1413 1 0.126074 0.501083 0.377018
1311 1 0.935994 0.502588 0.317939
1025 1 0.998984 0.501043 0.00212314
511 1 0.945865 0.373424 0.432658
499 1 0.564639 0.381254 0.440541
136 1 0.118019 0.0648413 0.188309
163 1 0.0620042 0.124178 0.18514
258 1 0.0564018 0.0619852 0.2404
264 1 0.11963 0.0705457 0.306853
291 1 0.0569319 0.122677 0.300888
293 1 0.122784 0.129794 0.241734
482 1 0.0617978 0.437129 0.373597
21 1 0.631016 0.00988324 0.00750874
1163 1 0.313632 0.499503 0.185096
485 1 0.132295 0.368219 0.375533
1435 1 0.812488 0.501248 0.441879
140 1 0.243869 0.0599404 0.180144
167 1 0.191234 0.122516 0.1777
171 1 0.30712 0.125313 0.189483
262 1 0.181261 0.0635827 0.245744
266 1 0.310492 0.0672997 0.242729
268 1 0.244394 0.0552715 0.321719
295 1 0.186657 0.127738 0.301047
297 1 0.243918 0.122033 0.250716
299 1 0.307049 0.121232 0.309158
139 1 0.30661 0.00100524 0.188484
1411 1 0.0695975 0.491283 0.438838
589 1 0.378716 0.248887 0.504295
144 1 0.380911 0.0607473 0.180878
175 1 0.431946 0.127551 0.184125
270 1 0.433188 0.0639009 0.245027
272 1 0.370304 0.0674271 0.317396
301 1 0.377064 0.121569 0.2494
303 1 0.433125 0.121906 0.315191
276 1 0.494233 0.0594888 0.310457
305 1 0.499009 0.114752 0.245676
545 1 -0.00611488 0.128396 0.49392
510 1 0.939734 0.440762 0.375281
148 1 0.500831 0.0596305 0.182308
152 1 0.631748 0.073688 0.185419
179 1 0.564499 0.13274 0.185568
274 1 0.56557 0.0671713 0.244549
280 1 0.623775 0.0554312 0.307683
307 1 0.560316 0.116672 0.314483
309 1 0.628568 0.125838 0.249084
156 1 0.7433 0.0626393 0.191695
183 1 0.68988 0.128763 0.187326
187 1 0.806748 0.125581 0.187885
278 1 0.684924 0.0608812 0.256088
282 1 0.813903 0.0621598 0.249633
284 1 0.745761 0.0562357 0.309803
311 1 0.693347 0.12694 0.313263
313 1 0.751202 0.120276 0.252396
315 1 0.803651 0.11653 0.314347
481 1 -0.000320609 0.381758 0.367597
498 1 0.564368 0.440615 0.378564
409 1 0.751652 -0.0034148 0.377778
509 1 0.871546 0.372059 0.376405
260 1 1.0005 0.058855 0.304135
132 1 0.99612 0.0639734 0.180397
289 1 0.995439 0.131203 0.251657
160 1 0.876356 0.068778 0.1895
191 1 0.933957 0.121513 0.188903
286 1 0.937889 0.0644673 0.251821
288 1 0.878011 0.0584828 0.308517
317 1 0.869209 0.127444 0.260317
319 1 0.943745 0.125005 0.310211
168 1 0.127376 0.186628 0.177668
195 1 0.0591181 0.242411 0.187812
200 1 0.119446 0.304189 0.187268
290 1 0.0541164 0.190932 0.249959
296 1 0.120357 0.189569 0.306517
322 1 0.0603963 0.316103 0.24432
323 1 0.0585978 0.257459 0.314437
325 1 0.129117 0.253638 0.251902
328 1 0.126927 0.312481 0.308805
324 1 -0.00456993 0.31803 0.315082
196 1 0.992748 0.312577 0.186051
172 1 0.250762 0.185502 0.182031
199 1 0.184986 0.254399 0.186363
203 1 0.306897 0.25158 0.188035
204 1 0.24548 0.309499 0.181286
294 1 0.190398 0.185083 0.233575
298 1 0.306697 0.179296 0.255476
300 1 0.253489 0.190342 0.320783
326 1 0.184952 0.309294 0.247757
327 1 0.190426 0.246465 0.310077
329 1 0.253407 0.248168 0.248691
330 1 0.313129 0.308859 0.240459
331 1 0.314149 0.247045 0.301343
332 1 0.25444 0.307842 0.299994
176 1 0.365024 0.19085 0.190511
207 1 0.443775 0.2493 0.180148
208 1 0.368603 0.318745 0.185429
302 1 0.438152 0.185233 0.242697
304 1 0.376126 0.185187 0.31373
333 1 0.377367 0.246485 0.258091
334 1 0.440708 0.321067 0.251291
335 1 0.438077 0.259031 0.313737
336 1 0.372801 0.307429 0.31426
308 1 0.491887 0.181955 0.315647
180 1 0.502889 0.189032 0.189005
340 1 0.500041 0.317447 0.311974
212 1 0.500326 0.310513 0.190178
337 1 0.505346 0.247257 0.251183
184 1 0.618966 0.194063 0.193346
211 1 0.563433 0.254062 0.187848
216 1 0.61976 0.308412 0.18382
306 1 0.562126 0.183394 0.271079
312 1 0.632251 0.183757 0.31466
338 1 0.566273 0.312385 0.247329
339 1 0.564725 0.247907 0.31425
341 1 0.623904 0.252079 0.252927
344 1 0.626791 0.314682 0.310407
188 1 0.75774 0.195094 0.179223
215 1 0.687661 0.254422 0.186749
219 1 0.81743 0.251896 0.188287
220 1 0.760692 0.319997 0.186946
310 1 0.692988 0.1886 0.251448
314 1 0.810622 0.188257 0.245767
316 1 0.753025 0.181184 0.314805
342 1 0.68962 0.316785 0.242684
343 1 0.694003 0.25689 0.303251
345 1 0.751457 0.253772 0.241936
346 1 0.81734 0.309341 0.244898
347 1 0.813436 0.240751 0.318292
348 1 0.760282 0.305916 0.308452
292 1 0.00329683 0.189259 0.311853
321 1 0.99821 0.256599 0.249579
164 1 0.992027 0.193917 0.192179
192 1 0.877655 0.190897 0.190543
223 1 0.940648 0.253349 0.186363
224 1 0.879725 0.309449 0.175582
318 1 0.931578 0.182796 0.254766
320 1 0.878106 0.185531 0.317137
349 1 0.876015 0.247723 0.255577
350 1 0.934502 0.315129 0.249688
351 1 0.942796 0.250149 0.309253
352 1 0.868071 0.321533 0.312427
577 1 1.00429 0.255342 0.490697
227 1 0.0561607 0.368677 0.179034
232 1 0.124627 0.435655 0.185192
354 1 0.0617811 0.429594 0.237669
355 1 0.0688132 0.375838 0.306356
357 1 0.127706 0.375074 0.242045
360 1 0.123813 0.441574 0.307852
446 1 0.944542 0.192817 0.372124
574 1 0.939725 0.191142 0.498784
480 1 0.878349 0.309423 0.431687
449 1 0.00425657 0.248619 0.374994
231 1 0.184335 0.36848 0.180661
235 1 0.305874 0.385269 0.186425
236 1 0.243157 0.438511 0.186143
358 1 0.188806 0.444853 0.254325
359 1 0.184616 0.373681 0.307056
361 1 0.251629 0.371026 0.247562
362 1 0.310435 0.445129 0.255535
363 1 0.303129 0.375418 0.314181
364 1 0.247429 0.436897 0.320836
507 1 0.81431 0.371915 0.443512
526 1 0.438641 0.0616932 0.497049
239 1 0.429359 0.381002 0.18563
240 1 0.377116 0.44141 0.17719
365 1 0.373186 0.371162 0.256414
366 1 0.43503 0.445546 0.244723
367 1 0.430373 0.383782 0.312549
368 1 0.372036 0.437493 0.315873
244 1 0.509653 0.434764 0.186311
549 1 0.121992 0.115692 0.500596
372 1 0.501851 0.435586 0.31491
369 1 0.493642 0.382992 0.249394
243 1 0.56228 0.372487 0.191055
248 1 0.630952 0.437062 0.190857
370 1 0.562684 0.435099 0.246714
371 1 0.564425 0.38162 0.315751
373 1 0.627095 0.372609 0.250912
376 1 0.633503 0.437524 0.30281
247 1 0.697591 0.385153 0.180269
251 1 0.81713 0.378179 0.184954
252 1 0.75603 0.447532 0.184597
374 1 0.695109 0.436374 0.249217
375 1 0.688746 0.36774 0.320186
377 1 0.760151 0.382554 0.242958
378 1 0.818709 0.439241 0.249409
379 1 0.81474 0.379556 0.310599
380 1 0.748803 0.438059 0.315136
479 1 0.938115 0.247034 0.432687
1417 1 0.250068 0.502526 0.382318
411 1 0.809423 -0.00342546 0.439215
508 1 0.749908 0.433234 0.43839
356 1 0.996259 0.443149 0.309882
353 1 1.00014 0.373228 0.248206
228 1 -0.00654261 0.432171 0.185955
255 1 0.93871 0.370264 0.182164
256 1 0.887227 0.4344 0.187026
381 1 0.875334 0.372579 0.245027
382 1 0.934402 0.438748 0.251002
383 1 0.938997 0.381934 0.314446
384 1 0.876782 0.440779 0.317587
285 1 0.875274 -0.000486364 0.251492
491 1 0.310974 0.366885 0.438762
386 1 0.068929 0.0618215 0.37575
392 1 0.127112 0.0578818 0.434578
419 1 0.0682654 0.120693 0.438447
421 1 0.122775 0.123742 0.374274
407 1 0.679413 0.00642075 0.437967
85 1 0.620236 0.252366 -0.00050574
494 1 0.436238 0.43869 0.380089
390 1 0.178481 0.0676024 0.36947
394 1 0.307324 0.0603667 0.374503
396 1 0.246071 0.0618243 0.437113
423 1 0.189366 0.12471 0.432584
425 1 0.251285 0.121208 0.374066
427 1 0.312774 0.12433 0.438028
506 1 0.812841 0.438682 0.373059
492 1 0.254282 0.437526 0.437245
14 1 0.438575 0.0620067 0.00603471
398 1 0.43172 0.0675052 0.378668
400 1 0.37429 0.0638924 0.435241
429 1 0.372251 0.123776 0.376224
431 1 0.435916 0.128694 0.43985
433 1 0.502031 0.124552 0.375561
496 1 0.370337 0.438368 0.434688
505 1 0.749376 0.372628 0.38301
495 1 0.436275 0.374008 0.441806
157 1 0.871686 0.000110023 0.135772
404 1 0.501565 0.0664431 0.435087
402 1 0.566124 0.0572873 0.377304
408 1 0.620975 0.0626447 0.439046
435 1 0.562025 0.12432 0.433386
437 1 0.621679 0.114529 0.374503
493 1 0.371772 0.377482 0.376114
503 1 0.680627 0.372843 0.435559
406 1 0.689004 0.0650457 0.375398
410 1 0.814186 0.0577564 0.373847
412 1 0.747562 0.0596541 0.445354
439 1 0.686726 0.12297 0.436983
441 1 0.748012 0.123413 0.377771
443 1 0.814617 0.123917 0.436227
19 1 0.570796 0.00403543 0.0686369
502 1 0.688536 0.435802 0.37432
486 1 0.184716 0.435896 0.373662
501 1 0.620461 0.378824 0.373221
417 1 0.0175296 0.124929 0.37239
388 1 1.0079 0.0657185 0.433732
414 1 0.935162 0.0586856 0.370205
416 1 0.873022 0.0580128 0.436622
445 1 0.885784 0.124041 0.374957
447 1 0.935034 0.131017 0.434659
488 1 0.124006 0.427108 0.433415
395 1 0.314151 0.00434732 0.446451
418 1 0.0650379 0.18479 0.365566
424 1 0.126568 0.180967 0.436299
450 1 0.0601571 0.319145 0.373006
451 1 0.0612203 0.241614 0.438036
453 1 0.121142 0.259586 0.375721
456 1 0.13047 0.315415 0.434298
483 1 0.0585111 0.374073 0.435561
497 1 0.498564 0.376639 0.379017
422 1 0.185109 0.182612 0.371331
426 1 0.312495 0.18802 0.373914
428 1 0.249849 0.185052 0.430789
454 1 0.190096 0.312539 0.368954
455 1 0.186747 0.249784 0.446782
457 1 0.24955 0.24698 0.380246
458 1 0.313996 0.313095 0.369525
459 1 0.315824 0.248778 0.438298
460 1 0.254799 0.307548 0.442708
500 1 0.503151 0.431632 0.440559
430 1 0.433697 0.182536 0.380911
432 1 0.368776 0.192068 0.440855
461 1 0.372331 0.247002 0.368331
462 1 0.438903 0.320219 0.376625
463 1 0.436368 0.24409 0.43692
464 1 0.368469 0.309874 0.43811
468 1 0.49947 0.323354 0.436519
436 1 0.501331 0.190489 0.43811
504 1 0.624827 0.43402 0.437729
465 1 0.503494 0.253478 0.371356
472 1 0.61907 0.305647 0.426133
434 1 0.56535 0.189679 0.369434
440 1 0.618851 0.187866 0.435238
466 1 0.557687 0.315573 0.367237
467 1 0.556739 0.250805 0.431313
469 1 0.625471 0.248035 0.36448
487 1 0.190596 0.376228 0.434602
489 1 0.242442 0.37266 0.367518
444 1 0.750269 0.182987 0.431195
442 1 0.816192 0.17931 0.373609
438 1 0.686531 0.183322 0.370606
471 1 0.686621 0.24098 0.436429
473 1 0.748128 0.248541 0.376137
475 1 0.814312 0.252375 0.433632
474 1 0.809833 0.309581 0.380676
476 1 0.742053 0.313481 0.437484
470 1 0.695245 0.306631 0.376122
490 1 0.313222 0.43906 0.373394
135 1 0.186068 -0.00492356 0.180357
81 1 0.501035 0.249947 -0.000778243
622 1 0.433476 0.431408 0.499771
614 1 0.18296 0.436566 0.496785
618 1 0.315563 0.441388 0.493234
634 1 0.811969 0.431889 0.496497
61 1 0.877999 0.120223 0.00166446
82 1 0.56865 0.321165 0.00712645
1295 1 0.430963 0.500273 0.310831
1175 1 0.691354 0.502332 0.189127
594 1 0.561443 0.31911 0.498151
617 1 0.253275 0.376399 0.494038
514 1 0.0574599 0.0606271 0.499331
602 1 0.822473 0.310204 0.49386
70 1 0.189829 0.308445 0.000415827
626 1 0.567975 0.442099 0.496111
153 1 0.750812 0.00847262 0.123765
606 1 0.943409 0.310776 0.493585
265 1 0.247097 0.00723283 0.254498
546 1 0.0642508 0.180146 0.494294
554 1 0.300059 0.187686 0.495351
15 1 0.434495 -0.00151573 0.0693166
275 1 0.562321 -0.000744233 0.305888
149 1 0.628678 0.00733847 0.127221
399 1 0.438914 0.00513347 0.439232
413 1 0.88392 -0.00298705 0.373252
542 1 0.941033 0.0602112 0.490278
517 1 0.123182 4.83189e-05 0.497481
1049 1 0.751457 0.502595 0.00380014
570 1 0.813886 0.191142 0.500393
613 1 0.125868 0.374915 0.496557
593 1 0.497587 0.254151 0.494964
105 1 0.252979 0.378371 0.00100016
533 1 0.620363 0.00141616 0.500547
86 1 0.681423 0.309293 0.0051038
114 1 0.559256 0.444464 0.0030918
66 1 0.0597245 0.32211 -0.000725725
94 1 0.935006 0.309846 -9.17446e-05
89 1 0.758924 0.240755 0.00364801
49 1 0.511589 0.12225 0.00739352
1565 1 0.877366 0.49872 0.493249
601 1 0.743027 0.247043 0.493627
520 1 0.124976 0.0570281 0.568929
547 1 0.0579936 0.121936 0.565663
642 1 0.058888 0.05765 0.630489
677 1 0.12011 0.126107 0.620143
516 1 0.000787381 0.0642015 0.564512
1024 1 0.873877 0.437678 0.935361
610 1 0.0595044 0.430779 0.48988
1557 1 0.620863 0.505124 0.510786
1671 1 0.190129 0.498318 0.691051
524 1 0.253566 0.0591809 0.559283
551 1 0.192822 0.119255 0.563432
555 1 0.31462 0.124364 0.556308
646 1 0.196134 0.0626094 0.620983
650 1 0.311356 0.0637104 0.622516
681 1 0.259857 0.125006 0.616042
538 1 0.810047 0.0588832 0.505351
1793 1 -0.000115426 0.498982 0.753051
522 1 0.314856 0.0623515 0.50237
655 1 0.438852 0.000928016 0.682607
528 1 0.376374 0.0619673 0.563858
559 1 0.44249 0.115591 0.562195
654 1 0.441021 0.0672012 0.628663
685 1 0.370176 0.134738 0.618558
532 1 0.50041 0.0608002 0.566026
641 1 0.994199 0.00199448 0.623282
689 1 0.50725 0.127137 0.621479
536 1 0.621479 0.0601718 0.560664
563 1 0.563155 0.124445 0.564915
658 1 0.56214 0.0598254 0.627838
693 1 0.624162 0.128143 0.629377
1821 1 0.883237 0.495634 0.747172
573 1 0.882366 0.117446 0.49657
540 1 0.750517 0.0669984 0.567677
567 1 0.6912 0.131328 0.572225
571 1 0.821425 0.13324 0.557992
662 1 0.683958 0.0645645 0.618983
666 1 0.816967 0.0660651 0.620315
697 1 0.750701 0.123064 0.625573
1819 1 0.810292 0.498674 0.817167
799 1 0.932634 0.000748378 0.809711
2 1 0.0646065 0.0562625 1.00341
1823 1 0.939662 0.503304 0.819
673 1 0.0125888 0.124069 0.632539
544 1 0.879987 0.057621 0.560454
575 1 0.943587 0.127156 0.565494
670 1 0.939433 0.0598925 0.620938
701 1 0.886149 0.130126 0.628359
1679 1 0.439488 0.490973 0.682821
1023 1 0.938848 0.375344 0.942957
1815 1 0.672915 0.491784 0.805161
530 1 0.559614 0.0652277 0.497373
552 1 0.125258 0.176346 0.555742
579 1 0.0589168 0.250774 0.560251
584 1 0.120089 0.315865 0.56623
674 1 0.0666281 0.186495 0.624883
706 1 0.066191 0.314034 0.621694
709 1 0.12309 0.251344 0.617172
556 1 0.247929 0.191411 0.561861
583 1 0.18381 0.238313 0.559069
587 1 0.308903 0.249945 0.558867
588 1 0.250224 0.311014 0.558506
678 1 0.1885 0.174521 0.622731
682 1 0.307838 0.191694 0.628331
710 1 0.184364 0.310141 0.623101
713 1 0.248008 0.246547 0.619318
714 1 0.313188 0.3166 0.630216
1681 1 0.488917 0.497478 0.620251
34 1 0.0633321 0.183844 0.994433
597 1 0.626349 0.252527 0.498182
560 1 0.363977 0.191099 0.562415
591 1 0.44282 0.250469 0.56019
592 1 0.371954 0.313116 0.566185
686 1 0.432231 0.192742 0.6135
717 1 0.367849 0.249161 0.621083
718 1 0.434603 0.309112 0.626086
562 1 0.556835 0.189275 0.497902
791 1 0.681999 0.00498853 0.81051
1813 1 0.617543 0.496054 0.752754
564 1 0.498104 0.18535 0.559506
721 1 0.501757 0.249165 0.626467
596 1 0.49712 0.316906 0.565776
568 1 0.635614 0.188534 0.574553
595 1 0.558628 0.260206 0.558007
600 1 0.62453 0.308078 0.56566
690 1 0.566448 0.184974 0.62371
722 1 0.559746 0.308902 0.626531
725 1 0.636435 0.249829 0.623786
117 1 0.620078 0.383071 1.00435
572 1 0.760653 0.192281 0.566053
599 1 0.695046 0.249861 0.561965
603 1 0.811999 0.26157 0.560824
604 1 0.745104 0.305535 0.55683
694 1 0.697384 0.198658 0.631327
698 1 0.815698 0.191847 0.627443
726 1 0.689994 0.315889 0.62716
729 1 0.752485 0.255721 0.622439
730 1 0.812151 0.322074 0.624743
548 1 7.60752e-05 0.191501 0.55563
580 1 1.005 0.309162 0.55905
705 1 0.00236837 0.251182 0.618666
576 1 0.882917 0.189035 0.568104
607 1 0.943853 0.251213 0.565173
608 1 0.880628 0.310848 0.563529
702 1 0.94698 0.187813 0.625764
733 1 0.870129 0.257081 0.624962
734 1 0.937035 0.315513 0.62472
611 1 0.0615437 0.375232 0.559548
616 1 0.125849 0.434474 0.563919
738 1 0.06375 0.437059 0.625791
741 1 0.132732 0.375085 0.624854
737 1 0.999813 0.380732 0.628682
645 1 0.132388 -0.00279351 0.628269
615 1 0.187734 0.375118 0.558889
619 1 0.308917 0.374275 0.558312
620 1 0.247503 0.435324 0.555868
742 1 0.188957 0.442239 0.630732
745 1 0.245012 0.365391 0.622598
746 1 0.31092 0.429689 0.624994
793 1 0.755225 0.000739964 0.743062
925 1 0.876148 0.0021337 0.875919
623 1 0.43086 0.372245 0.563075
624 1 0.37444 0.438188 0.558598
749 1 0.370412 0.375756 0.624437
750 1 0.431629 0.433984 0.621372
1022 1 0.939098 0.437489 0.879606
1801 1 0.261875 0.504294 0.748999
628 1 0.495091 0.442932 0.559824
753 1 0.503775 0.376573 0.624242
627 1 0.554297 0.37862 0.56113
632 1 0.624599 0.435422 0.561762
754 1 0.561989 0.437846 0.620762
757 1 0.613214 0.370096 0.616645
665 1 0.75213 -0.000916916 0.624528
927 1 0.937435 0.00104184 0.935879
631 1 0.686383 0.369334 0.556377
635 1 0.820811 0.374829 0.558363
636 1 0.750693 0.433765 0.561504
758 1 0.687924 0.435776 0.62428
761 1 0.750097 0.371205 0.624482
762 1 0.806927 0.437005 0.621262
1685 1 0.620789 0.495702 0.624345
612 1 0.0067794 0.438605 0.565462
639 1 0.939184 0.372957 0.562969
640 1 0.877352 0.437838 0.564944
765 1 0.873983 0.377065 0.622349
766 1 0.938917 0.444331 0.626093
1021 1 0.875492 0.367663 0.879703
527 1 0.439753 -0.000934138 0.552844
1687 1 0.682854 0.499399 0.687401
648 1 0.127353 0.062887 0.685134
675 1 0.0635242 0.119768 0.695558
770 1 0.0565298 0.057739 0.748684
776 1 0.13009 0.0562922 0.807164
803 1 0.0665602 0.118186 0.811939
805 1 0.133256 0.12363 0.753062
125 1 0.872656 0.378765 1.00254
909 1 0.377815 0.00731689 0.879792
993 1 1.001 0.369632 0.880199
917 1 0.627304 0.0097238 0.87082
652 1 0.248423 0.0568976 0.687047
679 1 0.186124 0.124253 0.684785
683 1 0.314036 0.121684 0.68461
774 1 0.190934 0.0617053 0.744571
778 1 0.317793 0.0561656 0.746283
780 1 0.253203 0.0581334 0.804983
807 1 0.192324 0.125007 0.817181
809 1 0.252706 0.122313 0.748181
811 1 0.319121 0.122703 0.817518
590 1 0.429404 0.314696 0.496612
656 1 0.375895 0.0581657 0.681827
687 1 0.430638 0.124263 0.695002
782 1 0.440532 0.0626406 0.751695
784 1 0.37198 0.0569303 0.807183
813 1 0.374363 0.121902 0.748795
815 1 0.447745 0.121254 0.814356
1020 1 0.753087 0.436015 0.940962
817 1 0.499001 0.120297 0.750864
660 1 0.501231 0.0652286 0.688637
788 1 0.50364 0.0619245 0.812512
664 1 0.632587 0.0602643 0.687764
691 1 0.568711 0.122836 0.693514
786 1 0.563908 0.0566484 0.748808
792 1 0.628038 0.0705163 0.807551
819 1 0.560808 0.121414 0.810048
821 1 0.633862 0.134564 0.749202
621 1 0.368401 0.371592 0.500254
1951 1 0.942272 0.49866 0.940867
668 1 0.753395 0.0614101 0.682897
695 1 0.690234 0.131491 0.684343
699 1 0.815089 0.124753 0.685066
790 1 0.694023 0.0720995 0.743345
794 1 0.814992 0.0627455 0.747501
796 1 0.749951 0.0668786 0.806344
823 1 0.689842 0.125698 0.81622
825 1 0.753585 0.124982 0.751548
827 1 0.816878 0.123144 0.810879
53 1 0.62256 0.126226 1.00132
801 1 0.997682 0.117713 0.749239
644 1 0.995218 0.0549757 0.6854
772 1 0.996276 0.0630443 0.818691
672 1 0.87902 0.0559993 0.689188
703 1 0.937506 0.117001 0.689061
798 1 0.934011 0.0629839 0.753632
800 1 0.87331 0.0650653 0.808319
829 1 0.882813 0.126659 0.744416
831 1 0.93786 0.12284 0.807415
1019 1 0.817787 0.375084 0.945435
680 1 0.122196 0.189844 0.691055
707 1 0.0595661 0.255885 0.685142
712 1 0.127529 0.315054 0.68393
802 1 0.0607524 0.189704 0.754875
808 1 0.127134 0.178864 0.807584
834 1 0.0599968 0.31435 0.747446
835 1 0.0523414 0.247138 0.812163
837 1 0.118573 0.253131 0.744585
840 1 0.12449 0.312862 0.810543
676 1 -0.00234862 0.177075 0.691866
836 1 1.00234 0.312802 0.817822
684 1 0.249297 0.188112 0.684062
711 1 0.190041 0.253293 0.689913
715 1 0.306717 0.25319 0.68914
716 1 0.249225 0.314422 0.690356
806 1 0.187324 0.183822 0.752187
810 1 0.3144 0.188439 0.745285
812 1 0.255505 0.184479 0.810867
838 1 0.190963 0.310907 0.748142
839 1 0.184974 0.24775 0.806753
841 1 0.246467 0.248195 0.74955
842 1 0.306137 0.314445 0.761912
843 1 0.311249 0.254418 0.815926
844 1 0.241664 0.320999 0.815686
688 1 0.373799 0.185749 0.681387
719 1 0.441379 0.240309 0.681772
720 1 0.370605 0.306623 0.690234
814 1 0.437317 0.195728 0.752138
816 1 0.383891 0.17836 0.812594
845 1 0.371069 0.246606 0.753267
846 1 0.442292 0.304408 0.749413
847 1 0.43883 0.249442 0.813287
848 1 0.374358 0.315061 0.811481
852 1 0.507635 0.306214 0.813185
692 1 0.499774 0.180658 0.688203
820 1 0.493107 0.187354 0.814437
724 1 0.4977 0.30206 0.691281
849 1 0.505741 0.248301 0.751474
696 1 0.622753 0.187052 0.687077
723 1 0.558568 0.245077 0.690007
728 1 0.623667 0.308954 0.681483
818 1 0.559495 0.186285 0.751909
824 1 0.632721 0.181148 0.822641
850 1 0.565574 0.309978 0.744878
851 1 0.568515 0.245736 0.811337
853 1 0.625156 0.25162 0.752403
856 1 0.622637 0.311293 0.808384
700 1 0.758752 0.188054 0.691497
727 1 0.69202 0.255608 0.696969
731 1 0.812679 0.255354 0.687729
732 1 0.751176 0.312823 0.684512
822 1 0.691299 0.191231 0.751373
826 1 0.819812 0.187133 0.744521
828 1 0.757394 0.180332 0.816736
854 1 0.691007 0.316842 0.751653
855 1 0.687601 0.2514 0.814187
857 1 0.759386 0.245104 0.752065
858 1 0.819093 0.311104 0.746942
859 1 0.814952 0.247793 0.812312
860 1 0.753255 0.306593 0.801456
708 1 -0.00248325 0.31487 0.683304
804 1 1.00106 0.18332 0.817104
833 1 0.999535 0.243701 0.744069
704 1 0.882372 0.187266 0.6835
735 1 0.933995 0.25165 0.681544
736 1 0.878166 0.315192 0.690595
830 1 0.935506 0.185394 0.748584
832 1 0.877138 0.185301 0.805986
861 1 0.884377 0.247931 0.747591
862 1 0.950086 0.314762 0.751942
863 1 0.934312 0.251723 0.811522
864 1 0.872277 0.313287 0.810767
739 1 0.0613139 0.384917 0.686998
744 1 0.128866 0.434704 0.683494
866 1 0.0716927 0.442242 0.747415
867 1 0.0649205 0.376857 0.810908
869 1 0.121417 0.374408 0.751937
872 1 0.128821 0.443685 0.818086
740 1 0.00164068 0.443173 0.685013
1018 1 0.8113 0.438361 0.875598
743 1 0.181686 0.375333 0.691406
747 1 0.31553 0.37818 0.695224
748 1 0.255924 0.433793 0.685762
870 1 0.187386 0.440318 0.751731
871 1 0.177586 0.377678 0.813799
873 1 0.249135 0.385211 0.759924
874 1 0.316498 0.437761 0.74497
875 1 0.310434 0.374404 0.821422
876 1 0.241435 0.445798 0.80971
537 1 0.744819 0.00899256 0.506592
561 1 0.498577 0.126855 0.505245
751 1 0.433845 0.370393 0.687317
752 1 0.381943 0.434871 0.688631
877 1 0.375486 0.373179 0.74986
878 1 0.440251 0.442142 0.756279
879 1 0.434494 0.371892 0.819468
880 1 0.378088 0.435373 0.80576
756 1 0.500448 0.433499 0.69083
884 1 0.495701 0.44163 0.816562
881 1 0.508586 0.366595 0.747306
1539 1 0.0578303 0.503592 0.558799
789 1 0.623895 0.0060864 0.748626
787 1 0.566594 0.00820415 0.816987
630 1 0.687997 0.435067 0.499196
755 1 0.56364 0.378109 0.683306
760 1 0.629389 0.435125 0.687486
882 1 0.552533 0.433162 0.750491
883 1 0.55614 0.368393 0.811082
885 1 0.625828 0.372875 0.747934
888 1 0.621641 0.432491 0.805999
65 1 0.00949466 0.248689 0.99312
543 1 0.935081 0.000149997 0.556028
759 1 0.689815 0.366254 0.688114
763 1 0.81872 0.371307 0.692682
764 1 0.750478 0.434176 0.684736
886 1 0.691089 0.435259 0.744916
887 1 0.687813 0.368075 0.817668
889 1 0.751517 0.370621 0.750093
890 1 0.807805 0.434909 0.751622
891 1 0.809248 0.371224 0.809705
892 1 0.750214 0.436985 0.807758
868 1 0.000967602 0.442082 0.81914
865 1 0.00218509 0.379608 0.751319
767 1 0.939942 0.3727 0.694756
768 1 0.875736 0.429448 0.678619
893 1 0.870461 0.380795 0.751168
894 1 0.936149 0.433652 0.750279
895 1 0.938709 0.379124 0.813752
896 1 0.878086 0.438068 0.811899
74 1 0.310884 0.316743 0.999133
919 1 0.692731 0.00614694 0.941546
898 1 0.0583662 0.0543817 0.873632
904 1 0.128895 0.057216 0.935845
931 1 0.0613867 0.117966 0.935867
933 1 0.127023 0.116582 0.874606
900 1 0.00381855 0.0556954 0.937097
929 1 0.004825 0.128028 0.874405
1010 1 0.564475 0.430209 0.876093
1939 1 0.559073 0.492653 0.937712
1011 1 0.560623 0.377747 0.935487
1014 1 0.685327 0.440868 0.869519
1943 1 0.684466 0.502324 0.940585
902 1 0.186336 0.0552299 0.873584
906 1 0.310756 0.0548683 0.878444
908 1 0.251273 0.0512784 0.93789
935 1 0.183721 0.127317 0.934515
937 1 0.247718 0.106198 0.875565
939 1 0.320689 0.119851 0.935721
915 1 0.571895 0.00492363 0.937438
1017 1 0.745711 0.375774 0.882301
1013 1 0.628683 0.377141 0.875557
663 1 0.695771 -0.0039095 0.676649
1563 1 0.809681 0.490654 0.55619
1015 1 0.686329 0.382386 0.946995
910 1 0.439287 0.064303 0.874699
912 1 0.381586 0.0619228 0.944124
941 1 0.377914 0.114774 0.87802
943 1 0.444638 0.125361 0.940074
916 1 0.501765 0.0653858 0.943271
945 1 0.503563 0.119463 0.873195
534 1 0.688065 0.0664354 0.508736
535 1 0.68504 -0.0014461 0.567649
1559 1 0.689279 0.501318 0.564285
914 1 0.565627 0.0627837 0.878742
920 1 0.635961 0.0668889 0.948213
947 1 0.562502 0.121394 0.94274
949 1 0.621433 0.1249 0.883851
1547 1 0.307455 0.489208 0.560021
62 1 0.945529 0.18835 1.00317
918 1 0.698696 0.0710557 0.87272
922 1 0.813077 0.0606998 0.880226
924 1 0.752544 0.0626904 0.941393
951 1 0.691272 0.130593 0.935361
953 1 0.758232 0.124865 0.879777
955 1 0.815903 0.123653 0.933677
669 1 0.872342 -0.00102595 0.618343
926 1 0.928741 0.0637564 0.871343
928 1 0.874043 0.0582307 0.93766
957 1 0.87759 0.128808 0.865712
959 1 0.943297 0.125708 0.9399
1693 1 0.871509 0.499127 0.623969
1689 1 0.746331 0.497445 0.631546
558 1 0.438605 0.18628 0.500357
930 1 0.0675307 0.181616 0.868251
936 1 0.128947 0.187062 0.937223
962 1 0.0675149 0.310672 0.878197
963 1 0.0683993 0.244767 0.931219
965 1 0.123982 0.245232 0.872272
968 1 0.121893 0.322011 0.941093
961 1 -0.00572034 0.253322 0.878876
73 1 0.2515 0.249496 1.00266
1543 1 0.182149 0.500431 0.569077
934 1 0.191474 0.191673 0.87689
938 1 0.315187 0.188483 0.876618
940 1 0.248562 0.196705 0.934886
966 1 0.182243 0.305491 0.872071
967 1 0.178168 0.252372 0.941452
969 1 0.254399 0.251749 0.872318
970 1 0.3126 0.312746 0.87638
971 1 0.302911 0.252981 0.932493
972 1 0.244269 0.318528 0.934108
50 1 0.562677 0.19136 1.00186
1669 1 0.122773 0.491904 0.625637
1675 1 0.317728 0.497877 0.687836
942 1 0.441111 0.180873 0.873388
944 1 0.372811 0.183615 0.932113
973 1 0.375196 0.249558 0.86974
974 1 0.438014 0.308595 0.871654
975 1 0.433071 0.24952 0.939887
976 1 0.369815 0.314527 0.947108
948 1 0.504696 0.18878 0.938552
977 1 0.50526 0.244097 0.875149
33 1 0.00481985 0.123588 0.999349
37 1 0.124251 0.12132 0.994711
980 1 0.494991 0.303301 0.932431
946 1 0.560163 0.184608 0.869241
952 1 0.616723 0.19273 0.939815
978 1 0.563315 0.309789 0.875419
979 1 0.559442 0.255744 0.944859
981 1 0.628535 0.247616 0.867348
984 1 0.624601 0.313826 0.941719
956 1 0.760446 0.190772 0.935256
954 1 0.823188 0.190837 0.872922
950 1 0.702024 0.191959 0.87053
982 1 0.685628 0.314327 0.885066
983 1 0.69722 0.252031 0.938141
988 1 0.750804 0.315256 0.944613
987 1 0.81069 0.254915 0.934456
986 1 0.810532 0.315059 0.875788
985 1 0.750214 0.261161 0.878966
932 1 0.00556112 0.188186 0.93376
964 1 1.00152 0.315336 0.936317
992 1 0.877243 0.30884 0.940365
989 1 0.879309 0.251161 0.883265
960 1 0.883426 0.177585 0.936188
958 1 0.94047 0.178386 0.869225
991 1 0.944653 0.252839 0.942711
990 1 0.937683 0.316591 0.87363
1691 1 0.819423 0.490107 0.688544
78 1 0.438449 0.313387 1.00169
659 1 0.572152 0.00560953 0.686313
109 1 0.372936 0.377885 1.00065
996 1 0.002986 0.435112 0.936846
994 1 0.0643721 0.451925 0.880721
1000 1 0.121656 0.439984 0.93942
995 1 0.0635143 0.380571 0.932598
997 1 0.128242 0.383042 0.880329
1817 1 0.750306 0.500009 0.742461
1677 1 0.378034 0.501063 0.626032
77 1 0.369883 0.247012 0.994193
1002 1 0.311867 0.442949 0.872798
1003 1 0.311452 0.377345 0.928471
999 1 0.186852 0.375702 0.944815
998 1 0.188883 0.447619 0.880982
1004 1 0.246688 0.434167 0.942128
1001 1 0.24284 0.374685 0.875554
1551 1 0.435296 0.501068 0.556128
97 1 -0.00266586 0.378606 1.00518
1016 1 0.625948 0.436622 0.932274
1012 1 0.502062 0.439628 0.941745
1009 1 0.500574 0.375835 0.87877
1008 1 0.378139 0.442574 0.93156
1005 1 0.37644 0.374646 0.878176
1007 1 0.432571 0.376702 0.936485
1006 1 0.436413 0.448363 0.87428
93 1 0.878467 0.241917 0.995859
122 1 0.813028 0.446775 1.00065
101 1 0.119117 0.380265 0.995345
42 1 0.31143 0.183715 0.990054
69 1 0.115846 0.255947 0.998528
633 1 0.748017 0.376683 0.497537
609 1 0.997177 0.378085 0.500814
41 1 0.251513 0.126534 0.995193
54 1 0.689895 0.18367 0.997583
38 1 0.187523 0.188796 0.993896
121 1 0.751935 0.382985 1.00259
582 1 0.178395 0.310713 0.505959
6 1 0.184996 0.0635869 0.995772
553 1 0.24734 0.124538 0.501558
569 1 0.751432 0.135128 0.504487
57 1 0.752179 0.128285 0.998415
126 1 0.938076 0.439561 1.00128
518 1 0.186188 0.0593823 0.506741
586 1 0.315746 0.308204 0.502011
30 1 0.947503 0.0585996 1.00714
525 1 0.37471 0.00888587 0.497882
1541 1 0.125744 0.496159 0.506001
106 1 0.311116 0.441063 0.996276
605 1 0.882423 0.245208 0.50616
58 1 0.817317 0.182849 0.994013
18 1 0.560711 0.0596463 0.999056
566 1 0.687903 0.18168 0.503839
581 1 0.117616 0.249344 0.504904
578 1 0.0702221 0.317677 0.506373
557 1 0.379385 0.123486 0.507386
625 1 0.497378 0.376781 0.498367
25 1 0.748444 0.000373381 1.00846
629 1 0.618825 0.375311 0.509369
1032 1 0.125946 0.567196 0.0618293
1059 1 0.0621624 0.626736 0.0587539
1154 1 0.0706349 0.563678 0.118696
1189 1 0.12947 0.62611 0.126159
1153 1 0.993328 0.506885 0.131241
1577 1 0.248783 0.626425 0.49595
1036 1 0.247714 0.564656 0.0601123
1063 1 0.193384 0.629884 0.057813
1067 1 0.31303 0.626072 0.0654962
1158 1 0.184882 0.567598 0.115426
1162 1 0.309823 0.566384 0.126359
1193 1 0.253846 0.625248 0.123255
389 1 0.119045 0.999915 0.372453
27 1 0.8144 0.997036 0.065314
1040 1 0.376427 0.565647 0.066262
1071 1 0.441682 0.630153 0.0627076
1166 1 0.440181 0.562109 0.122161
1197 1 0.38491 0.628458 0.125509
1201 1 0.497564 0.62655 0.127575
1044 1 0.505324 0.56023 0.0699229
1086 1 0.935828 0.684868 0.00568415
1026 1 0.0583143 0.56229 0.00280733
1048 1 0.626867 0.564399 0.0700533
1075 1 0.560027 0.624055 0.0636651
1170 1 0.570034 0.562192 0.133997
1205 1 0.625692 0.621343 0.128657
1041 1 0.497324 0.505403 0.00294732
1085 1 0.876353 0.626523 0.0024793
145 1 0.496131 0.999659 0.124201
1054 1 0.937556 0.565699 -0.00070927
11 1 0.314964 1.00638 0.0612099
1052 1 0.748177 0.561207 0.0669817
1079 1 0.688267 0.621899 0.0626128
1083 1 0.804734 0.626488 0.0659541
1174 1 0.690253 0.562819 0.131582
1178 1 0.811185 0.562797 0.121132
1209 1 0.754981 0.620897 0.127274
1537 1 0.00578643 0.499906 0.496754
1130 1 0.305465 0.936209 0.0104021
1039 1 0.435268 0.501689 0.0690635
1185 1 0.00528725 0.618691 0.128297
1028 1 1.00112 0.552582 0.0682216
1056 1 0.875545 0.564146 0.0581633
1087 1 0.933887 0.625328 0.0679582
1182 1 0.927612 0.565142 0.12685
1213 1 0.873917 0.631334 0.124867
415 1 0.942191 0.993394 0.441114
1183 1 0.934423 0.505475 0.191101
1064 1 0.12724 0.679169 0.0642671
1091 1 0.064649 0.739618 0.0649564
1096 1 0.124583 0.810277 0.063091
1186 1 0.056321 0.684334 0.127394
1218 1 0.0615317 0.813846 0.119801
1221 1 0.125395 0.748841 0.119884
1060 1 1.00033 0.688433 0.0620973
1092 1 0.998625 0.80743 0.0551308
403 1 0.555775 1.00717 0.434784
3 1 0.0611129 0.996977 0.0613669
1068 1 0.249638 0.687213 0.0644237
1095 1 0.18945 0.742083 0.0671727
1099 1 0.30581 0.748193 0.0661354
1100 1 0.245767 0.806289 0.0602991
1190 1 0.184361 0.681533 0.126687
1194 1 0.312987 0.682977 0.125687
1222 1 0.186546 0.810313 0.123418
1225 1 0.250559 0.73828 0.129315
1226 1 0.303407 0.802563 0.132398
31 1 0.936553 0.999808 0.0623011
257 1 1.00452 1.00072 0.245594
1287 1 0.1875 0.502667 0.31588
393 1 0.247478 0.99135 0.371528
1072 1 0.374409 0.691599 0.0644729
1103 1 0.435368 0.756845 0.0603776
1104 1 0.367378 0.811083 0.0649345
1198 1 0.436056 0.68848 0.118353
1229 1 0.366526 0.748723 0.130488
1230 1 0.43564 0.814968 0.115996
1155 1 0.0609311 0.501152 0.192222
1055 1 0.933318 0.507444 0.0631039
1233 1 0.494767 0.749393 0.124114
1076 1 0.503052 0.690288 0.0643456
1108 1 0.502114 0.816339 0.0648185
1080 1 0.622672 0.679792 0.0715166
1107 1 0.55847 0.743482 0.063459
1112 1 0.616116 0.814862 0.0667775
1202 1 0.556865 0.691888 0.131066
1234 1 0.556211 0.80165 0.125318
1237 1 0.628316 0.747452 0.123241
1070 1 0.440002 0.689401 0.00488516
29 1 0.87122 0.999664 0.00347818
1066 1 0.316269 0.692604 0.00060179
1606 1 0.187684 0.816246 0.495872
1084 1 0.747013 0.68628 0.0655807
1111 1 0.6953 0.747522 0.0742812
1115 1 0.818633 0.75027 0.0592738
1116 1 0.747553 0.817906 0.061527
1206 1 0.686949 0.681048 0.12953
1210 1 0.808319 0.687358 0.118784
1238 1 0.693057 0.805899 0.136006
1241 1 0.758382 0.752319 0.125032
1242 1 0.812393 0.815437 0.126818
1171 1 0.568714 0.5065 0.189507
1217 1 0.00471232 0.758323 0.119207
1088 1 0.875452 0.692589 0.0608818
1119 1 0.937851 0.746502 0.0605675
1120 1 0.875056 0.811716 0.0697355
1214 1 0.944221 0.683689 0.12093
1245 1 0.87085 0.750533 0.122883
1246 1 0.93371 0.813391 0.120951
1123 1 0.0625404 0.872384 0.0592898
1128 1 0.120953 0.938597 0.0584834
1250 1 0.0622076 0.937718 0.117206
1253 1 0.126774 0.872011 0.117749
1124 1 0.000610419 0.936533 0.068483
1027 1 0.0646636 0.497404 0.0690687
1546 1 0.309634 0.569009 0.499714
1031 1 0.1849 0.500923 0.0659863
1127 1 0.190506 0.869561 0.0574916
1131 1 0.310303 0.870555 0.0689523
1132 1 0.245291 0.928744 0.0670406
1254 1 0.179641 0.937038 0.129578
1257 1 0.247897 0.876391 0.12415
1258 1 0.308557 0.936057 0.12911
1549 1 0.370209 0.503152 0.497792
1135 1 0.43532 0.879746 0.0619769
1136 1 0.363282 0.93849 0.0698328
1261 1 0.366003 0.874452 0.132549
1262 1 0.435901 0.936982 0.124871
1140 1 0.498086 0.940064 0.0640986
1265 1 0.498424 0.881084 0.123705
1589 1 0.624407 0.625304 0.505024
405 1 0.626871 1.00185 0.375582
131 1 0.0578981 0.996977 0.184898
1139 1 0.560251 0.876842 0.0640913
1144 1 0.629548 0.940929 0.0623349
1266 1 0.561197 0.934237 0.12708
1269 1 0.629218 0.876179 0.125108
1121 1 0.998999 0.873419 0.000286065
1285 1 0.126155 0.495094 0.249651
1658 1 0.809924 0.939608 0.500408
1146 1 0.803718 0.941902 -0.00525266
1657 1 0.745536 0.871307 0.504882
1161 1 0.249081 0.505795 0.122227
1143 1 0.680858 0.871586 0.0641891
1147 1 0.809224 0.878467 0.0594877
1148 1 0.746958 0.936503 0.0658398
1270 1 0.681791 0.944507 0.120159
1273 1 0.749667 0.876421 0.124946
1274 1 0.805978 0.940485 0.127919
1129 1 0.248721 0.870296 0.00392986
1409 1 -0.00193947 0.505483 0.367455
1043 1 0.570549 0.503606 0.0607251
1038 1 0.443818 0.565101 0.00200379
1149 1 0.868711 0.878003 0.0068987
1249 1 1.00104 0.869966 0.124394
1151 1 0.933119 0.872541 0.0581878
1152 1 0.871321 0.934642 0.0655785
1277 1 0.866682 0.875204 0.124098
1278 1 0.934652 0.937744 0.126711
1554 1 0.555697 0.562399 0.501973
1293 1 0.366493 0.503274 0.256286
1160 1 0.130234 0.565564 0.184624
1187 1 0.0649704 0.62277 0.184644
1282 1 0.0616138 0.567481 0.24809
1288 1 0.11735 0.563864 0.314894
1315 1 0.0558188 0.62449 0.304847
1317 1 0.126772 0.617068 0.244761
1164 1 0.24853 0.561335 0.189986
1191 1 0.189663 0.62634 0.189482
1195 1 0.314955 0.627059 0.184006
1286 1 0.186426 0.561608 0.259217
1290 1 0.31357 0.567636 0.247423
1292 1 0.252899 0.569215 0.317479
1319 1 0.179066 0.626991 0.309425
1321 1 0.252192 0.620208 0.255868
1323 1 0.317492 0.625139 0.314696
259 1 0.0609327 0.998463 0.304276
1505 1 1.00192 0.872719 0.381238
277 1 0.620729 1.0034 0.245697
1110 1 0.678172 0.815033 0.00231192
1168 1 0.381757 0.571301 0.190813
1199 1 0.441196 0.627313 0.19256
1294 1 0.438263 0.566386 0.252823
1296 1 0.38036 0.56531 0.311288
1325 1 0.378132 0.632398 0.252427
1327 1 0.443766 0.625197 0.316586
1617 1 0.495122 0.747235 0.504769
1329 1 0.496854 0.628427 0.257525
1172 1 0.494855 0.567203 0.184593
1300 1 0.508055 0.563353 0.319065
1176 1 0.633691 0.563588 0.187742
1203 1 0.556761 0.631578 0.185563
1298 1 0.567395 0.569252 0.246947
1304 1 0.624969 0.559801 0.312292
1331 1 0.568687 0.627341 0.30656
1333 1 0.62901 0.630342 0.247967
1613 1 0.376468 0.753734 0.493008
1536 1 0.876658 0.945988 0.43362
1535 1 0.938936 0.871027 0.436966
1180 1 0.756384 0.565441 0.188787
1207 1 0.690867 0.626424 0.194497
1211 1 0.817699 0.625911 0.184237
1302 1 0.69413 0.554031 0.253975
1306 1 0.816228 0.56712 0.247308
1308 1 0.753424 0.561818 0.317852
1335 1 0.688705 0.629149 0.316265
1337 1 0.744482 0.620661 0.258453
1339 1 0.811109 0.619732 0.311473
1305 1 0.760965 0.504943 0.257402
1297 1 0.500308 0.496027 0.247865
1534 1 0.938345 0.935965 0.375496
1156 1 0.992578 0.565441 0.188647
1284 1 0.999409 0.563574 0.312611
1313 1 0.994661 0.628263 0.257309
1184 1 0.883555 0.572703 0.191718
1215 1 0.93777 0.63077 0.182842
1310 1 0.933727 0.56423 0.256207
1312 1 0.872387 0.567418 0.322445
1341 1 0.876411 0.635607 0.246578
1343 1 0.934829 0.618484 0.316063
1192 1 0.115151 0.6866 0.187987
1219 1 0.0660973 0.752312 0.189643
1224 1 0.119502 0.811944 0.176001
1314 1 0.0610077 0.685995 0.24803
1320 1 0.12015 0.69015 0.30801
1346 1 0.063705 0.809357 0.249001
1347 1 0.0596446 0.75184 0.311297
1349 1 0.128624 0.757479 0.245441
1352 1 0.124921 0.802535 0.320272
1188 1 -0.00570594 0.695834 0.187789
1196 1 0.252411 0.680843 0.1923
1223 1 0.186123 0.749092 0.183338
1227 1 0.309861 0.745928 0.194182
1228 1 0.252324 0.810694 0.192385
1318 1 0.185522 0.689166 0.247071
1322 1 0.305813 0.686632 0.261064
1324 1 0.25357 0.681478 0.325002
1350 1 0.189501 0.813242 0.242199
1351 1 0.187362 0.741261 0.317966
1353 1 0.245783 0.751755 0.251657
1354 1 0.308798 0.813187 0.257612
1355 1 0.313278 0.749811 0.318229
1356 1 0.252963 0.808674 0.315745
1200 1 0.380433 0.688608 0.183359
1231 1 0.43468 0.748998 0.184307
1232 1 0.363319 0.8116 0.1964
1326 1 0.438527 0.686402 0.249665
1328 1 0.375515 0.689856 0.315485
1357 1 0.377139 0.749292 0.246483
1358 1 0.436801 0.80959 0.250859
1359 1 0.439567 0.751053 0.307517
1360 1 0.372004 0.805795 0.311385
1332 1 0.500483 0.689372 0.312375
1236 1 0.501038 0.813595 0.19062
1361 1 0.501666 0.755531 0.253317
1364 1 0.504487 0.807214 0.317473
1204 1 0.498785 0.692729 0.191356
1208 1 0.626202 0.691827 0.192809
1235 1 0.569672 0.756899 0.191141
1240 1 0.628674 0.811771 0.179987
1330 1 0.562286 0.69021 0.25047
1336 1 0.62833 0.68598 0.319218
1362 1 0.565016 0.811196 0.253914
1363 1 0.558968 0.743732 0.315867
1365 1 0.631538 0.751697 0.249612
1368 1 0.626209 0.808634 0.315558
1212 1 0.756981 0.6926 0.190347
1239 1 0.696727 0.747433 0.196947
1243 1 0.816755 0.749238 0.19477
1244 1 0.753272 0.810766 0.190719
1334 1 0.688463 0.687863 0.253406
1338 1 0.811504 0.686416 0.259591
1340 1 0.746392 0.685953 0.314528
1366 1 0.685637 0.816656 0.25685
1367 1 0.686312 0.750431 0.320719
1369 1 0.748669 0.74869 0.260828
1370 1 0.806997 0.807904 0.256045
1371 1 0.811078 0.751802 0.312606
1372 1 0.75145 0.810892 0.316276
1220 1 1.00042 0.810104 0.186225
1345 1 0.999162 0.744955 0.256915
1348 1 0.998233 0.810285 0.31242
1316 1 0.00156834 0.682286 0.319401
1216 1 0.876526 0.687952 0.184991
1247 1 0.935759 0.74958 0.190516
1248 1 0.871339 0.817744 0.190997
1342 1 0.935994 0.688481 0.261859
1344 1 0.877314 0.694362 0.321033
1373 1 0.873263 0.747533 0.256297
1374 1 0.939519 0.808126 0.251379
1375 1 0.940985 0.747575 0.321061
1376 1 0.889287 0.811167 0.31753
1251 1 0.0576193 0.870127 0.189077
1256 1 0.119195 0.936544 0.194661
1378 1 0.0564243 0.936389 0.249372
1379 1 0.0582498 0.86303 0.306548
1381 1 0.120403 0.871032 0.249448
1384 1 0.128116 0.938653 0.307961
1380 1 -0.00190554 0.928935 0.311533
1533 1 0.873928 0.879085 0.377954
1255 1 0.180155 0.878461 0.184498
1259 1 0.311373 0.879612 0.19648
1260 1 0.247034 0.934627 0.191694
1382 1 0.181644 0.942182 0.248338
1383 1 0.18524 0.868421 0.312688
1385 1 0.242011 0.879992 0.249231
1386 1 0.310759 0.942854 0.251127
1387 1 0.313673 0.876796 0.307494
1388 1 0.246053 0.934468 0.310063
1263 1 0.431168 0.870096 0.185918
1264 1 0.37048 0.943655 0.188313
1389 1 0.374086 0.875032 0.252608
1390 1 0.438136 0.936317 0.252976
1391 1 0.4376 0.861837 0.309588
1392 1 0.372321 0.937945 0.310116
1268 1 0.496351 0.935149 0.19207
1396 1 0.494962 0.93948 0.313678
1393 1 0.502699 0.867776 0.253409
133 1 0.125747 0.99942 0.12027
1289 1 0.249149 0.501167 0.250809
1 1 0.00121416 0.996157 0.003723
7 1 0.185933 0.993242 0.0573356
1267 1 0.564683 0.872239 0.183522
1272 1 0.623551 0.943557 0.185954
1394 1 0.563797 0.941157 0.249516
1395 1 0.559004 0.875972 0.318289
1397 1 0.620212 0.871074 0.25711
1400 1 0.621014 0.946462 0.31155
1090 1 0.0649289 0.80352 0.00406869
1271 1 0.684469 0.879976 0.196391
1275 1 0.807376 0.878892 0.195066
1276 1 0.739701 0.937563 0.184446
1398 1 0.687963 0.938388 0.250934
1399 1 0.679095 0.878098 0.317319
1401 1 0.744495 0.876065 0.255699
1402 1 0.810593 0.935068 0.249921
1403 1 0.815322 0.874663 0.314973
1404 1 0.751781 0.938668 0.317833
1134 1 0.436389 0.940139 0.00763205
387 1 0.0605377 0.99662 0.43964
1553 1 0.496187 0.499156 0.494633
1252 1 -0.00465253 0.934303 0.187262
1377 1 0.00270009 0.872377 0.245908
1279 1 0.93623 0.875208 0.194628
1280 1 0.876448 0.931432 0.188378
1405 1 0.877742 0.883547 0.261187
1406 1 0.934249 0.936245 0.251679
1407 1 0.936829 0.875669 0.318919
1408 1 0.875276 0.939036 0.314831
1410 1 0.0595292 0.560683 0.374795
1416 1 0.122443 0.56346 0.441419
1443 1 0.0626599 0.621481 0.437724
1445 1 0.124374 0.628159 0.371552
1412 1 0.00329364 0.565059 0.437438
1301 1 0.627432 0.501855 0.251124
1441 1 0.997409 0.621236 0.374231
1421 1 0.374315 0.499704 0.370373
1414 1 0.178395 0.562008 0.373369
1418 1 0.31214 0.558381 0.373447
1420 1 0.252785 0.560008 0.431309
1447 1 0.188644 0.622337 0.43847
1449 1 0.253109 0.624123 0.375934
1451 1 0.31777 0.628775 0.440746
1526 1 0.689305 0.938292 0.383566
1520 1 0.373643 0.941051 0.43513
1303 1 0.691065 0.498055 0.317118
1439 1 0.941486 0.502535 0.441923
1514 1 0.308895 0.934632 0.37646
1506 1 0.0624195 0.931298 0.365379
1610 1 0.308877 0.815238 0.491325
1517 1 0.373244 0.873749 0.371523
279 1 0.687249 0.997178 0.320405
1422 1 0.436638 0.561544 0.374784
1424 1 0.373722 0.566268 0.432179
1453 1 0.376372 0.621167 0.377002
1455 1 0.437754 0.626264 0.432639
5 1 0.125454 0.992544 -0.00367266
1518 1 0.434676 0.935861 0.368225
1519 1 0.434317 0.879833 0.433599
1428 1 0.502087 0.568122 0.433939
1457 1 0.505451 0.624371 0.371849
1426 1 0.564026 0.563358 0.378788
1432 1 0.619728 0.560072 0.438772
1459 1 0.567051 0.626713 0.442085
1461 1 0.63149 0.623957 0.378869
1512 1 0.125198 0.940989 0.436844
1513 1 0.245535 0.870047 0.367677
1525 1 0.620349 0.879822 0.378956
1529 1 0.744639 0.873617 0.368309
1165 1 0.372738 0.516157 0.128002
1430 1 0.68907 0.556741 0.37817
1434 1 0.807166 0.565251 0.385326
1436 1 0.746991 0.565395 0.439738
1463 1 0.69332 0.624541 0.439025
1465 1 0.744114 0.6289 0.377275
1467 1 0.803848 0.632157 0.444152
1521 1 0.489234 0.868828 0.369696
1532 1 0.749262 0.933341 0.438916
1508 1 0.998426 0.931823 0.438159
1642 1 0.313767 0.941275 0.498215
1527 1 0.685127 0.872951 0.439797
1438 1 0.93783 0.55704 0.381816
1440 1 0.877495 0.56478 0.449699
1469 1 0.874981 0.620179 0.382863
1471 1 0.939163 0.629011 0.442207
1442 1 0.0682513 0.690652 0.382158
1448 1 0.127593 0.683693 0.438188
1474 1 0.0585886 0.802752 0.375364
1475 1 0.0677825 0.749201 0.440506
1477 1 0.12545 0.748384 0.378243
1480 1 0.127837 0.810163 0.43422
1473 1 0.992772 0.749591 0.380947
1524 1 0.494471 0.945058 0.431957
1515 1 0.297988 0.873104 0.433274
1511 1 0.185079 0.881413 0.439353
1446 1 0.187915 0.683571 0.374514
1450 1 0.315178 0.689041 0.376281
1452 1 0.252579 0.684722 0.439046
1478 1 0.18375 0.810616 0.374778
1479 1 0.185882 0.742719 0.435661
1481 1 0.247057 0.745223 0.377215
1482 1 0.310276 0.81645 0.376844
1483 1 0.311962 0.751518 0.441041
1484 1 0.244151 0.806035 0.431189
1516 1 0.246105 0.949888 0.439284
1523 1 0.55563 0.880794 0.437493
1528 1 0.620949 0.939373 0.43974
1454 1 0.443933 0.685635 0.373547
1456 1 0.378373 0.687712 0.428799
1485 1 0.377247 0.759332 0.374096
1486 1 0.434095 0.808427 0.377838
1487 1 0.436313 0.753444 0.43542
1488 1 0.374286 0.816582 0.437624
1489 1 0.498667 0.745649 0.37839
1510 1 0.18167 0.937459 0.373228
1460 1 0.501614 0.684972 0.435046
1492 1 0.50356 0.819059 0.443853
1496 1 0.618653 0.819266 0.442357
1458 1 0.562831 0.683276 0.370055
1464 1 0.623869 0.690548 0.445904
1490 1 0.560813 0.808707 0.383637
1491 1 0.557504 0.744913 0.441099
1493 1 0.624578 0.754289 0.383764
1582 1 0.435936 0.68354 0.49633
1621 1 0.621491 0.757657 0.499378
1133 1 0.373888 0.869308 0.00177079
1531 1 0.81421 0.878611 0.433572
1494 1 0.684501 0.81231 0.377931
1468 1 0.748558 0.685052 0.437962
1466 1 0.81161 0.693055 0.368789
1462 1 0.6862 0.688632 0.384821
1497 1 0.74636 0.750537 0.381681
1500 1 0.750613 0.813524 0.441168
1499 1 0.805142 0.745475 0.439723
1495 1 0.685972 0.749309 0.441618
1498 1 0.813046 0.807943 0.377396
1476 1 1.00247 0.808483 0.439189
1444 1 0.00677216 0.687549 0.439226
1470 1 0.934519 0.688246 0.385689
1502 1 0.938428 0.817492 0.380224
1504 1 0.868597 0.807015 0.440619
1503 1 0.938917 0.746292 0.443313
1501 1 0.876253 0.74809 0.38739
1472 1 0.863199 0.687068 0.437761
1522 1 0.559057 0.943694 0.376826
1530 1 0.814889 0.941636 0.37174
1507 1 0.0686093 0.884581 0.436306
1509 1 0.123972 0.870293 0.370543
1109 1 0.629007 0.745533 0.00671488
1614 1 0.436327 0.814463 0.498934
1653 1 0.628226 0.878062 0.498736
1654 1 0.685041 0.948374 0.497856
1309 1 0.87408 0.507876 0.258797
1179 1 0.813535 0.502856 0.186304
1142 1 0.689568 0.938685 -0.00359815
1574 1 0.187634 0.689943 0.501336
1159 1 0.186313 0.505599 0.189609
397 1 0.378863 0.999124 0.371087
391 1 0.186522 1.00054 0.42717
1423 1 0.432819 0.502484 0.435746
261 1 0.125624 1.00723 0.250379
1167 1 0.438066 0.508125 0.191767
385 1 1.00109 0.997258 0.369292
1150 1 0.933727 0.939786 -0.000652926
1291 1 0.30752 0.504616 0.312052
1618 1 0.558138 0.82105 0.498634
1035 1 0.313121 0.514404 0.0596121
1047 1 0.690364 0.501863 0.0674699
1649 1 0.493011 0.878723 0.497944
1437 1 0.871766 0.500784 0.379131
1419 1 0.31278 0.503678 0.433783
1177 1 0.754397 0.505857 0.131841
1283 1 0.064047 0.502311 0.316893
1069 1 0.377351 0.624645 0.00382521
267 1 0.301317 0.996141 0.31792
1646 1 0.434101 0.942943 0.49366
1645 1 0.375312 0.879166 0.492562
273 1 0.499739 0.998893 0.247956
401 1 0.489392 1.00376 0.376043
287 1 0.937062 0.993525 0.308109
1117 1 0.872897 0.749178 0.00019915
1590 1 0.691051 0.693138 0.501477
1550 1 0.430548 0.558499 0.497611
1082 1 0.813141 0.687231 0.00716709
541 1 0.874638 1.00075 0.495629
1074 1 0.565875 0.681304 0.00510825
1093 1 0.12558 0.74402 0.00738654
1078 1 0.679527 0.679601 0.0117267
1050 1 0.811925 0.561363 -0.000793243
1138 1 0.565901 0.939595 0.00625751
1101 1 0.375724 0.746535 0.00976721
1057 1 0.00408955 0.629487 0.00233123
1105 1 0.495867 0.752031 0.00384724
1661 1 0.864961 0.881072 0.499583
1633 1 0.997839 0.871698 0.498111
1102 1 0.441302 0.813094 0.00534492
1145 1 0.741572 0.875594 0.00633229
1081 1 0.754534 0.628222 -0.00192303
1569 1 0.00450252 0.626944 0.503663
1630 1 0.937868 0.810425 0.498641
1118 1 0.936571 0.811395 -0.00154734
1544 1 0.127291 0.563617 0.561321
1571 1 0.0668753 0.620539 0.562561
1666 1 0.0587581 0.561072 0.628817
1701 1 0.132067 0.626673 0.616433
1923 1 0.0656161 0.503255 0.946008
1540 1 -0.00182556 0.563963 0.555565
1697 1 -0.00356972 0.611466 0.622955
2017 1 0.995417 0.875462 0.870269
523 1 0.316956 0.99927 0.562024
1797 1 0.126195 0.504407 0.749075
521 1 0.25218 0.987252 0.510559
1548 1 0.242408 0.553721 0.560891
1575 1 0.191705 0.623282 0.560665
1579 1 0.307108 0.624276 0.557772
1670 1 0.187533 0.556808 0.62327
1674 1 0.306748 0.566919 0.625226
1705 1 0.251933 0.628297 0.625609
1641 1 0.243725 0.874645 0.49494
1929 1 0.24959 0.506637 0.87346
1811 1 0.552198 0.49794 0.803203
1552 1 0.374565 0.558747 0.566912
1583 1 0.437231 0.627633 0.563879
1678 1 0.43865 0.560174 0.621867
1709 1 0.378054 0.629919 0.622511
1713 1 0.498962 0.626551 0.624874
1567 1 0.936164 0.505235 0.553267
1650 1 0.558788 0.938073 0.492826
1558 1 0.689403 0.56015 0.50432
1556 1 0.489546 0.56437 0.555635
1560 1 0.621156 0.558646 0.572734
1587 1 0.554244 0.620342 0.563624
1682 1 0.558332 0.556408 0.626074
1717 1 0.627616 0.623151 0.624734
1626 1 0.809414 0.808088 0.506131
1935 1 0.442797 0.502145 0.936382
1667 1 0.0668265 0.49686 0.689663
657 1 0.505164 0.999109 0.624658
1931 1 0.312244 0.506436 0.933088
1555 1 0.559835 0.499774 0.564243
1638 1 0.185403 0.932482 0.501194
1591 1 0.682985 0.622723 0.558829
1564 1 0.751457 0.554351 0.562893
1595 1 0.799978 0.617404 0.564413
1686 1 0.692388 0.561401 0.629963
1690 1 0.810041 0.561747 0.625447
1721 1 0.745357 0.621313 0.619861
1570 1 0.0725469 0.681237 0.501621
1538 1 0.0643002 0.561274 0.501316
1113 1 0.753719 0.754063 0.999136
2048 1 0.87131 0.941961 0.944042
1568 1 0.864525 0.566657 0.565308
1599 1 0.936447 0.622148 0.561535
1694 1 0.927224 0.557713 0.626986
1725 1 0.871441 0.624733 0.625454
903 1 0.1876 0.999834 0.933603
1927 1 0.189487 0.502018 0.939313
1629 1 0.873386 0.741841 0.507446
911 1 0.439891 1.00451 0.943144
1576 1 0.130205 0.68641 0.565894
1603 1 0.0673065 0.742858 0.565532
1608 1 0.125656 0.812416 0.566864
1698 1 0.0706199 0.683895 0.626815
1730 1 0.0635343 0.815141 0.623814
1733 1 0.130367 0.744542 0.630651
1729 1 0.00377373 0.753747 0.628157
1572 1 0.011462 0.67742 0.573138
1665 1 0.997455 0.504637 0.626731
1580 1 0.246576 0.687331 0.557558
1607 1 0.187863 0.75364 0.556395
1611 1 0.313484 0.751704 0.55979
1612 1 0.252359 0.809161 0.564136
1702 1 0.186546 0.689345 0.619683
1706 1 0.312926 0.679554 0.616665
1734 1 0.189695 0.809243 0.624437
1737 1 0.253157 0.747506 0.61851
1738 1 0.318868 0.814072 0.613871
905 1 0.251024 0.991428 0.875739
901 1 0.12138 0.997366 0.871391
1573 1 0.135599 0.624688 0.501851
1584 1 0.376838 0.687058 0.560647
1615 1 0.434192 0.744532 0.563154
1616 1 0.367674 0.817857 0.553308
1710 1 0.440914 0.686777 0.622783
1741 1 0.377477 0.741327 0.619746
1742 1 0.430654 0.806898 0.622869
1588 1 0.489569 0.685683 0.563134
1593 1 0.746283 0.624319 0.499342
1745 1 0.497225 0.748855 0.615018
1620 1 0.496178 0.807167 0.557449
1592 1 0.632201 0.681946 0.564586
1619 1 0.562263 0.749647 0.556799
1624 1 0.625995 0.811877 0.562721
1714 1 0.557339 0.687977 0.62141
1746 1 0.562956 0.814706 0.622348
1749 1 0.620877 0.749354 0.623283
1045 1 0.624738 0.496889 1.00287
1933 1 0.37473 0.502664 0.873585
1609 1 0.248678 0.756649 0.495609
1596 1 0.742343 0.679215 0.564743
1623 1 0.682634 0.751858 0.569388
1627 1 0.81618 0.750466 0.564892
1628 1 0.747918 0.808388 0.568104
1718 1 0.682982 0.691236 0.624774
1722 1 0.811423 0.68753 0.620539
1750 1 0.685275 0.815218 0.623636
1753 1 0.747465 0.744163 0.626299
1754 1 0.806748 0.810927 0.629856
1634 1 0.0564454 0.930632 0.50498
1604 1 1.00069 0.818239 0.566053
1561 1 0.746867 0.497294 0.498983
1600 1 0.887932 0.684664 0.565947
1631 1 0.947136 0.753671 0.563061
1632 1 0.875804 0.813175 0.560628
1726 1 0.945919 0.679129 0.633188
1757 1 0.876682 0.748559 0.617828
1758 1 0.932953 0.806366 0.625326
913 1 0.505101 0.99975 0.874143
1673 1 0.255722 0.497245 0.621614
1635 1 0.0695526 0.874068 0.563893
1640 1 0.124072 0.936997 0.566226
1762 1 0.0610637 0.940116 0.628159
1765 1 0.130797 0.869918 0.629252
1636 1 0.994959 0.930196 0.566705
2047 1 0.929339 0.87554 0.94647
1598 1 0.946085 0.688138 0.50653
1639 1 0.189152 0.870588 0.560604
1643 1 0.306639 0.871795 0.553559
1644 1 0.246416 0.927805 0.570709
1766 1 0.190094 0.933588 0.626105
1769 1 0.251778 0.865779 0.633156
1770 1 0.311721 0.931723 0.621735
1602 1 0.0696935 0.815824 0.505302
2046 1 0.92818 0.937006 0.878225
2036 1 0.512691 0.95082 0.944466
1097 1 0.251399 0.747709 1.00482
2037 1 0.618364 0.874182 0.868124
1647 1 0.431614 0.871578 0.56156
1648 1 0.375875 0.934674 0.562989
1773 1 0.373337 0.876437 0.620056
1774 1 0.436091 0.937925 0.614966
1597 1 0.872077 0.636428 0.509139
661 1 0.622014 0.999833 0.624041
2045 1 0.868748 0.870381 0.867075
1652 1 0.503211 0.932106 0.563417
1777 1 0.502588 0.871278 0.615572
1651 1 0.565702 0.877808 0.554339
1656 1 0.623274 0.941184 0.562501
1778 1 0.558311 0.93881 0.633481
1781 1 0.623307 0.876984 0.617162
1921 1 -0.00214357 0.505048 0.882397
785 1 0.495531 0.989363 0.743717
1655 1 0.690344 0.872582 0.560722
1659 1 0.810591 0.870819 0.562866
1660 1 0.742288 0.941359 0.553561
1782 1 0.689338 0.933714 0.619429
1785 1 0.743327 0.872191 0.623049
1786 1 0.800979 0.937497 0.61536
783 1 0.429298 0.997169 0.814251
2035 1 0.565114 0.882968 0.936172
899 1 0.0645243 1.00062 0.930531
1126 1 0.187164 0.93282 1.00055
1761 1 -0.00243414 0.876538 0.627505
1663 1 0.933469 0.871772 0.562365
1664 1 0.871238 0.937927 0.564877
1789 1 0.873214 0.874793 0.623091
1790 1 0.933692 0.933527 0.62852
1683 1 0.556224 0.499306 0.692384
1065 1 0.248234 0.624981 1.00236
1672 1 0.129382 0.561195 0.685118
1699 1 0.0628885 0.625383 0.679983
1794 1 0.0603557 0.56677 0.749592
1800 1 0.126284 0.561541 0.814621
1827 1 0.0682739 0.62193 0.809735
1829 1 0.125962 0.626783 0.752416
1668 1 0.0100013 0.560958 0.692163
1825 1 1.00065 0.621389 0.746694
1803 1 0.314978 0.510617 0.811219
1809 1 0.49506 0.501037 0.744419
777 1 0.253852 0.99567 0.745166
1676 1 0.251383 0.561603 0.685655
1703 1 0.192635 0.623708 0.691417
1707 1 0.310203 0.627633 0.68175
1798 1 0.189141 0.559482 0.751387
1802 1 0.311875 0.564003 0.737848
1804 1 0.249117 0.56671 0.812649
1831 1 0.187822 0.629913 0.808595
1833 1 0.254317 0.622146 0.755649
1835 1 0.314535 0.632097 0.81512
769 1 0.995525 0.992349 0.755872
1586 1 0.556839 0.679898 0.502394
1695 1 0.938361 0.496173 0.686409
1680 1 0.370779 0.562028 0.684434
1711 1 0.438675 0.624123 0.678424
1806 1 0.4356 0.573422 0.751815
1808 1 0.372627 0.564741 0.807956
1837 1 0.372256 0.633409 0.744587
1839 1 0.441346 0.630597 0.80979
1684 1 0.495495 0.562323 0.690703
1841 1 0.507852 0.633285 0.742246
1812 1 0.502717 0.563834 0.81255
1688 1 0.626374 0.559764 0.686599
1715 1 0.564418 0.621244 0.68696
1810 1 0.557741 0.567208 0.746801
1816 1 0.620087 0.555252 0.805669
1843 1 0.561344 0.626686 0.815045
1845 1 0.619351 0.627624 0.755351
2044 1 0.742957 0.931178 0.93076
2043 1 0.815997 0.876709 0.937866
771 1 0.0642498 0.995538 0.808441
1692 1 0.75844 0.562833 0.690752
1719 1 0.687946 0.629941 0.682537
1723 1 0.813709 0.622693 0.682541
1814 1 0.682795 0.566458 0.743539
1818 1 0.811055 0.56181 0.753015
1820 1 0.749822 0.55674 0.805399
1847 1 0.686186 0.627926 0.804935
1849 1 0.743673 0.619296 0.751686
1851 1 0.814519 0.619552 0.80893
653 1 0.378999 0.993926 0.615932
781 1 0.379922 0.995716 0.738784
1799 1 0.187254 0.501801 0.810545
1796 1 0.999019 0.567959 0.810515
1696 1 0.8689 0.559356 0.68921
1727 1 0.942265 0.616039 0.687221
1822 1 0.936591 0.558512 0.743895
1824 1 0.869433 0.55592 0.809729
1853 1 0.876424 0.624047 0.746293
1855 1 0.938339 0.628207 0.811498
1925 1 0.124612 0.506684 0.878672
2042 1 0.812183 0.938216 0.880859
1704 1 0.123298 0.685037 0.689114
1731 1 0.0621718 0.74906 0.691374
1736 1 0.130539 0.808903 0.6869
1826 1 0.0630052 0.68869 0.74666
1832 1 0.12821 0.689818 0.814092
1858 1 0.064358 0.8077 0.752139
1859 1 0.0670013 0.743878 0.80825
1861 1 0.129893 0.754091 0.748249
1864 1 0.129033 0.808097 0.811671
1828 1 -0.00183632 0.683598 0.812337
1860 1 0.996731 0.815211 0.808923
1732 1 0.00765847 0.809674 0.686172
1708 1 0.257755 0.687929 0.683445
1735 1 0.195538 0.748677 0.683889
1739 1 0.310795 0.751744 0.683082
1740 1 0.257616 0.812911 0.694074
1830 1 0.195147 0.688866 0.746269
1834 1 0.315792 0.691855 0.749453
1836 1 0.247657 0.686598 0.814295
1862 1 0.186888 0.809138 0.751985
1863 1 0.190123 0.748401 0.811451
1865 1 0.254714 0.750524 0.750262
1866 1 0.318701 0.815624 0.752853
1867 1 0.309208 0.751132 0.814331
1868 1 0.245332 0.815806 0.812302
1712 1 0.373992 0.688002 0.679507
1743 1 0.432289 0.751777 0.694302
1744 1 0.373047 0.811093 0.688473
1838 1 0.442725 0.692876 0.749054
1840 1 0.377759 0.69118 0.810253
1869 1 0.375647 0.751419 0.747404
1870 1 0.440616 0.81548 0.758409
1871 1 0.443545 0.747244 0.809621
1872 1 0.379193 0.802856 0.820629
1876 1 0.502067 0.813943 0.818061
1748 1 0.49297 0.811278 0.675209
1844 1 0.506439 0.693211 0.815928
1873 1 0.500801 0.760989 0.752539
1716 1 0.495317 0.695391 0.684726
1720 1 0.627617 0.689628 0.684204
1747 1 0.559041 0.756193 0.684136
1752 1 0.624291 0.812238 0.68968
1842 1 0.563877 0.694394 0.742406
1848 1 0.629836 0.688918 0.818145
1874 1 0.568186 0.803816 0.753428
1875 1 0.565259 0.747683 0.814087
1877 1 0.6329 0.748814 0.754602
1880 1 0.63114 0.813156 0.814623
1724 1 0.752371 0.692965 0.687263
1751 1 0.683093 0.753701 0.677035
1755 1 0.808225 0.751183 0.691319
1756 1 0.744688 0.810085 0.689539
1846 1 0.684341 0.687139 0.745978
1850 1 0.808177 0.678977 0.747362
1852 1 0.754887 0.672457 0.816081
1878 1 0.685688 0.809356 0.751904
1879 1 0.687941 0.761045 0.825778
1881 1 0.743029 0.746093 0.755244
1882 1 0.805754 0.826058 0.75236
1883 1 0.809508 0.741572 0.815436
1884 1 0.740477 0.818912 0.807894
1700 1 1.00343 0.687368 0.68789
1857 1 0.996196 0.750215 0.751116
1728 1 0.876819 0.680872 0.682083
1759 1 0.940203 0.743463 0.681321
1760 1 0.872997 0.812744 0.687658
1854 1 0.935235 0.680226 0.741604
1856 1 0.873144 0.690428 0.80649
1885 1 0.866638 0.748627 0.753068
1886 1 0.932814 0.812694 0.748646
1887 1 0.932378 0.748133 0.808789
1888 1 0.863278 0.810718 0.809308
1763 1 0.067587 0.877518 0.682292
1768 1 0.124038 0.937659 0.685098
1890 1 0.0601718 0.935115 0.743069
1891 1 0.0625975 0.871349 0.815909
1893 1 0.123604 0.875572 0.747214
1896 1 0.117545 0.931955 0.810848
1892 1 0.991916 0.936817 0.813622
1889 1 0.00512172 0.872732 0.745234
1795 1 0.0658809 0.503745 0.815941
1767 1 0.188277 0.875 0.690062
1771 1 0.318714 0.878294 0.686991
1772 1 0.257695 0.94126 0.68224
1894 1 0.186848 0.939861 0.757978
1895 1 0.191608 0.871223 0.818848
1897 1 0.247504 0.878318 0.744479
1898 1 0.308828 0.930641 0.746398
1899 1 0.312767 0.871258 0.810059
1900 1 0.25118 0.930593 0.810716
651 1 0.321447 0.993746 0.671392
671 1 0.931089 0.992729 0.68598
1562 1 0.80531 0.560844 0.496672
1775 1 0.442042 0.872062 0.677981
1776 1 0.383497 0.939674 0.678803
1901 1 0.378281 0.871581 0.752393
1902 1 0.439737 0.927082 0.747974
1903 1 0.445575 0.876192 0.816238
1904 1 0.372914 0.938628 0.805627
1780 1 0.499025 0.934216 0.686895
2041 1 0.738187 0.872589 0.877144
1114 1 0.81774 0.808928 0.992987
1061 1 0.123613 0.634011 0.996588
1908 1 0.498262 0.936461 0.813461
1905 1 0.50309 0.873201 0.747838
1779 1 0.563109 0.869269 0.681943
1784 1 0.628328 0.937826 0.688728
1906 1 0.564447 0.94035 0.74669
1907 1 0.564988 0.872756 0.815173
1909 1 0.626974 0.873758 0.75729
1912 1 0.620753 0.943879 0.806421
1605 1 0.122272 0.746927 0.496898
1783 1 0.68247 0.871497 0.692878
1787 1 0.811098 0.871861 0.68434
1788 1 0.745205 0.933291 0.689838
1910 1 0.683442 0.943755 0.750656
1911 1 0.683624 0.873702 0.814879
1913 1 0.741505 0.880725 0.756921
1914 1 0.808106 0.929594 0.746592
1915 1 0.805748 0.880952 0.81345
1916 1 0.738664 0.939484 0.81155
649 1 0.25154 0.999837 0.615437
2039 1 0.676313 0.873587 0.939055
2038 1 0.678755 0.942071 0.871438
1764 1 0.998733 0.939023 0.683945
1791 1 0.938707 0.875436 0.68726
1792 1 0.863856 0.938303 0.678456
1917 1 0.875667 0.873235 0.747425
1918 1 0.934046 0.934638 0.746616
1919 1 0.933079 0.875831 0.808742
1920 1 0.874736 0.935825 0.814402
1922 1 0.0635224 0.564065 0.874594
1928 1 0.121125 0.565921 0.937659
1955 1 0.0637986 0.628913 0.938614
1957 1 0.124368 0.637187 0.875138
1953 1 0.0068594 0.624784 0.873572
1924 1 0.00422372 0.563431 0.943591
519 1 0.185894 0.995916 0.560197
1926 1 0.185704 0.571627 0.874046
1930 1 0.317426 0.564377 0.870801
1932 1 0.253104 0.559999 0.935897
1959 1 0.180443 0.62565 0.933168
1961 1 0.25307 0.625364 0.877886
1963 1 0.316167 0.625721 0.93788
2034 1 0.562127 0.939595 0.872234
1934 1 0.44054 0.561845 0.874528
1936 1 0.377839 0.564677 0.930272
1965 1 0.375171 0.62796 0.86943
1967 1 0.43476 0.634126 0.932429
1969 1 0.500308 0.634639 0.874819
1940 1 0.497091 0.573958 0.937055
1062 1 0.190036 0.686029 0.999507
2040 1 0.62406 0.94406 0.938216
1938 1 0.559313 0.5645 0.876624
1944 1 0.617502 0.555517 0.93349
1971 1 0.556798 0.623701 0.934385
1973 1 0.623311 0.6257 0.868978
1566 1 0.942068 0.57044 0.495646
1941 1 0.614585 0.498216 0.867026
1042 1 0.562209 0.566321 0.993355
1942 1 0.68902 0.559816 0.862945
1946 1 0.820779 0.563848 0.872333
1948 1 0.757374 0.561812 0.939749
1975 1 0.688638 0.618547 0.933859
1977 1 0.753571 0.614058 0.875672
1979 1 0.811902 0.628875 0.937408
1585 1 0.498407 0.62087 0.495403
797 1 0.874886 0.99603 0.751045
795 1 0.804704 0.997773 0.808123
1950 1 0.940111 0.571064 0.874973
1952 1 0.876155 0.564822 0.934244
1981 1 0.867889 0.631365 0.871551
1983 1 0.944887 0.62644 0.935299
643 1 0.0563725 0.997355 0.695487
1954 1 0.0634507 0.699271 0.879413
1960 1 0.121467 0.690942 0.937083
1986 1 0.0621757 0.803447 0.862543
1987 1 0.0550774 0.756488 0.937052
1989 1 0.126797 0.754805 0.873178
1992 1 0.122478 0.809606 0.933005
1985 1 0.99608 0.754477 0.8688
1956 1 -2.91484e-05 0.691754 0.9328
2033 1 0.502389 0.880987 0.884368
1949 1 0.879176 0.501559 0.874485
2031 1 0.437486 0.87251 0.944681
1958 1 0.199057 0.686991 0.87653
1962 1 0.319119 0.687858 0.875312
1964 1 0.257866 0.681376 0.935586
1990 1 0.18496 0.812333 0.881162
1991 1 0.189977 0.743334 0.938554
1993 1 0.246559 0.7501 0.879344
1994 1 0.308467 0.81583 0.87492
1995 1 0.312525 0.747377 0.939322
1996 1 0.25055 0.808523 0.937822
539 1 0.808842 0.998708 0.562295
1966 1 0.442092 0.693754 0.873414
1968 1 0.372586 0.686831 0.94837
1997 1 0.3766 0.747194 0.877416
1998 1 0.437475 0.812906 0.88118
1999 1 0.439892 0.753364 0.941163
2000 1 0.377577 0.810741 0.947731
2001 1 0.505782 0.752878 0.880477
1972 1 0.501701 0.68711 0.944638
2004 1 0.506307 0.820037 0.935944
1805 1 0.38149 0.49928 0.744957
1970 1 0.56396 0.685912 0.873735
1976 1 0.626671 0.676411 0.940894
2002 1 0.571146 0.804186 0.868225
2003 1 0.571037 0.734653 0.935461
2005 1 0.628808 0.74158 0.87663
2008 1 0.618078 0.810531 0.943071
2030 1 0.436094 0.93795 0.886251
1974 1 0.6852 0.685171 0.879359
1978 1 0.808161 0.685242 0.877164
1980 1 0.748621 0.6773 0.935747
2006 1 0.679996 0.813589 0.887625
2007 1 0.685837 0.743954 0.940632
2009 1 0.745229 0.751447 0.881678
2010 1 0.808533 0.816402 0.874716
2011 1 0.817379 0.743921 0.93788
2012 1 0.750578 0.81174 0.94138
775 1 0.184255 0.996453 0.816738
531 1 0.566108 0.997683 0.563697
2032 1 0.373295 0.945555 0.952353
779 1 0.310224 0.995886 0.811513
2029 1 0.373584 0.875933 0.87807
1988 1 1.00157 0.814134 0.940432
2015 1 0.927571 0.754088 0.935975
2013 1 0.872482 0.747069 0.871295
1984 1 0.878586 0.686825 0.943683
1982 1 0.936953 0.691356 0.875613
2016 1 0.872294 0.814467 0.930925
2014 1 0.937294 0.811374 0.863862
667 1 0.810997 1.0017 0.685055
2020 1 0.993123 0.941272 0.942549
2025 1 0.253736 0.875225 0.874724
2023 1 0.191956 0.87466 0.939908
2018 1 0.0602542 0.937987 0.872264
2019 1 0.0588134 0.870202 0.924171
2021 1 0.127071 0.870865 0.877073
2024 1 0.122986 0.937951 0.927262
2026 1 0.323569 0.939435 0.8804
2027 1 0.31615 0.876716 0.945096
2022 1 0.192372 0.935035 0.879604
2028 1 0.254219 0.927335 0.936547
1601 1 0.00924646 0.75344 0.502302
1945 1 0.749375 0.503505 0.886145
1073 1 0.501283 0.625984 1.00675
647 1 0.183847 0.995657 0.692912
1947 1 0.810789 0.499085 0.937983
513 1 1.00396 0.998076 0.501712
1622 1 0.684279 0.810994 0.502309
907 1 0.313637 0.997196 0.942936
923 1 0.807689 0.999548 0.940091
1030 1 0.188652 0.566143 0.994452
1937 1 0.500084 0.499828 0.868625
773 1 0.124935 0.996845 0.751922
515 1 0.0616867 1.00806 0.564547
1807 1 0.440588 0.508426 0.813562
921 1 0.752253 1.006 0.874985
897 1 -0.00307031 1.00113 0.872603
1542 1 0.186492 0.553125 0.497888
1125 1 0.130852 0.864975 0.996767
1137 1 0.50655 0.882891 0.99811
1141 1 0.619973 0.879742 1.00194
1053 1 0.872752 0.503161 0.991277
1058 1 0.0621724 0.695914 0.997035
1033 1 0.249459 0.50079 0.998687
1625 1 0.747147 0.7479 0.508843
1098 1 0.312172 0.814014 1.00501
1046 1 0.688169 0.561656 1.00012
1077 1 0.624122 0.619465 0.995561
1034 1 0.311214 0.567731 0.991463
1106 1 0.561858 0.812372 0.999723
1122 1 0.0718262 0.924978 0.991975
1089 1 0.99343 0.748636 0.995465
1094 1 0.185168 0.803189 0.997956
1637 1 0.125703 0.873321 0.504239
1578 1 0.313461 0.69148 0.502785
1594 1 0.805826 0.686355 0.507282
1581 1 0.375815 0.621923 0.504814
1037 1 0.385181 0.511163 1.00071
1662 1 0.939213 0.934091 0.509496
529 1 0.503975 0.995327 0.4985
1029 1 0.129129 0.501999 0.999013
9 1 0.245515 0.993917 0.995154
| [
"[email protected]"
] | |
adec619e3b44525d2a8552143b3945a1d9f4e157 | 2424063d657d643c1f8ccc6cca343271d6d0f708 | /Project24/app24/models.py | 5a8ad3f6ed2fce774fd34f11bc0610adb453e749 | [] | no_license | pythonwithnaveen/DjangoExamples | a0a07cbc53564522cf39649c235716ef5c3a4ba0 | 57c7a6302ada4079bd3625481e660587bf8015c6 | refs/heads/main | 2023-07-16T02:36:01.283938 | 2021-08-12T07:26:22 | 2021-08-12T07:26:22 | 371,881,524 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | from django.db import models
class EmployeeModel(models.Model):
    # (stored value, human-readable label) pairs used as choices for `designation`
    desig = (('Manager', 'Manager'),
             ('Developer', 'Developer'),
             ('Tester', 'Tester'))
idno = models.AutoField(primary_key=True)
name = models.CharField(max_length=100)
salary = models.FloatField()
designation = models.CharField(max_length=100,choices=desig,default='Developer')
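    # Usage sketch (added note; the literal values below are made up): once the app
    # is installed and migrated, rows are created with e.g.
    #     EmployeeModel.objects.create(name='Asha', salary=50000.0, designation='Tester')
    # `choices=desig` is enforced on form/full_clean() validation, not by the database column itself.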
def __str__(self):
return self.name | [
"="
] | = |
7f9e8ea4f6d4b7553bfcc20e663036a133e34e08 | d1c3cfa4707c3d7dcb7578608c317d1e1441c3d2 | /동빈좌/기타알고리즘/문제풀이/유형문제풀이/구현/zip_str.py | 888762e95a853a77c2598f538a007400d9b19a6c | [] | no_license | wookim789/baekjoon_algo_note | 0ad4ddc6dde187afb4346f14ef9ded2f4b7de9ae | a08903d118218d17f4ed86b16ac39bce70adeb78 | refs/heads/master | 2023-02-08T21:13:25.079062 | 2021-01-01T14:56:10 | 2021-01-01T14:56:10 | 295,260,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | # Given two iterable objects as input, zip them together, pairing their elements index by index
a = [1,2,3]
b = [4,5,6]
print(list(zip(a,b)))
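# Added sketch (not part of the original snippet): zip() pairs items strictly by
# index, stops at the shorter input, and can be undone with zip(*...). The names
# `pairs`, `left`, `right` are arbitrary.
pairs = list(zip(a, b))        # [(1, 4), (2, 5), (3, 6)]
print(dict(zip(a, b)))         # {1: 4, 2: 5, 3: 6}
print(list(zip(a, [7, 8])))    # [(1, 7), (2, 8)] -- the extra item in `a` is dropped
left, right = zip(*pairs)      # "unzip" back into the original sequences
print(left, right)             # (1, 2, 3) (4, 5, 6)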
| [
"[email protected]"
] | |
57d922bb376a8688b87bd15551f00ed4bc091aa1 | 78c3082e9082b5b50435805723ae00a58ca88e30 | /03.AI알고리즘 소스코드/venv/Lib/site-packages/caffe2/experiments/python/device_reduce_sum_bench.py | 04628c97394f2cab1ab8c5c006c88f2316bc60eb | [] | no_license | jinStar-kimmy/algorithm | 26c1bc456d5319578110f3d56f8bd19122356603 | 59ae8afd8d133f59a6b8d8cee76790fd9dfe1ff7 | refs/heads/master | 2023-08-28T13:16:45.690232 | 2021-10-20T08:23:46 | 2021-10-20T08:23:46 | 419,217,105 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,076 | py | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package device_reduce_sum_bench
# Module caffe2.experiments.python.device_reduce_sum_bench
import argparse
import itertools
import logging
import os
from six import add_metaclass
import numpy as np
from caffe2.python import workspace, core
from caffe2.python.hypothesis_test_util import runOpBenchmark, gpu_do
logging.basicConfig()
logger = logging.getLogger(os.path.basename(__file__))
logger.setLevel(logging.INFO)
ALL_BENCHMARKS = {}
class BenchmarkMeta(type):
def __new__(metacls, name, bases, class_dict):
cls = type.__new__(metacls, name, bases, class_dict)
if name != 'Benchmark':
ALL_BENCHMARKS[name] = cls
return cls
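# Registry note: BenchmarkMeta records every concrete Benchmark subclass defined
# below (SumElements, SumSqrElements, SoftMaxWithLoss) in ALL_BENCHMARKS under its
# class name; parse_args() then exposes exactly those names via -b/--benchmarks.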
@add_metaclass(BenchmarkMeta)
class Benchmark(object):
def __init__(self):
self.results = []
def display(self):
print('Results ({}):'.format(type(self).__name__))
print('input size ms/iter')
print('------------------------------ -----------')
for size, ms in self.results:
print('{!s:<30} {:.4f}'.format(size, ms))
class SumElements(Benchmark):
def run(self):
op = core.CreateOperator(
"SumElements",
["X"],
["y"]
)
for n in itertools.imap(pow, itertools.cycle([10]), range(10)):
X = np.random.rand(n).astype(np.float32)
logger.info('Running benchmark for n = {}'.format(n))
ret = runOpBenchmark(gpu_do, op, inputs=[X])
self.results.append((n, ret[1]))
class SumSqrElements(Benchmark):
def run(self):
op = core.CreateOperator(
"SumSqrElements",
["X"],
["y"]
)
for n in itertools.imap(pow, itertools.cycle([10]), range(10)):
X = np.random.rand(n).astype(np.float32)
logger.info('Running benchmark for n = {}'.format(n))
ret = runOpBenchmark(gpu_do, op, inputs=[X])
self.results.append((n, ret[1]))
class SoftMaxWithLoss(Benchmark):
def run(self):
op = core.CreateOperator(
"SoftmaxWithLoss",
["X", "label"],
["probs", "avgloss"],
)
for n in itertools.imap(pow, itertools.cycle([10]), range(8)):
for D in itertools.imap(pow, itertools.cycle([10]), range(3)):
X = np.random.rand(n, D).astype(np.float32)
label = (np.random.rand(n) * D).astype(np.int32)
logger.info('Running benchmark for n = {}, D= {}'.format(n, D))
ret = runOpBenchmark(gpu_do, op, inputs=[X, label])
self.results.append(((n, D), ret[1]))
def parse_args():
parser = argparse.ArgumentParser(os.path.basename(__file__))
parser.add_argument('-b', '--benchmarks', nargs='+',
default=ALL_BENCHMARKS.keys(),
                        help='benchmarks to run (default: %(default)s)')
return parser.parse_args()
def main():
args = parse_args()
benchmarks = [ALL_BENCHMARKS[name]() for name in args.benchmarks]
for bench in benchmarks:
bench.run()
for bench in benchmarks:
bench.display()
if __name__ == '__main__':
workspace.GlobalInit(['caffe2', '--caffe2_log_level=2'])
main()
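# Example invocation (a sketch; the flag comes from parse_args() above, and the
# script assumes a CUDA-capable GPU plus a Python 2 / caffe2 environment):
#
#   python device_reduce_sum_bench.py -b SumElements SumSqrElements
#
# Each selected benchmark prints a table of input size vs. milliseconds per iteration.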
| [
"[email protected]"
] | |
b3852a41403ec640fd038714c357b524040ec896 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/word-count/14cde529d953488da375451dc827b0ba.py | 51da302e01332cc4536cbc94b62458702df054c3 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 218 | py | def word_count(s):
"""
Return a dict with 'word': number of occurrences for each word in `s`.
"""
counts = {}
for word in s.split():
counts[word] = counts.get(word, 0) + 1
return counts
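# Minimal usage sketch (an added example; the phrase and expected dict are illustrative):
if __name__ == "__main__":
    # prints {'the': 2, 'cat': 1, 'and': 1, 'hat': 1}; splitting on whitespace
    # means punctuation stays attached to its word.
    print(word_count("the cat and the hat"))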
| [
"[email protected]"
] | |
ee7b79b54a232d6dcc47d354eae1376a44c49c4f | 4d2cc76dbceff9eded071ba542ab2a1dd8c19f7b | /bhp056/apps/mpepu_maternal/migrations/0027_auto__add_field_maternalconsentaudit_is_literate__add_field_maternalco.py | cc57b045aff711a4c93c5ad87a1687ec0d8cce49 | [] | no_license | botswana-harvard/mpepu | 5d436638b760150ed76ec223121f5ac7aeee1020 | 6aa29c91f4fab50782b27e5f55aa33b30aee1dd0 | refs/heads/master | 2021-01-16T23:15:44.335940 | 2016-08-12T14:30:17 | 2016-08-12T14:30:17 | 65,557,693 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 213,774 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'MaternalConsentAudit.is_literate'
db.add_column('mpepu_maternal_maternalconsent_audit', 'is_literate',
self.gf('django.db.models.fields.CharField')(default='-', max_length=3),
keep_default=False)
# Adding field 'MaternalConsentAudit.witness_name'
db.add_column('mpepu_maternal_maternalconsent_audit', 'witness_name',
self.gf('django.db.models.fields.CharField')(max_length=78L, null=True, blank=True),
keep_default=False)
# Adding field 'MaternalConsent.is_literate'
db.add_column('mpepu_maternal_maternalconsent', 'is_literate',
self.gf('django.db.models.fields.CharField')(default='-', max_length=3),
keep_default=False)
# Adding field 'MaternalConsent.witness_name'
db.add_column('mpepu_maternal_maternalconsent', 'witness_name',
self.gf('django.db.models.fields.CharField')(max_length=78L, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'MaternalConsentAudit.is_literate'
db.delete_column('mpepu_maternal_maternalconsent_audit', 'is_literate')
# Deleting field 'MaternalConsentAudit.witness_name'
db.delete_column('mpepu_maternal_maternalconsent_audit', 'witness_name')
# Deleting field 'MaternalConsent.is_literate'
db.delete_column('mpepu_maternal_maternalconsent', 'is_literate')
# Deleting field 'MaternalConsent.witness_name'
db.delete_column('mpepu_maternal_maternalconsent', 'witness_name')
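    # Usage sketch (added note, not generated by South): with South installed, the
    # column additions in forwards() are applied with
    #     ./manage.py migrate mpepu_maternal 0027
    # and reverted via backwards() by migrating back to the previous migration
    # (0026 here is an assumption about this app's migration history):
    #     ./manage.py migrate mpepu_maternal 0026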
models = {
'bhp_adverse.deathcausecategory': {
'Meta': {'ordering': "['display_index']", 'object_name': 'DeathCauseCategory'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'bhp_adverse.deathcauseinfo': {
'Meta': {'ordering': "['display_index']", 'object_name': 'DeathCauseInfo'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'bhp_adverse.deathmedicalresponsibility': {
'Meta': {'ordering': "['display_index']", 'object_name': 'DeathMedicalResponsibility'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'bhp_adverse.deathreasonhospitalized': {
'Meta': {'ordering': "['display_index']", 'object_name': 'DeathReasonHospitalized'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'bhp_appointment.appointment': {
'Meta': {'ordering': "['registered_subject', 'appt_datetime']", 'unique_together': "(('registered_subject', 'visit_definition', 'visit_instance'),)", 'object_name': 'Appointment'},
'appt_close_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'appt_datetime': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'appt_reason': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'appt_status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '25', 'db_index': 'True'}),
'appt_type': ('django.db.models.fields.CharField', [], {'default': "'clinic'", 'max_length': '20'}),
'best_appt_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'contact_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'contact_tel': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dashboard_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'is_confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'study_site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_variables.StudySite']", 'null': 'True'}),
'timepoint_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'visit_definition': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['bhp_visit.VisitDefinition']"}),
'visit_instance': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '1', 'null': 'True', 'db_index': 'True', 'blank': 'True'})
},
'bhp_code_lists.dxcode': {
'Meta': {'object_name': 'DxCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'list_ref': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'long_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'bhp_code_lists.wcsdxadult': {
'Meta': {'object_name': 'WcsDxAdult'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'list_ref': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'long_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'bhp_consent.consentcatalogue': {
'Meta': {'ordering': "['name', 'version']", 'unique_together': "(('name', 'version'),)", 'object_name': 'ConsentCatalogue'},
'add_for_app': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'consent_type': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'content_type_map': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_content_type_map.ContentTypeMap']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'end_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'list_for_update': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'start_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {})
},
'bhp_content_type_map.contenttypemap': {
'Meta': {'ordering': "['name']", 'unique_together': "(['app_label', 'model'],)", 'object_name': 'ContentTypeMap', 'db_table': "'bhp_common_contenttypemap'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'bhp_registration.registeredsubject': {
'Meta': {'ordering': "['subject_identifier']", 'unique_together': "(('identity', 'first_name', 'dob', 'initials', 'registration_identifier'),)", 'object_name': 'RegisteredSubject'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'hiv_status': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'identity': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'identity_type': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'initials': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'is_dob_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'may_store_samples': ('django.db.models.fields.CharField', [], {'default': "'?'", 'max_length': '3'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'randomization_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'registration_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'registration_identifier': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}),
'registration_status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'relative_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'salt': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'screening_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sid': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'study_site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_variables.StudySite']", 'null': 'True', 'blank': 'True'}),
'subject_consent_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'subject_identifier': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '50', 'blank': 'True'}),
'subject_identifier_as_pk': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
'subject_type': ('django.db.models.fields.CharField', [], {'default': "'undetermined'", 'max_length': '25', 'null': 'True'}),
'survival_status': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'bhp_variables.studysite': {
'Meta': {'ordering': "['site_code']", 'unique_together': "[('site_code', 'site_name')]", 'object_name': 'StudySite'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'site_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4'}),
'site_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '35'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'bhp_visit.membershipform': {
'Meta': {'object_name': 'MembershipForm'},
'category': ('django.db.models.fields.CharField', [], {'default': "'subject'", 'max_length': '25', 'null': 'True'}),
'content_type_map': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'+'", 'unique': 'True', 'to': "orm['bhp_content_type_map.ContentTypeMap']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'bhp_visit.schedulegroup': {
'Meta': {'ordering': "['group_name']", 'object_name': 'ScheduleGroup'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'group_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'grouping_key': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'membership_form': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_visit.MembershipForm']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'bhp_visit.visitdefinition': {
'Meta': {'ordering': "['code', 'time_point']", 'object_name': 'VisitDefinition'},
'base_interval': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'base_interval_unit': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '10'}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '6', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grouping': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'instruction': ('django.db.models.fields.TextField', [], {'max_length': '255', 'blank': 'True'}),
'lower_window': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lower_window_unit': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'schedule_group': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['bhp_visit.ScheduleGroup']", 'null': 'True', 'blank': 'True'}),
'time_point': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '35', 'db_index': 'True'}),
'upper_window': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'upper_window_unit': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '10'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
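        # Django's built-in contenttypes.ContentType model (db_table 'django_content_type'),
        # included in the frozen ORM snapshot.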
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
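        # The mpepu_list.* entries below are simple lookup-list models; they all share the same
        # layout (name, short_name, display_index, field_name, version) plus the standard
        # created/modified, hostname and user bookkeeping columns.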
'mpepu_list.chroniccond': {
'Meta': {'object_name': 'ChronicCond'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.delcomp': {
'Meta': {'object_name': 'DelComp'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.healthcond': {
'Meta': {'object_name': 'HealthCond'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.hhgoods': {
'Meta': {'object_name': 'HhGoods'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.maternalbfffrisksbenefits': {
'Meta': {'object_name': 'MaternalBfFfRisksBenefits'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.maternalfeedinginfluence': {
'Meta': {'object_name': 'MaternalFeedingInfluence'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.maternalundecidedfeeding': {
'Meta': {'object_name': 'MaternalUndecidedFeeding'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.obcomp': {
'Meta': {'object_name': 'ObComp'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.priorarv': {
'Meta': {'object_name': 'PriorArv'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
'mpepu_list.suppliment': {
'Meta': {'object_name': 'Suppliment'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_index': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '35'})
},
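        # mpepu_maternal.* CRF models follow; the *Audit variants repeat the base model's fields
        # and add the _audit_* columns, stored in a dedicated *_audit table.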
'mpepu_maternal.feedingchoice': {
'Meta': {'object_name': 'FeedingChoice'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'first_time_feeding': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.feedingchoiceaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'FeedingChoiceAudit', 'db_table': "'mpepu_maternal_feedingchoice_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'first_time_feeding': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_feedingchoice'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.feedingchoicesectionone': {
'Meta': {'object_name': 'FeedingChoiceSectionOne'},
'baby_weaned_age': ('django.db.models.fields.IntegerField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hiv_aware_feeding': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hiv_status': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'last_baby_feeding': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.feedingchoicesectiononeaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'FeedingChoiceSectionOneAudit', 'db_table': "'mpepu_maternal_feedingchoicesectionone_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'baby_weaned_age': ('django.db.models.fields.IntegerField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hiv_aware_feeding': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hiv_status': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'last_baby_feeding': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_feedingchoicesectionone'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.feedingchoicesectionthree': {
'Meta': {'object_name': 'FeedingChoiceSectionThree'},
'bf_advice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'chosen_feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'feeding_choice_made': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'ff_advice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'risk_benefit_training': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.MaternalBfFfRisksBenefits']", 'symmetrical': 'False'}),
'und_risk_benefit': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'undecided_feeding': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.MaternalUndecidedFeeding']", 'symmetrical': 'False'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.feedingchoicesectionthreeaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'FeedingChoiceSectionThreeAudit', 'db_table': "'mpepu_maternal_feedingchoicesectionthree_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'bf_advice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'chosen_feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'feeding_choice_made': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'ff_advice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_feedingchoicesectionthree'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'und_risk_benefit': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.feedingchoicesectiontwo': {
'Meta': {'object_name': 'FeedingChoiceSectionTwo'},
'baby_bf_choice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'bf_ff_benefits': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'bf_hiv_arv': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'bf_hiv_worry': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'death_worry': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'disclose_hiv_father': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'doc_feeding_advice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'hiv_worry': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'infant_hiv_risk': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'influential_people': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.MaternalFeedingInfluence']", 'symmetrical': 'False'}),
'influential_people_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'outside_disclosure': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'safe_ff': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'status_disclosure': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'work_influence': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'work_return': ('django.db.models.fields.CharField', [], {'max_length': '35'})
},
'mpepu_maternal.feedingchoicesectiontwoaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'FeedingChoiceSectionTwoAudit', 'db_table': "'mpepu_maternal_feedingchoicesectiontwo_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'baby_bf_choice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'bf_ff_benefits': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'bf_hiv_arv': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'bf_hiv_worry': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'death_worry': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'disclose_hiv_father': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'doc_feeding_advice': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'hiv_worry': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'infant_hiv_risk': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'influential_people_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_feedingchoicesectiontwo'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'outside_disclosure': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'safe_ff': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'status_disclosure': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'work_influence': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'work_return': ('django.db.models.fields.CharField', [], {'max_length': '35'})
},
'mpepu_maternal.maternalarv': {
'Meta': {'object_name': 'MaternalArv'},
'arv_code': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_stop': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_arv_pp_history': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalArvPPHistory']", 'null': 'True', 'blank': 'True'}),
'maternal_arv_preg_history': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalArvPregHistory']", 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'transaction_flag': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvAudit', 'db_table': "'mpepu_maternal_maternalarv_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'arv_code': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_stop': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_arv_pp_history': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_audit_maternalarv'", 'null': 'True', 'to': "orm['mpepu_maternal.MaternalArvPPHistory']"}),
'maternal_arv_preg_history': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_audit_maternalarv'", 'null': 'True', 'to': "orm['mpepu_maternal.MaternalArvPregHistory']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'transaction_flag': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpost': {
'Meta': {'object_name': 'MaternalArvPost'},
'arv_status': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '25'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'haart_last_visit': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'haart_reason': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '25'}),
'haart_reason_other': ('django.db.models.fields.TextField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpostadh': {
'Meta': {'object_name': 'MaternalArvPostAdh'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_arv_post': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalArvPost']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'missed_days': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'missed_days_discnt': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'missed_doses': ('django.db.models.fields.IntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpostadhaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvPostAdhAudit', 'db_table': "'mpepu_maternal_maternalarvpostadh_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_arv_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpostadh'", 'to': "orm['mpepu_maternal.MaternalArvPost']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpostadh'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'missed_days': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'missed_days_discnt': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'missed_doses': ('django.db.models.fields.IntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpostaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvPostAudit', 'db_table': "'mpepu_maternal_maternalarvpost_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'arv_status': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '25'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'haart_last_visit': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'haart_reason': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '25'}),
'haart_reason_other': ('django.db.models.fields.TextField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpost'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpostmod': {
'Meta': {'object_name': 'MaternalArvPostMod'},
'arv_code': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dose_status': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_arv_post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalArvPost']"}),
'modification_code': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'modification_date': ('django.db.models.fields.DateField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpostmodaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvPostModAudit', 'db_table': "'mpepu_maternal_maternalarvpostmod_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'arv_code': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dose_status': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_arv_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpostmod'", 'to': "orm['mpepu_maternal.MaternalArvPost']"}),
'modification_code': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'modification_date': ('django.db.models.fields.DateField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpphistory': {
'Meta': {'object_name': 'MaternalArvPPHistory'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_arv_preg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalArvPreg']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpphistoryaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvPPHistoryAudit', 'db_table': "'mpepu_maternal_maternalarvpphistory_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_arv_preg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpphistory'", 'to': "orm['mpepu_maternal.MaternalArvPreg']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpphistory'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpreg': {
'Meta': {'object_name': 'MaternalArvPreg'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'sd_nvp': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'start_pp': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'took_arv': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpregaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvPregAudit', 'db_table': "'mpepu_maternal_maternalarvpreg_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpreg'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'sd_nvp': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'start_pp': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'took_arv': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpreghistory': {
'Meta': {'object_name': 'MaternalArvPregHistory'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'interrupt': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '50'}),
'interrupt_other': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'is_interrupt': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'maternal_arv_preg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalArvPreg']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalarvpreghistoryaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalArvPregHistoryAudit', 'db_table': "'mpepu_maternal_maternalarvpreghistory_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'interrupt': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '50'}),
'interrupt_other': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'is_interrupt': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'maternal_arv_preg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpreghistory'", 'to': "orm['mpepu_maternal.MaternalArvPreg']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalarvpreghistory'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
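        # MaternalConsent (next entry) carries the participant's identifying details.
        # Note on the string '78L' max_length values below: these appear to be Python 2
        # long literals frozen by South (the field lengths are presumably computed by a
        # helper at class-creation time); they evaluate fine under Python 2, but the
        # trailing 'L' would have to be dropped if this freeze were ever re-used on Python 3.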
'mpepu_maternal.maternalconsent': {
'Meta': {'object_name': 'MaternalConsent'},
'assessment_score': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'confirm_identity': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'consent_copy': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'consent_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'consent_reviewed': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'consent_version_on_entry': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'consent_version_recent': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'guardian_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'identity': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '78L'}),
'identity_type': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'initials': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'is_dob_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True'}),
'is_incarcerated': ('django.db.models.fields.CharField', [], {'default': "'-'", 'max_length': '3'}),
'is_literate': ('django.db.models.fields.CharField', [], {'default': "'-'", 'max_length': '3'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_verified_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'may_store_samples': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'study_questions': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'study_site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_variables.StudySite']"}),
'subject_identifier': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '50', 'blank': 'True'}),
'subject_identifier_as_pk': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
'subject_type': ('django.db.models.fields.CharField', [], {'default': "'undetermined'", 'max_length': '25', 'null': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'witness_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'})
},
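        # Audit variant of MaternalConsent. The *Audit entries in this dict repeat the
        # source model's fields, add the _audit_* bookkeeping columns, relax the unique
        # constraints (e.g. identity, subject_identifier), and point their ForeignKeys
        # back with an '_audit_<model>' related_name into a separate '<table>_audit'
        # db_table ordered by '-_audit_timestamp'.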
'mpepu_maternal.maternalconsentaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalConsentAudit', 'db_table': "'mpepu_maternal_maternalconsent_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'assessment_score': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'confirm_identity': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'consent_copy': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'consent_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'consent_reviewed': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'consent_version_on_entry': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'consent_version_recent': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'guardian_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'identity': ('django.db.models.fields.CharField', [], {'max_length': '78L'}),
'identity_type': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'initials': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'is_dob_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True'}),
'is_incarcerated': ('django.db.models.fields.CharField', [], {'default': "'-'", 'max_length': '3'}),
'is_literate': ('django.db.models.fields.CharField', [], {'default': "'-'", 'max_length': '3'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_verified_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True'}),
'may_store_samples': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'study_questions': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'study_site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalconsent'", 'to': "orm['bhp_variables.StudySite']"}),
'subject_identifier': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'subject_identifier_as_pk': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
'subject_type': ('django.db.models.fields.CharField', [], {'default': "'undetermined'", 'max_length': '25', 'null': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'witness_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'})
},
'mpepu_maternal.maternalconsentupdate': {
'Meta': {'unique_together': "(('maternal_consent', 'consent_version'), ('maternal_consent', 'consent_datetime'))", 'object_name': 'MaternalConsentUpdate'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'consent_catalogue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_consent.ConsentCatalogue']"}),
'consent_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'consent_version': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'guardian_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_consent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalConsent']"}),
'may_store_samples': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'study_site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_variables.StudySite']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
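        # A minimal sketch (not part of this migration, shown here only as orientation)
        # of how a South data migration would address these frozen definitions -- the
        # orm[...] keys match the dotted entries in this dict:
        #
        #     def forwards(self, orm):
        #         for death in orm['mpepu_maternal.MaternalDeath'].objects.all():
        #             print(death.death_date)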
'mpepu_maternal.maternaldeath': {
'Meta': {'object_name': 'MaternalDeath'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'days_hospitalized': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'death_cause': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'death_cause_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_adverse.DeathCauseCategory']"}),
'death_cause_info': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_adverse.DeathCauseInfo']"}),
'death_cause_info_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'death_cause_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'death_date': ('django.db.models.fields.DateField', [], {}),
'death_medical_responsibility': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_adverse.DeathMedicalResponsibility']"}),
'death_reason_hospitalized': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_adverse.DeathReasonHospitalized']", 'null': 'True', 'blank': 'True'}),
'dx_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_code_lists.DxCode']", 'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'illness_duration': ('django.db.models.fields.IntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'participant_hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'perform_autopsy': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'registered_subject': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_registration.RegisteredSubject']", 'unique': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternaldeathaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalDeathAudit', 'db_table': "'mpepu_maternal_maternaldeath_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'days_hospitalized': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'death_cause': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'death_cause_category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaldeath'", 'to': "orm['bhp_adverse.DeathCauseCategory']"}),
'death_cause_info': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaldeath'", 'to': "orm['bhp_adverse.DeathCauseInfo']"}),
'death_cause_info_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'death_cause_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'death_date': ('django.db.models.fields.DateField', [], {}),
'death_medical_responsibility': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaldeath'", 'to': "orm['bhp_adverse.DeathMedicalResponsibility']"}),
'death_reason_hospitalized': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_audit_maternaldeath'", 'null': 'True', 'to': "orm['bhp_adverse.DeathReasonHospitalized']"}),
'dx_code': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaldeath'", 'max_length': '25', 'to': "orm['bhp_code_lists.DxCode']"}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'illness_duration': ('django.db.models.fields.IntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'participant_hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'perform_autopsy': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaldeath'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternaleligibilityante': {
'Meta': {'object_name': 'MaternalEligibilityAnte'},
'agree_follow_up': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'gestational_age': ('django.db.models.fields.IntegerField', [], {}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'is_cd4_low': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'is_hiv_positive': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'maternal_consent': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalConsent']", 'unique': 'True'}),
'maternal_haart': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_registration.RegisteredSubject']", 'unique': 'True'}),
'registration_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternaleligibilityanteaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEligibilityAnteAudit', 'db_table': "'mpepu_maternal_maternaleligibilityante_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'agree_follow_up': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'gestational_age': ('django.db.models.fields.IntegerField', [], {}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'is_cd4_low': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'is_hiv_positive': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'maternal_consent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaleligibilityante'", 'to': "orm['mpepu_maternal.MaternalConsent']"}),
'maternal_haart': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaleligibilityante'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'registration_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
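        # MaternalEligibilityPost mirrors MaternalEligibilityAnte above, swapping the
        # ante-natal gestational_age field for the post-natal days_pnc field.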
'mpepu_maternal.maternaleligibilitypost': {
'Meta': {'object_name': 'MaternalEligibilityPost'},
'agree_follow_up': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'days_pnc': ('django.db.models.fields.IntegerField', [], {}),
'feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'is_cd4_low': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'is_hiv_positive': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'maternal_consent': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalConsent']", 'unique': 'True'}),
'maternal_haart': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_registration.RegisteredSubject']", 'unique': 'True'}),
'registration_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternaleligibilitypostaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEligibilityPostAudit', 'db_table': "'mpepu_maternal_maternaleligibilitypost_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'agree_follow_up': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'days_pnc': ('django.db.models.fields.IntegerField', [], {}),
'feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'is_cd4_low': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'is_hiv_positive': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'maternal_consent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaleligibilitypost'", 'to': "orm['mpepu_maternal.MaternalConsent']"}),
'maternal_haart': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaleligibilitypost'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'registration_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
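        # Enrollment family: MaternalEnroll is the parent form, and the Arv/Clin/Dem/
        # Med/Ob models each attach to it through a unique OneToOneField
        # ('maternal_enroll'), while their audit twins downgrade that link to a plain
        # ForeignKey. ManyToMany fields (prior_arv, hh_goods, chronic_cond) are not
        # repeated in the audit variants.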
'mpepu_maternal.maternalenroll': {
'Meta': {'object_name': 'MaternalEnroll'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'height': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'prev_pregnancies': ('django.db.models.fields.IntegerField', [], {}),
'prev_pregnancy_arv': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prior_health_haart': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'recruit_source': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'recruit_source_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'weight': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'})
},
'mpepu_maternal.maternalenrollarv': {
'Meta': {'object_name': 'MaternalEnrollArv'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'haart_changes': ('django.db.models.fields.IntegerField', [], {}),
'haart_start_date': ('django.db.models.fields.DateField', [], {}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'is_date_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'maternal_enroll': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalEnroll']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'preg_on_haart': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prior_arv': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.PriorArv']", 'symmetrical': 'False'}),
'prior_arv_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'prior_preg': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrollarvaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollArvAudit', 'db_table': "'mpepu_maternal_maternalenrollarv_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'haart_changes': ('django.db.models.fields.IntegerField', [], {}),
'haart_start_date': ('django.db.models.fields.DateField', [], {}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'is_date_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'maternal_enroll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollarv'", 'to': "orm['mpepu_maternal.MaternalEnroll']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollarv'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'preg_on_haart': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prior_arv_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'prior_preg': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrollaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollAudit', 'db_table': "'mpepu_maternal_maternalenroll_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'height': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenroll'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'prev_pregnancies': ('django.db.models.fields.IntegerField', [], {}),
'prev_pregnancy_arv': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prior_health_haart': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'recruit_source': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'recruit_source_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'weight': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'})
},
'mpepu_maternal.maternalenrollclin': {
'Meta': {'object_name': 'MaternalEnrollClin'},
'cd4_count': ('django.db.models.fields.IntegerField', [], {}),
'cd4_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'is_date_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'maternal_enroll': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalEnroll']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'prev_preg_azt': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prev_preg_haart': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prev_sdnvp_labour': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrollclinaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollClinAudit', 'db_table': "'mpepu_maternal_maternalenrollclin_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'cd4_count': ('django.db.models.fields.IntegerField', [], {}),
'cd4_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'is_date_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'maternal_enroll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollclin'", 'to': "orm['mpepu_maternal.MaternalEnroll']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollclin'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'prev_preg_azt': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prev_preg_haart': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'prev_sdnvp_labour': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrolldem': {
'Meta': {'object_name': 'MaternalEnrollDem'},
'cooking_method': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'current_occupation': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'current_occupation_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'ethnicity': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'ethnicity_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'hh_goods': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.HhGoods']", 'symmetrical': 'False'}),
'highest_education': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'house_electrified': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'house_fridge': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'house_people_number': ('django.db.models.fields.IntegerField', [], {}),
'house_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'know_hiv_status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'marital_status': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'marital_status_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'maternal_enroll': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalEnroll']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'money_earned': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'money_earned_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'own_phone': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'provides_money': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'provides_money_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'toilet_facility': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'toilet_facility_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'water_source': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'mpepu_maternal.maternalenrolldemaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollDemAudit', 'db_table': "'mpepu_maternal_maternalenrolldem_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'cooking_method': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'current_occupation': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'current_occupation_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'ethnicity': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'ethnicity_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'highest_education': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'house_electrified': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'house_fridge': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'house_people_number': ('django.db.models.fields.IntegerField', [], {}),
'house_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'know_hiv_status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'marital_status': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'marital_status_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'maternal_enroll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrolldem'", 'to': "orm['mpepu_maternal.MaternalEnroll']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrolldem'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'money_earned': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'money_earned_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'own_phone': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'provides_money': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'provides_money_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'toilet_facility': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'toilet_facility_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'water_source': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'mpepu_maternal.maternalenrolldx': {
'Meta': {'object_name': 'MaternalEnrollDx'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'diagnosis': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'diagnosis_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_enroll_med': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalEnrollMed']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrolldxaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollDxAudit', 'db_table': "'mpepu_maternal_maternalenrolldx_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'diagnosis': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'diagnosis_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_enroll_med': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrolldx'", 'to': "orm['mpepu_maternal.MaternalEnrollMed']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrollmed': {
'Meta': {'object_name': 'MaternalEnrollMed'},
'chronic_cond': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.ChronicCond']", 'symmetrical': 'False'}),
'chronic_cond_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_chronic_cond': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_enroll': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalEnroll']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'who_diagnosis': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
'mpepu_maternal.maternalenrollmedaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollMedAudit', 'db_table': "'mpepu_maternal_maternalenrollmed_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'chronic_cond_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_chronic_cond': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_enroll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollmed'", 'to': "orm['mpepu_maternal.MaternalEnroll']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollmed'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'who_diagnosis': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
'mpepu_maternal.maternalenrollob': {
'Meta': {'object_name': 'MaternalEnrollOb'},
'children_died_b4_5yrs': ('django.db.models.fields.IntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'live_children': ('django.db.models.fields.IntegerField', [], {}),
'lost_after_24wks': ('django.db.models.fields.IntegerField', [], {}),
'lost_before_24wks': ('django.db.models.fields.IntegerField', [], {}),
'maternal_enroll': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalEnroll']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'pregs_24wks_or_more': ('django.db.models.fields.IntegerField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalenrollobaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalEnrollObAudit', 'db_table': "'mpepu_maternal_maternalenrollob_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'children_died_b4_5yrs': ('django.db.models.fields.IntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'live_children': ('django.db.models.fields.IntegerField', [], {}),
'lost_after_24wks': ('django.db.models.fields.IntegerField', [], {}),
'lost_before_24wks': ('django.db.models.fields.IntegerField', [], {}),
'maternal_enroll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollob'", 'to': "orm['mpepu_maternal.MaternalEnroll']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalenrollob'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'pregs_24wks_or_more': ('django.db.models.fields.IntegerField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdel': {
'Meta': {'object_name': 'MaternalLabDel'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'del_comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'del_comp': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.DelComp']", 'symmetrical': 'False'}),
'del_comp_other': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'del_hosp': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'del_hosp_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'del_mode': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'del_time_is_est': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'delivery_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'ga': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'has_chorioamnionitis': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_del_comp': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_ga': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'has_urine_tender': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'labour_hrs': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'labr_max_temp': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '1'}),
'live_infants': ('django.db.models.fields.IntegerField', [], {}),
'live_infants_to_register': ('django.db.models.fields.IntegerField', [], {}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'still_born_congen_abn': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'still_born_has_congen_abn': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'still_borns': ('django.db.models.fields.IntegerField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdelaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalLabDelAudit', 'db_table': "'mpepu_maternal_maternallabdel_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'del_comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'del_comp_other': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'del_hosp': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'del_hosp_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'del_mode': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'del_time_is_est': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'delivery_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'ga': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'has_chorioamnionitis': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_del_comp': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_ga': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'has_urine_tender': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'labour_hrs': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'labr_max_temp': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '1'}),
'live_infants': ('django.db.models.fields.IntegerField', [], {}),
'live_infants_to_register': ('django.db.models.fields.IntegerField', [], {}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdel'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'still_born_congen_abn': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'still_born_has_congen_abn': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'still_borns': ('django.db.models.fields.IntegerField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdelclinic': {
'Meta': {'object_name': 'MaternalLabDelClinic'},
'cd4_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cd4_result': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_cd4': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_vl': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_lab_del': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalLabDel']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'suppliment': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.Suppliment']", 'symmetrical': 'False'}),
'took_suppliments': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'vl_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'vl_result': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'})
},
'mpepu_maternal.maternallabdelclinicaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalLabDelClinicAudit', 'db_table': "'mpepu_maternal_maternallabdelclinic_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'cd4_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cd4_result': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_cd4': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_vl': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_lab_del': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdelclinic'", 'to': "orm['mpepu_maternal.MaternalLabDel']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdelclinic'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'took_suppliments': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'vl_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'vl_result': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'})
},
'mpepu_maternal.maternallabdeldx': {
'Meta': {'object_name': 'MaternalLabDelDx'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_preg_dx': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_who_dx': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_lab_del': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalLabDel']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'wcs_dx_adult': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['bhp_code_lists.WcsDxAdult']", 'symmetrical': 'False'})
},
'mpepu_maternal.maternallabdeldxaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalLabDelDxAudit', 'db_table': "'mpepu_maternal_maternallabdeldx_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_preg_dx': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_who_dx': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_lab_del': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdeldx'", 'to': "orm['mpepu_maternal.MaternalLabDel']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdeldx'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdeldxt': {
'Meta': {'object_name': 'MaternalLabDelDxT'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grade': ('django.db.models.fields.IntegerField', [], {}),
'hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'lab_del_dx': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'lab_del_dx_specify': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'maternal_lab_del_dx': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalLabDelDx']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdeldxtaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalLabDelDxTAudit', 'db_table': "'mpepu_maternal_maternallabdeldxt_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grade': ('django.db.models.fields.IntegerField', [], {}),
'hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'lab_del_dx': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'lab_del_dx_specify': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'maternal_lab_del_dx': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdeldxt'", 'to': "orm['mpepu_maternal.MaternalLabDelDx']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdelmed': {
'Meta': {'object_name': 'MaternalLabDelMed'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_health_cond': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_ob_comp': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'health_cond': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.HealthCond']", 'symmetrical': 'False'}),
'health_cond_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_lab_del': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalLabDel']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'ob_comp': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.ObComp']", 'symmetrical': 'False'}),
'ob_comp_other': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallabdelmedaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalLabDelMedAudit', 'db_table': "'mpepu_maternal_maternallabdelmed_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'has_health_cond': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'has_ob_comp': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'health_cond_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_lab_del': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdelmed'", 'to': "orm['mpepu_maternal.MaternalLabDel']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallabdelmed'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'ob_comp_other': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallocator': {
'Meta': {'object_name': 'MaternalLocator'},
'care_clinic': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'caretaker_cell': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'caretaker_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'caretaker_tel': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_cell': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_phone': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_physical_address': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'contact_rel': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_signed': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 5, 7, 0, 0)'}),
'has_caretaker_alt': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'home_visit_permission': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'mail_address': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'may_call_work': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'may_contact_someone': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'may_follow_up': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'physical_address': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_registration.RegisteredSubject']", 'unique': 'True', 'null': 'True'}),
'report_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 5, 7, 0, 0)'}),
'subject_cell': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_cell_alt': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_phone': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_phone_alt': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_work_phone': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_work_place': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternallocatoraudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalLocatorAudit', 'db_table': "'mpepu_maternal_maternallocator_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'care_clinic': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'caretaker_cell': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'caretaker_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'caretaker_tel': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_cell': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_name': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_phone': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'contact_physical_address': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'contact_rel': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_signed': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 5, 7, 0, 0)'}),
'has_caretaker_alt': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'home_visit_permission': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'mail_address': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallocator'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'may_call_work': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'may_contact_someone': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'may_follow_up': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'physical_address': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternallocator'", 'null': 'True', 'to': "orm['bhp_registration.RegisteredSubject']"}),
'report_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 5, 7, 0, 0)'}),
'subject_cell': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_cell_alt': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_phone': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_phone_alt': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_work_phone': ('django.db.models.fields.CharField', [], {'max_length': '78L', 'null': 'True', 'blank': 'True'}),
'subject_work_place': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternaloffstudy': {
'Meta': {'object_name': 'MaternalOffStudy'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'offstudy_date': ('django.db.models.fields.DateField', [], {}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'reason_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_registration.RegisteredSubject']", 'unique': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternaloffstudyaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalOffStudyAudit', 'db_table': "'mpepu_maternal_maternaloffstudy_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'offstudy_date': ('django.db.models.fields.DateField', [], {}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'reason_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternaloffstudy'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalpostfu': {
'Meta': {'object_name': 'MaternalPostFu'},
'breastfeeding': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'chronic_cond': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mpepu_list.ChronicCond']", 'symmetrical': 'False'}),
'chronic_cond_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '350', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'enter_weight': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '4', 'decimal_places': '1', 'blank': 'True'}),
'had_mastitis': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'has_chronic_cond': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'mother_weight': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'started_ctx': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalpostfuaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalPostFuAudit', 'db_table': "'mpepu_maternal_maternalpostfu_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'breastfeeding': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'chronic_cond_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '350', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'enter_weight': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '4', 'decimal_places': '1', 'blank': 'True'}),
'had_mastitis': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'has_chronic_cond': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalpostfu'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'mother_weight': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'started_ctx': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalpostfudx': {
'Meta': {'object_name': 'MaternalPostFuDx'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_post_fu': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['mpepu_maternal.MaternalPostFu']", 'unique': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'mother_hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'new_diagnoses': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'wcs_dx_adult': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['bhp_code_lists.WcsDxAdult']", 'symmetrical': 'False'}),
'who_clinical_stage': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
'mpepu_maternal.maternalpostfudxaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalPostFuDxAudit', 'db_table': "'mpepu_maternal_maternalpostfudx_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_post_fu': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalpostfudx'", 'to': "orm['mpepu_maternal.MaternalPostFu']"}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalpostfudx'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'mother_hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'new_diagnoses': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'who_clinical_stage': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
'mpepu_maternal.maternalpostfudxt': {
'Meta': {'object_name': 'MaternalPostFuDxT'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grade': ('django.db.models.fields.IntegerField', [], {'max_length': '3'}),
'hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_post_fu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalPostFuDx']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'post_fu_dx': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'post_fu_specify': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalpostfudxtaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalPostFuDxTAudit', 'db_table': "'mpepu_maternal_maternalpostfudxt_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grade': ('django.db.models.fields.IntegerField', [], {'max_length': '3'}),
'hospitalized': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_post_fu': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalpostfudxt'", 'to': "orm['mpepu_maternal.MaternalPostFuDx']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'post_fu_dx': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'post_fu_specify': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalpostreg': {
'Meta': {'object_name': 'MaternalPostReg'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'reg_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'registered_subject': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_registration.RegisteredSubject']", 'unique': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalpostregaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalPostRegAudit', 'db_table': "'mpepu_maternal_maternalpostreg_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'reg_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalpostreg'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalvisit': {
'Meta': {'object_name': 'MaternalVisit'},
'appointment': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_appointment.Appointment']", 'unique': 'True'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'info_source': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'info_source_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'reason_missed': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'report_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.maternalvisitaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalVisitAudit', 'db_table': "'mpepu_maternal_maternalvisit_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'appointment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalvisit'", 'to': "orm['bhp_appointment.Appointment']"}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'info_source': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'info_source_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'reason_missed': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'report_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.postnatalinfantfeedingsurvey': {
'Meta': {'object_name': 'PostNatalInfantFeedingSurvey'},
'correct_bf_duration': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'feeding_duration': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'feeding_period': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'feeding_satisfaction': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'next_feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
},
'mpepu_maternal.postnatalinfantfeedingsurveyaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'PostNatalInfantFeedingSurveyAudit', 'db_table': "'mpepu_maternal_postnatalinfantfeedingsurvey_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'correct_bf_duration': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'feeding_duration': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'feeding_period': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'feeding_satisfaction': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'mac.local'", 'max_length': '50', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_postnatalinfantfeedingsurvey'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'next_feeding_choice': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'db_index': 'True'})
}
}
complete_apps = ['mpepu_maternal'] | [
"[email protected]"
] | |
071f313333644280eff6ca1bb6fa556a4d39940e | 5e0755091efd2d4ed61bead8aa38b45bab5a8b07 | /python/anyascii/_data/_283.py | b81e852d72d7e36591fc185caf37afc52ce242cd | [
"ISC"
] | permissive | casept/anyascii | c27261d87257c17c47fe0e9fc77438437de94c1c | d4f426b91751254b68eaa84c6cd23099edd668e6 | refs/heads/master | 2022-12-05T07:13:53.075144 | 2020-08-07T07:55:50 | 2020-08-07T07:55:50 | 285,904,577 | 0 | 0 | ISC | 2020-08-07T19:20:00 | 2020-08-07T19:19:59 | null | UTF-8 | Python | false | false | 669 | py | b='Nhe Hong Zang Chai Hong Tian Zhi Xing Xu Zhen Wan Jun Wo Lu Zheng Rong Cheng Fu E Tao Tang Juan Chao Ta Di Juk Zong Keng Tui Keng Hon Co Rong Yun He Zong Cong Qiu Mu Duo Xu Keng Xian Du Kan Ying Zi Jyun Sau Huang Peng Li Bo Ge Ju Ke Hu Yao Tang Qiong Rong Liu Hui Ji Gwan So Zhi Gwan Tang Zhi Kang Yang Tang Hong Liang Cao Ngao Truoc Truoc Nai Zong Deng Jiao Peng Guang Er Jian Jiao Nuo Zao Peng Dang Qu Lian Mu Lan Fen Hun Nhe Kuang Yin Shuan Jian Luo Lu Ge Rang Pin Long Zhen Xian So Lin Lian Shan Bo Li Xie Ge Min Lian Jue Zhou Ke' | [
"[email protected]"
] | |
133c0a93bb9ae4ac98104c70fb11d0e6f8f560f2 | 5e2a66e0416dcaf4674bd58f7dd7bc905800aa92 | /chapter1_intro_to_wx/hello_with_classes.py | 65730999e668f7a09c7a3dd0cb78c78784a4fe2c | [] | no_license | gridl/applications_with_wxpython | c96ed05b49e3494323e612afb1baccc8ea1e6f93 | 1fc63c384b7856402b99a97bf91fe0966a5ec413 | refs/heads/master | 2020-05-17T08:37:01.672727 | 2019-04-25T19:24:14 | 2019-04-25T19:24:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | # hello_with_classes.py
import wx
class MyFrame(wx.Frame):
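    """A top-level frame titled 'Hello World' that shows itself on creation."""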
def __init__(self):
wx.Frame.__init__(self, None, title='Hello World')
self.Show()
if __name__ == '__main__':
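    # redirect=False keeps stdout/stderr in the console instead of a wx output window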
app = wx.App(redirect=False)
frame = MyFrame()
app.MainLoop()
| [
"[email protected]"
] | |
019b8a110b6d7f632b7b2b3980b4615fba637ae6 | 2d2c10ffa7aa5ee35393371e7f8c13b4fab94446 | /projects/ai/sentiment/prepare.test/to-chars.py | c4b01deea6d39c8ddb5798ccb4ffe0b1d88f440d | [] | no_license | faker2081/pikachu2 | bec83750a5ff3c7b5a26662000517df0f608c1c1 | 4f06d47c7bf79eb4e5a22648e088b3296dad3b2d | refs/heads/main | 2023-09-02T00:28:41.723277 | 2021-11-17T11:15:44 | 2021-11-17T11:15:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,486 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
# \file to-chars.py
# \author chenghuige
# \date 2018-10-28 08:37:28.846557
# \Description
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import six
assert six.PY3
import pandas as pd
from projects.ai2018.sentiment.prepare import filter
from tqdm import tqdm
import traceback
ifile = sys.argv[1]
ofile = sys.argv[2]
ids_set = set()
fm = 'w'
if os.path.exists(ofile):
fm = 'a'
for line in open(ofile):
ids_set.add(line.split('\t')[0])
print('%s already done %d' % (ofile, len(ids_set)))
num_errs = 0
with open(ofile, fm) as out:
df = pd.read_csv(ifile, lineterminator='\n')
contents = df['content'].values
ids = df['id'].values
for i in tqdm(range(len(df)), ascii=True):
if str(ids[i]) in ids_set:
continue
#if i != 2333:
# continue
#print(gezi.cut(filter.filter(contents[i]), type_))
try:
l = []
for ch in filter.filter(contents[i]):
l.append(ch)
print(' '.join(l), file=out)
except Exception:
if num_errs == 0:
print(traceback.format_exc())
num_errs += 1
continue
#exit(0)
print('num_errs:', num_errs, 'ratio:', num_errs / len(df))
| [
"[email protected]"
] | |
edc78db0c65027ba6f752324f11b3a42bab1d392 | 3f6c16ea158a8fb4318b8f069156f1c8d5cff576 | /.PyCharm2019.1/system/python_stubs/-1046095393/math.py | ba1d90a62a63a5288b163f4b942262c8e01a3128 | [] | no_license | sarthak-patidar/dotfiles | 08494170d2c0fedc0bbe719cc7c60263ce6fd095 | b62cd46f3491fd3f50c704f0255730af682d1f80 | refs/heads/master | 2020-06-28T23:42:17.236273 | 2019-10-01T13:56:27 | 2019-10-01T13:56:27 | 200,369,900 | 0 | 0 | null | 2019-08-03T12:56:33 | 2019-08-03T11:53:29 | Shell | UTF-8 | Python | false | false | 11,974 | py | # encoding: utf-8
# module math
# from (built-in)
# by generator 1.147
"""
This module is always available. It provides access to the
mathematical functions defined by the C standard.
"""
# no imports
# Variables with simple values
e = 2.718281828459045
inf = inf
nan = nan
pi = 3.141592653589793
tau = 6.283185307179586
# functions
def acos(x): # real signature unknown; restored from __doc__
"""
acos(x)
Return the arc cosine (measured in radians) of x.
"""
pass
def acosh(x): # real signature unknown; restored from __doc__
"""
acosh(x)
Return the inverse hyperbolic cosine of x.
"""
pass
def asin(x): # real signature unknown; restored from __doc__
"""
asin(x)
Return the arc sine (measured in radians) of x.
"""
pass
def asinh(x): # real signature unknown; restored from __doc__
"""
asinh(x)
Return the inverse hyperbolic sine of x.
"""
pass
def atan(x): # real signature unknown; restored from __doc__
"""
atan(x)
Return the arc tangent (measured in radians) of x.
"""
pass
def atan2(y, x): # real signature unknown; restored from __doc__
"""
atan2(y, x)
Return the arc tangent (measured in radians) of y/x.
Unlike atan(y/x), the signs of both x and y are considered.
"""
pass
def atanh(x): # real signature unknown; restored from __doc__
"""
atanh(x)
Return the inverse hyperbolic tangent of x.
"""
pass
def ceil(x): # real signature unknown; restored from __doc__
"""
ceil(x)
Return the ceiling of x as an Integral.
This is the smallest integer >= x.
"""
pass
def copysign(x, y): # real signature unknown; restored from __doc__
"""
copysign(x, y)
Return a float with the magnitude (absolute value) of x but the sign
of y. On platforms that support signed zeros, copysign(1.0, -0.0)
returns -1.0.
"""
pass
def cos(x): # real signature unknown; restored from __doc__
"""
cos(x)
Return the cosine of x (measured in radians).
"""
pass
def cosh(x): # real signature unknown; restored from __doc__
"""
cosh(x)
Return the hyperbolic cosine of x.
"""
pass
def degrees(x): # real signature unknown; restored from __doc__
"""
degrees(x)
Convert angle x from radians to degrees.
"""
pass
def erf(x): # real signature unknown; restored from __doc__
"""
erf(x)
Error function at x.
"""
pass
def erfc(x): # real signature unknown; restored from __doc__
"""
erfc(x)
Complementary error function at x.
"""
pass
def exp(x): # real signature unknown; restored from __doc__
"""
exp(x)
Return e raised to the power of x.
"""
pass
def expm1(x): # real signature unknown; restored from __doc__
"""
expm1(x)
Return exp(x)-1.
This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x.
"""
pass
def fabs(x): # real signature unknown; restored from __doc__
"""
fabs(x)
Return the absolute value of the float x.
"""
pass
def factorial(x): # real signature unknown; restored from __doc__
"""
factorial(x) -> Integral
Find x!. Raise a ValueError if x is negative or non-integral.
"""
pass
def floor(x): # real signature unknown; restored from __doc__
"""
floor(x)
Return the floor of x as an Integral.
This is the largest integer <= x.
"""
pass
def fmod(x, y): # real signature unknown; restored from __doc__
"""
fmod(x, y)
Return fmod(x, y), according to platform C. x % y may differ.
"""
pass
def frexp(x): # real signature unknown; restored from __doc__
"""
frexp(x)
Return the mantissa and exponent of x, as pair (m, e).
m is a float and e is an int, such that x = m * 2.**e.
If x is 0, m and e are both 0. Else 0.5 <= abs(m) < 1.0.
"""
pass
def fsum(iterable): # real signature unknown; restored from __doc__
"""
fsum(iterable)
Return an accurate floating point sum of values in the iterable.
Assumes IEEE-754 floating point arithmetic.
"""
pass
def gamma(x): # real signature unknown; restored from __doc__
"""
gamma(x)
Gamma function at x.
"""
pass
def gcd(x, y): # real signature unknown; restored from __doc__
"""
gcd(x, y) -> int
greatest common divisor of x and y
"""
return 0
def hypot(x, y): # real signature unknown; restored from __doc__
"""
hypot(x, y)
Return the Euclidean distance, sqrt(x*x + y*y).
"""
pass
def isclose(a, b, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
"""
isclose(a, b, *, rel_tol=1e-09, abs_tol=0.0) -> bool
Determine whether two floating point numbers are close in value.
rel_tol
maximum difference for being considered "close", relative to the
magnitude of the input values
abs_tol
maximum difference for being considered "close", regardless of the
magnitude of the input values
Return True if a is close in value to b, and False otherwise.
For the values to be considered close, the difference between them
must be smaller than at least one of the tolerances.
-inf, inf and NaN behave similarly to the IEEE 754 Standard. That
is, NaN is not close to anything, even itself. inf and -inf are
only close to themselves.
"""
pass
def isfinite(x): # real signature unknown; restored from __doc__
"""
isfinite(x) -> bool
Return True if x is neither an infinity nor a NaN, and False otherwise.
"""
return False
def isinf(x): # real signature unknown; restored from __doc__
"""
isinf(x) -> bool
Return True if x is a positive or negative infinity, and False otherwise.
"""
return False
def isnan(x): # real signature unknown; restored from __doc__
"""
isnan(x) -> bool
Return True if x is a NaN (not a number), and False otherwise.
"""
return False
def ldexp(x, i): # real signature unknown; restored from __doc__
"""
ldexp(x, i)
Return x * (2**i).
"""
pass
def lgamma(x): # real signature unknown; restored from __doc__
"""
lgamma(x)
Natural logarithm of absolute value of Gamma function at x.
"""
pass
def log(x, base=None): # real signature unknown; restored from __doc__
"""
log(x[, base])
Return the logarithm of x to the given base.
If the base not specified, returns the natural logarithm (base e) of x.
"""
pass
def log10(x): # real signature unknown; restored from __doc__
"""
log10(x)
Return the base 10 logarithm of x.
"""
pass
def log1p(x): # real signature unknown; restored from __doc__
"""
log1p(x)
Return the natural logarithm of 1+x (base e).
The result is computed in a way which is accurate for x near zero.
"""
pass
def log2(x): # real signature unknown; restored from __doc__
"""
log2(x)
Return the base 2 logarithm of x.
"""
pass
def modf(x): # real signature unknown; restored from __doc__
"""
modf(x)
Return the fractional and integer parts of x. Both results carry the sign
of x and are floats.
"""
pass
def pow(x, y): # real signature unknown; restored from __doc__
"""
pow(x, y)
Return x**y (x to the power of y).
"""
pass
def radians(x): # real signature unknown; restored from __doc__
"""
radians(x)
Convert angle x from degrees to radians.
"""
pass
def sin(x): # real signature unknown; restored from __doc__
"""
sin(x)
Return the sine of x (measured in radians).
"""
pass
def sinh(x): # real signature unknown; restored from __doc__
"""
sinh(x)
Return the hyperbolic sine of x.
"""
pass
def sqrt(x): # real signature unknown; restored from __doc__
"""
sqrt(x)
Return the square root of x.
"""
pass
def tan(x): # real signature unknown; restored from __doc__
"""
tan(x)
Return the tangent of x (measured in radians).
"""
pass
def tanh(x): # real signature unknown; restored from __doc__
"""
tanh(x)
Return the hyperbolic tangent of x.
"""
pass
def trunc(x): # real signature unknown; restored from __doc__
"""
trunc(x:Real) -> Integral
Truncates x to the nearest Integral toward 0. Uses the __trunc__ magic method.
"""
pass
# classes
class __loader__(object):
"""
Meta path import for built-in modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def create_module(cls, *args, **kwargs): # real signature unknown
""" Create a built-in module """
pass
@classmethod
def exec_module(cls, *args, **kwargs): # real signature unknown
""" Exec a built-in module """
pass
@classmethod
def find_module(cls, *args, **kwargs): # real signature unknown
"""
Find the built-in module.
If 'path' is ever specified then the search is considered a failure.
This method is deprecated. Use find_spec() instead.
"""
pass
@classmethod
def find_spec(cls, *args, **kwargs): # real signature unknown
pass
@classmethod
def get_code(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have code objects. """
pass
@classmethod
def get_source(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have source code. """
pass
@classmethod
def is_package(cls, *args, **kwargs): # real signature unknown
""" Return False as built-in modules are never packages. """
pass
@classmethod
def load_module(cls, *args, **kwargs): # real signature unknown
"""
Load the specified module into sys.modules and return it.
This method is deprecated. Use loader.exec_module instead.
"""
pass
def module_repr(module): # reliably restored by inspect
"""
Return repr for the module.
The method is deprecated. The import machinery does the job itself.
"""
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is "mappingproxy({'__module__': '_frozen_importlib', '__doc__': 'Meta path import for built-in modules.\\n\\n All methods are either class or static methods to avoid the need to\\n instantiate the class.\\n\\n ', 'module_repr': <staticmethod object at 0x7f1f2a7150f0>, 'find_spec': <classmethod object at 0x7f1f2a715128>, 'find_module': <classmethod object at 0x7f1f2a715160>, 'create_module': <classmethod object at 0x7f1f2a715198>, 'exec_module': <classmethod object at 0x7f1f2a7151d0>, 'get_code': <classmethod object at 0x7f1f2a715240>, 'get_source': <classmethod object at 0x7f1f2a7152b0>, 'is_package': <classmethod object at 0x7f1f2a715320>, 'load_module': <classmethod object at 0x7f1f2a715358>, '__dict__': <attribute '__dict__' of 'BuiltinImporter' objects>, '__weakref__': <attribute '__weakref__' of 'BuiltinImporter' objects>})"
# variables with complex values
__spec__ = None # (!) real value is "ModuleSpec(name='math', loader=<class '_frozen_importlib.BuiltinImporter'>, origin='built-in')"
| [
"[email protected]"
] | |
5c23552b144d0f866937c9b01f222bb74d2a6c65 | c415caab95b63c8b3dd217cd5cf2845362e5df77 | /concept/auto_generated/afrl/cmasi/WavelengthBand.py | 4ff57fc6791f5a2d3d3092ee43b40dcb4fdd41b1 | [] | no_license | GaloisInc/amase-code-generator | 10fe109061c78a7a41ebee6a805476eb0fde9b73 | 2d18cc8e25b86dd22e1b3d2862178e2f598b18ab | refs/heads/master | 2020-09-27T10:26:35.411118 | 2017-01-06T23:52:38 | 2017-01-06T23:52:38 | 66,033,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | #! /usr/bin/python
class WavelengthBand:
AllAny = 0
EO = 1
LWIR = 2
SWIR = 3
MWIR = 4
Other = 5
def get_WavelengthBand_str(str):
"""
Returns a numerical value from a string
"""
if str == "AllAny": return WavelengthBand.AllAny
if str == "EO": return WavelengthBand.EO
if str == "LWIR": return WavelengthBand.LWIR
if str == "SWIR": return WavelengthBand.SWIR
if str == "MWIR": return WavelengthBand.MWIR
if str == "Other": return WavelengthBand.Other
def get_WavelengthBand_int(val):
"""
Returns a string representation from an int
"""
if val == WavelengthBand.AllAny: return "AllAny"
if val == WavelengthBand.EO: return "EO"
if val == WavelengthBand.LWIR: return "LWIR"
if val == WavelengthBand.SWIR: return "SWIR"
if val == WavelengthBand.MWIR: return "MWIR"
if val == WavelengthBand.Other: return "Other"
return WavelengthBand.AllAny
| [
"[email protected]"
] | |
3c854ed5a7b301776fed37dd14bf8b1ac5d6ea1b | d539072e556343c748619883a525a88318cf6003 | /db/models.py | d48d8080051b2dc5314381da4892bbaecb802c2e | [] | no_license | W1ntersnow/fastapi_simple_sku | 87dcf861ba02e0314e9f2c48a1ac82ef0e0d8969 | f62abc33810e17bac7a7227f76a6fcfafa0f5979 | refs/heads/master | 2022-07-31T06:14:41.011961 | 2020-05-19T08:14:59 | 2020-05-19T08:14:59 | 265,180,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from . import Base
class ItemType(Base):
__tablename__ = "item_types"
id = Column(Integer, primary_key=True, index=True)
title = Column(String, unique=True)
description = Column(String)
items = relationship("Item", back_populates="type")
class Item(Base):
__tablename__ = "items"
id = Column(Integer, primary_key=True, index=True)
title = Column(String)
sku = Column(String)
balance = Column(Integer, default=0)
description = Column(String)
type_id = Column(Integer, ForeignKey("item_types.id"))
type = relationship("ItemType", back_populates="items")
| [
"[email protected]"
] | |
06e6786e163a00c5c9e3fc1ccb82b1b65a94661e | 2576f319799e2a6ed6f4b7025ab87bf7262a3a9f | /eng_text_norm/cmudict.py | 6caf56c745ddc90b522cee6bded625d78724f2aa | [] | no_license | soon14/TTS_text_norm | f262a6ca04dfcfb6137662ceee9fe59617266dab | d094160a958b9117f15131742b4751acf45d1249 | refs/heads/master | 2022-06-19T10:37:47.872213 | 2020-05-12T07:06:54 | 2020-05-12T07:06:54 | 264,113,334 | 2 | 1 | null | 2020-05-15T06:23:01 | 2020-05-15T06:23:01 | null | UTF-8 | Python | false | false | 2,090 | py | import re
valid_symbols = [
'AA', 'AA0', 'AA1', 'AA2', 'AE', 'AE0', 'AE1', 'AE2', 'AH', 'AH0', 'AH1', 'AH2',
'AO', 'AO0', 'AO1', 'AO2', 'AW', 'AW0', 'AW1', 'AW2', 'AY', 'AY0', 'AY1', 'AY2',
'B', 'CH', 'D', 'DH', 'EH', 'EH0', 'EH1', 'EH2', 'ER', 'ER0', 'ER1', 'ER2', 'EY',
'EY0', 'EY1', 'EY2', 'F', 'G', 'HH', 'IH', 'IH0', 'IH1', 'IH2', 'IY', 'IY0', 'IY1',
'IY2', 'JH', 'K', 'L', 'M', 'N', 'NG', 'OW', 'OW0', 'OW1', 'OW2', 'OY', 'OY0',
'OY1', 'OY2', 'P', 'R', 'S', 'SH', 'T', 'TH', 'UH', 'UH0', 'UH1', 'UH2', 'UW',
'UW0', 'UW1', 'UW2', 'V', 'W', 'Y', 'Z', 'ZH'
]
_valid_symbol_set = set(valid_symbols)
class CMUDict:
'''Thin wrapper around CMUDict data. http://www.speech.cs.cmu.edu/cgi-bin/cmudict'''
def __init__(self, file_or_path, keep_ambiguous=True):
if isinstance(file_or_path, str):
with open(file_or_path, encoding='latin-1') as f:
entries = _parse_cmudict(f)
else:
entries = _parse_cmudict(file_or_path)
if not keep_ambiguous:
entries = {word: pron for word, pron in entries.items() if len(pron) == 1}
self._entries = entries
def __len__(self):
return len(self._entries)
def lookup(self, word):
'''Returns list of ARPAbet pronunciations of the given word.'''
return self._entries.get(word.upper())
_alt_re = re.compile(r'\([0-9]+\)')
def _parse_cmudict(file):
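    # Keep only entry lines (those starting with A-Z or an apostrophe); alternate
    # pronunciations such as "WORD(1)" are merged under the base word.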
cmudict = {}
for line in file:
if len(line) and (line[0] >= 'A' and line[0] <= 'Z' or line[0] == "'"):
parts = line.split(' ')
word = re.sub(_alt_re, '', parts[0])
pronunciation = _get_pronunciation(parts[1])
if pronunciation:
if word in cmudict:
cmudict[word].append(pronunciation)
else:
cmudict[word] = [pronunciation]
return cmudict
def _get_pronunciation(s):
parts = s.strip().split(' ')
for part in parts:
if part not in _valid_symbol_set:
return None
return ' '.join(parts)
| [
"[email protected]"
] | |
1ea61fd5bebffa3f350071c167bcb521e65cee9d | 5fda498ef0bfc06962ad9b864d229193c45ccb4a | /Project2_Data_Wrangle_OpenStreetMaps_Data_R1/problem_sets1-6/Lesson_3_Problem_Set/01-Auditing_Data_Quality/audit.py | 0f95f771d5fb8eea4a50b47eeedc344417c91214 | [] | no_license | prabhurgit/Data_Aanlyst_Nanodegree_projects | 7934869b63cae57cb2851e22a5023c6cbe3d18ba | a7a13d93c632cd1840ba3a00fff80a60a131b7f3 | refs/heads/master | 2021-05-31T18:47:48.669414 | 2016-03-30T04:08:39 | 2016-03-30T04:08:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,195 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
In this problem set you work with cities infobox data, audit it, come up with a cleaning idea and then
clean it up. In the first exercise we want you to audit the datatypes that can be found in some
particular fields in the dataset.
The possible types of values can be:
- 'NoneType' if the value is a string "NULL" or an empty string ""
- 'list', if the value starts with "{"
- 'int', if the value can be cast to int
- 'float', if the value can be cast to float, but is not an int
- 'str', for all other values
The audit_file function should return a dictionary containing fieldnames and a set of the datatypes
that can be found in the field.
All the data initially is a string, so you have to do some checks on the values first.
"""
import codecs
import csv
import json
import pprint
CITIES = 'cities.csv'
FIELDS = ["name", "timeZone_label", "utcOffset", "homepage", "governmentType_label", "isPartOf_label", "areaCode", "populationTotal",
"elevation", "maximumElevation", "minimumElevation", "populationDensity", "wgs84_pos#lat", "wgs84_pos#long",
"areaLand", "areaMetro", "areaUrban"]
def audit_file(filename, fields):
fieldtypes = {}
for key in fields:
fieldtypes[key] = set([])
with open(filename, "rb") as file_data:
reader = csv.DictReader(file_data)
for line in reader:
if line["URI"][:18] == "http://dbpedia.org":
for key in FIELDS:
fieldtypes[key].add(check_type(line, key))
# YOUR CODE HERE
return fieldtypes
def check_type(row_dict, key):
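    # Checks run in order: NULL/empty string -> list ("{" prefix) -> int -> float -> fallback str.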
if row_dict[key] == "NULL" or "":
return type(None)
elif row_dict[key][0] == "{":
return type([])
elif row_dict[key].isdigit():
return type(1)
try:
float(row_dict[key])
return type(1.0)
except ValueError:
pass
return type("a")
def test():
fieldtypes = audit_file(CITIES, FIELDS)
pprint.pprint(fieldtypes)
assert fieldtypes["areaLand"] == set([type(1.1), type([]), type(None)])
assert fieldtypes['areaMetro'] == set([type(1.1), type(None)])
if __name__ == "__main__":
test()
| [
"[email protected]"
] | |
19c4a163691d31b64a38113c543e213cfe860715 | 431945304483c2bb1645e8a5d8bc2ea2fac01ed0 | /planetstack/hpc_wizard/query.py | 874022e4231596f7a2deed6770992b4a5f12d3b7 | [
"Apache-2.0"
] | permissive | bits3rpent/xos | 42719ecc3305f0095e47f9479cce81b6e62ae56d | d4f4eb5f78180b49c822bb93669c9ee8d7a3d0f6 | refs/heads/master | 2021-01-24T03:56:56.587180 | 2015-02-11T18:33:30 | 2015-02-11T18:33:30 | 30,662,846 | 0 | 1 | null | 2015-02-11T18:32:18 | 2015-02-11T18:32:18 | null | UTF-8 | Python | false | false | 9,372 | py | import re
import base64
import requests
import urllib
import json
import httplib2
import threading
import os
import time
import traceback
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import run_flow,run
from bigquery_config import BIGQUERY_SECRETS_FN, BIGQUERY_CREDENTIALS_FN
"""
yum -y install python-httplib2
easy_install python_gflags
easy_install google_api_python_client
"""
PROJECT_NUMBER = '549187599759'
try:
FLOW = flow_from_clientsecrets(BIGQUERY_SECRETS_FN,
scope='https://www.googleapis.com/auth/bigquery')
except:
print "exception while initializing bigquery flow"
traceback.print_exc()
FLOW = None
MINUTE_MS = 60*1000
HOUR_MS = 60*60*1000
class HpcQuery:
def __init__(self):
self.mapping = json.loads(self.fetch_mapping(table="demoevents"))
self.reverse_mapping = {v:k for k, v in self.mapping.items()}
def fetch_mapping(self, m=0, table="events"):
req = 'http://cloud-scrutiny.appspot.com/command?action=get_allocations&multiplexer=%d&table=%s'% (m,table)
resp = requests.get(req)
if (resp.status_code==200):
return resp.text
else:
raise Exception('Error accessing register allocations: %d'%resp.status_code)
def run_query_old(self, query):
req = 'http://cloud-scrutiny.appspot.com/command?action=send_query&q=%s' % urllib.quote(query)
resp = requests.get(req)
if (resp.status_code==200):
return resp.text
else:
raise Exception('Error running query: %d'%resp.status_code)
return resp
def run_query(self, query):
storage = Storage(BIGQUERY_CREDENTIALS_FN)
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = run(FLOW, storage)
http = httplib2.Http()
http = credentials.authorize(http)
service = build('bigquery', 'v2', http=http)
body = {"query": query}
response = service.jobs().query(projectId=PROJECT_NUMBER, body=body).execute()
fieldNames = []
for field in response["schema"]["fields"]:
fieldNames.append(field["name"])
result = []
if "rows" in response:
for row in response["rows"]:
this_result = {}
for (i,column) in enumerate(row["f"]):
this_result[self.reverse_mapping.get(fieldNames[i],fieldNames[i])] = column["v"]
result.append(this_result)
return result
def remap(self, match):
token = match.group()[1:]
if token in self.mapping:
return self.mapping[token]
else:
raise Exception('unknown token %s' % token)
def get_usage(self, cp=None, hostname=None, site=None, slice=None, timeStart=-HOUR_MS, timeStop=-1, groupBy=["%hostname", "%cp"]):
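        # Build the WHERE clause from the given filters, then substitute the
        # %tokens with real column names (see remap) before running the query.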
where = []
if slice is not None:
where.append("%slice='" + slice + "'")
if cp is not None:
where.append("%cp='" + cp + "'")
if hostname is not None:
where.append("%hostname='" + hostname + "'")
if site is not None:
where.append("%hostname contains " + site)
where.append("%bytes_sent>0")
where = "WHERE " + " AND ".join(where)
if timeStart is not None:
tableName = "[vicci.demoevents@%d-%d]" % (timeStart,timeStop)
else:
tableName = "[vicci.demoevents]"
query = "SELECT %hostname,%cp,sum(%bytes_sent) as sum_bytes_sent,sum(%bytes_hit) as sum_bytes_hit, AVG(%bandwidth) as avg_bandwidth," + \
" MAX(TIMESTAMP_TO_MSEC(time))-MIN(TIMESTAMP_TO_MSEC(time)) as time_delta FROM " + \
tableName + " " + where
if groupBy:
query = query + " GROUP BY " + ",".join(groupBy)
p = re.compile('%[a-zA-z_]*')
query = p.sub(self.remap, query)
rows = self.run_query(query)
for row in rows:
row["sum_bytes_sent"] = int(row.get("sum_bytes_sent",0))
row["sum_bytes_hit"] = int(row.get("sum_bytes_hit",0))
row["avg_bandwidth"] = int(float(row.get("avg_bandwidth",0)))
row["time_delta"] = float(row.get("time_delta",0.0))/1000.0
elapsed = (timeStop-timeStart)/1000
KBps = int(row.get("sum_bytes_sent",0)) / elapsed / 1024
row["KBps"] = KBps
return rows
def sites_from_usage(self, rows, nodes_to_sites={}):
sites = {}
for row in rows:
hostname = row["hostname"]
if hostname in nodes_to_sites:
site_name = nodes_to_sites[hostname]
else:
parts = hostname.split(".")
if len(parts)<=2:
continue
site_name = parts[1]
if not (site_name in sites):
row = row.copy()
row["site"] = site_name
row["max_avg_bandwidth"] = row["avg_bandwidth"]
# sites table doesn't care about hostnames or avg_bandwidth
del row["hostname"]
del row["avg_bandwidth"]
sites[site_name] = row
else:
site_row = sites[site_name]
site_row["sum_bytes_sent"] = site_row["sum_bytes_sent"] + row["sum_bytes_sent"]
site_row["sum_bytes_hit"] = site_row["sum_bytes_hit"] + row["sum_bytes_hit"]
site_row["max_avg_bandwidth"] = max(site_row["max_avg_bandwidth"], row["avg_bandwidth"])
site_row["time_delta"] = max(site_row["time_delta"], row["time_delta"])
return sites.values()
def get_usage_sites(self, cp=None, slice=None, timeStart=-HOUR_MS, timeStop=-1):
rows = self.get_usage(cp=cp, slice=slice, timeStart=timeStart, timeStop=timeStop)
return self.sites_from_usage(rows)
def dump_table(self, rows, keys=None):
if not keys:
keys = rows[0].keys()
lens = {}
for key in keys:
lens[key] = len(key)
for row in rows:
for key in keys:
thislen = len(str(row.get(key,"")))
lens[key] = max(lens.get(key,0), thislen)
for key in keys:
print "%*s" % (lens[key], key),
print
for row in rows:
for key in keys:
print "%*s" % (lens[key], str(row.get(key,""))),
print
class HpcQueryThread(HpcQuery, threading.Thread):
def __init__(self, interval=30, slice=None, timeStart=-HOUR_MS, cp=None, nodes_to_sites={}):
threading.Thread.__init__(self)
HpcQuery.__init__(self)
self.daemon = True
self.interval = interval
self.timeStart = timeStart
self.nodes_to_sites = nodes_to_sites
self.slice = slice
self.cp = cp
self.data_version = 0
self.please_die = False
self.update_time = time.time()
self.start()
def is_stalled(self):
if time.time()-self.update_time > 300:
return True
else:
return False
def run(self):
while not self.please_die:
try:
self.rows = self.get_usage(timeStart=self.timeStart, cp=self.cp, slice=self.slice)
self.site_rows = self.sites_from_usage(self.rows, self.nodes_to_sites)
self.update_time = time.time()
self.new_data()
self.data_version += 1
except:
file("/tmp/hpcquery_fail.txt","a").write(traceback.format_exc() + "\n")
time.sleep(self.interval)
def new_data(self):
pass
class HpcDumpThread(HpcQueryThread):
def __init__(self, interval=30, slice=None, timeStart=-HOUR_MS, cp=None):
HpcQueryThread.__init__(self, interval, slice, timeStart, cp)
def new_data(self):
os.system("clear")
print "update %d, data for last %d minutes" % (self.data_version, -self.timeStart/1000/60)
print
self.dump_table(self.rows, ["hostname", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
self.dump_table(self.site_rows, ["site", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
def main_old():
hq = HpcQuery()
# print hq.mapping
print "5 minute"
hq.dump_table(hq.get_usage(timeStart=-MINUTE_MS*5), ["hostname", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
hq.dump_table(hq.get_usage_sites(timeStart=-MINUTE_MS*5), ["site", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
print "1 hour"
hq.dump_table(hq.get_usage(), ["hostname", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
hq.dump_table(hq.get_usage_sites(), ["site", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
print "24 hours"
hq.dump_table(hq.get_usage(timeStart=-HOUR_MS*24), ["hostname", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
hq.dump_table(hq.get_usage_sites(timeStart=-HOUR_MS*24), ["site", "cp", "sum_bytes_sent", "sum_bytes_hit", "KBps"])
print
def main():
hd = HpcDumpThread()
while True:
time.sleep(30)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
7e8beeca2a34635f4596d185c06264add736b251 | e7b7505c084e2c2608cbda472bc193d4a0153248 | /LeetcodeNew/python/Q_17_VendingMachine.py | be91ce07b43dce97430308e53ad4bab5ac50180e | [] | no_license | Taoge123/OptimizedLeetcode | 8e5c1cd07904dfce1248bc3e3f960d2f48057a5d | 3e50f6a936b98ad75c47d7c1719e69163c648235 | refs/heads/master | 2023-02-27T21:13:40.450089 | 2023-02-07T04:11:09 | 2023-02-07T04:11:09 | 170,044,224 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 4,591 | py | from abc import abstractmethod, ABC
from enum import Enum
from collections import Counter
class ProductType(Enum):
COKE = 1
class State(ABC):
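    # State pattern: each concrete state decides how selection, payment and
    # transaction requests are handled for the owning vending machine.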
    def __init__(self, vendingMachine):
        # Single leading underscore so subclasses can reach the machine
        # (a double underscore would be name-mangled per class).
        self._vendingMachine = vendingMachine
@abstractmethod
    def selectItem(self, item: 'Product'):   # quoted: Product is defined later in the file
pass
@abstractmethod
def insertPayment(self, value: int):
pass
@abstractmethod
def executeTransaction(self):
pass
@abstractmethod
def cancelTransaction(self):
pass
@abstractmethod
def name(self):
pass
class NoSelectionState(State):
    def selectItem(self, item: 'Product'):
        self._vendingMachine.addSelectedItem(item)
        self._vendingMachine.changeToHasSelectionState()
def insertPayment(self, value: int):
raise Exception('No selected item.')
def executeTransaction(self):
raise Exception('No selected item.')
def cancelTransaction(self):
return 0
def name(self):
return 'No selection'
class HasSelectionState(State):
    def selectItem(self, item: 'Product'):
raise Exception('Has selected item.')
    def insertPayment(self, value: int):
        self._vendingMachine.addMoney(value)
        self._vendingMachine.changeToInsertPaymentState()
def executeTransaction(self):
raise Exception('No payment made')
def cancelTransaction(self):
        self._vendingMachine.changeToNoSelectionState()
        self._vendingMachine.cancelSelectedItem()
return 0
def name(self):
return 'Has selection'
class InsertMoneyState(State):
    def selectItem(self, item: 'Product'):
raise Exception('Has selected item.')
def insertPayment(self, value: int):
        self._vendingMachine.addMoney(value)
def executeTransaction(self):
        diff = self._vendingMachine.getMoney() - self._vendingMachine.getPrice()
        if diff >= 0:
            self._vendingMachine.setSelectedItem(None)
            self._vendingMachine.changeToNoSelectionState()
else:
raise Exception('Not enough')
def cancelTransaction(self):
        money = self._vendingMachine.getMoney()
        self._vendingMachine.changeToNoSelectionState()
        self._vendingMachine.cancelSelectedItem()
return money
def name(self):
        return 'Insert money'
class Product(ABC):
def __init__(self, name, price):
self.__name = name
self.__price = price
def getName(self):
return self.__name
def getPrice(self):
return self.__price
def updatePrice(self, price):
self.__price = price
class Coke(Product):
def __init__(self, price):
super(Coke, self).__init__(ProductType.COKE, price)
class VendingMachine:
def __init__(self):
self.__inventory = Counter()
self.__selectedItem = None
self.__noSelectionState = NoSelectionState(self)
self.__hasSelectionState = HasSelectionState(self)
        self.__insertPaymentState = InsertMoneyState(self)
self.__state = self.__noSelectionState
self.__currentMoney = 0
def getMoney(self):
return self.__currentMoney
def addMoney(self, value):
self.__currentMoney += value
def clearMoney(self):
        self.__currentMoney = 0
def addInventory(self, product, quantity):
self.__inventory[product] += quantity
def getPrice(self):
if self.__selectedItem is None:
return 0
return self.__selectedItem.getPrice()
def setSelectedItem(self, item):
self.__selectedItem = item
def addSelectedItem(self, item):
if self.__inventory[item] == 0:
raise Exception('')
self.__inventory[item] -= 1
self.__selectedItem = item
def cancelSelectedItem(self):
item = self.__selectedItem
self.__inventory[item] += 1
self.__selectedItem = None
self.__currentMoney = 0
def changeToNoSelectionState(self):
self.__state = self.__noSelectionState
def changeToHasSelectionState(self):
self.__state = self.__hasSelectionState
def changeToInsertPaymentState(self):
self.__state = self.__insertPaymentState
def selectItem(self, item: Product):
self.__state.selectItem(item)
def insertPayment(self, value: int):
self.__state.insertPayment(value)
def executeTransaction(self):
self.__state.executeTransaction()
def cancelTransaction(self):
self.__state.cancelTransaction()
| [
"[email protected]"
] | |
1ac4ce437d608a5a1b7a11050359959e8ed317f5 | 4881b8c2c038d449485598c5761d4a3ca098792c | /LeetcodeNew/python/LC_793.py | ae0d5246551452e2c7ebeb32dad1b1c5ba7528e0 | [] | no_license | derrickweiruluo/OptimizedLeetcode-1 | 2ae3b177e7bd39ceb45993f4de9cc10e40fadc5b | a4d8b54d3004866fd304e732707eef4401dfdb0a | refs/heads/master | 2023-08-29T03:46:44.177836 | 2021-10-10T19:22:50 | 2021-10-10T19:22:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 662 | py |
class Solution:
def preimageSizeFZF(self, K: int) -> int:
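        # Binary search for an n whose factorial has exactly K trailing zeros.
        # The zero count is non-decreasing and jumps at multiples of 5, so the
        # answer is always 5 (K is reachable) or 0 (K is skipped).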
left = 0
right = 10 ** 5 * (K + 1)
while left < right:
mid = left + (right - left) // 2
count = self.cal2(mid)
if count == K:
return 5
elif count < K:
left = mid + 1
else:
right = mid
return 0
def cal(self, num):
res = 0
while num > 0:
res += num // 5
num //= 5
return res
def cal2(self, num):
if num == 0:
return 0
else:
return num // 5 + self.cal2(num // 5)
| [
"[email protected]"
] | |
7b95fcb647de0f245df18616325ca3e65b5ff326 | ac608801a67543c800d3534a961359592019f1ed | /My stuff/Listss/Main.py | b48f4977635a5e0a60f821bd7762a0e277723384 | [] | no_license | Codingmace/PiHole-2020 | 4834dbf605aa3469141ca0e4895b89c39a61bf43 | 42cab0cc3ba1803a18fec6ad6e0b37d0de51d696 | refs/heads/main | 2023-03-20T22:28:52.114619 | 2021-03-18T16:37:18 | 2021-03-18T16:37:18 | 329,333,161 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,694 | py | import dns.resolver
import os.path
debug = True
def get_records(domain):
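    # Try each DNS record type in turn; return the first type that resolves, or "NA" if none do.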
ids = ['A', 'AAAA', 'SOA', 'CNAME', 'MX', 'NS', 'PTR', 'CERT', 'SRV', 'TXT']
for a in ids:
try:
answers = dns.resolver.query(domain, a)
for rdata in answers:
return a
except Exception as e:
print(e) # or pass
return "NA"
def validation(filename):
a = open(filename, "r") # The current file
b = open("valid.txt", "w") # For the shorter valid types
if (debug):
c = open("invalid.txt", "w") # For the shorter invalid types
lines = a.readlines()
lines.sort()
a.close()
for line in lines:
        ans = get_records(line.strip())
if (!(ans == "NA")):
b.write(line)
elif (debug):
c.write(line)
b.close()
if (debug):
c.close()
def removeDuplicates(filename):
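    # Compares adjacent lines only, so the input file must already be sorted.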
a = open(filename, "r")
b = open("unique.txt", "w") # The new shorter list
if(debug):
c = open("dipli.txt", "w") # Duplicate ones that are detected
lines = a.readlines()
a.close()
fir = 0 # First Value
sec = 1 # Comparing value
lim = len(lines) # Limit AKA number of lines
while(sec < lim):
if(lines[fir] == lines[sec]): # Same keep going
if(debug):
c.write(lines[sec])
sec = sec + 1
else: # Not the same
b.write(lines[fir])
fir = sec
sec = fir + 1
b.write(lines[len(lines)-1]) # for the last element
b.close()
if(debug):
c.close()
""" CAN'T GET THIS FUCKING THING WORKING """
def subFiles(foldPath):
# walk the folder for files
""" GOING TO THE DEFAULT OF FOLDER SEPERATING BECAUSE I CANT GET THE OTHER THING TO WORK"""
fileList = []
foldPath = "Seperating\\"
    folderpath = os.listdir(foldPath)
    for f in folderpath:
        full = os.path.join(foldPath, f)
        if (os.path.isfile(full)):
            fileList.append(full)
    print(fileList)
    return fileList
def merger(files):
    # Merge the files together into one ("valid.txt", the merged file name used below)
    with open("valid.txt", "w") as out:
        for name in files:
            with open(name, "r") as f:
                out.write(f.read())
def mergeFiles(foldername):
# Walk the path of the files
fileList = subFiles(foldername)
# Merge the files together
merger(fileList)
filename = "valid.txt" # The merged file name
def main():
print("Let us start this out with a few questions")
print("What do we want to do. Keep in mind 3 can also do steps 1 and 2")
print("1. Validate List\n2. Merge List, Sort, and remove duplicates")
print("3. Split up the list\n4. Crawl for new list")
    selection = int(input())
if(selection == 1):
print("Awesome you are going easy on me. All I need you to do is enter the path of the file and we will be on our way")
filepath = input()
validation(filepath)
print("Ok that is it. I am done")
    if(selection == 2):
print("Ok, a little bit of work but still easy.")
print("I need you to now input the folder path")
foldPath = input()
newFilename = "mergedList.txt"
mergeFiles(foldPath)
doValid = input("Just making sure, do you want to validate (Yes/No): ")
if (doValid == "Yes"):
print("Ok validating")
validation("valid.txt")
else:
print("Awesome, no validation")
print("Removing Duplicates")
removeDuplicates("valid.txt")
print("Ok we are all done. The requested file is named unique.txt")
if (selection == 3):
print("Picky one are we. I want to make sure that we are not going to waste time")
firstStep = input("Do you want to merge any files (Yes/No): ")
currentFile = "" # Name of the file reading from
if (firstStep == "Yes"):
foldPath =input("Enter the folder path: ")
mergeFiles(foldPath)
currentFile = "valid.txt"
else:
filepath =input("Enter the file path: ")
currentFile = filepath
removeDuplicates(currentFile)
currentFile = "unique.txt"
secondStep = input("Do you want to validate the entries (Yes/No): ")
if (secondStep == "Yes"):
print("Dang it you are making me do so much work")
validation(currentFile)
else:
print("Ok. That will make things go quicker")
print("Now for the seperation. I bet you don't know what you want to seperate by.")
print("I will make it easy and give you some options")
foldPath = input("Enter in the folder with the files")
if (selection == 4):
print("Oh my, you want the hardest thing. I haven't programmed that far so I will let you answer some more questions.")
firstStep = input("Do you want to merge any files (Yes/No): ")
currentFile = "" # Name of the file reading from
if (firstStep == "Yes"):
foldPath =input("Enter the folder path: ")
mergeFiles(foldPath)
currentFile = "valid.txt"
else:
filepath =input("Enter the file path: ")
currentFile = filepath
removeDuplicates(currentFile)
currentFile = "unique.txt"
secondStep = input("Do you want to validate the entries (Yes/No): ")
if (secondStep == "Yes"):
print("Dang it you are making me do so much work")
validation(currentFile)
else:
print("Ok. That will make things go quicker")
print("Sucks if you want to seperate the files. That is your punishment for choosing an advanced thing.")
print("Rerun the program if you want to do Selection 3")
print("Goodbye")
main()
| [
"[email protected]"
] | |
2688aa4039d5144652063095a7e8cdde6888dcbe | 4e229e075a3f5e71a33525981fa51fd7878c9715 | /sacrerouge/metrics/sumqe.py | 0286a60a36075834184076678d59a089418e5a63 | [] | no_license | CogComp/content-analysis-experiments | 57d68441272c39b687656976d20eddd817c28250 | f6abd72029b6853627ddd191979f105a9385eed7 | refs/heads/master | 2023-06-27T00:29:34.115264 | 2021-08-04T14:36:17 | 2021-08-04T14:36:17 | 305,768,372 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,996 | py | import argparse
import json
import os
from overrides import overrides
from subprocess import Popen, PIPE
from typing import List
from sacrerouge.commands import Subcommand
from sacrerouge.common import DATA_ROOT, TemporaryDirectory
from sacrerouge.io import JsonlWriter
from sacrerouge.data import MetricsDict
from sacrerouge.data.fields import SummaryField
from sacrerouge.data.types import SummaryType
from sacrerouge.metrics import Metric
@Metric.register('sum-qe')
class SumQE(Metric):
def __init__(self,
model_file: str = f'{DATA_ROOT}/metrics/SumQE/models/multitask_5-duc2006_duc2007.npy',
sum_qe_root: str = f'{DATA_ROOT}/metrics/SumQE',
environment_name: str = None,
verbose: bool = False):
super().__init__([])
self.model_file = os.path.abspath(model_file)
self.sum_qe_root = sum_qe_root
self.environment_name = environment_name
self.verbose = verbose
def _flatten_summary(self, summary: SummaryType) -> str:
if isinstance(summary, list):
return ' '.join(summary)
return summary
def _run(self, summaries_list: List[List[SummaryType]]) -> List[List[MetricsDict]]:
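        # Write the flattened summaries to a temporary jsonl file, shell out to
        # the SumQE predictor, then read back the five quality scores (Q1-Q5).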
with TemporaryDirectory() as temp_dir:
summaries_file = f'{temp_dir}/summaries.jsonl'
predictions_file = f'{temp_dir}/predictions.json'
# Save all of the summaries to a file
with JsonlWriter(summaries_file) as out:
for summaries in summaries_list:
for summary in summaries:
out.write({'summary': self._flatten_summary(summary)})
commands = [f'cd {self.sum_qe_root}']
if self.environment_name:
commands += [f'source activate {self.environment_name}']
commands += [
' '.join([
'python', '-m', 'src.BERT_experiments.predict',
summaries_file,
self.model_file,
predictions_file
])
]
redirect = None if self.verbose else PIPE
process = Popen(' && '.join(commands), stdout=redirect, stderr=redirect, shell=True)
stdout, stderr = process.communicate()
predictions = json.loads(open(predictions_file, 'r').read())
index = 0
metrics_lists = []
for summaries in summaries_list:
metrics_lists.append([])
for summary in summaries:
preds = predictions[index]
metrics_lists[-1].append(MetricsDict({
'SumQE': {
'Q1': preds[0],
'Q2': preds[1],
'Q3': preds[2],
'Q4': preds[3],
'Q5': preds[4]
}
}))
index += 1
return metrics_lists
def score_multi_all(self, summaries_list: List[List[SummaryField]]) -> List[List[MetricsDict]]:
# Just take the summaries themselves, not the fields
summaries_list = [[field.summary for field in fields] for fields in summaries_list]
return self._run(summaries_list)
class SumQESetupSubcommand(Subcommand):
@overrides
def add_subparser(self, parser: argparse._SubParsersAction):
self.parser = parser.add_parser('sum-qe')
self.parser.add_argument('--download-2005-2006-model', action='store_true')
self.parser.add_argument('--download-2005-2007-model', action='store_true')
self.parser.add_argument('--download-2006-2007-model', action='store_true')
self.parser.set_defaults(subfunc=self.run)
@overrides
def run(self, args):
commands = [
f'mkdir -p {DATA_ROOT}/metrics',
f'cd {DATA_ROOT}/metrics',
f'git clone https://github.com/danieldeutsch/SumQE',
f'mkdir -p SumQE/models'
]
if args.download_2005_2006_model:
commands.append('wget https://danieldeutsch.s3.amazonaws.com/sacrerouge/metrics/SumQE/models/multitask_5-duc2005_duc2006.npy -O SumQE/models/multitask_5-duc2005_duc2006.npy')
if args.download_2005_2007_model:
commands.append('wget https://danieldeutsch.s3.amazonaws.com/sacrerouge/metrics/SumQE/models/multitask_5-duc2005_duc2007.npy -O SumQE/models/multitask_5-duc2005_duc2007.npy')
if args.download_2006_2007_model:
commands.append('wget https://danieldeutsch.s3.amazonaws.com/sacrerouge/metrics/SumQE/models/multitask_5-duc2006_duc2007.npy -O SumQE/models/multitask_5-duc2006_duc2007.npy')
command = ' && '.join(commands)
process = Popen(command, shell=True)
process.communicate()
if process.returncode == 0:
print('SumQE setup success')
else:
print('SumQE setup failure')
| [
"[email protected]"
] | |
436bd3b89520e5808d16be5ff1543ca6a878491e | 79bf797423e4c591e33b199ae578fff328c811cd | /practico_02/ejercicio_04.py | 9503962a31047e707994cd7f7088dd3dae758180 | [] | no_license | DanielDruetta/frro-soporte-2019-25 | 6512c7c7ebaca8429883a09dbaac5c8b4e49bf0c | 5244116177a67023694cfd6966ff35d22d31c284 | refs/heads/master | 2020-04-29T05:15:10.050444 | 2019-11-01T18:46:54 | 2019-11-01T18:46:54 | 175,875,936 | 0 | 0 | null | 2019-08-16T18:44:23 | 2019-03-15T18:58:00 | Python | UTF-8 | Python | false | false | 1,291 | py | # Escribir una clase Estudiante, que herede de Persona, y que agregue las siguientes condiciones:
# Atributos:
# - nombre de la carrera.
# - año de ingreso a la misma.
# - cantidad de materias de la carrera.
# - cantidad de materias aprobadas.
# Métodos:
# - avance(): indica que porcentaje de la carrera tiene aprobada.
# - edad_ingreso(): indica que edad tenia al ingresar a la carrera (basándose en el año actual).
import time
from practico_02.ejercicio_03 import Persona
class Estudiante(Persona):
def __init__(self,nombre,edad,sexo,peso,altura,carrera,anioIngreso,cantidadMaterias,cantidadMateriasAprobadas):
Persona.__init__(self,nombre,edad,sexo,peso,altura)
self.carrera=carrera
self.anioIngreso=anioIngreso
self.cantidadMaterias=cantidadMaterias
self.cantidadMateriasAprobadas=cantidadMateriasAprobadas
def avance(self):
porcentaje=('{0:.2f}'.format((self.cantidadMateriasAprobadas/self.cantidadMaterias)*100))
return porcentaje
def edad_ingreso(self):
edadIng=(self.edad-(int(time.strftime('%Y'))-self.anioIngreso))
return edadIng
estudiante=Estudiante('Agustin Yurescia',22,'H',69.60,1.75,'ISI',2015,41,27)
assert estudiante.edad_ingreso() == 18
assert estudiante.avance() == '65.85'
| [
"[email protected]"
] | |
65ce00e4875680bca80aae59bd314f775465f09e | 5430cd3b483c858567f1687b0bed43eccd4d0fe0 | /gpregression_gpy/main.py | 4981085b19c277a8043ea4726eb72657fbb8eb5f | [] | no_license | roholazandie/gaussian_process_tutorial | a1b46c712d31545ee428db04a511150933902de8 | f7efbaa534f6834447cb77a97e5162efd92f9a1a | refs/heads/master | 2021-02-14T15:40:12.649801 | 2020-05-21T16:17:37 | 2020-05-21T16:17:37 | 244,816,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | import numpy as np
from GPy.kern import Matern32, GridRBF, RBF
from GPy.models import GPRegression
import pods
import matplotlib.pyplot as plt
# read data
from visualization import plot_gp
data = pods.datasets.olympic_marathon_men()
x_train = data["X"]
y_train = data["Y"]
# choose a kernel
#kernel = Matern32(input_dim=1, variance=2.0)
#kernel = GridRBF(input_dim=1)
#kernel = RBF(input_dim=1, variance=2.0)
# GP regression; optimize the parameters by maximizing the log-likelihood
gp_regression = GPRegression(x_train, y_train)
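# With no kernel argument, GPy falls back to its default RBF kernel.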
#gp_regression.kern.lengthscale = 500
#gp_regression.likelihood.variance = 0.001
print("loglikelihood: ", gp_regression.log_likelihood())
gp_regression.optimize()
print("loglikelihood: ", gp_regression.log_likelihood())
# predict new unseen samples
x_test = np.linspace(1870, 2030, 200)[:, np.newaxis]
yt_mean, yt_var = gp_regression.predict(x_test)
yt_sd = np.sqrt(yt_var)
# draw some samples from the posterior
samples = gp_regression.posterior_samples(x_test, size=1).squeeze(1)
# plot
plot_gp(yt_mean, yt_var, x_test, X_train=x_train, Y_train=y_train, samples=samples)
| [
"[email protected]"
] | |
c02214574892daaeca723ea71aff7f4af91b48a2 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/d67af04c45b8cfc4e7e19cf2d2af2f980db88e7d-<_get_body>-fix.py | 636c93a950b87104066d3e66dc628ca3297088b7 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py | def _get_body(self, commands, output, reqid=None):
'Create a valid eAPI JSON-RPC request message\n '
if (output not in EAPI_FORMATS):
msg = ('invalid format, received %s, expected one of %s' % (output, ', '.join(EAPI_FORMATS)))
self._error(msg=msg)
params = dict(version=1, cmds=commands, format=output)
return dict(jsonrpc='2.0', id=reqid, method='runCmds', params=params) | [
"[email protected]"
] | |
04fb8959fbbd684932517689f4c8569c493a7ca6 | 1c6a74f3b2b9ce4be60eb1714336d16fcdc2bead | /docs/conf.py | 567253684a12cb3411e175487faf87e0caeda112 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jtratner/python-future | d190fc299a621b1fe8483ad671eb372ed2546ed1 | 04882be6e6c34f47001893d496740d243b4d80ba | refs/heads/master | 2021-01-21T08:50:38.125582 | 2013-12-01T13:15:50 | 2013-12-01T13:15:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,679 | py | # -*- coding: utf-8 -*-
#
# Python-Future documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 22 07:02:03 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import absolute_import, print_function
import sys, os
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Was: sys.path.append(os.path.dirname(os.path.abspath(__file__)))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Python-Future'
copyright = u'2013, Python Charmers Pty Ltd, Australia'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# if 'dev' in release:
# release = release.split('dev')[0] + 'dev'
release = '0.10.0'
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx' # 'futureext.FutureStyle'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/python-future-logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['sidebarlogo.html', 'sidebarintro.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebarlogo.html', 'localtoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Futuredoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '12pt',
# Additional stuff for the LaTeX preamble.
#'preamble': r'''
#\usepackage{futurestyle}
#''',
# 'fontpkg': r'\usepackage{mathpazo}',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Python-Future.tex', u'Python-Future Documentation',
u'Python Charmers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
latex_use_parts = True
# latex_additional_files = ['futurestyle.sty', 'logo.pdf']
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python-future', u'Python-Future Documentation',
[u'Python Charmers'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Python-Future', u'Python-Future Documentation',
u'Python Charmers', 'Python-Future', 'Easy support for Python 2 and 3',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| [
"[email protected]"
] | |
c79b99caca7a967e2ae90f0c8801dfaa8b685e62 | 257cd39cce602506f6c892584f79180f96ce8729 | /artevenue/migrations/0059_remove_amazon_data_parent_key.py | 24664ad5899666727752f7395adff6e692cd06a5 | [] | no_license | santhoshanandhan/artevenue | 7cbfac2e4ef8f03f44c085a8ce3823504a8ecc7e | 0ce86149a0b706cb2ffa672b7b066e3bfeeef74c | refs/heads/master | 2022-12-26T12:34:55.685330 | 2020-10-13T07:04:28 | 2020-10-13T07:04:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | # Generated by Django 2.2.4 on 2019-10-21 05:42
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('artevenue', '0058_auto_20191021_1101'),
]
operations = [
migrations.RemoveField(
model_name='amazon_data',
name='parent_key',
),
]
| [
"[email protected]"
] | |
9164964ff22bbcd7e054324032425a0ebdfb679c | aaaf3b641bbc03180646c427bbfc52510357e55e | /models_inheritance/wsgi.py | dc5064ebe45456dddd93e47d2a99208ebd5499a4 | [] | no_license | deepanshu-jain1999/inheritance_in_model | 5b47677575b1c8c9d6168b1ba1979a231ed78ba2 | 2d3080a5affa561aacc02081252d6e8ebb90d7dc | refs/heads/master | 2021-04-12T09:52:47.816940 | 2018-03-23T20:02:57 | 2018-03-23T20:02:57 | 126,522,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | """
WSGI config for models_inheritance project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "models_inheritance.settings")
application = get_wsgi_application()
| [
"[email protected]"
] |